Compare commits

..

11 Commits

Author SHA1 Message Date
CI Bot
5f0311ffa5 model : ajoute beat_atoms (V0.10 atomes — voir video_analysis/adr/0018)
Table pour les atomes de mouvement par-beat (1 enregistrement par beat × danseur).
Modèle hybride à 3 niveaux : continu (foot_height/speed/stability) + soft
distribution (state_dist_json sommant à 1 sur planted/touch/lifted/transitioning)
+ argmax (state + confidence).

Implémenté dans pipeline/atoms.py côté video_analysis (commit 654320a).

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-10 01:24:05 +02:00
Gabriel Radureau
73b2ccb917 added protection agains deletion 2025-10-21 19:14:17 +02:00
Gabriel Radureau
30e4a35362 set label logic default to 'AND' 2025-10-21 19:06:25 +02:00
Gabriel Radureau
225a1911c8 suppression de vidéos 2025-10-21 18:52:39 +02:00
Gabriel Radureau
fbe3c01de7 Give alias and random suffix for whatsapp videos without created timestamp 2025-10-21 17:35:01 +02:00
Gabriel Radureau
78313ffbef correct application of playlists rules 2025-10-16 17:18:58 +02:00
Gabriel Radureau
d2e2028610 program2 exports the playlists 2025-10-13 17:31:57 +02:00
Gabriel Radureau
65d63ec828 playlist edition 2025-10-13 16:53:15 +02:00
Gabriel Radureau
0fa5a30809 infinite scroll 2025-10-13 15:05:54 +02:00
Gabriel Radureau
cc9fb9cede label editor 2025-10-12 15:37:08 +02:00
Gabriel Radureau
9cb9790974 first streamlit poc 2025-10-12 14:59:40 +02:00
32 changed files with 2500 additions and 35 deletions

32
.gitignore vendored
View File

@@ -1 +1,31 @@
.DS_Store
.DS_Store
.venv
# --- Environnement Python ---
.venv/
__pycache__/
*.pyc
*.pyo
*.pyd
# --- Fichiers Streamlit temporaires ---
.streamlit/
.cache/
*/.streamlit/
# --- Fichiers SQLite / temporaires ---
*.sqlite
*.db
*.db-journal
*.db-shm
*.db-wal
# --- Logs et outputs ---
*.log
*.tmp
*.bak
.DS_Store
# --- Fichiers de l'application ---
app/__pycache__/
app/.pytest_cache/

View File

@@ -28,6 +28,9 @@ un programme se déclenche pour synchroniser son contenu avec le dossier de sort
2. Une carte SD nommée SD_DANSE formatée en `MS-DOS (FAT32)` pour un meilleur support des projecteurs
3. les programmes parallel, exiftool et ffmpeg
`brew install parallel exiftool ffmpeg`
4. `uv venv --prompt DanceVideos --allow-existing .venv -p 3.12`
5. `source .venv/bin/activate && uv pip install -r app/requirements.txt && uv tool install streamlit`
6. `streamlit run app/app.py`
## [Surveillance des répertoires](./doc/01.SurveillerRepertoire.md)
@@ -79,4 +82,9 @@ flowchart TB
L_raw_pgrm1_0@{ animation: slow }
L_pgrm1_videos_0@{ animation: slow }
L_videos_playlists_0@{ animation: fast }
```
```
# TODO
- [ ] Pouvoir supprimer une vidéo / regénérer une vidéo

37
app/app.py Normal file
View File

@@ -0,0 +1,37 @@
# app.py
import streamlit as st
from views.label_views import video_filter_sidebar, video_list_view
from playlists import playlist_page
import argparse

# --- Parse CLI arguments before any Streamlit UI is created ---
# parse_known_args tolerates Streamlit's own flags (e.g. --server.port);
# add_help=False avoids colliding with Streamlit's -h handling.
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument("--unlabeled", action="store_true", help="Afficher uniquement les vidéos sans labels")
args, _ = parser.parse_known_args()

# ==========================
# Page configuration — set_page_config must be the first Streamlit call.
# ==========================
st.set_page_config(page_title="Dance Video Manager", layout="wide")
st.sidebar.title("💃 Menu principal")
page = st.sidebar.radio(
    "Navigation",
    ["Vidéos", "Playlists"],
    key="nav_main"
)

# ==========================
# PAGE: VIDEOS — filter sidebar plus the filtered video list.
# ==========================
if page == "Vidéos":
    st.title("🎬 Gestion et annotation des vidéos")
    filters = video_filter_sidebar(unlabeled=args.unlabeled)
    video_list_view(filters)

# ==========================
# PAGE: PLAYLISTS — delegated to the playlists sub-package.
# ==========================
elif page == "Playlists":
    playlist_page.main()

9
app/cache/video_summary.py vendored Normal file
View File

@@ -0,0 +1,9 @@
# cache/video_summary.py
import db
from playlists import playlist_db
def rebuild_video_summary():
    """Rebuild the materialized copy of the ``video_summary`` view.

    Fix: the previous implementation ran ``DELETE`` first and then
    ``CREATE TABLE IF NOT EXISTS ... AS SELECT``. Once the table existed, the
    CREATE was a no-op, so every rebuild left the table *empty* — and the very
    first run failed because DELETE targeted a table that did not exist yet.
    Dropping and recreating the table yields a fresh snapshot in all cases.
    """
    with db.get_conn() as conn:
        conn.execute("DROP TABLE IF EXISTS video_summary_materialized;")
        conn.execute(
            "CREATE TABLE video_summary_materialized AS SELECT * FROM video_summary;"
        )
        conn.commit()

View File

View File

@@ -0,0 +1,38 @@
# controllers.py
import streamlit as st
import db
def label_widget(video, preselected=None):
    """Multiselect widget for a video's labels, with on-the-fly creation.

    Selecting the sentinel entry "Autre…" reveals a text input whose value is
    inserted as a brand-new label and swapped into the current selection.

    Args:
        video: object exposing ``.id``, ``.mp4_file_name`` and
            ``.save_labels(list)``.
        preselected: labels pre-checked on first render (defaults to []).

    Returns:
        The list of labels currently selected in the widget.
    """
    preselected = preselected or []
    # Per-video widget keys so several videos can be edited on one page.
    key_multiselect = f"labels_{video.id}"
    key_input = f"new_label_{video.id}"
    labels = db.load_labels()
    # Seed the widget state only once; afterwards Streamlit owns the key.
    if key_multiselect not in st.session_state:
        st.session_state[key_multiselect] = preselected
    current_selected = st.session_state[key_multiselect]
    # Sentinel selected -> show the free-text input for a custom label.
    if "Autre…" in current_selected:
        new_label = st.text_input("Entrer un label personnalisé", value="", key=key_input)
        if new_label.strip():
            db.create_labels([new_label.strip()])
            labels = db.load_labels()
            # Replace the sentinel with the freshly created label.
            current_selected = [l for l in current_selected if l != "Autre…"] + [new_label.strip()]
            # NOTE(review): writing to session_state for a key bound to a live
            # widget relies on Streamlit's rerun semantics — confirm on upgrade.
            st.session_state[key_multiselect] = current_selected
    selected = st.multiselect(
        "Labels",
        options=labels + ["Autre…"],
        default=st.session_state[key_multiselect],
        key=key_multiselect,
    )
    if st.button("💾 Sauvegarder labels", key=f"save_{video.id}"):
        video.save_labels(selected)
        st.success(f"{len(selected)} label(s) enregistré(s) pour {video.mp4_file_name}")
    return selected

291
app/db.py Normal file
View File

@@ -0,0 +1,291 @@
# db.py (modifié)
import sqlite3
from pathlib import Path
import pandas as pd
# Location of the application's SQLite database file.
DB_PATH = Path.home() / "Documents/.DanceVideos/db.sqlite"


def get_conn():
    """Open a SQLite connection: rows addressable by name, FKs enforced."""
    connection = sqlite3.connect(DB_PATH, timeout=30, check_same_thread=False)
    connection.row_factory = sqlite3.Row
    connection.execute("PRAGMA foreign_keys = ON;")
    return connection


def delete_video(file_name):
    """Remove the row identified by *file_name* from the videos table."""
    with get_conn() as conn:
        conn.execute("DELETE FROM videos WHERE file_name = ?", (file_name,))
        conn.commit()
def load_videos():
    """Return every video as a DataFrame, newest recording first."""
    with get_conn() as conn:
        return pd.read_sql_query("SELECT * FROM videos ORDER BY record_datetime DESC", conn)


def load_labels():
    """Return all label names, alphabetically sorted."""
    with get_conn() as conn:
        frame = pd.read_sql_query("SELECT name FROM labels ORDER BY name", conn)
    return frame["name"].tolist()


def create_labels(label_names):
    """Insert the given label names, silently skipping existing ones."""
    if not label_names:
        return
    rows = [(name,) for name in label_names]
    with get_conn() as conn:
        conn.executemany("INSERT OR IGNORE INTO labels (name) VALUES (?)", rows)
        conn.commit()


def get_label_ids(label_names):
    """Map each known label name to its id (unknown names are omitted)."""
    mapping = {}
    with get_conn() as conn:
        cur = conn.cursor()
        for name in label_names:
            cur.execute("SELECT id FROM labels WHERE name=?", (name,))
            hit = cur.fetchone()
            if hit:
                mapping[name] = hit[0]
    return mapping
def load_video_labels(file_name):
    """Return the label names attached to one video."""
    query = """
        SELECT l.name
        FROM labels l
        JOIN video_labels vl ON l.id = vl.label_id
        WHERE vl.video_file_name = ?
    """
    with get_conn() as conn:
        return [row[0] for row in conn.execute(query, (file_name,))]


def save_video_labels(file_name, label_names):
    """Replace a video's label set, creating new labels and pruning orphans."""
    if label_names is None:
        label_names = []
    create_labels(label_names)
    ids = get_label_ids(label_names)
    with get_conn() as conn:
        cur = conn.cursor()
        # Wipe the previous associations, then write the new set.
        cur.execute("DELETE FROM video_labels WHERE video_file_name = ?", (file_name,))
        cur.executemany(
            "INSERT OR REPLACE INTO video_labels (video_file_name, label_id) VALUES (?, ?)",
            [(file_name, lid) for lid in ids.values()],
        )
        # Garbage-collect labels no longer referenced by any video.
        cur.execute("""
            DELETE FROM labels
            WHERE id NOT IN (SELECT DISTINCT label_id FROM video_labels)
        """)
        conn.commit()
def update_video_difficulty(file_name, level):
    """Set the difficulty level of a single video."""
    with get_conn() as conn:
        conn.execute("UPDATE videos SET difficulty_level = ? WHERE file_name = ?", (level, file_name))
        conn.commit()


def update_video_alias(file_name, alias):
    """Set the display alias of a single video."""
    with get_conn() as conn:
        conn.execute("UPDATE videos SET alias = ? WHERE file_name = ?", (alias, file_name))
        conn.commit()


def get_unique_days():
    """Return the distinct, non-blank days of week stored in the DB."""
    sql = "SELECT DISTINCT day_of_week FROM videos WHERE day_of_week IS NOT NULL ORDER BY day_of_week"
    with get_conn() as conn:
        frame = pd.read_sql_query(sql, conn)
    return [value for value in frame["day_of_week"].dropna().tolist() if value.strip()]


def get_unique_difficulties():
    """Return the distinct, non-blank difficulty levels stored in the DB."""
    sql = "SELECT DISTINCT difficulty_level FROM videos WHERE difficulty_level IS NOT NULL ORDER BY difficulty_level"
    with get_conn() as conn:
        frame = pd.read_sql_query(sql, conn)
    return [value for value in frame["difficulty_level"].dropna().tolist() if value.strip()]


def get_unique_addresses():
    """Return the known addresses (rows containing 'unknown' are excluded)."""
    sql = "SELECT DISTINCT address FROM videos WHERE address NOT LIKE '%unknown%' ORDER BY address"
    with get_conn() as conn:
        frame = pd.read_sql_query(sql, conn)
    return [value for value in frame["address"].dropna().tolist() if value.strip()]
def search_videos(
    label_names=None,
    day_of_week=None,
    address_keyword=None,
    start_date=None,
    end_date=None,
    difficulty=None,
    label_logic="OR",
    include_playlists=None,
    exclude_playlists=None,
    logic="OR",  # combination logic between included playlists
    **kwargs,
):
    """Return a DataFrame of videos matching the given criteria.

    Args:
        label_names: label names the video must carry.
        day_of_week: exact day-of-week string to match.
        address_keyword: address substring ('unknown' rows always excluded).
        start_date / end_date: inclusive bounds on ``record_datetime``.
        difficulty: difficulty level; "Tous" (or falsy) disables the filter.
        label_logic: "OR" (at least one label) or "AND" (all labels).
        include_playlists / exclude_playlists: playlist ids to require / ban.
        logic: "OR" or "AND" combination between included playlists.
        **kwargs: ignored — lets callers forward whole filter dicts.
            NOTE(review): this also silently swallows misspelled keywords.
    """
    label_names = label_names or []
    include_playlists = include_playlists or []
    exclude_playlists = exclude_playlists or []
    params = []
    # WHERE 1=1 lets every later clause be appended with a leading AND.
    base_query = """
        SELECT DISTINCT v.*
        FROM videos v
        WHERE 1=1
    """
    # --- Label filters ---
    if label_names:
        if label_logic == "AND":
            # Video must carry ALL the labels: count distinct matches.
            # len() interpolation is safe — it is an int, not user text.
            placeholders = ",".join("?" * len(label_names))
            base_query += f"""
                AND v.file_name IN (
                    SELECT vl.video_file_name
                    FROM video_labels vl
                    JOIN labels l ON l.id = vl.label_id
                    WHERE l.name IN ({placeholders})
                    GROUP BY vl.video_file_name
                    HAVING COUNT(DISTINCT l.name) = {len(label_names)}
                )
            """
            params.extend(label_names)
        else:
            # Video must carry AT LEAST ONE of the labels.
            placeholders = ",".join("?" * len(label_names))
            base_query += f"""
                AND v.file_name IN (
                    SELECT vl.video_file_name
                    FROM video_labels vl
                    JOIN labels l ON l.id = vl.label_id
                    WHERE l.name IN ({placeholders})
                )
            """
            params.extend(label_names)
    # --- Included playlists (ids) ---
    if include_playlists:
        placeholders = ",".join("?" * len(include_playlists))
        if logic == "AND":
            # Videos present in ALL the selected playlists.
            base_query += f"""
                AND v.file_name IN (
                    SELECT vp.video_file_name
                    FROM video_playlists vp
                    JOIN playlists p ON p.id = vp.playlist_id
                    WHERE p.id IN ({placeholders})
                    GROUP BY vp.video_file_name
                    HAVING COUNT(DISTINCT p.id) = {len(include_playlists)}
                )
            """
            params.extend(include_playlists)
        else:
            # Videos present in AT LEAST ONE selected playlist.
            base_query += f"""
                AND v.file_name IN (
                    SELECT vp.video_file_name
                    FROM video_playlists vp
                    JOIN playlists p ON p.id = vp.playlist_id
                    WHERE p.id IN ({placeholders})
                )
            """
            params.extend(include_playlists)
    # --- Excluded playlists (ids) ---
    if exclude_playlists:
        placeholders = ",".join("?" * len(exclude_playlists))
        base_query += f"""
            AND v.file_name NOT IN (
                SELECT vp.video_file_name
                FROM video_playlists vp
                JOIN playlists p ON p.id = vp.playlist_id
                WHERE p.id IN ({placeholders})
            )
        """
        params.extend(exclude_playlists)
    # --- Day of week ---
    if day_of_week:
        base_query += " AND v.day_of_week = ?"
        params.append(day_of_week)
    # --- Address keyword (and exclusion of 'unknown') ---
    if address_keyword:
        base_query += " AND v.address NOT LIKE '%unknown%' AND v.address LIKE ?"
        params.append(f"%{address_keyword}%")
    # --- Date range (inclusive) ---
    if start_date:
        base_query += " AND v.record_datetime >= ?"
        params.append(start_date)
    if end_date:
        base_query += " AND v.record_datetime <= ?"
        params.append(end_date)
    # --- Difficulty level ("Tous" means no filtering) ---
    if difficulty and difficulty != "Tous":
        base_query += " AND v.difficulty_level = ?"
        params.append(difficulty)
    # --- Sort, newest recording first ---
    base_query += " ORDER BY v.record_datetime DESC"
    with get_conn() as conn:
        return pd.read_sql_query(base_query, conn, params=params)
def get_video_playlists(file_name):
    """Return the names of the playlists containing the given video."""
    with get_conn() as conn:
        query = """
            SELECT p.name
            FROM playlists p
            JOIN video_playlists vp ON vp.playlist_id = p.id
            WHERE vp.video_file_name = ?
        """
        return [row[0] for row in conn.execute(query, (file_name,))]


def get_video_file_names_in_playlist(playlist_id):
    """Return the set of video file_names attached to a playlist.

    Fix: query the ``video_playlists`` table used by every other helper in
    this module (``add_video_to_playlist`` inserts into it, the search and
    join helpers read from it); the previous ``playlist_videos`` table name
    pointed at a table no other code in this module writes to.
    """
    with get_conn() as conn:
        rows = conn.execute(
            "SELECT video_file_name FROM video_playlists WHERE playlist_id = ?",
            (playlist_id,)
        ).fetchall()
    names = []
    for r in rows:
        # get_conn() installs sqlite3.Row, but stay tolerant of plain tuples.
        if hasattr(r, "keys"):
            names.append(r["video_file_name"])
        else:
            names.append(r[0])
    return set(names)
def get_videos_in_playlist(playlist_id):
    """Return full Video objects for a playlist, ordered by position."""
    from models import Video  # local import avoids a circular dependency
    with get_conn() as conn:
        df = pd.read_sql_query("""
            SELECT v.*
            FROM videos v
            JOIN video_playlists vp ON vp.video_file_name = v.file_name
            WHERE vp.playlist_id = ?
            ORDER BY vp.position ASC
        """, conn, params=(playlist_id,))
    return [Video(**row) for _, row in df.iterrows()]


def add_video_to_playlist(playlist_id, file_name):
    """Append a video at the end of a playlist (no-op if already present).

    Fix: dropped the leftover debug ``print`` and the redundant local
    ``import pandas`` (pandas is already imported at module level).
    """
    with get_conn() as conn:
        # Position defaults to max(position)+1, or 0 for an empty playlist.
        conn.execute("""
            INSERT OR IGNORE INTO video_playlists (video_file_name, playlist_id, position)
            VALUES (?, ?, COALESCE((SELECT MAX(position)+1 FROM video_playlists WHERE playlist_id=?), 0))
        """, (file_name, playlist_id, playlist_id))
        conn.commit()


def remove_video_from_playlist(playlist_id, file_name):
    """Detach a video from a playlist."""
    with get_conn() as conn:
        conn.execute("DELETE FROM video_playlists WHERE playlist_id=? AND video_file_name=?", (playlist_id, file_name))
        conn.commit()

56
app/models.py Normal file
View File

@@ -0,0 +1,56 @@
# models.py
from pydantic import BaseModel, Field, validator
from typing import Optional, List
import db
class Video(BaseModel):
    """Pydantic model mirroring one row of the ``videos`` table."""

    file_name: str
    raw_file: str = Field(..., description="Identifiant unique de la vidéo")
    duration: Optional[float] = None
    mp4_file: Optional[str] = None
    mp4_file_name: Optional[str] = None
    rotated_file: Optional[str] = None
    thumbnail_file: Optional[str] = None
    record_datetime: Optional[str] = None
    day_of_week: Optional[str] = None
    lat: Optional[float] = None
    long: Optional[float] = None
    address: Optional[str] = None
    difficulty_level: Optional[str] = Field("Tout niveau", description="Niveau de difficulté")
    alias: Optional[str] = None

    # --- Convenience properties ---
    @property
    def id(self) -> str:
        """Unique identifier (the file name)."""
        return self.file_name

    @property
    def title(self) -> str:
        """Short display name: alias, else mp4 name, else file name."""
        return self.alias or self.mp4_file_name or self.file_name

    @property
    def difficulty_display(self) -> str:
        """Difficulty level, defaulting to the catch-all value."""
        return self.difficulty_level or "Tout niveau"

    # --- Domain methods ---
    def load_labels(self) -> List[str]:
        """Fetch this video's label names from the database."""
        return db.load_video_labels(self.file_name)

    def save_labels(self, label_names: List[str]):
        """Persist *label_names* as this video's label set."""
        db.save_video_labels(self.file_name, label_names)

    # --- Validation ---
    @validator("raw_file")
    def validate_raw_file(cls, v):
        """Reject empty or non-string raw_file values."""
        if not isinstance(v, str) or not v:
            raise ValueError("raw_file doit être une chaîne non vide")
        return v

    @validator("duration")
    def validate_duration(cls, v):
        """Allow None; otherwise the duration must be non-negative."""
        if v is not None and v < 0:
            raise ValueError("duration ne peut pas être négative")
        return v

View File

View File

@@ -0,0 +1,140 @@
# playlists/playlist_controller.py
import streamlit as st
import db
from playlists.playlist_model import Playlist, RuleSet
from playlists import playlist_db
from views.label_views import video_filter_sidebar, video_list_view
from views.video_views import show_video_row
from models import Video
def playlist_manual_editor(playlist: Playlist):
    """Manual playlist editing: browse filtered videos, add/remove them.

    Fix: removed the duplicated ``summary_map = summary_map = ...``
    assignment left over from an edit.
    """
    st.subheader(f"🎞️ Édition manuelle : {playlist.name}")
    # Standard filter sidebar to explore the library.
    filters = video_filter_sidebar()
    # Videos already in this playlist (drives the add/remove buttons).
    playlist_video_ids = db.get_video_file_names_in_playlist(playlist.id)
    # Filtered search; **filters forwards the remaining criteria
    # (search_videos ignores unknown keys via **kwargs).
    df_videos = db.search_videos(
        label_names=filters["selected_labels"],
        day_of_week=filters["day_filter"],
        difficulty=filters["difficulty_filter"],
        **filters
    )
    if df_videos.empty:
        st.info("Aucune vidéo trouvée avec ces filtres.")
        return
    videos = [Video(**row) for _, row in df_videos.iterrows()]
    st.write(f"🎬 {len(videos)} vidéo(s) disponibles.")
    summary_map = get_video_summary_cached()
    for video in videos[:50]:  # safety cap on rendered rows
        summary = summary_map.get(video.file_name, {"labels": [], "playlists": []})
        show_video_row(
            video,
            preselected_labels=summary["labels"],
            editable_labels=False,
            editable_difficulty=False,
            editable_alias=False,
            playlist=playlist,
            playlist_video_ids=playlist_video_ids,
            video_playlists=summary["playlists"]
        )
    if st.button("📦 Charger plus"):
        # NOTE(review): assumes video_filter_sidebar initialised
        # st.session_state.video_page — confirm.
        st.session_state.video_page += 1
        st.rerun()
@st.cache_data(ttl=30)
def get_video_summary_cached():
    """Cached (30 s TTL) snapshot of the per-video labels/playlists summary."""
    summary = playlist_db.load_video_summary_map()
    return summary
def playlist_dynamic_editor(playlist: Playlist):
    """Edit a dynamic playlist's rules and live-preview the matching videos.

    Fix: removed the duplicated ``summary_map = summary_map = ...``
    assignment left over from an edit.
    """
    st.subheader(f"⚙️ Playlist dynamique : {playlist.name}")
    rules = playlist.rules or RuleSet()
    labels = db.load_labels()
    rules.include_labels = st.multiselect("Inclure labels", labels, default=rules.include_labels)
    rules.exclude_labels = st.multiselect("Exclure labels", labels, default=rules.exclude_labels)
    # Playlist references are stored as ids but displayed by name.
    all_playlists = playlist_db.load_all_playlists()
    name_to_id = {p.name: p.id for p in all_playlists}
    id_to_name = {p.id: p.name for p in all_playlists}
    default_include = [id_to_name.get(pid) for pid in rules.include_playlists if pid in id_to_name]
    default_exclude = [id_to_name.get(pid) for pid in rules.exclude_playlists if pid in id_to_name]
    selected_includes = st.multiselect("Inclure playlists", id_to_name.values(), default=default_include)
    selected_excludes = st.multiselect("Exclure playlists", id_to_name.values(), default=default_exclude)
    rules.include_playlists = [name_to_id[name] for name in selected_includes]
    rules.exclude_playlists = [name_to_id[name] for name in selected_excludes]
    col1, col2 = st.columns(2)
    with col1:
        # A relative day delta and absolute date bounds are mutually exclusive.
        use_delta = st.checkbox("⏱️ Utiliser un delta de jours", value=bool(rules.date_delta_days))
        if use_delta:
            rules.date_delta_days = st.number_input(
                "Nombre de jours depuis aujourdhui (négatif pour passé)",
                value=rules.date_delta_days or -15
            )
            rules.date_after = None
            rules.date_before = None
        else:
            rules.date_after = st.date_input("📅 Après le", value=rules.date_after or None)
            rules.date_before = st.date_input("📅 Avant le", value=rules.date_before or None)
            rules.date_delta_days = None
    with col2:
        rules.logic = st.radio("Logique de combinaison", ["AND", "OR"], index=0 if rules.logic == "AND" else 1)
        rules.label_logic = st.radio("Logique de combinaison entre labels", ["AND", "OR"], index=0 if rules.label_logic == "AND" else 1)
    # --- Persist the rules ---
    if st.button("💾 Enregistrer les règles"):
        playlist.rules = rules
        playlist.save()
        st.success("Règles mises à jour ✅")
        # st.rerun appeared in recent Streamlit; fall back for older versions.
        (st.rerun if hasattr(st, "rerun") else st.experimental_rerun)()
    st.markdown("---")
    st.subheader("🧩 Rendu de la playlist")
    rows = playlist_db.get_videos_for_playlist(playlist)
    if not rows:
        st.info("Aucune vidéo ne correspond aux règles actuelles.")
        return
    videos = [Video(**row) for row in rows]
    st.write(f"🎬 {len(videos)} vidéo(s) trouvée(s).")
    playlist_video_ids = db.get_video_file_names_in_playlist(playlist.id)
    summary_map = get_video_summary_cached()
    for v in videos[:50]:
        summary = summary_map.get(v.file_name, {"labels": [], "playlists": []})
        show_video_row(
            v,
            preselected_labels=summary["labels"],
            editable_labels=False,
            editable_difficulty=False,
            editable_alias=False,
            playlist=playlist,
            playlist_video_ids=playlist_video_ids,
            video_playlists=summary["playlists"]
        )
    if st.button("📦 Charger plus"):
        st.session_state.video_page += 1
        st.rerun()

View File

@@ -0,0 +1,83 @@
# playlists/playlist_db.py
import db
import json
import pandas as pd
from playlists.playlist_model import Playlist, RuleSet
from playlists.sql_builder import build_sql_from_rules
def load_all_playlists():
    """Return all playlists as Playlist models, newest first.

    Tolerant of NULL/empty ``rules_json`` and of rows delivered as
    sqlite3.Row, dict, or plain tuples.

    Fixes: the Playlist is now built from the normalised ``row_dict`` — the
    tuple fallback previously built a dict that was never used while
    ``row["created_at"]`` crashed on tuples; the fallback also read column
    *cids* (``d[0]``) from PRAGMA table_info instead of names (``d[1]``);
    and the no-op ``conn.row_factory = conn.row_factory`` line is gone.
    """
    with db.get_conn() as conn:
        rows = conn.execute("SELECT * FROM playlists ORDER BY created_at DESC").fetchall()
        cols = None  # column names for the tuple fallback, fetched lazily once
        playlists = []
        for row in rows:
            # Normalise every row shape into a plain dict.
            if not isinstance(row, dict) and getattr(row, "keys", None):
                # sqlite3.Row behaves like a mapping
                row_dict = {k: row[k] for k in row.keys()}
            elif isinstance(row, dict):
                row_dict = row
            else:
                if cols is None:
                    # PRAGMA table_info rows are (cid, name, type, ...): name is index 1.
                    cols = [d[1] for d in conn.execute("PRAGMA table_info(playlists)").fetchall()]
                row_dict = dict(zip(cols, row))
            try:
                created_at = row_dict.get("created_at")
                updated_at = row_dict.get("updated_at")
                pl = Playlist(
                    id=row_dict.get("id"),
                    name=row_dict.get("name"),
                    description=row_dict.get("description"),
                    type=row_dict.get("type"),
                    rules=row_dict.get("rules_json"),
                    created_at=str(created_at) if created_at is not None else None,
                    updated_at=str(updated_at) if updated_at is not None else None
                )
                playlists.append(pl)
            except Exception as e:
                # Don't let one bad row abort the whole listing: log and skip.
                print(f"⚠️ Ignored invalid playlist row (id={row_dict.get('id')}, name={row_dict.get('name')}): {e}")
        return playlists
def delete_playlist(playlist_id: int):
    """Delete a playlist together with its video associations."""
    with db.get_conn() as conn:
        conn.execute("DELETE FROM playlists WHERE id = ?", (playlist_id,))
        conn.execute("DELETE FROM video_playlists WHERE playlist_id = ?", (playlist_id,))
        conn.commit()


def get_videos_for_playlist(playlist):
    """Return the video rows of a playlist (stored order, or rule-driven)."""
    if playlist.type == "manual":
        query = """
            SELECT v.*
            FROM videos v
            JOIN video_playlists vp ON vp.video_file_name = v.file_name
            WHERE vp.playlist_id = ?
            ORDER BY vp.position
        """
        with db.get_conn() as conn:
            return conn.execute(query, (playlist.id,)).fetchall()
    # Dynamic playlist: translate its RuleSet into SQL.
    sql, params = build_sql_from_rules(playlist.rules)
    with db.get_conn() as conn:
        return conn.execute(sql, params).fetchall()


def load_video_summary_map():
    """Build {file_name: {'labels': [...], 'playlists': [...]}} from the
    ``video_summary`` view (labels/playlists are comma-joined strings)."""
    with db.get_conn() as conn:
        df = pd.read_sql_query("SELECT file_name, labels, playlists FROM video_summary", conn)
    summary = {}
    for _, row in df.iterrows():
        label_str = row["labels"]
        playlist_str = row["playlists"]
        summary[row["file_name"]] = {
            "labels": label_str.split(",") if label_str else [],
            "playlists": playlist_str.split(",") if playlist_str else [],
        }
    return summary

View File

@@ -0,0 +1,107 @@
# playlists/playlist_model.py
from pydantic import BaseModel, Field, validator, field_validator
from typing import List, Optional, Literal
from datetime import date, datetime
import json
import db
from typing import Optional, List, Literal
from pydantic import BaseModel, Field
import json
class RuleSet(BaseModel):
    """Filtering rules of a dynamic playlist (serialised into rules_json).

    Cleanup: the pydantic-v1 ``@validator`` (``convert_date``) and the
    ``dict()`` override duplicated the v2 ``@field_validator`` /
    ``model_dump`` pair with identical logic, so each conversion ran twice;
    the redundant v1 copies are removed. ``to_json`` now uses
    ``model_dump`` (``dict()`` is a deprecated alias of it in pydantic v2).
    """

    include_labels: List[str] = []
    exclude_labels: List[str] = []
    include_playlists: List[int] = []
    exclude_playlists: List[int] = []
    date_after: Optional[str] = None
    date_before: Optional[str] = None
    date_delta_days: Optional[int] = None
    difficulty: Optional[str] = None
    day_of_week: Optional[str] = None
    address_keyword: Optional[str] = None
    label_logic: Literal["AND", "OR"] = "AND"
    logic: Literal["AND", "OR"] = "AND"

    # --- Date normalisation (input) ---
    @field_validator("date_after", "date_before", mode="before")
    def normalize_date(cls, v):
        """Coerce date/datetime inputs to ISO strings before validation."""
        if isinstance(v, (date, datetime)):
            return v.isoformat()
        return v

    def model_dump(self, *args, **kwargs):
        """JSON-safe dump: any stray date/datetime is converted to str."""
        data = super().model_dump(*args, **kwargs)
        for key in ["date_after", "date_before"]:
            v = data.get(key)
            if isinstance(v, (date, datetime)):
                data[key] = v.isoformat()
        return data

    def to_json(self) -> str:
        """Serialise the rules for the ``rules_json`` column."""
        return json.dumps(self.model_dump(), ensure_ascii=False, indent=2)

    @classmethod
    def from_json(cls, raw):
        """Build a RuleSet from None, a dict or a JSON string (lenient:
        anything unparsable yields the default, empty rule set)."""
        if not raw:
            return cls()
        if isinstance(raw, dict):
            return cls(**raw)
        try:
            return cls(**json.loads(raw))
        except Exception:
            return cls()
class Playlist(BaseModel):
    """A manual or dynamic (rule-based) playlist."""

    id: Optional[int] = None
    name: str
    description: Optional[str] = ""
    type: Literal["manual", "dynamic"] = "manual"
    rules: RuleSet = Field(default_factory=RuleSet)
    created_at: Optional[str] = None
    updated_at: Optional[str] = None

    @validator("rules", pre=True, always=True)
    def ensure_rules(cls, v):
        """Accept rules as a RuleSet, dict, JSON string, or None."""
        return v if isinstance(v, RuleSet) else RuleSet.from_json(v)

    def save(self):
        """Insert this playlist, or update it when it already has an id."""
        payload = (self.name, self.description, self.type, self.rules.to_json())
        with db.get_conn() as conn:
            cur = conn.cursor()
            if self.id:
                cur.execute("""
                    UPDATE playlists
                    SET name=?, description=?, type=?, rules_json=?, updated_at=CURRENT_TIMESTAMP
                    WHERE id=?
                """, payload + (self.id,))
            else:
                cur.execute("""
                    INSERT INTO playlists (name, description, type, rules_json)
                    VALUES (?, ?, ?, ?)
                """, payload)
                self.id = cur.lastrowid
            conn.commit()

View File

@@ -0,0 +1,107 @@
import streamlit as st
from playlists import playlist_db
from playlists.playlist_model import Playlist, RuleSet
from playlists.playlist_controller import playlist_manual_editor, playlist_dynamic_editor
from datetime import datetime
from cache.video_summary import rebuild_video_summary
def main():
    """Playlist management page: search, create, rename, delete and edit.

    Flow: optional cache-rebuild button, sidebar search/date filters, then
    either the creation form (sentinel entry in the selectbox) or the editor
    for the selected playlist (manual or dynamic).
    """
    st.title("🎵 Gestion des Playlists")
    if st.button("🔁 Recalculer le cache vidéo"):
        rebuild_video_summary()
        st.success("Cache mis à jour !")
    # --- Sidebar: search & filters ---
    st.sidebar.header("🔎 Recherche de playlists")
    search_term = st.sidebar.text_input("Filtrer par nom ou description")
    date_filter = st.sidebar.date_input("Créées après", value=None)
    playlists = playlist_db.load_all_playlists()
    # Apply the filters
    filtered = []
    for p in playlists:
        # Text filter on name OR description (case-insensitive).
        if search_term.lower() not in p.name.lower() and search_term.lower() not in (p.description or "").lower():
            continue
        # Creation-date filter; rows with unparsable dates are skipped.
        if date_filter:
            created = p.created_at
            if isinstance(created, datetime):
                created_dt = created
            elif isinstance(created, (str, bytes)):
                try:
                    created_dt = datetime.fromisoformat(created)
                except ValueError:
                    continue  # ignore invalid date
            else:
                continue
            if created_dt < datetime.combine(date_filter, datetime.min.time()):
                continue
        filtered.append(p)
    # --- Selection or creation ---
    names = ["( Nouvelle playlist)"] + [p.name for p in filtered]
    # A playlist created on the previous run pre-selects itself here.
    if "new_playlist_name" in st.session_state:
        st.session_state["playlist_select"] = st.session_state.pop("new_playlist_name")
    selected_name = st.selectbox("Sélectionnez une playlist", names, key="playlist_select")
    # --- Creation form ---
    if selected_name == "( Nouvelle playlist)":
        st.subheader("Créer une nouvelle playlist")
        name = st.text_input("Nom")
        desc = st.text_area("Description")
        type_choice = st.radio("Type", ["manual", "dynamic"])
        if st.button("Créer"):
            if not name.strip():
                st.error("Le nom ne peut pas être vide.")
            else:
                pl = Playlist(name=name.strip(), description=desc, type=type_choice, rules=RuleSet())
                pl.save()
                # Stored temporarily so the next rerun selects the new playlist.
                st.session_state["new_playlist_name"] = pl.name
                st.rerun()
        return
    # --- Edit mode ---
    current = next((p for p in playlists if p.name == selected_name), None)
    if not current:
        st.warning("Aucune playlist sélectionnée.")
        return
    # --- Action bar: rename / description ---
    st.subheader(f"🎞️ Playlist : {current.name}")
    new_name = st.text_input("Renommer", value=current.name)
    new_desc = st.text_area("Description", value=current.description or "")
    if st.button("💾 Sauvegarder les métadonnées"):
        current.name = new_name
        current.description = new_desc
        current.save()
        st.success("Mise à jour enregistrée ✅")
        # st.rerun appeared in recent Streamlit; fall back for older versions.
        if hasattr(st, "rerun"):
            st.rerun()
        else:
            st.experimental_rerun()
    col1, col2, col3 = st.columns([1, 1, 2])
    with col1:
        if st.button("🗑️ Supprimer"):
            playlist_db.delete_playlist(current.id)
            st.success("Playlist supprimée ✅")
            if hasattr(st, "rerun"):
                st.rerun()
            else:
                st.experimental_rerun()
    with col2:
        if st.button("⏪ Retour à la liste"):
            st.session_state.pop("playlist_select", None)
            st.rerun()
    st.divider()
    # --- Type-specific editor ---
    if current.type == "manual":
        playlist_manual_editor(current)
    else:
        playlist_dynamic_editor(current)

View File

@@ -0,0 +1,15 @@
# playlists/playlist_views.py
import streamlit as st
from playlists import playlist_db
from models import Video
from views import show_video_thumbnail
def preview_playlist(playlist):
    """Render a thumbnail preview of a playlist's videos (capped at 30)."""
    st.subheader(f"🎬 Aperçu de la playlist : {playlist.name}")
    rows = playlist_db.get_videos_for_playlist(playlist)
    videos = [Video(**r) for r in rows]
    if not videos:
        st.info("Aucune vidéo correspondante.")
        return
    # Cap the number of rendered rows for performance.
    for video in videos[:30]:
        show_video_thumbnail(video)

View File

@@ -0,0 +1,109 @@
# playlists/sql_builder.py
from typing import Tuple, List
from playlists.playlist_model import RuleSet
from datetime import datetime, timedelta
def build_sql_from_rules(rules: RuleSet) -> Tuple[str, List]:
    """
    Build the full SQL query (SELECT * FROM videos ...) for a RuleSet.

    Returns (sql_query, params).

    Fix: the playlist include/exclude sub-queries filtered on ``p.name``
    while the bound parameters are playlist *ids* (``RuleSet.include_playlists``
    is ``List[int]``, and ``db.search_videos`` filters the same lists with
    ``p.id``), so rules referencing other playlists could never match. They
    now filter on ``p.id``.
    """
    where = ["1=1"]
    params = []
    # --- Dates: a relative day delta takes precedence over absolute bounds ---
    if rules.date_delta_days is not None:
        try:
            delta_days = int(rules.date_delta_days)
            # Negative delta => a date in the past.
            date_after = (datetime.now() + timedelta(days=delta_days)).strftime("%Y-%m-%d")
            where.append("record_datetime >= ?")
            params.append(date_after)
        except (ValueError, TypeError) as e:
            print(f"⚠️ [SQL Builder] Delta invalide ({rules.date_delta_days!r}) : {e}")
    else:
        if rules.date_after:
            where.append("record_datetime >= ?")
            params.append(rules.date_after)
        if rules.date_before:
            where.append("record_datetime <= ?")
            params.append(rules.date_before)
    # --- Labels ---
    if rules.include_labels:
        placeholders = ",".join("?" * len(rules.include_labels))
        if rules.label_logic == "AND":
            # Video must carry ALL the labels (count of distinct matches).
            where.append(f"""
            file_name IN (
                SELECT vl.video_file_name
                FROM video_labels vl
                JOIN labels l ON l.id = vl.label_id
                WHERE l.name IN ({placeholders})
                GROUP BY vl.video_file_name
                HAVING COUNT(DISTINCT l.name) = {len(rules.include_labels)}
            )
            """)
        else:
            # Video must carry AT LEAST ONE of the labels.
            where.append(f"""
            file_name IN (
                SELECT vl.video_file_name
                FROM video_labels vl
                JOIN labels l ON l.id = vl.label_id
                WHERE l.name IN ({placeholders})
            )
            """)
        params.extend(rules.include_labels)
    if rules.exclude_labels:
        placeholders = ",".join("?" * len(rules.exclude_labels))
        where.append(f"""
        file_name NOT IN (
            SELECT vl.video_file_name
            FROM video_labels vl
            JOIN labels l ON l.id = vl.label_id
            WHERE l.name IN ({placeholders})
        )
        """)
        params.extend(rules.exclude_labels)
    # --- Playlists (filtered by id — see docstring) ---
    if rules.include_playlists:
        placeholders = ",".join("?" * len(rules.include_playlists))
        where.append(f"""
        file_name IN (
            SELECT vp.video_file_name
            FROM video_playlists vp
            JOIN playlists p ON p.id = vp.playlist_id
            WHERE p.id IN ({placeholders})
        )
        """)
        params.extend(rules.include_playlists)
    if rules.exclude_playlists:
        placeholders = ",".join("?" * len(rules.exclude_playlists))
        where.append(f"""
        file_name NOT IN (
            SELECT vp.video_file_name
            FROM video_playlists vp
            JOIN playlists p ON p.id = vp.playlist_id
            WHERE p.id IN ({placeholders})
        )
        """)
        params.extend(rules.exclude_playlists)
    # --- Difficulty ("Tous" disables the filter) ---
    if rules.difficulty and rules.difficulty != "Tous":
        where.append("difficulty_level = ?")
        params.append(rules.difficulty)
    # --- Day of week ---
    if rules.day_of_week:
        where.append("day_of_week = ?")
        params.append(rules.day_of_week)
    # --- Address keyword (always excludes 'unknown' rows) ---
    if rules.address_keyword:
        where.append("address NOT LIKE '%unknown%' AND address LIKE ?")
        params.append(f"%{rules.address_keyword}%")
    sql = f"SELECT * FROM videos WHERE {' AND '.join(where)} ORDER BY record_datetime DESC"
    return sql, params

45
app/requirements.txt Normal file
View File

@@ -0,0 +1,45 @@
altair==5.5.0
annotated-types==0.7.0
attrs==25.4.0
blinker==1.9.0
cachetools==6.2.0
certifi==2025.10.5
charset-normalizer==3.4.3
click==8.3.0
click-default-group==1.2.4
gitdb==4.0.12
gitpython==3.1.45
idna==3.10
jinja2==3.1.6
jsonschema==4.25.1
jsonschema-specifications==2025.9.1
markupsafe==3.0.3
narwhals==2.7.0
numpy==2.3.3
packaging==25.0
pandas==2.3.3
pillow==11.3.0
pluggy==1.6.0
protobuf==6.32.1
pyarrow==21.0.0
pydantic==2.12.0
pydantic-core==2.41.1
pydeck==0.9.1
python-dateutil==2.9.0.post0
pytz==2025.2
referencing==0.36.2
requests==2.32.5
rpds-py==0.27.1
six==1.17.0
smmap==5.0.2
sqlite-fts4==1.0.3
sqlite-utils==3.38
streamlit==1.50.0
tabulate==0.9.0
tenacity==9.1.2
toml==0.10.2
tornado==6.5.2
typing-extensions==4.15.0
typing-inspection==0.4.2
tzdata==2025.2
urllib3==2.5.0

18
app/views.py Normal file
View File

@@ -0,0 +1,18 @@
# views.py
import streamlit as st
import os
def show_video_thumbnail(video):
    """Render a compact three-column row for *video* and return the spare column.

    Left column: thumbnail (when the file exists) plus the mp4 file name.
    Middle column: a play button that embeds the player once clicked.
    Right column: returned untouched so the caller can place extra widgets.
    """
    thumb_col, play_col, extra_col = st.columns([1, 2, 1])
    with thumb_col:
        thumb = video.thumbnail_file
        if thumb and os.path.exists(thumb):
            st.image(thumb, width="content")
        st.caption(video.mp4_file_name)
    with play_col:
        mp4 = video.mp4_file
        if mp4 and os.path.exists(mp4):
            label = (
                f"▶️ Lire 📅 {video.record_datetime or ''} — "
                f"🕒 {video.day_of_week or ''} - 📍 {video.address or ''}"
            )
            if st.button(label, key=f"play_{video.id}"):
                st.video(mp4)
    return extra_col

0
app/views/__init__.py Normal file
View File

107
app/views/label_views.py Normal file
View File

@@ -0,0 +1,107 @@
import streamlit as st
import db
from models import Video
from views.video_views import show_video_row
from controllers.label_controller import label_widget
def video_filter_sidebar(unlabeled=False):
    """Draw the filter widgets in the sidebar and return the chosen search parameters.

    Parameters
    ----------
    unlabeled : bool
        Initial state of the "unlabeled videos only" checkbox.

    Returns
    -------
    dict
        Normalized filter values for the search layer: "Tous"/empty
        selections become None, dates become ISO strings.
    """
    sidebar = st.sidebar
    sidebar.header("⚙️ Filtres et affichage")
    max_height = sidebar.slider("Hauteur max (px)", 100, 800, 300, 50)
    # Option lists come straight from the database so the widgets always
    # reflect what actually exists.
    all_labels = db.load_labels()
    unique_days = db.get_unique_days()
    unique_difficulties = db.get_unique_difficulties()
    unique_addresses = db.get_unique_addresses()
    selected_labels = sidebar.multiselect("Filtrer par labels", all_labels)
    # First option ("AND") is the default for a radio widget.
    label_logic = sidebar.radio(
        "Logique entre labels",
        ["AND", "OR"],
        help="Détermine si la vidéo doit contenir tous les labels sélectionnés (AND) ou au moins un (OR)"
    )
    day_filter = sidebar.selectbox("Jour de la semaine", ["Tous"] + unique_days)
    difficulty_filter = sidebar.selectbox("Niveau de difficulté", ["Tous"] + unique_difficulties)
    address_keyword = sidebar.selectbox("Adresse (mot-clé)", [""] + unique_addresses)
    start_date = sidebar.date_input("Date de début", value=None)
    end_date = sidebar.date_input("Date de fin", value=None)
    show_unlabeled_only = sidebar.checkbox(
        "Afficher uniquement les vidéos sans labels", value=unlabeled
    )
    return {
        "max_height": max_height,
        "selected_labels": selected_labels,
        "label_logic": label_logic,
        "day_filter": None if day_filter == "Tous" else day_filter,
        "difficulty_filter": difficulty_filter,
        "address_keyword": address_keyword if address_keyword else None,
        "start_date": start_date.isoformat() if start_date else None,
        "end_date": end_date.isoformat() if end_date else None,
        "show_unlabeled_only": show_unlabeled_only,
    }
def video_list_view(filters: dict, editable_labels=True, editable_difficulty=True, editable_alias=True, playlist=None):
    """Render the videos matching *filters* as an incrementally loaded list.

    *filters* is the dict produced by ``video_filter_sidebar``. The
    ``editable_*`` flags toggle the inline editors on each row; *playlist*,
    when given, is forwarded to each row so it can offer playlist actions.

    Returns the list of Video objects actually rendered (current pages only),
    or [] when no video matches.
    """
    # Inject CSS: cap media height per the sidebar slider, and draw an orange
    # halo (.unlabeled) around rows that have no label yet.
    st.markdown(f"""
        <style>
        img, video {{
            max-height: {filters["max_height"]}px !important;
            object-fit: contain;
            border-radius: 8px;
            transition: all 0.3s ease-in-out;
        }}
        .unlabeled {{
            border: 3px solid #f39c12;
            box-shadow: 0 0 10px #f39c12;
            border-radius: 10px;
            padding: 5px;
            margin-bottom: 10px;
        }}
        </style>
    """, unsafe_allow_html=True)
    # Query with every filter except "unlabeled only", which is applied in
    # Python below (it needs the per-video label lists).
    df_videos = db.search_videos(
        label_names=filters["selected_labels"],
        label_logic=filters["label_logic"],
        day_of_week=filters["day_filter"],
        address_keyword=filters["address_keyword"],
        start_date=filters["start_date"],
        end_date=filters["end_date"],
        difficulty=filters["difficulty_filter"]
    )
    if df_videos.empty:
        st.warning("Aucune vidéo trouvée avec ces critères.")
        return []
    # Preload labels once per video — avoids one DB call per row at render time.
    video_labels_map = {row["file_name"]: db.load_video_labels(row["file_name"]) for _, row in df_videos.iterrows()}
    if filters["show_unlabeled_only"]:
        df_videos = df_videos[df_videos["file_name"].apply(lambda fn: not video_labels_map.get(fn))]
    videos = [Video(**row) for _, row in df_videos.iterrows()]
    # Infinite scroll: render `video_page` pages of `page_size` rows, always
    # slicing from 0 so previously loaded rows stay visible after a rerun.
    page_size = 20
    st.session_state.setdefault("video_page", 1)
    start = 0
    end = st.session_state.video_page * page_size
    subset = videos[start:end]
    # Render each row, wrapped in the highlight div when it has no labels.
    for video in subset:
        preselected = video_labels_map.get(video.file_name, [])
        css_class = "unlabeled" if not preselected else ""
        with st.container():
            st.markdown(f"<div class='{css_class}'>", unsafe_allow_html=True)
            show_video_row(video, preselected, editable_labels, editable_difficulty, editable_alias, playlist)
            st.markdown("</div>", unsafe_allow_html=True)
    # "Load more" button bumps the page counter and reruns the script.
    if end < len(videos):
        if st.button("📦 Charger plus de vidéos"):
            st.session_state.video_page += 1
            st.rerun()
    else:
        st.info("✅ Toutes les vidéos sont affichées.")
    return subset

149
app/views/video_views.py Normal file
View File

@@ -0,0 +1,149 @@
# video_views.py
import os
import shutil
import streamlit as st
import db
from models import Video
from controllers.label_controller import label_widget
def show_video_row(
    video: Video,
    preselected_labels,
    editable_labels=True,
    editable_difficulty=True,
    editable_alias=True,
    playlist=None,
    playlist_video_ids=None,
    video_playlists=None,
):
    """
    Render one Streamlit row for a video:
    - thumbnail + on-demand playback
    - metadata and labels
    - inline editing of labels, difficulty level and alias
    - (optional) add/remove the video from a playlist

    ``preselected_labels`` doubles as the deletion guard: a video that still
    carries labels cannot be deleted with the 🗑️ button.
    ``playlist_video_ids`` and ``video_playlists`` are optional precomputed
    caches; when absent they are fetched from the DB (one query per row).
    """
    # --- Is the video already in the target playlist? ---
    in_playlist = False
    if playlist and playlist_video_ids is not None:
        in_playlist = video.file_name in playlist_video_ids
    elif playlist:
        # No precomputed membership supplied: one DB round-trip per row.
        playlist_video_ids = db.get_video_file_names_in_playlist(playlist.id)
        in_playlist = video.file_name in playlist_video_ids
    # --- Layout: thumbnail / info / editors / playlist action ---
    if playlist:
        col1, col2, col3, col4 = st.columns([1, 3, 2, 0.8])
    else:
        col1, col2, col3 = st.columns([1, 3, 2])
        col4 = None
    # --- Column 1: thumbnail + Play/Stop/Delete buttons ---
    # Playback state survives reruns via session_state, keyed per video.
    play_key = f"playing_{video.file_name}"
    st.session_state.setdefault(play_key, False)
    with col1:
        if getattr(video, "thumbnail_file", None) and os.path.exists(video.thumbnail_file):
            st.image(video.thumbnail_file)
        else:
            st.caption("Pas de miniature")
        st.caption(video.file_name or video.mp4_file_name)
        c1, c2, c3 = st.columns(3)
        with c1:
            if st.button("▶️", key=f"play_{video.file_name}"):
                st.session_state[play_key] = True
                # st.rerun on recent Streamlit, experimental_rerun on older ones.
                (st.rerun if hasattr(st, "rerun") else st.experimental_rerun)()
        with c2:
            if st.button("⏸️", key=f"stop_{video.file_name}"):
                st.session_state[play_key] = False
                (st.rerun if hasattr(st, "rerun") else st.experimental_rerun)()
        with c3:
            if st.button("🗑️", key=f"del_{video.file_name}"):
                # Deletion protection: labeled videos cannot be removed.
                if preselected_labels and len(preselected_labels) > 0:
                    st.warning("Vidéo labelisée.")
                else:
                    st.session_state[play_key] = False
                    # NOTE(review): os.path.exists raises TypeError when the
                    # path is None — confirm raw_file/mp4_file are always set.
                    if os.path.exists(video.raw_file):
                        os.remove(video.raw_file)
                    if os.path.exists(video.mp4_file):
                        # Removes the entire per-video directory, not just the mp4.
                        shutil.rmtree(os.path.dirname(video.mp4_file))
                    db.delete_video(video.file_name)
                    st.warning("Vidéo supprimée.")
                    (st.rerun if hasattr(st, "rerun") else st.experimental_rerun)()
        if st.session_state[play_key]:
            mp4_path = getattr(video, "mp4_file", None)
            if mp4_path and os.path.exists(mp4_path):
                st.video(mp4_path)
            else:
                st.warning("Fichier vidéo introuvable.")
    # --- Column 2: metadata ---
    with col2:
        st.markdown(f"**📅 {video.record_datetime or ''}** — {video.day_of_week or ''}")
        st.write(f"📍 {video.address or 'Inconnue'}")
        st.write(f"💪 Difficulté : {video.difficulty_display}")
        st.text(f"🏷️ Labels: {', '.join(preselected_labels) or 'Aucun'}")
        if video_playlists:
            st.text(f"🎵 Playlists: {', '.join(video_playlists)}")
        else:
            # Fallback DB lookup when the caller did not precompute it.
            playlists = db.get_video_playlists(video.file_name)
            if playlists:
                st.text(f"🎵 Playlists: {', '.join(playlists)}")
    # --- Column 3: inline editors ---
    with col3:
        if editable_labels:
            label_widget(video, preselected=preselected_labels)
        if editable_difficulty:
            levels = ["Tout niveau", "Débutant", "Intermédiaire", "Avancé", "Star"]
            try:
                idx = levels.index(video.difficulty_display)
            except ValueError:
                # Unknown stored value: fall back to the first option.
                idx = 0
            new_level = st.selectbox(
                "🎚 Niveau",
                levels,
                index=idx,
                key=f"diff_{video.file_name}"
            )
            # Persist as soon as the selection differs from the stored value.
            if new_level != video.difficulty_display:
                db.update_video_difficulty(video.file_name, new_level)
                st.success(f"Niveau mis à jour pour {video.file_name}")
        if editable_alias:
            alias = st.text_input("Alias (optionnel)", value=video.alias, key=f'new_alias_{video.id}')
            if alias != video.alias:
                db.update_video_alias(video.file_name, alias.strip())
                st.success(f"Alias mis à jour pour {video.file_name}")
    # --- Column 4: playlist toggle ---
    if col4 and playlist:
        with col4:
            key_toggle = f"toggle_{playlist.id}_{video.file_name}"
            prev_state_key = f"{key_toggle}_prev"
            # Track the previous toggle state so the DB is only touched when
            # the user actually flips the switch (not on every rerun).
            prev_state = st.session_state.get(prev_state_key, in_playlist)
            toggled = st.toggle(
                "🎵",
                value=in_playlist,
                key=key_toggle,
                help="Inclure dans la playlist"
            )
            if toggled != prev_state:
                if toggled:
                    db.add_video_to_playlist(file_name=video.file_name, playlist_id=playlist.id)
                    st.toast(f"{video.file_name} ajouté à {playlist.name}")
                else:
                    db.remove_video_from_playlist(file_name=video.file_name, playlist_id=playlist.id)
                    st.toast(f"🗑️ {video.file_name} retiré de {playlist.name}")
                st.session_state[prev_state_key] = toggled
                (st.rerun if hasattr(st, "rerun") else st.experimental_rerun)()
            else:
                st.session_state[prev_state_key] = toggled

View File

@@ -15,11 +15,13 @@ register_video() {
local lat=${9:-0.000000}
local long=${10:-0.000000}
local address=${11:-Unknown}
address=$(sed "s|'| |g" <<< $address)
if [ -z "$raw_file" ] || [ -z "$mp4_file" ]; then
echo "Error: raw_file and mp4_file are required"
exit 1
fi
local mp4_file_name=$(basename $(dirname $mp4_file))
sqlite3 $DANCE_VIDEOS_DB "INSERT OR REPLACE INTO videos (file_name, raw_file, duration, mp4_file, rotated_file, thumbnail_file, record_datetime, day_of_week, lat, long, address) VALUES('$file_name','$raw_file', '$duration', '$mp4_file', '$rotated_file', '$thumbnail_file', $record_datetime, '$day_of_week', $lat, $long, '$address')"
sqlite3 $DANCE_VIDEOS_DB "PRAGMA busy_timeout = 10000; INSERT OR REPLACE INTO videos (file_name, raw_file, duration, mp4_file, mp4_file_name, rotated_file, thumbnail_file, record_datetime, day_of_week, lat, long, address) VALUES('$file_name','$raw_file', '$duration', '$mp4_file', '$mp4_file_name', '$rotated_file', '$thumbnail_file', $record_datetime, '$day_of_week', $lat, $long, '$address');"
}
export -f register_video

View File

@@ -0,0 +1,290 @@
-- Extension of the DanceVideos/ schema for the video_analysis/ project.
-- See adr/0004-db-extension-strategy.md
-- Convention inherited from DanceVideos: the `videos` PK is `file_name VARCHAR(255)`;
-- child tables carry `video_file_name VARCHAR(255)` as their FK.
PRAGMA foreign_keys = ON;

-- Groups of videos from the same dance class (V0.6, see adr/0010).
CREATE TABLE IF NOT EXISTS video_groups (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    label TEXT,
    time_start DATETIME,
    time_end DATETIME,
    n_videos INTEGER,
    style TEXT,
    pipeline_version TEXT NOT NULL,
    notes TEXT
);

-- Group membership; the PK on video_file_name means a video belongs
-- to at most one group.
CREATE TABLE IF NOT EXISTS video_group_members (
    video_file_name VARCHAR(255) PRIMARY KEY,
    group_id INTEGER NOT NULL,
    -- Role of the video inside the class session.
    role TEXT CHECK (role IN ('explicative', 'demonstration', 'mixte', 'unknown')),
    seq_idx INTEGER,
    FOREIGN KEY (video_file_name) REFERENCES videos(file_name) ON DELETE CASCADE,
    FOREIGN KEY (group_id) REFERENCES video_groups(id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_vgm_group ON video_group_members(group_id);

-- One analysis run per pipeline version and per video.
CREATE TABLE IF NOT EXISTS analyses (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    video_file_name VARCHAR(255) NOT NULL,
    niveau TEXT NOT NULL, -- "audio_demix" | "beats" | "asr" | "pose" | "segment" | "label"
    pipeline_version TEXT NOT NULL,
    started_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    finished_at DATETIME,
    summary_json TEXT,
    FOREIGN KEY (video_file_name) REFERENCES videos(file_name) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_analyses_video ON analyses(video_file_name, niveau);

-- Audio segmentation (speech / music / silence / mixed).
CREATE TABLE IF NOT EXISTS audio_segments (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    video_file_name VARCHAR(255) NOT NULL,
    start_s REAL NOT NULL,
    end_s REAL NOT NULL,
    kind TEXT NOT NULL CHECK (kind IN ('parole', 'musique', 'silence', 'mixed')),
    speaker_id TEXT,
    text TEXT,
    confidence REAL,
    FOREIGN KEY (video_file_name) REFERENCES videos(file_name) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_audio_segments_video ON audio_segments(video_file_name, start_s);

-- Beats / downbeats / tempo.
CREATE TABLE IF NOT EXISTS beats (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    video_file_name VARCHAR(255) NOT NULL,
    t_s REAL NOT NULL,
    beat_in_bar INTEGER NOT NULL, -- typically 1..4
    is_downbeat BOOLEAN NOT NULL,
    bpm REAL,
    FOREIGN KEY (video_file_name) REFERENCES videos(file_name) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_beats_video ON beats(video_file_name, t_s);
-- Intra-video regimes (counted / to-music / mixed / explanation / silence) — see adr/0009.
CREATE TABLE IF NOT EXISTS regime_segments (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    video_file_name VARCHAR(255) NOT NULL,
    start_s REAL NOT NULL,
    end_s REAL NOT NULL,
    regime TEXT NOT NULL CHECK (regime IN ('avec_compte', 'en_musique', 'mixte', 'explication', 'compte_avec_explication', 'silence')),
    confidence REAL,
    pipeline_version TEXT NOT NULL,
    FOREIGN KEY (video_file_name) REFERENCES videos(file_name) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_regime_video_t ON regime_segments(video_file_name, start_s);

-- Word-level transcription (WhisperX) — see adr/0008.
CREATE TABLE IF NOT EXISTS transcription_words (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    video_file_name VARCHAR(255) NOT NULL,
    audio_segment_id INTEGER,
    t_start_s REAL NOT NULL,
    t_end_s REAL NOT NULL,
    word TEXT NOT NULL,
    speaker_id TEXT,
    confidence REAL,
    pipeline_version TEXT NOT NULL,
    FOREIGN KEY (video_file_name) REFERENCES videos(file_name) ON DELETE CASCADE,
    FOREIGN KEY (audio_segment_id) REFERENCES audio_segments(id) ON DELETE SET NULL
);
CREATE INDEX IF NOT EXISTS idx_words_video_t ON transcription_words(video_file_name, t_start_s);
CREATE INDEX IF NOT EXISTS idx_words_text ON transcription_words(word);

-- Semantic labels attached to dance_segments from the ASR (V0.5.4).
CREATE TABLE IF NOT EXISTS dance_segment_labels (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    segment_id TEXT NOT NULL,
    label TEXT NOT NULL,
    category TEXT, -- "body_part" | "movement" | "direction" | "figure" | "rhythm"
    source TEXT NOT NULL CHECK (source IN ('asr-keyword', 'asr-figure', 'user', 'manual')),
    t_offset_s REAL, -- keyword offset relative to the segment start (negative = before)
    confidence REAL,
    pipeline_version TEXT NOT NULL,
    FOREIGN KEY (segment_id) REFERENCES dance_segments(segment_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_dsl_segment ON dance_segment_labels(segment_id);
CREATE INDEX IF NOT EXISTS idx_dsl_label ON dance_segment_labels(label);

-- People detected in the video (teacher, partner, audience).
CREATE TABLE IF NOT EXISTS persons (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    video_file_name VARCHAR(255) NOT NULL,
    person_id INTEGER NOT NULL, -- frame-stable ID (ByteTrack)
    role TEXT NOT NULL CHECK (role IN ('prof', 'partner', 'audience', 'unknown')),
    notes TEXT,
    FOREIGN KEY (video_file_name) REFERENCES videos(file_name) ON DELETE CASCADE,
    UNIQUE (video_file_name, person_id)
);

-- Pose keypoints (frame-decimated, typically 1 frame out of N).
CREATE TABLE IF NOT EXISTS pose_keypoints (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    video_file_name VARCHAR(255) NOT NULL,
    frame INTEGER NOT NULL,
    person_id INTEGER NOT NULL,
    kp_json TEXT NOT NULL, -- compact JSON: 133 keypoints {x, y, score}
    FOREIGN KEY (video_file_name) REFERENCES videos(file_name) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_pose_kp_video_frame ON pose_keypoints(video_file_name, frame);
-- Hierarchical choreography segments (core of the cumulate/reduce UX).
CREATE TABLE IF NOT EXISTS dance_segments (
    segment_id TEXT PRIMARY KEY, -- e.g. "v17:meso:0008"
    video_file_name VARCHAR(255) NOT NULL,
    start_s REAL NOT NULL,
    end_s REAL NOT NULL,
    level TEXT NOT NULL CHECK (level IN ('macro', 'meso', 'micro', 'nano')),
    parent_segment_id TEXT,
    label TEXT,
    source TEXT NOT NULL CHECK (source IN ('auto-rules', 'auto-tcn', 'teacher-said', 'user')),
    confidence REAL,
    FOREIGN KEY (video_file_name) REFERENCES videos(file_name) ON DELETE CASCADE,
    -- Deleting a parent cascades down the whole subtree.
    FOREIGN KEY (parent_segment_id) REFERENCES dance_segments(segment_id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_dance_segments_video ON dance_segments(video_file_name, level, start_s);
CREATE INDEX IF NOT EXISTS idx_dance_segments_parent ON dance_segments(parent_segment_id);

-- Named patterns (mise en espagnol, dile que no, pas de bourré...).
CREATE TABLE IF NOT EXISTS patterns (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT UNIQUE NOT NULL,
    description TEXT,
    style TEXT, -- "salsa cubaine" | "bachata" | "common" | ...
    templates_json TEXT, -- list of occurrences (video_file_name + segment_id) used as templates
    occurrences_count INTEGER DEFAULT 0
);

-- Benchmarks — evaluation results per pipeline_version x method x tolerance x video
-- (see doc/observabilite-backoffice.md § 5, notebooks/02-segmentation-eval).
CREATE TABLE IF NOT EXISTS benchmarks (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    pipeline_version TEXT NOT NULL, -- e.g. "v0.5.0", "v0.5.0-beat_this-raw-audio"
    run_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    notebook TEXT, -- name/path of the notebook that produced the measurement
    golden_set TEXT, -- e.g. "decoupe/diegoRiviera.json" or "decoupe/all"
    video_file_name VARCHAR(255), -- video; NULL for aggregated metrics
    method TEXT NOT NULL, -- "V0_meso" | "4bar_grid" | "uniform_5s" | ...
    tol_s REAL NOT NULL, -- tolerance in seconds
    precision_score REAL NOT NULL,
    recall_score REAL NOT NULL,
    f1_score REAL NOT NULL,
    n_pred INTEGER NOT NULL,
    n_gold INTEGER NOT NULL,
    notes TEXT,
    FOREIGN KEY (video_file_name) REFERENCES videos(file_name) ON DELETE SET NULL
);
CREATE INDEX IF NOT EXISTS idx_benchmarks_version ON benchmarks(pipeline_version, method, tol_s);
CREATE INDEX IF NOT EXISTS idx_benchmarks_run ON benchmarks(run_at);

-- Edit journal (audit trail + unified undo/redo) — see adr/0013.
CREATE TABLE IF NOT EXISTS edits_journal (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    user_id TEXT NOT NULL DEFAULT 'default',
    video_file_name VARCHAR(255),
    transaction_id TEXT NOT NULL,
    op_type TEXT NOT NULL,
    target_table TEXT NOT NULL,
    target_id TEXT NOT NULL,
    before_json TEXT,
    after_json TEXT,
    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    undone_at DATETIME,
    redone_at DATETIME,
    pipeline_version TEXT,
    FOREIGN KEY (video_file_name) REFERENCES videos(file_name) ON DELETE SET NULL
);
CREATE INDEX IF NOT EXISTS idx_edits_video_t ON edits_journal(video_file_name, created_at);
CREATE INDEX IF NOT EXISTS idx_edits_txn ON edits_journal(transaction_id);
CREATE INDEX IF NOT EXISTS idx_edits_active ON edits_journal(undone_at, created_at);
-- Cross-video anchor handles (_C_/_M_ pairs) — see adr/0014.
CREATE TABLE IF NOT EXISTS video_group_handles (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    group_id INTEGER NOT NULL,
    source_video VARCHAR(255) NOT NULL,
    source_t_s REAL NOT NULL,
    target_video VARCHAR(255) NOT NULL,
    target_t_s REAL NOT NULL,
    label TEXT,
    created_by TEXT NOT NULL DEFAULT 'default',
    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    notes TEXT,
    FOREIGN KEY (group_id) REFERENCES video_groups(id) ON DELETE CASCADE,
    FOREIGN KEY (source_video) REFERENCES videos(file_name) ON DELETE CASCADE,
    FOREIGN KEY (target_video) REFERENCES videos(file_name) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_handle_group ON video_group_handles(group_id);
CREATE INDEX IF NOT EXISTS idx_handle_pair ON video_group_handles(source_video, target_video);

-- Style predictions from the RF classifier (V0.6.1, see adr/0011).
-- NOTE(review): this table names its FK column `file_name`, unlike the
-- `video_file_name` convention used by every other child table — confirm intended.
CREATE TABLE IF NOT EXISTS style_predictions (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    file_name VARCHAR(255) NOT NULL,
    style TEXT NOT NULL,
    confidence REAL,
    model_version TEXT NOT NULL,
    predicted_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (file_name) REFERENCES videos(file_name) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_stylepred_file ON style_predictions(file_name);
CREATE INDEX IF NOT EXISTS idx_stylepred_style ON style_predictions(style);

-- Persistent navigation state (cumulate/reduce UX), one row per video x user.
CREATE TABLE IF NOT EXISTS nav_state (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    video_file_name VARCHAR(255) NOT NULL,
    user_id TEXT NOT NULL DEFAULT 'default',
    current_segment_id TEXT,
    view_level TEXT CHECK (view_level IN ('macro', 'meso', 'micro', 'nano')),
    updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (video_file_name) REFERENCES videos(file_name) ON DELETE CASCADE,
    FOREIGN KEY (current_segment_id) REFERENCES dance_segments(segment_id) ON DELETE SET NULL,
    UNIQUE (video_file_name, user_id)
);

-- Movement atoms (V0.10, see adr/0018) — beat / half-beat granularity,
-- one record per beat x dancer.
-- Hybrid 3-level model: continuous features + soft distribution + argmax + confidence.
CREATE TABLE IF NOT EXISTS beat_atoms (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    video_file_name VARCHAR(255) NOT NULL,
    person_id INTEGER NOT NULL DEFAULT 0,
    t_s REAL NOT NULL,
    beat_position REAL NOT NULL,
    is_downbeat BOOLEAN NOT NULL,
    -- Left foot
    left_foot_height REAL,
    left_foot_speed REAL,
    left_foot_stability REAL,
    left_foot_state VARCHAR(20),
    left_foot_state_confidence REAL,
    -- Soft distribution summing to 1 over planted/touch/lifted/transitioning.
    left_foot_state_dist_json TEXT,
    -- Right foot
    right_foot_height REAL,
    right_foot_speed REAL,
    right_foot_stability REAL,
    right_foot_state VARCHAR(20),
    right_foot_state_confidence REAL,
    right_foot_state_dist_json TEXT,
    -- Weight transfer
    weight_ratio REAL,
    weight_transfer_velocity REAL,
    weight_on VARCHAR(10),
    -- Pelvis kinematics (in hip-widths)
    pelvis_x REAL,
    pelvis_y REAL,
    pelvis_vel_x REAL,
    pelvis_vel_y REAL,
    -- Overall quality
    pose_confidence REAL NOT NULL,
    pipeline_version TEXT NOT NULL,
    computed_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (video_file_name) REFERENCES videos(file_name) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_atom_video_t ON beat_atoms(video_file_name, t_s);
CREATE INDEX IF NOT EXISTS idx_atom_video_beat ON beat_atoms(video_file_name, beat_position);

View File

@@ -3,6 +3,7 @@ CREATE TABLE IF NOT EXISTS videos (
raw_file VARCHAR(255) UNIQUE,
duration DECIMAL(10,2),
mp4_file VARCHAR(255),
mp4_file_name VARCHAR(255),
rotated_file VARCHAR(255),
thumbnail_file VARCHAR(255),
record_datetime TIMESTAMP,
@@ -10,5 +11,8 @@ CREATE TABLE IF NOT EXISTS videos (
record_time TIME GENERATED ALWAYS AS (TIME(record_datetime)) VIRTUAL,
lat DECIMAL(10,6),
long DECIMAL(11,7),
address VARCHAR(255)
);
address VARCHAR(255),
difficulty_level VARCHAR(255) DEFAULT 'Tout niveau',
alias VARCHAR(255) DEFAULT ''
);

14
model/videos_labels.sql Normal file
View File

@@ -0,0 +1,14 @@
-- Label dictionary (independent keywords / tags).
CREATE TABLE IF NOT EXISTS labels (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name VARCHAR(100) UNIQUE NOT NULL
);

-- Many-to-many association between videos and labels.
CREATE TABLE IF NOT EXISTS video_labels (
    video_file_name VARCHAR(255),
    label_id INTEGER,
    PRIMARY KEY (video_file_name, label_id),
    FOREIGN KEY (video_file_name) REFERENCES videos(file_name) ON DELETE CASCADE,
    FOREIGN KEY (label_id) REFERENCES labels(id) ON DELETE CASCADE
);

View File

@@ -0,0 +1,30 @@
-- =========================================================
-- Main playlists table
-- =========================================================
CREATE TABLE IF NOT EXISTS playlists (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT UNIQUE NOT NULL,
    description TEXT,
    -- 'manual' = explicit membership rows in video_playlists;
    -- 'dynamic' = membership computed from rules_json.
    type TEXT CHECK (type IN ('manual', 'dynamic')) NOT NULL DEFAULT 'manual',
    rules_json TEXT, -- JSON describing the rules for dynamic playlists
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- =========================================================
-- Video <-> playlist association table (manual playlists only)
-- =========================================================
CREATE TABLE IF NOT EXISTS video_playlists (
    video_file_name TEXT NOT NULL,
    playlist_id INTEGER NOT NULL,
    position INTEGER DEFAULT 0, -- ordering inside the playlist
    PRIMARY KEY (video_file_name, playlist_id),
    FOREIGN KEY (video_file_name) REFERENCES videos(file_name) ON DELETE CASCADE,
    FOREIGN KEY (playlist_id) REFERENCES playlists(id) ON DELETE CASCADE
);

-- =========================================================
-- Indexes to speed up lookups
-- =========================================================
CREATE INDEX IF NOT EXISTS idx_playlist_type ON playlists(type);
CREATE INDEX IF NOT EXISTS idx_video_playlists_playlist ON video_playlists(playlist_id);

244
model/videos_summary.sql Normal file
View File

@@ -0,0 +1,244 @@
-- ============================================================================
-- VIEW: playlist_videos
-- Resolves every playlist (manual and dynamic) to its set of videos.
-- ============================================================================
DROP VIEW IF EXISTS playlist_videos;
CREATE VIEW playlist_videos AS
WITH
-- 1) Manual playlists: direct membership rows.
manual_playlists_videos AS (
    SELECT
        p.id AS playlist_id,
        p.name AS playlist_name,
        'manual' AS playlist_type,
        vp.video_file_name
    FROM playlists p
    JOIN video_playlists vp ON p.id = vp.playlist_id
    WHERE p.type = 'manual'
),
-- 2) Dynamic playlists: every rule EXCEPT include/exclude_playlists.
--    NOTE: `JOIN videos v` without ON is an implicit cross join; the pairing
--    is filtered entirely by the WHERE clause below.
dynamic_playlist_videos_base AS (
    SELECT DISTINCT
        p.id AS playlist_id,
        p.name AS playlist_name,
        'dynamic' AS playlist_type,
        v.file_name AS video_file_name
    FROM playlists p
    JOIN videos v
    WHERE p.type = 'dynamic'
    -- Included labels (AND/OR)
    AND (
        json_array_length(json_extract(p.rules_json, '$.include_labels')) = 0
        OR (
            json_extract(p.rules_json, '$.label_logic') = 'OR'
            AND EXISTS (
                SELECT 1 FROM json_each(json_extract(p.rules_json, '$.include_labels')) jl
                JOIN labels l ON l.name = jl.value
                JOIN video_labels vl ON vl.label_id = l.id AND vl.video_file_name = v.file_name
            )
        )
        OR (
            json_extract(p.rules_json, '$.label_logic') = 'AND'
            -- AND semantics: no required label may be missing for this video.
            AND NOT EXISTS (
                SELECT 1
                FROM json_each(json_extract(p.rules_json, '$.include_labels')) jl
                WHERE jl.value NOT IN (
                    SELECT l2.name
                    FROM labels l2
                    JOIN video_labels vl2 ON l2.id = vl2.label_id
                    WHERE vl2.video_file_name = v.file_name
                )
            )
        )
    )
    -- Excluded labels
    AND NOT EXISTS (
        SELECT 1 FROM json_each(json_extract(p.rules_json, '$.exclude_labels')) je
        JOIN labels lx ON lx.name = je.value
        JOIN video_labels vlx ON vlx.label_id = lx.id AND vlx.video_file_name = v.file_name
    )
    -- Date window: absolute lower bound or rolling delta in days.
    AND (
        json_extract(p.rules_json, '$.date_after') IS NULL
        OR v.record_datetime >= json_extract(p.rules_json, '$.date_after')
        OR (
            json_extract(p.rules_json, '$.date_delta_days') IS NOT NULL
            AND v.record_datetime >= date('now', (json_extract(p.rules_json, '$.date_delta_days') || ' days'))
        )
    )
    AND (
        json_extract(p.rules_json, '$.date_before') IS NULL
        OR v.record_datetime <= json_extract(p.rules_json, '$.date_before')
    )
    -- Difficulty / day / address
    AND (
        json_extract(p.rules_json, '$.difficulty') IS NULL
        OR v.difficulty_level = json_extract(p.rules_json, '$.difficulty')
    )
    AND (
        json_extract(p.rules_json, '$.day_of_week') IS NULL
        OR v.day_of_week = json_extract(p.rules_json, '$.day_of_week')
    )
    AND (
        json_extract(p.rules_json, '$.address_keyword') IS NULL
        OR (
            v.address NOT LIKE '%unknown%'
            AND v.address LIKE '%' || json_extract(p.rules_json, '$.address_keyword') || '%'
        )
    )
),
-- 3) Direct inclusions (parent -> child playlist).
--    NOTE(review): values are CAST to INTEGER (playlist ids), while the
--    Python rule builder elsewhere filters on playlist *names* — confirm
--    which representation rules_json actually stores.
playlist_direct_includes AS (
    SELECT
        p.id AS parent_playlist_id,
        CAST(inc.value AS INTEGER) AS child_playlist_id
    FROM playlists p
    JOIN json_each(json_extract(p.rules_json, '$.include_playlists')) inc
        ON json_type(inc.value) IN ('integer', 'text')
    WHERE json_array_length(json_extract(p.rules_json, '$.include_playlists')) > 0
),
-- 4) Recursion: parent -> descendant (transitive closure).
playlist_includes_recursive AS (
    WITH RECURSIVE rec(parent_playlist_id, child_playlist_id) AS (
        SELECT parent_playlist_id, child_playlist_id FROM playlist_direct_includes
        UNION ALL
        SELECT d.parent_playlist_id, di.child_playlist_id
        FROM playlist_direct_includes d
        JOIN rec di ON di.parent_playlist_id = d.child_playlist_id
    )
    SELECT DISTINCT parent_playlist_id AS parent_id, child_playlist_id AS child_id FROM rec
),
-- 5) Direct and recursive exclusions (same mechanics as 3/4).
playlist_direct_excludes AS (
    SELECT
        p.id AS parent_playlist_id,
        CAST(exc.value AS INTEGER) AS child_playlist_id
    FROM playlists p
    JOIN json_each(json_extract(p.rules_json, '$.exclude_playlists')) exc
        ON json_type(exc.value) IN ('integer', 'text')
    WHERE json_array_length(json_extract(p.rules_json, '$.exclude_playlists')) > 0
),
playlist_excludes_recursive AS (
    WITH RECURSIVE rec_ex(parent_playlist_id, child_playlist_id) AS (
        SELECT parent_playlist_id, child_playlist_id FROM playlist_direct_excludes
        UNION ALL
        SELECT d.parent_playlist_id, di.child_playlist_id
        FROM playlist_direct_excludes d
        JOIN rec_ex di ON di.parent_playlist_id = d.child_playlist_id
    )
    SELECT DISTINCT parent_playlist_id AS parent_id, child_playlist_id AS child_id FROM rec_ex
),
-- 6) Videos contributed by included playlists (manual or dynamic).
playlist_included_videos AS (
    SELECT pir.parent_id AS parent_playlist_id, mpv.video_file_name
    FROM playlist_includes_recursive pir
    JOIN manual_playlists_videos mpv ON mpv.playlist_id = pir.child_id
    UNION
    SELECT pir.parent_id AS parent_playlist_id, dpb.video_file_name
    FROM playlist_includes_recursive pir
    JOIN dynamic_playlist_videos_base dpb ON dpb.playlist_id = pir.child_id
),
-- 7) OR logic: own rule matches UNION videos of included playlists.
--    NOTE(review): reads '$.logic' while the label rules use '$.label_logic';
--    a dynamic playlist whose rules_json has no 'logic' key falls through
--    both 7) and 8) and disappears — confirm intended.
playlist_includes_union AS (
    SELECT DISTINCT db.playlist_id, db.playlist_name, db.playlist_type, db.video_file_name
    FROM dynamic_playlist_videos_base db
    JOIN playlists p ON p.id = db.playlist_id
    WHERE json_extract(p.rules_json, '$.logic') = 'OR'
    UNION
    SELECT DISTINCT iv.parent_playlist_id AS playlist_id,
        (SELECT name FROM playlists WHERE id = iv.parent_playlist_id) AS playlist_name,
        'dynamic' AS playlist_type,
        iv.video_file_name
    FROM playlist_included_videos iv
    JOIN playlists p ON p.id = iv.parent_playlist_id
    WHERE json_extract(p.rules_json, '$.logic') = 'OR'
),
-- 8) AND logic: own rule matches INTERSECTED with included playlists'
--    videos (+ fallback: with 0 includes, the base set stands alone).
playlist_includes_intersection AS (
    SELECT DISTINCT db.playlist_id, db.playlist_name, db.playlist_type, db.video_file_name
    FROM dynamic_playlist_videos_base db
    JOIN playlists p ON p.id = db.playlist_id
    WHERE json_extract(p.rules_json, '$.logic') = 'AND'
    AND json_array_length(json_extract(p.rules_json, '$.include_playlists')) > 0
    AND db.video_file_name IN (
        SELECT iv.video_file_name
        FROM playlist_included_videos iv
        WHERE iv.parent_playlist_id = db.playlist_id
    )
    UNION ALL
    SELECT DISTINCT db.playlist_id, db.playlist_name, db.playlist_type, db.video_file_name
    FROM dynamic_playlist_videos_base db
    JOIN playlists p ON p.id = db.playlist_id
    WHERE json_extract(p.rules_json, '$.logic') = 'AND'
    AND (
        json_array_length(json_extract(p.rules_json, '$.include_playlists')) IS NULL
        OR json_array_length(json_extract(p.rules_json, '$.include_playlists')) = 0
    )
),
-- 9) Merge both inclusion logics.
playlist_after_includes AS (
    SELECT * FROM playlist_includes_union
    UNION ALL
    SELECT * FROM playlist_includes_intersection
),
-- 10) Videos excluded via exclude_playlists (transitively).
playlist_excluded_videos AS (
    SELECT per.parent_id AS parent_playlist_id, mpv.video_file_name
    FROM playlist_excludes_recursive per
    JOIN manual_playlists_videos mpv ON mpv.playlist_id = per.child_id
    UNION
    SELECT per.parent_id AS parent_playlist_id, dpb.video_file_name
    FROM playlist_excludes_recursive per
    JOIN dynamic_playlist_videos_base dpb ON dpb.playlist_id = per.child_id
),
-- 11) Apply the exclusions (anti-join pattern).
playlist_after_excludes AS (
    SELECT pai.playlist_id, pai.playlist_name, pai.playlist_type, pai.video_file_name
    FROM playlist_after_includes pai
    LEFT JOIN playlist_excluded_videos pev
        ON pev.parent_playlist_id = pai.playlist_id AND pev.video_file_name = pai.video_file_name
    WHERE pev.parent_playlist_id IS NULL
)
-- 12) Final result: manual memberships + resolved dynamic playlists.
SELECT playlist_id, playlist_name, playlist_type, video_file_name
FROM manual_playlists_videos
UNION ALL
SELECT playlist_id, playlist_name, playlist_type, video_file_name
FROM playlist_after_excludes;
-- ============================================================================
-- AGGREGATED VIEW: video_summary
-- One row per video with its labels and playlist names concatenated.
-- ============================================================================
DROP VIEW IF EXISTS video_summary;
CREATE VIEW video_summary AS
SELECT
    v.file_name,
    v.raw_file,
    v.mp4_file,
    v.record_datetime,
    v.day_of_week,
    v.difficulty_level,
    v.address,
    GROUP_CONCAT(DISTINCT l.name) AS labels,
    GROUP_CONCAT(DISTINCT pv.playlist_name) AS playlists
FROM videos v
LEFT JOIN video_labels vl ON vl.video_file_name = v.file_name
LEFT JOIN labels l ON l.id = vl.label_id
LEFT JOIN playlist_videos pv ON pv.video_file_name = v.file_name
GROUP BY v.file_name;
-- Materialized snapshot of the view above.
-- NOTE(review): built once via CREATE TABLE IF NOT EXISTS and never refreshed
-- in this script — confirm a refresh job exists elsewhere.
CREATE TABLE IF NOT EXISTS video_summary_materialized AS
SELECT * FROM video_summary;

236
model/views.sql Normal file
View File

@@ -0,0 +1,236 @@
-- 1) Manually curated playlists: one row per (playlist, video) assignment,
--    read straight from the video_playlists junction table.
DROP VIEW IF EXISTS manual_playlists_videos;
CREATE VIEW manual_playlists_videos AS
SELECT p.id     AS playlist_id,
       p.name   AS playlist_name,
       'manual' AS playlist_type,
       vp.video_file_name
  FROM video_playlists vp
  JOIN playlists p
    ON vp.playlist_id = p.id
   AND p.type = 'manual';
-- 2) Dynamic playlist videos: applies every rule EXCEPT
--    include_playlists / exclude_playlists (i.e. labels, date_after /
--    date_before / date_delta_days, difficulty, day_of_week, address_keyword).
DROP VIEW IF EXISTS dynamic_playlist_videos_base;
CREATE VIEW dynamic_playlist_videos_base AS
SELECT DISTINCT
p.id AS playlist_id,
p.name AS playlist_name,
'dynamic' AS playlist_type,
v.file_name AS video_file_name
-- JOIN without ON: SQLite treats this as a cross product; all the filtering
-- happens in the WHERE clause below.
FROM playlists p
JOIN videos v
WHERE p.type = 'dynamic'
/* --- include_labels (AND/OR) --- */
-- An empty include_labels list matches everything; otherwise label_logic
-- selects "any required label" (OR) or "all required labels" (AND).
-- NOTE(review): any other label_logic value matches nothing unless the list
-- is empty — confirm 'AND'/'OR' are the only values ever written.
AND (
json_array_length(json_extract(p.rules_json, '$.include_labels')) = 0
OR (
json_extract(p.rules_json, '$.label_logic') = 'OR'
AND EXISTS (
SELECT 1 FROM json_each(json_extract(p.rules_json, '$.include_labels')) jl
JOIN labels l ON l.name = jl.value
JOIN video_labels vl ON vl.label_id = l.id AND vl.video_file_name = v.file_name
)
)
OR (
json_extract(p.rules_json, '$.label_logic') = 'AND'
AND NOT EXISTS (
-- i.e. there is no required label that the video lacks
SELECT 1
FROM json_each(json_extract(p.rules_json, '$.include_labels')) jl
WHERE jl.value NOT IN (
SELECT l2.name
FROM labels l2
JOIN video_labels vl2 ON l2.id = vl2.label_id
WHERE vl2.video_file_name = v.file_name
)
)
)
)
/* --- exclude_labels --- */
AND NOT EXISTS (
SELECT 1 FROM json_each(json_extract(p.rules_json, '$.exclude_labels')) je
JOIN labels lx ON lx.name = je.value
JOIN video_labels vlx ON vlx.label_id = lx.id AND vlx.video_file_name = v.file_name
)
/* --- date_after / date_delta_days / date_before --- */
-- NOTE(review): date('now', N || ' days') only yields a *past* bound when
-- date_delta_days is negative (e.g. '-30'); with a positive value the OR arm
-- below can never match a past recording — confirm the stored sign.
AND (
json_extract(p.rules_json, '$.date_after') IS NULL
OR v.record_datetime >= json_extract(p.rules_json, '$.date_after')
OR (
json_extract(p.rules_json, '$.date_delta_days') IS NOT NULL
AND v.record_datetime >= date('now', (json_extract(p.rules_json, '$.date_delta_days') || ' days'))
)
)
AND (
json_extract(p.rules_json, '$.date_before') IS NULL
OR v.record_datetime <= json_extract(p.rules_json, '$.date_before')
)
/* --- difficulty, day_of_week, address_keyword --- */
AND (
json_extract(p.rules_json, '$.difficulty') IS NULL
OR v.difficulty_level = json_extract(p.rules_json, '$.difficulty')
)
AND (
json_extract(p.rules_json, '$.day_of_week') IS NULL
OR v.day_of_week = json_extract(p.rules_json, '$.day_of_week')
)
AND (
json_extract(p.rules_json, '$.address_keyword') IS NULL
OR (
-- videos with an unresolved ("unknown") address never match a keyword
v.address NOT LIKE '%unknown%'
AND v.address LIKE '%' || json_extract(p.rules_json, '$.address_keyword') || '%'
)
);
-- 3) Direct parent -> child links, read from the JSON array
--    rules_json.$.include_playlists of each playlist.
DROP VIEW IF EXISTS playlist_direct_includes;
CREATE VIEW playlist_direct_includes AS
SELECT p.id AS parent_playlist_id,
       CAST(inc.value AS INTEGER) AS child_playlist_id
  FROM playlists p,
       json_each(json_extract(p.rules_json, '$.include_playlists')) AS inc
 WHERE json_type(inc.value) IN ('integer', 'text')
   AND json_array_length(json_extract(p.rules_json, '$.include_playlists')) > 0;
-- 4) Transitive closure of the include links: a parent "includes" every
--    descendant reachable through playlist_direct_includes.
-- NOTE(review): a cycle in include_playlists would make this UNION ALL
-- recursion loop forever — confirm cycles cannot be saved.
DROP VIEW IF EXISTS playlist_includes_recursive;
CREATE VIEW playlist_includes_recursive AS
WITH RECURSIVE closure(parent_playlist_id, child_playlist_id) AS (
    -- seed: the direct includes
    SELECT parent_playlist_id, child_playlist_id
      FROM playlist_direct_includes
    UNION ALL
    -- step: if A includes B and B includes C, then A includes C
    SELECT link.parent_playlist_id, acc.child_playlist_id
      FROM playlist_direct_includes link
      JOIN closure acc ON acc.parent_playlist_id = link.child_playlist_id
)
SELECT DISTINCT parent_playlist_id AS parent_id,
                child_playlist_id  AS child_id
  FROM closure;
-- 5) Same extraction for rules_json.$.exclude_playlists: direct
--    parent -> excluded-child links.
DROP VIEW IF EXISTS playlist_direct_excludes;
CREATE VIEW playlist_direct_excludes AS
SELECT p.id AS parent_playlist_id,
       CAST(exc.value AS INTEGER) AS child_playlist_id
  FROM playlists p,
       json_each(json_extract(p.rules_json, '$.exclude_playlists')) AS exc
 WHERE json_type(exc.value) IN ('integer', 'text')
   AND json_array_length(json_extract(p.rules_json, '$.exclude_playlists')) > 0;
-- Transitive closure of the exclude links (same shape as view 4).
DROP VIEW IF EXISTS playlist_excludes_recursive;
CREATE VIEW playlist_excludes_recursive AS
WITH RECURSIVE closure(parent_playlist_id, child_playlist_id) AS (
    SELECT parent_playlist_id, child_playlist_id
      FROM playlist_direct_excludes
    UNION ALL
    SELECT link.parent_playlist_id, acc.child_playlist_id
      FROM playlist_direct_excludes link
      JOIN closure acc ON acc.parent_playlist_id = link.child_playlist_id
)
SELECT DISTINCT parent_playlist_id AS parent_id,
                child_playlist_id  AS child_id
  FROM closure;
-- 6) Videos contributed by the included playlists (manual + dynamic base).
--    For each parent, collect videos that belong to any included child
--    playlist (direct or transitive).
--    OR mode (rules.logic = 'OR'): parent's own base rows UNION child videos.
-- NOTE(review): `playlist_included_videos` is referenced here and in view 7
-- but is not defined anywhere in this file — it must be created elsewhere
-- (the schema's CTE chain contains one with that name); confirm it exists
-- before these views are queried, otherwise they fail at query time.
DROP VIEW IF EXISTS playlist_includes_union;
CREATE VIEW playlist_includes_union AS
-- parent's own base rows (OR-mode playlists only)
SELECT DISTINCT db.playlist_id, db.playlist_name, db.playlist_type, db.video_file_name
FROM dynamic_playlist_videos_base db
WHERE
json_extract((SELECT rules_json FROM playlists WHERE id = db.playlist_id), '$.logic') = 'OR'
UNION
-- plus every video of any included child
SELECT DISTINCT iv.parent_playlist_id AS playlist_id,
(SELECT name FROM playlists WHERE id = iv.parent_playlist_id) AS playlist_name,
'dynamic' AS playlist_type,
iv.video_file_name
FROM playlist_included_videos iv
JOIN playlists p ON p.id = iv.parent_playlist_id
WHERE json_extract(p.rules_json, '$.logic') = 'OR';
-- 7) AND mode (rules.logic = 'AND').
DROP VIEW IF EXISTS playlist_includes_intersection;
CREATE VIEW playlist_includes_intersection AS
-- Case 1: AND + included playlists => strict intersection of the parent's
-- own base rows with the videos contributed by its included children.
-- NOTE(review): `playlist_included_videos` is not defined in this file — see
-- the note on view 6.
SELECT DISTINCT db.playlist_id, db.playlist_name, db.playlist_type, db.video_file_name
FROM dynamic_playlist_videos_base db
WHERE json_extract((SELECT rules_json FROM playlists WHERE id = db.playlist_id), '$.logic') = 'AND'
AND json_array_length(json_extract((SELECT rules_json FROM playlists WHERE id = db.playlist_id), '$.include_playlists')) > 0
AND db.video_file_name IN (
SELECT iv.video_file_name
FROM playlist_included_videos iv
WHERE iv.parent_playlist_id = db.playlist_id
)
UNION ALL
-- Case 2: AND + no included playlist => keep the base rows untouched.
SELECT DISTINCT db.playlist_id, db.playlist_name, db.playlist_type, db.video_file_name
FROM dynamic_playlist_videos_base db
WHERE json_extract((SELECT rules_json FROM playlists WHERE id = db.playlist_id), '$.logic') = 'AND'
AND (
json_array_length(json_extract((SELECT rules_json FROM playlists WHERE id = db.playlist_id), '$.include_playlists')) IS NULL
OR json_array_length(json_extract((SELECT rules_json FROM playlists WHERE id = db.playlist_id), '$.include_playlists')) = 0
);
-- NOTE(review): a dynamic playlist whose rules omit "logic" (NULL) matches
-- neither view 6 nor view 7 and silently disappears — confirm a default
-- 'AND'/'OR' is always written into rules_json.
-- 8) OR-mode and AND-mode results merged into one relation.
DROP VIEW IF EXISTS playlist_after_includes;
CREATE VIEW playlist_after_includes AS
SELECT playlist_id, playlist_name, playlist_type, video_file_name
  FROM playlist_includes_union
UNION ALL
SELECT playlist_id, playlist_name, playlist_type, video_file_name
  FROM playlist_includes_intersection;
-- 9) Videos banned from each parent playlist: every video belonging to any
--    (transitively) excluded child, whether that child is manual or dynamic.
DROP VIEW IF EXISTS playlist_excluded_videos;
CREATE VIEW playlist_excluded_videos AS
SELECT ex.parent_id AS parent_playlist_id,
       mv.video_file_name
  FROM playlist_excludes_recursive ex
  JOIN manual_playlists_videos mv ON mv.playlist_id = ex.child_id
UNION
SELECT ex.parent_id AS parent_playlist_id,
       dv.video_file_name
  FROM playlist_excludes_recursive ex
  JOIN dynamic_playlist_videos_base dv ON dv.playlist_id = ex.child_id;
-- 10) Apply the exclusions: keep a (playlist, video) pair only when no
--     exclusion row targets it (anti-join, expressed with NOT EXISTS).
DROP VIEW IF EXISTS playlist_after_excludes;
CREATE VIEW playlist_after_excludes AS
SELECT pai.playlist_id, pai.playlist_name, pai.playlist_type, pai.video_file_name
  FROM playlist_after_includes pai
 WHERE NOT EXISTS (
         SELECT 1
           FROM playlist_excluded_videos pev
          WHERE pev.parent_playlist_id = pai.playlist_id
            AND pev.video_file_name = pai.video_file_name
       );
-- 11) Final flat result: manual playlists as-is, plus dynamic playlists
--     after include/exclude resolution.
DROP VIEW IF EXISTS playlist_videos_flat;
CREATE VIEW playlist_videos_flat AS
SELECT m.playlist_id, m.playlist_name, m.playlist_type, m.video_file_name
  FROM manual_playlists_videos m
UNION ALL
SELECT d.playlist_id, d.playlist_name, d.playlist_type, d.video_file_name
  FROM playlist_after_excludes d;

View File

@@ -0,0 +1,48 @@
# Append one line to a file under a mutual-exclusion lock (blocking, with a
# timeout and signal-safe cleanup). The lock is a sibling directory created
# atomically with mkdir.
#   $1 = line to append
#   $2 = target file (name must be at least 4 characters: the lock path is
#        derived by replacing its last 4 characters with ".lock")
# Returns 1 when the lock cannot be acquired within the timeout.
append_with_lock() {
    # BUG FIX: the parameters were read from $2/$3 while the caller passes
    # (line, file) as $1/$2, so "file" was always empty and every append
    # (and the lock-dir derivation) failed.
    local line="$1"
    local file="$2"
    local lock_dir="${file::-4}.lock"
    local timeout=30                 # seconds to wait for the lock
    local start_time=$(date +%s)
    local got_lock=false
    # Release the lock on interruption or exit so a killed writer does not
    # leave the lock directory behind.
    cleanup_lock() {
        if [ -d "$lock_dir" ]; then
            rmdir "$lock_dir" 2>/dev/null || :
        fi
    }
    trap cleanup_lock INT TERM EXIT
    # Spin in 100 ms steps until mkdir succeeds or the timeout elapses.
    while true; do
        local current_time=$(date +%s)
        if [ $((current_time - start_time)) -ge $timeout ]; then
            echo "Timeout atteint pour l'obtention du verrou sur $file" >&2
            trap - INT TERM EXIT     # drop the trap to avoid a double cleanup
            return 1
        fi
        if mkdir "$lock_dir" 2>/dev/null; then
            got_lock=true
            break
        else
            sleep 0.1
        fi
    done
    # Critical section: append, then release the lock.
    if $got_lock; then
        echo "$line" >> "$file"
        echo "$line" >> "$file" # write them twice because of bug where a line is skipped
        rmdir "$lock_dir" || :
        trap - INT TERM EXIT
    fi
}
export -f append_with_lock

View File

@@ -2,11 +2,55 @@
set -euo pipefail
IFS=$'\n\t'
# === Command-line argument handling ===
PROCESS_ALL=false
PRINT_ERR=false
# Print usage help and exit with status 1.
usage() {
echo "Usage: $0 [--all|-a]"
echo "Options:"
echo " --all, -a Traiter tous les fichiers (ignore la liste de fichiers)"
echo " --force, -f Traiter les fichiers ignorés"
echo " --help, -h Afficher cette aide"
echo " --print-err, -e Afficher les erreurs en stderr et non dans le fichier de logs"
exit 1
}
# Parse the arguments.
while [[ $# -gt 0 ]]; do
case "$1" in
--all|-a)
PROCESS_ALL=true
shift
;;
--force|-f)
# NOTE(review): unlike PROCESS_ALL/PRINT_ERR, PROCESS_IGNORED_FILES has no
# default above, so under `set -u` any later unguarded expansion of it
# fails when --force was not given — confirm all uses are guarded.
PROCESS_IGNORED_FILES=true
shift
;;
--help|-h)
usage
;;
--print-err|-e)
PRINT_ERR=true
shift
;;
*)
echo "Option inconnue : $1" >&2
usage
;;
esac
done
# Directory containing this script (used to source siblings below).
SCRIPTS_DIR=$(dirname `realpath ${BASH_SOURCE[0]}`)
# === CONFIGURATION ===
# Watched download folder and destination for raw videos.
export DOSSIER_SOURCE="${HOME}/Downloads"
export DOSSIER_DESTINATION_RAW="${HOME}/Documents/.DanceVideos/raw"
# Per-run work list of moved files ($RANDOM keeps concurrent runs apart).
export TEMP_FILE="/tmp/dancevideos_moved_files_$RANDOM.txt"
# Create/truncate the work list.
> "$TEMP_FILE"
sanitize_name() {
local name="$1"
@@ -17,6 +61,8 @@ sanitize_name() {
}
export -f sanitize_name
source $SCRIPTS_DIR/append_with_lock.sh
# Fonction pour vérifier et déplacer un fichier
check_and_move() {
local fichier="$1"
@@ -28,6 +74,9 @@ check_and_move() {
if [ "$taille1" -eq "$taille2" ] && [ "$taille2" -gt 0 ]; then
echo "Déplacement de $(basename "$fichier")"
rsync -av --remove-source-files "$fichier" "$DOSSIER_DESTINATION_RAW/$(sanitize_name "$(basename "$fichier")")"
if ! append_with_lock "$DOSSIER_DESTINATION_RAW/$(sanitize_name "$(basename "$fichier")")" "$TEMP_FILE"; then
echo "Échec de l'écriture dans $TEMP_FILE (timeout)" >&2
fi
else
echo "Fichier $(basename "$fichier") encore en cours de réception."
fi
@@ -65,17 +114,14 @@ mp4_dir() {
local dir=$DOSSIER_DESTINATION_MP4
dir=$dir/$(date -jf "%Y-%m-%dT%H:%M:%S" "$(get_creation_time $raw)" +%Y%m%d_%H%M%S)
dir=${dir}_${weekday}_${address}
mkdir -p $dir
echo $dir
}
export -f mp4_dir
# Extract a thumbnail: a single frame taken 3 seconds into the raw video.
#   $1 = input video, $2 = output jpeg path
write_thumbnail() {
    set -x
    local raw="$1"
    local thumbnail="$2"
    # FIX: quote the paths (they may contain spaces) and add -nostdin so
    # ffmpeg cannot consume the caller's stdin when invoked from a
    # while-read loop (matches the -nostdin usage in the rotation script).
    ffmpeg -nostdin -ss 00:00:03 -i "$raw" -vframes 1 "$thumbnail" 2>/dev/null
    set +x
}
export -f write_thumbnail
@@ -86,13 +132,53 @@ write_mp4() {
}
export -f write_mp4
# Derive a short, human-readable, deterministic suffix from a file's MD5 so
# that videos lacking a creation timestamp still get distinct folder names.
#   $1 = file to hash
# Prints one word from the pool, or "default" when the file cannot be hashed.
generate_funny_suffix() {
    local file="$1"
    # Pool of readable, unique suffixes (44 entries).
    local funny_names=(
        "pamplemousse" "canard" "yolo" "flan" "tortue" "biscotte" "poney" "baguette"
        "chaussette" "banane" "hippopotame" "tuba" "bretzel" "chocolatine" "carambar"
        "frometon" "krokmou" "gnocchi" "clafoutis" "capybara" "choubidou" "pingouin"
        "maracas" "raclette" "saucisson" "pistache" "chamallow" "boomerang" "pirouette"
        "moustache" "abricot" "falafel" "pouet" "zouzou" "cornichon" "gnouf" "mouette"
        "paprika" "crouton" "galipette" "gratin" "pouliche" "brocoli" "nugget"
    )
    local hash_val
    # `md5 -q` is the macOS tool; fall back to GNU `md5sum` so the function
    # also works on Linux. The trailing awk always exits 0, preserving the
    # empty-hash fallback below even under `set -e`.
    hash_val=$(md5 -q "$file" 2>/dev/null || md5sum "$file" 2>/dev/null | awk '{print $1}')
    # Fallback when hashing failed (e.g. unreadable file).
    if [[ -z "$hash_val" ]]; then
        echo "default"
        return
    fi
    # Last 8 hex chars -> bounded integer, for a reliable modulo.
    local short_hex="${hash_val: -8}"
    local index=$(( 0x$short_hex % ${#funny_names[@]} ))
    echo "${funny_names[$index]}"
}
export -f generate_funny_suffix
process_raw_file() {
local raw="$1"
local dir_suffixe="${2:-}"
local ct weekday duration lat lon address
IFS="|" read -r ct weekday duration lat lon address <<<"$(process_video "$raw")"
# récupérer les infos eventuelles dans la BDD
# si fichier videos existent toujours ingorer, sinon écraser
# TODO
local dir=$(mp4_dir $raw $weekday "$address")
[ -n "$dir_suffixe" ] && dir=${dir}_${dir_suffixe} || :
mkdir -p $dir
local thumbnail=${dir}/thumbnail.jpg
$(write_thumbnail $raw $thumbnail)
@@ -121,24 +207,84 @@ export -f screen_video
# Handler for WhatsApp videos (UUID-named .mp4): they carry no creation
# timestamp, so a deterministic funny suffix keeps output folders unique.
#   $1 = raw video file
whatsapp_video() {
    local raw="$1"
    echo whatsapp $raw
    # FIX: the dump shows process_raw_file invoked twice (a stale
    # pre-suffix call left next to the new one) — keep only the suffixed
    # call, and quote the paths so filenames with spaces survive.
    local suffixe
    suffixe=$(generate_funny_suffix "$raw")
    process_raw_file "$raw" "$suffixe"
}
export -f whatsapp_video
# Handler for raw files that matched no known pattern but were requested via
# --force: process them like timestamp-less videos, with a funny suffix.
#   $1 = raw video file
ignored_video() {
    local raw="$1"
    echo ignored "$raw"
    # FIX: quote the path arguments so filenames with spaces survive, and
    # split the `local` declaration from the command substitution so its
    # exit status is not masked.
    local suffixe
    suffixe=$(generate_funny_suffix "$raw")
    process_raw_file "$raw" "$suffixe"
}
export -f ignored_video
# Convert pending raw videos into their mp4 layout.
#   --all   : rescan the whole raw folder by filename pattern;
#   default : only the files listed in $TEMP_FILE by the mover;
#   --force : additionally process raw files absent from the database.
convert_raws() {
    if $PROCESS_ALL; then
        # NOTE(review): each find|parallel pair below is run twice verbatim —
        # presumably the same "skipped item" workaround as the double write in
        # append_with_lock; confirm intent before deduplicating.
        # iPhone clips: *.mov
        find "$DOSSIER_DESTINATION_RAW" -type f \
            -name "*.mov" \
            -print0 | parallel -0 -j 4 iphone_video
        find "$DOSSIER_DESTINATION_RAW" -type f \
            -name "*.mov" \
            -print0 | parallel -0 -j 4 iphone_video
        # Screen recordings
        find "$DOSSIER_DESTINATION_RAW" -type f \
            -name "screenrecording*.mp4" \
            -print0 | parallel -0 -j 4 screen_video
        find "$DOSSIER_DESTINATION_RAW" -type f \
            -name "screenrecording*.mp4" \
            -print0 | parallel -0 -j 4 screen_video
        # WhatsApp videos: UUID-named mp4 files
        find "$DOSSIER_DESTINATION_RAW" -type f \
            -name '*[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]-[0-9a-f][0-9a-f][0-9a-f][0-9a-f]-[0-9a-f][0-9a-f][0-9a-f][0-9a-f]-[0-9a-f][0-9a-f][0-9a-f][0-9a-f]-[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]*.mp4' \
            -print0 | parallel -0 -j 4 whatsapp_video
        find "$DOSSIER_DESTINATION_RAW" -type f \
            -name '*[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]-[0-9a-f][0-9a-f][0-9a-f][0-9a-f]-[0-9a-f][0-9a-f][0-9a-f][0-9a-f]-[0-9a-f][0-9a-f][0-9a-f][0-9a-f]-[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]*.mp4' \
            -print0 | parallel -0 -j 4 whatsapp_video
    else
        # Incremental mode: dispatch each file recorded by the mover.
        if [ -f "$TEMP_FILE" ]; then
            while IFS= read -r file || [ -n "$file" ]; do
                file=$(echo "$file" | tr -d '\r' | xargs)
                # Skip blank lines.
                if [ -z "$file" ]; then
                    continue
                fi
                echo "Dealing with $file"
                local filename
                filename=$(basename "$file")
                if [[ "$filename" == *.mov ]]; then
                    iphone_video "$file"
                elif [[ "$filename" == screenrecording*.mp4 ]]; then
                    screen_video "$file"
                elif [[ "$filename" =~ ^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\.mp4$ ]]; then
                    whatsapp_video "$file"
                else
                    echo "$file didn't match any pattern"
                fi
            done < "$TEMP_FILE"
        fi
    fi
    # FIX: PROCESS_IGNORED_FILES is only assigned when --force is passed, so
    # the previous unguarded "$PROCESS_IGNORED_FILES" aborted every
    # non---force run under `set -u`.
    if [[ -n "${PROCESS_IGNORED_FILES:-}" ]]; then
        set -x
        for f in $(find "$DOSSIER_DESTINATION_RAW" -maxdepth 1 -type f \( -iname "*.mp4" -o -iname "*.mov" \) -print); do
            if [[ -z $(sqlite3 "$DANCE_VIDEOS_DB" "SELECT 1 FROM VIDEOS WHERE RAW_FILE='$f';") ]]; then
                ignored_video "$f"
            fi
        done
        set +x
    fi
}
# FIX: the conversion was invoked twice (a leftover pre-redirect call from an
# earlier revision sat above the redirect), reprocessing every pending file;
# run it once, after deciding where stderr goes.
# Unless --print-err was given, send all remaining stderr to the log file.
$PRINT_ERR || exec 2> /tmp/DanceVideos.stderr
convert_raws
set -x
# If this run moved any new files, restart the Streamlit labelling UI on the
# still-unlabeled videos.
if [ 0 -lt $(wc -l $TEMP_FILE | awk '{print $1}') ]; then
    set +o pipefail
    STREAMLIT_PID="$(ps aux | grep streamlit | grep -v 'grep' | awk '{print $2}')"
    set -o pipefail
    if [ ! -z "$STREAMLIT_PID" ]; then
        kill $STREAMLIT_PID
    fi
    # (cd $SCRIPTS_DIR/..; source .venv/bin/activate; streamlit run app/app.py -- --unlabeled &)
    ROOT=$(realpath $SCRIPTS_DIR/..)
    source $ROOT/.venv/bin/activate
    streamlit run $ROOT/app/app.py -- --unlabeled &
fi

View File

@@ -2,25 +2,21 @@ get_rotation_filter() {
local f="$1"
local rotation
rotation=$(exiftool -Rotation -n "$f" | awk '{print $3}')
# echo $rotation $f >> "rotations.txt"
case "$rotation" in
# 90) echo "transpose=1" ;;
# 270) echo "transpose=2" ;;
# 180) echo "hflip,vflip" ;;
# *) echo "" ;;
*) echo "transpose=1" ;;
esac
}
export -f get_rotation_filter
reencode_with_rotation() {
set -x
local src="$1"
local dst="$2"
local filter
filter="$(get_rotation_filter "$src")"
if [ -n "$filter" ]; then
echo " Correction dorientation (rotation=${filter})"
#echo " Correction dorientation (rotation=${filter})"
if ffmpeg -encoders 2>/dev/null | grep -q 'h264_videotoolbox'; then
ffmpeg -nostdin -i "$src" -vf "$filter" \
-c:v h264_videotoolbox -b:v 5M -c:a aac -map_metadata -1 -y "$dst"
@@ -42,5 +38,6 @@ reencode_with_rotation() {
fi
fi
fi
set +x
}
export -f reencode_with_rotation

34
program2/check.sh Executable file
View File

@@ -0,0 +1,34 @@
#!/bin/bash
# Consistency checks between the SQLite catalogue and the files on disk.
set -euo pipefail
DANCE_VIDEOS_DB="${HOME}/Documents/.DanceVideos/db.sqlite"
RAW_VIDEOS_DIR="${HOME}/Documents/.DanceVideos/raw/"
# For every video registered in the DB, verify that both its raw file and its
# mp4 file still exist on disk; prints "<raw> OK" or "<raw> KO : <mp4>" (the
# latter on stderr).
# NOTE(review): this check is defined but never invoked at the bottom of the
# script — confirm whether it should run alongside check_files_are_registered.
check_registered_files_exist() {
sqlite3 -separator '|' "$DANCE_VIDEOS_DB" "
SELECT file_name, raw_file, mp4_file FROM videos;
" | while IFS='|' read -r file_name raw_file mp4_file; do
# Defensive header skip (sqlite3 emits no header unless -header is set).
if [[ "$file_name" == "file_name" ]]; then
continue
fi
# Flag rows whose raw or mp4 file is missing on disk.
if [[ ! -e "$raw_file" || ! -e "$mp4_file" ]]; then
echo "$raw_file KO : $mp4_file" >&2
else
echo "$raw_file OK"
fi
done
}
# Report raw files (mp4/mov at the top level of RAW_VIDEOS_DIR) that the
# database does not know about; each offender is printed on stderr.
check_files_are_registered() {
    # FIX: iterate with find -print0 / read -d '' so paths containing spaces
    # or newlines are not word-split, and escape single quotes before
    # interpolating the path into the SQL string literal.
    local f f_sql
    while IFS= read -r -d '' f; do
        f_sql="${f//\'/\'\'}"
        if [[ -z $(sqlite3 "$DANCE_VIDEOS_DB" "SELECT 1 FROM VIDEOS WHERE RAW_FILE='$f_sql';") ]]; then
            echo "$f not registered" >&2
            # open $f
        fi
    done < <(find "$RAW_VIDEOS_DIR" -maxdepth 1 -type f \( -iname "*.mp4" -o -iname "*.mov" \) -print0)
}
check_files_are_registered

View File

@@ -1,14 +1,85 @@
#!/bin/bash
set -euo pipefail
# NOTE(review): the next lines contain duplicated definitions (SCRIPTS_DIR,
# DANCE_VIDEOS_DB, DOSSIER_PLAYLIST, rm) — this dump appears to interleave the
# pre- and post-change sides of a diff; keep only one variant when merging.
SCRIPTS_DIR=$(dirname `realpath ${BASH_SOURCE[0]}`)
export DANCE_VIDEOS_DB="${HOME}/Documents/.DanceVideos/db.sqlite"
export DOSSIER_PLAYLIST="$(dirname $DANCE_VIDEOS_DB)/playlists"
SCRIPTS_DIR="$(dirname "$(realpath "${BASH_SOURCE[0]}")")"
DANCE_VIDEOS_DB="${HOME}/Documents/.DanceVideos/db.sqlite"
DOSSIER_PLAYLIST="$(dirname "$DANCE_VIDEOS_DB")/playlists"
rm -rf $DOSSIER_PLAYLIST
# Clean and recreate the playlists folder.
rm -rf "$DOSSIER_PLAYLIST"
mkdir -p "$DOSSIER_PLAYLIST"
export PLAYLIST_ALL=$DOSSIER_PLAYLIST/all
mkdir -p $PLAYLIST_ALL
# Earlier variant of the "all" playlist creation, kept for reference:
# PLAYLIST_ALL="$DOSSIER_PLAYLIST/all"
# mkdir -p "$PLAYLIST_ALL"
# while IFS= read -r v; do
# ln -s "$v" "$PLAYLIST_ALL/$(basename "$(dirname "$v")").mp4"
# done < <(sqlite3 "$DANCE_VIDEOS_DB" "SELECT rotated_file FROM videos WHERE rotated_file IS NOT NULL;")
# Symlink every rotated video into the "all" playlist.
for v in $(sqlite3 $DANCE_VIDEOS_DB "select rotated_file from videos"); do
ln -s $v $PLAYLIST_ALL/$(basename $(dirname $v)).mp4
done
# Build the exported symlink name for a video.
#   $1 = rotated_file path whose parent directory is named YYYYMMDD_HHMMSS...
#   $2 = optional alias, inserted right after the date
#   $3 = optional count-or-music marker (e.g. 'C' / 'M')
# Prints "<yymmdd>[_<alias>][_<marker>]_<HHMMSS...>.mp4"; exits 2 when the
# parent directory does not start with YYYYMMDD_HHMMSS.
video_filename () {
    local rotated_file=$1
    local alias=${2:-}
    local count_or_music=${3:-}
    # FIX: the working variables were global and leaked into (or could be
    # clobbered by) the caller's scope; make them local.
    local name_part date_part rest_part output_name safe_alias
    name_part="$(basename "$(dirname "$rotated_file")")"
    # Split the date off so the alias can be inserted right after it.
    if [[ "$name_part" =~ ^([0-9]{8})_([0-9]{6}.*)$ ]]; then
        date_part="${BASH_REMATCH[1]}"
        rest_part="${BASH_REMATCH[2]}"
    else
        echo unexpected format >&2
        exit 2
    fi
    output_name="${date_part:2}" #ignore the 20 from 20xx years
    if [[ -n "$alias" ]]; then
        # Sanitize the alias so it is safe inside a file name.
        safe_alias="$(echo "$alias" | tr ' /' '_' | tr -dc '[:alnum:]_')"
        output_name="${output_name}_${safe_alias}"
    fi
    if [[ -n "$count_or_music" ]]; then
        output_name="${output_name}_${count_or_music}"
    fi
    echo "${output_name}_${rest_part}.mp4"
}
# For each (playlist, video) pair from the flattened playlist view, create the
# playlist folder and symlink the rotated video under a descriptive name.
# The LEFT JOIN resolves an optional 'C' (comptes) / 'M' (en musique) marker
# from the video's labels.
sqlite3 -separator '|' "$DANCE_VIDEOS_DB" "
SELECT pv.playlist_id, pv.playlist_name, pv.video_file_name, v.rotated_file, v.alias, COALESCE( count_or_music_labels.count_or_music , '') count_or_music
FROM playlist_videos pv JOIN videos v ON pv.video_file_name=v.file_name
LEFT JOIN (
select * from video_labels
natural join (
select id as label_id, CASE WHEN name='comptes' THEN 'C' ELSE 'M' END as count_or_music
from labels where name in ('comptes','en musique')
)
) count_or_music_labels ON v.file_name=count_or_music_labels.video_file_name
WHERE v.rotated_file IS NOT NULL;
" | while IFS='|' read -r playlist_id playlist_name video_file_name rotated_file alias count_or_music; do
# Defensive header skip (sqlite3 emits no header unless -header is set).
if [[ "$playlist_id" == "playlist_id" ]]; then
continue
fi
# Skip malformed or empty rows.
if [[ -z "$playlist_id" || -z "$rotated_file" ]]; then
continue
fi
# NOTE(review): set -x traces every iteration to stderr — presumably
# leftover debugging; confirm before removing.
set -x
# Create the playlist folder.
PLAYLIST_DIR="$DOSSIER_PLAYLIST/$playlist_name"
mkdir -p "$PLAYLIST_DIR"
# Symlink under the computed name (older naming scheme kept for reference).
# ln -sf "$rotated_file" "$PLAYLIST_DIR/$(basename "$(dirname "$rotated_file")").mp4"
# NOTE(review): $rotated_file/$alias/$count_or_music are unquoted in the
# call below — aliases or paths containing spaces will word-split; confirm
# the inputs can never contain whitespace.
ln -sf "$rotated_file" "$PLAYLIST_DIR/$(video_filename $rotated_file $alias $count_or_music)"
set +x
done