feat(admin-web, functions): overhaul music library and add AI genre classification
Some checks failed
CI / TypeScript (push) Failing after 48s
CI / ESLint (push) Failing after 20s
CI / Tests (push) Failing after 33s
CI / Build Check (push) Has been skipped
CI / Admin Web Tests (push) Failing after 18s
CI / Deploy Edge Functions (push) Has been skipped

- admin-web: Added an "All Music" library view with search, genre, and status filters.
- admin-web: Converted Jobs view to use expandable cards instead of a split pane.
- admin-web: Added ability to delete individual tracks from a job.
- functions: Added new `youtube-classify` edge function to automatically categorize tracks using Gemini LLM.
- functions: Integrated AI genre classification during initial playlist import if no manual genre is provided.
- worker: Added `/classify` endpoint for the worker to securely interface with Gemini.
- scripts: Updated deployment script to include `GEMINI_API_KEY`.
This commit is contained in:
Millian Lamiaux
2026-03-29 12:52:02 +02:00
parent 3d8d9efd70
commit edcd857c70
9 changed files with 1331 additions and 180 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -120,11 +120,17 @@ async function invokePatch<T>(
return res.json();
}
/** A download item enriched with the title of the playlist (job) it belongs to. */
export interface ItemWithPlaylist extends DownloadItem {
  // Title of the parent job's playlist, or null when the job is unknown.
  playlist_title: string | null;
}
export function useYouTubeDownload() {
const [jobs, setJobs] = useState<DownloadJob[]>([]);
const [allItems, setAllItems] = useState<ItemWithPlaylist[]>([]);
const [activeJob, setActiveJob] = useState<JobWithItems | null>(null);
const [isProcessing, setIsProcessing] = useState(false);
const [isImporting, setIsImporting] = useState(false);
const [isClassifying, setIsClassifying] = useState(false);
const abortRef = useRef<AbortController | null>(null);
/** Fetch all jobs (list view). */
@@ -134,6 +140,43 @@ export function useYouTubeDownload() {
return data.jobs;
}, []);
/**
 * Fetch ALL download items across all jobs, enriched with playlist title.
 *
 * Items are read directly via Supabase (RLS ensures admin-only access);
 * jobs are fetched through the `youtube-status` edge function only when
 * the local `jobs` state is still empty.
 */
const fetchAllItems = useCallback(async () => {
  // Cast needed because the Database type only defines Row (no Insert/Update)
  // for download_items, causing Supabase client to infer `never`.
  const { data: items, error: itemsErr } = (await supabase
    .from("download_items")
    .select("*")
    .order("created_at", { ascending: false })) as {
    data: DownloadItem[] | null;
    error: { message: string } | null;
  };
  if (itemsErr) throw new Error(itemsErr.message);

  // Use the current jobs list for playlist titles, fetching it if empty.
  let currentJobs = jobs;
  if (currentJobs.length === 0) {
    const data = await invokeGet<{ jobs: DownloadJob[] }>("youtube-status");
    currentJobs = data.jobs;
    setJobs(currentJobs);
  }

  // Map of job_id -> playlist_title. A Map (built once, prefer-const) is
  // safer than a plain object for dynamic keys.
  const titleByJobId = new Map(
    currentJobs.map((j) => [j.id, j.playlist_title])
  );

  const enriched: ItemWithPlaylist[] = (items ?? []).map((item) => ({
    ...item,
    playlist_title: titleByJobId.get(item.job_id) ?? null,
  }));
  setAllItems(enriched);
  return enriched;
}, [jobs]);
/** Fetch a single job with its items. */
const refreshStatus = useCallback(async (jobId: string) => {
const data = await invokeGet<{ job: DownloadJob; items: DownloadItem[] }>(
@@ -263,6 +306,45 @@ export function useYouTubeDownload() {
[]
);
/**
 * Delete a single download item (and its audio file from storage), then
 * reconcile every piece of local state that may reference it.
 */
const deleteItem = useCallback(
  async (itemId: string) => {
    const result = await invokeDelete<{
      deleted: boolean;
      itemId: string;
      jobId: string;
    }>("youtube-status", { itemId });

    // Shared filter: drop the deleted item from any item list.
    const withoutItem = <T extends { id: string }>(list: T[]) =>
      list.filter((entry) => entry.id !== itemId);

    // Remove from the flat library view.
    setAllItems(withoutItem);

    // Remove from the currently open job, if one is loaded.
    setActiveJob((prev) =>
      prev ? { ...prev, items: withoutItem(prev.items) } : prev
    );

    // Decrement the parent job's total in the jobs list. The item's status
    // is unknown here, so only total_items is adjusted; the next fetchJobs()
    // reconciles exact counts from the server.
    setJobs((prev) =>
      prev.map((job) =>
        job.id === result.jobId
          ? { ...job, total_items: Math.max(0, job.total_items - 1) }
          : job
      )
    );

    return result;
  },
  []
);
/** Update the genre on a single download item. */
const updateItemGenre = useCallback(
async (itemId: string, genre: MusicGenre | null) => {
@@ -285,17 +367,46 @@ export function useYouTubeDownload() {
[]
);
/**
 * Re-classify genres for a job's items via YouTube metadata + Gemini.
 * Pass `force = true` to overwrite genres that are already set.
 */
const reclassifyJob = useCallback(
  async (jobId: string, force = false) => {
    setIsClassifying(true);
    try {
      const invocation = await supabase.functions.invoke("youtube-classify", {
        body: { jobId, force },
      });
      if (invocation.error) {
        throw new Error(invocation.error.message ?? "Classification failed");
      }
      const data = invocation.data;
      if (data?.error) {
        throw new Error(data.error);
      }
      // Refresh the open job and — best-effort — the full library view so
      // the updated genres are visible immediately.
      await refreshStatus(jobId);
      await fetchAllItems().catch(() => {});
      return data as { classified: number; skipped: number };
    } finally {
      setIsClassifying(false);
    }
  },
  [refreshStatus, fetchAllItems]
);
return {
jobs,
allItems,
activeJob,
isProcessing,
isImporting,
isClassifying,
fetchJobs,
fetchAllItems,
refreshStatus,
importPlaylist,
startProcessing,
stopProcessing,
deleteJob,
deleteItem,
updateItemGenre,
reclassifyJob,
};
}

View File

@@ -45,6 +45,7 @@ ssh "$DEPLOY_USER@$DEPLOY_HOST" "\
-e SUPABASE_URL=\$(docker exec supabase-edge-functions printenv SUPABASE_URL) \
-e SUPABASE_SERVICE_ROLE_KEY=\$(docker exec supabase-edge-functions printenv SUPABASE_SERVICE_ROLE_KEY) \
-e SUPABASE_PUBLIC_URL=https://supabase.1000co.fr \
-e GEMINI_API_KEY=\$(cat /opt/supabase/.env.gemini 2>/dev/null || echo '') \
-e STORAGE_BUCKET=workout-audio \
-e PORT=3001 \
youtube-worker:latest"

View File

@@ -10,6 +10,7 @@ const WORKER_URL =
/** A single video entry extracted from a YouTube playlist. */
export interface PlaylistItem {
  videoId: string;
  title: string;
  // Channel/uploader name, when available from the worker.
  author: string | null;
  durationSeconds: number;
  thumbnailUrl: string | null;
}
@@ -86,3 +87,33 @@ export async function downloadAndUploadAudio(
): Promise<DownloadResult> {
return workerFetch<DownloadResult>("/download", { videoId, jobId });
}
/** Minimal track info sent to the worker's /classify endpoint. */
export interface ClassifyInput {
  videoId: string;
  title: string;
  // Channel/uploader name when known — passed through to the classifier.
  author: string | null;
}

/** Shape of the worker's /classify response. */
interface ClassifyResult {
  // Map of videoId -> classified genre.
  genres: Record<string, string>;
  // Present when classification failed worker-side (best-effort contract).
  warning?: string;
}
/**
 * Classifies tracks into music genres via YouTube metadata + Gemini LLM,
 * delegated to the worker sidecar's /classify endpoint.
 *
 * Best-effort: resolves to an empty object on any failure (never throws),
 * so callers can always treat the result as a videoId -> genre map.
 */
export async function classifyGenres(
  items: ClassifyInput[]
): Promise<Record<string, string>> {
  let result: ClassifyResult;
  try {
    result = await workerFetch<ClassifyResult>("/classify", { items });
  } catch (err) {
    console.error("Genre classification failed:", err);
    return {};
  }
  if (result.warning) {
    console.warn("Genre classification warning:", result.warning);
  }
  return result.genres ?? {};
}

View File

@@ -0,0 +1,119 @@
import { corsHeaders, handleCors } from "../_shared/cors.ts";
import { verifyAdmin, AuthError } from "../_shared/auth.ts";
import { createServiceClient } from "../_shared/supabase-client.ts";
import { classifyGenres } from "../_shared/youtube-client.ts";
// Edge function: re-classify the genres of a job's download items using
// YouTube metadata + Gemini (via the worker sidecar).
//
// POST body: { jobId: string, force?: boolean }
//   force=false (default): only items whose genre is still null are classified.
//   force=true: every item in the job is re-classified.
// Success response: { classified: number, skipped: number }
Deno.serve(async (req: Request) => {
  const corsResponse = handleCors(req);
  if (corsResponse) return corsResponse;

  // Local helper: the original repeated this JSON/CORS response boilerplate
  // for every exit path; centralizing it keeps each branch one line.
  const json = (status: number, body: unknown) =>
    new Response(JSON.stringify(body), {
      status,
      headers: { ...corsHeaders, "Content-Type": "application/json" },
    });

  try {
    if (req.method !== "POST") {
      return json(405, { error: "Method not allowed" });
    }

    // Only admins may trigger classification (throws AuthError otherwise).
    await verifyAdmin(req);

    const { jobId, force = false } = await req.json();
    if (!jobId || typeof jobId !== "string") {
      return json(400, { error: "jobId is required" });
    }

    const supabase = createServiceClient();

    // Verify the job exists before doing any work.
    const { data: job, error: jobError } = await supabase
      .from("download_jobs")
      .select("id")
      .eq("id", jobId)
      .single();
    if (jobError || !job) {
      return json(404, { error: "Job not found" });
    }

    // Fetch items — if force=false, only those with null genre.
    let query = supabase
      .from("download_items")
      .select("id, video_id, title")
      .eq("job_id", jobId);
    if (!force) {
      query = query.is("genre", null);
    }
    const { data: items, error: itemsError } = await query;
    if (itemsError) {
      throw new Error(`Failed to fetch items: ${itemsError.message}`);
    }
    if (!items || items.length === 0) {
      return json(200, { classified: 0, skipped: 0, message: "No items to classify" });
    }

    // Call sidecar to classify genres. classifyGenres is best-effort and
    // returns {} on failure, in which case every item counts as skipped.
    const genres = await classifyGenres(
      items.map((item) => ({
        videoId: item.video_id,
        title: item.title ?? "",
        // The items query does not select an author column, so none is sent.
        author: null,
      }))
    );

    // Persist each classified genre; DB failures count as skipped.
    let classified = 0;
    let skipped = 0;
    for (const item of items) {
      const genre = genres[item.video_id];
      if (!genre) {
        skipped++;
        continue;
      }
      const { error: updateError } = await supabase
        .from("download_items")
        .update({ genre })
        .eq("id", item.id);
      if (updateError) {
        console.error(`Failed to update genre for ${item.video_id}:`, updateError.message);
        skipped++;
      } else {
        classified++;
      }
    }

    return json(200, { classified, skipped });
  } catch (error) {
    // AuthError carries its own HTTP status; everything else is a 500.
    const status = error instanceof AuthError ? error.status : 500;
    const message =
      error instanceof Error ? error.message : "Unknown error occurred";
    return json(status, { error: message });
  }
});

View File

@@ -4,6 +4,7 @@ import { createServiceClient } from "../_shared/supabase-client.ts";
import {
parsePlaylistId,
getPlaylistItems,
classifyGenres,
} from "../_shared/youtube-client.ts";
Deno.serve(async (req: Request) => {
@@ -54,6 +55,19 @@ Deno.serve(async (req: Request) => {
// Create the download job in the database
const supabase = createServiceClient();
// Auto-classify genres via YouTube metadata + Gemini (best-effort).
// Only runs when no manual genre was provided — manual genre overrides all.
let classifiedGenres: Record<string, string> = {};
if (!genre) {
classifiedGenres = await classifyGenres(
playlist.items.map((item) => ({
videoId: item.videoId,
title: item.title,
author: item.author ?? null,
}))
);
}
const { data: job, error: jobError } = await supabase
.from("download_jobs")
.insert({
@@ -73,6 +87,7 @@ Deno.serve(async (req: Request) => {
}
// Create download items for each video
// Priority: manual genre > auto-classified genre > null
const itemRows = playlist.items.map((item) => ({
job_id: job.id,
video_id: item.videoId,
@@ -80,7 +95,7 @@ Deno.serve(async (req: Request) => {
duration_seconds: item.durationSeconds,
thumbnail_url: item.thumbnailUrl,
status: "pending",
genre: genre || null,
genre: genre || classifiedGenres[item.videoId] || null,
}));
const { error: itemsError } = await supabase

View File

@@ -13,11 +13,93 @@ Deno.serve(async (req: Request) => {
const supabase = createServiceClient();
// ── DELETE: remove a job, its items, and storage files ─────
// ── DELETE: remove a job OR a single item ───────────────
if (req.method === "DELETE") {
const { jobId } = await req.json();
const body = await req.json();
const { jobId, itemId } = body as { jobId?: string; itemId?: string };
// ── Delete a single track ──────────────────────────────
if (itemId && typeof itemId === "string") {
// Fetch the item to get its storage_path, status, and parent job_id
const { data: item, error: fetchErr } = await supabase
.from("download_items")
.select("*")
.eq("id", itemId)
.single();
if (fetchErr || !item) {
return new Response(JSON.stringify({ error: "Item not found" }), {
status: 404,
headers: { ...corsHeaders, "Content-Type": "application/json" },
});
}
// Remove audio file from storage if it exists
if (item.storage_path) {
const { error: storageError } = await supabase.storage
.from("workout-audio")
.remove([item.storage_path]);
if (storageError) {
console.error("Storage cleanup error:", storageError.message);
// Continue with DB deletion even if storage cleanup fails
}
}
// Delete the item row
const { error: deleteError } = await supabase
.from("download_items")
.delete()
.eq("id", itemId);
if (deleteError) {
throw new Error(`Failed to delete item: ${deleteError.message}`);
}
// Update the parent job counters
const decrement: Record<string, number> = { total_items: -1 };
if (item.status === "completed") {
decrement.completed_items = -1;
} else if (item.status === "failed") {
decrement.failed_items = -1;
}
// Fetch current job to compute new values (Supabase doesn't support atomic decrement)
const { data: parentJob } = await supabase
.from("download_jobs")
.select("total_items, completed_items, failed_items")
.eq("id", item.job_id)
.single();
if (parentJob) {
const updates: Record<string, number> = {
total_items: Math.max(0, parentJob.total_items + (decrement.total_items ?? 0)),
};
if (decrement.completed_items) {
updates.completed_items = Math.max(0, parentJob.completed_items + decrement.completed_items);
}
if (decrement.failed_items) {
updates.failed_items = Math.max(0, parentJob.failed_items + decrement.failed_items);
}
await supabase
.from("download_jobs")
.update(updates)
.eq("id", item.job_id);
}
return new Response(
JSON.stringify({ deleted: true, itemId, jobId: item.job_id }),
{
status: 200,
headers: { ...corsHeaders, "Content-Type": "application/json" },
}
);
}
// ── Delete an entire job ───────────────────────────────
if (!jobId || typeof jobId !== "string") {
return new Response(JSON.stringify({ error: "jobId is required" }), {
return new Response(JSON.stringify({ error: "jobId or itemId is required" }), {
status: 400,
headers: { ...corsHeaders, "Content-Type": "application/json" },
});

View File

@@ -7,7 +7,8 @@
"start": "node server.js"
},
"dependencies": {
"youtubei.js": "^17.0.1",
"@supabase/supabase-js": "^2.49.1"
"@google/genai": "^1.46.0",
"@supabase/supabase-js": "^2.49.1",
"youtubei.js": "^17.0.1"
}
}

View File

@@ -4,6 +4,7 @@ import { readFile, unlink, mkdir } from "node:fs/promises";
import { promisify } from "node:util";
import { Innertube } from "youtubei.js";
import { createClient } from "@supabase/supabase-js";
import { GoogleGenAI, Type } from "@google/genai";
const execFileAsync = promisify(execFile);
@@ -16,6 +17,7 @@ const STORAGE_BUCKET = process.env.STORAGE_BUCKET || "workout-audio";
// SUPABASE_URL is the internal Docker network URL (e.g. http://kong:8000)
// which browsers cannot reach.
const SUPABASE_PUBLIC_URL = process.env.SUPABASE_PUBLIC_URL || SUPABASE_URL;
const GEMINI_API_KEY = process.env.GEMINI_API_KEY || "";
if (!SUPABASE_URL || !SUPABASE_SERVICE_ROLE_KEY) {
console.error("Missing SUPABASE_URL or SUPABASE_SERVICE_ROLE_KEY");
@@ -107,6 +109,7 @@ async function handlePlaylist(req, res) {
items.push({
videoId: item.id,
title: item.title?.toString() ?? "Untitled",
author: item.author?.name ?? null,
durationSeconds: item.duration?.seconds ?? 0,
thumbnailUrl: item.thumbnails?.[0]?.url ?? null,
});
@@ -193,6 +196,182 @@ async function handleHealth(_req, res) {
jsonResponse(res, 200, { status: "ok" });
}
// ── Genre classification ─────────────────────────────────────

// Canonical genre slugs accepted by the app. Gemini output is validated
// against this set and anything else is discarded.
// NOTE(review): presumably this must match the admin app's MusicGenre
// union — verify when adding/removing a genre.
const VALID_GENRES = new Set([
  "edm", "hip-hop", "pop", "rock", "latin", "house",
  "drum-and-bass", "dubstep", "r-and-b", "country", "metal", "ambient",
]);

// System prompt for Gemini. The genre list below must stay in sync with
// VALID_GENRES above (runtime string — do not edit casually).
const CLASSIFY_SYSTEM_PROMPT = `You classify music tracks into exactly one genre for a fitness/workout app.

Available genres (pick exactly one per track):
- edm: Electronic dance music, techno, trance, electro
- hip-hop: Hip-hop, rap, trap beats
- pop: Pop music, mainstream hits
- rock: Rock, alternative, indie rock, punk
- latin: Reggaeton, salsa, bachata, latin pop
- house: House music, deep house, tech house
- drum-and-bass: Drum and bass, jungle, liquid DnB
- dubstep: Dubstep, bass music, brostep
- r-and-b: R&B, soul, neo-soul
- country: Country, country pop, Americana
- metal: Heavy metal, metalcore, hard rock
- ambient: Ambient, chill, lo-fi, downtempo, meditation

For each track, pick the single best-fit genre. If the track is clearly a
workout/tabata/HIIT track, infer genre from musical style cues in the title.
If truly ambiguous, default to "edm" for workout/tabata tracks.`;
/**
 * Fetch YouTube metadata (category + keywords) for a batch of video IDs.
 * Requests run in parallel batches of 10 to avoid rate limits.
 *
 * @param {string[]} videoIds
 * @returns {Promise<Map<string, {category: string|null, keywords: string[]}>>}
 */
async function fetchVideoMetadata(videoIds) {
  const yt = await getYouTubeClient();
  const BATCH_SIZE = 10;
  const metadata = new Map();

  // Single lookup for one video; rejections are handled by allSettled below.
  const lookup = async (id) => {
    const info = await yt.getBasicInfo(id);
    return {
      id,
      category: info.basic_info?.category ?? null,
      keywords: info.basic_info?.keywords ?? [],
    };
  };

  for (let start = 0; start < videoIds.length; start += BATCH_SIZE) {
    const settled = await Promise.allSettled(
      videoIds.slice(start, start + BATCH_SIZE).map(lookup)
    );
    for (const outcome of settled) {
      // Rejected lookups are skipped silently — metadata is best-effort.
      if (outcome.status !== "fulfilled") continue;
      const { id, category, keywords } = outcome.value;
      metadata.set(id, { category, keywords });
    }
  }
  return metadata;
}
/**
 * Classify tracks into genres using Gemini structured output.
 *
 * Input: array of {videoId, title, author, category?, keywords?}
 * Batches into groups of 50 to keep the response schema/prompt manageable.
 * Returns: Record<videoId, genre> — partial on batch failures, empty when
 * GEMINI_API_KEY is not configured. Never throws.
 */
async function classifyWithGemini(tracks) {
  // Without a key, classification silently degrades to "no genres".
  if (!GEMINI_API_KEY) {
    console.warn("GEMINI_API_KEY not set — skipping genre classification");
    return {};
  }
  const ai = new GoogleGenAI({ apiKey: GEMINI_API_KEY });
  const BATCH_SIZE = 50;
  const allGenres = {};
  for (let i = 0; i < tracks.length; i += BATCH_SIZE) {
    const batch = tracks.slice(i, i + BATCH_SIZE);
    // Build one concise "videoId: details" line per track for the prompt.
    const trackDescriptions = batch.map((t) => {
      const parts = [`"${t.title}"`];
      if (t.author) parts.push(`by ${t.author}`);
      if (t.category) parts.push(`[${t.category}]`);
      // Cap keywords at 8 to keep prompt size bounded.
      if (t.keywords?.length) parts.push(`tags: ${t.keywords.slice(0, 8).join(", ")}`);
      return `${t.videoId}: ${parts.join(" — ")}`;
    });
    const userPrompt = `Classify each track into one genre. Return a JSON object mapping videoId to genre string.\n\n${trackDescriptions.join("\n")}`;
    try {
      const response = await ai.models.generateContent({
        // NOTE(review): preview model name — confirm it is still available.
        model: "gemini-3.1-flash-lite-preview",
        contents: userPrompt,
        config: {
          systemInstruction: CLASSIFY_SYSTEM_PROMPT,
          responseMimeType: "application/json",
          // Response schema: one required enum-constrained string property
          // per videoId, so the model must answer for every track.
          responseSchema: {
            type: Type.OBJECT,
            properties: Object.fromEntries(
              batch.map((t) => [
                t.videoId,
                {
                  type: Type.STRING,
                  description: "Genre classification",
                  enum: [...VALID_GENRES],
                },
              ])
            ),
            required: batch.map((t) => t.videoId),
          },
          // Low temperature for deterministic-ish classification.
          temperature: 0.1,
        },
      });
      const parsed = JSON.parse(response.text);
      // Validate each genre against the allowed set (belt-and-braces on
      // top of the schema enum).
      for (const [videoId, genre] of Object.entries(parsed)) {
        if (VALID_GENRES.has(genre)) {
          allGenres[videoId] = genre;
        } else {
          console.warn(`Invalid genre "${genre}" for ${videoId} — skipping`);
        }
      }
    } catch (batchErr) {
      console.error(`Gemini batch ${i / BATCH_SIZE + 1} failed:`, batchErr.message);
      // Continue with next batch — partial results are fine
    }
  }
  return allGenres;
}
/**
 * POST /classify — classify tracks into music genres.
 *
 * Body: { items: [{ videoId, title, author? }] }
 * Responds 400 only on a missing/empty items array; otherwise always 200
 * with { genres } (plus a `warning` field when classification failed),
 * because classification is best-effort and must never block an import.
 */
async function handleClassify(req, res) {
  const { items } = await readBody(req);
  if (!Array.isArray(items) || items.length === 0) {
    return jsonResponse(res, 400, { error: "items array is required" });
  }
  console.log(`Classifying ${items.length} tracks...`);
  try {
    // Step 1: enrich with YouTube category/keyword metadata.
    console.log("Fetching YouTube metadata...");
    const metadata = await fetchVideoMetadata(items.map((entry) => entry.videoId));
    console.log(`Got metadata for ${metadata.size}/${items.length} videos`);

    // Step 2: merge the fetched metadata into the caller-supplied items.
    const enriched = items.map(({ videoId, title, author }) => {
      const meta = metadata.get(videoId);
      return {
        videoId,
        title,
        author: author ?? null,
        category: meta?.category ?? null,
        keywords: meta?.keywords ?? [],
      };
    });

    // Step 3: hand the enriched tracks to Gemini.
    console.log("Calling Gemini for classification...");
    const genres = await classifyWithGemini(enriched);
    console.log(`Classified ${Object.keys(genres).length}/${items.length} tracks`);
    jsonResponse(res, 200, { genres });
  } catch (err) {
    // Classification is best-effort — never block the import
    console.error("Classification failed:", err.message);
    jsonResponse(res, 200, { genres: {}, warning: err.message });
  }
}
// ── Server ────────────────────────────────────────────────────
const server = http.createServer(async (req, res) => {
@@ -206,6 +385,9 @@ const server = http.createServer(async (req, res) => {
if (req.method === "POST" && req.url === "/download") {
return await handleDownload(req, res);
}
if (req.method === "POST" && req.url === "/classify") {
return await handleClassify(req, res);
}
jsonResponse(res, 404, { error: "Not found" });
} catch (err) {
console.error(`${req.method} ${req.url} error:`, err.message);