Some checks failed
- admin-web: Added an "All Music" library view with search, genre, and status filters. - admin-web: Converted Jobs view to use expandable cards instead of a split pane. - admin-web: Added ability to delete individual tracks from a job. - functions: Added new `youtube-classify` edge function to automatically categorize tracks using Gemini LLM. - functions: Integrated AI genre classification during initial playlist import if no manual genre is provided. - worker: Added `/classify` endpoint for the worker to securely interface with Gemini. - scripts: Updated deployment script to include `GEMINI_API_KEY`.
413 lines
12 KiB
TypeScript
"use client";
|
|
|
|
import { useState, useCallback, useRef } from "react";
|
|
import { supabase } from "@/lib/supabase";
|
|
import type { Database, MusicGenre } from "@/lib/supabase";
|
|
|
|
// Row types derived from the generated Supabase database schema.
type DownloadJob = Database["public"]["Tables"]["download_jobs"]["Row"];

type DownloadItem = Database["public"]["Tables"]["download_items"]["Row"];

/** A download job together with its individual track items (detail view). */
export interface JobWithItems extends DownloadJob {
  items: DownloadItem[];
}

// Pause between sequential youtube-process invocations so the edge
// function isn't hammered in a tight loop.
const PROCESS_DELAY_MS = 1000;
|
/**
|
|
* Construct a GET request to a Supabase edge function with query params.
|
|
* supabase.functions.invoke() doesn't support query params, so we use fetch.
|
|
*/
|
|
async function invokeGet<T>(
|
|
functionName: string,
|
|
params?: Record<string, string>
|
|
): Promise<T> {
|
|
const {
|
|
data: { session },
|
|
} = await supabase.auth.getSession();
|
|
if (!session) throw new Error("Not authenticated");
|
|
|
|
const supabaseUrl =
|
|
process.env.NEXT_PUBLIC_SUPABASE_URL ||
|
|
process.env.EXPO_PUBLIC_SUPABASE_URL ||
|
|
"http://localhost:54321";
|
|
|
|
const url = new URL(`${supabaseUrl}/functions/v1/${functionName}`);
|
|
if (params) {
|
|
Object.entries(params).forEach(([k, v]) => url.searchParams.set(k, v));
|
|
}
|
|
|
|
const res = await fetch(url.toString(), {
|
|
method: "GET",
|
|
headers: {
|
|
Authorization: `Bearer ${session.access_token}`,
|
|
"Content-Type": "application/json",
|
|
},
|
|
});
|
|
|
|
if (!res.ok) {
|
|
const body = await res.json().catch(() => ({ error: res.statusText }));
|
|
throw new Error(body.error || `HTTP ${res.status}`);
|
|
}
|
|
|
|
return res.json();
|
|
}
|
|
|
|
/**
|
|
* Send a DELETE request to a Supabase edge function with a JSON body.
|
|
*/
|
|
async function invokeDelete<T>(
|
|
functionName: string,
|
|
body: Record<string, unknown>
|
|
): Promise<T> {
|
|
const {
|
|
data: { session },
|
|
} = await supabase.auth.getSession();
|
|
if (!session) throw new Error("Not authenticated");
|
|
|
|
const supabaseUrl =
|
|
process.env.NEXT_PUBLIC_SUPABASE_URL ||
|
|
process.env.EXPO_PUBLIC_SUPABASE_URL ||
|
|
"http://localhost:54321";
|
|
|
|
const res = await fetch(`${supabaseUrl}/functions/v1/${functionName}`, {
|
|
method: "DELETE",
|
|
headers: {
|
|
Authorization: `Bearer ${session.access_token}`,
|
|
"Content-Type": "application/json",
|
|
},
|
|
body: JSON.stringify(body),
|
|
});
|
|
|
|
if (!res.ok) {
|
|
const data = await res.json().catch(() => ({ error: res.statusText }));
|
|
throw new Error(data.error || `HTTP ${res.status}`);
|
|
}
|
|
|
|
return res.json();
|
|
}
|
|
|
|
/**
|
|
* Send a PATCH request to a Supabase edge function with a JSON body.
|
|
*/
|
|
async function invokePatch<T>(
|
|
functionName: string,
|
|
body: Record<string, unknown>
|
|
): Promise<T> {
|
|
const {
|
|
data: { session },
|
|
} = await supabase.auth.getSession();
|
|
if (!session) throw new Error("Not authenticated");
|
|
|
|
const supabaseUrl =
|
|
process.env.NEXT_PUBLIC_SUPABASE_URL ||
|
|
process.env.EXPO_PUBLIC_SUPABASE_URL ||
|
|
"http://localhost:54321";
|
|
|
|
const res = await fetch(`${supabaseUrl}/functions/v1/${functionName}`, {
|
|
method: "PATCH",
|
|
headers: {
|
|
Authorization: `Bearer ${session.access_token}`,
|
|
"Content-Type": "application/json",
|
|
},
|
|
body: JSON.stringify(body),
|
|
});
|
|
|
|
if (!res.ok) {
|
|
const data = await res.json().catch(() => ({ error: res.statusText }));
|
|
throw new Error(data.error || `HTTP ${res.status}`);
|
|
}
|
|
|
|
return res.json();
|
|
}
|
|
|
|
/**
 * A download item enriched with its parent job's playlist title,
 * for the cross-job "All Music" library view.
 */
export interface ItemWithPlaylist extends DownloadItem {
  // Null when the parent job is unknown (e.g. job deleted or not yet fetched).
  playlist_title: string | null;
}
|
|
|
|
export function useYouTubeDownload() {
|
|
const [jobs, setJobs] = useState<DownloadJob[]>([]);
|
|
const [allItems, setAllItems] = useState<ItemWithPlaylist[]>([]);
|
|
const [activeJob, setActiveJob] = useState<JobWithItems | null>(null);
|
|
const [isProcessing, setIsProcessing] = useState(false);
|
|
const [isImporting, setIsImporting] = useState(false);
|
|
const [isClassifying, setIsClassifying] = useState(false);
|
|
const abortRef = useRef<AbortController | null>(null);
|
|
|
|
/** Fetch all jobs (list view). */
|
|
const fetchJobs = useCallback(async () => {
|
|
const data = await invokeGet<{ jobs: DownloadJob[] }>("youtube-status");
|
|
setJobs(data.jobs);
|
|
return data.jobs;
|
|
}, []);
|
|
|
|
/** Fetch ALL download items across all jobs, enriched with playlist title. */
|
|
const fetchAllItems = useCallback(async () => {
|
|
// Fetch all items via Supabase directly (RLS ensures admin-only).
|
|
// Cast needed because the Database type only defines Row (no Insert/Update)
|
|
// for download_items, causing Supabase client to infer `never`.
|
|
const { data: items, error: itemsErr } = (await supabase
|
|
.from("download_items")
|
|
.select("*")
|
|
.order("created_at", { ascending: false })) as {
|
|
data: DownloadItem[] | null;
|
|
error: { message: string } | null;
|
|
};
|
|
|
|
if (itemsErr) throw new Error(itemsErr.message);
|
|
|
|
// Build a map of job_id -> playlist_title from the current jobs list,
|
|
// or fetch jobs if we don't have them yet.
|
|
let jobMap: Record<string, string | null> = {};
|
|
let currentJobs = jobs;
|
|
if (currentJobs.length === 0) {
|
|
const data = await invokeGet<{ jobs: DownloadJob[] }>("youtube-status");
|
|
currentJobs = data.jobs;
|
|
setJobs(currentJobs);
|
|
}
|
|
for (const j of currentJobs) {
|
|
jobMap[j.id] = j.playlist_title;
|
|
}
|
|
|
|
const enriched: ItemWithPlaylist[] = (items ?? []).map((item) => ({
|
|
...item,
|
|
playlist_title: jobMap[item.job_id] ?? null,
|
|
}));
|
|
|
|
setAllItems(enriched);
|
|
return enriched;
|
|
}, [jobs]);
|
|
|
|
/** Fetch a single job with its items. */
|
|
const refreshStatus = useCallback(async (jobId: string) => {
|
|
const data = await invokeGet<{ job: DownloadJob; items: DownloadItem[] }>(
|
|
"youtube-status",
|
|
{ jobId }
|
|
);
|
|
const jobWithItems: JobWithItems = { ...data.job, items: data.items };
|
|
setActiveJob(jobWithItems);
|
|
|
|
// Also update the job in the list
|
|
setJobs((prev) =>
|
|
prev.map((j) => (j.id === jobId ? data.job : j))
|
|
);
|
|
|
|
return jobWithItems;
|
|
}, []);
|
|
|
|
/** Import a playlist: creates a job + download_items rows. */
|
|
const importPlaylist = useCallback(
|
|
async (playlistUrl: string, genre?: MusicGenre) => {
|
|
setIsImporting(true);
|
|
try {
|
|
const { data, error } = await supabase.functions.invoke(
|
|
"youtube-playlist",
|
|
{ body: { playlistUrl, genre: genre || null } }
|
|
);
|
|
if (error) throw new Error(error.message ?? "Import failed");
|
|
if (data?.error) throw new Error(data.error);
|
|
|
|
// Refresh the jobs list and select the new job
|
|
await fetchJobs();
|
|
if (data.jobId) {
|
|
await refreshStatus(data.jobId);
|
|
}
|
|
|
|
return data as {
|
|
jobId: string;
|
|
playlistTitle: string;
|
|
totalItems: number;
|
|
};
|
|
} finally {
|
|
setIsImporting(false);
|
|
}
|
|
},
|
|
[fetchJobs, refreshStatus]
|
|
);
|
|
|
|
/** Process all pending items for a job, one at a time. */
|
|
const startProcessing = useCallback(
|
|
async (jobId: string) => {
|
|
// Abort any existing processing loop
|
|
if (abortRef.current) {
|
|
abortRef.current.abort();
|
|
}
|
|
|
|
const controller = new AbortController();
|
|
abortRef.current = controller;
|
|
setIsProcessing(true);
|
|
|
|
try {
|
|
let done = false;
|
|
|
|
while (!done && !controller.signal.aborted) {
|
|
const { data, error } = await supabase.functions.invoke(
|
|
"youtube-process",
|
|
{ body: { jobId } }
|
|
);
|
|
|
|
if (error) throw new Error(error.message ?? "Processing failed");
|
|
if (data?.error) throw new Error(data.error);
|
|
|
|
done = data.done === true;
|
|
|
|
// Refresh the job status to get updated items
|
|
await refreshStatus(jobId);
|
|
|
|
// Delay between calls to avoid hammering the function
|
|
if (!done && !controller.signal.aborted) {
|
|
await new Promise<void>((resolve, reject) => {
|
|
const timer = setTimeout(resolve, PROCESS_DELAY_MS);
|
|
controller.signal.addEventListener(
|
|
"abort",
|
|
() => {
|
|
clearTimeout(timer);
|
|
reject(new DOMException("Aborted", "AbortError"));
|
|
},
|
|
{ once: true }
|
|
);
|
|
});
|
|
}
|
|
}
|
|
} catch (err) {
|
|
if (err instanceof DOMException && err.name === "AbortError") {
|
|
// Graceful stop — not an error
|
|
return;
|
|
}
|
|
throw err;
|
|
} finally {
|
|
setIsProcessing(false);
|
|
abortRef.current = null;
|
|
// Final refresh to get latest state
|
|
await refreshStatus(jobId).catch(() => {});
|
|
}
|
|
},
|
|
[refreshStatus]
|
|
);
|
|
|
|
/** Stop the current processing loop. */
|
|
const stopProcessing = useCallback(() => {
|
|
if (abortRef.current) {
|
|
abortRef.current.abort();
|
|
abortRef.current = null;
|
|
}
|
|
}, []);
|
|
|
|
/** Delete a job, its items, and associated storage files. */
|
|
const deleteJob = useCallback(
|
|
async (jobId: string) => {
|
|
await invokeDelete<{ deleted: boolean }>("youtube-status", { jobId });
|
|
|
|
// Remove the job from local state
|
|
setJobs((prev) => prev.filter((j) => j.id !== jobId));
|
|
|
|
// Clear active job if it was the deleted one
|
|
setActiveJob((prev) => (prev?.id === jobId ? null : prev));
|
|
},
|
|
[]
|
|
);
|
|
|
|
/** Delete a single download item and its audio file from storage. */
|
|
const deleteItem = useCallback(
|
|
async (itemId: string) => {
|
|
const result = await invokeDelete<{
|
|
deleted: boolean;
|
|
itemId: string;
|
|
jobId: string;
|
|
}>("youtube-status", { itemId });
|
|
|
|
// Remove from allItems
|
|
setAllItems((prev) => prev.filter((i) => i.id !== itemId));
|
|
|
|
// Remove from activeJob items if present
|
|
setActiveJob((prev) => {
|
|
if (!prev) return prev;
|
|
return {
|
|
...prev,
|
|
items: prev.items.filter((i) => i.id !== itemId),
|
|
};
|
|
});
|
|
|
|
// Update the parent job counters in jobs list
|
|
setJobs((prev) =>
|
|
prev.map((j) => {
|
|
if (j.id !== result.jobId) return j;
|
|
// We don't know the item status here, so just decrement total.
|
|
// The next fetchJobs() will reconcile exact counts from the server.
|
|
return {
|
|
...j,
|
|
total_items: Math.max(0, j.total_items - 1),
|
|
};
|
|
})
|
|
);
|
|
|
|
return result;
|
|
},
|
|
[]
|
|
);
|
|
|
|
/** Update the genre on a single download item. */
|
|
const updateItemGenre = useCallback(
|
|
async (itemId: string, genre: MusicGenre | null) => {
|
|
await invokePatch<{ updated: boolean }>("youtube-status", {
|
|
itemId,
|
|
genre,
|
|
});
|
|
|
|
// Update local state
|
|
setActiveJob((prev) => {
|
|
if (!prev) return prev;
|
|
return {
|
|
...prev,
|
|
items: prev.items.map((item) =>
|
|
item.id === itemId ? { ...item, genre } : item
|
|
),
|
|
};
|
|
});
|
|
},
|
|
[]
|
|
);
|
|
|
|
/** Re-classify genres for a job's items via YouTube metadata + Gemini. */
|
|
const reclassifyJob = useCallback(
|
|
async (jobId: string, force = false) => {
|
|
setIsClassifying(true);
|
|
try {
|
|
const { data, error } = await supabase.functions.invoke(
|
|
"youtube-classify",
|
|
{ body: { jobId, force } }
|
|
);
|
|
if (error) throw new Error(error.message ?? "Classification failed");
|
|
if (data?.error) throw new Error(data.error);
|
|
|
|
// Refresh job items and library to reflect updated genres
|
|
await refreshStatus(jobId);
|
|
await fetchAllItems().catch(() => {});
|
|
|
|
return data as { classified: number; skipped: number };
|
|
} finally {
|
|
setIsClassifying(false);
|
|
}
|
|
},
|
|
[refreshStatus, fetchAllItems]
|
|
);
|
|
|
|
return {
|
|
jobs,
|
|
allItems,
|
|
activeJob,
|
|
isProcessing,
|
|
isImporting,
|
|
isClassifying,
|
|
fetchJobs,
|
|
fetchAllItems,
|
|
refreshStatus,
|
|
importPlaylist,
|
|
startProcessing,
|
|
stopProcessing,
|
|
deleteJob,
|
|
deleteItem,
|
|
updateItemGenre,
|
|
reclassifyJob,
|
|
};
|
|
}
|