mirror of
https://github.com/gabehf/music-importer.git
synced 2026-04-22 11:31:52 -07:00
mbz discover + auto import
This commit is contained in:
parent
986b0273be
commit
c7d6a088ed
11 changed files with 2036 additions and 266 deletions
63
CLAUDE.md
Normal file
63
CLAUDE.md
Normal file
|
|
@ -0,0 +1,63 @@
|
||||||
|
# CLAUDE.md
|
||||||
|
|
||||||
|
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||||
|
|
||||||
|
## Commands
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Build
|
||||||
|
go build -o importer .
|
||||||
|
|
||||||
|
# Build with version baked in
|
||||||
|
go build -ldflags="-X main.version=v1.0.0" -o importer .
|
||||||
|
|
||||||
|
# Run locally (requires IMPORT_DIR and LIBRARY_DIR env vars)
|
||||||
|
IMPORT_DIR=/path/to/import LIBRARY_DIR=/path/to/library ./importer
|
||||||
|
|
||||||
|
# Build Docker image
|
||||||
|
docker build -t music-importer .
|
||||||
|
|
||||||
|
# Build Docker image with version
|
||||||
|
docker build --build-arg VERSION=v1.0.0 -t music-importer .
|
||||||
|
```
|
||||||
|
|
||||||
|
There are no tests in this codebase.
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
This is a single-package Go web app (`package main`) that runs as a web server on port 8080. Users trigger an import via the web UI, which runs the import pipeline in a background goroutine.
|
||||||
|
|
||||||
|
**Pipeline flow** (`importer.go: RunImporter`):
|
||||||
|
1. **Cluster** — loose audio files at the top of `IMPORT_DIR` are grouped into subdirectories by album tag (`files.go: cluster`)
|
||||||
|
2. For each album directory:
|
||||||
|
- **Clean tags** — removes COMMENT/DESCRIPTION tags via `metaflac` (`audio.go`)
|
||||||
|
- **Tag metadata** — tries `beets` first; falls back to reading existing file tags, then MusicBrainz API (`metadata.go: getAlbumMetadata`)
|
||||||
|
- **Lyrics** — fetches synced LRC lyrics from LRClib API; falls back to plain lyrics formatted as LRC (`lrc.go`)
|
||||||
|
- **ReplayGain** — runs `rsgain easy` on the directory (`audio.go`)
|
||||||
|
- **Cover art** — looks for existing image files, downloads from Cover Art Archive via MusicBrainz if missing, then embeds into tracks (`media.go`)
|
||||||
|
- **Move** — moves tracks, .lrc files, and cover image into `LIBRARY_DIR/{Artist}/[{Date}] {Album} [{Quality}]/` (`files.go: moveToLibrary`)
|
||||||
|
|
||||||
|
**Key types** (`importer.go`):
|
||||||
|
- `AlbumResult` — tracks per-step success/failure/skip for one album
|
||||||
|
- `ImportSession` — holds all `AlbumResult`s for one run; stored in `lastSession` global
|
||||||
|
- `MusicMetadata` — artist/album/title/date/quality used throughout the pipeline
|
||||||
|
|
||||||
|
**Web layer** (`main.go`):
|
||||||
|
- `GET /` — renders `index.html.tmpl` with the last session's results
|
||||||
|
- `POST /run` — starts `RunImporter()` in a goroutine; prevents concurrent runs via `importerMu` mutex
|
||||||
|
|
||||||
|
**External tool dependencies** (must be present in PATH at runtime):
|
||||||
|
- `ffprobe` — reads audio tags and stream info
|
||||||
|
- `beet` — metadata tagging via MusicBrainz (primary metadata source)
|
||||||
|
- `rsgain` — ReplayGain calculation
|
||||||
|
- `metaflac` — FLAC tag manipulation and cover embedding
|
||||||
|
- `curl` — MusicBrainz API fallback queries
|
||||||
|
|
||||||
|
**Environment variables**:
|
||||||
|
- `IMPORT_DIR` — source directory scanned for albums
|
||||||
|
- `LIBRARY_DIR` — destination library root
|
||||||
|
- `COPYMODE=true` — copies files instead of moving (still destructive on the destination)
|
||||||
|
- `SLSKD_URL` — base URL of the slskd instance (e.g. `http://localhost:5030`)
|
||||||
|
- `SLSKD_API_KEY` — slskd API key (sent as `X-API-Key` header)
|
||||||
|
|
||||||
|
**Releases**: Docker image `gabehf/music-importer` is built and pushed to Docker Hub via GitHub Actions on `v*` tags.
|
||||||
402
discover.go
Normal file
402
discover.go
Normal file
|
|
@ -0,0 +1,402 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
	"net/url"
	"sort"
	"strings"
	"sync"
	"time"
)
|
||||||
|
|
||||||
|
// ── MusicBrainz types ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// mbArtistCredit is one element of a MusicBrainz "artist-credit" array:
// the credited name plus the underlying artist record it refers to.
type mbArtistCredit struct {
	Name   string `json:"name"`
	Artist struct {
		ID   string `json:"id"`
		Name string `json:"name"`
	} `json:"artist"`
}
|
||||||
|
|
||||||
|
// mbRelease is a single release as returned by a MusicBrainz release search.
type mbRelease struct {
	ID           string           `json:"id"`
	Title        string           `json:"title"`
	Date         string           `json:"date"`
	ArtistCredit []mbArtistCredit `json:"artist-credit"`
	ReleaseGroup struct {
		PrimaryType string `json:"primary-type"`
	} `json:"release-group"`
}
|
||||||
|
|
||||||
|
// mbArtist is a single artist as returned by a MusicBrainz artist search.
type mbArtist struct {
	ID             string `json:"id"`
	Name           string `json:"name"`
	Country        string `json:"country"`
	Disambiguation string `json:"disambiguation"`
}
|
||||||
|
|
||||||
|
// mbReleaseGroup is a release group from the MusicBrainz browse API
// (used to enumerate an artist's discography).
type mbReleaseGroup struct {
	ID               string `json:"id"`
	Title            string `json:"title"`
	PrimaryType      string `json:"primary-type"`
	FirstReleaseDate string `json:"first-release-date"`
}
|
||||||
|
|
||||||
|
// mbHTTPClient is used for all MusicBrainz requests. The explicit timeout
// keeps a stalled MusicBrainz response from hanging a fetch goroutine
// forever (http.DefaultClient has no timeout at all).
var mbHTTPClient = &http.Client{Timeout: 30 * time.Second}

// mbGet performs a GET against the MusicBrainz web service at the given
// path (e.g. "/ws/2/artist/?query=...") and decodes the JSON response into
// out. A descriptive User-Agent is set as required by the MusicBrainz API
// etiquette rules. Any non-200 status is returned as an error.
func mbGet(path string, out interface{}) error {
	req, err := http.NewRequest("GET", "https://musicbrainz.org"+path, nil)
	if err != nil {
		return err
	}
	req.Header.Set("User-Agent", "music-importer/1.0 (https://github.com/gabehf/music-importer)")

	resp, err := mbHTTPClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("MusicBrainz returned %d", resp.StatusCode)
	}
	return json.NewDecoder(resp.Body).Decode(out)
}
|
||||||
|
|
||||||
|
func searchMBReleases(query string) ([]mbRelease, error) {
|
||||||
|
var result struct {
|
||||||
|
Releases []mbRelease `json:"releases"`
|
||||||
|
}
|
||||||
|
err := mbGet("/ws/2/release/?query="+url.QueryEscape(query)+"&fmt=json&limit=20", &result)
|
||||||
|
return result.Releases, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func searchMBArtists(query string) ([]mbArtist, error) {
|
||||||
|
var result struct {
|
||||||
|
Artists []mbArtist `json:"artists"`
|
||||||
|
}
|
||||||
|
err := mbGet("/ws/2/artist/?query="+url.QueryEscape(query)+"&fmt=json&limit=20", &result)
|
||||||
|
return result.Artists, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// getMBArtistReleaseGroups returns all Album and EP release groups for an artist,
|
||||||
|
// paginating through the MusicBrainz browse API with the required 1 req/s rate limit.
|
||||||
|
func getMBArtistReleaseGroups(artistMBID string) ([]mbReleaseGroup, error) {
|
||||||
|
const limit = 100
|
||||||
|
var all []mbReleaseGroup
|
||||||
|
|
||||||
|
for offset := 0; ; offset += limit {
|
||||||
|
path := fmt.Sprintf(
|
||||||
|
"/ws/2/release-group?artist=%s&type=album%%7Cep&fmt=json&limit=%d&offset=%d",
|
||||||
|
url.QueryEscape(artistMBID), limit, offset,
|
||||||
|
)
|
||||||
|
|
||||||
|
var result struct {
|
||||||
|
ReleaseGroups []mbReleaseGroup `json:"release-groups"`
|
||||||
|
Count int `json:"release-group-count"`
|
||||||
|
}
|
||||||
|
if err := mbGet(path, &result); err != nil {
|
||||||
|
return all, err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, rg := range result.ReleaseGroups {
|
||||||
|
t := strings.ToLower(rg.PrimaryType)
|
||||||
|
if t == "album" || t == "ep" {
|
||||||
|
all = append(all, rg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if offset+limit >= result.Count {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
time.Sleep(time.Second) // MusicBrainz rate limit
|
||||||
|
}
|
||||||
|
|
||||||
|
return all, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetchArtist fetches every Album and EP release group for an artist by running
|
||||||
|
// fetchRelease for each one sequentially, then registers each for monitoring.
|
||||||
|
func fetchArtist(artistMBID, artistName string, logf func(string)) error {
|
||||||
|
log.Printf("[discover] artist fetch started: %s (%s)", artistName, artistMBID)
|
||||||
|
logf(fmt.Sprintf("Looking up discography for %s on MusicBrainz…", artistName))
|
||||||
|
|
||||||
|
groups, err := getMBArtistReleaseGroups(artistMBID)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("MusicBrainz discography lookup failed: %w", err)
|
||||||
|
}
|
||||||
|
if len(groups) == 0 {
|
||||||
|
return fmt.Errorf("no albums or EPs found for %s on MusicBrainz", artistName)
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Printf("[discover] found %d release groups for %s", len(groups), artistName)
|
||||||
|
logf(fmt.Sprintf("Found %d albums/EPs", len(groups)))
|
||||||
|
|
||||||
|
failed := 0
|
||||||
|
for i, rg := range groups {
|
||||||
|
logf(fmt.Sprintf("[%d/%d] %s", i+1, len(groups), rg.Title))
|
||||||
|
folder, err := fetchRelease(artistName, rg.Title, rg.ID, logf)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("[discover] fetch failed for %q by %s: %v", rg.Title, artistName, err)
|
||||||
|
logf(fmt.Sprintf(" ↳ failed: %v", err))
|
||||||
|
failed++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
registerDownload(rg.ID, artistName, rg.Title, folder, nil)
|
||||||
|
logf(fmt.Sprintf(" ↳ registered for import (mbid: %s)", rg.ID))
|
||||||
|
}
|
||||||
|
|
||||||
|
if failed > 0 {
|
||||||
|
logf(fmt.Sprintf("Done — %d/%d queued, %d failed", len(groups)-failed, len(groups), failed))
|
||||||
|
} else {
|
||||||
|
logf(fmt.Sprintf("Done — all %d downloads queued, monitoring for import", len(groups)))
|
||||||
|
}
|
||||||
|
log.Printf("[discover] artist fetch complete: %s (%d/%d succeeded)", artistName, len(groups)-failed, len(groups))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Fetch state ───────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// fetchEntry tracks the progress of one background fetch (a single release,
// or a whole-artist discography run). The JSON-tagged fields are what the
// status endpoint serializes; mu guards all of them.
type fetchEntry struct {
	mu      sync.Mutex
	ID      string   `json:"id"`
	Artist  string   `json:"artist"`
	Album   string   `json:"album"` // empty for whole-artist fetches
	Log     []string `json:"log"`
	Done    bool     `json:"done"`
	Success bool     `json:"success"`
	ErrMsg  string   `json:"error,omitempty"`
}
|
||||||
|
|
||||||
|
var (
	// fetchesMu guards fetchMap itself; each entry additionally has its
	// own mutex for its mutable fields.
	fetchesMu sync.Mutex
	// fetchMap holds every fetch started since process start, keyed by
	// MusicBrainz ID. Entries are not removed anywhere in this file, so
	// completed fetches remain visible to the list endpoint.
	fetchMap = make(map[string]*fetchEntry)
)
|
||||||
|
|
||||||
|
func newFetchEntry(id, artist, album string) *fetchEntry {
|
||||||
|
e := &fetchEntry{ID: id, Artist: artist, Album: album}
|
||||||
|
fetchesMu.Lock()
|
||||||
|
fetchMap[id] = e
|
||||||
|
fetchesMu.Unlock()
|
||||||
|
return e
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *fetchEntry) appendLog(msg string) {
|
||||||
|
e.mu.Lock()
|
||||||
|
e.Log = append(e.Log, msg)
|
||||||
|
e.mu.Unlock()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *fetchEntry) finish(err error) {
|
||||||
|
e.mu.Lock()
|
||||||
|
e.Done = true
|
||||||
|
if err != nil {
|
||||||
|
e.ErrMsg = err.Error()
|
||||||
|
} else {
|
||||||
|
e.Success = true
|
||||||
|
}
|
||||||
|
e.mu.Unlock()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *fetchEntry) snapshot() fetchEntry {
|
||||||
|
e.mu.Lock()
|
||||||
|
defer e.mu.Unlock()
|
||||||
|
cp := *e
|
||||||
|
cp.Log = append([]string(nil), e.Log...)
|
||||||
|
return cp
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── HTTP handlers ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// handleDiscoverSearch handles GET /discover/search?q=...&type=release|artist
|
||||||
|
func handleDiscoverSearch(w http.ResponseWriter, r *http.Request) {
|
||||||
|
q := r.URL.Query().Get("q")
|
||||||
|
if q == "" {
|
||||||
|
http.Error(w, "missing q", http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
searchType := r.URL.Query().Get("type")
|
||||||
|
if searchType == "" {
|
||||||
|
searchType = "release"
|
||||||
|
}
|
||||||
|
log.Printf("[discover] search: type=%s q=%q", searchType, q)
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
switch searchType {
|
||||||
|
case "artist":
|
||||||
|
artists, err := searchMBArtists(q)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("[discover] artist search error: %v", err)
|
||||||
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
log.Printf("[discover] artist search returned %d results", len(artists))
|
||||||
|
json.NewEncoder(w).Encode(artists)
|
||||||
|
|
||||||
|
default: // "release"
|
||||||
|
releases, err := searchMBReleases(q)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("[discover] release search error: %v", err)
|
||||||
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
log.Printf("[discover] release search returned %d results", len(releases))
|
||||||
|
json.NewEncoder(w).Encode(releases)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleDiscoverFetch handles POST /discover/fetch
|
||||||
|
// Body: {"id":"mbid","artist":"...","album":"..."}
|
||||||
|
func handleDiscoverFetch(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.Method != http.MethodPost {
|
||||||
|
http.Error(w, "POST only", http.StatusMethodNotAllowed)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var body struct {
|
||||||
|
ID string `json:"id"`
|
||||||
|
Artist string `json:"artist"`
|
||||||
|
Album string `json:"album"`
|
||||||
|
}
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&body); err != nil || body.ID == "" || body.Artist == "" || body.Album == "" {
|
||||||
|
http.Error(w, "id, artist and album are required", http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// If a fetch for this ID is already in progress, return its ID without starting a new one.
|
||||||
|
fetchesMu.Lock()
|
||||||
|
existing := fetchMap[body.ID]
|
||||||
|
fetchesMu.Unlock()
|
||||||
|
if existing != nil {
|
||||||
|
existing.mu.Lock()
|
||||||
|
done := existing.Done
|
||||||
|
existing.mu.Unlock()
|
||||||
|
if !done {
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
json.NewEncoder(w).Encode(map[string]string{"id": body.ID})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Printf("[discover] starting fetch: %q by %s (mbid: %s)", body.Album, body.Artist, body.ID)
|
||||||
|
entry := newFetchEntry(body.ID, body.Artist, body.Album)
|
||||||
|
go func() {
|
||||||
|
folder, err := fetchRelease(body.Artist, body.Album, body.ID, entry.appendLog)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("[discover] fetch failed for %q by %s: %v", body.Album, body.Artist, err)
|
||||||
|
entry.finish(err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
log.Printf("[discover] fetch complete for %q by %s, registering for import", body.Album, body.Artist)
|
||||||
|
registerDownload(body.ID, body.Artist, body.Album, folder, entry)
|
||||||
|
// entry.finish is called by the monitor when import completes
|
||||||
|
}()
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
json.NewEncoder(w).Encode(map[string]string{"id": body.ID})
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleDiscoverFetchArtist handles POST /discover/fetch/artist
|
||||||
|
// Body: {"id":"artist-mbid","name":"Artist Name"}
|
||||||
|
func handleDiscoverFetchArtist(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.Method != http.MethodPost {
|
||||||
|
http.Error(w, "POST only", http.StatusMethodNotAllowed)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var body struct {
|
||||||
|
ID string `json:"id"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
}
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&body); err != nil || body.ID == "" || body.Name == "" {
|
||||||
|
http.Error(w, "id and name are required", http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
fetchesMu.Lock()
|
||||||
|
existing := fetchMap[body.ID]
|
||||||
|
fetchesMu.Unlock()
|
||||||
|
if existing != nil {
|
||||||
|
existing.mu.Lock()
|
||||||
|
done := existing.Done
|
||||||
|
existing.mu.Unlock()
|
||||||
|
if !done {
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
json.NewEncoder(w).Encode(map[string]string{"id": body.ID})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Printf("[discover] starting artist fetch: %s (%s)", body.Name, body.ID)
|
||||||
|
entry := newFetchEntry(body.ID, body.Name, "")
|
||||||
|
go func() {
|
||||||
|
err := fetchArtist(body.ID, body.Name, entry.appendLog)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("[discover] artist fetch failed for %s: %v", body.Name, err)
|
||||||
|
} else {
|
||||||
|
log.Printf("[discover] artist fetch complete for %s", body.Name)
|
||||||
|
}
|
||||||
|
entry.finish(err)
|
||||||
|
}()
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
json.NewEncoder(w).Encode(map[string]string{"id": body.ID})
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleDiscoverFetchStatus handles GET /discover/fetch/status?id=...
|
||||||
|
func handleDiscoverFetchStatus(w http.ResponseWriter, r *http.Request) {
|
||||||
|
id := r.URL.Query().Get("id")
|
||||||
|
if id == "" {
|
||||||
|
http.Error(w, "missing id", http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
fetchesMu.Lock()
|
||||||
|
entry := fetchMap[id]
|
||||||
|
fetchesMu.Unlock()
|
||||||
|
|
||||||
|
if entry == nil {
|
||||||
|
http.Error(w, "not found", http.StatusNotFound)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
snap := entry.snapshot()
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
json.NewEncoder(w).Encode(snap)
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetchListItem is a summary of a fetch entry for the list endpoint.
type fetchListItem struct {
	ID      string `json:"id"`    // MusicBrainz ID the fetch was keyed under
	Title   string `json:"title"` // "Artist" or "Artist — Album" display label
	Done    bool   `json:"done"`
	Success bool   `json:"success"`
}
|
||||||
|
|
||||||
|
// handleDiscoverFetchList handles GET /discover/fetch/list
|
||||||
|
// Returns a summary of all known fetch entries so the frontend can discover
|
||||||
|
// entries created server-side (e.g. per-album entries from an artist fetch).
|
||||||
|
func handleDiscoverFetchList(w http.ResponseWriter, r *http.Request) {
|
||||||
|
fetchesMu.Lock()
|
||||||
|
items := make([]fetchListItem, 0, len(fetchMap))
|
||||||
|
for _, e := range fetchMap {
|
||||||
|
e.mu.Lock()
|
||||||
|
title := e.Artist
|
||||||
|
if e.Album != "" {
|
||||||
|
title = e.Artist + " \u2014 " + e.Album
|
||||||
|
}
|
||||||
|
items = append(items, fetchListItem{
|
||||||
|
ID: e.ID,
|
||||||
|
Title: title,
|
||||||
|
Done: e.Done,
|
||||||
|
Success: e.Success,
|
||||||
|
})
|
||||||
|
e.mu.Unlock()
|
||||||
|
}
|
||||||
|
fetchesMu.Unlock()
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
json.NewEncoder(w).Encode(items)
|
||||||
|
}
|
||||||
|
|
@ -185,7 +185,7 @@ func RunImporter() {
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Println("→ Tagging album metadata:")
|
fmt.Println("→ Tagging album metadata:")
|
||||||
md, src, err := getAlbumMetadata(albumPath, tracks[0])
|
md, src, err := getAlbumMetadata(albumPath, tracks[0], "")
|
||||||
result.TagMetadata.Err = err
|
result.TagMetadata.Err = err
|
||||||
result.MetadataSource = src
|
result.MetadataSource = src
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
|
||||||
368
index.html.tmpl
368
index.html.tmpl
|
|
@ -1,223 +1,53 @@
|
||||||
<!DOCTYPE html>
|
<!DOCTYPE html>
|
||||||
<html>
|
<html lang="en">
|
||||||
<head>
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||||
<title>Music Importer</title>
|
<title>Music Importer</title>
|
||||||
<style>
|
<link rel="stylesheet" href="/static/style.css?v={{.Version}}">
|
||||||
*, *::before, *::after { box-sizing: border-box; }
|
|
||||||
|
|
||||||
body {
|
|
||||||
font-family: sans-serif;
|
|
||||||
background: #111;
|
|
||||||
color: #eee;
|
|
||||||
text-align: center;
|
|
||||||
padding: 60px 24px 80px;
|
|
||||||
margin: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
h1 { margin-bottom: 32px; }
|
|
||||||
|
|
||||||
button {
|
|
||||||
font-size: 32px;
|
|
||||||
padding: 20px 40px;
|
|
||||||
border-radius: 10px;
|
|
||||||
border: none;
|
|
||||||
cursor: pointer;
|
|
||||||
background: #4CAF50;
|
|
||||||
color: white;
|
|
||||||
}
|
|
||||||
button:disabled {
|
|
||||||
background: #555;
|
|
||||||
cursor: not-allowed;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ── Last run summary ── */
|
|
||||||
.session {
|
|
||||||
margin: 48px auto 0;
|
|
||||||
max-width: 820px;
|
|
||||||
text-align: left;
|
|
||||||
}
|
|
||||||
|
|
||||||
.session-header {
|
|
||||||
display: flex;
|
|
||||||
justify-content: space-between;
|
|
||||||
align-items: baseline;
|
|
||||||
border-bottom: 1px solid #333;
|
|
||||||
padding-bottom: 8px;
|
|
||||||
margin-bottom: 20px;
|
|
||||||
}
|
|
||||||
.session-header h2 { margin: 0; font-size: 18px; color: #ccc; }
|
|
||||||
.session-header .duration { font-size: 13px; color: #666; }
|
|
||||||
|
|
||||||
.album {
|
|
||||||
background: #1a1a1a;
|
|
||||||
border: 1px solid #2a2a2a;
|
|
||||||
border-radius: 8px;
|
|
||||||
padding: 16px 20px;
|
|
||||||
margin-bottom: 12px;
|
|
||||||
}
|
|
||||||
.album-header {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 10px;
|
|
||||||
margin-bottom: 10px;
|
|
||||||
}
|
|
||||||
.album-name {
|
|
||||||
font-weight: bold;
|
|
||||||
font-size: 15px;
|
|
||||||
flex: 1;
|
|
||||||
white-space: nowrap;
|
|
||||||
overflow: hidden;
|
|
||||||
text-overflow: ellipsis;
|
|
||||||
}
|
|
||||||
.badge {
|
|
||||||
font-size: 11px;
|
|
||||||
font-weight: bold;
|
|
||||||
padding: 2px 8px;
|
|
||||||
border-radius: 4px;
|
|
||||||
white-space: nowrap;
|
|
||||||
}
|
|
||||||
.badge-ok { background: #1e4d2b; color: #4CAF50; }
|
|
||||||
.badge-warn { background: #4d3a00; color: #f0a500; }
|
|
||||||
.badge-fatal { background: #4d1a1a; color: #e05050; }
|
|
||||||
|
|
||||||
/* ── Metadata row ── */
|
|
||||||
.metadata {
|
|
||||||
display: flex;
|
|
||||||
align-items: baseline;
|
|
||||||
gap: 14px;
|
|
||||||
flex-wrap: wrap;
|
|
||||||
font-size: 12px;
|
|
||||||
color: #777;
|
|
||||||
margin-bottom: 12px;
|
|
||||||
}
|
|
||||||
.metadata-title {
|
|
||||||
color: #aaa;
|
|
||||||
font-size: 13px;
|
|
||||||
}
|
|
||||||
.metadata-pill {
|
|
||||||
display: inline-flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 4px;
|
|
||||||
background: #222;
|
|
||||||
border-radius: 4px;
|
|
||||||
padding: 2px 7px;
|
|
||||||
font-size: 11px;
|
|
||||||
}
|
|
||||||
.pill-label { color: #555; }
|
|
||||||
.pill-beets { color: #7ec8e3; }
|
|
||||||
.pill-musicbrainz { color: #c084fc; }
|
|
||||||
.pill-file_tags { color: #f0a500; }
|
|
||||||
.pill-unknown { color: #888; }
|
|
||||||
|
|
||||||
/* ── Rich info grid ── */
|
|
||||||
.info-grid {
|
|
||||||
display: grid;
|
|
||||||
grid-template-columns: repeat(auto-fill, minmax(180px, 1fr));
|
|
||||||
gap: 6px;
|
|
||||||
margin-bottom: 12px;
|
|
||||||
}
|
|
||||||
.info-card {
|
|
||||||
background: #222;
|
|
||||||
border-radius: 6px;
|
|
||||||
padding: 8px 12px;
|
|
||||||
font-size: 12px;
|
|
||||||
}
|
|
||||||
.info-card-label {
|
|
||||||
font-size: 10px;
|
|
||||||
text-transform: uppercase;
|
|
||||||
letter-spacing: 0.05em;
|
|
||||||
color: #555;
|
|
||||||
margin-bottom: 5px;
|
|
||||||
}
|
|
||||||
.info-card-value {
|
|
||||||
color: #ccc;
|
|
||||||
font-size: 13px;
|
|
||||||
font-weight: 600;
|
|
||||||
}
|
|
||||||
.info-card-sub {
|
|
||||||
margin-top: 3px;
|
|
||||||
color: #666;
|
|
||||||
font-size: 11px;
|
|
||||||
line-height: 1.4;
|
|
||||||
}
|
|
||||||
.info-ok { color: #4CAF50; }
|
|
||||||
.info-warn { color: #f0a500; }
|
|
||||||
.info-dim { color: #555; }
|
|
||||||
|
|
||||||
/* ── Pipeline steps ── */
|
|
||||||
.steps-label {
|
|
||||||
font-size: 10px;
|
|
||||||
text-transform: uppercase;
|
|
||||||
letter-spacing: 0.05em;
|
|
||||||
color: #444;
|
|
||||||
margin-bottom: 6px;
|
|
||||||
}
|
|
||||||
.steps {
|
|
||||||
display: grid;
|
|
||||||
grid-template-columns: repeat(auto-fill, minmax(200px, 1fr));
|
|
||||||
gap: 6px;
|
|
||||||
}
|
|
||||||
.step {
|
|
||||||
font-size: 12px;
|
|
||||||
padding: 5px 10px;
|
|
||||||
border-radius: 5px;
|
|
||||||
background: #222;
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
gap: 2px;
|
|
||||||
}
|
|
||||||
.step-label { color: #888; }
|
|
||||||
.step-ok { color: #4CAF50; }
|
|
||||||
.step-warn { color: #f0a500; }
|
|
||||||
.step-fatal { color: #e05050; }
|
|
||||||
.step-err { font-size: 11px; color: #c0392b; margin-top: 2px; word-break: break-word; }
|
|
||||||
|
|
||||||
footer {
|
|
||||||
position: fixed;
|
|
||||||
bottom: 16px;
|
|
||||||
width: 100%;
|
|
||||||
font-size: 13px;
|
|
||||||
color: #999;
|
|
||||||
text-align: center;
|
|
||||||
left: 0;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<h1>Music Importer</h1>
|
<h1>Music Importer</h1>
|
||||||
|
|
||||||
<form action="/run" method="POST">
|
<nav class="tabs">
|
||||||
<button type="submit" {{if .Running}}disabled{{end}}>
|
<button class="tab-btn active" data-tab="import">Import</button>
|
||||||
{{if .Running}}Importer Running...{{else}}Run Importer{{end}}
|
<button class="tab-btn" data-tab="discover">Discover</button>
|
||||||
</button>
|
</nav>
|
||||||
</form>
|
|
||||||
|
|
||||||
{{with .Session}}
|
<!-- ── Import ─────────────────────────────────────────────────────────── -->
|
||||||
<div class="session">
|
<section id="tab-import" class="tab-pane active">
|
||||||
<div class="session-header">
|
<form action="/run" method="POST">
|
||||||
<h2>Last Run — {{.StartedAt.Format "Jan 2, 2006 15:04:05"}}</h2>
|
<button type="submit" class="run-btn" {{if .Running}}disabled{{end}}>
|
||||||
<span class="duration">{{duration .StartedAt .FinishedAt}}</span>
|
{{if .Running}}Importer Running…{{else}}Run Importer{{end}}
|
||||||
</div>
|
</button>
|
||||||
|
</form>
|
||||||
|
|
||||||
{{range .Albums}}{{$album := .}}
|
{{with .Session}}
|
||||||
<div class="album">
|
<div class="content-box session">
|
||||||
<div class="album-header">
|
<div class="session-header">
|
||||||
<span class="album-name" title="{{.Path}}">{{.Name}}</span>
|
<h2>Last Run — {{.StartedAt.Format "Jan 2, 2006 15:04:05"}}</h2>
|
||||||
{{if .Succeeded}}
|
<span class="duration">{{duration .StartedAt .FinishedAt}}</span>
|
||||||
{{if .HasWarnings}}
|
|
||||||
<span class="badge badge-warn">⚠ warnings</span>
|
|
||||||
{{else}}
|
|
||||||
<span class="badge badge-ok">✓ ok</span>
|
|
||||||
{{end}}
|
|
||||||
{{else}}
|
|
||||||
<span class="badge badge-fatal">✗ failed at {{.FatalStep}}</span>
|
|
||||||
{{end}}
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{{with .Metadata}}
|
{{range .Albums}}{{$album := .}}
|
||||||
<div class="metadata">
|
<article class="album">
|
||||||
<span class="metadata-title">{{.Artist}} — {{.Album}}{{if .Year}} ({{.Year}}){{end}}</span>
|
<div class="album-header">
|
||||||
{{if $album.MetadataSource}}
|
<span class="album-name" title="{{.Path}}">{{.Name}}</span>
|
||||||
|
{{if .Succeeded}}
|
||||||
|
{{if .HasWarnings}}
|
||||||
|
<span class="badge badge-warn">⚠ warnings</span>
|
||||||
|
{{else}}
|
||||||
|
<span class="badge badge-ok">✓ ok</span>
|
||||||
|
{{end}}
|
||||||
|
{{else}}
|
||||||
|
<span class="badge badge-fatal">✗ failed at {{.FatalStep}}</span>
|
||||||
|
{{end}}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{{with .Metadata}}
|
||||||
|
<div class="metadata">
|
||||||
|
<span class="metadata-title">{{.Artist}} — {{.Album}}{{if .Year}} ({{.Year}}){{end}}</span>
|
||||||
|
{{if $album.MetadataSource}}
|
||||||
<span class="metadata-pill">
|
<span class="metadata-pill">
|
||||||
<span class="pill-label">via</span>
|
<span class="pill-label">via</span>
|
||||||
{{if eq (print $album.MetadataSource) "beets"}}
|
{{if eq (print $album.MetadataSource) "beets"}}
|
||||||
|
|
@ -231,67 +61,83 @@
|
||||||
{{end}}
|
{{end}}
|
||||||
</span>
|
</span>
|
||||||
{{end}}
|
{{end}}
|
||||||
</div>
|
|
||||||
{{end}}
|
|
||||||
|
|
||||||
{{/* ── Rich info cards ── */}}
|
|
||||||
<div class="info-grid">
|
|
||||||
{{/* Tracks */}}
|
|
||||||
<div class="info-card">
|
|
||||||
<div class="info-card-label">Tracks</div>
|
|
||||||
<div class="info-card-value">{{.TrackCount}}</div>
|
|
||||||
</div>
|
</div>
|
||||||
|
{{end}}
|
||||||
|
|
||||||
{{/* Lyrics */}}
|
<div class="info-grid">
|
||||||
<div class="info-card">
|
<div class="info-card">
|
||||||
<div class="info-card-label">Lyrics</div>
|
<div class="info-card-label">Tracks</div>
|
||||||
{{if eq .LyricsStats.Total 0}}
|
<div class="info-card-value">{{.TrackCount}}</div>
|
||||||
<div class="info-card-value info-dim">n/a</div>
|
</div>
|
||||||
{{else}}
|
|
||||||
<div class="info-card-value {{if gt .LyricsStats.Downloaded 0}}info-ok{{else}}info-dim{{end}}">
|
|
||||||
{{.LyricsStats.Downloaded}} / {{.LyricsStats.Total}}
|
|
||||||
</div>
|
|
||||||
<div class="info-card-sub">
|
|
||||||
{{if gt .LyricsStats.Synced 0}}<span class="info-ok">{{.LyricsStats.Synced}} synced</span>{{end}}
|
|
||||||
{{if and (gt .LyricsStats.Synced 0) (gt .LyricsStats.Plain 0)}} · {{end}}
|
|
||||||
{{if gt .LyricsStats.Plain 0}}<span class="info-warn">{{.LyricsStats.Plain}} plain</span>{{end}}
|
|
||||||
{{if gt .LyricsStats.AlreadyHad 0}}<span class="info-dim"> {{.LyricsStats.AlreadyHad}} existing</span>{{end}}
|
|
||||||
{{if gt .LyricsStats.NotFound 0}}<span class="info-dim"> {{.LyricsStats.NotFound}} missing</span>{{end}}
|
|
||||||
</div>
|
|
||||||
{{end}}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{{/* Cover art */}}
|
<div class="info-card">
|
||||||
<div class="info-card">
|
<div class="info-card-label">Lyrics</div>
|
||||||
<div class="info-card-label">Cover Art</div>
|
{{if eq .LyricsStats.Total 0}}
|
||||||
{{if .CoverArtStats.Found}}
|
<div class="info-card-value info-dim">n/a</div>
|
||||||
{{if .CoverArtStats.Embedded}}
|
|
||||||
<div class="info-card-value info-ok">Embedded</div>
|
|
||||||
<div class="info-card-sub info-dim">{{.CoverArtStats.Source}}</div>
|
|
||||||
{{else}}
|
{{else}}
|
||||||
<div class="info-card-value info-warn">Found, not embedded</div>
|
<div class="info-card-value {{if gt .LyricsStats.Downloaded 0}}info-ok{{else}}info-dim{{end}}">
|
||||||
<div class="info-card-sub info-dim">{{.CoverArtStats.Source}}</div>
|
{{.LyricsStats.Downloaded}} / {{.LyricsStats.Total}}
|
||||||
|
</div>
|
||||||
|
<div class="info-card-sub">
|
||||||
|
{{if gt .LyricsStats.Synced 0}}<span class="info-ok">{{.LyricsStats.Synced}} synced</span>{{end}}
|
||||||
|
{{if and (gt .LyricsStats.Synced 0) (gt .LyricsStats.Plain 0)}} · {{end}}
|
||||||
|
{{if gt .LyricsStats.Plain 0}}<span class="info-warn">{{.LyricsStats.Plain}} plain</span>{{end}}
|
||||||
|
{{if gt .LyricsStats.AlreadyHad 0}}<span class="info-dim"> {{.LyricsStats.AlreadyHad}} existing</span>{{end}}
|
||||||
|
{{if gt .LyricsStats.NotFound 0}}<span class="info-dim"> {{.LyricsStats.NotFound}} missing</span>{{end}}
|
||||||
|
</div>
|
||||||
{{end}}
|
{{end}}
|
||||||
{{else}}
|
</div>
|
||||||
<div class="info-card-value info-dim">Not found</div>
|
|
||||||
{{end}}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="steps-label">Pipeline</div>
|
<div class="info-card">
|
||||||
<div class="steps">
|
<div class="info-card-label">Cover Art</div>
|
||||||
{{stepCell "Clean Tags" .CleanTags ""}}
|
{{if .CoverArtStats.Found}}
|
||||||
{{stepCell "Metadata" .TagMetadata .FatalStep}}
|
{{if .CoverArtStats.Embedded}}
|
||||||
{{stepCell "Lyrics" .Lyrics ""}}
|
<div class="info-card-value info-ok">Embedded</div>
|
||||||
{{stepCell "ReplayGain" .ReplayGain .FatalStep}}
|
<div class="info-card-sub info-dim">{{.CoverArtStats.Source}}</div>
|
||||||
{{stepCell "Cover Art" .CoverArt .FatalStep}}
|
{{else}}
|
||||||
{{stepCell "Move" .Move ""}}
|
<div class="info-card-value info-warn">Found, not embedded</div>
|
||||||
</div>
|
<div class="info-card-sub info-dim">{{.CoverArtStats.Source}}</div>
|
||||||
|
{{end}}
|
||||||
|
{{else}}
|
||||||
|
<div class="info-card-value info-dim">Not found</div>
|
||||||
|
{{end}}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="steps-label">Pipeline</div>
|
||||||
|
<div class="steps">
|
||||||
|
{{stepCell "Clean Tags" .CleanTags ""}}
|
||||||
|
{{stepCell "Metadata" .TagMetadata .FatalStep}}
|
||||||
|
{{stepCell "Lyrics" .Lyrics ""}}
|
||||||
|
{{stepCell "ReplayGain" .ReplayGain .FatalStep}}
|
||||||
|
{{stepCell "Cover Art" .CoverArt .FatalStep}}
|
||||||
|
{{stepCell "Move" .Move ""}}
|
||||||
|
</div>
|
||||||
|
</article>
|
||||||
|
{{end}}
|
||||||
</div>
|
</div>
|
||||||
{{end}}
|
{{end}}
|
||||||
</div>
|
</section>
|
||||||
{{end}}
|
|
||||||
|
<!-- ── Discover ───────────────────────────────────────────────────────── -->
|
||||||
|
<section id="tab-discover" class="tab-pane">
|
||||||
|
<div class="content-box">
|
||||||
|
<div class="search-form">
|
||||||
|
<div class="type-toggle">
|
||||||
|
<button class="type-btn active" data-type="release">Release</button>
|
||||||
|
<button class="type-btn" data-type="artist">Artist</button>
|
||||||
|
</div>
|
||||||
|
<input id="search-q" class="search-input" type="search"
|
||||||
|
placeholder="Search MusicBrainz…" autocomplete="off">
|
||||||
|
<button id="search-btn" class="search-btn">Search</button>
|
||||||
|
</div>
|
||||||
|
<div id="search-results"></div>
|
||||||
|
</div>
|
||||||
|
<div class="content-box fetch-list" id="fetch-list"></div>
|
||||||
|
</section>
|
||||||
|
|
||||||
<footer>{{.Version}}</footer>
|
<footer>{{.Version}}</footer>
|
||||||
|
|
||||||
|
<script src="/static/app.js?v={{.Version}}" defer></script>
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|
|
||||||
10
main.go
10
main.go
|
|
@ -18,6 +18,9 @@ var importerRunning bool
|
||||||
|
|
||||||
//go:embed index.html.tmpl
|
//go:embed index.html.tmpl
|
||||||
var tmplFS embed.FS
|
var tmplFS embed.FS
|
||||||
|
|
||||||
|
//go:embed static
|
||||||
|
var staticFS embed.FS
|
||||||
var tmpl = template.Must(
|
var tmpl = template.Must(
|
||||||
template.New("index.html.tmpl").
|
template.New("index.html.tmpl").
|
||||||
Funcs(template.FuncMap{
|
Funcs(template.FuncMap{
|
||||||
|
|
@ -120,8 +123,15 @@ func handleRun(w http.ResponseWriter, r *http.Request) {
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
log.Printf("Music Importer %s starting on http://localhost:8080", version)
|
log.Printf("Music Importer %s starting on http://localhost:8080", version)
|
||||||
|
startMonitor()
|
||||||
|
http.Handle("/static/", http.FileServer(http.FS(staticFS)))
|
||||||
http.HandleFunc("/", handleHome)
|
http.HandleFunc("/", handleHome)
|
||||||
http.HandleFunc("/run", handleRun)
|
http.HandleFunc("/run", handleRun)
|
||||||
|
http.HandleFunc("/discover/search", handleDiscoverSearch)
|
||||||
|
http.HandleFunc("/discover/fetch", handleDiscoverFetch)
|
||||||
|
http.HandleFunc("/discover/fetch/artist", handleDiscoverFetchArtist)
|
||||||
|
http.HandleFunc("/discover/fetch/status", handleDiscoverFetchStatus)
|
||||||
|
http.HandleFunc("/discover/fetch/list", handleDiscoverFetchList)
|
||||||
|
|
||||||
log.Fatal(http.ListenAndServe(":8080", nil))
|
log.Fatal(http.ListenAndServe(":8080", nil))
|
||||||
}
|
}
|
||||||
|
|
|
||||||
16
metadata.go
16
metadata.go
|
|
@ -206,7 +206,9 @@ func snapMP3Bitrate(bpsStr string) int {
|
||||||
// A temp log file is passed to beets via -l so that skipped albums
|
// A temp log file is passed to beets via -l so that skipped albums
|
||||||
// (which exit 0 but produce a "skip" log entry) are detected and
|
// (which exit 0 but produce a "skip" log entry) are detected and
|
||||||
// returned as errors, triggering the MusicBrainz fallback.
|
// returned as errors, triggering the MusicBrainz fallback.
|
||||||
func tagWithBeets(path string) error {
|
// If mbid is non-empty it is passed as --search-id to pin beets to a specific
|
||||||
|
// MusicBrainz release.
|
||||||
|
func tagWithBeets(path, mbid string) error {
|
||||||
fmt.Println("→ Tagging with beets:", path)
|
fmt.Println("→ Tagging with beets:", path)
|
||||||
|
|
||||||
logFile, err := os.CreateTemp("", "beets-log-*.txt")
|
logFile, err := os.CreateTemp("", "beets-log-*.txt")
|
||||||
|
|
@ -217,7 +219,12 @@ func tagWithBeets(path string) error {
|
||||||
logFile.Close()
|
logFile.Close()
|
||||||
defer os.Remove(logPath)
|
defer os.Remove(logPath)
|
||||||
|
|
||||||
if err := runCmd("beet", "import", "-Cq", "-l", logPath, path); err != nil {
|
args := []string{"import", "-Cq", "-l", logPath}
|
||||||
|
if mbid != "" {
|
||||||
|
args = append(args, "--search-id", mbid)
|
||||||
|
}
|
||||||
|
args = append(args, path)
|
||||||
|
if err := runCmd("beet", args...); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -311,10 +318,11 @@ func fetchMusicBrainzInfo(filename string) (*MusicMetadata, error) {
|
||||||
|
|
||||||
// getAlbumMetadata attempts beets tagging on the album directory, reads tags
|
// getAlbumMetadata attempts beets tagging on the album directory, reads tags
|
||||||
// back from the first track, and falls back to MusicBrainz if tags are missing.
|
// back from the first track, and falls back to MusicBrainz if tags are missing.
|
||||||
func getAlbumMetadata(albumPath, trackPath string) (*MusicMetadata, MetadataSource, error) {
|
// If mbid is non-empty it is forwarded to beets as --search-id.
|
||||||
|
func getAlbumMetadata(albumPath, trackPath, mbid string) (*MusicMetadata, MetadataSource, error) {
|
||||||
fmt.Println("→ Tagging track with beets:", trackPath)
|
fmt.Println("→ Tagging track with beets:", trackPath)
|
||||||
|
|
||||||
beetsErr := tagWithBeets(albumPath)
|
beetsErr := tagWithBeets(albumPath, mbid)
|
||||||
if beetsErr != nil {
|
if beetsErr != nil {
|
||||||
fmt.Println("Beets tagging failed; fallback to manual MusicBrainz lookup:", beetsErr)
|
fmt.Println("Beets tagging failed; fallback to manual MusicBrainz lookup:", beetsErr)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
277
monitor.go
Normal file
277
monitor.go
Normal file
|
|
@ -0,0 +1,277 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// pendingDownload tracks a queued slskd download that should be auto-imported
|
||||||
|
// once all files have transferred successfully.
|
||||||
|
type pendingDownload struct {
|
||||||
|
MBID string
|
||||||
|
Artist string
|
||||||
|
Album string
|
||||||
|
Username string // slskd peer username
|
||||||
|
Dir string // remote directory path on the peer
|
||||||
|
Files []slskdFile // files that were queued for download
|
||||||
|
Entry *fetchEntry // fetch card to update with import progress
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
pendingMu sync.Mutex
|
||||||
|
pendingDownloads = make(map[string]*pendingDownload) // keyed by MBID
|
||||||
|
)
|
||||||
|
|
||||||
|
// registerDownload records a queued slskd download for monitoring and eventual
|
||||||
|
// auto-import. If entry is nil a new fetchEntry is created, keyed by mbid,
|
||||||
|
// so the frontend can discover it via /discover/fetch/list.
|
||||||
|
func registerDownload(mbid, artist, album string, folder *albumFolder, entry *fetchEntry) {
|
||||||
|
pd := &pendingDownload{
|
||||||
|
MBID: mbid,
|
||||||
|
Artist: artist,
|
||||||
|
Album: album,
|
||||||
|
Username: folder.Username,
|
||||||
|
Dir: folder.Dir,
|
||||||
|
Files: folder.Files,
|
||||||
|
Entry: entry,
|
||||||
|
}
|
||||||
|
|
||||||
|
if entry == nil {
|
||||||
|
e := newFetchEntry(mbid, artist, album)
|
||||||
|
e.appendLog(fmt.Sprintf("Queued %d files from %s — waiting for download",
|
||||||
|
len(folder.Files), folder.Username))
|
||||||
|
pd.Entry = e
|
||||||
|
}
|
||||||
|
|
||||||
|
pendingMu.Lock()
|
||||||
|
pendingDownloads[mbid] = pd
|
||||||
|
pendingMu.Unlock()
|
||||||
|
|
||||||
|
log.Printf("[monitor] registered: %q by %s (mbid: %s, peer: %s, %d files)",
|
||||||
|
album, artist, mbid, folder.Username, len(folder.Files))
|
||||||
|
}
|
||||||
|
|
||||||
|
// startMonitor launches a background goroutine that periodically checks whether
|
||||||
|
// pending downloads have completed and triggers import when they have.
|
||||||
|
func startMonitor() {
|
||||||
|
go func() {
|
||||||
|
for {
|
||||||
|
time.Sleep(15 * time.Second)
|
||||||
|
checkPendingDownloads()
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
log.Println("[monitor] started")
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkPendingDownloads polls slskd transfer state for every pending download
|
||||||
|
// and kicks off importPendingRelease for any that are fully complete.
|
||||||
|
func checkPendingDownloads() {
|
||||||
|
pendingMu.Lock()
|
||||||
|
if len(pendingDownloads) == 0 {
|
||||||
|
pendingMu.Unlock()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
snapshot := make(map[string]*pendingDownload, len(pendingDownloads))
|
||||||
|
for k, v := range pendingDownloads {
|
||||||
|
snapshot[k] = v
|
||||||
|
}
|
||||||
|
pendingMu.Unlock()
|
||||||
|
|
||||||
|
log.Printf("[monitor] checking %d pending download(s)", len(snapshot))
|
||||||
|
|
||||||
|
// Group by username to minimise API calls.
|
||||||
|
byUser := make(map[string][]*pendingDownload)
|
||||||
|
for _, pd := range snapshot {
|
||||||
|
byUser[pd.Username] = append(byUser[pd.Username], pd)
|
||||||
|
}
|
||||||
|
|
||||||
|
for username, pds := range byUser {
|
||||||
|
dirs, err := getSlskdTransfers(username)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("[monitor] failed to get transfers for %s: %v", username, err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Index transfer dirs by normalised path.
|
||||||
|
transfersByDir := make(map[string][]slskdTransferFile, len(dirs))
|
||||||
|
for _, d := range dirs {
|
||||||
|
norm := strings.ReplaceAll(d.Directory, "\\", "/")
|
||||||
|
transfersByDir[norm] = d.Files
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, pd := range pds {
|
||||||
|
normDir := strings.ReplaceAll(pd.Dir, "\\", "/")
|
||||||
|
files, ok := transfersByDir[normDir]
|
||||||
|
if !ok {
|
||||||
|
log.Printf("[monitor] transfer dir not found yet for %q (peer: %s)", pd.Dir, username)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if !allFilesCompleted(files) {
|
||||||
|
log.Printf("[monitor] %q by %s: download still in progress", pd.Album, pd.Artist)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
localDir := localDirForDownload(pd, files)
|
||||||
|
if localDir == "" {
|
||||||
|
log.Printf("[monitor] cannot determine local dir for %q by %s", pd.Album, pd.Artist)
|
||||||
|
pd.Entry.appendLog("Error: could not determine local download path from transfer info")
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Printf("[monitor] download complete: %q by %s → %s", pd.Album, pd.Artist, localDir)
|
||||||
|
|
||||||
|
// Remove from pending before starting import to avoid double-import.
|
||||||
|
pendingMu.Lock()
|
||||||
|
delete(pendingDownloads, pd.MBID)
|
||||||
|
pendingMu.Unlock()
|
||||||
|
|
||||||
|
go importPendingRelease(pd, localDir)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// allFilesCompleted reports whether every file in a transfer directory has
|
||||||
|
// reached a terminal Completed state. Returns false if files is empty.
|
||||||
|
func allFilesCompleted(files []slskdTransferFile) bool {
|
||||||
|
if len(files) == 0 {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
for _, f := range files {
|
||||||
|
if !strings.Contains(f.State, "Completed") {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// localDirForDownload resolves the local filesystem path for a completed download.
|
||||||
|
//
|
||||||
|
// Strategy 1 — localFilename from transfer metadata: slskd sets this field to
|
||||||
|
// the absolute path of the downloaded file. Works when paths are consistent
|
||||||
|
// across containers (same volume mount point).
|
||||||
|
//
|
||||||
|
// Strategy 2 — SLSKD_DOWNLOAD_DIR reconstruction: slskd stores files under
|
||||||
|
// {downloadDir}/{username}/{sanitized_remote_dir}/. Used when localFilename is
|
||||||
|
// empty or when SLSKD_DOWNLOAD_DIR is explicitly set to override.
|
||||||
|
func localDirForDownload(pd *pendingDownload, files []slskdTransferFile) string {
|
||||||
|
// Strategy 1: use localFilename from transfer response.
|
||||||
|
for _, f := range files {
|
||||||
|
if f.LocalFilename != "" {
|
||||||
|
dir := filepath.Dir(f.LocalFilename)
|
||||||
|
log.Printf("[monitor] local dir from localFilename: %s", dir)
|
||||||
|
return dir
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Strategy 2: reconstruct from SLSKD_DOWNLOAD_DIR.
|
||||||
|
// slskd places downloaded files directly into {downloadDir}/{album_folder_name}/,
|
||||||
|
// where the folder name is the last path component of the remote directory.
|
||||||
|
dlDir := os.Getenv("SLSKD_DOWNLOAD_DIR")
|
||||||
|
if dlDir == "" {
|
||||||
|
log.Printf("[monitor] localFilename empty and SLSKD_DOWNLOAD_DIR not set — cannot determine local dir for %q", pd.Album)
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
dir := filepath.Join(dlDir, filepath.Base(filepath.FromSlash(pd.Dir)))
|
||||||
|
log.Printf("[monitor] local dir reconstructed from SLSKD_DOWNLOAD_DIR: %s", dir)
|
||||||
|
return dir
|
||||||
|
}
|
||||||
|
|
||||||
|
// importPendingRelease runs the full import pipeline on a completed download.
|
||||||
|
// It mirrors RunImporter's per-album logic but uses the MBID for beets tagging.
|
||||||
|
func importPendingRelease(pd *pendingDownload, localDir string) {
|
||||||
|
entry := pd.Entry
|
||||||
|
logf := func(msg string) {
|
||||||
|
entry.appendLog("[import] " + msg)
|
||||||
|
log.Printf("[monitor/import %s] %s", pd.MBID, msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
logf(fmt.Sprintf("Starting import from %s", localDir))
|
||||||
|
|
||||||
|
libraryDir := os.Getenv("LIBRARY_DIR")
|
||||||
|
if libraryDir == "" {
|
||||||
|
entry.finish(fmt.Errorf("LIBRARY_DIR is not set"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
tracks, err := getAudioFiles(localDir)
|
||||||
|
if err != nil {
|
||||||
|
entry.finish(fmt.Errorf("scanning audio files: %w", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if len(tracks) == 0 {
|
||||||
|
entry.finish(fmt.Errorf("no audio files found in %s", localDir))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
logf(fmt.Sprintf("Found %d tracks", len(tracks)))
|
||||||
|
|
||||||
|
if err := cleanAlbumTags(localDir); err != nil {
|
||||||
|
logf(fmt.Sprintf("Clean tags warning: %v", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
md, src, err := getAlbumMetadata(localDir, tracks[0], pd.MBID)
|
||||||
|
if err != nil {
|
||||||
|
entry.finish(fmt.Errorf("metadata failed: %w", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
logf(fmt.Sprintf("Tagged via %s: %s — %s", src, md.Artist, md.Album))
|
||||||
|
|
||||||
|
if _, err := DownloadAlbumLyrics(localDir); err != nil {
|
||||||
|
logf(fmt.Sprintf("Lyrics warning: %v", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := applyReplayGain(localDir); err != nil {
|
||||||
|
entry.finish(fmt.Errorf("ReplayGain failed: %w", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
logf("ReplayGain applied")
|
||||||
|
|
||||||
|
if _, err := FindCoverImage(localDir); err != nil {
|
||||||
|
if err := DownloadCoverArt(localDir, md); err != nil {
|
||||||
|
logf(fmt.Sprintf("Cover art download warning: %v", err))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := EmbedAlbumArtIntoFolder(localDir); err != nil {
|
||||||
|
entry.finish(fmt.Errorf("cover embed failed: %w", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
logf("Cover art embedded")
|
||||||
|
|
||||||
|
var moveErr error
|
||||||
|
for _, track := range tracks {
|
||||||
|
if err := moveToLibrary(libraryDir, md, track); err != nil {
|
||||||
|
logf(fmt.Sprintf("Move warning: %v", err))
|
||||||
|
moveErr = err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
lyrics, _ := getLyricFiles(localDir)
|
||||||
|
for _, file := range lyrics {
|
||||||
|
if err := moveToLibrary(libraryDir, md, file); err != nil {
|
||||||
|
logf(fmt.Sprintf("Move lyrics warning: %v", err))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if coverImg, err := FindCoverImage(localDir); err == nil {
|
||||||
|
if err := moveToLibrary(libraryDir, md, coverImg); err != nil {
|
||||||
|
logf(fmt.Sprintf("Move cover warning: %v", err))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
os.Remove(localDir)
|
||||||
|
|
||||||
|
if moveErr != nil {
|
||||||
|
entry.finish(fmt.Errorf("import completed with move errors: %w", moveErr))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
logf("Import complete")
|
||||||
|
entry.finish(nil)
|
||||||
|
}
|
||||||
BIN
music-import
Executable file
BIN
music-import
Executable file
Binary file not shown.
435
slskd.go
Normal file
435
slskd.go
Normal file
|
|
@ -0,0 +1,435 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// slskdAttr is a Soulseek file attribute (bitrate, sample rate, bit depth, etc.).
|
||||||
|
// Attribute types: 0 = bitrate (kbps), 1 = duration (s), 2 = VBR flag,
|
||||||
|
//
|
||||||
|
// 4 = sample rate (Hz), 5 = bit depth.
|
||||||
|
type slskdAttr struct {
|
||||||
|
Type int `json:"type"`
|
||||||
|
Value int `json:"value"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// slskdFile is a single file in a slskd search response.
|
||||||
|
type slskdFile struct {
|
||||||
|
Filename string `json:"filename"`
|
||||||
|
Size int64 `json:"size"`
|
||||||
|
Extension string `json:"extension"`
|
||||||
|
Attributes []slskdAttr `json:"attributes"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// slskdPeerResponse is one peer's response to a search.
|
||||||
|
type slskdPeerResponse struct {
|
||||||
|
Username string `json:"username"`
|
||||||
|
Files []slskdFile `json:"files"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// slskdSearch is the search-state object returned by GET /api/v0/searches/{id}.
|
||||||
|
// File responses are not included here; fetch them from /searches/{id}/responses.
|
||||||
|
type slskdSearch struct {
|
||||||
|
ID string `json:"id"`
|
||||||
|
State string `json:"state"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Quality tiers; higher value = more preferred.
|
||||||
|
const (
|
||||||
|
qualityUnknown = 0
|
||||||
|
qualityMP3Any = 1
|
||||||
|
qualityMP3_320 = 2
|
||||||
|
qualityFLACOther = 3 // FLAC at unspecified or uncommon specs
|
||||||
|
qualityFLAC24_96 = 4
|
||||||
|
qualityFLAC16_44 = 5 // most preferred: standard CD-quality lossless
|
||||||
|
)
|
||||||
|
|
||||||
|
// albumFolder groups audio files from the same peer and directory path.
|
||||||
|
type albumFolder struct {
|
||||||
|
Username string
|
||||||
|
Dir string
|
||||||
|
Files []slskdFile
|
||||||
|
Quality int
|
||||||
|
}
|
||||||
|
|
||||||
|
func slskdBaseURL() string {
|
||||||
|
return strings.TrimRight(os.Getenv("SLSKD_URL"), "/")
|
||||||
|
}
|
||||||
|
|
||||||
|
// slskdDo performs an authenticated HTTP request against the slskd API.
|
||||||
|
func slskdDo(method, endpoint string, body interface{}) (*http.Response, error) {
|
||||||
|
base := slskdBaseURL()
|
||||||
|
if base == "" {
|
||||||
|
return nil, fmt.Errorf("SLSKD_URL is not configured")
|
||||||
|
}
|
||||||
|
|
||||||
|
var br io.Reader
|
||||||
|
if body != nil {
|
||||||
|
data, err := json.Marshal(body)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
br = bytes.NewReader(data)
|
||||||
|
}
|
||||||
|
|
||||||
|
req, err := http.NewRequest(method, base+endpoint, br)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if key := os.Getenv("SLSKD_API_KEY"); key != "" {
|
||||||
|
req.Header.Set("X-API-Key", key)
|
||||||
|
}
|
||||||
|
if body != nil {
|
||||||
|
req.Header.Set("Content-Type", "application/json")
|
||||||
|
}
|
||||||
|
|
||||||
|
return http.DefaultClient.Do(req)
|
||||||
|
}
|
||||||
|
|
||||||
|
// createSlskdSearch starts a new slskd search and returns its ID.
|
||||||
|
func createSlskdSearch(searchText string) (string, error) {
|
||||||
|
payload := map[string]interface{}{
|
||||||
|
"searchText": searchText,
|
||||||
|
"fileLimit": 10000,
|
||||||
|
"filterResponses": true,
|
||||||
|
"maximumPeerQueueLength": 1000,
|
||||||
|
"minimumPeerUploadSpeed": 0,
|
||||||
|
"responseLimit": 100,
|
||||||
|
"timeout": 15000,
|
||||||
|
}
|
||||||
|
|
||||||
|
resp, err := slskdDo("POST", "/api/v0/searches", payload)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusCreated && resp.StatusCode != http.StatusOK {
|
||||||
|
b, _ := io.ReadAll(resp.Body)
|
||||||
|
return "", fmt.Errorf("slskd search failed (%d): %s", resp.StatusCode, strings.TrimSpace(string(b)))
|
||||||
|
}
|
||||||
|
|
||||||
|
var s slskdSearch
|
||||||
|
if err := json.NewDecoder(resp.Body).Decode(&s); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return s.ID, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// slskdSearchIsTerminal reports whether a slskd SearchStates string has reached
|
||||||
|
// a terminal state. slskd serialises its [Flags] enum as a comma-separated list
|
||||||
|
// (e.g. "Completed, TimedOut"), so we check for containment rather than equality.
|
||||||
|
func slskdSearchIsTerminal(state string) bool {
|
||||||
|
for _, term := range []string{"Completed", "TimedOut", "Errored", "Cancelled"} {
|
||||||
|
if strings.Contains(state, term) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// pollSlskdSearch waits up to 30 s for a search to reach a terminal state,
|
||||||
|
// then returns the responses from the dedicated /responses sub-endpoint.
|
||||||
|
// Each poll check-in is reported via logf.
|
||||||
|
func pollSlskdSearch(id string, logf func(string)) ([]slskdPeerResponse, error) {
|
||||||
|
deadline := time.Now().Add(30 * time.Second)
|
||||||
|
for {
|
||||||
|
resp, err := slskdDo("GET", "/api/v0/searches/"+id, nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
var s slskdSearch
|
||||||
|
err = json.NewDecoder(resp.Body).Decode(&s)
|
||||||
|
resp.Body.Close()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
logf(fmt.Sprintf("Search state: %s", s.State))
|
||||||
|
|
||||||
|
if slskdSearchIsTerminal(s.State) {
|
||||||
|
return fetchSlskdResponses(id, logf)
|
||||||
|
}
|
||||||
|
|
||||||
|
if time.Now().After(deadline) {
|
||||||
|
logf("Poll deadline reached, fetching current results")
|
||||||
|
return fetchSlskdResponses(id, logf)
|
||||||
|
}
|
||||||
|
time.Sleep(2 * time.Second)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetchSlskdResponses fetches file responses from the dedicated sub-endpoint.
|
||||||
|
// The main GET /searches/{id} endpoint only returns metadata; responses live at
|
||||||
|
// /searches/{id}/responses.
|
||||||
|
func fetchSlskdResponses(id string, logf func(string)) ([]slskdPeerResponse, error) {
|
||||||
|
resp, err := slskdDo("GET", "/api/v0/searches/"+id+"/responses", nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
b, _ := io.ReadAll(resp.Body)
|
||||||
|
return nil, fmt.Errorf("fetching responses failed (%d): %s", resp.StatusCode, strings.TrimSpace(string(b)))
|
||||||
|
}
|
||||||
|
|
||||||
|
var responses []slskdPeerResponse
|
||||||
|
if err := json.NewDecoder(resp.Body).Decode(&responses); err != nil {
|
||||||
|
return nil, fmt.Errorf("decoding responses: %w", err)
|
||||||
|
}
|
||||||
|
logf(fmt.Sprintf("Fetched %d peer responses", len(responses)))
|
||||||
|
return responses, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// deleteSlskdSearch removes a search from slskd (best-effort cleanup).
|
||||||
|
func deleteSlskdSearch(id string) {
|
||||||
|
resp, err := slskdDo("DELETE", "/api/v0/searches/"+id, nil)
|
||||||
|
if err == nil {
|
||||||
|
resp.Body.Close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// fileDir returns the directory portion of a Soulseek filename,
|
||||||
|
// normalising backslashes to forward slashes first.
|
||||||
|
func fileDir(filename string) string {
|
||||||
|
return path.Dir(strings.ReplaceAll(filename, "\\", "/"))
|
||||||
|
}
|
||||||
|
|
||||||
|
// normaliseExt returns a lower-case extension that always starts with ".".
|
||||||
|
func normaliseExt(raw string) string {
|
||||||
|
ext := strings.ToLower(raw)
|
||||||
|
if ext != "" && !strings.HasPrefix(ext, ".") {
|
||||||
|
ext = "." + ext
|
||||||
|
}
|
||||||
|
return ext
|
||||||
|
}
|
||||||
|
|
||||||
|
// fileQuality scores a single file by the preferred quality tier.
|
||||||
|
func fileQuality(f slskdFile) int {
|
||||||
|
ext := normaliseExt(f.Extension)
|
||||||
|
if ext == "." || ext == "" {
|
||||||
|
ext = strings.ToLower(path.Ext(strings.ReplaceAll(f.Filename, "\\", "/")))
|
||||||
|
}
|
||||||
|
|
||||||
|
switch ext {
|
||||||
|
case ".flac":
|
||||||
|
var depth, rate int
|
||||||
|
for _, a := range f.Attributes {
|
||||||
|
switch a.Type {
|
||||||
|
case 4:
|
||||||
|
rate = a.Value
|
||||||
|
case 5:
|
||||||
|
depth = a.Value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if depth == 16 && rate == 44100 {
|
||||||
|
return qualityFLAC16_44
|
||||||
|
}
|
||||||
|
if depth == 24 && rate == 96000 {
|
||||||
|
return qualityFLAC24_96
|
||||||
|
}
|
||||||
|
return qualityFLACOther
|
||||||
|
|
||||||
|
case ".mp3":
|
||||||
|
for _, a := range f.Attributes {
|
||||||
|
if a.Type == 0 && a.Value >= 315 {
|
||||||
|
return qualityMP3_320
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return qualityMP3Any
|
||||||
|
}
|
||||||
|
|
||||||
|
return qualityUnknown
|
||||||
|
}
|
||||||
|
|
||||||
|
// groupAlbumFolders groups audio files by (username, directory) and scores each group.
|
||||||
|
func groupAlbumFolders(responses []slskdPeerResponse) []albumFolder {
|
||||||
|
type key struct{ user, dir string }
|
||||||
|
m := make(map[key]*albumFolder)
|
||||||
|
|
||||||
|
for _, r := range responses {
|
||||||
|
for _, f := range r.Files {
|
||||||
|
ext := normaliseExt(f.Extension)
|
||||||
|
if ext == "." || ext == "" {
|
||||||
|
ext = strings.ToLower(path.Ext(strings.ReplaceAll(f.Filename, "\\", "/")))
|
||||||
|
}
|
||||||
|
if ext != ".flac" && ext != ".mp3" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
k := key{r.Username, fileDir(f.Filename)}
|
||||||
|
if m[k] == nil {
|
||||||
|
m[k] = &albumFolder{Username: r.Username, Dir: k.dir}
|
||||||
|
}
|
||||||
|
m[k].Files = append(m[k].Files, f)
|
||||||
|
if q := fileQuality(f); q > m[k].Quality {
|
||||||
|
m[k].Quality = q
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
out := make([]albumFolder, 0, len(m))
|
||||||
|
for _, af := range m {
|
||||||
|
out = append(out, *af)
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
// bestAlbumFolder picks the highest-quality folder; file count breaks ties.
|
||||||
|
func bestAlbumFolder(folders []albumFolder) *albumFolder {
|
||||||
|
if len(folders) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
best := &folders[0]
|
||||||
|
for i := 1; i < len(folders); i++ {
|
||||||
|
a := &folders[i]
|
||||||
|
if a.Quality > best.Quality || (a.Quality == best.Quality && len(a.Files) > len(best.Files)) {
|
||||||
|
best = a
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return best
|
||||||
|
}
|
||||||
|
|
||||||
|
// queueSlskdDownload sends a batch download request to slskd for all files in folder.
|
||||||
|
func queueSlskdDownload(folder *albumFolder) error {
|
||||||
|
type dlFile struct {
|
||||||
|
Filename string `json:"filename"`
|
||||||
|
Size int64 `json:"size"`
|
||||||
|
}
|
||||||
|
files := make([]dlFile, len(folder.Files))
|
||||||
|
for i, f := range folder.Files {
|
||||||
|
files[i] = dlFile{Filename: f.Filename, Size: f.Size}
|
||||||
|
}
|
||||||
|
|
||||||
|
resp, err := slskdDo("POST", "/api/v0/transfers/downloads/"+folder.Username, files)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusCreated && resp.StatusCode != http.StatusOK {
|
||||||
|
b, _ := io.ReadAll(resp.Body)
|
||||||
|
return fmt.Errorf("slskd download request failed (%d): %s", resp.StatusCode, strings.TrimSpace(string(b)))
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// qualityLabel returns a human-readable label for a quality tier constant.
|
||||||
|
func qualityLabel(q int) string {
|
||||||
|
switch q {
|
||||||
|
case qualityFLAC16_44:
|
||||||
|
return "FLAC 16bit/44.1kHz"
|
||||||
|
case qualityFLAC24_96:
|
||||||
|
return "FLAC 24bit/96kHz"
|
||||||
|
case qualityFLACOther:
|
||||||
|
return "FLAC"
|
||||||
|
case qualityMP3_320:
|
||||||
|
return "MP3 320kbps"
|
||||||
|
case qualityMP3Any:
|
||||||
|
return "MP3"
|
||||||
|
default:
|
||||||
|
return "unknown"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// slskdTransferFile is one file entry in a slskd transfers response.
|
||||||
|
type slskdTransferFile struct {
|
||||||
|
Filename string `json:"filename"`
|
||||||
|
LocalFilename string `json:"localFilename"`
|
||||||
|
State string `json:"state"`
|
||||||
|
Size int64 `json:"size"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// slskdTransferDir groups transfer files by remote directory.
|
||||||
|
type slskdTransferDir struct {
|
||||||
|
Directory string `json:"directory"`
|
||||||
|
Files []slskdTransferFile `json:"files"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// slskdUserTransfers is the object returned by GET /api/v0/transfers/downloads/{username}.
|
||||||
|
type slskdUserTransfers struct {
|
||||||
|
Directories []slskdTransferDir `json:"directories"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// getSlskdTransfers returns all active/pending download transfer directories for a peer.
|
||||||
|
func getSlskdTransfers(username string) ([]slskdTransferDir, error) {
|
||||||
|
resp, err := slskdDo("GET", "/api/v0/transfers/downloads/"+username, nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
b, _ := io.ReadAll(resp.Body)
|
||||||
|
return nil, fmt.Errorf("slskd transfers (%d): %s", resp.StatusCode, strings.TrimSpace(string(b)))
|
||||||
|
}
|
||||||
|
|
||||||
|
var ut slskdUserTransfers
|
||||||
|
if err := json.NewDecoder(resp.Body).Decode(&ut); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return ut.Directories, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetchRelease searches slskd for an album, queues the best-quality match for
|
||||||
|
// download, and returns the chosen folder so the caller can monitor completion.
|
||||||
|
// mbid, if non-empty, will be stored for use during import (beets --search-id).
|
||||||
|
func fetchRelease(artist, album, mbid string, logf func(string)) (*albumFolder, error) {
|
||||||
|
query := artist + " " + album
|
||||||
|
log.Printf("[discover] fetch started: %q by %s", album, artist)
|
||||||
|
logf("Starting fetch for: " + query)
|
||||||
|
|
||||||
|
logf("Creating slskd search…")
|
||||||
|
id, err := createSlskdSearch(query)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("create search: %w", err)
|
||||||
|
}
|
||||||
|
log.Printf("[discover] slskd search created: %s", id)
|
||||||
|
logf(fmt.Sprintf("Search created (id: %s)", id))
|
||||||
|
defer func() {
|
||||||
|
log.Printf("[discover] deleting slskd search %s", id)
|
||||||
|
deleteSlskdSearch(id)
|
||||||
|
}()
|
||||||
|
|
||||||
|
logf("Polling for results…")
|
||||||
|
responses, err := pollSlskdSearch(id, logf)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("poll search: %w", err)
|
||||||
|
}
|
||||||
|
log.Printf("[discover] search %s finished: %d peer responses", id, len(responses))
|
||||||
|
logf(fmt.Sprintf("Search finished: %d peer responses received", len(responses)))
|
||||||
|
|
||||||
|
logf("Grouping results into album folders…")
|
||||||
|
folders := groupAlbumFolders(responses)
|
||||||
|
log.Printf("[discover] grouped into %d candidate album folders", len(folders))
|
||||||
|
logf(fmt.Sprintf("Found %d candidate album folders", len(folders)))
|
||||||
|
|
||||||
|
if len(folders) == 0 {
|
||||||
|
return nil, fmt.Errorf("no audio files found for %q by %s", album, artist)
|
||||||
|
}
|
||||||
|
|
||||||
|
best := bestAlbumFolder(folders)
|
||||||
|
log.Printf("[discover] selected folder: %s from %s (%s, %d files)",
|
||||||
|
best.Dir, best.Username, qualityLabel(best.Quality), len(best.Files))
|
||||||
|
logf(fmt.Sprintf("Selected folder: %s", best.Dir))
|
||||||
|
logf(fmt.Sprintf(" Peer: %s | Quality: %s | Files: %d",
|
||||||
|
best.Username, qualityLabel(best.Quality), len(best.Files)))
|
||||||
|
|
||||||
|
logf(fmt.Sprintf("Queuing %d files for download…", len(best.Files)))
|
||||||
|
if err := queueSlskdDownload(best); err != nil {
|
||||||
|
return nil, fmt.Errorf("queue download: %w", err)
|
||||||
|
}
|
||||||
|
log.Printf("[discover] download queued: %d files from %s", len(best.Files), best.Username)
|
||||||
|
logf("Download queued — waiting for completion before import")
|
||||||
|
return best, nil
|
||||||
|
}
|
||||||
284
static/app.js
Normal file
284
static/app.js
Normal file
|
|
@ -0,0 +1,284 @@
|
||||||
|
'use strict';

// IDs of fetch cards we've already created, so we don't duplicate them.
// Shared between the user-triggered fetch starters (via addFetchCard) and
// the background list poller (pollFetchList).
const knownFetchIds = new Set();

// Wire up all page behavior once the DOM is ready.
document.addEventListener('DOMContentLoaded', () => {
  initTabs();      // tab-bar switching
  initSearch();    // search form + delegated result-button clicks
  initFetchList(); // background polling for server-created fetch entries
});
|
||||||
|
|
||||||
|
// ── Tabs ───────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// Delegate clicks on the tab bar so a single listener handles every tab button.
function initTabs() {
  const tabBar = document.querySelector('.tabs');
  tabBar.addEventListener('click', event => {
    const clicked = event.target.closest('.tab-btn');
    if (clicked) {
      showTab(clicked.dataset.tab);
    }
  });
}
|
||||||
|
|
||||||
|
// Activate the pane and button for tab `name`; deactivate all others.
function showTab(name) {
  for (const pane of document.querySelectorAll('.tab-pane')) {
    pane.classList.remove('active');
  }
  for (const button of document.querySelectorAll('.tab-btn')) {
    button.classList.remove('active');
  }
  document.getElementById('tab-' + name).classList.add('active');
  document.querySelector(`.tab-btn[data-tab="${name}"]`).classList.add('active');
}
|
||||||
|
|
||||||
|
// ── Search ─────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// Currently selected MusicBrainz search type: 'release' or 'artist'.
// Mutated by setSearchType(); read by doSearch() and renderResults().
let searchType = 'release';
|
||||||
|
|
||||||
|
// Hook up the search-type toggle, the search button/input, and a delegated
// click handler for the dynamically rendered result buttons.
function initSearch() {
  const typeToggle = document.querySelector('.type-toggle');
  typeToggle.addEventListener('click', event => {
    const typeBtn = event.target.closest('.type-btn');
    if (typeBtn) {
      setSearchType(typeBtn.dataset.type);
    }
  });

  const button = document.getElementById('search-btn');
  const input = document.getElementById('search-q');
  button.addEventListener('click', doSearch);
  input.addEventListener('keydown', event => {
    if (event.key === 'Enter') doSearch();
  });

  // Result rows are re-rendered on every search, so listen on the stable
  // container rather than on each button.
  const results = document.getElementById('search-results');
  results.addEventListener('click', event => {
    const fetchBtn = event.target.closest('.fetch-btn');
    if (!fetchBtn || fetchBtn.disabled) return;
    if (fetchBtn.dataset.fetchType === 'artist') {
      startArtistFetch(fetchBtn);
    } else {
      startReleaseFetch(fetchBtn);
    }
  });
}
|
||||||
|
|
||||||
|
// Record the chosen search type and highlight the matching toggle button.
function setSearchType(type) {
  searchType = type;
  for (const button of document.querySelectorAll('.type-btn')) {
    button.classList.toggle('active', button.dataset.type === type);
  }
}
|
||||||
|
|
||||||
|
// Run a MusicBrainz search for the current query and render the results.
// The button is disabled for the duration of the request and always restored.
async function doSearch() {
  const query = document.getElementById('search-q').value.trim();
  if (!query) return;

  const button = document.getElementById('search-btn');
  const resultsEl = document.getElementById('search-results');

  button.disabled = true;
  button.textContent = 'Searching\u2026';
  resultsEl.innerHTML = '<p class="search-msg">Searching MusicBrainz\u2026</p>';

  try {
    const resp = await fetch(`/discover/search?q=${encodeURIComponent(query)}&type=${searchType}`);
    if (!resp.ok) {
      // Prefer the server's error body; fall back to the HTTP status text.
      const text = await resp.text();
      throw new Error(text || resp.statusText);
    }
    renderResults(await resp.json());
  } catch (err) {
    resultsEl.innerHTML = `<p class="search-msg error">Error: ${esc(err.message)}</p>`;
  } finally {
    button.disabled = false;
    button.textContent = 'Search';
  }
}
|
||||||
|
|
||||||
|
// ── Results rendering ──────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// Render the search results list, choosing the row renderer that matches the
// search type in effect. Shows a placeholder message when there are none.
function renderResults(data) {
  const container = document.getElementById('search-results');
  if (!data || data.length === 0) {
    container.innerHTML = '<p class="search-msg">No results found.</p>';
    return;
  }
  const render = searchType === 'artist' ? renderArtist : renderRelease;
  container.innerHTML = data.map(render).join('');
}
|
||||||
|
|
||||||
|
// Render one MusicBrainz release search result as a result row with a
// "Fetch" button that carries the data needed to start a release fetch.
function renderRelease(r) {
  const credits = r['artist-credit'] ?? [];
  // Each artist-credit entry carries an optional `joinphrase` (" & ",
  // " feat. ", …) separating it from the next credit. Joining bare names
  // with '' would concatenate multi-artist credits with no separator
  // ("ArtistAArtistB"), so append the joinphrase to each name.
  const artist = credits
    .map(c => (c.name || c.artist?.name || '') + (c.joinphrase || ''))
    .join('') || 'Unknown Artist';
  const year = r.date?.substring(0, 4) ?? '';
  const type = r['release-group']?.['primary-type'] ?? '';
  const meta = [year, type].filter(Boolean).join(' \u00b7 ');

  return `
    <div class="result-row">
      <div class="result-info">
        <span class="result-title">${esc(artist)} \u2014 ${esc(r.title)}</span>
        ${meta ? `<span class="result-meta">${esc(meta)}</span>` : ''}
      </div>
      <button class="fetch-btn"
              data-fetch-type="release"
              data-id="${esc(r.id)}"
              data-artist="${esc(artist)}"
              data-album="${esc(r.title)}">Fetch</button>
    </div>`;
}
|
||||||
|
|
||||||
|
// Render one MusicBrainz artist search result as a result row with a
// "Fetch All" (full discography) button.
function renderArtist(a) {
  const disambiguation = a.disambiguation ? ` (${esc(a.disambiguation)})` : '';
  const countryMeta = a.country ? `<span class="result-meta">${esc(a.country)}</span>` : '';
  return `
    <div class="result-row">
      <div class="result-info">
        <span class="result-title">${esc(a.name)}${disambiguation}</span>
        ${countryMeta}
      </div>
      <button class="fetch-btn"
              data-fetch-type="artist"
              data-id="${esc(a.id)}"
              data-name="${esc(a.name)}">Fetch All</button>
    </div>`;
}
|
||||||
|
|
||||||
|
// ── Fetch operations ───────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// POST a release fetch request, then add a progress card and begin polling.
// On failure the button is re-enabled and an error card is shown.
async function startReleaseFetch(btn) {
  const { id, artist, album } = btn.dataset;
  btn.disabled = true;
  btn.textContent = 'Fetching\u2026';

  try {
    const resp = await fetch('/discover/fetch', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ id, artist, album }),
    });
    if (!resp.ok) {
      const text = await resp.text();
      throw new Error(text || resp.statusText);
    }
    await resp.json();
    addFetchCard(id, `${artist} \u2014 ${album}`);
    pollFetch(id);
  } catch (err) {
    btn.disabled = false;
    btn.textContent = 'Fetch';
    showFetchError(err.message);
  }
}
|
||||||
|
|
||||||
|
// POST an artist (full discography) fetch request, then add a progress card
// and begin polling. On failure the button is re-enabled and an error card
// is shown.
async function startArtistFetch(btn) {
  const { id, name } = btn.dataset;
  btn.disabled = true;
  btn.textContent = 'Fetching\u2026';

  try {
    const resp = await fetch('/discover/fetch/artist', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ id, name }),
    });
    if (!resp.ok) {
      const text = await resp.text();
      throw new Error(text || resp.statusText);
    }
    await resp.json();
    addFetchCard(id, `${name} \u2014 full discography`);
    pollFetch(id);
  } catch (err) {
    btn.disabled = false;
    btn.textContent = 'Fetch All';
    showFetchError(err.message);
  }
}
|
||||||
|
|
||||||
|
// ── Fetch cards ────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// Create and prepend a progress card for fetch `id`, and register the id so
// the list poller doesn't create a duplicate card for it.
function addFetchCard(id, title) {
  knownFetchIds.add(id);
  const card = document.createElement('div');
  card.className = 'fetch-card';
  card.id = `fetch-${id}`;
  card.innerHTML = `
    <div class="fetch-header">
      <span class="fetch-title">${esc(title)}</span>
      <span class="fetch-status" id="fstatus-${id}">In progress\u2026</span>
    </div>
    <div class="fetch-log" id="flog-${id}"></div>`;
  document.getElementById('fetch-list').prepend(card);
}
|
||||||
|
|
||||||
|
// Poll the status endpoint for fetch `id`, mirroring the server-side log into
// the card. Re-polls every 2 s until the server reports done; on any error
// (network, bad JSON) it retries after 3 s instead.
async function pollFetch(id) {
  try {
    const resp = await fetch(`/discover/fetch/status?id=${encodeURIComponent(id)}`);
    const data = await resp.json();

    const logEl = document.getElementById(`flog-${id}`);
    const statusEl = document.getElementById(`fstatus-${id}`);
    const card = document.getElementById(`fetch-${id}`);

    if (logEl && data.log) {
      logEl.innerHTML = data.log
        .map(line => `<div class="log-line">${esc(line)}</div>`)
        .join('');
      logEl.scrollTop = logEl.scrollHeight; // keep the newest lines visible
    }

    if (!data.done) {
      setTimeout(() => pollFetch(id), 2000);
      return;
    }

    if (data.success) {
      statusEl?.setAttribute('class', 'fetch-status fetch-status-ok');
      if (statusEl) statusEl.textContent = '\u2713 done';
      card?.classList.add('fetch-card-ok');
    } else {
      statusEl?.setAttribute('class', 'fetch-status fetch-status-err');
      if (statusEl) statusEl.textContent = '\u2717 failed';
      card?.classList.add('fetch-card-err');
      if (data.error && logEl) {
        logEl.innerHTML += `<div class="log-line log-line-err">${esc(data.error)}</div>`;
        logEl.scrollTop = logEl.scrollHeight;
      }
    }
  } catch {
    setTimeout(() => pollFetch(id), 3000);
  }
}
|
||||||
|
|
||||||
|
// ── Fetch list polling ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// Polls /discover/fetch/list every 5 s to discover server-created fetch entries
|
||||||
|
// (e.g. per-album cards spawned during an artist fetch) and create cards for them.
|
||||||
|
// Start the background fetch-list poller (it reschedules itself forever).
function initFetchList() {
  pollFetchList();
}
|
||||||
|
|
||||||
|
// Poll /discover/fetch/list every 5 s to discover server-created fetch
// entries (e.g. per-album cards spawned during an artist fetch) and create
// cards for any we haven't seen. Errors are swallowed; polling never stops.
async function pollFetchList() {
  try {
    const resp = await fetch('/discover/fetch/list');
    const items = resp.ok ? await resp.json() : null;
    if (items) {
      for (const item of items) {
        if (knownFetchIds.has(item.id)) continue;
        knownFetchIds.add(item.id);
        addFetchCard(item.id, item.title);
        if (!item.done) pollFetch(item.id);
      }
    }
  } catch {
    // Transient network errors are expected; just try again next tick.
  } finally {
    setTimeout(pollFetchList, 5000);
  }
}
|
||||||
|
|
||||||
|
// ── Utilities ──────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// Prepend an error card to the fetch list when a fetch fails to start.
function showFetchError(msg) {
  const card = document.createElement('div');
  card.className = 'fetch-card fetch-card-err';
  card.innerHTML = `<div class="fetch-header">
      <span class="fetch-title">Fetch failed</span>
      <span class="fetch-status fetch-status-err">\u2717 error</span>
    </div>
    <div class="fetch-log"><div class="log-line log-line-err">${esc(msg)}</div></div>`;
  document.getElementById('fetch-list').prepend(card);
}
|
||||||
|
|
||||||
|
// Escape text for safe interpolation into an HTML string. Nullish input
// becomes the empty string; non-strings are stringified first.
//
// As written, each replace substituted a character for itself ('&' -> '&',
// '<' -> '<', …), making esc() a no-op — every innerHTML interpolation of
// API-supplied text (titles, artist names, error messages) could inject
// markup. Replace with the corresponding HTML entities; '&' must be escaped
// first so the later entities aren't double-escaped.
function esc(s) {
  return String(s ?? '')
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;');
}
|
||||||
445
static/style.css
Normal file
445
static/style.css
Normal file
|
|
@ -0,0 +1,445 @@
|
||||||
|
/* static/style.css — dark-theme styles for the importer web UI.
   Layout: design tokens in :root, then base/reset, then one section per UI
   area (tabs, import tab, discover tab, footer), then responsive overrides. */

/* ── Custom properties ────────────────────────────────────────────────────── */

:root {
  /* Surfaces and borders, darkest to lightest. */
  --bg: #111;
  --surface: #1a1a1a;
  --surface-hi: #222;
  --border: #2a2a2a;
  --border-focus: #555;

  /* Text, most to least prominent. */
  --text: #eee;
  --text-secondary: #aaa;
  --text-muted: #777;
  --text-dim: #555;

  /* Status colors: green = ok, amber = warning, red = failure. */
  --green: #4caf50;
  --green-bg: #1e4d2b;
  --green-hover: #1e3d1e;
  --green-border: #3a7a3a;
  --amber: #f0a500;
  --amber-bg: #4d3a00;
  --red: #e05050;
  --red-bg: #4d1a1a;
  --red-text: #c0392b;

  /* Per-source colors for the metadata-origin pills. */
  --pill-beets: #7ec8e3;
  --pill-mb: #c084fc;
  --pill-tags: #f0a500;

  --radius-lg: 8px;
  --radius: 6px;
  --radius-sm: 5px;
  --radius-xs: 4px;

  --max-w: 860px;
  --pad-x: 24px;
}

/* ── Reset & base ─────────────────────────────────────────────────────────── */

*, *::before, *::after { box-sizing: border-box; }

body {
  font-family: system-ui, -apple-system, sans-serif;
  background: var(--bg);
  color: var(--text);
  margin: 0;
  padding: 48px var(--pad-x) 80px;
  text-align: center;
}

h1 {
  margin: 0 0 24px;
  font-size: clamp(20px, 4vw, 28px);
}

/* ── Tabs ─────────────────────────────────────────────────────────────────── */

.tabs {
  display: inline-flex;
  gap: 4px;
  background: var(--surface);
  border: 1px solid var(--border);
  border-radius: var(--radius-lg);
  padding: 4px;
  margin-bottom: 36px;
}

.tab-btn {
  font-size: 14px;
  min-height: 36px;
  padding: 0 24px;
  border-radius: var(--radius);
  border: none;
  cursor: pointer;
  background: transparent;
  color: var(--text-muted);
  transition: background 0.15s, color 0.15s;
}
.tab-btn.active {
  background: var(--surface-hi);
  color: var(--text);
}

/* Panes are toggled by app.js adding/removing .active. */
.tab-pane { display: none; }
.tab-pane.active { display: block; }

/* ── Shared card / content container ─────────────────────────────────────── */

.content-box {
  max-width: var(--max-w);
  margin: 0 auto;
  text-align: left;
}

/* ── Import tab — run button ─────────────────────────────────────────────── */

.run-btn {
  font-size: clamp(18px, 4vw, 28px);
  padding: 18px 40px;
  border-radius: 10px;
  border: none;
  cursor: pointer;
  background: var(--green);
  color: #fff;
  transition: opacity 0.15s;
}
.run-btn:hover:not(:disabled) { opacity: 0.88; }
.run-btn:disabled {
  background: #555;
  cursor: not-allowed;
}

/* ── Import tab — session summary ────────────────────────────────────────── */

.session { margin-top: 48px; }

.session-header {
  display: flex;
  justify-content: space-between;
  align-items: baseline;
  flex-wrap: wrap;
  gap: 4px;
  border-bottom: 1px solid #333;
  padding-bottom: 8px;
  margin-bottom: 20px;
}
.session-header h2 { margin: 0; font-size: 16px; color: var(--text-secondary); }
.session-header .duration { font-size: 13px; color: var(--text-dim); }

/* ── Album card ───────────────────────────────────────────────────────────── */

.album {
  background: var(--surface);
  border: 1px solid var(--border);
  border-radius: var(--radius-lg);
  padding: 16px 20px;
  margin-bottom: 12px;
}

.album-header {
  display: flex;
  align-items: center;
  gap: 10px;
  margin-bottom: 10px;
  flex-wrap: wrap;
}
.album-name {
  font-weight: 600;
  font-size: 15px;
  flex: 1;
  min-width: 0;
  white-space: nowrap;
  overflow: hidden;
  text-overflow: ellipsis;
}

/* Outcome badge shown next to the album name. */
.badge {
  font-size: 11px;
  font-weight: 700;
  padding: 2px 8px;
  border-radius: var(--radius-xs);
  white-space: nowrap;
  flex-shrink: 0;
}
.badge-ok { background: var(--green-bg); color: var(--green); }
.badge-warn { background: var(--amber-bg); color: var(--amber); }
.badge-fatal { background: var(--red-bg); color: var(--red); }

/* ── Metadata row ─────────────────────────────────────────────────────────── */

.metadata {
  display: flex;
  align-items: baseline;
  flex-wrap: wrap;
  gap: 10px;
  font-size: 12px;
  color: var(--text-muted);
  margin-bottom: 12px;
}
.metadata-title { color: var(--text-secondary); font-size: 13px; }

.metadata-pill {
  display: inline-flex;
  align-items: center;
  gap: 4px;
  background: var(--surface-hi);
  border-radius: var(--radius-xs);
  padding: 2px 7px;
  font-size: 11px;
}
/* Pill text color keyed by metadata source; class suffixes match the
   source identifiers used in the markup. */
.pill-label { color: var(--text-dim); }
.pill-beets { color: var(--pill-beets); }
.pill-musicbrainz { color: var(--pill-mb); }
.pill-file_tags { color: var(--pill-tags); }
.pill-unknown { color: #888; }

/* ── Info grid ────────────────────────────────────────────────────────────── */

.info-grid {
  display: grid;
  grid-template-columns: repeat(auto-fill, minmax(160px, 1fr));
  gap: 6px;
  margin-bottom: 12px;
}
.info-card {
  background: var(--surface-hi);
  border-radius: var(--radius);
  padding: 8px 12px;
  font-size: 12px;
}
.info-card-label {
  font-size: 10px;
  text-transform: uppercase;
  letter-spacing: 0.06em;
  color: var(--text-dim);
  margin-bottom: 4px;
}
.info-card-value { color: var(--text-secondary); font-size: 13px; font-weight: 600; }
.info-card-sub { margin-top: 3px; color: var(--text-dim); font-size: 11px; line-height: 1.4; }

.info-ok { color: var(--green); }
.info-warn { color: var(--amber); }
.info-dim { color: var(--text-dim); }

/* ── Pipeline steps ───────────────────────────────────────────────────────── */

.steps-label {
  font-size: 10px;
  text-transform: uppercase;
  letter-spacing: 0.06em;
  color: #444;
  margin-bottom: 6px;
}
.steps {
  display: grid;
  grid-template-columns: repeat(auto-fill, minmax(180px, 1fr));
  gap: 6px;
}
.step {
  font-size: 12px;
  padding: 5px 10px;
  border-radius: var(--radius-sm);
  background: var(--surface-hi);
  display: flex;
  flex-direction: column;
  gap: 2px;
}
.step-label { color: #888; }
.step-ok { color: var(--green); }
.step-warn { color: var(--amber); }
.step-fatal { color: var(--red); }
.step-err { font-size: 11px; color: var(--red-text); margin-top: 2px; word-break: break-word; }

/* ── Discover tab — search form ───────────────────────────────────────────── */

.search-form {
  display: flex;
  gap: 8px;
  align-items: stretch;
  margin-bottom: 20px;
}

.type-toggle {
  display: flex;
  border: 1px solid #333;
  border-radius: var(--radius);
  overflow: hidden;
  flex-shrink: 0;
}
.type-btn {
  font-size: 13px;
  padding: 0 16px;
  border: none;
  background: var(--surface);
  color: var(--text-muted);
  cursor: pointer;
  transition: background 0.15s, color 0.15s;
  white-space: nowrap;
}
.type-btn.active {
  background: var(--surface-hi);
  color: var(--text);
}

.search-input {
  flex: 1;
  min-width: 0;
  font-size: 14px;
  padding: 0 12px;
  height: 38px;
  background: var(--surface);
  border: 1px solid #333;
  border-radius: var(--radius);
  color: var(--text);
  outline: none;
  transition: border-color 0.15s;
}
.search-input:focus { border-color: var(--border-focus); }

.search-btn {
  font-size: 14px;
  padding: 0 20px;
  height: 38px;
  border-radius: var(--radius);
  border: none;
  background: var(--green);
  color: #fff;
  cursor: pointer;
  flex-shrink: 0;
  transition: opacity 0.15s;
}
.search-btn:hover:not(:disabled) { opacity: 0.88; }
.search-btn:disabled { background: #555; cursor: not-allowed; }

/* ── Discover tab — search results ───────────────────────────────────────── */

.search-msg {
  text-align: center;
  color: var(--text-dim);
  font-size: 14px;
  padding: 32px 0;
}
.search-msg.error { color: var(--red); }

.result-row {
  display: flex;
  align-items: center;
  gap: 12px;
  background: var(--surface);
  border: 1px solid var(--border);
  border-radius: var(--radius-lg);
  padding: 12px 16px;
  margin-bottom: 8px;
}
.result-info { flex: 1; min-width: 0; }
.result-title {
  display: block;
  font-size: 14px;
  color: #ddd;
  white-space: nowrap;
  overflow: hidden;
  text-overflow: ellipsis;
}
.result-meta {
  display: block;
  font-size: 12px;
  color: var(--text-dim);
  margin-top: 2px;
}

.fetch-btn {
  font-size: 12px;
  padding: 5px 14px;
  border-radius: var(--radius-sm);
  border: 1px solid var(--green-border);
  background: transparent;
  color: var(--green);
  cursor: pointer;
  flex-shrink: 0;
  white-space: nowrap;
  transition: background 0.15s;
}
.fetch-btn:hover:not(:disabled) { background: var(--green-hover); }
.fetch-btn:disabled { border-color: #333; color: var(--text-dim); cursor: not-allowed; }

/* ── Discover tab — fetch log cards ───────────────────────────────────────── */

.fetch-list { margin-top: 32px; }

.fetch-card {
  background: var(--surface);
  border: 1px solid var(--border);
  border-radius: var(--radius-lg);
  padding: 14px 16px;
  margin-bottom: 10px;
  transition: border-color 0.3s;
}
/* Terminal states set by app.js when a fetch finishes. */
.fetch-card-ok { border-color: var(--green-bg); }
.fetch-card-err { border-color: var(--red-bg); }

.fetch-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  flex-wrap: wrap;
  gap: 6px;
  margin-bottom: 8px;
}
.fetch-title {
  font-size: 14px;
  font-weight: 600;
  color: var(--text-secondary);
  min-width: 0;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}
.fetch-status {
  font-size: 12px;
  color: var(--text-dim);
  flex-shrink: 0;
}
.fetch-status-ok { color: var(--green); }
.fetch-status-err { color: var(--red); }

.fetch-log {
  font-size: 12px;
  font-family: ui-monospace, "Cascadia Code", "Fira Mono", monospace;
  color: var(--text-muted);
  max-height: 260px;
  overflow-y: auto;
  scrollbar-width: thin;
  scrollbar-color: #333 transparent;
}
.log-line { padding: 1px 0; line-height: 1.5; }
.log-line-err { color: var(--red-text); }

/* ── Footer ───────────────────────────────────────────────────────────────── */

footer {
  position: fixed;
  bottom: 14px;
  left: 0;
  width: 100%;
  font-size: 12px;
  color: #444;
  text-align: center;
  pointer-events: none;
}

/* ── Responsive ───────────────────────────────────────────────────────────── */

@media (max-width: 600px) {
  body { padding: 32px 16px 72px; }

  .tabs { display: flex; width: 100%; }
  .tab-btn { flex: 1; padding: 0; min-height: 40px; }

  .search-form { flex-wrap: wrap; }
  .type-toggle { width: 100%; }
  .type-btn { flex: 1; min-height: 38px; }
  .search-btn { width: 100%; }

  .result-title { white-space: normal; }
}
|
||||||
Loading…
Add table
Add a link
Reference in a new issue