mirror of
https://github.com/gabehf/music-importer.git
synced 2026-04-22 11:31:52 -07:00
Compare commits
No commits in common. "main" and "v0.2.0" have entirely different histories.
12 changed files with 309 additions and 2528 deletions
63
CLAUDE.md
63
CLAUDE.md
|
|
@ -1,63 +0,0 @@
|
|||
# CLAUDE.md
|
||||
|
||||
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||
|
||||
## Commands
|
||||
|
||||
```bash
|
||||
# Build
|
||||
go build -o importer .
|
||||
|
||||
# Build with version baked in
|
||||
go build -ldflags="-X main.version=v1.0.0" -o importer .
|
||||
|
||||
# Run locally (requires IMPORT_DIR and LIBRARY_DIR env vars)
|
||||
IMPORT_DIR=/path/to/import LIBRARY_DIR=/path/to/library ./importer
|
||||
|
||||
# Build Docker image
|
||||
docker build -t music-importer .
|
||||
|
||||
# Build Docker image with version
|
||||
docker build --build-arg VERSION=v1.0.0 -t music-importer .
|
||||
```
|
||||
|
||||
There are no tests in this codebase.
|
||||
|
||||
## Architecture
|
||||
|
||||
This is a single-package Go web app (`package main`) that runs as a web server on port 8080. Users trigger an import via the web UI, which runs the import pipeline in a background goroutine.
|
||||
|
||||
**Pipeline flow** (`importer.go: RunImporter`):
|
||||
1. **Cluster** — loose audio files at the top of `IMPORT_DIR` are grouped into subdirectories by album tag (`files.go: cluster`)
|
||||
2. For each album directory:
|
||||
- **Clean tags** — removes COMMENT/DESCRIPTION tags via `metaflac` (`audio.go`)
|
||||
- **Tag metadata** — tries `beets` first; falls back to reading existing file tags, then MusicBrainz API (`metadata.go: getAlbumMetadata`)
|
||||
- **Lyrics** — fetches synced LRC lyrics from LRClib API; falls back to plain lyrics formatted as LRC (`lrc.go`)
|
||||
- **ReplayGain** — runs `rsgain easy` on the directory (`audio.go`)
|
||||
- **Cover art** — looks for existing image files, downloads from Cover Art Archive via MusicBrainz if missing, then embeds into tracks (`media.go`)
|
||||
- **Move** — moves tracks, .lrc files, and cover image into `LIBRARY_DIR/{Artist}/[{Date}] {Album} [{Quality}]/` (`files.go: moveToLibrary`)
|
||||
|
||||
**Key types** (`importer.go`):
|
||||
- `AlbumResult` — tracks per-step success/failure/skip for one album
|
||||
- `ImportSession` — holds all `AlbumResult`s for one run; stored in `lastSession` global
|
||||
- `MusicMetadata` — artist/album/title/date/quality used throughout the pipeline
|
||||
|
||||
**Web layer** (`main.go`):
|
||||
- `GET /` — renders `index.html.tmpl` with the last session's results
|
||||
- `POST /run` — starts `RunImporter()` in a goroutine; prevents concurrent runs via `importerMu` mutex
|
||||
|
||||
**External tool dependencies** (must be present in PATH at runtime):
|
||||
- `ffprobe` — reads audio tags and stream info
|
||||
- `beet` — metadata tagging via MusicBrainz (primary metadata source)
|
||||
- `rsgain` — ReplayGain calculation
|
||||
- `metaflac` — FLAC tag manipulation and cover embedding
|
||||
- `curl` — MusicBrainz API fallback queries
|
||||
|
||||
**Environment variables**:
|
||||
- `IMPORT_DIR` — source directory scanned for albums
|
||||
- `LIBRARY_DIR` — destination library root
|
||||
- `COPYMODE=true` — copies files instead of moving (still destructive on the destination)
|
||||
- `SLSKD_URL` — base URL of the slskd instance (e.g. `http://localhost:5030`)
|
||||
- `SLSKD_API_KEY` — slskd API key (sent as `X-API-Key` header)
|
||||
|
||||
**Releases**: Docker image `gabehf/music-importer` is built and pushed to Docker Hub via GitHub Actions on `v*` tags.
|
||||
547
discover.go
547
discover.go
|
|
@ -1,547 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
// ── MusicBrainz types ─────────────────────────────────────────────────────────

// mbArtistCredit is one entry of a release's "artist-credit" array in the
// MusicBrainz JSON API: the credited name plus the underlying artist record.
type mbArtistCredit struct {
	Name   string `json:"name"`
	Artist struct {
		ID   string `json:"id"`
		Name string `json:"name"`
	} `json:"artist"`
}

// mbMedia is one medium (disc) of a release. TrackCount counts tracks on
// this medium only; sum over Media for the release total.
type mbMedia struct {
	Format     string `json:"format"`
	TrackCount int    `json:"track-count"`
}

// mbRelease is the subset of a MusicBrainz release used for release
// selection (date/country/format/disambiguation scoring) and display.
type mbRelease struct {
	ID                 string `json:"id"`
	Title              string `json:"title"`
	Date               string `json:"date"` // "YYYY-MM-DD", may be partial or empty
	Country            string `json:"country"`
	Disambiguation     string `json:"disambiguation"` // empty for the canonical release
	TextRepresentation struct {
		Language string `json:"language"`
	} `json:"text-representation"`
	Media        []mbMedia        `json:"media"`
	ArtistCredit []mbArtistCredit `json:"artist-credit"`
	ReleaseGroup struct {
		PrimaryType string `json:"primary-type"`
	} `json:"release-group"`
}

// mbArtist is a MusicBrainz artist search result.
type mbArtist struct {
	ID             string `json:"id"`
	Name           string `json:"name"`
	Country        string `json:"country"`
	Disambiguation string `json:"disambiguation"`
}

// mbReleaseGroup is a MusicBrainz release group (the abstract "album"
// grouping all its releases/pressings).
type mbReleaseGroup struct {
	ID               string `json:"id"`
	Title            string `json:"title"`
	PrimaryType      string `json:"primary-type"` // e.g. "Album", "EP"
	FirstReleaseDate string `json:"first-release-date"`
}
|
||||
|
||||
// releaseTrackCount returns the total number of tracks across all media in a release.
|
||||
func releaseTrackCount(r mbRelease) int {
|
||||
total := 0
|
||||
for _, m := range r.Media {
|
||||
total += m.TrackCount
|
||||
}
|
||||
return total
|
||||
}
|
||||
|
||||
// getMBRelease fetches a single release by MBID (with media/track-count included).
|
||||
func getMBRelease(mbid string) (*mbRelease, error) {
|
||||
var r mbRelease
|
||||
err := mbGet(fmt.Sprintf("/ws/2/release/%s?fmt=json&inc=media", url.QueryEscape(mbid)), &r)
|
||||
return &r, err
|
||||
}
|
||||
|
||||
// mbGet performs a GET against the MusicBrainz web service at the given path
// (e.g. "/ws/2/release/...") and JSON-decodes the response body into out.
//
// A custom User-Agent is sent, as the MusicBrainz API rules require.
// Non-200 responses are reported as errors; out is only valid when the
// returned error is nil.
func mbGet(path string, out interface{}) error {
	req, err := http.NewRequest("GET", "https://musicbrainz.org"+path, nil)
	if err != nil {
		return err
	}
	req.Header.Set("User-Agent", "music-importer/1.0 (https://github.com/gabehf/music-importer)")

	// Bound the request so a stalled MusicBrainz connection cannot hang the
	// pipeline indefinitely (http.DefaultClient has no timeout). Transport is
	// left nil, so connections are still pooled via http.DefaultTransport.
	client := &http.Client{Timeout: 30 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("MusicBrainz returned %d", resp.StatusCode)
	}
	return json.NewDecoder(resp.Body).Decode(out)
}
|
||||
|
||||
func searchMBReleases(query string) ([]mbRelease, error) {
|
||||
var result struct {
|
||||
Releases []mbRelease `json:"releases"`
|
||||
}
|
||||
err := mbGet("/ws/2/release/?query="+url.QueryEscape(query)+"&fmt=json&limit=20&inc=media", &result)
|
||||
return result.Releases, err
|
||||
}
|
||||
|
||||
func searchMBArtists(query string) ([]mbArtist, error) {
|
||||
var result struct {
|
||||
Artists []mbArtist `json:"artists"`
|
||||
}
|
||||
err := mbGet("/ws/2/artist/?query="+url.QueryEscape(query)+"&fmt=json&limit=20", &result)
|
||||
return result.Artists, err
|
||||
}
|
||||
|
||||
// releaseFormatScore returns a preference score for a release's media format.
|
||||
// Higher is better. CD=2, Digital Media=1, anything else=0.
|
||||
func releaseFormatScore(r mbRelease) int {
|
||||
for _, m := range r.Media {
|
||||
switch m.Format {
|
||||
case "Digital Media":
|
||||
return 2
|
||||
case "CD":
|
||||
return 1
|
||||
}
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// releaseCountryScore returns a preference score for a release's country.
|
||||
// Higher is better. KR=3, JP=2, XW=1, anything else=0.
|
||||
func releaseCountryScore(r mbRelease) int {
|
||||
switch r.Country {
|
||||
case "XW":
|
||||
return 2
|
||||
case "KR":
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// timeStringIsBefore reports whether date string ts1 ("YYYY-MM-DD") is on or
// before ts2. Despite the name, equal dates also return true (<=), which
// pickBestRelease relies on so equal-dated candidates can still compete on
// format/country score.
//
// Returns an error when either string does not parse as YYYY-MM-DD (e.g.
// MusicBrainz partial dates like "2023", or an empty string).
func timeStringIsBefore(ts1, ts2 string) (bool, error) {
	// Go's reference layout for YYYY-MM-DD is "2006-01-02" (Jan 2, 2006).
	// The previous layout, "2006-02-01", swapped month and day, so any date
	// with a day greater than 12 failed to parse and the comparison errored.
	const datefmt = "2006-01-02"
	t1, err := time.Parse(datefmt, ts1)
	if err != nil {
		return false, err
	}
	t2, err := time.Parse(datefmt, ts2)
	if err != nil {
		return false, err
	}
	return !t1.After(t2), nil
}
|
||||
|
||||
// pickBestRelease selects the preferred release from a list.
//
// No disambiguation (the canonical release) is the primary criterion. Among
// releases tied on that, a candidate only displaces the current best when its
// date is not later than the best's (per timeStringIsBefore) AND it scores
// strictly better on format (Digital Media > CD > other, per
// releaseFormatScore) or, on a format tie, on country (XW > KR > other, per
// releaseCountryScore).
//
// NOTE(review): when timeStringIsBefore errors (partial/empty MusicBrainz
// dates), the candidate is silently skipped and the current best is kept —
// confirm that is the intended fallback.
func pickBestRelease(releases []mbRelease) *mbRelease {
	if len(releases) == 0 {
		return nil
	}
	best := &releases[0]
	for i := 1; i < len(releases); i++ {
		r := &releases[i]

		rNoDisamb := r.Disambiguation == ""
		bestNoDisamb := best.Disambiguation == ""

		// Prefer releases with no disambiguation — they are the canonical default.
		if rNoDisamb && !bestNoDisamb {
			best = r
			continue
		}
		if !rNoDisamb && bestNoDisamb {
			continue
		}

		// Both have the same disambiguation status; use date/format/country.
		if before, err := timeStringIsBefore(r.Date, best.Date); before && err == nil {
			rf, bf := releaseFormatScore(*r), releaseFormatScore(*best)
			if rf > bf || (rf == bf && releaseCountryScore(*r) > releaseCountryScore(*best)) {
				best = r
			}
		}
	}
	return best
}
|
||||
|
||||
// pickBestReleaseForGroup fetches all releases for a release group via the
|
||||
// MusicBrainz browse API (with media info) and returns the preferred release.
|
||||
// Returns nil on error or when the group has no releases.
|
||||
func pickBestReleaseForGroup(rgMBID string) *mbRelease {
|
||||
var result struct {
|
||||
Releases []mbRelease `json:"releases"`
|
||||
}
|
||||
path := fmt.Sprintf("/ws/2/release?release-group=%s&fmt=json&inc=media&limit=100", url.QueryEscape(rgMBID))
|
||||
if err := mbGet(path, &result); err != nil || len(result.Releases) == 0 {
|
||||
return nil
|
||||
}
|
||||
return pickBestRelease(result.Releases)
|
||||
}
|
||||
|
||||
// getMBArtistReleaseGroups returns all Album and EP release groups for an artist,
// paginating through the MusicBrainz browse API with the required 1 req/s rate limit.
//
// On a page-fetch error it returns the groups collected so far together with
// the error, so partial results are available to the caller.
func getMBArtistReleaseGroups(artistMBID string) ([]mbReleaseGroup, error) {
	const limit = 100
	var all []mbReleaseGroup

	for offset := 0; ; offset += limit {
		// %%7C is a literal %7C after Sprintf — an escaped '|', i.e.
		// "type=album|ep": MusicBrainz filters to albums and EPs server-side.
		path := fmt.Sprintf(
			"/ws/2/release-group?artist=%s&type=album%%7Cep&fmt=json&limit=%d&offset=%d",
			url.QueryEscape(artistMBID), limit, offset,
		)

		var result struct {
			ReleaseGroups []mbReleaseGroup `json:"release-groups"`
			Count         int              `json:"release-group-count"`
		}
		if err := mbGet(path, &result); err != nil {
			return all, err
		}

		// Re-filter on primary type client-side (case-insensitive) as a
		// defensive check on top of the server-side type filter.
		for _, rg := range result.ReleaseGroups {
			t := strings.ToLower(rg.PrimaryType)
			if t == "album" || t == "ep" {
				all = append(all, rg)
			}
		}

		// Count is the server's total; stop once this page reaches it.
		if offset+limit >= result.Count {
			break
		}
		time.Sleep(time.Second) // MusicBrainz rate limit
	}

	return all, nil
}
|
||||
|
||||
// fetchArtist fetches every Album and EP release group for an artist by running
// fetchRelease for each one sequentially, then registers each for monitoring.
//
// logf receives human-readable progress lines for the UI. Failures on
// individual albums are counted and logged but do not abort the run; an
// error is returned only when the discography lookup itself fails or finds
// nothing.
func fetchArtist(artistMBID, artistName string, logf func(string)) error {
	log.Printf("[discover] artist fetch started: %s (%s)", artistName, artistMBID)
	logf(fmt.Sprintf("Looking up discography for %s on MusicBrainz…", artistName))

	groups, err := getMBArtistReleaseGroups(artistMBID)
	if err != nil {
		return fmt.Errorf("MusicBrainz discography lookup failed: %w", err)
	}
	if len(groups) == 0 {
		return fmt.Errorf("no albums or EPs found for %s on MusicBrainz", artistName)
	}

	log.Printf("[discover] found %d release groups for %s", len(groups), artistName)
	logf(fmt.Sprintf("Found %d albums/EPs", len(groups)))

	failed := 0
	for i, rg := range groups {
		logf(fmt.Sprintf("[%d/%d] %s", i+1, len(groups), rg.Title))
		// Pick the best release for this group. beets --search-id requires a
		// release MBID; release group MBIDs are not accepted.
		time.Sleep(time.Second) // MusicBrainz rate limit
		rel := pickBestReleaseForGroup(rg.ID)
		releaseMBID := ""
		trackCount := 0
		if rel == nil {
			// Degrade gracefully: with an empty MBID, fetchRelease/beets fall
			// back to searching by artist/title.
			logf(fmt.Sprintf(" ↳ warning: could not resolve release for group %s, beets will search by name", rg.ID))
		} else {
			releaseMBID = rel.ID
			trackCount = releaseTrackCount(*rel)
			format := ""
			if len(rel.Media) > 0 {
				format = rel.Media[0].Format
			}
			logf(fmt.Sprintf(" ↳ selected release: %s [%s / %s / %d tracks]", releaseMBID, format, rel.Country, trackCount))
		}

		folder, err := fetchRelease(artistName, rg.Title, releaseMBID, trackCount, logf)
		if err != nil {
			log.Printf("[discover] fetch failed for %q by %s: %v", rg.Title, artistName, err)
			logf(fmt.Sprintf(" ↳ failed: %v", err))
			failed++
			continue
		}
		// Key the pending download by release group ID for dedup; beets uses releaseMBID.
		registerDownload(rg.ID, releaseMBID, artistName, rg.Title, trackCount, folder, nil)
		logf(fmt.Sprintf(" ↳ registered for import (release mbid: %s)", releaseMBID))
	}

	if failed > 0 {
		logf(fmt.Sprintf("Done — %d/%d queued, %d failed", len(groups)-failed, len(groups), failed))
	} else {
		logf(fmt.Sprintf("Done — all %d downloads queued, monitoring for import", len(groups)))
	}
	log.Printf("[discover] artist fetch complete: %s (%d/%d succeeded)", artistName, len(groups)-failed, len(groups))
	return nil
}
|
||||
|
||||
// ── Fetch state ───────────────────────────────────────────────────────────────

// fetchEntry tracks the live progress of one background fetch (a single
// release, or a whole-artist run). mu guards all of the exported fields;
// concurrent readers should use snapshot() rather than reading them directly.
type fetchEntry struct {
	mu      sync.Mutex
	ID      string   `json:"id"`
	Artist  string   `json:"artist"`
	Album   string   `json:"album"` // empty for artist-level fetches
	Log     []string `json:"log"`   // progress lines appended via appendLog
	Done    bool     `json:"done"`
	Success bool     `json:"success"`
	ErrMsg  string   `json:"error,omitempty"` // set only when Done && !Success
}

var (
	// fetchesMu guards fetchMap itself; per-entry state is guarded by each
	// entry's own mu.
	fetchesMu sync.Mutex
	// fetchMap holds every fetch started during this process, keyed by MBID.
	// Entries are never removed, so it grows for the process lifetime.
	fetchMap = make(map[string]*fetchEntry)
)
|
||||
|
||||
func newFetchEntry(id, artist, album string) *fetchEntry {
|
||||
e := &fetchEntry{ID: id, Artist: artist, Album: album}
|
||||
fetchesMu.Lock()
|
||||
fetchMap[id] = e
|
||||
fetchesMu.Unlock()
|
||||
return e
|
||||
}
|
||||
|
||||
func (e *fetchEntry) appendLog(msg string) {
|
||||
e.mu.Lock()
|
||||
e.Log = append(e.Log, msg)
|
||||
e.mu.Unlock()
|
||||
}
|
||||
|
||||
func (e *fetchEntry) finish(err error) {
|
||||
e.mu.Lock()
|
||||
e.Done = true
|
||||
if err != nil {
|
||||
e.ErrMsg = err.Error()
|
||||
} else {
|
||||
e.Success = true
|
||||
}
|
||||
e.mu.Unlock()
|
||||
}
|
||||
|
||||
func (e *fetchEntry) snapshot() fetchEntry {
|
||||
e.mu.Lock()
|
||||
defer e.mu.Unlock()
|
||||
cp := *e
|
||||
cp.Log = append([]string(nil), e.Log...)
|
||||
return cp
|
||||
}
|
||||
|
||||
// ── HTTP handlers ─────────────────────────────────────────────────────────────

// handleDiscoverSearch handles GET /discover/search?q=...&type=release|artist
// and writes the JSON-encoded MusicBrainz results. type defaults to "release";
// any unrecognized value also falls through to the release search.
func handleDiscoverSearch(w http.ResponseWriter, r *http.Request) {
	q := r.URL.Query().Get("q")
	if q == "" {
		http.Error(w, "missing q", http.StatusBadRequest)
		return
	}
	searchType := r.URL.Query().Get("type")
	if searchType == "" {
		searchType = "release"
	}
	log.Printf("[discover] search: type=%s q=%q", searchType, q)

	// Set up front; http.Error overrides it with text/plain on error paths.
	w.Header().Set("Content-Type", "application/json")

	switch searchType {
	case "artist":
		artists, err := searchMBArtists(q)
		if err != nil {
			log.Printf("[discover] artist search error: %v", err)
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		log.Printf("[discover] artist search returned %d results", len(artists))
		json.NewEncoder(w).Encode(artists)

	default: // "release"
		releases, err := searchMBReleases(q)
		if err != nil {
			log.Printf("[discover] release search error: %v", err)
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		log.Printf("[discover] release search returned %d results", len(releases))
		json.NewEncoder(w).Encode(releases)
	}
}
|
||||
|
||||
// handleDiscoverFetch handles POST /discover/fetch
// Body: {"id":"mbid","artist":"...","album":"..."}
//
// It starts fetchRelease for the given release MBID in a background goroutine
// and responds immediately with {"id": ...}; the frontend polls
// /discover/fetch/status?id=... for progress. If a fetch for the same ID is
// still running, its ID is returned without starting a duplicate; a finished
// entry is replaced by a fresh one (re-fetch).
func handleDiscoverFetch(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodPost {
		http.Error(w, "POST only", http.StatusMethodNotAllowed)
		return
	}

	var body struct {
		ID     string `json:"id"`
		Artist string `json:"artist"`
		Album  string `json:"album"`
	}
	if err := json.NewDecoder(r.Body).Decode(&body); err != nil || body.ID == "" || body.Artist == "" || body.Album == "" {
		http.Error(w, "id, artist and album are required", http.StatusBadRequest)
		return
	}

	// If a fetch for this ID is already in progress, return its ID without starting a new one.
	// NOTE(review): this check-then-create is not atomic — two simultaneous
	// POSTs for the same ID could both pass the check and start duplicate
	// fetches; confirm whether that matters for this deployment.
	fetchesMu.Lock()
	existing := fetchMap[body.ID]
	fetchesMu.Unlock()
	if existing != nil {
		existing.mu.Lock()
		done := existing.Done
		existing.mu.Unlock()
		if !done {
			w.Header().Set("Content-Type", "application/json")
			json.NewEncoder(w).Encode(map[string]string{"id": body.ID})
			return
		}
	}

	log.Printf("[discover] starting fetch: %q by %s (mbid: %s)", body.Album, body.Artist, body.ID)
	entry := newFetchEntry(body.ID, body.Artist, body.Album)

	// Look up the expected track count from MusicBrainz so the folder-selection
	// logic can prefer results that match the release we intend to import.
	// Best-effort: on failure we proceed with trackCount == 0.
	trackCount := 0
	if rel, err := getMBRelease(body.ID); err == nil {
		trackCount = releaseTrackCount(*rel)
		log.Printf("[discover] release %s has %d tracks", body.ID, trackCount)
	} else {
		log.Printf("[discover] could not fetch release track count for %s: %v", body.ID, err)
	}

	go func() {
		folder, err := fetchRelease(body.Artist, body.Album, body.ID, trackCount, entry.appendLog)
		if err != nil {
			log.Printf("[discover] fetch failed for %q by %s: %v", body.Album, body.Artist, err)
			entry.finish(err)
			return
		}
		log.Printf("[discover] fetch complete for %q by %s, registering for import", body.Album, body.Artist)
		registerDownload(body.ID, body.ID, body.Artist, body.Album, trackCount, folder, entry)
		// entry.finish is called by the monitor when import completes
	}()

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]string{"id": body.ID})
}
|
||||
|
||||
// handleDiscoverFetchArtist handles POST /discover/fetch/artist
// Body: {"id":"artist-mbid","name":"Artist Name"}
//
// It starts fetchArtist (full Album/EP discography fetch) in a background
// goroutine and responds immediately with {"id": ...}. If a run for the same
// artist MBID is still in progress, its ID is returned without starting a
// duplicate; a finished entry is replaced by a fresh run.
func handleDiscoverFetchArtist(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodPost {
		http.Error(w, "POST only", http.StatusMethodNotAllowed)
		return
	}

	var body struct {
		ID   string `json:"id"`
		Name string `json:"name"`
	}
	if err := json.NewDecoder(r.Body).Decode(&body); err != nil || body.ID == "" || body.Name == "" {
		http.Error(w, "id and name are required", http.StatusBadRequest)
		return
	}

	// Dedup against an in-flight run for the same artist.
	// NOTE(review): same non-atomic check-then-create window as
	// handleDiscoverFetch — two simultaneous POSTs could both start.
	fetchesMu.Lock()
	existing := fetchMap[body.ID]
	fetchesMu.Unlock()
	if existing != nil {
		existing.mu.Lock()
		done := existing.Done
		existing.mu.Unlock()
		if !done {
			w.Header().Set("Content-Type", "application/json")
			json.NewEncoder(w).Encode(map[string]string{"id": body.ID})
			return
		}
	}

	log.Printf("[discover] starting artist fetch: %s (%s)", body.Name, body.ID)
	// Album is empty for artist-level entries; the list endpoint uses that to
	// build the display title.
	entry := newFetchEntry(body.ID, body.Name, "")
	go func() {
		err := fetchArtist(body.ID, body.Name, entry.appendLog)
		if err != nil {
			log.Printf("[discover] artist fetch failed for %s: %v", body.Name, err)
		} else {
			log.Printf("[discover] artist fetch complete for %s", body.Name)
		}
		entry.finish(err)
	}()

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]string{"id": body.ID})
}
|
||||
|
||||
// handleDiscoverFetchStatus handles GET /discover/fetch/status?id=...
|
||||
func handleDiscoverFetchStatus(w http.ResponseWriter, r *http.Request) {
|
||||
id := r.URL.Query().Get("id")
|
||||
if id == "" {
|
||||
http.Error(w, "missing id", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
fetchesMu.Lock()
|
||||
entry := fetchMap[id]
|
||||
fetchesMu.Unlock()
|
||||
|
||||
if entry == nil {
|
||||
http.Error(w, "not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
snap := entry.snapshot()
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(snap)
|
||||
}
|
||||
|
||||
// fetchListItem is a summary of a fetch entry for the list endpoint.
type fetchListItem struct {
	ID      string `json:"id"`
	Title   string `json:"title"` // "Artist — Album", or just the artist name for artist-level fetches
	Done    bool   `json:"done"`
	Success bool   `json:"success"`
}
|
||||
|
||||
// handleDiscoverFetchList handles GET /discover/fetch/list
// Returns a summary of all known fetch entries so the frontend can discover
// entries created server-side (e.g. per-album entries from an artist fetch).
// Order of items is unspecified (Go map iteration is randomized).
func handleDiscoverFetchList(w http.ResponseWriter, r *http.Request) {
	fetchesMu.Lock()
	items := make([]fetchListItem, 0, len(fetchMap))
	for _, e := range fetchMap {
		// Per-entry lock is taken while holding fetchesMu; no visible code
		// path acquires the two locks in the opposite order, so this nesting
		// does not deadlock.
		e.mu.Lock()
		title := e.Artist
		if e.Album != "" {
			title = e.Artist + " \u2014 " + e.Album // \u2014 is an em dash separator
		}
		items = append(items, fetchListItem{
			ID:      e.ID,
			Title:   title,
			Done:    e.Done,
			Success: e.Success,
		})
		e.mu.Unlock()
	}
	fetchesMu.Unlock()

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(items)
}
|
||||
12
files.go
12
files.go
|
|
@ -9,9 +9,8 @@ import (
|
|||
"strings"
|
||||
)
|
||||
|
||||
// albumTargetDir returns the destination directory for an album without
|
||||
// creating it. Use this to check for an existing import before moving files.
|
||||
func albumTargetDir(libDir string, md *MusicMetadata) string {
|
||||
// moveToLibrary moves a file to {libDir}/{artist}/[{date}] {album} [{quality}]/filename.
|
||||
func moveToLibrary(libDir string, md *MusicMetadata, srcPath string) error {
|
||||
date := md.Date
|
||||
if date == "" {
|
||||
date = md.Year
|
||||
|
|
@ -20,12 +19,7 @@ func albumTargetDir(libDir string, md *MusicMetadata) string {
|
|||
if md.Quality != "" {
|
||||
albumDir += fmt.Sprintf(" [%s]", md.Quality)
|
||||
}
|
||||
return filepath.Join(libDir, sanitize(md.Artist), sanitize(albumDir))
|
||||
}
|
||||
|
||||
// moveToLibrary moves a file to {libDir}/{artist}/[{date}] {album} [{quality}]/filename.
|
||||
func moveToLibrary(libDir string, md *MusicMetadata, srcPath string) error {
|
||||
targetDir := albumTargetDir(libDir, md)
|
||||
targetDir := filepath.Join(libDir, sanitize(md.Artist), sanitize(albumDir))
|
||||
if err := os.MkdirAll(targetDir, 0755); err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
|||
64
importer.go
64
importer.go
|
|
@ -185,7 +185,7 @@ func RunImporter() {
|
|||
}
|
||||
|
||||
fmt.Println("→ Tagging album metadata:")
|
||||
md, src, err := getAlbumMetadata(albumPath, tracks[0], "")
|
||||
md, src, err := getAlbumMetadata(albumPath, tracks[0])
|
||||
result.TagMetadata.Err = err
|
||||
result.MetadataSource = src
|
||||
if err != nil {
|
||||
|
|
@ -213,15 +213,11 @@ func RunImporter() {
|
|||
|
||||
fmt.Println("→ Downloading cover art for album:", albumPath)
|
||||
if _, err := FindCoverImage(albumPath); err != nil {
|
||||
if err := DownloadCoverArt(albumPath, md, ""); err != nil {
|
||||
if err := DownloadCoverArt(albumPath, md); err != nil {
|
||||
fmt.Println("Cover art download failed:", err)
|
||||
}
|
||||
}
|
||||
|
||||
if err := NormalizeCoverArt(albumPath); err != nil {
|
||||
fmt.Println("Cover art normalization warning:", err)
|
||||
}
|
||||
|
||||
fmt.Println("→ Embedding cover art for album:", albumPath)
|
||||
result.CoverArt.Err = EmbedAlbumArtIntoFolder(albumPath)
|
||||
if coverImg, err := FindCoverImage(albumPath); err == nil {
|
||||
|
|
@ -237,39 +233,33 @@ func RunImporter() {
|
|||
continue
|
||||
}
|
||||
|
||||
targetDir := albumTargetDir(libraryDir, md)
|
||||
if _, err := os.Stat(targetDir); err == nil {
|
||||
fmt.Println("→ Album already exists in library, skipping move:", targetDir)
|
||||
result.Move.Skipped = true
|
||||
} else {
|
||||
fmt.Println("→ Moving tracks into library for album:", albumPath)
|
||||
for _, track := range tracks {
|
||||
if err := moveToLibrary(libraryDir, md, track); err != nil {
|
||||
fmt.Println("Failed to move track:", track, err)
|
||||
result.Move.Err = err // retains last error; all attempts are still made
|
||||
}
|
||||
fmt.Println("→ Moving tracks into library for album:", albumPath)
|
||||
for _, track := range tracks {
|
||||
if err := moveToLibrary(libraryDir, md, track); err != nil {
|
||||
fmt.Println("Failed to move track:", track, err)
|
||||
result.Move.Err = err // retains last error; all attempts are still made
|
||||
}
|
||||
|
||||
lyrics, _ := getLyricFiles(albumPath)
|
||||
|
||||
fmt.Println("→ Moving lyrics into library for album:", albumPath)
|
||||
for _, file := range lyrics {
|
||||
if err := moveToLibrary(libraryDir, md, file); err != nil {
|
||||
fmt.Println("Failed to move lyrics:", file, err)
|
||||
result.Move.Err = err
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Println("→ Moving album cover into library for album:", albumPath)
|
||||
if coverImg, err := FindCoverImage(albumPath); err == nil {
|
||||
if err := moveToLibrary(libraryDir, md, coverImg); err != nil {
|
||||
fmt.Println("Failed to cover image:", coverImg, err)
|
||||
result.Move.Err = err
|
||||
}
|
||||
}
|
||||
|
||||
os.Remove(albumPath)
|
||||
}
|
||||
|
||||
lyrics, _ := getLyricFiles(albumPath)
|
||||
|
||||
fmt.Println("→ Moving lyrics into library for album:", albumPath)
|
||||
for _, file := range lyrics {
|
||||
if err := moveToLibrary(libraryDir, md, file); err != nil {
|
||||
fmt.Println("Failed to move lyrics:", file, err)
|
||||
result.Move.Err = err
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Println("→ Moving album cover into library for album:", albumPath)
|
||||
if coverImg, err := FindCoverImage(albumPath); err == nil {
|
||||
if err := moveToLibrary(libraryDir, md, coverImg); err != nil {
|
||||
fmt.Println("Failed to cover image:", coverImg, err)
|
||||
result.Move.Err = err
|
||||
}
|
||||
}
|
||||
|
||||
os.Remove(albumPath)
|
||||
}
|
||||
|
||||
fmt.Println("\n=== Import Complete ===")
|
||||
|
|
|
|||
378
index.html.tmpl
378
index.html.tmpl
|
|
@ -1,53 +1,223 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<title>Music Importer</title>
|
||||
<link rel="stylesheet" href="/static/style.css?v={{.Version}}">
|
||||
<style>
|
||||
*, *::before, *::after { box-sizing: border-box; }
|
||||
|
||||
body {
|
||||
font-family: sans-serif;
|
||||
background: #111;
|
||||
color: #eee;
|
||||
text-align: center;
|
||||
padding: 60px 24px 80px;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
h1 { margin-bottom: 32px; }
|
||||
|
||||
button {
|
||||
font-size: 32px;
|
||||
padding: 20px 40px;
|
||||
border-radius: 10px;
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
background: #4CAF50;
|
||||
color: white;
|
||||
}
|
||||
button:disabled {
|
||||
background: #555;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
/* ── Last run summary ── */
|
||||
.session {
|
||||
margin: 48px auto 0;
|
||||
max-width: 820px;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.session-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: baseline;
|
||||
border-bottom: 1px solid #333;
|
||||
padding-bottom: 8px;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
.session-header h2 { margin: 0; font-size: 18px; color: #ccc; }
|
||||
.session-header .duration { font-size: 13px; color: #666; }
|
||||
|
||||
.album {
|
||||
background: #1a1a1a;
|
||||
border: 1px solid #2a2a2a;
|
||||
border-radius: 8px;
|
||||
padding: 16px 20px;
|
||||
margin-bottom: 12px;
|
||||
}
|
||||
.album-header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
.album-name {
|
||||
font-weight: bold;
|
||||
font-size: 15px;
|
||||
flex: 1;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
.badge {
|
||||
font-size: 11px;
|
||||
font-weight: bold;
|
||||
padding: 2px 8px;
|
||||
border-radius: 4px;
|
||||
white-space: nowrap;
|
||||
}
|
||||
.badge-ok { background: #1e4d2b; color: #4CAF50; }
|
||||
.badge-warn { background: #4d3a00; color: #f0a500; }
|
||||
.badge-fatal { background: #4d1a1a; color: #e05050; }
|
||||
|
||||
/* ── Metadata row ── */
|
||||
.metadata {
|
||||
display: flex;
|
||||
align-items: baseline;
|
||||
gap: 14px;
|
||||
flex-wrap: wrap;
|
||||
font-size: 12px;
|
||||
color: #777;
|
||||
margin-bottom: 12px;
|
||||
}
|
||||
.metadata-title {
|
||||
color: #aaa;
|
||||
font-size: 13px;
|
||||
}
|
||||
.metadata-pill {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
background: #222;
|
||||
border-radius: 4px;
|
||||
padding: 2px 7px;
|
||||
font-size: 11px;
|
||||
}
|
||||
.pill-label { color: #555; }
|
||||
.pill-beets { color: #7ec8e3; }
|
||||
.pill-musicbrainz { color: #c084fc; }
|
||||
.pill-file_tags { color: #f0a500; }
|
||||
.pill-unknown { color: #888; }
|
||||
|
||||
/* ── Rich info grid ── */
|
||||
.info-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fill, minmax(180px, 1fr));
|
||||
gap: 6px;
|
||||
margin-bottom: 12px;
|
||||
}
|
||||
.info-card {
|
||||
background: #222;
|
||||
border-radius: 6px;
|
||||
padding: 8px 12px;
|
||||
font-size: 12px;
|
||||
}
|
||||
.info-card-label {
|
||||
font-size: 10px;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.05em;
|
||||
color: #555;
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
.info-card-value {
|
||||
color: #ccc;
|
||||
font-size: 13px;
|
||||
font-weight: 600;
|
||||
}
|
||||
.info-card-sub {
|
||||
margin-top: 3px;
|
||||
color: #666;
|
||||
font-size: 11px;
|
||||
line-height: 1.4;
|
||||
}
|
||||
.info-ok { color: #4CAF50; }
|
||||
.info-warn { color: #f0a500; }
|
||||
.info-dim { color: #555; }
|
||||
|
||||
/* ── Pipeline steps ── */
|
||||
.steps-label {
|
||||
font-size: 10px;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.05em;
|
||||
color: #444;
|
||||
margin-bottom: 6px;
|
||||
}
|
||||
.steps {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fill, minmax(200px, 1fr));
|
||||
gap: 6px;
|
||||
}
|
||||
.step {
|
||||
font-size: 12px;
|
||||
padding: 5px 10px;
|
||||
border-radius: 5px;
|
||||
background: #222;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 2px;
|
||||
}
|
||||
.step-label { color: #888; }
|
||||
.step-ok { color: #4CAF50; }
|
||||
.step-warn { color: #f0a500; }
|
||||
.step-fatal { color: #e05050; }
|
||||
.step-err { font-size: 11px; color: #c0392b; margin-top: 2px; word-break: break-word; }
|
||||
|
||||
footer {
|
||||
position: fixed;
|
||||
bottom: 16px;
|
||||
width: 100%;
|
||||
font-size: 13px;
|
||||
color: #999;
|
||||
text-align: center;
|
||||
left: 0;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Music Importer</h1>
|
||||
|
||||
<nav class="tabs">
|
||||
<button class="tab-btn active" data-tab="import">Import</button>
|
||||
<button class="tab-btn" data-tab="discover">Discover</button>
|
||||
</nav>
|
||||
<form action="/run" method="POST">
|
||||
<button type="submit" {{if .Running}}disabled{{end}}>
|
||||
{{if .Running}}Importer Running...{{else}}Run Importer{{end}}
|
||||
</button>
|
||||
</form>
|
||||
|
||||
<!-- ── Import ─────────────────────────────────────────────────────────── -->
|
||||
<section id="tab-import" class="tab-pane active">
|
||||
<form action="/run" method="POST">
|
||||
<button type="submit" class="run-btn" {{if .Running}}disabled{{end}}>
|
||||
{{if .Running}}Importer Running…{{else}}Run Importer{{end}}
|
||||
</button>
|
||||
</form>
|
||||
{{with .Session}}
|
||||
<div class="session">
|
||||
<div class="session-header">
|
||||
<h2>Last Run — {{.StartedAt.Format "Jan 2, 2006 15:04:05"}}</h2>
|
||||
<span class="duration">{{duration .StartedAt .FinishedAt}}</span>
|
||||
</div>
|
||||
|
||||
{{with .Session}}
|
||||
<div class="content-box session">
|
||||
<div class="session-header">
|
||||
<h2>Last Run — {{.StartedAt.Format "Jan 2, 2006 15:04:05"}}</h2>
|
||||
<span class="duration">{{duration .StartedAt .FinishedAt}}</span>
|
||||
{{range .Albums}}{{$album := .}}
|
||||
<div class="album">
|
||||
<div class="album-header">
|
||||
<span class="album-name" title="{{.Path}}">{{.Name}}</span>
|
||||
{{if .Succeeded}}
|
||||
{{if .HasWarnings}}
|
||||
<span class="badge badge-warn">⚠ warnings</span>
|
||||
{{else}}
|
||||
<span class="badge badge-ok">✓ ok</span>
|
||||
{{end}}
|
||||
{{else}}
|
||||
<span class="badge badge-fatal">✗ failed at {{.FatalStep}}</span>
|
||||
{{end}}
|
||||
</div>
|
||||
|
||||
{{range .Albums}}{{$album := .}}
|
||||
<article class="album">
|
||||
<div class="album-header">
|
||||
<span class="album-name" title="{{.Path}}">{{.Name}}</span>
|
||||
{{if .Succeeded}}
|
||||
{{if .HasWarnings}}
|
||||
<span class="badge badge-warn">⚠ warnings</span>
|
||||
{{else}}
|
||||
<span class="badge badge-ok">✓ ok</span>
|
||||
{{end}}
|
||||
{{else}}
|
||||
<span class="badge badge-fatal">✗ failed at {{.FatalStep}}</span>
|
||||
{{end}}
|
||||
</div>
|
||||
|
||||
{{with .Metadata}}
|
||||
<div class="metadata">
|
||||
<span class="metadata-title">{{.Artist}} — {{.Album}}{{if .Year}} ({{.Year}}){{end}}</span>
|
||||
{{if $album.MetadataSource}}
|
||||
{{with .Metadata}}
|
||||
<div class="metadata">
|
||||
<span class="metadata-title">{{.Artist}} — {{.Album}}{{if .Year}} ({{.Year}}){{end}}</span>
|
||||
{{if $album.MetadataSource}}
|
||||
<span class="metadata-pill">
|
||||
<span class="pill-label">via</span>
|
||||
{{if eq (print $album.MetadataSource) "beets"}}
|
||||
|
|
@ -61,83 +231,67 @@
|
|||
{{end}}
|
||||
</span>
|
||||
{{end}}
|
||||
</div>
|
||||
{{end}}
|
||||
|
||||
<div class="info-grid">
|
||||
<div class="info-card">
|
||||
<div class="info-card-label">Tracks</div>
|
||||
<div class="info-card-value">{{.TrackCount}}</div>
|
||||
</div>
|
||||
|
||||
<div class="info-card">
|
||||
<div class="info-card-label">Lyrics</div>
|
||||
{{if eq .LyricsStats.Total 0}}
|
||||
<div class="info-card-value info-dim">n/a</div>
|
||||
{{else}}
|
||||
<div class="info-card-value {{if gt .LyricsStats.Downloaded 0}}info-ok{{else}}info-dim{{end}}">
|
||||
{{.LyricsStats.Downloaded}} / {{.LyricsStats.Total}}
|
||||
</div>
|
||||
<div class="info-card-sub">
|
||||
{{if gt .LyricsStats.Synced 0}}<span class="info-ok">{{.LyricsStats.Synced}} synced</span>{{end}}
|
||||
{{if and (gt .LyricsStats.Synced 0) (gt .LyricsStats.Plain 0)}} · {{end}}
|
||||
{{if gt .LyricsStats.Plain 0}}<span class="info-warn">{{.LyricsStats.Plain}} plain</span>{{end}}
|
||||
{{if gt .LyricsStats.AlreadyHad 0}}<span class="info-dim"> {{.LyricsStats.AlreadyHad}} existing</span>{{end}}
|
||||
{{if gt .LyricsStats.NotFound 0}}<span class="info-dim"> {{.LyricsStats.NotFound}} missing</span>{{end}}
|
||||
</div>
|
||||
{{end}}
|
||||
</div>
|
||||
|
||||
<div class="info-card">
|
||||
<div class="info-card-label">Cover Art</div>
|
||||
{{if .CoverArtStats.Found}}
|
||||
{{if .CoverArtStats.Embedded}}
|
||||
<div class="info-card-value info-ok">Embedded</div>
|
||||
<div class="info-card-sub info-dim">{{.CoverArtStats.Source}}</div>
|
||||
{{else}}
|
||||
<div class="info-card-value info-warn">Found, not embedded</div>
|
||||
<div class="info-card-sub info-dim">{{.CoverArtStats.Source}}</div>
|
||||
{{end}}
|
||||
{{else}}
|
||||
<div class="info-card-value info-dim">Not found</div>
|
||||
{{end}}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="steps-label">Pipeline</div>
|
||||
<div class="steps">
|
||||
{{stepCell "Clean Tags" .CleanTags ""}}
|
||||
{{stepCell "Metadata" .TagMetadata .FatalStep}}
|
||||
{{stepCell "Lyrics" .Lyrics ""}}
|
||||
{{stepCell "ReplayGain" .ReplayGain .FatalStep}}
|
||||
{{stepCell "Cover Art" .CoverArt .FatalStep}}
|
||||
{{stepCell "Move" .Move ""}}
|
||||
</div>
|
||||
</article>
|
||||
</div>
|
||||
{{end}}
|
||||
|
||||
{{/* ── Rich info cards ── */}}
|
||||
<div class="info-grid">
|
||||
{{/* Tracks */}}
|
||||
<div class="info-card">
|
||||
<div class="info-card-label">Tracks</div>
|
||||
<div class="info-card-value">{{.TrackCount}}</div>
|
||||
</div>
|
||||
|
||||
{{/* Lyrics */}}
|
||||
<div class="info-card">
|
||||
<div class="info-card-label">Lyrics</div>
|
||||
{{if eq .LyricsStats.Total 0}}
|
||||
<div class="info-card-value info-dim">n/a</div>
|
||||
{{else}}
|
||||
<div class="info-card-value {{if gt .LyricsStats.Downloaded 0}}info-ok{{else}}info-dim{{end}}">
|
||||
{{.LyricsStats.Downloaded}} / {{.LyricsStats.Total}}
|
||||
</div>
|
||||
<div class="info-card-sub">
|
||||
{{if gt .LyricsStats.Synced 0}}<span class="info-ok">{{.LyricsStats.Synced}} synced</span>{{end}}
|
||||
{{if and (gt .LyricsStats.Synced 0) (gt .LyricsStats.Plain 0)}} · {{end}}
|
||||
{{if gt .LyricsStats.Plain 0}}<span class="info-warn">{{.LyricsStats.Plain}} plain</span>{{end}}
|
||||
{{if gt .LyricsStats.AlreadyHad 0}}<span class="info-dim"> {{.LyricsStats.AlreadyHad}} existing</span>{{end}}
|
||||
{{if gt .LyricsStats.NotFound 0}}<span class="info-dim"> {{.LyricsStats.NotFound}} missing</span>{{end}}
|
||||
</div>
|
||||
{{end}}
|
||||
</div>
|
||||
|
||||
{{/* Cover art */}}
|
||||
<div class="info-card">
|
||||
<div class="info-card-label">Cover Art</div>
|
||||
{{if .CoverArtStats.Found}}
|
||||
{{if .CoverArtStats.Embedded}}
|
||||
<div class="info-card-value info-ok">Embedded</div>
|
||||
<div class="info-card-sub info-dim">{{.CoverArtStats.Source}}</div>
|
||||
{{else}}
|
||||
<div class="info-card-value info-warn">Found, not embedded</div>
|
||||
<div class="info-card-sub info-dim">{{.CoverArtStats.Source}}</div>
|
||||
{{end}}
|
||||
{{else}}
|
||||
<div class="info-card-value info-dim">Not found</div>
|
||||
{{end}}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="steps-label">Pipeline</div>
|
||||
<div class="steps">
|
||||
{{stepCell "Clean Tags" .CleanTags ""}}
|
||||
{{stepCell "Metadata" .TagMetadata .FatalStep}}
|
||||
{{stepCell "Lyrics" .Lyrics ""}}
|
||||
{{stepCell "ReplayGain" .ReplayGain .FatalStep}}
|
||||
{{stepCell "Cover Art" .CoverArt .FatalStep}}
|
||||
{{stepCell "Move" .Move ""}}
|
||||
</div>
|
||||
</div>
|
||||
{{end}}
|
||||
</section>
|
||||
|
||||
<!-- ── Discover ───────────────────────────────────────────────────────── -->
|
||||
<section id="tab-discover" class="tab-pane">
|
||||
<div class="content-box">
|
||||
<div class="search-form">
|
||||
<div class="type-toggle">
|
||||
<button class="type-btn active" data-type="release">Release</button>
|
||||
<button class="type-btn" data-type="artist">Artist</button>
|
||||
</div>
|
||||
<input id="search-q" class="search-input" type="search"
|
||||
placeholder="Search MusicBrainz…" autocomplete="off">
|
||||
<button id="search-btn" class="search-btn">Search</button>
|
||||
</div>
|
||||
<div id="search-results"></div>
|
||||
</div>
|
||||
<div class="content-box fetch-list" id="fetch-list"></div>
|
||||
</section>
|
||||
</div>
|
||||
{{end}}
|
||||
|
||||
<footer>{{.Version}}</footer>
|
||||
|
||||
<script src="/static/app.js?v={{.Version}}" defer></script>
|
||||
</body>
|
||||
</html>
|
||||
|
|
|
|||
10
main.go
10
main.go
|
|
@ -18,9 +18,6 @@ var importerRunning bool
|
|||
|
||||
//go:embed index.html.tmpl
|
||||
var tmplFS embed.FS
|
||||
|
||||
//go:embed static
|
||||
var staticFS embed.FS
|
||||
var tmpl = template.Must(
|
||||
template.New("index.html.tmpl").
|
||||
Funcs(template.FuncMap{
|
||||
|
|
@ -123,15 +120,8 @@ func handleRun(w http.ResponseWriter, r *http.Request) {
|
|||
|
||||
func main() {
|
||||
log.Printf("Music Importer %s starting on http://localhost:8080", version)
|
||||
startMonitor()
|
||||
http.Handle("/static/", http.FileServer(http.FS(staticFS)))
|
||||
http.HandleFunc("/", handleHome)
|
||||
http.HandleFunc("/run", handleRun)
|
||||
http.HandleFunc("/discover/search", handleDiscoverSearch)
|
||||
http.HandleFunc("/discover/fetch", handleDiscoverFetch)
|
||||
http.HandleFunc("/discover/fetch/artist", handleDiscoverFetchArtist)
|
||||
http.HandleFunc("/discover/fetch/status", handleDiscoverFetchStatus)
|
||||
http.HandleFunc("/discover/fetch/list", handleDiscoverFetchList)
|
||||
|
||||
log.Fatal(http.ListenAndServe(":8080", nil))
|
||||
}
|
||||
|
|
|
|||
71
media.go
71
media.go
|
|
@ -57,17 +57,14 @@ func EmbedAlbumArtIntoFolder(albumDir string) error {
|
|||
return err
|
||||
}
|
||||
|
||||
// DownloadCoverArt downloads the front cover from the Cover Art Archive and
|
||||
// saves it as cover.jpg/cover.png inside albumDir.
|
||||
// If mbid is non-empty it is used directly, bypassing the MusicBrainz search.
|
||||
// Otherwise, a search is performed using md's artist and album.
|
||||
func DownloadCoverArt(albumDir string, md *MusicMetadata, mbid string) error {
|
||||
if mbid == "" {
|
||||
var err error
|
||||
mbid, err = searchMusicBrainzRelease(md.Artist, md.Album)
|
||||
if err != nil {
|
||||
return fmt.Errorf("MusicBrainz release search failed: %w", err)
|
||||
}
|
||||
// DownloadCoverArt searches MusicBrainz for a release matching md's artist and
|
||||
// album, then downloads the front cover from the Cover Art Archive and saves it
|
||||
// as cover.jpg inside albumDir. Returns an error if no cover could be found or
|
||||
// downloaded.
|
||||
func DownloadCoverArt(albumDir string, md *MusicMetadata) error {
|
||||
mbid, err := searchMusicBrainzRelease(md.Artist, md.Album)
|
||||
if err != nil {
|
||||
return fmt.Errorf("MusicBrainz release search failed: %w", err)
|
||||
}
|
||||
|
||||
data, ext, err := fetchCoverArtArchiveFront(mbid)
|
||||
|
|
@ -157,58 +154,6 @@ func fetchCoverArtArchiveFront(mbid string) ([]byte, string, error) {
|
|||
return data, ext, nil
|
||||
}
|
||||
|
||||
const coverMaxBytes = 5 * 1024 * 1024 // 5 MB
|
||||
|
||||
// NormalizeCoverArt checks whether the cover image in albumDir is a large
|
||||
// non-JPEG (>5 MB). If so, it converts it to JPEG and resizes it to at most
|
||||
// 2000×2000 pixels using ffmpeg, replacing the original file with cover.jpg.
|
||||
// The function is a no-op when no cover is found, the cover is already JPEG,
|
||||
// or the file is ≤5 MB.
|
||||
func NormalizeCoverArt(albumDir string) error {
|
||||
cover, err := FindCoverImage(albumDir)
|
||||
if err != nil {
|
||||
return nil // no cover present, nothing to do
|
||||
}
|
||||
|
||||
// Already JPEG — no conversion needed regardless of size.
|
||||
ext := strings.ToLower(filepath.Ext(cover))
|
||||
if ext == ".jpg" || ext == ".jpeg" {
|
||||
return nil
|
||||
}
|
||||
|
||||
info, err := os.Stat(cover)
|
||||
if err != nil {
|
||||
return fmt.Errorf("stat cover: %w", err)
|
||||
}
|
||||
if info.Size() <= coverMaxBytes {
|
||||
return nil // small enough, leave as-is
|
||||
}
|
||||
|
||||
dest := filepath.Join(albumDir, "cover.jpg")
|
||||
fmt.Printf("→ Cover art is %.1f MB %s; converting to JPEG (max 2000×2000)…\n",
|
||||
float64(info.Size())/(1024*1024), strings.ToUpper(strings.TrimPrefix(ext, ".")))
|
||||
|
||||
// scale=2000:2000:force_original_aspect_ratio=decrease fits the image within
|
||||
// 2000×2000 while preserving aspect ratio, and never upscales smaller images.
|
||||
cmd := exec.Command("ffmpeg", "-y", "-i", cover,
|
||||
"-vf", "scale=2000:2000:force_original_aspect_ratio=decrease",
|
||||
"-q:v", "2",
|
||||
dest,
|
||||
)
|
||||
if out, err := cmd.CombinedOutput(); err != nil {
|
||||
return fmt.Errorf("ffmpeg cover conversion failed: %w\n%s", err, out)
|
||||
}
|
||||
|
||||
if cover != dest {
|
||||
if err := os.Remove(cover); err != nil {
|
||||
fmt.Println("Warning: could not remove original cover:", err)
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Println("→ Converted cover art to JPEG:", filepath.Base(dest))
|
||||
return nil
|
||||
}
|
||||
|
||||
// -------------------------
|
||||
// Find cover image
|
||||
// -------------------------
|
||||
|
|
|
|||
30
metadata.go
30
metadata.go
|
|
@ -206,11 +206,7 @@ func snapMP3Bitrate(bpsStr string) int {
|
|||
// A temp log file is passed to beets via -l so that skipped albums
|
||||
// (which exit 0 but produce a "skip" log entry) are detected and
|
||||
// returned as errors, triggering the MusicBrainz fallback.
|
||||
// If mbid is non-empty it is passed as --search-id to pin beets to a specific
|
||||
// MusicBrainz release. In that case, quiet mode is skipped and newlines are
|
||||
// piped to stdin so beets auto-accepts the pinned release regardless of
|
||||
// confidence score.
|
||||
func tagWithBeets(path, mbid string) error {
|
||||
func tagWithBeets(path string) error {
|
||||
fmt.Println("→ Tagging with beets:", path)
|
||||
|
||||
logFile, err := os.CreateTemp("", "beets-log-*.txt")
|
||||
|
|
@ -221,23 +217,8 @@ func tagWithBeets(path, mbid string) error {
|
|||
logFile.Close()
|
||||
defer os.Remove(logPath)
|
||||
|
||||
args := []string{"import", "-C", "-l", logPath}
|
||||
if mbid != "" {
|
||||
// Drop -q so beets doesn't skip on low confidence. Pipe newlines to
|
||||
// auto-accept the interactive prompt for the MBID-pinned release.
|
||||
args = append(args, "--search-id", mbid, path)
|
||||
cmd := exec.Command("beet", args...)
|
||||
cmd.Stdout = os.Stdout
|
||||
cmd.Stderr = os.Stderr
|
||||
cmd.Stdin = strings.NewReader(strings.Repeat("A\n", 20))
|
||||
if err := cmd.Run(); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
args = append(args, "-q", path)
|
||||
if err := runCmd("beet", args...); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := runCmd("beet", "import", "-Cq", "-l", logPath, path); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Even on exit 0, beets may have skipped the album in quiet mode.
|
||||
|
|
@ -330,11 +311,10 @@ func fetchMusicBrainzInfo(filename string) (*MusicMetadata, error) {
|
|||
|
||||
// getAlbumMetadata attempts beets tagging on the album directory, reads tags
|
||||
// back from the first track, and falls back to MusicBrainz if tags are missing.
|
||||
// If mbid is non-empty it is forwarded to beets as --search-id.
|
||||
func getAlbumMetadata(albumPath, trackPath, mbid string) (*MusicMetadata, MetadataSource, error) {
|
||||
func getAlbumMetadata(albumPath, trackPath string) (*MusicMetadata, MetadataSource, error) {
|
||||
fmt.Println("→ Tagging track with beets:", trackPath)
|
||||
|
||||
beetsErr := tagWithBeets(albumPath, mbid)
|
||||
beetsErr := tagWithBeets(albumPath)
|
||||
if beetsErr != nil {
|
||||
fmt.Println("Beets tagging failed; fallback to manual MusicBrainz lookup:", beetsErr)
|
||||
}
|
||||
|
|
|
|||
304
monitor.go
304
monitor.go
|
|
@ -1,304 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
// pendingDownload tracks a queued slskd download that should be auto-imported
|
||||
// once all files have transferred successfully.
|
||||
type pendingDownload struct {
|
||||
ID string // dedup key (release MBID for single fetches; release group MBID for artist fetches)
|
||||
BeetsMBID string // release MBID passed to beets --search-id (may differ from ID)
|
||||
Artist string
|
||||
Album string
|
||||
Username string // slskd peer username
|
||||
Dir string // remote directory path on the peer
|
||||
Files []slskdFile // files that were queued for download
|
||||
Entry *fetchEntry // fetch card to update with import progress
|
||||
TrackCount int // expected number of audio tracks (0 = unknown, skip check)
|
||||
}
|
||||
|
||||
var (
|
||||
pendingMu sync.Mutex
|
||||
pendingDownloads = make(map[string]*pendingDownload) // keyed by MBID
|
||||
)
|
||||
|
||||
// registerDownload records a queued slskd download for monitoring and eventual
|
||||
// auto-import. id is used as the dedup key; beetsMBID is the release MBID
|
||||
// forwarded to beets --search-id (may be empty or differ from id).
|
||||
// trackCount is the expected number of audio tracks from MusicBrainz; 0 means
|
||||
// unknown and the sanity check will be skipped at import time.
|
||||
// If entry is nil a new fetchEntry is created so the frontend can discover it
|
||||
// via /discover/fetch/list.
|
||||
func registerDownload(id, beetsMBID, artist, album string, trackCount int, folder *albumFolder, entry *fetchEntry) {
|
||||
pd := &pendingDownload{
|
||||
ID: id,
|
||||
BeetsMBID: beetsMBID,
|
||||
Artist: artist,
|
||||
Album: album,
|
||||
Username: folder.Username,
|
||||
Dir: folder.Dir,
|
||||
Files: folder.Files,
|
||||
Entry: entry,
|
||||
TrackCount: trackCount,
|
||||
}
|
||||
|
||||
if entry == nil {
|
||||
e := newFetchEntry(id, artist, album)
|
||||
e.appendLog(fmt.Sprintf("Queued %d files from %s — waiting for download",
|
||||
len(folder.Files), folder.Username))
|
||||
pd.Entry = e
|
||||
}
|
||||
|
||||
pendingMu.Lock()
|
||||
pendingDownloads[id] = pd
|
||||
pendingMu.Unlock()
|
||||
|
||||
log.Printf("[monitor] registered: %q by %s (id: %s, beets mbid: %s, peer: %s, %d files, expected tracks: %d)",
|
||||
album, artist, id, beetsMBID, folder.Username, len(folder.Files), trackCount)
|
||||
}
|
||||
|
||||
// startMonitor launches a background goroutine that periodically checks whether
|
||||
// pending downloads have completed and triggers import when they have.
|
||||
func startMonitor() {
|
||||
go func() {
|
||||
for {
|
||||
time.Sleep(15 * time.Second)
|
||||
checkPendingDownloads()
|
||||
}
|
||||
}()
|
||||
log.Println("[monitor] started")
|
||||
}
|
||||
|
||||
// checkPendingDownloads polls slskd transfer state for every pending download
|
||||
// and kicks off importPendingRelease for any that are fully complete.
|
||||
func checkPendingDownloads() {
|
||||
pendingMu.Lock()
|
||||
if len(pendingDownloads) == 0 {
|
||||
pendingMu.Unlock()
|
||||
return
|
||||
}
|
||||
snapshot := make(map[string]*pendingDownload, len(pendingDownloads))
|
||||
for k, v := range pendingDownloads {
|
||||
snapshot[k] = v
|
||||
}
|
||||
pendingMu.Unlock()
|
||||
|
||||
log.Printf("[monitor] checking %d pending download(s)", len(snapshot))
|
||||
|
||||
// Group by username to minimise API calls.
|
||||
byUser := make(map[string][]*pendingDownload)
|
||||
for _, pd := range snapshot {
|
||||
byUser[pd.Username] = append(byUser[pd.Username], pd)
|
||||
}
|
||||
|
||||
for username, pds := range byUser {
|
||||
dirs, err := getSlskdTransfers(username)
|
||||
if err != nil {
|
||||
log.Printf("[monitor] failed to get transfers for %s: %v", username, err)
|
||||
continue
|
||||
}
|
||||
|
||||
// Index transfer dirs by normalised path.
|
||||
transfersByDir := make(map[string][]slskdTransferFile, len(dirs))
|
||||
for _, d := range dirs {
|
||||
norm := strings.ReplaceAll(d.Directory, "\\", "/")
|
||||
transfersByDir[norm] = d.Files
|
||||
}
|
||||
|
||||
for _, pd := range pds {
|
||||
normDir := strings.ReplaceAll(pd.Dir, "\\", "/")
|
||||
files, ok := transfersByDir[normDir]
|
||||
if !ok {
|
||||
log.Printf("[monitor] transfer dir not found yet for %q (peer: %s)", pd.Dir, username)
|
||||
continue
|
||||
}
|
||||
|
||||
if !allFilesCompleted(files) {
|
||||
log.Printf("[monitor] %q by %s: download still in progress", pd.Album, pd.Artist)
|
||||
continue
|
||||
}
|
||||
|
||||
localDir := localDirForDownload(pd, files)
|
||||
if localDir == "" {
|
||||
log.Printf("[monitor] cannot determine local dir for %q by %s", pd.Album, pd.Artist)
|
||||
pd.Entry.appendLog("Error: could not determine local download path from transfer info")
|
||||
continue
|
||||
}
|
||||
|
||||
log.Printf("[monitor] download complete: %q by %s → %s", pd.Album, pd.Artist, localDir)
|
||||
|
||||
// Remove from pending before starting import to avoid double-import.
|
||||
pendingMu.Lock()
|
||||
delete(pendingDownloads, pd.ID)
|
||||
pendingMu.Unlock()
|
||||
|
||||
go importPendingRelease(pd, localDir)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// allFilesCompleted reports whether every file in a transfer directory has
|
||||
// reached a terminal Completed state. Returns false if files is empty.
|
||||
func allFilesCompleted(files []slskdTransferFile) bool {
|
||||
if len(files) == 0 {
|
||||
return false
|
||||
}
|
||||
for _, f := range files {
|
||||
if !strings.Contains(f.State, "Completed") {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// localDirForDownload resolves the local filesystem path for a completed download.
|
||||
//
|
||||
// Strategy 1 — localFilename from transfer metadata: slskd sets this field to
|
||||
// the absolute path of the downloaded file. Works when paths are consistent
|
||||
// across containers (same volume mount point).
|
||||
//
|
||||
// Strategy 2 — SLSKD_DOWNLOAD_DIR reconstruction: slskd stores files under
|
||||
// {downloadDir}/{username}/{sanitized_remote_dir}/. Used when localFilename is
|
||||
// empty or when SLSKD_DOWNLOAD_DIR is explicitly set to override.
|
||||
func localDirForDownload(pd *pendingDownload, files []slskdTransferFile) string {
|
||||
// Strategy 1: use localFilename from transfer response.
|
||||
for _, f := range files {
|
||||
if f.LocalFilename != "" {
|
||||
dir := filepath.Dir(f.LocalFilename)
|
||||
log.Printf("[monitor] local dir from localFilename: %s", dir)
|
||||
return dir
|
||||
}
|
||||
}
|
||||
|
||||
// Strategy 2: reconstruct from SLSKD_DOWNLOAD_DIR.
|
||||
// slskd places downloaded files directly into {downloadDir}/{album_folder_name}/,
|
||||
// where the folder name is the last path component of the remote directory.
|
||||
dlDir := os.Getenv("SLSKD_DOWNLOAD_DIR")
|
||||
if dlDir == "" {
|
||||
log.Printf("[monitor] localFilename empty and SLSKD_DOWNLOAD_DIR not set — cannot determine local dir for %q", pd.Album)
|
||||
return ""
|
||||
}
|
||||
|
||||
dir := filepath.Join(dlDir, filepath.Base(filepath.FromSlash(pd.Dir)))
|
||||
log.Printf("[monitor] local dir reconstructed from SLSKD_DOWNLOAD_DIR: %s", dir)
|
||||
return dir
|
||||
}
|
||||
|
||||
// importPendingRelease runs the full import pipeline on a completed download.
|
||||
// It mirrors RunImporter's per-album logic but uses the MBID for beets tagging.
|
||||
func importPendingRelease(pd *pendingDownload, localDir string) {
|
||||
entry := pd.Entry
|
||||
logf := func(msg string) {
|
||||
entry.appendLog("[import] " + msg)
|
||||
log.Printf("[monitor/import %s] %s", pd.ID, msg)
|
||||
}
|
||||
|
||||
logf(fmt.Sprintf("Starting import from %s", localDir))
|
||||
|
||||
libraryDir := os.Getenv("LIBRARY_DIR")
|
||||
if libraryDir == "" {
|
||||
entry.finish(fmt.Errorf("LIBRARY_DIR is not set"))
|
||||
return
|
||||
}
|
||||
|
||||
tracks, err := getAudioFiles(localDir)
|
||||
if err != nil {
|
||||
entry.finish(fmt.Errorf("scanning audio files: %w", err))
|
||||
return
|
||||
}
|
||||
if len(tracks) == 0 {
|
||||
entry.finish(fmt.Errorf("no audio files found in %s", localDir))
|
||||
return
|
||||
}
|
||||
logf(fmt.Sprintf("Found %d tracks", len(tracks)))
|
||||
|
||||
if pd.TrackCount > 0 && len(tracks) != pd.TrackCount {
|
||||
entry.finish(fmt.Errorf(
|
||||
"track count mismatch: downloaded %d tracks but release expects %d — aborting to avoid importing wrong edition",
|
||||
len(tracks), pd.TrackCount,
|
||||
))
|
||||
return
|
||||
}
|
||||
|
||||
if err := cleanAlbumTags(localDir); err != nil {
|
||||
logf(fmt.Sprintf("Clean tags warning: %v", err))
|
||||
}
|
||||
|
||||
md, src, err := getAlbumMetadata(localDir, tracks[0], pd.BeetsMBID)
|
||||
if err != nil {
|
||||
entry.finish(fmt.Errorf("metadata failed: %w", err))
|
||||
return
|
||||
}
|
||||
logf(fmt.Sprintf("Tagged via %s: %s — %s", src, md.Artist, md.Album))
|
||||
|
||||
if _, err := DownloadAlbumLyrics(localDir); err != nil {
|
||||
logf(fmt.Sprintf("Lyrics warning: %v", err))
|
||||
}
|
||||
|
||||
if err := applyReplayGain(localDir); err != nil {
|
||||
entry.finish(fmt.Errorf("ReplayGain failed: %w", err))
|
||||
return
|
||||
}
|
||||
logf("ReplayGain applied")
|
||||
|
||||
if _, err := FindCoverImage(localDir); err != nil {
|
||||
if err := DownloadCoverArt(localDir, md, pd.BeetsMBID); err != nil {
|
||||
logf(fmt.Sprintf("Cover art download warning: %v", err))
|
||||
}
|
||||
}
|
||||
|
||||
if err := NormalizeCoverArt(localDir); err != nil {
|
||||
logf(fmt.Sprintf("Cover art normalization warning: %v", err))
|
||||
}
|
||||
|
||||
if err := EmbedAlbumArtIntoFolder(localDir); err != nil {
|
||||
entry.finish(fmt.Errorf("cover embed failed: %w", err))
|
||||
return
|
||||
}
|
||||
logf("Cover art embedded")
|
||||
|
||||
targetDir := albumTargetDir(libraryDir, md)
|
||||
if _, err := os.Stat(targetDir); err == nil {
|
||||
logf(fmt.Sprintf("Album already exists in library, skipping move: %s", targetDir))
|
||||
entry.finish(nil)
|
||||
return
|
||||
}
|
||||
|
||||
var moveErr error
|
||||
for _, track := range tracks {
|
||||
if err := moveToLibrary(libraryDir, md, track); err != nil {
|
||||
logf(fmt.Sprintf("Move warning: %v", err))
|
||||
moveErr = err
|
||||
}
|
||||
}
|
||||
|
||||
lyrics, _ := getLyricFiles(localDir)
|
||||
for _, file := range lyrics {
|
||||
if err := moveToLibrary(libraryDir, md, file); err != nil {
|
||||
logf(fmt.Sprintf("Move lyrics warning: %v", err))
|
||||
}
|
||||
}
|
||||
|
||||
if coverImg, err := FindCoverImage(localDir); err == nil {
|
||||
if err := moveToLibrary(libraryDir, md, coverImg); err != nil {
|
||||
logf(fmt.Sprintf("Move cover warning: %v", err))
|
||||
}
|
||||
}
|
||||
|
||||
os.Remove(localDir)
|
||||
|
||||
if moveErr != nil {
|
||||
entry.finish(fmt.Errorf("import completed with move errors: %w", moveErr))
|
||||
return
|
||||
}
|
||||
|
||||
logf("Import complete")
|
||||
entry.finish(nil)
|
||||
}
|
||||
460
slskd.go
460
slskd.go
|
|
@ -1,460 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"path"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// slskdAttr is a Soulseek file attribute (bitrate, sample rate, bit depth, etc.).
|
||||
// Attribute types: 0 = bitrate (kbps), 1 = duration (s), 2 = VBR flag,
|
||||
//
|
||||
// 4 = sample rate (Hz), 5 = bit depth.
|
||||
type slskdAttr struct {
|
||||
Type int `json:"type"`
|
||||
Value int `json:"value"`
|
||||
}
|
||||
|
||||
// slskdFile is a single file in a slskd search response.
|
||||
type slskdFile struct {
|
||||
Filename string `json:"filename"`
|
||||
Size int64 `json:"size"`
|
||||
Extension string `json:"extension"`
|
||||
Attributes []slskdAttr `json:"attributes"`
|
||||
}
|
||||
|
||||
// slskdPeerResponse is one peer's response to a search.
|
||||
type slskdPeerResponse struct {
|
||||
Username string `json:"username"`
|
||||
Files []slskdFile `json:"files"`
|
||||
}
|
||||
|
||||
// slskdSearch is the search-state object returned by GET /api/v0/searches/{id}.
|
||||
// File responses are not included here; fetch them from /searches/{id}/responses.
|
||||
type slskdSearch struct {
|
||||
ID string `json:"id"`
|
||||
State string `json:"state"`
|
||||
}
|
||||
|
||||
// Quality tiers; higher value = more preferred.
|
||||
const (
|
||||
qualityUnknown = 0
|
||||
qualityMP3Any = 1
|
||||
qualityMP3_320 = 2
|
||||
qualityFLACOther = 3 // FLAC at unspecified or uncommon specs
|
||||
qualityFLAC24_96 = 4
|
||||
qualityFLAC16_44 = 5 // most preferred: standard CD-quality lossless
|
||||
)
|
||||
|
||||
// albumFolder groups audio files from the same peer and directory path.
|
||||
type albumFolder struct {
|
||||
Username string
|
||||
Dir string
|
||||
Files []slskdFile
|
||||
Quality int
|
||||
}
|
||||
|
||||
func slskdBaseURL() string {
|
||||
return strings.TrimRight(os.Getenv("SLSKD_URL"), "/")
|
||||
}
|
||||
|
||||
// slskdDo performs an authenticated HTTP request against the slskd API.
|
||||
func slskdDo(method, endpoint string, body interface{}) (*http.Response, error) {
|
||||
base := slskdBaseURL()
|
||||
if base == "" {
|
||||
return nil, fmt.Errorf("SLSKD_URL is not configured")
|
||||
}
|
||||
|
||||
var br io.Reader
|
||||
if body != nil {
|
||||
data, err := json.Marshal(body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
br = bytes.NewReader(data)
|
||||
}
|
||||
|
||||
req, err := http.NewRequest(method, base+endpoint, br)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if key := os.Getenv("SLSKD_API_KEY"); key != "" {
|
||||
req.Header.Set("X-API-Key", key)
|
||||
}
|
||||
if body != nil {
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
}
|
||||
|
||||
return http.DefaultClient.Do(req)
|
||||
}
|
||||
|
||||
// createSlskdSearch starts a new slskd search and returns its ID.
|
||||
func createSlskdSearch(searchText string) (string, error) {
|
||||
payload := map[string]interface{}{
|
||||
"searchText": searchText,
|
||||
"fileLimit": 10000,
|
||||
"filterResponses": true,
|
||||
"maximumPeerQueueLength": 1000,
|
||||
"minimumPeerUploadSpeed": 0,
|
||||
"responseLimit": 100,
|
||||
"timeout": 15000,
|
||||
}
|
||||
|
||||
resp, err := slskdDo("POST", "/api/v0/searches", payload)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusCreated && resp.StatusCode != http.StatusOK {
|
||||
b, _ := io.ReadAll(resp.Body)
|
||||
return "", fmt.Errorf("slskd search failed (%d): %s", resp.StatusCode, strings.TrimSpace(string(b)))
|
||||
}
|
||||
|
||||
var s slskdSearch
|
||||
if err := json.NewDecoder(resp.Body).Decode(&s); err != nil {
|
||||
return "", err
|
||||
}
|
||||
return s.ID, nil
|
||||
}
|
||||
|
||||
// slskdSearchIsTerminal reports whether a slskd SearchStates string has reached
|
||||
// a terminal state. slskd serialises its [Flags] enum as a comma-separated list
|
||||
// (e.g. "Completed, TimedOut"), so we check for containment rather than equality.
|
||||
func slskdSearchIsTerminal(state string) bool {
|
||||
for _, term := range []string{"Completed", "TimedOut", "Errored", "Cancelled"} {
|
||||
if strings.Contains(state, term) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// pollSlskdSearch waits up to 30 s for a search to reach a terminal state,
|
||||
// then returns the responses from the dedicated /responses sub-endpoint.
|
||||
// Each poll check-in is reported via logf.
|
||||
func pollSlskdSearch(id string, logf func(string)) ([]slskdPeerResponse, error) {
|
||||
deadline := time.Now().Add(60 * time.Second)
|
||||
for {
|
||||
resp, err := slskdDo("GET", "/api/v0/searches/"+id, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var s slskdSearch
|
||||
err = json.NewDecoder(resp.Body).Decode(&s)
|
||||
resp.Body.Close()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
logf(fmt.Sprintf("Search state: %s", s.State))
|
||||
|
||||
if slskdSearchIsTerminal(s.State) {
|
||||
return fetchSlskdResponses(id, logf)
|
||||
}
|
||||
|
||||
if time.Now().After(deadline) {
|
||||
logf("Poll deadline reached, fetching current results")
|
||||
return fetchSlskdResponses(id, logf)
|
||||
}
|
||||
time.Sleep(2 * time.Second)
|
||||
}
|
||||
}
|
||||
|
||||
// fetchSlskdResponses fetches file responses from the dedicated sub-endpoint.
|
||||
// The main GET /searches/{id} endpoint only returns metadata; responses live at
|
||||
// /searches/{id}/responses.
|
||||
func fetchSlskdResponses(id string, logf func(string)) ([]slskdPeerResponse, error) {
|
||||
resp, err := slskdDo("GET", "/api/v0/searches/"+id+"/responses", nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
b, _ := io.ReadAll(resp.Body)
|
||||
return nil, fmt.Errorf("fetching responses failed (%d): %s", resp.StatusCode, strings.TrimSpace(string(b)))
|
||||
}
|
||||
|
||||
var responses []slskdPeerResponse
|
||||
if err := json.NewDecoder(resp.Body).Decode(&responses); err != nil {
|
||||
return nil, fmt.Errorf("decoding responses: %w", err)
|
||||
}
|
||||
logf(fmt.Sprintf("Fetched %d peer responses", len(responses)))
|
||||
return responses, nil
|
||||
}
|
||||
|
||||
// deleteSlskdSearch removes a search from slskd (best-effort cleanup).
|
||||
func deleteSlskdSearch(id string) {
|
||||
resp, err := slskdDo("DELETE", "/api/v0/searches/"+id, nil)
|
||||
if err == nil {
|
||||
resp.Body.Close()
|
||||
}
|
||||
}
|
||||
|
||||
// fileDir returns the directory portion of a Soulseek filename,
|
||||
// normalising backslashes to forward slashes first.
|
||||
func fileDir(filename string) string {
|
||||
return path.Dir(strings.ReplaceAll(filename, "\\", "/"))
|
||||
}
|
||||
|
||||
// normaliseExt returns a lower-case extension that always starts with ".".
|
||||
func normaliseExt(raw string) string {
|
||||
ext := strings.ToLower(raw)
|
||||
if ext != "" && !strings.HasPrefix(ext, ".") {
|
||||
ext = "." + ext
|
||||
}
|
||||
return ext
|
||||
}
|
||||
|
||||
// fileQuality scores a single file by the preferred quality tier.
|
||||
func fileQuality(f slskdFile) int {
|
||||
ext := normaliseExt(f.Extension)
|
||||
if ext == "." || ext == "" {
|
||||
ext = strings.ToLower(path.Ext(strings.ReplaceAll(f.Filename, "\\", "/")))
|
||||
}
|
||||
|
||||
switch ext {
|
||||
case ".flac":
|
||||
var depth, rate int
|
||||
for _, a := range f.Attributes {
|
||||
switch a.Type {
|
||||
case 4:
|
||||
rate = a.Value
|
||||
case 5:
|
||||
depth = a.Value
|
||||
}
|
||||
}
|
||||
if depth == 16 && rate == 44100 {
|
||||
return qualityFLAC16_44
|
||||
}
|
||||
if depth == 24 && rate == 96000 {
|
||||
return qualityFLAC24_96
|
||||
}
|
||||
return qualityFLACOther
|
||||
|
||||
case ".mp3":
|
||||
for _, a := range f.Attributes {
|
||||
if a.Type == 0 && a.Value >= 315 {
|
||||
return qualityMP3_320
|
||||
}
|
||||
}
|
||||
return qualityMP3Any
|
||||
}
|
||||
|
||||
return qualityUnknown
|
||||
}
|
||||
|
||||
// groupAlbumFolders groups audio files by (username, directory) and scores each group.
|
||||
func groupAlbumFolders(responses []slskdPeerResponse) []albumFolder {
|
||||
type key struct{ user, dir string }
|
||||
m := make(map[key]*albumFolder)
|
||||
|
||||
for _, r := range responses {
|
||||
for _, f := range r.Files {
|
||||
ext := normaliseExt(f.Extension)
|
||||
if ext == "." || ext == "" {
|
||||
ext = strings.ToLower(path.Ext(strings.ReplaceAll(f.Filename, "\\", "/")))
|
||||
}
|
||||
if ext != ".flac" && ext != ".mp3" {
|
||||
continue
|
||||
}
|
||||
|
||||
k := key{r.Username, fileDir(f.Filename)}
|
||||
if m[k] == nil {
|
||||
m[k] = &albumFolder{Username: r.Username, Dir: k.dir}
|
||||
}
|
||||
m[k].Files = append(m[k].Files, f)
|
||||
if q := fileQuality(f); q > m[k].Quality {
|
||||
m[k].Quality = q
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
out := make([]albumFolder, 0, len(m))
|
||||
for _, af := range m {
|
||||
out = append(out, *af)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// bestAlbumFolder picks the highest-quality folder; file count breaks ties.
|
||||
func bestAlbumFolder(folders []albumFolder) *albumFolder {
|
||||
if len(folders) == 0 {
|
||||
return nil
|
||||
}
|
||||
best := &folders[0]
|
||||
for i := 1; i < len(folders); i++ {
|
||||
a := &folders[i]
|
||||
if a.Quality > best.Quality || (a.Quality == best.Quality && len(a.Files) > len(best.Files)) {
|
||||
best = a
|
||||
}
|
||||
}
|
||||
return best
|
||||
}
|
||||
|
||||
// queueSlskdDownload sends a batch download request to slskd for all files in folder.
|
||||
func queueSlskdDownload(folder *albumFolder) error {
|
||||
type dlFile struct {
|
||||
Filename string `json:"filename"`
|
||||
Size int64 `json:"size"`
|
||||
}
|
||||
files := make([]dlFile, len(folder.Files))
|
||||
for i, f := range folder.Files {
|
||||
files[i] = dlFile{Filename: f.Filename, Size: f.Size}
|
||||
}
|
||||
|
||||
resp, err := slskdDo("POST", "/api/v0/transfers/downloads/"+folder.Username, files)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusCreated && resp.StatusCode != http.StatusOK {
|
||||
b, _ := io.ReadAll(resp.Body)
|
||||
return fmt.Errorf("slskd download request failed (%d): %s", resp.StatusCode, strings.TrimSpace(string(b)))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// qualityLabel returns a human-readable label for a quality tier constant.
|
||||
func qualityLabel(q int) string {
|
||||
switch q {
|
||||
case qualityFLAC16_44:
|
||||
return "FLAC 16bit/44.1kHz"
|
||||
case qualityFLAC24_96:
|
||||
return "FLAC 24bit/96kHz"
|
||||
case qualityFLACOther:
|
||||
return "FLAC"
|
||||
case qualityMP3_320:
|
||||
return "MP3 320kbps"
|
||||
case qualityMP3Any:
|
||||
return "MP3"
|
||||
default:
|
||||
return "unknown"
|
||||
}
|
||||
}
|
||||
|
||||
// slskdTransferFile is one file entry in a slskd transfers response.
type slskdTransferFile struct {
	Filename      string `json:"filename"`      // peer-side path (the one originally queued for download)
	LocalFilename string `json:"localFilename"` // presumably slskd's on-disk path once written — TODO confirm
	State         string `json:"state"`         // slskd transfer state string; exact values not visible here
	Size          int64  `json:"size"`          // size in bytes — assumed; confirm against slskd API
}
|
||||
|
||||
// slskdTransferDir groups transfer files by remote directory.
type slskdTransferDir struct {
	Directory string              `json:"directory"` // remote directory path as reported by slskd
	Files     []slskdTransferFile `json:"files"`     // transfers belonging to that directory
}
|
||||
|
||||
// slskdUserTransfers is the object returned by GET /api/v0/transfers/downloads/{username}.
// Only the directory groupings are consumed; any other response fields are ignored.
type slskdUserTransfers struct {
	Directories []slskdTransferDir `json:"directories"`
}
|
||||
|
||||
// getSlskdTransfers returns all active/pending download transfer directories for a peer.
|
||||
func getSlskdTransfers(username string) ([]slskdTransferDir, error) {
|
||||
resp, err := slskdDo("GET", "/api/v0/transfers/downloads/"+username, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
b, _ := io.ReadAll(resp.Body)
|
||||
return nil, fmt.Errorf("slskd transfers (%d): %s", resp.StatusCode, strings.TrimSpace(string(b)))
|
||||
}
|
||||
|
||||
var ut slskdUserTransfers
|
||||
if err := json.NewDecoder(resp.Body).Decode(&ut); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return ut.Directories, nil
|
||||
}
|
||||
|
||||
// fetchRelease searches slskd for an album, queues the best-quality match for
// download, and returns the chosen folder so the caller can monitor completion.
// mbid, if non-empty, will be stored for use during import (beets --search-id).
// trackCount, if > 0, filters candidate folders to those whose audio file count
// matches the expected number of tracks on the release, so alternate editions
// with different track counts are not accidentally selected.
func fetchRelease(artist, album, mbid string, trackCount int, logf func(string)) (*albumFolder, error) {
	// NOTE(review): mbid is never referenced in this function body — presumably
	// the caller is what stores it for beets --search-id; confirm, or the doc
	// comment above overstates what happens here.
	query := artist + " " + album
	log.Printf("[discover] fetch started: %q by %s (expected tracks: %d)", album, artist, trackCount)
	logf("Starting fetch for: " + query)

	logf("Creating slskd search…")
	id, err := createSlskdSearch(query)
	if err != nil {
		return nil, fmt.Errorf("create search: %w", err)
	}
	log.Printf("[discover] slskd search created: %s", id)
	logf(fmt.Sprintf("Search created (id: %s)", id))
	// Clean up the server-side search record on every exit path below.
	defer func() {
		log.Printf("[discover] deleting slskd search %s", id)
		deleteSlskdSearch(id)
	}()

	logf("Polling for results…")
	responses, err := pollSlskdSearch(id, logf)
	if err != nil {
		return nil, fmt.Errorf("poll search: %w", err)
	}
	log.Printf("[discover] search %s finished: %d peer responses", id, len(responses))
	logf(fmt.Sprintf("Search finished: %d peer responses received", len(responses)))

	logf("Grouping results into album folders…")
	folders := groupAlbumFolders(responses)
	log.Printf("[discover] grouped into %d candidate album folders", len(folders))
	logf(fmt.Sprintf("Found %d candidate album folders", len(folders)))

	if len(folders) == 0 {
		return nil, fmt.Errorf("no audio files found for %q by %s", album, artist)
	}

	// When we know the expected track count, prefer folders that match exactly
	// so we don't accidentally grab a bonus-track edition or a different version
	// that won't align with the release MBID we pass to beets.
	candidates := folders
	if trackCount > 0 {
		var matched []albumFolder
		for _, f := range folders {
			if len(f.Files) == trackCount {
				matched = append(matched, f)
			}
		}
		if len(matched) > 0 {
			log.Printf("[discover] %d/%d folders match expected track count (%d)", len(matched), len(folders), trackCount)
			logf(fmt.Sprintf("Filtered to %d/%d folders matching expected track count (%d)",
				len(matched), len(folders), trackCount))
			candidates = matched
		} else {
			// Fall back to all folders rather than failing: a near-miss edition
			// is still better than nothing.
			log.Printf("[discover] no folders matched expected track count (%d); using best available", trackCount)
			logf(fmt.Sprintf("Warning: no folders matched expected track count (%d); using best available", trackCount))
		}
	}

	// candidates is non-empty here, so bestAlbumFolder cannot return nil.
	best := bestAlbumFolder(candidates)
	log.Printf("[discover] selected folder: %s from %s (%s, %d files)",
		best.Dir, best.Username, qualityLabel(best.Quality), len(best.Files))
	logf(fmt.Sprintf("Selected folder: %s", best.Dir))
	logf(fmt.Sprintf(" Peer: %s | Quality: %s | Files: %d",
		best.Username, qualityLabel(best.Quality), len(best.Files)))

	logf(fmt.Sprintf("Queuing %d files for download…", len(best.Files)))
	if err := queueSlskdDownload(best); err != nil {
		return nil, fmt.Errorf("queue download: %w", err)
	}
	log.Printf("[discover] download queued: %d files from %s", len(best.Files), best.Username)
	logf("Download queued — waiting for completion before import")
	return best, nil
}
|
||||
313
static/app.js
313
static/app.js
|
|
@ -1,313 +0,0 @@
|
|||
"use strict";

// IDs of fetch cards we've already created, so we don't duplicate them.
// Shared between user-initiated fetches and server-discovered ones
// (see pollFetchList) so each fetch id gets exactly one card.
const knownFetchIds = new Set();

// Wire up all interactive behaviour once the DOM is ready.
document.addEventListener("DOMContentLoaded", () => {
  initTabs();
  initSearch();
  initFetchList();
});
|
||||
|
||||
// ── Tabs ───────────────────────────────────────────────────────────────────────

// Delegate tab-bar clicks to a single listener; any click that lands on a
// .tab-btn switches to that button's tab.
function initTabs() {
  const tabBar = document.querySelector(".tabs");
  tabBar.addEventListener("click", (event) => {
    const clicked = event.target.closest(".tab-btn");
    if (clicked) showTab(clicked.dataset.tab);
  });
}
|
||||
|
||||
// Activate the pane and button for the named tab, deactivating all others.
function showTab(name) {
  const deactivate = (selector) =>
    document
      .querySelectorAll(selector)
      .forEach((el) => el.classList.remove("active"));

  deactivate(".tab-pane");
  deactivate(".tab-btn");

  document.getElementById("tab-" + name).classList.add("active");
  document
    .querySelector(`.tab-btn[data-tab="${name}"]`)
    .classList.add("active");
}
|
||||
|
||||
// ── Search ─────────────────────────────────────────────────────────────────────

// Current search mode: "release" or "artist".
let searchType = "release";

function initSearch() {
  // Toggle between release / artist search modes.
  document.querySelector(".type-toggle").addEventListener("click", (event) => {
    const toggled = event.target.closest(".type-btn");
    if (toggled) setSearchType(toggled.dataset.type);
  });

  const searchBtn = document.getElementById("search-btn");
  const searchInput = document.getElementById("search-q");
  searchBtn.addEventListener("click", doSearch);
  searchInput.addEventListener("keydown", (event) => {
    if (event.key === "Enter") doSearch();
  });

  // Event delegation for dynamically rendered result buttons.
  document.getElementById("search-results").addEventListener("click", (event) => {
    const btn = event.target.closest(".fetch-btn");
    if (!btn || btn.disabled) return;
    const handler =
      btn.dataset.fetchType === "artist" ? startArtistFetch : startReleaseFetch;
    handler(btn);
  });
}
|
||||
|
||||
// Record the new search mode and reflect it on the toggle buttons.
function setSearchType(type) {
  searchType = type;
  for (const button of document.querySelectorAll(".type-btn")) {
    button.classList.toggle("active", button.dataset.type === type);
  }
}
|
||||
|
||||
// Run a MusicBrainz search via the backend and render the results.
// The search button is disabled while the request is in flight.
async function doSearch() {
  const q = document.getElementById("search-q").value.trim();
  if (!q) return;

  const btn = document.getElementById("search-btn");
  const resultsEl = document.getElementById("search-results");

  btn.disabled = true;
  btn.textContent = "Searching\u2026";
  resultsEl.innerHTML = '<p class="search-msg">Searching MusicBrainz\u2026</p>';

  try {
    const r = await fetch(
      `/discover/search?q=${encodeURIComponent(q)}&type=${searchType}`,
    );
    if (!r.ok) throw new Error((await r.text()) || r.statusText);
    renderResults(await r.json());
  } catch (err) {
    resultsEl.innerHTML = `<p class="search-msg error">Error: ${esc(err.message)}</p>`;
  } finally {
    btn.disabled = false;
    btn.textContent = "Search";
  }
}
|
||||
|
||||
// ── Results rendering ──────────────────────────────────────────────────────────

// Render the search response into #search-results, using the renderer that
// matches the current search mode.
function renderResults(data) {
  const el = document.getElementById("search-results");
  if (!data || data.length === 0) {
    el.innerHTML = '<p class="search-msg">No results found.</p>';
    return;
  }
  el.innerHTML = data
    .map(searchType === "artist" ? renderArtist : renderRelease)
    .join("");
}
|
||||
|
||||
// Render one MusicBrainz release search result as an HTML row.
// r is a release object from the MusicBrainz search API — assumed shape
// (artist-credit, release-group, media, text-representation); confirm
// against the backend's search response.
function renderRelease(r) {
  // Join all artist credits (names + join phrases) into a single string.
  const credits = r["artist-credit"] ?? [];
  const artist =
    credits.map((c) => c.name || c.artist?.name || "").join("") ||
    "Unknown Artist";
  const year = r.date?.substring(0, 4) ?? "";
  const type = r["release-group"]?.["primary-type"] ?? "";
  const country = r.country ?? "";
  // De-duplicated media formats, e.g. "CD+Digital Media".
  const formats = [
    ...new Set((r.media ?? []).map((m) => m.format).filter(Boolean)),
  ].join("+");
  const lang = r["text-representation"]?.language ?? "";
  const meta = [year, type, formats, country, lang]
    .filter(Boolean)
    .join(" \u00b7 ");
  const dis = r.disambiguation ? ` (${esc(r.disambiguation)})` : "";
  // Cover Art Archive thumbnail; hidden via onerror when no art exists.
  const coverUrl = `https://coverartarchive.org/release/${r.id}/front-250`;

  return `
    <div class="result-row">
      <img class="result-cover" src="${coverUrl}" onerror="this.style.display='none'" loading="lazy" alt="">
      <div class="result-info">
        <span class="result-title">${esc(artist)} \u2014 ${esc(r.title)}<span class="result-dis">${dis}</span></span>
        ${meta ? `<span class="result-meta">${esc(meta)}</span>` : ""}
      </div>
      <button class="fetch-btn"
              data-fetch-type="release"
              data-id="${esc(r.id)}"
              data-artist="${esc(artist)}"
              data-album="${esc(r.title)}">Fetch</button>
    </div>`;
}
|
||||
|
||||
// Render one MusicBrainz artist search result as an HTML row with a
// "Fetch All" (full discography) button.
function renderArtist(a) {
  const dis = a.disambiguation ? ` (${esc(a.disambiguation)})` : "";
  return `
    <div class="result-row">
      <div class="result-info">
        <span class="result-title">${esc(a.name)}${dis}</span>
        ${a.country ? `<span class="result-meta">${esc(a.country)}</span>` : ""}
      </div>
      <button class="fetch-btn"
              data-fetch-type="artist"
              data-id="${esc(a.id)}"
              data-name="${esc(a.name)}">Fetch All</button>
    </div>`;
}
|
||||
|
||||
// ── Fetch operations ───────────────────────────────────────────────────────────

// Kick off a single-release fetch on the backend, then show a progress card
// and start polling it. On failure the button is re-enabled and an error
// card is shown.
async function startReleaseFetch(btn) {
  const { id, artist, album } = btn.dataset;
  btn.disabled = true;
  btn.textContent = "Fetching\u2026";

  try {
    const r = await fetch("/discover/fetch", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ id, artist, album }),
    });
    if (!r.ok) throw new Error((await r.text()) || r.statusText);
    await r.json();
    addFetchCard(id, `${artist} \u2014 ${album}`);
    pollFetch(id);
  } catch (err) {
    btn.disabled = false;
    btn.textContent = "Fetch";
    showFetchError(err.message);
  }
}
|
||||
|
||||
// Kick off a full-discography fetch for an artist, then show a progress card
// and start polling it. Mirrors startReleaseFetch but hits the artist endpoint.
async function startArtistFetch(btn) {
  const { id, name } = btn.dataset;
  btn.disabled = true;
  btn.textContent = "Fetching\u2026";

  try {
    const r = await fetch("/discover/fetch/artist", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ id, name }),
    });
    if (!r.ok) throw new Error((await r.text()) || r.statusText);
    await r.json();
    addFetchCard(id, `${name} \u2014 full discography`);
    pollFetch(id);
  } catch (err) {
    btn.disabled = false;
    btn.textContent = "Fetch All";
    showFetchError(err.message);
  }
}
|
||||
|
||||
// ── Fetch cards ────────────────────────────────────────────────────────────────

// Create a progress card for fetch `id` (titled `title`) at the top of the
// fetch list, and remember the id so pollFetchList won't duplicate it.
function addFetchCard(id, title) {
  knownFetchIds.add(id);
  const list = document.getElementById("fetch-list");
  const card = document.createElement("div");
  card.className = "fetch-card";
  card.id = `fetch-${id}`;
  card.innerHTML = `
    <div class="fetch-header">
      <span class="fetch-title">${esc(title)}</span>
      <span class="fetch-status" id="fstatus-${id}">In progress\u2026</span>
    </div>
    <div class="fetch-log" id="flog-${id}"></div>`;
  list.prepend(card);
}
|
||||
|
||||
// Poll the status of fetch `id` and update its card: refresh the log pane,
// and when the fetch finishes mark the card ok/failed. Reschedules itself
// every 2 s while running, and retries after 3 s on network errors.
function pollFetch(id) {
  fetch(`/discover/fetch/status?id=${encodeURIComponent(id)}`)
    .then((r) => r.json())
    .then((data) => {
      const logEl = document.getElementById(`flog-${id}`);
      const statusEl = document.getElementById(`fstatus-${id}`);
      const card = document.getElementById(`fetch-${id}`);

      // Replace the whole log each poll; the server returns the full log.
      if (logEl && data.log) {
        logEl.innerHTML = data.log
          .map((l) => `<div class="log-line">${esc(l)}</div>`)
          .join("");
        logEl.scrollTop = logEl.scrollHeight;
      }

      if (data.done) {
        if (data.success) {
          statusEl?.setAttribute("class", "fetch-status fetch-status-ok");
          if (statusEl) statusEl.textContent = "\u2713 done";
          card?.classList.add("fetch-card-ok");
        } else {
          statusEl?.setAttribute("class", "fetch-status fetch-status-err");
          if (statusEl) statusEl.textContent = "\u2717 failed";
          card?.classList.add("fetch-card-err");
          // Append the server-supplied error as a final log line.
          if (data.error && logEl) {
            logEl.innerHTML += `<div class="log-line log-line-err">${esc(data.error)}</div>`;
            logEl.scrollTop = logEl.scrollHeight;
          }
        }
      } else {
        setTimeout(() => pollFetch(id), 2000);
      }
    })
    .catch(() => setTimeout(() => pollFetch(id), 3000));
}
|
||||
|
||||
// ── Fetch list polling ─────────────────────────────────────────────────────────

// Polls /discover/fetch/list every 5 s to discover server-created fetch entries
// (e.g. per-album cards spawned during an artist fetch) and create cards for them.
function initFetchList() {
  pollFetchList();
}
|
||||
|
||||
// Fetch the server-side list of fetch operations and create a card for any
// id we haven't seen yet; still-running ones also get a status poller.
// Always reschedules itself after 5 s, even on errors.
function pollFetchList() {
  fetch("/discover/fetch/list")
    .then((r) => (r.ok ? r.json() : null))
    .then((items) => {
      if (!items) return;
      for (const item of items) {
        if (!knownFetchIds.has(item.id)) {
          knownFetchIds.add(item.id);
          addFetchCard(item.id, item.title);
          if (!item.done) pollFetch(item.id);
        }
      }
    })
    .catch(() => {})
    .finally(() => setTimeout(pollFetchList, 5000));
}
|
||||
|
||||
// ── Utilities ──────────────────────────────────────────────────────────────────

// Prepend a standalone error card (not tied to a fetch id) to the fetch list.
function showFetchError(msg) {
  const list = document.getElementById("fetch-list");
  const el = document.createElement("div");
  el.className = "fetch-card fetch-card-err";
  el.innerHTML = `<div class="fetch-header">
      <span class="fetch-title">Fetch failed</span>
      <span class="fetch-status fetch-status-err">\u2717 error</span>
    </div>
    <div class="fetch-log"><div class="log-line log-line-err">${esc(msg)}</div></div>`;
  list.prepend(el);
}
|
||||
|
||||
// esc HTML-escapes a value for safe interpolation into innerHTML templates.
// null/undefined coerce to "". NOTE: the previously displayed version replaced
// each character with itself (the HTML entities had been unescaped by the
// rendering pipeline), making esc a no-op and every innerHTML interpolation
// an XSS vector — the entity references below restore real escaping.
// Ampersand must be replaced first so entities themselves aren't double-escaped.
function esc(s) {
  return String(s ?? "")
    .replace(/&/g, "&amp;")
    .replace(/</g, "&lt;")
    .replace(/>/g, "&gt;")
    .replace(/"/g, "&quot;");
}
|
||||
585
static/style.css
585
static/style.css
|
|
@ -1,585 +0,0 @@
|
|||
/* ── Custom properties ──────────────────────────────────────────────────────
   Dark-theme design tokens. All colours/radii/sizing below reference these,
   so theme tweaks should happen here rather than in individual rules. */

:root {
  --bg: #111;
  --surface: #1a1a1a;
  --surface-hi: #222;
  --border: #2a2a2a;
  --border-focus: #555;

  --text: #eee;
  --text-secondary: #aaa;
  --text-muted: #777;
  --text-dim: #555;

  --green: #4caf50;
  --green-bg: #1e4d2b;
  --green-hover: #1e3d1e;
  --green-border: #3a7a3a;
  --amber: #f0a500;
  --amber-bg: #4d3a00;
  --red: #e05050;
  --red-bg: #4d1a1a;
  --red-text: #c0392b;

  --pill-beets: #7ec8e3;
  --pill-mb: #c084fc;
  --pill-tags: #f0a500;

  --radius-lg: 8px;
  --radius: 6px;
  --radius-sm: 5px;
  --radius-xs: 4px;

  --max-w: 860px;
  --pad-x: 24px;
}

/* ── Reset & base ─────────────────────────────────────────────────────────── */

*,
*::before,
*::after {
  box-sizing: border-box;
}

body {
  font-family:
    system-ui,
    -apple-system,
    sans-serif;
  background: var(--bg);
  color: var(--text);
  margin: 0;
  padding: 48px var(--pad-x) 80px;
  text-align: center;
}

h1 {
  margin: 0 0 24px;
  font-size: clamp(20px, 4vw, 28px);
}

/* ── Tabs ─────────────────────────────────────────────────────────────────── */

.tabs {
  display: inline-flex;
  gap: 4px;
  background: var(--surface);
  border: 1px solid var(--border);
  border-radius: var(--radius-lg);
  padding: 4px;
  margin-bottom: 36px;
}

.tab-btn {
  font-size: 14px;
  min-height: 36px;
  padding: 0 24px;
  border-radius: var(--radius);
  border: none;
  cursor: pointer;
  background: transparent;
  color: var(--text-muted);
  transition:
    background 0.15s,
    color 0.15s;
}
.tab-btn.active {
  background: var(--surface-hi);
  color: var(--text);
}

.tab-pane {
  display: none;
}
.tab-pane.active {
  display: block;
}

/* ── Shared card / content container ─────────────────────────────────────── */

.content-box {
  max-width: var(--max-w);
  margin: 0 auto;
  text-align: left;
}

/* ── Import tab — run button ─────────────────────────────────────────────── */

.run-btn {
  font-size: clamp(18px, 4vw, 28px);
  padding: 18px 40px;
  border-radius: 10px;
  border: none;
  cursor: pointer;
  background: var(--green);
  color: #fff;
  transition: opacity 0.15s;
}
.run-btn:hover:not(:disabled) {
  opacity: 0.88;
}
.run-btn:disabled {
  background: #555;
  cursor: not-allowed;
}

/* ── Import tab — session summary ────────────────────────────────────────── */

.session {
  margin-top: 48px;
}

.session-header {
  display: flex;
  justify-content: space-between;
  align-items: baseline;
  flex-wrap: wrap;
  gap: 4px;
  border-bottom: 1px solid #333;
  padding-bottom: 8px;
  margin-bottom: 20px;
}
.session-header h2 {
  margin: 0;
  font-size: 16px;
  color: var(--text-secondary);
}
.session-header .duration {
  font-size: 13px;
  color: var(--text-dim);
}

/* ── Album card ───────────────────────────────────────────────────────────── */

.album {
  background: var(--surface);
  border: 1px solid var(--border);
  border-radius: var(--radius-lg);
  padding: 16px 20px;
  margin-bottom: 12px;
}

.album-header {
  display: flex;
  align-items: center;
  gap: 10px;
  margin-bottom: 10px;
  flex-wrap: wrap;
}
.album-name {
  font-weight: 600;
  font-size: 15px;
  flex: 1;
  min-width: 0;
  white-space: nowrap;
  overflow: hidden;
  text-overflow: ellipsis;
}

.badge {
  font-size: 11px;
  font-weight: 700;
  padding: 2px 8px;
  border-radius: var(--radius-xs);
  white-space: nowrap;
  flex-shrink: 0;
}
.badge-ok {
  background: var(--green-bg);
  color: var(--green);
}
.badge-warn {
  background: var(--amber-bg);
  color: var(--amber);
}
.badge-fatal {
  background: var(--red-bg);
  color: var(--red);
}

/* ── Metadata row ─────────────────────────────────────────────────────────── */

.metadata {
  display: flex;
  align-items: baseline;
  flex-wrap: wrap;
  gap: 10px;
  font-size: 12px;
  color: var(--text-muted);
  margin-bottom: 12px;
}
.metadata-title {
  color: var(--text-secondary);
  font-size: 13px;
}

.metadata-pill {
  display: inline-flex;
  align-items: center;
  gap: 4px;
  background: var(--surface-hi);
  border-radius: var(--radius-xs);
  padding: 2px 7px;
  font-size: 11px;
}
.pill-label {
  color: var(--text-dim);
}
.pill-beets {
  color: var(--pill-beets);
}
.pill-musicbrainz {
  color: var(--pill-mb);
}
.pill-file_tags {
  color: var(--pill-tags);
}
.pill-unknown {
  color: #888;
}

/* ── Info grid ────────────────────────────────────────────────────────────── */

.info-grid {
  display: grid;
  grid-template-columns: repeat(auto-fill, minmax(160px, 1fr));
  gap: 6px;
  margin-bottom: 12px;
}
.info-card {
  background: var(--surface-hi);
  border-radius: var(--radius);
  padding: 8px 12px;
  font-size: 12px;
}
.info-card-label {
  font-size: 10px;
  text-transform: uppercase;
  letter-spacing: 0.06em;
  color: var(--text-dim);
  margin-bottom: 4px;
}
.info-card-value {
  color: var(--text-secondary);
  font-size: 13px;
  font-weight: 600;
}
.info-card-sub {
  margin-top: 3px;
  color: var(--text-dim);
  font-size: 11px;
  line-height: 1.4;
}

.info-ok {
  color: var(--green);
}
.info-warn {
  color: var(--amber);
}
.info-dim {
  color: var(--text-dim);
}

/* ── Pipeline steps ───────────────────────────────────────────────────────── */

.steps-label {
  font-size: 10px;
  text-transform: uppercase;
  letter-spacing: 0.06em;
  color: #444;
  margin-bottom: 6px;
}
.steps {
  display: grid;
  grid-template-columns: repeat(auto-fill, minmax(180px, 1fr));
  gap: 6px;
}
.step {
  font-size: 12px;
  padding: 5px 10px;
  border-radius: var(--radius-sm);
  background: var(--surface-hi);
  display: flex;
  flex-direction: column;
  gap: 2px;
}
.step-label {
  color: #888;
}
.step-ok {
  color: var(--green);
}
.step-warn {
  color: var(--amber);
}
.step-fatal {
  color: var(--red);
}
.step-err {
  font-size: 11px;
  color: var(--red-text);
  margin-top: 2px;
  word-break: break-word;
}

/* ── Discover tab — search form ───────────────────────────────────────────── */

.search-form {
  display: flex;
  gap: 8px;
  align-items: stretch;
  margin-bottom: 20px;
}

.type-toggle {
  display: flex;
  border: 1px solid #333;
  border-radius: var(--radius);
  overflow: hidden;
  flex-shrink: 0;
}
.type-btn {
  font-size: 13px;
  padding: 0 16px;
  border: none;
  background: var(--surface);
  color: var(--text-muted);
  cursor: pointer;
  transition:
    background 0.15s,
    color 0.15s;
  white-space: nowrap;
}
.type-btn.active {
  background: var(--surface-hi);
  color: var(--text);
}

.search-input {
  flex: 1;
  min-width: 0;
  font-size: 14px;
  padding: 0 12px;
  height: 38px;
  background: var(--surface);
  border: 1px solid #333;
  border-radius: var(--radius);
  color: var(--text);
  outline: none;
  transition: border-color 0.15s;
}
.search-input:focus {
  border-color: var(--border-focus);
}

.search-btn {
  font-size: 14px;
  padding: 0 20px;
  height: 38px;
  border-radius: var(--radius);
  border: none;
  background: var(--green);
  color: #fff;
  cursor: pointer;
  flex-shrink: 0;
  transition: opacity 0.15s;
}
.search-btn:hover:not(:disabled) {
  opacity: 0.88;
}
.search-btn:disabled {
  background: #555;
  cursor: not-allowed;
}

/* ── Discover tab — search results ───────────────────────────────────────── */

.search-msg {
  text-align: center;
  color: var(--text-dim);
  font-size: 14px;
  padding: 32px 0;
}
.search-msg.error {
  color: var(--red);
}

.result-row {
  display: flex;
  align-items: center;
  gap: 12px;
  background: var(--surface);
  border: 1px solid var(--border);
  border-radius: var(--radius-lg);
  padding: 12px 16px;
  margin-bottom: 8px;
}
.result-cover {
  width: 48px;
  height: 48px;
  object-fit: cover;
  border-radius: var(--radius-sm);
  flex-shrink: 0;
  background: var(--surface-hi);
}
.result-info {
  flex: 1;
  min-width: 0;
}
.result-title {
  display: block;
  font-size: 14px;
  color: #ddd;
  white-space: nowrap;
  overflow: hidden;
  text-overflow: ellipsis;
}
.result-dis {
  color: var(--text-dim);
}
.result-meta {
  display: block;
  font-size: 12px;
  color: var(--text-dim);
  margin-top: 2px;
}

.fetch-btn {
  font-size: 12px;
  padding: 5px 14px;
  border-radius: var(--radius-sm);
  border: 1px solid var(--green-border);
  background: transparent;
  color: var(--green);
  cursor: pointer;
  flex-shrink: 0;
  white-space: nowrap;
  transition: background 0.15s;
}
.fetch-btn:hover:not(:disabled) {
  background: var(--green-hover);
}
.fetch-btn:disabled {
  border-color: #333;
  color: var(--text-dim);
  cursor: not-allowed;
}

/* ── Discover tab — fetch log cards ───────────────────────────────────────── */

.fetch-list {
  margin-top: 32px;
}

.fetch-card {
  background: var(--surface);
  border: 1px solid var(--border);
  border-radius: var(--radius-lg);
  padding: 14px 16px;
  margin-bottom: 10px;
  transition: border-color 0.3s;
}
.fetch-card-ok {
  border-color: var(--green-bg);
}
.fetch-card-err {
  border-color: var(--red-bg);
}

.fetch-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  flex-wrap: wrap;
  gap: 6px;
  margin-bottom: 8px;
}
.fetch-title {
  font-size: 14px;
  font-weight: 600;
  color: var(--text-secondary);
  min-width: 0;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}
.fetch-status {
  font-size: 12px;
  color: var(--text-dim);
  flex-shrink: 0;
}
.fetch-status-ok {
  color: var(--green);
}
.fetch-status-err {
  color: var(--red);
}

.fetch-log {
  font-size: 12px;
  font-family: ui-monospace, "Cascadia Code", "Fira Mono", monospace;
  color: var(--text-muted);
  max-height: 260px;
  overflow-y: auto;
  scrollbar-width: thin;
  scrollbar-color: #333 transparent;
}
.log-line {
  padding: 1px 0;
  line-height: 1.5;
}
.log-line-err {
  color: var(--red-text);
}

/* ── Footer ───────────────────────────────────────────────────────────────── */

footer {
  position: fixed;
  bottom: 14px;
  left: 0;
  width: 100%;
  font-size: 12px;
  color: #444;
  text-align: center;
  pointer-events: none;
}

/* ── Responsive ───────────────────────────────────────────────────────────── */

@media (max-width: 600px) {
  body {
    padding: 32px 16px 72px;
  }

  .tabs {
    display: flex;
    width: 100%;
  }
  .tab-btn {
    flex: 1;
    padding: 0;
    min-height: 40px;
  }

  .search-form {
    flex-wrap: wrap;
  }
  .type-toggle {
    width: 100%;
  }
  .type-btn {
    flex: 1;
    min-height: 38px;
  }
  .search-btn {
    width: 100%;
  }

  .result-title {
    white-space: normal;
  }
}
||||
Loading…
Add table
Add a link
Reference in a new issue