diff --git a/client/app/routes/MediaItems/Track.tsx b/client/app/routes/MediaItems/Track.tsx
index 20258c1..6b6690e 100644
--- a/client/app/routes/MediaItems/Track.tsx
+++ b/client/app/routes/MediaItems/Track.tsx
@@ -34,6 +34,7 @@ export default function Track() {
title={track.title}
img={track.image}
id={track.id}
+ rank={track.all_time_rank}
musicbrainzId={track.musicbrainz_id}
imgItemId={track.album_id}
mergeFunc={mergeTracks}
diff --git a/db/queries/artist.sql b/db/queries/artist.sql
index 863de32..deaad60 100644
--- a/db/queries/artist.sql
+++ b/db/queries/artist.sql
@@ -81,6 +81,26 @@ FROM (
ORDER BY x.listen_count DESC, x.id
LIMIT $3 OFFSET $4;
+-- name: GetArtistAllTimeRank :one
+SELECT
+ artist_id,
+ rank
+FROM (
+ SELECT
+ x.artist_id,
+ RANK() OVER (ORDER BY x.listen_count DESC) AS rank
+ FROM (
+ SELECT
+ at.artist_id,
+ COUNT(*) AS listen_count
+ FROM listens l
+ JOIN tracks t ON l.track_id = t.id
+ JOIN artist_tracks at ON t.id = at.track_id
+ GROUP BY at.artist_id
+ ) x
+  ) y
+WHERE artist_id = $1;
+
-- name: CountTopArtists :one
SELECT COUNT(DISTINCT at.artist_id) AS total_count
FROM listens l
diff --git a/db/queries/release.sql b/db/queries/release.sql
index cb548ed..47aac86 100644
--- a/db/queries/release.sql
+++ b/db/queries/release.sql
@@ -83,6 +83,25 @@ FROM (
ORDER BY listen_count DESC, x.id
LIMIT $3 OFFSET $4;
+-- name: GetReleaseAllTimeRank :one
+SELECT
+ release_id,
+ rank
+FROM (
+ SELECT
+ x.release_id,
+ RANK() OVER (ORDER BY x.listen_count DESC) AS rank
+ FROM (
+ SELECT
+ t.release_id,
+ COUNT(*) AS listen_count
+ FROM listens l
+ JOIN tracks t ON l.track_id = t.id
+ GROUP BY t.release_id
+ ) x
+  ) y
+WHERE release_id = $1;
+
-- name: CountTopReleases :one
SELECT COUNT(DISTINCT r.id) AS total_count
FROM listens l
diff --git a/db/queries/track.sql b/db/queries/track.sql
index 24be467..c69bed5 100644
--- a/db/queries/track.sql
+++ b/db/queries/track.sql
@@ -124,6 +124,24 @@ FROM (
ORDER BY x.listen_count DESC, x.id
LIMIT $3 OFFSET $4;
+-- name: GetTrackAllTimeRank :one
+SELECT
+ id,
+ rank
+FROM (
+ SELECT
+ x.id,
+ RANK() OVER (ORDER BY x.listen_count DESC) AS rank
+ FROM (
+ SELECT
+ t.id,
+ COUNT(*) AS listen_count
+ FROM listens l
+ JOIN tracks_with_title t ON l.track_id = t.id
+ GROUP BY t.id) x
+ ) y
+WHERE id = $1;
+
-- name: CountTopTracks :one
SELECT COUNT(DISTINCT l.track_id) AS total_count
FROM listens l
diff --git a/internal/db/psql/album.go b/internal/db/psql/album.go
index 630cf1f..f4c614c 100644
--- a/internal/db/psql/album.go
+++ b/internal/db/psql/album.go
@@ -23,32 +23,13 @@ func (d *Psql) GetAlbum(ctx context.Context, opts db.GetAlbumOpts) (*models.Albu
var err error
var ret = new(models.Album)
- if opts.ID != 0 {
- l.Debug().Msgf("Fetching album from DB with id %d", opts.ID)
- row, err := d.q.GetRelease(ctx, opts.ID)
- if err != nil {
- return nil, fmt.Errorf("GetAlbum: %w", err)
- }
- ret.ID = row.ID
- ret.MbzID = row.MusicBrainzID
- ret.Title = row.Title
- ret.Image = row.Image
- ret.VariousArtists = row.VariousArtists
- err = json.Unmarshal(row.Artists, &ret.Artists)
- if err != nil {
- return nil, fmt.Errorf("GetAlbum: json.Unmarshal: %w", err)
- }
- } else if opts.MusicBrainzID != uuid.Nil {
+ if opts.MusicBrainzID != uuid.Nil {
l.Debug().Msgf("Fetching album from DB with MusicBrainz Release ID %s", opts.MusicBrainzID)
row, err := d.q.GetReleaseByMbzID(ctx, &opts.MusicBrainzID)
if err != nil {
return nil, fmt.Errorf("GetAlbum: %w", err)
}
- ret.ID = row.ID
- ret.MbzID = row.MusicBrainzID
- ret.Title = row.Title
- ret.Image = row.Image
- ret.VariousArtists = row.VariousArtists
+ opts.ID = row.ID
} else if opts.ArtistID != 0 && opts.Title != "" {
l.Debug().Msgf("Fetching album from DB with artist_id %d and title %s", opts.ArtistID, opts.Title)
row, err := d.q.GetReleaseByArtistAndTitle(ctx, repository.GetReleaseByArtistAndTitleParams{
@@ -58,11 +39,7 @@ func (d *Psql) GetAlbum(ctx context.Context, opts db.GetAlbumOpts) (*models.Albu
if err != nil {
return nil, fmt.Errorf("GetAlbum: %w", err)
}
- ret.ID = row.ID
- ret.MbzID = row.MusicBrainzID
- ret.Title = row.Title
- ret.Image = row.Image
- ret.VariousArtists = row.VariousArtists
+ opts.ID = row.ID
} else if opts.ArtistID != 0 && len(opts.Titles) > 0 {
l.Debug().Msgf("Fetching release group from DB with artist_id %d and titles %v", opts.ArtistID, opts.Titles)
row, err := d.q.GetReleaseByArtistAndTitles(ctx, repository.GetReleaseByArtistAndTitlesParams{
@@ -72,19 +49,19 @@ func (d *Psql) GetAlbum(ctx context.Context, opts db.GetAlbumOpts) (*models.Albu
if err != nil {
return nil, fmt.Errorf("GetAlbum: %w", err)
}
- ret.ID = row.ID
- ret.MbzID = row.MusicBrainzID
- ret.Title = row.Title
- ret.Image = row.Image
- ret.VariousArtists = row.VariousArtists
- } else {
- return nil, errors.New("GetAlbum: insufficient information to get album")
+ opts.ID = row.ID
+ }
+
+ l.Debug().Msgf("Fetching album from DB with id %d", opts.ID)
+ row, err := d.q.GetRelease(ctx, opts.ID)
+ if err != nil {
+ return nil, fmt.Errorf("GetAlbum: %w", err)
}
count, err := d.q.CountListensFromRelease(ctx, repository.CountListensFromReleaseParams{
ListenedAt: time.Unix(0, 0),
ListenedAt_2: time.Now(),
- ReleaseID: ret.ID,
+ ReleaseID: opts.ID,
})
if err != nil {
return nil, fmt.Errorf("GetAlbum: CountListensFromRelease: %w", err)
@@ -92,17 +69,32 @@ func (d *Psql) GetAlbum(ctx context.Context, opts db.GetAlbumOpts) (*models.Albu
seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{
Timeframe: db.Timeframe{Period: db.PeriodAllTime},
- AlbumID: ret.ID,
+ AlbumID: opts.ID,
})
if err != nil {
return nil, fmt.Errorf("GetAlbum: CountTimeListenedToItem: %w", err)
}
- firstListen, err := d.q.GetFirstListenFromRelease(ctx, ret.ID)
+ firstListen, err := d.q.GetFirstListenFromRelease(ctx, opts.ID)
if err != nil && !errors.Is(err, pgx.ErrNoRows) {
return nil, fmt.Errorf("GetAlbum: GetFirstListenFromRelease: %w", err)
}
+ rank, err := d.q.GetReleaseAllTimeRank(ctx, opts.ID)
+ if err != nil && !errors.Is(err, pgx.ErrNoRows) {
+ return nil, fmt.Errorf("GetAlbum: GetReleaseAllTimeRank: %w", err)
+ }
+
+ ret.ID = row.ID
+ ret.MbzID = row.MusicBrainzID
+ ret.Title = row.Title
+ ret.Image = row.Image
+ ret.VariousArtists = row.VariousArtists
+ err = json.Unmarshal(row.Artists, &ret.Artists)
+ if err != nil {
+ return nil, fmt.Errorf("GetAlbum: json.Unmarshal: %w", err)
+ }
+ ret.AllTimeRank = rank.Rank
ret.ListenCount = count
ret.TimeListened = seconds
ret.FirstListen = firstListen.ListenedAt.Unix()
diff --git a/internal/db/psql/artist.go b/internal/db/psql/artist.go
index a67fc4c..7bb50ec 100644
--- a/internal/db/psql/artist.go
+++ b/internal/db/psql/artist.go
@@ -20,114 +20,60 @@ import (
// this function sucks because sqlc keeps making new types for rows that are the same
func (d *Psql) GetArtist(ctx context.Context, opts db.GetArtistOpts) (*models.Artist, error) {
l := logger.FromContext(ctx)
- if opts.ID != 0 {
- l.Debug().Msgf("Fetching artist from DB with id %d", opts.ID)
- row, err := d.q.GetArtist(ctx, opts.ID)
- if err != nil {
- return nil, fmt.Errorf("GetArtist: GetArtist by ID: %w", err)
- }
- count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{
- ListenedAt: time.Unix(0, 0),
- ListenedAt_2: time.Now(),
- ArtistID: row.ID,
- })
- if err != nil {
- return nil, fmt.Errorf("GetArtist: CountListensFromArtist: %w", err)
- }
- seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{
- Timeframe: db.Timeframe{Period: db.PeriodAllTime},
- ArtistID: row.ID,
- })
- if err != nil {
- return nil, fmt.Errorf("GetArtist: CountTimeListenedToItem: %w", err)
- }
- firstListen, err := d.q.GetFirstListenFromArtist(ctx, row.ID)
- if err != nil && !errors.Is(err, pgx.ErrNoRows) {
- return nil, fmt.Errorf("GetAlbum: GetFirstListenFromArtist: %w", err)
- }
- return &models.Artist{
- ID: row.ID,
- MbzID: row.MusicBrainzID,
- Name: row.Name,
- Aliases: row.Aliases,
- Image: row.Image,
- ListenCount: count,
- TimeListened: seconds,
- FirstListen: firstListen.ListenedAt.Unix(),
- }, nil
- } else if opts.MusicBrainzID != uuid.Nil {
+ if opts.MusicBrainzID != uuid.Nil {
l.Debug().Msgf("Fetching artist from DB with MusicBrainz ID %s", opts.MusicBrainzID)
row, err := d.q.GetArtistByMbzID(ctx, &opts.MusicBrainzID)
if err != nil {
return nil, fmt.Errorf("GetArtist: GetArtistByMbzID: %w", err)
}
- count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{
- ListenedAt: time.Unix(0, 0),
- ListenedAt_2: time.Now(),
- ArtistID: row.ID,
- })
- if err != nil {
- return nil, fmt.Errorf("GetArtist: CountListensFromArtist: %w", err)
- }
- seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{
- Timeframe: db.Timeframe{Period: db.PeriodAllTime},
- ArtistID: row.ID,
- })
- if err != nil {
- return nil, fmt.Errorf("GetArtist: CountTimeListenedToItem: %w", err)
- }
- firstListen, err := d.q.GetFirstListenFromArtist(ctx, row.ID)
- if err != nil && !errors.Is(err, pgx.ErrNoRows) {
- return nil, fmt.Errorf("GetAlbum: GetFirstListenFromArtist: %w", err)
- }
- return &models.Artist{
- ID: row.ID,
- MbzID: row.MusicBrainzID,
- Name: row.Name,
- Aliases: row.Aliases,
- Image: row.Image,
- ListenCount: count,
- TimeListened: seconds,
- FirstListen: firstListen.ListenedAt.Unix(),
- }, nil
+ opts.ID = row.ID
} else if opts.Name != "" {
l.Debug().Msgf("Fetching artist from DB with name '%s'", opts.Name)
row, err := d.q.GetArtistByName(ctx, opts.Name)
if err != nil {
return nil, fmt.Errorf("GetArtist: GetArtistByName: %w", err)
}
- count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{
- ListenedAt: time.Unix(0, 0),
- ListenedAt_2: time.Now(),
- ArtistID: row.ID,
- })
- if err != nil {
- return nil, fmt.Errorf("GetArtist: CountListensFromArtist: %w", err)
- }
- seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{
- Timeframe: db.Timeframe{Period: db.PeriodAllTime},
- ArtistID: row.ID,
- })
- if err != nil {
- return nil, fmt.Errorf("GetArtist: CountTimeListenedToItem: %w", err)
- }
- firstListen, err := d.q.GetFirstListenFromArtist(ctx, row.ID)
- if err != nil && !errors.Is(err, pgx.ErrNoRows) {
- return nil, fmt.Errorf("GetAlbum: GetFirstListenFromArtist: %w", err)
- }
- return &models.Artist{
- ID: row.ID,
- MbzID: row.MusicBrainzID,
- Name: row.Name,
- Aliases: row.Aliases,
- Image: row.Image,
- ListenCount: count,
- TimeListened: seconds,
- FirstListen: firstListen.ListenedAt.Unix(),
- }, nil
- } else {
- return nil, errors.New("insufficient information to get artist")
+ opts.ID = row.ID
}
+ l.Debug().Msgf("Fetching artist from DB with id %d", opts.ID)
+ row, err := d.q.GetArtist(ctx, opts.ID)
+ if err != nil {
+ return nil, fmt.Errorf("GetArtist: GetArtist by ID: %w", err)
+ }
+ count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{
+ ListenedAt: time.Unix(0, 0),
+ ListenedAt_2: time.Now(),
+ ArtistID: row.ID,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("GetArtist: CountListensFromArtist: %w", err)
+ }
+ seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{
+ Timeframe: db.Timeframe{Period: db.PeriodAllTime},
+ ArtistID: row.ID,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("GetArtist: CountTimeListenedToItem: %w", err)
+ }
+ firstListen, err := d.q.GetFirstListenFromArtist(ctx, row.ID)
+ if err != nil && !errors.Is(err, pgx.ErrNoRows) {
+		return nil, fmt.Errorf("GetArtist: GetFirstListenFromArtist: %w", err)
+ }
+ rank, err := d.q.GetArtistAllTimeRank(ctx, opts.ID)
+ if err != nil && !errors.Is(err, pgx.ErrNoRows) {
+ return nil, fmt.Errorf("GetArtist: GetArtistAllTimeRank: %w", err)
+ }
+ return &models.Artist{
+ ID: row.ID,
+ MbzID: row.MusicBrainzID,
+ Name: row.Name,
+ Aliases: row.Aliases,
+ Image: row.Image,
+ ListenCount: count,
+ TimeListened: seconds,
+ AllTimeRank: rank.Rank,
+ FirstListen: firstListen.ListenedAt.Unix(),
+ }, nil
}
// Inserts all unique aliases into the DB with specified source
diff --git a/internal/db/psql/track.go b/internal/db/psql/track.go
index d511de6..743a20e 100644
--- a/internal/db/psql/track.go
+++ b/internal/db/psql/track.go
@@ -21,37 +21,13 @@ func (d *Psql) GetTrack(ctx context.Context, opts db.GetTrackOpts) (*models.Trac
l := logger.FromContext(ctx)
var track models.Track
- if opts.ID != 0 {
- l.Debug().Msgf("Fetching track from DB with id %d", opts.ID)
- t, err := d.q.GetTrack(ctx, opts.ID)
- if err != nil {
- return nil, fmt.Errorf("GetTrack: GetTrack By ID: %w", err)
- }
- track = models.Track{
- ID: t.ID,
- MbzID: t.MusicBrainzID,
- Title: t.Title,
- AlbumID: t.ReleaseID,
- Image: t.Image,
- Duration: t.Duration,
- }
- err = json.Unmarshal(t.Artists, &track.Artists)
- if err != nil {
- return nil, fmt.Errorf("GetTrack: json.Unmarshal: %w", err)
- }
- } else if opts.MusicBrainzID != uuid.Nil {
+ if opts.MusicBrainzID != uuid.Nil {
l.Debug().Msgf("Fetching track from DB with MusicBrainz ID %s", opts.MusicBrainzID)
t, err := d.q.GetTrackByMbzID(ctx, &opts.MusicBrainzID)
if err != nil {
return nil, fmt.Errorf("GetTrack: GetTrackByMbzID: %w", err)
}
- track = models.Track{
- ID: t.ID,
- MbzID: t.MusicBrainzID,
- Title: t.Title,
- AlbumID: t.ReleaseID,
- Duration: t.Duration,
- }
+ opts.ID = t.ID
} else if len(opts.ArtistIDs) > 0 && opts.ReleaseID != 0 {
l.Debug().Msgf("Fetching track from DB from release id %d with title '%s' and artist id(s) '%v'", opts.ReleaseID, opts.Title, opts.ArtistIDs)
t, err := d.q.GetTrackByTrackInfo(ctx, repository.GetTrackByTrackInfoParams{
@@ -62,21 +38,19 @@ func (d *Psql) GetTrack(ctx context.Context, opts db.GetTrackOpts) (*models.Trac
if err != nil {
return nil, fmt.Errorf("GetTrack: GetTrackByTrackInfo: %w", err)
}
- track = models.Track{
- ID: t.ID,
- MbzID: t.MusicBrainzID,
- Title: t.Title,
- AlbumID: t.ReleaseID,
- Duration: t.Duration,
- }
- } else {
- return nil, errors.New("GetTrack: insufficient information to get track")
+ opts.ID = t.ID
+ }
+
+ l.Debug().Msgf("Fetching track from DB with id %d", opts.ID)
+ t, err := d.q.GetTrack(ctx, opts.ID)
+ if err != nil {
+ return nil, fmt.Errorf("GetTrack: GetTrack By ID: %w", err)
}
count, err := d.q.CountListensFromTrack(ctx, repository.CountListensFromTrackParams{
ListenedAt: time.Unix(0, 0),
ListenedAt_2: time.Now(),
- TrackID: track.ID,
+ TrackID: opts.ID,
})
if err != nil {
return nil, fmt.Errorf("GetTrack: CountListensFromTrack: %w", err)
@@ -84,20 +58,37 @@ func (d *Psql) GetTrack(ctx context.Context, opts db.GetTrackOpts) (*models.Trac
seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{
Timeframe: db.Timeframe{Period: db.PeriodAllTime},
- TrackID: track.ID,
+ TrackID: opts.ID,
})
if err != nil {
return nil, fmt.Errorf("GetTrack: CountTimeListenedToItem: %w", err)
}
- firstListen, err := d.q.GetFirstListenFromTrack(ctx, track.ID)
+ firstListen, err := d.q.GetFirstListenFromTrack(ctx, opts.ID)
if err != nil && !errors.Is(err, pgx.ErrNoRows) {
return nil, fmt.Errorf("GetAlbum: GetFirstListenFromRelease: %w", err)
}
+ rank, err := d.q.GetTrackAllTimeRank(ctx, opts.ID)
+ if err != nil && !errors.Is(err, pgx.ErrNoRows) {
+		return nil, fmt.Errorf("GetTrack: GetTrackAllTimeRank: %w", err)
+ }
- track.ListenCount = count
- track.TimeListened = seconds
- track.FirstListen = firstListen.ListenedAt.Unix()
+ track = models.Track{
+ ID: t.ID,
+ MbzID: t.MusicBrainzID,
+ Title: t.Title,
+ AlbumID: t.ReleaseID,
+ Image: t.Image,
+ Duration: t.Duration,
+ AllTimeRank: rank.Rank,
+ ListenCount: count,
+ TimeListened: seconds,
+ FirstListen: firstListen.ListenedAt.Unix(),
+ }
+ err = json.Unmarshal(t.Artists, &track.Artists)
+ if err != nil {
+ return nil, fmt.Errorf("GetTrack: json.Unmarshal: %w", err)
+ }
return &track, nil
}
diff --git a/internal/models/album.go b/internal/models/album.go
index 24948f9..a295fe9 100644
--- a/internal/models/album.go
+++ b/internal/models/album.go
@@ -12,11 +12,5 @@ type Album struct {
ListenCount int64 `json:"listen_count"`
TimeListened int64 `json:"time_listened"`
FirstListen int64 `json:"first_listen"`
+ AllTimeRank int64 `json:"all_time_rank"`
}
-
-// type SimpleAlbum struct {
-// ID int32 `json:"id"`
-// Title string `json:"title"`
-// VariousArtists bool `json:"is_various_artists"`
-// Image uuid.UUID `json:"image"`
-// }
diff --git a/internal/models/artist.go b/internal/models/artist.go
index 7784e51..07f09e6 100644
--- a/internal/models/artist.go
+++ b/internal/models/artist.go
@@ -12,6 +12,7 @@ type Artist struct {
TimeListened int64 `json:"time_listened"`
FirstListen int64 `json:"first_listen"`
IsPrimary bool `json:"is_primary,omitempty"`
+ AllTimeRank int64 `json:"all_time_rank"`
}
type SimpleArtist struct {
diff --git a/internal/models/track.go b/internal/models/track.go
index 8eb802c..4cb5b04 100644
--- a/internal/models/track.go
+++ b/internal/models/track.go
@@ -13,4 +13,5 @@ type Track struct {
AlbumID int32 `json:"album_id"`
TimeListened int64 `json:"time_listened"`
FirstListen int64 `json:"first_listen"`
+ AllTimeRank int64 `json:"all_time_rank"`
}
diff --git a/internal/repository/artist.sql.go b/internal/repository/artist.sql.go
index 3722291..96f00f2 100644
--- a/internal/repository/artist.sql.go
+++ b/internal/repository/artist.sql.go
@@ -134,6 +134,39 @@ func (q *Queries) GetArtist(ctx context.Context, id int32) (GetArtistRow, error)
return i, err
}
+const getArtistAllTimeRank = `-- name: GetArtistAllTimeRank :one
+SELECT
+ artist_id,
+ rank
+FROM (
+ SELECT
+ x.artist_id,
+ RANK() OVER (ORDER BY x.listen_count DESC) AS rank
+ FROM (
+ SELECT
+ at.artist_id,
+ COUNT(*) AS listen_count
+ FROM listens l
+ JOIN tracks t ON l.track_id = t.id
+ JOIN artist_tracks at ON t.id = at.track_id
+ GROUP BY at.artist_id
+ ) x
+  ) y
+WHERE artist_id = $1
+`
+
+type GetArtistAllTimeRankRow struct {
+ ArtistID int32
+ Rank int64
+}
+
+func (q *Queries) GetArtistAllTimeRank(ctx context.Context, artistID int32) (GetArtistAllTimeRankRow, error) {
+ row := q.db.QueryRow(ctx, getArtistAllTimeRank, artistID)
+ var i GetArtistAllTimeRankRow
+ err := row.Scan(&i.ArtistID, &i.Rank)
+ return i, err
+}
+
const getArtistByImage = `-- name: GetArtistByImage :one
SELECT id, musicbrainz_id, image, image_source FROM artists WHERE image = $1 LIMIT 1
`
diff --git a/internal/repository/release.sql.go b/internal/repository/release.sql.go
index 76789d0..6d12da4 100644
--- a/internal/repository/release.sql.go
+++ b/internal/repository/release.sql.go
@@ -141,6 +141,38 @@ func (q *Queries) GetRelease(ctx context.Context, id int32) (GetReleaseRow, erro
return i, err
}
+const getReleaseAllTimeRank = `-- name: GetReleaseAllTimeRank :one
+SELECT
+ release_id,
+ rank
+FROM (
+ SELECT
+ x.release_id,
+ RANK() OVER (ORDER BY x.listen_count DESC) AS rank
+ FROM (
+ SELECT
+ t.release_id,
+ COUNT(*) AS listen_count
+ FROM listens l
+ JOIN tracks t ON l.track_id = t.id
+ GROUP BY t.release_id
+ ) x
+  ) y
+WHERE release_id = $1
+`
+
+type GetReleaseAllTimeRankRow struct {
+ ReleaseID int32
+ Rank int64
+}
+
+func (q *Queries) GetReleaseAllTimeRank(ctx context.Context, releaseID int32) (GetReleaseAllTimeRankRow, error) {
+ row := q.db.QueryRow(ctx, getReleaseAllTimeRank, releaseID)
+ var i GetReleaseAllTimeRankRow
+ err := row.Scan(&i.ReleaseID, &i.Rank)
+ return i, err
+}
+
const getReleaseByArtistAndTitle = `-- name: GetReleaseByArtistAndTitle :one
SELECT r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title
FROM releases_with_title r
diff --git a/internal/repository/track.sql.go b/internal/repository/track.sql.go
index a18d87a..e2aa084 100644
--- a/internal/repository/track.sql.go
+++ b/internal/repository/track.sql.go
@@ -438,6 +438,37 @@ func (q *Queries) GetTrack(ctx context.Context, id int32) (GetTrackRow, error) {
return i, err
}
+const getTrackAllTimeRank = `-- name: GetTrackAllTimeRank :one
+SELECT
+ id,
+ rank
+FROM (
+ SELECT
+ x.id,
+ RANK() OVER (ORDER BY x.listen_count DESC) AS rank
+ FROM (
+ SELECT
+ t.id,
+ COUNT(*) AS listen_count
+ FROM listens l
+ JOIN tracks_with_title t ON l.track_id = t.id
+ GROUP BY t.id) x
+ ) y
+WHERE id = $1
+`
+
+type GetTrackAllTimeRankRow struct {
+ ID int32
+ Rank int64
+}
+
+func (q *Queries) GetTrackAllTimeRank(ctx context.Context, id int32) (GetTrackAllTimeRankRow, error) {
+ row := q.db.QueryRow(ctx, getTrackAllTimeRank, id)
+ var i GetTrackAllTimeRankRow
+ err := row.Scan(&i.ID, &i.Rank)
+ return i, err
+}
+
const getTrackByMbzID = `-- name: GetTrackByMbzID :one
SELECT id, musicbrainz_id, duration, release_id, title FROM tracks_with_title
WHERE musicbrainz_id = $1 LIMIT 1
From 1a8099e902298f2b9aeef38f07164a338c4e06a9 Mon Sep 17 00:00:00 2001
From: Gabe Farrell <90876006+gabehf@users.noreply.github.com>
Date: Tue, 20 Jan 2026 12:10:54 -0500
Subject: [PATCH 15/31] feat: refetch missing images on startup (#160)
* artist image refetching
* album image refetching
* remove unused var
---
.env.example | 5 +
.gitignore | 1 +
Makefile | 9 +-
db/queries/artist.sql | 9 ++
engine/engine.go | 14 ++-
engine/handlers/replace_image.go | 3 +-
internal/catalog/images.go | 147 +++++++++++++++++++++++++-----
internal/db/db.go | 2 +
internal/db/psql/album.go | 3 +
internal/db/psql/artist.go | 3 +
internal/db/psql/images.go | 23 +++++
internal/images/deezer.go | 3 +
internal/images/imagesrc.go | 46 +++++++---
internal/images/subsonic.go | 6 +-
internal/repository/artist.sql.go | 41 +++++++++
15 files changed, 271 insertions(+), 44 deletions(-)
create mode 100644 .env.example
diff --git a/.env.example b/.env.example
new file mode 100644
index 0000000..d5ed451
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,5 @@
+KOITO_ALLOWED_HOSTS=*
+KOITO_LOG_LEVEL=debug
+KOITO_CONFIG_DIR=test_config_dir
+KOITO_DATABASE_URL=postgres://postgres:secret@localhost:5432?sslmode=disable
+TZ=Etc/UTC
diff --git a/.gitignore b/.gitignore
index bade026..083bb78 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,2 @@
test_config_dir
+.env
diff --git a/Makefile b/Makefile
index b437622..99455ac 100644
--- a/Makefile
+++ b/Makefile
@@ -1,3 +1,8 @@
+ifneq (,$(wildcard ./.env))
+ include .env
+ export
+endif
+
.PHONY: all test clean client
postgres.schemadump:
@@ -28,10 +33,10 @@ postgres.remove-scratch:
docker stop koito-scratch && docker rm koito-scratch
api.debug: postgres.start
- KOITO_ALLOWED_HOSTS=* KOITO_LOG_LEVEL=debug KOITO_CONFIG_DIR=test_config_dir KOITO_DATABASE_URL=postgres://postgres:secret@localhost:5432?sslmode=disable go run cmd/api/main.go
+ go run cmd/api/main.go
api.scratch: postgres.run-scratch
- KOITO_ALLOWED_HOSTS=* KOITO_LOG_LEVEL=debug KOITO_CONFIG_DIR=test_config_dir/scratch KOITO_DATABASE_URL=postgres://postgres:secret@localhost:5433?sslmode=disable go run cmd/api/main.go
+ KOITO_DATABASE_URL=postgres://postgres:secret@localhost:5433?sslmode=disable go run cmd/api/main.go
api.test:
go test ./... -timeout 60s
diff --git a/db/queries/artist.sql b/db/queries/artist.sql
index deaad60..70a2fdd 100644
--- a/db/queries/artist.sql
+++ b/db/queries/artist.sql
@@ -56,6 +56,15 @@ LEFT JOIN artist_aliases aa ON a.id = aa.artist_id
WHERE a.musicbrainz_id = $1
GROUP BY a.id, a.musicbrainz_id, a.image, a.image_source, a.name;
+-- name: GetArtistsWithoutImages :many
+SELECT
+ *
+FROM artists_with_name
+WHERE image IS NULL
+ AND id > $2
+ORDER BY id ASC
+LIMIT $1;
+
-- name: GetTopArtistsPaginated :many
SELECT
x.id,
diff --git a/engine/engine.go b/engine/engine.go
index 31fe552..9374819 100644
--- a/engine/engine.go
+++ b/engine/engine.go
@@ -211,6 +211,8 @@ func Run(
}
}()
+ l.Info().Msg("Engine: Beginning startup tasks...")
+
l.Debug().Msg("Engine: Checking import configuration")
if !cfg.SkipImport() {
go func() {
@@ -218,18 +220,14 @@ func Run(
}()
}
- // l.Info().Msg("Creating test export file")
- // go func() {
- // err := export.ExportData(ctx, "koito", store)
- // if err != nil {
- // l.Err(err).Msg("Failed to generate export file")
- // }
- // }()
-
l.Info().Msg("Engine: Pruning orphaned images")
go catalog.PruneOrphanedImages(logger.NewContext(l), store)
l.Info().Msg("Engine: Running duration backfill task")
go catalog.BackfillTrackDurationsFromMusicBrainz(ctx, store, mbzC)
+ l.Info().Msg("Engine: Attempting to fetch missing artist images")
+ go catalog.FetchMissingArtistImages(ctx, store)
+ l.Info().Msg("Engine: Attempting to fetch missing album images")
+ go catalog.FetchMissingAlbumImages(ctx, store)
l.Info().Msg("Engine: Initialization finished")
quit := make(chan os.Signal, 1)
diff --git a/engine/handlers/replace_image.go b/engine/handlers/replace_image.go
index 66c0bbe..9a2835d 100644
--- a/engine/handlers/replace_image.go
+++ b/engine/handlers/replace_image.go
@@ -9,6 +9,7 @@ import (
"github.com/gabehf/koito/internal/catalog"
"github.com/gabehf/koito/internal/cfg"
"github.com/gabehf/koito/internal/db"
+ "github.com/gabehf/koito/internal/images"
"github.com/gabehf/koito/internal/logger"
"github.com/gabehf/koito/internal/utils"
"github.com/google/uuid"
@@ -75,7 +76,7 @@ func ReplaceImageHandler(store db.DB) http.HandlerFunc {
fileUrl := r.FormValue("image_url")
if fileUrl != "" {
l.Debug().Msg("ReplaceImageHandler: Image identified as remote file")
- err = catalog.ValidateImageURL(fileUrl)
+ err = images.ValidateImageURL(fileUrl)
if err != nil {
l.Debug().AnErr("error", err).Msg("ReplaceImageHandler: Invalid image URL")
utils.WriteError(w, "url is invalid or not an image file", http.StatusBadRequest)
diff --git a/internal/catalog/images.go b/internal/catalog/images.go
index bf5aa26..4193a39 100644
--- a/internal/catalog/images.go
+++ b/internal/catalog/images.go
@@ -13,7 +13,9 @@ import (
"github.com/gabehf/koito/internal/cfg"
"github.com/gabehf/koito/internal/db"
+ "github.com/gabehf/koito/internal/images"
"github.com/gabehf/koito/internal/logger"
+ "github.com/gabehf/koito/internal/utils"
"github.com/google/uuid"
"github.com/h2non/bimg"
)
@@ -78,30 +80,10 @@ func SourceImageDir() string {
}
}
-// ValidateImageURL checks if the URL points to a valid image by performing a HEAD request.
-func ValidateImageURL(url string) error {
- resp, err := http.Head(url)
- if err != nil {
- return fmt.Errorf("ValidateImageURL: http.Head: %w", err)
- }
- defer resp.Body.Close()
-
- if resp.StatusCode != http.StatusOK {
- return fmt.Errorf("ValidateImageURL: HEAD request failed, status code: %d", resp.StatusCode)
- }
-
- contentType := resp.Header.Get("Content-Type")
- if !strings.HasPrefix(contentType, "image/") {
- return fmt.Errorf("ValidateImageURL: URL does not point to an image, content type: %s", contentType)
- }
-
- return nil
-}
-
// DownloadAndCacheImage downloads an image from the given URL, then calls CompressAndSaveImage.
func DownloadAndCacheImage(ctx context.Context, id uuid.UUID, url string, size ImageSize) error {
l := logger.FromContext(ctx)
- err := ValidateImageURL(url)
+ err := images.ValidateImageURL(url)
if err != nil {
return fmt.Errorf("DownloadAndCacheImage: %w", err)
}
@@ -285,3 +267,126 @@ func pruneDirImgs(ctx context.Context, store db.DB, path string, memo map[string
}
return count, nil
}
+
+func FetchMissingArtistImages(ctx context.Context, store db.DB) error {
+ l := logger.FromContext(ctx)
+ l.Info().Msg("FetchMissingArtistImages: Starting backfill of missing artist images")
+
+ var from int32 = 0
+
+ for {
+ l.Debug().Int32("ID", from).Msg("Fetching artist images to backfill from ID")
+ artists, err := store.ArtistsWithoutImages(ctx, from)
+ if err != nil {
+ return fmt.Errorf("FetchMissingArtistImages: failed to fetch artists for image backfill: %w", err)
+ }
+
+ if len(artists) == 0 {
+ if from == 0 {
+ l.Info().Msg("FetchMissingArtistImages: No artists with missing images found")
+ } else {
+ l.Info().Msg("FetchMissingArtistImages: Finished fetching missing artist images")
+ }
+ return nil
+ }
+
+ for _, artist := range artists {
+ from = artist.ID
+
+ l.Debug().
+ Str("title", artist.Name).
+ Msg("FetchMissingArtistImages: Attempting to fetch missing artist image")
+
+ var aliases []string
+			if aliasrow, err := store.GetAllArtistAliases(ctx, artist.ID); err == nil {
+ aliases = utils.FlattenAliases(aliasrow)
+ } else {
+ aliases = []string{artist.Name}
+ }
+
+ var imgid uuid.UUID
+ imgUrl, imgErr := images.GetArtistImage(ctx, images.ArtistImageOpts{
+ Aliases: aliases,
+ })
+ if imgErr == nil && imgUrl != "" {
+ imgid = uuid.New()
+ err = store.UpdateArtist(ctx, db.UpdateArtistOpts{
+ ID: artist.ID,
+ Image: imgid,
+ ImageSrc: imgUrl,
+ })
+ if err != nil {
+ l.Err(err).
+ Str("title", artist.Name).
+ Msg("FetchMissingArtistImages: Failed to update artist with image in database")
+ continue
+ }
+ l.Info().
+ Str("name", artist.Name).
+ Msg("FetchMissingArtistImages: Successfully fetched missing artist image")
+ } else {
+				l.Err(imgErr).
+ Str("name", artist.Name).
+ Msg("FetchMissingArtistImages: Failed to fetch artist image")
+ }
+ }
+ }
+}
+func FetchMissingAlbumImages(ctx context.Context, store db.DB) error {
+ l := logger.FromContext(ctx)
+ l.Info().Msg("FetchMissingAlbumImages: Starting backfill of missing album images")
+
+ var from int32 = 0
+
+ for {
+ l.Debug().Int32("ID", from).Msg("Fetching album images to backfill from ID")
+ albums, err := store.AlbumsWithoutImages(ctx, from)
+ if err != nil {
+ return fmt.Errorf("FetchMissingAlbumImages: failed to fetch albums for image backfill: %w", err)
+ }
+
+ if len(albums) == 0 {
+ if from == 0 {
+ l.Info().Msg("FetchMissingAlbumImages: No albums with missing images found")
+ } else {
+ l.Info().Msg("FetchMissingAlbumImages: Finished fetching missing album images")
+ }
+ return nil
+ }
+
+ for _, album := range albums {
+ from = album.ID
+
+ l.Debug().
+ Str("title", album.Title).
+ Msg("FetchMissingAlbumImages: Attempting to fetch missing album image")
+
+ var imgid uuid.UUID
+ imgUrl, imgErr := images.GetAlbumImage(ctx, images.AlbumImageOpts{
+ Artists: utils.FlattenSimpleArtistNames(album.Artists),
+ Album: album.Title,
+ })
+ if imgErr == nil && imgUrl != "" {
+ imgid = uuid.New()
+ err = store.UpdateAlbum(ctx, db.UpdateAlbumOpts{
+ ID: album.ID,
+ Image: imgid,
+ ImageSrc: imgUrl,
+ })
+ if err != nil {
+ l.Err(err).
+ Str("title", album.Title).
+ Msg("FetchMissingAlbumImages: Failed to update album with image in database")
+ continue
+ }
+ l.Info().
+ Str("name", album.Title).
+ Msg("FetchMissingAlbumImages: Successfully fetched missing album image")
+ } else {
+				l.Err(imgErr).
+ Str("name", album.Title).
+ Msg("FetchMissingAlbumImages: Failed to fetch album image")
+ }
+ }
+ }
+}
diff --git a/internal/db/db.go b/internal/db/db.go
index a0f0f80..97badac 100644
--- a/internal/db/db.go
+++ b/internal/db/db.go
@@ -88,6 +88,7 @@ type DB interface {
// in seconds
CountTimeListenedToItem(ctx context.Context, opts TimeListenedOpts) (int64, error)
CountUsers(ctx context.Context) (int64, error)
+
// Search
SearchArtists(ctx context.Context, q string) ([]*models.Artist, error)
@@ -105,6 +106,7 @@ type DB interface {
ImageHasAssociation(ctx context.Context, image uuid.UUID) (bool, error)
GetImageSource(ctx context.Context, image uuid.UUID) (string, error)
AlbumsWithoutImages(ctx context.Context, from int32) ([]*models.Album, error)
+ ArtistsWithoutImages(ctx context.Context, from int32) ([]*models.Artist, error)
GetExportPage(ctx context.Context, opts GetExportPageOpts) ([]*ExportItem, error)
Ping(ctx context.Context) error
Close(ctx context.Context)
diff --git a/internal/db/psql/album.go b/internal/db/psql/album.go
index f4c614c..758c287 100644
--- a/internal/db/psql/album.go
+++ b/internal/db/psql/album.go
@@ -274,6 +274,9 @@ func (d *Psql) UpdateAlbum(ctx context.Context, opts db.UpdateAlbumOpts) error {
}
}
if opts.Image != uuid.Nil {
+ if opts.ImageSrc == "" {
+ return fmt.Errorf("UpdateAlbum: image source must be provided when updating an image")
+ }
l.Debug().Msgf("Updating release with ID %d with image %s", opts.ID, opts.Image)
err := qtx.UpdateReleaseImage(ctx, repository.UpdateReleaseImageParams{
ID: opts.ID,
diff --git a/internal/db/psql/artist.go b/internal/db/psql/artist.go
index 7bb50ec..859a490 100644
--- a/internal/db/psql/artist.go
+++ b/internal/db/psql/artist.go
@@ -210,6 +210,9 @@ func (d *Psql) UpdateArtist(ctx context.Context, opts db.UpdateArtistOpts) error
}
}
if opts.Image != uuid.Nil {
+ if opts.ImageSrc == "" {
+ return fmt.Errorf("UpdateArtist: image source must be provided when updating an image")
+ }
l.Debug().Msgf("Updating artist with id %d with image %s", opts.ID, opts.Image)
err = qtx.UpdateArtistImage(ctx, repository.UpdateArtistImageParams{
ID: opts.ID,
diff --git a/internal/db/psql/images.go b/internal/db/psql/images.go
index 49e2850..eef0d8f 100644
--- a/internal/db/psql/images.go
+++ b/internal/db/psql/images.go
@@ -72,3 +72,26 @@ func (d *Psql) AlbumsWithoutImages(ctx context.Context, from int32) ([]*models.A
}
return albums, nil
}
+
+// returns nil, nil on no results
+func (d *Psql) ArtistsWithoutImages(ctx context.Context, from int32) ([]*models.Artist, error) {
+ rows, err := d.q.GetArtistsWithoutImages(ctx, repository.GetArtistsWithoutImagesParams{
+ Limit: 20,
+ ID: from,
+ })
+ if errors.Is(err, pgx.ErrNoRows) {
+ return nil, nil
+ } else if err != nil {
+ return nil, fmt.Errorf("ArtistsWithoutImages: %w", err)
+ }
+
+ ret := make([]*models.Artist, len(rows))
+ for i, row := range rows {
+ ret[i] = &models.Artist{
+ ID: row.ID,
+ Name: row.Name,
+ MbzID: row.MusicBrainzID,
+ }
+ }
+ return ret, nil
+}
diff --git a/internal/images/deezer.go b/internal/images/deezer.go
index 8fb7b27..2ced676 100644
--- a/internal/images/deezer.go
+++ b/internal/images/deezer.go
@@ -110,6 +110,9 @@ func (c *DeezerClient) getEntity(ctx context.Context, endpoint string, result an
return nil
}
+// Deezer behavior is that it serves a default image when it can't find one for an artist, so
+// this function will just download the default image thinking that it is an actual artist image.
+// I don't know how to fix this yet.
func (c *DeezerClient) GetArtistImages(ctx context.Context, aliases []string) (string, error) {
l := logger.FromContext(ctx)
resp := new(DeezerArtistResponse)
diff --git a/internal/images/imagesrc.go b/internal/images/imagesrc.go
index 21eec65..b49e9dd 100644
--- a/internal/images/imagesrc.go
+++ b/internal/images/imagesrc.go
@@ -5,6 +5,7 @@ import (
"context"
"fmt"
"net/http"
+ "strings"
"sync"
"github.com/gabehf/koito/internal/logger"
@@ -67,19 +68,23 @@ func GetArtistImage(ctx context.Context, opts ArtistImageOpts) (string, error) {
if imgsrc.subsonicEnabled {
img, err := imgsrc.subsonicC.GetArtistImage(ctx, opts.Aliases[0])
if err != nil {
- return "", err
- }
- if img != "" {
+ l.Debug().Err(err).Msg("GetArtistImage: Could not find artist image from Subsonic")
+ } else if img != "" {
return img, nil
}
- l.Debug().Msg("Could not find artist image from Subsonic")
+ } else {
+ l.Debug().Msg("GetArtistImage: Subsonic image fetching is disabled")
}
- if imgsrc.deezerC != nil {
+ if imgsrc.deezerEnabled {
img, err := imgsrc.deezerC.GetArtistImages(ctx, opts.Aliases)
if err != nil {
+ l.Debug().Err(err).Msg("GetArtistImage: Could not find artist image from Deezer")
return "", err
+ } else if img != "" {
+ return img, nil
}
- return img, nil
+ } else {
+ l.Debug().Msg("GetArtistImage: Deezer image fetching is disabled")
}
l.Warn().Msg("GetArtistImage: No image providers are enabled")
return "", nil
@@ -89,7 +94,7 @@ func GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) {
if imgsrc.subsonicEnabled {
img, err := imgsrc.subsonicC.GetAlbumImage(ctx, opts.Artists[0], opts.Album)
if err != nil {
- return "", err
+ l.Debug().Err(err).Msg("GetAlbumImage: Could not find artist image from Subsonic")
}
if img != "" {
return img, nil
@@ -102,29 +107,28 @@ func GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) {
url := fmt.Sprintf(caaBaseUrl+"/release/%s/front", opts.ReleaseMbzID.String())
resp, err := http.DefaultClient.Head(url)
if err != nil {
- return "", err
+ l.Debug().Err(err).Msg("GetAlbumImage: Could not find artist image from CoverArtArchive with Release MBID")
}
if resp.StatusCode == 200 {
return url, nil
}
- l.Debug().Str("url", url).Str("status", resp.Status).Msg("Could not find album cover from CoverArtArchive with MusicBrainz release ID")
}
if opts.ReleaseGroupMbzID != nil && *opts.ReleaseGroupMbzID != uuid.Nil {
url := fmt.Sprintf(caaBaseUrl+"/release-group/%s/front", opts.ReleaseGroupMbzID.String())
resp, err := http.DefaultClient.Head(url)
if err != nil {
- return "", err
+ l.Debug().Err(err).Msg("GetAlbumImage: Could not find album cover from CoverArtArchive with Release Group MBID")
}
if resp.StatusCode == 200 {
return url, nil
}
- l.Debug().Str("url", url).Str("status", resp.Status).Msg("Could not find album cover from CoverArtArchive with MusicBrainz release group ID")
}
}
if imgsrc.deezerEnabled {
l.Debug().Msg("Attempting to find album image from Deezer")
img, err := imgsrc.deezerC.GetAlbumImages(ctx, opts.Artists, opts.Album)
if err != nil {
+ l.Debug().Err(err).Msg("GetAlbumImage: Could not find album image from Deezer")
return "", err
}
return img, nil
@@ -132,3 +136,23 @@ func GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) {
l.Warn().Msg("GetAlbumImage: No image providers are enabled")
return "", nil
}
+
+// ValidateImageURL checks if the URL points to a valid image by performing a HEAD request.
+func ValidateImageURL(url string) error {
+ resp, err := http.Head(url)
+ if err != nil {
+ return fmt.Errorf("ValidateImageURL: http.Head: %w", err)
+ }
+ defer resp.Body.Close()
+
+ if resp.StatusCode != http.StatusOK {
+ return fmt.Errorf("ValidateImageURL: HEAD request failed, status code: %d", resp.StatusCode)
+ }
+
+ contentType := resp.Header.Get("Content-Type")
+ if !strings.HasPrefix(contentType, "image/") {
+ return fmt.Errorf("ValidateImageURL: URL does not point to an image, content type: %s", contentType)
+ }
+
+ return nil
+}
diff --git a/internal/images/subsonic.go b/internal/images/subsonic.go
index 961b4c2..6241b09 100644
--- a/internal/images/subsonic.go
+++ b/internal/images/subsonic.go
@@ -129,9 +129,13 @@ func (c *SubsonicClient) GetArtistImage(ctx context.Context, artist string) (str
if err != nil {
return "", fmt.Errorf("GetArtistImage: %v", err)
}
- l.Debug().Any("subsonic_response", resp).Send()
+ l.Debug().Any("subsonic_response", resp).Msg("")
if len(resp.SubsonicResponse.SearchResult3.Artist) < 1 || resp.SubsonicResponse.SearchResult3.Artist[0].ArtistImageUrl == "" {
return "", fmt.Errorf("GetArtistImage: failed to get artist art")
}
+ // Subsonic seems to have a tendency to return an artist image even though the url is a 404
+ if err = ValidateImageURL(resp.SubsonicResponse.SearchResult3.Artist[0].ArtistImageUrl); err != nil {
+ return "", fmt.Errorf("GetArtistImage: failed to get validate image url")
+ }
return resp.SubsonicResponse.SearchResult3.Artist[0].ArtistImageUrl, nil
}
diff --git a/internal/repository/artist.sql.go b/internal/repository/artist.sql.go
index 96f00f2..8506975 100644
--- a/internal/repository/artist.sql.go
+++ b/internal/repository/artist.sql.go
@@ -254,6 +254,47 @@ func (q *Queries) GetArtistByName(ctx context.Context, alias string) (GetArtistB
return i, err
}
+const getArtistsWithoutImages = `-- name: GetArtistsWithoutImages :many
+SELECT
+ id, musicbrainz_id, image, image_source, name
+FROM artists_with_name
+WHERE image IS NULL
+ AND id > $2
+ORDER BY id ASC
+LIMIT $1
+`
+
+type GetArtistsWithoutImagesParams struct {
+ Limit int32
+ ID int32
+}
+
+func (q *Queries) GetArtistsWithoutImages(ctx context.Context, arg GetArtistsWithoutImagesParams) ([]ArtistsWithName, error) {
+ rows, err := q.db.Query(ctx, getArtistsWithoutImages, arg.Limit, arg.ID)
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ var items []ArtistsWithName
+ for rows.Next() {
+ var i ArtistsWithName
+ if err := rows.Scan(
+ &i.ID,
+ &i.MusicBrainzID,
+ &i.Image,
+ &i.ImageSource,
+ &i.Name,
+ ); err != nil {
+ return nil, err
+ }
+ items = append(items, i)
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
const getReleaseArtists = `-- name: GetReleaseArtists :many
SELECT
a.id, a.musicbrainz_id, a.image, a.image_source, a.name,
From 56ac73d12b4a92d55a3be23099a8ce538ec89346 Mon Sep 17 00:00:00 2001
From: Gabe Farrell <90876006+gabehf@users.noreply.github.com>
Date: Wed, 21 Jan 2026 14:54:52 -0500
Subject: [PATCH 16/31] fix: improve subsonic image searching (#164)
---
internal/catalog/images.go | 5 ++--
internal/images/imagesrc.go | 5 ++--
internal/images/subsonic.go | 55 +++++++++++++++++++++++++++++++------
3 files changed, 53 insertions(+), 12 deletions(-)
diff --git a/internal/catalog/images.go b/internal/catalog/images.go
index 4193a39..72b6efd 100644
--- a/internal/catalog/images.go
+++ b/internal/catalog/images.go
@@ -363,8 +363,9 @@ func FetchMissingAlbumImages(ctx context.Context, store db.DB) error {
var imgid uuid.UUID
imgUrl, imgErr := images.GetAlbumImage(ctx, images.AlbumImageOpts{
- Artists: utils.FlattenSimpleArtistNames(album.Artists),
- Album: album.Title,
+ Artists: utils.FlattenSimpleArtistNames(album.Artists),
+ Album: album.Title,
+ ReleaseMbzID: album.MbzID,
})
if imgErr == nil && imgUrl != "" {
imgid = uuid.New()
diff --git a/internal/images/imagesrc.go b/internal/images/imagesrc.go
index b49e9dd..717b862 100644
--- a/internal/images/imagesrc.go
+++ b/internal/images/imagesrc.go
@@ -31,6 +31,7 @@ var imgsrc ImageSource
type ArtistImageOpts struct {
Aliases []string
+ MBID *uuid.UUID
}
type AlbumImageOpts struct {
@@ -66,7 +67,7 @@ func Shutdown() {
func GetArtistImage(ctx context.Context, opts ArtistImageOpts) (string, error) {
l := logger.FromContext(ctx)
if imgsrc.subsonicEnabled {
- img, err := imgsrc.subsonicC.GetArtistImage(ctx, opts.Aliases[0])
+ img, err := imgsrc.subsonicC.GetArtistImage(ctx, opts.MBID, opts.Aliases[0])
if err != nil {
l.Debug().Err(err).Msg("GetArtistImage: Could not find artist image from Subsonic")
} else if img != "" {
@@ -92,7 +93,7 @@ func GetArtistImage(ctx context.Context, opts ArtistImageOpts) (string, error) {
func GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) {
l := logger.FromContext(ctx)
if imgsrc.subsonicEnabled {
- img, err := imgsrc.subsonicC.GetAlbumImage(ctx, opts.Artists[0], opts.Album)
+ img, err := imgsrc.subsonicC.GetAlbumImage(ctx, opts.ReleaseMbzID, opts.Artists[0], opts.Album)
if err != nil {
l.Debug().Err(err).Msg("GetAlbumImage: Could not find artist image from Subsonic")
}
diff --git a/internal/images/subsonic.go b/internal/images/subsonic.go
index 6241b09..4fd55c0 100644
--- a/internal/images/subsonic.go
+++ b/internal/images/subsonic.go
@@ -11,6 +11,7 @@ import (
"github.com/gabehf/koito/internal/cfg"
"github.com/gabehf/koito/internal/logger"
"github.com/gabehf/koito/queue"
+ "github.com/google/uuid"
)
type SubsonicClient struct {
@@ -26,6 +27,8 @@ type SubsonicAlbumResponse struct {
SearchResult3 struct {
Album []struct {
CoverArt string `json:"coverArt"`
+ Artist string `json:"artist"`
+ MBID string `json:"musicBrainzId"`
} `json:"album"`
} `json:"searchResult3"`
} `json:"subsonic-response"`
@@ -43,7 +46,7 @@ type SubsonicArtistResponse struct {
}
const (
- subsonicAlbumSearchFmtStr = "/rest/search3?%s&f=json&query=%s&v=1.13.0&c=koito&artistCount=0&songCount=0&albumCount=1"
+ subsonicAlbumSearchFmtStr = "/rest/search3?%s&f=json&query=%s&v=1.13.0&c=koito&artistCount=0&songCount=0&albumCount=10"
subsonicArtistSearchFmtStr = "/rest/search3?%s&f=json&query=%s&v=1.13.0&c=koito&artistCount=1&songCount=0&albumCount=0"
subsonicCoverArtFmtStr = "/rest/getCoverArt?%s&id=%s&v=1.13.0&c=koito"
)
@@ -106,25 +109,61 @@ func (c *SubsonicClient) getEntity(ctx context.Context, endpoint string, result
return nil
}
-func (c *SubsonicClient) GetAlbumImage(ctx context.Context, artist, album string) (string, error) {
+func (c *SubsonicClient) GetAlbumImage(ctx context.Context, mbid *uuid.UUID, artist, album string) (string, error) {
l := logger.FromContext(ctx)
resp := new(SubsonicAlbumResponse)
l.Debug().Msgf("Finding album image for %s from artist %s", album, artist)
- err := c.getEntity(ctx, fmt.Sprintf(subsonicAlbumSearchFmtStr, c.authParams, url.QueryEscape(artist+" "+album)), resp)
+ // first try mbid search
+ if mbid != nil {
+ l.Debug().Str("mbid", mbid.String()).Msg("Searching album image by MBID")
+ err := c.getEntity(ctx, fmt.Sprintf(subsonicAlbumSearchFmtStr, c.authParams, url.QueryEscape(mbid.String())), resp)
+ if err != nil {
+ return "", fmt.Errorf("GetAlbumImage: %v", err)
+ }
+ l.Debug().Any("subsonic_response", resp).Msg("")
+ if len(resp.SubsonicResponse.SearchResult3.Album) >= 1 && resp.SubsonicResponse.SearchResult3.Album[0].CoverArt != "" {
+ return cfg.SubsonicUrl() + fmt.Sprintf(subsonicCoverArtFmtStr, c.authParams, url.QueryEscape(resp.SubsonicResponse.SearchResult3.Album[0].CoverArt)), nil
+ }
+ }
+ // else do artist match
+ l.Debug().Str("title", album).Str("artist", artist).Msg("Searching album image by title and artist")
+ err := c.getEntity(ctx, fmt.Sprintf(subsonicAlbumSearchFmtStr, c.authParams, url.QueryEscape(album)), resp)
if err != nil {
return "", fmt.Errorf("GetAlbumImage: %v", err)
}
- l.Debug().Any("subsonic_response", resp).Send()
- if len(resp.SubsonicResponse.SearchResult3.Album) < 1 || resp.SubsonicResponse.SearchResult3.Album[0].CoverArt == "" {
- return "", fmt.Errorf("GetAlbumImage: failed to get album art")
+ l.Debug().Any("subsonic_response", resp).Msg("")
+ if len(resp.SubsonicResponse.SearchResult3.Album) < 1 {
+ return "", fmt.Errorf("GetAlbumImage: failed to get album art from subsonic")
}
- return cfg.SubsonicUrl() + fmt.Sprintf(subsonicCoverArtFmtStr, c.authParams, url.QueryEscape(resp.SubsonicResponse.SearchResult3.Album[0].CoverArt)), nil
+ for _, album := range resp.SubsonicResponse.SearchResult3.Album {
+ if album.Artist == artist {
+ return cfg.SubsonicUrl() + fmt.Sprintf(subsonicCoverArtFmtStr, c.authParams, url.QueryEscape(album.CoverArt)), nil
+ }
+ }
+ return "", fmt.Errorf("GetAlbumImage: failed to get album art from subsonic")
}
-func (c *SubsonicClient) GetArtistImage(ctx context.Context, artist string) (string, error) {
+func (c *SubsonicClient) GetArtistImage(ctx context.Context, mbid *uuid.UUID, artist string) (string, error) {
l := logger.FromContext(ctx)
resp := new(SubsonicArtistResponse)
l.Debug().Msgf("Finding artist image for %s", artist)
+ // first try mbid search
+ if mbid != nil {
+ l.Debug().Str("mbid", mbid.String()).Msg("Searching artist image by MBID")
+ err := c.getEntity(ctx, fmt.Sprintf(subsonicArtistSearchFmtStr, c.authParams, url.QueryEscape(mbid.String())), resp)
+ if err != nil {
+ return "", fmt.Errorf("GetArtistImage: %v", err)
+ }
+ l.Debug().Any("subsonic_response", resp).Msg("")
+ if len(resp.SubsonicResponse.SearchResult3.Artist) >= 1 && resp.SubsonicResponse.SearchResult3.Artist[0].ArtistImageUrl != "" {
+ // Subsonic seems to have a tendency to return an artist image even though the url is a 404
+ if err = ValidateImageURL(resp.SubsonicResponse.SearchResult3.Artist[0].ArtistImageUrl); err == nil {
+ return resp.SubsonicResponse.SearchResult3.Artist[0].ArtistImageUrl, nil
+ }
+ l.Debug().Err(err).Msg("GetArtistImage: MBID search result failed validation, falling back to name search")
+ }
+ }
+ l.Debug().Str("artist", artist).Msg("Searching artist image by name")
err := c.getEntity(ctx, fmt.Sprintf(subsonicArtistSearchFmtStr, c.authParams, url.QueryEscape(artist)), resp)
if err != nil {
return "", fmt.Errorf("GetArtistImage: %v", err)
From e7ba34710cf598c3cf9920a7d0671738f6616c09 Mon Sep 17 00:00:00 2001
From: Gabe Farrell <90876006+gabehf@users.noreply.github.com>
Date: Wed, 21 Jan 2026 16:03:05 -0500
Subject: [PATCH 17/31] feat: lastfm image support (#166)
* feat: lastfm image support
* docs
---
.../content/docs/reference/configuration.md | 3 +
engine/engine.go | 1 +
internal/cfg/cfg.go | 9 +
internal/images/imagesrc.go | 37 ++-
internal/images/lastfm.go | 298 ++++++++++++++++++
5 files changed, 345 insertions(+), 3 deletions(-)
create mode 100644 internal/images/lastfm.go
diff --git a/docs/src/content/docs/reference/configuration.md b/docs/src/content/docs/reference/configuration.md
index 67c4a2b..6eae82b 100644
--- a/docs/src/content/docs/reference/configuration.md
+++ b/docs/src/content/docs/reference/configuration.md
@@ -82,6 +82,9 @@ If the environment variable is defined without **and** with the suffix at the sa
If Koito is unable to validate your Subsonic configuration, it will fail to start. If you notice your container isn't running after
changing these parameters, check the logs!
:::
+##### KOITO_LASTFM_API_KEY
+- Required: `false`
+- Description: Your LastFM API key, which will be used for fetching images if provided. You can get an API key [here](https://www.last.fm/api/authentication).
##### KOITO_SKIP_IMPORT
- Default: `false`
- Description: Skips running the importer on startup.
diff --git a/engine/engine.go b/engine/engine.go
index 9374819..7de9254 100644
--- a/engine/engine.go
+++ b/engine/engine.go
@@ -138,6 +138,7 @@ func Run(
EnableCAA: !cfg.CoverArtArchiveDisabled(),
EnableDeezer: !cfg.DeezerDisabled(),
EnableSubsonic: cfg.SubsonicEnabled(),
+ EnableLastFM: cfg.LastFMApiKey() != "",
})
l.Info().Msg("Engine: Image sources initialized")
diff --git a/internal/cfg/cfg.go b/internal/cfg/cfg.go
index 9e537eb..36478b1 100644
--- a/internal/cfg/cfg.go
+++ b/internal/cfg/cfg.go
@@ -38,6 +38,7 @@ const (
DISABLE_MUSICBRAINZ_ENV = "KOITO_DISABLE_MUSICBRAINZ"
SUBSONIC_URL_ENV = "KOITO_SUBSONIC_URL"
SUBSONIC_PARAMS_ENV = "KOITO_SUBSONIC_PARAMS"
+ LASTFM_API_KEY_ENV = "KOITO_LASTFM_API_KEY"
SKIP_IMPORT_ENV = "KOITO_SKIP_IMPORT"
ALLOWED_HOSTS_ENV = "KOITO_ALLOWED_HOSTS"
CORS_ORIGINS_ENV = "KOITO_CORS_ALLOWED_ORIGINS"
@@ -72,6 +73,7 @@ type config struct {
disableMusicBrainz bool
subsonicUrl string
subsonicParams string
+ lastfmApiKey string
subsonicEnabled bool
skipImport bool
fetchImageDuringImport bool
@@ -165,6 +167,7 @@ func loadConfig(getenv func(string) string, version string) (*config, error) {
if cfg.subsonicEnabled && (cfg.subsonicUrl == "" || cfg.subsonicParams == "") {
return nil, fmt.Errorf("loadConfig: invalid configuration: both %s and %s must be set in order to use subsonic image fetching", SUBSONIC_URL_ENV, SUBSONIC_PARAMS_ENV)
}
+ cfg.lastfmApiKey = getenv(LASTFM_API_KEY_ENV)
cfg.skipImport = parseBool(getenv(SKIP_IMPORT_ENV))
cfg.userAgent = fmt.Sprintf("Koito %s (contact@koito.io)", version)
@@ -361,6 +364,12 @@ func SubsonicParams() string {
return globalConfig.subsonicParams
}
+func LastFMApiKey() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.lastfmApiKey
+}
+
func SkipImport() bool {
lock.RLock()
defer lock.RUnlock()
diff --git a/internal/images/imagesrc.go b/internal/images/imagesrc.go
index 717b862..46fe87a 100644
--- a/internal/images/imagesrc.go
+++ b/internal/images/imagesrc.go
@@ -17,6 +17,8 @@ type ImageSource struct {
deezerC *DeezerClient
subsonicEnabled bool
subsonicC *SubsonicClient
+ lastfmEnabled bool
+ lastfmC *LastFMClient
caaEnabled bool
}
type ImageSourceOpts struct {
@@ -24,6 +26,7 @@ type ImageSourceOpts struct {
EnableCAA bool
EnableDeezer bool
EnableSubsonic bool
+ EnableLastFM bool
}
var once sync.Once
@@ -57,6 +60,10 @@ func Initialize(opts ImageSourceOpts) {
imgsrc.subsonicEnabled = true
imgsrc.subsonicC = NewSubsonicClient()
}
+ if opts.EnableLastFM {
+ imgsrc.lastfmEnabled = true
+ imgsrc.lastfmC = NewLastFMClient()
+ }
})
}
@@ -76,6 +83,16 @@ func GetArtistImage(ctx context.Context, opts ArtistImageOpts) (string, error) {
} else {
l.Debug().Msg("GetArtistImage: Subsonic image fetching is disabled")
}
+ if imgsrc.lastfmEnabled {
+ img, err := imgsrc.lastfmC.GetArtistImage(ctx, opts.MBID, opts.Aliases[0])
+ if err != nil {
+ l.Debug().Err(err).Msg("GetArtistImage: Could not find artist image from LastFM")
+ } else if img != "" {
+ return img, nil
+ }
+ } else {
+ l.Debug().Msg("GetArtistImage: LastFM image fetching is disabled")
+ }
if imgsrc.deezerEnabled {
img, err := imgsrc.deezerC.GetArtistImages(ctx, opts.Aliases)
if err != nil {
@@ -90,6 +107,7 @@ func GetArtistImage(ctx context.Context, opts ArtistImageOpts) (string, error) {
l.Warn().Msg("GetArtistImage: No image providers are enabled")
return "", nil
}
+
func GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) {
l := logger.FromContext(ctx)
if imgsrc.subsonicEnabled {
@@ -109,9 +127,12 @@ func GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) {
resp, err := http.DefaultClient.Head(url)
if err != nil {
l.Debug().Err(err).Msg("GetAlbumImage: Could not find artist image from CoverArtArchive with Release MBID")
- }
- if resp.StatusCode == 200 {
- return url, nil
+ } else {
+ if resp.StatusCode == 200 {
+ return url, nil
+ } else {
+ l.Debug().Int("status", resp.StatusCode).Msg("GetAlbumImage: Got non-OK response from CoverArtArchive")
+ }
}
}
if opts.ReleaseGroupMbzID != nil && *opts.ReleaseGroupMbzID != uuid.Nil {
@@ -125,6 +146,16 @@ func GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) {
}
}
}
+ if imgsrc.lastfmEnabled {
+ img, err := imgsrc.lastfmC.GetAlbumImage(ctx, opts.ReleaseMbzID, opts.Artists[0], opts.Album)
+ if err != nil {
+ l.Debug().Err(err).Msg("GetAlbumImage: Could not find album image from LastFM")
+ }
+ if img != "" {
+ return img, nil
+ }
+ l.Debug().Msg("Could not find album cover from LastFM")
+ }
if imgsrc.deezerEnabled {
l.Debug().Msg("Attempting to find album image from Deezer")
img, err := imgsrc.deezerC.GetAlbumImages(ctx, opts.Artists, opts.Album)
diff --git a/internal/images/lastfm.go b/internal/images/lastfm.go
new file mode 100644
index 0000000..f35f6a3
--- /dev/null
+++ b/internal/images/lastfm.go
@@ -0,0 +1,298 @@
+package images
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "strings"
+
+ "github.com/gabehf/koito/internal/cfg"
+ "github.com/gabehf/koito/internal/logger"
+ "github.com/gabehf/koito/queue"
+ "github.com/google/uuid"
+)
+
+// i told gemini to write this cuz i figured it would be simple enough and
+// it looks like it just works? maybe ai is actually worth one quintillion gallons of water
+
+type LastFMClient struct {
+ apiKey string
+ baseUrl string
+ userAgent string
+ requestQueue *queue.RequestQueue
+}
+
+// LastFM JSON structures use "#text" for the value of XML-mapped fields
+type lastFMImage struct {
+ URL string `json:"#text"`
+ Size string `json:"size"`
+}
+
+type lastFMAlbumResponse struct {
+ Album struct {
+ Name string `json:"name"`
+ Image []lastFMImage `json:"image"`
+ } `json:"album"`
+ Error int `json:"error"`
+ Message string `json:"message"`
+}
+
+type lastFMArtistResponse struct {
+ Artist struct {
+ Name string `json:"name"`
+ Image []lastFMImage `json:"image"`
+ } `json:"artist"`
+ Error int `json:"error"`
+ Message string `json:"message"`
+}
+
+const (
+ lastFMApiBaseUrl = "http://ws.audioscrobbler.com/2.0/"
+)
+
+func NewLastFMClient() *LastFMClient {
+ ret := new(LastFMClient)
+ ret.apiKey = cfg.LastFMApiKey()
+ ret.baseUrl = lastFMApiBaseUrl
+ ret.userAgent = cfg.UserAgent()
+ ret.requestQueue = queue.NewRequestQueue(5, 5)
+ return ret
+}
+
+func (c *LastFMClient) queue(ctx context.Context, req *http.Request) ([]byte, error) {
+ l := logger.FromContext(ctx)
+ req.Header.Set("User-Agent", c.userAgent)
+ req.Header.Set("Accept", "application/json")
+
+ resultChan := c.requestQueue.Enqueue(func(client *http.Client, done chan<- queue.RequestResult) {
+ resp, err := client.Do(req)
+ if err != nil {
+ l.Debug().Err(err).Str("url", req.URL.String()).Msg("Failed to contact LastFM")
+ done <- queue.RequestResult{Err: err}
+ return
+ }
+ defer resp.Body.Close()
+
+ // LastFM might return 200 OK even for API errors (like "Artist not found"),
+ // so we rely on parsing the JSON body for logic errors later,
+ // but we still check for HTTP protocol failures here.
+ if resp.StatusCode >= 500 {
+ err = fmt.Errorf("received server error from LastFM: %s", resp.Status)
+ done <- queue.RequestResult{Body: nil, Err: err}
+ return
+ }
+
+ body, err := io.ReadAll(resp.Body)
+ done <- queue.RequestResult{Body: body, Err: err}
+ })
+
+ result := <-resultChan
+ return result.Body, result.Err
+}
+
+func (c *LastFMClient) getEntity(ctx context.Context, params url.Values, result any) error {
+ l := logger.FromContext(ctx)
+
+ // Add standard parameters
+ params.Set("api_key", c.apiKey)
+ params.Set("format", "json")
+
+ // Construct URL
+ reqUrl, _ := url.Parse(c.baseUrl)
+ reqUrl.RawQuery = params.Encode()
+
+ l.Debug().Msgf("Sending request to LastFM: GET %s", reqUrl.String())
+
+ req, err := http.NewRequest("GET", reqUrl.String(), nil)
+ if err != nil {
+ return fmt.Errorf("getEntity: %w", err)
+ }
+
+ l.Debug().Msg("Adding LastFM request to queue")
+ body, err := c.queue(ctx, req)
+ if err != nil {
+ l.Err(err).Msg("LastFM request failed")
+ return fmt.Errorf("getEntity: %w", err)
+ }
+
+ err = json.Unmarshal(body, result)
+ if err != nil {
+ l.Err(err).Msg("Failed to unmarshal LastFM response")
+ return fmt.Errorf("getEntity: %w", err)
+ }
+
+ return nil
+}
+
+// selectBestImage picks the largest available image from the LastFM slice
+func (c *LastFMClient) selectBestImage(images []lastFMImage) string {
+ // Rank preference: mega > extralarge > large > medium > small
+ // Since LastFM usually returns them in order of size, we could take the last one,
+ // but a map lookup is safer against API changes.
+
+ imgMap := make(map[string]string)
+ for _, img := range images {
+ if img.URL != "" {
+ imgMap[img.Size] = img.URL
+ }
+ }
+
+ if url, ok := imgMap["mega"]; ok {
+ if err := ValidateImageURL(overrideImgSize(url)); err == nil {
+ return overrideImgSize(url)
+ } else {
+ return url
+ }
+ }
+ if url, ok := imgMap["extralarge"]; ok {
+ if err := ValidateImageURL(overrideImgSize(url)); err == nil {
+ return overrideImgSize(url)
+ } else {
+ return url
+ }
+ }
+ if url, ok := imgMap["large"]; ok {
+ if err := ValidateImageURL(overrideImgSize(url)); err == nil {
+ return overrideImgSize(url)
+ } else {
+ return url
+ }
+ }
+ if url, ok := imgMap["medium"]; ok {
+ return url
+ }
+ if url, ok := imgMap["small"]; ok {
+ return url
+ }
+
+ return ""
+}
+
+// lastfm seems to only return a 300x300 image even for "mega" and "extralarge" images, so I'm cheating
+func overrideImgSize(url string) string {
+ return strings.Replace(url, "300x300", "600x600", 1)
+}
+
+func (c *LastFMClient) GetAlbumImage(ctx context.Context, mbid *uuid.UUID, artist, album string) (string, error) {
+ l := logger.FromContext(ctx)
+ resp := new(lastFMAlbumResponse)
+ l.Debug().Msgf("Finding album image for %s from artist %s", album, artist)
+
+ // Helper to run the fetch
+ fetch := func(query paramsBuilder) error {
+ params := url.Values{}
+ params.Set("method", "album.getInfo")
+ query(params)
+ return c.getEntity(ctx, params, resp)
+ }
+
+ // 1. Try MBID search first
+ if mbid != nil {
+ l.Debug().Str("mbid", mbid.String()).Msg("Searching album image by MBID")
+ err := fetch(func(p url.Values) {
+ p.Set("mbid", mbid.String())
+ })
+
+ // If success and no API error code
+ if err == nil && resp.Error == 0 && len(resp.Album.Image) > 0 {
+ best := c.selectBestImage(resp.Album.Image)
+ if best != "" {
+ return best, nil
+ }
+ } else if resp.Error != 0 {
+ l.Debug().Int("api_error", resp.Error).Msg("LastFM MBID lookup failed, falling back to name")
+ }
+ }
+
+ // 2. Fallback to Artist + Album name match
+ l.Debug().Str("title", album).Str("artist", artist).Msg("Searching album image by title and artist")
+
+ // Clear previous response structure just in case
+ resp = new(lastFMAlbumResponse)
+
+ err := fetch(func(p url.Values) {
+ p.Set("artist", artist)
+ p.Set("album", album)
+ // Auto-correct spelling is useful for name lookups
+ p.Set("autocorrect", "1")
+ })
+
+ if err != nil {
+ return "", fmt.Errorf("GetAlbumImage: %v", err)
+ }
+
+ if resp.Error != 0 {
+ return "", fmt.Errorf("GetAlbumImage: LastFM API error %d: %s", resp.Error, resp.Message)
+ }
+
+ best := c.selectBestImage(resp.Album.Image)
+ if best == "" {
+ return "", fmt.Errorf("GetAlbumImage: no suitable image found")
+ }
+
+ return best, nil
+}
+
+func (c *LastFMClient) GetArtistImage(ctx context.Context, mbid *uuid.UUID, artist string) (string, error) {
+ l := logger.FromContext(ctx)
+ resp := new(lastFMArtistResponse)
+ l.Debug().Msgf("Finding artist image for %s", artist)
+
+ fetch := func(query paramsBuilder) error {
+ params := url.Values{}
+ params.Set("method", "artist.getInfo")
+ query(params)
+ return c.getEntity(ctx, params, resp)
+ }
+
+ // 1. Try MBID search
+ if mbid != nil {
+ l.Debug().Str("mbid", mbid.String()).Msg("Searching artist image by MBID")
+ err := fetch(func(p url.Values) {
+ p.Set("mbid", mbid.String())
+ })
+
+ if err == nil && resp.Error == 0 && len(resp.Artist.Image) > 0 {
+ best := c.selectBestImage(resp.Artist.Image)
+ if best != "" {
+ // Validate to match Subsonic implementation behavior
+ if err := ValidateImageURL(best); err == nil {
+ return best, nil
+ }
+ }
+ }
+ }
+
+ // 2. Fallback to Artist name
+ l.Debug().Str("artist", artist).Msg("Searching artist image by name")
+ resp = new(lastFMArtistResponse)
+
+ err := fetch(func(p url.Values) {
+ p.Set("artist", artist)
+ p.Set("autocorrect", "1")
+ })
+
+ if err != nil {
+ return "", fmt.Errorf("GetArtistImage: %v", err)
+ }
+
+ if resp.Error != 0 {
+ return "", fmt.Errorf("GetArtistImage: LastFM API error %d: %s", resp.Error, resp.Message)
+ }
+
+ best := c.selectBestImage(resp.Artist.Image)
+ if best == "" {
+ return "", fmt.Errorf("GetArtistImage: no suitable image found")
+ }
+
+ if err := ValidateImageURL(best); err != nil {
+ return "", fmt.Errorf("GetArtistImage: failed to validate image url")
+ }
+
+ return best, nil
+}
+
+type paramsBuilder func(url.Values)
From c59c6c3baa602e609115a07334d14dc942c7532f Mon Sep 17 00:00:00 2001
From: onespaceman
Date: Wed, 21 Jan 2026 16:03:27 -0500
Subject: [PATCH 18/31] QOL changes to client (#165)
---
client/app/app.css | 31 +++++++-------------
client/app/components/modals/DeleteModal.tsx | 2 +-
client/app/components/modals/MergeModal.tsx | 13 ++++----
client/app/components/modals/Modal.tsx | 30 +++++++++++++++++--
4 files changed, 46 insertions(+), 30 deletions(-)
diff --git a/client/app/app.css b/client/app/app.css
index 217e955..eb5e7f6 100644
--- a/client/app/app.css
+++ b/client/app/app.css
@@ -130,30 +130,21 @@ h4 {
text-decoration: underline;
}
-input[type="text"] {
- border: 1px solid var(--color-bg);
-}
-input[type="text"]:focus {
- outline: none;
- border: 1px solid var(--color-fg-tertiary);
-}
+input[type="text"],
+input[type="password"],
textarea {
border: 1px solid var(--color-bg);
}
+input[type="checkbox"] {
+ height: fit-content;
+}
+input:focus,
+button:focus,
+a:focus,
+select:focus,
textarea:focus {
- outline: none;
- border: 1px solid var(--color-fg-tertiary);
-}
-input[type="password"] {
- border: 1px solid var(--color-bg);
-}
-input[type="password"]:focus {
- outline: none;
- border: 1px solid var(--color-fg-tertiary);
-}
-input[type="checkbox"]:focus {
- outline: none;
- border: 1px solid var(--color-fg-tertiary);
+ border-color: transparent;
+ outline: 2px solid var(--color-fg-tertiary);
}
button:hover {
diff --git a/client/app/components/modals/DeleteModal.tsx b/client/app/components/modals/DeleteModal.tsx
index 06bfdaf..227951e 100644
--- a/client/app/components/modals/DeleteModal.tsx
+++ b/client/app/components/modals/DeleteModal.tsx
@@ -20,7 +20,7 @@ export default function DeleteModal({ open, setOpen, title, id, type }: Props) {
setLoading(true);
deleteItem(type.toLowerCase(), id).then((r) => {
if (r.ok) {
- navigate("/");
+ navigate(-1);
} else {
console.log(r);
}
diff --git a/client/app/components/modals/MergeModal.tsx b/client/app/components/modals/MergeModal.tsx
index 61e2618..c78681d 100644
--- a/client/app/components/modals/MergeModal.tsx
+++ b/client/app/components/modals/MergeModal.tsx
@@ -19,7 +19,7 @@ interface Props {
}
export default function MergeModal(props: Props) {
- const [query, setQuery] = useState("");
+ const [query, setQuery] = useState(props.currentTitle);
const [data, setData] = useState();
const [debouncedQuery, setDebouncedQuery] = useState(query);
const [mergeTarget, setMergeTarget] = useState<{ title: string; id: number }>(
@@ -101,11 +101,12 @@ export default function MergeModal(props: Props) {
{ setQuery(e.target.value); e.target.select()}}
onChange={(e) => setQuery(e.target.value)}
/>
@@ -128,7 +129,7 @@ export default function MergeModal(props: Props) {
>
Merge Items
-
+
{(props.type.toLowerCase() === "album" ||
props.type.toLowerCase() === "artist") && (
-
+
{
const handleKeyDown = (e: KeyboardEvent) => {
- if (e.key === 'Escape') onClose();
+ // Close on Escape key
+ if (e.key === 'Escape') {
+ onClose()
+ // Trap tab navigation to the modal
+ } else if (e.key === 'Tab') {
+ if (modalRef.current) {
+ const focusableEls = modalRef.current.querySelectorAll(
+ 'button:not(:disabled), [href], input:not(:disabled), select:not(:disabled), textarea:not(:disabled), [tabindex]:not([tabindex="-1"])'
+ );
+ const firstEl = focusableEls[0];
+ const lastEl = focusableEls[focusableEls.length - 1];
+ const activeEl = document.activeElement
+
+ if (e.shiftKey && activeEl === firstEl) {
+ e.preventDefault();
+ lastEl.focus();
+ } else if (!e.shiftKey && activeEl === lastEl) {
+ e.preventDefault();
+ firstEl.focus();
+ } else if (!Array.from(focusableEls).find(node => node.isEqualNode(activeEl))) {
+ e.preventDefault();
+ firstEl.focus();
+ }
+ }
+ };
};
if (isOpen) document.addEventListener('keydown', handleKeyDown);
return () => document.removeEventListener('keydown', handleKeyDown);
@@ -70,13 +94,13 @@ export function Modal({
}`}
style={{ maxWidth: maxW ?? 600, height: h ?? '' }}
>
+ {children}
- {children}
,
document.body
From 16cee8cfcababb84e93ba4793fb57e53993a942b Mon Sep 17 00:00:00 2001
From: Gabe Farrell <90876006+gabehf@users.noreply.github.com>
Date: Wed, 21 Jan 2026 17:30:59 -0500
Subject: [PATCH 19/31] fix: speedup top-artists and top-albums queries (#167)
---
db/queries/release.sql | 8 +--
db/queries/track.sql | 110 +++++++++++++----------------
internal/repository/release.sql.go | 12 ++--
internal/repository/track.sql.go | 110 +++++++++++++----------------
4 files changed, 112 insertions(+), 128 deletions(-)
diff --git a/db/queries/release.sql b/db/queries/release.sql
index 47aac86..23bd2f2 100644
--- a/db/queries/release.sql
+++ b/db/queries/release.sql
@@ -48,12 +48,12 @@ WHERE r.title = ANY ($1::TEXT[])
-- name: GetTopReleasesFromArtist :many
SELECT
x.*,
+ get_artists_for_release(x.id) AS artists,
RANK() OVER (ORDER BY x.listen_count DESC) AS rank
FROM (
SELECT
r.*,
- COUNT(*) AS listen_count,
- get_artists_for_release(r.id) AS artists
+ COUNT(*) AS listen_count
FROM listens l
JOIN tracks t ON l.track_id = t.id
JOIN releases_with_title r ON t.release_id = r.id
@@ -68,12 +68,12 @@ LIMIT $3 OFFSET $4;
-- name: GetTopReleasesPaginated :many
SELECT
x.*,
+ get_artists_for_release(x.id) AS artists,
RANK() OVER (ORDER BY x.listen_count DESC) AS rank
FROM (
SELECT
r.*,
- COUNT(*) AS listen_count,
- get_artists_for_release(r.id) AS artists
+ COUNT(*) AS listen_count
FROM listens l
JOIN tracks t ON l.track_id = t.id
JOIN releases_with_title r ON t.release_id = r.id
diff --git a/db/queries/track.sql b/db/queries/track.sql
index c69bed5..3be4d7e 100644
--- a/db/queries/track.sql
+++ b/db/queries/track.sql
@@ -39,90 +39,82 @@ HAVING COUNT(DISTINCT at.artist_id) = cardinality($3::int[]);
-- name: GetTopTracksPaginated :many
SELECT
- x.id,
- x.title,
- x.musicbrainz_id,
- x.release_id,
- x.image,
+ x.track_id AS id,
+ t.title,
+ t.musicbrainz_id,
+ t.release_id,
+ r.image,
x.listen_count,
- x.artists,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
+ get_artists_for_track(x.track_id) AS artists,
+ x.rank
FROM (
SELECT
- t.id,
- t.title,
- t.musicbrainz_id,
- t.release_id,
- r.image,
+ track_id,
COUNT(*) AS listen_count,
- get_artists_for_track(t.id) AS artists
- FROM listens l
- JOIN tracks_with_title t ON l.track_id = t.id
- JOIN releases r ON t.release_id = r.id
- WHERE l.listened_at BETWEEN $1 AND $2
- GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image
+ RANK() OVER (ORDER BY COUNT(*) DESC) as rank
+ FROM listens
+ WHERE listened_at BETWEEN $1 AND $2
+ GROUP BY track_id
+ ORDER BY listen_count DESC
+ LIMIT $3 OFFSET $4
) x
-ORDER BY x.listen_count DESC, x.id
-LIMIT $3 OFFSET $4;
+JOIN tracks_with_title t ON x.track_id = t.id
+JOIN releases r ON t.release_id = r.id
+ORDER BY x.listen_count DESC, x.track_id;
-- name: GetTopTracksByArtistPaginated :many
SELECT
- x.id,
- x.title,
- x.musicbrainz_id,
- x.release_id,
- x.image,
+ x.track_id AS id,
+ t.title,
+ t.musicbrainz_id,
+ t.release_id,
+ r.image,
x.listen_count,
- x.artists,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
+ get_artists_for_track(x.track_id) AS artists,
+ x.rank
FROM (
SELECT
- t.id,
- t.title,
- t.musicbrainz_id,
- t.release_id,
- r.image,
+ l.track_id,
COUNT(*) AS listen_count,
- get_artists_for_track(t.id) AS artists
+ RANK() OVER (ORDER BY COUNT(*) DESC) as rank
FROM listens l
- JOIN tracks_with_title t ON l.track_id = t.id
- JOIN releases r ON t.release_id = r.id
- JOIN artist_tracks at ON at.track_id = t.id
+ JOIN artist_tracks at ON l.track_id = at.track_id
WHERE l.listened_at BETWEEN $1 AND $2
- AND at.artist_id = $5
- GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image
+ AND at.artist_id = $5
+ GROUP BY l.track_id
+ ORDER BY listen_count DESC
+ LIMIT $3 OFFSET $4
) x
-ORDER BY x.listen_count DESC, x.id
-LIMIT $3 OFFSET $4;
+JOIN tracks_with_title t ON x.track_id = t.id
+JOIN releases r ON t.release_id = r.id
+ORDER BY x.listen_count DESC, x.track_id;
-- name: GetTopTracksInReleasePaginated :many
SELECT
- x.id,
- x.title,
- x.musicbrainz_id,
- x.release_id,
- x.image,
+ x.track_id AS id,
+ t.title,
+ t.musicbrainz_id,
+ t.release_id,
+ r.image,
x.listen_count,
- x.artists,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
+ get_artists_for_track(x.track_id) AS artists,
+ x.rank
FROM (
SELECT
- t.id,
- t.title,
- t.musicbrainz_id,
- t.release_id,
- r.image,
+ l.track_id,
COUNT(*) AS listen_count,
- get_artists_for_track(t.id) AS artists
+ RANK() OVER (ORDER BY COUNT(*) DESC) as rank
FROM listens l
- JOIN tracks_with_title t ON l.track_id = t.id
- JOIN releases r ON t.release_id = r.id
+ JOIN tracks t ON l.track_id = t.id
WHERE l.listened_at BETWEEN $1 AND $2
- AND t.release_id = $5
- GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image
+ AND t.release_id = $5
+ GROUP BY l.track_id
+ ORDER BY listen_count DESC
+ LIMIT $3 OFFSET $4
) x
-ORDER BY x.listen_count DESC, x.id
-LIMIT $3 OFFSET $4;
+JOIN tracks_with_title t ON x.track_id = t.id
+JOIN releases r ON t.release_id = r.id
+ORDER BY x.listen_count DESC, x.track_id;
-- name: GetTrackAllTimeRank :one
SELECT
diff --git a/internal/repository/release.sql.go b/internal/repository/release.sql.go
index 6d12da4..f62e086 100644
--- a/internal/repository/release.sql.go
+++ b/internal/repository/release.sql.go
@@ -353,13 +353,13 @@ func (q *Queries) GetReleasesWithoutImages(ctx context.Context, arg GetReleasesW
const getTopReleasesFromArtist = `-- name: GetTopReleasesFromArtist :many
SELECT
- x.id, x.musicbrainz_id, x.image, x.various_artists, x.image_source, x.title, x.listen_count, x.artists,
+ x.id, x.musicbrainz_id, x.image, x.various_artists, x.image_source, x.title, x.listen_count,
+ get_artists_for_release(x.id) AS artists,
RANK() OVER (ORDER BY x.listen_count DESC) AS rank
FROM (
SELECT
r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title,
- COUNT(*) AS listen_count,
- get_artists_for_release(r.id) AS artists
+ COUNT(*) AS listen_count
FROM listens l
JOIN tracks t ON l.track_id = t.id
JOIN releases_with_title r ON t.release_id = r.id
@@ -430,13 +430,13 @@ func (q *Queries) GetTopReleasesFromArtist(ctx context.Context, arg GetTopReleas
const getTopReleasesPaginated = `-- name: GetTopReleasesPaginated :many
SELECT
- x.id, x.musicbrainz_id, x.image, x.various_artists, x.image_source, x.title, x.listen_count, x.artists,
+ x.id, x.musicbrainz_id, x.image, x.various_artists, x.image_source, x.title, x.listen_count,
+ get_artists_for_release(x.id) AS artists,
RANK() OVER (ORDER BY x.listen_count DESC) AS rank
FROM (
SELECT
r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title,
- COUNT(*) AS listen_count,
- get_artists_for_release(r.id) AS artists
+ COUNT(*) AS listen_count
FROM listens l
JOIN tracks t ON l.track_id = t.id
JOIN releases_with_title r ON t.release_id = r.id
diff --git a/internal/repository/track.sql.go b/internal/repository/track.sql.go
index e2aa084..b376198 100644
--- a/internal/repository/track.sql.go
+++ b/internal/repository/track.sql.go
@@ -155,33 +155,30 @@ func (q *Queries) GetAllTracksFromArtist(ctx context.Context, artistID int32) ([
const getTopTracksByArtistPaginated = `-- name: GetTopTracksByArtistPaginated :many
SELECT
- x.id,
- x.title,
- x.musicbrainz_id,
- x.release_id,
- x.image,
+ x.track_id AS id,
+ t.title,
+ t.musicbrainz_id,
+ t.release_id,
+ r.image,
x.listen_count,
- x.artists,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
+ get_artists_for_track(x.track_id) AS artists,
+ x.rank
FROM (
SELECT
- t.id,
- t.title,
- t.musicbrainz_id,
- t.release_id,
- r.image,
+ l.track_id,
COUNT(*) AS listen_count,
- get_artists_for_track(t.id) AS artists
+ RANK() OVER (ORDER BY COUNT(*) DESC) as rank
FROM listens l
- JOIN tracks_with_title t ON l.track_id = t.id
- JOIN releases r ON t.release_id = r.id
- JOIN artist_tracks at ON at.track_id = t.id
+ JOIN artist_tracks at ON l.track_id = at.track_id
WHERE l.listened_at BETWEEN $1 AND $2
- AND at.artist_id = $5
- GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image
+ AND at.artist_id = $5
+ GROUP BY l.track_id
+ ORDER BY listen_count DESC
+ LIMIT $3 OFFSET $4
) x
-ORDER BY x.listen_count DESC, x.id
-LIMIT $3 OFFSET $4
+JOIN tracks_with_title t ON x.track_id = t.id
+JOIN releases r ON t.release_id = r.id
+ORDER BY x.listen_count DESC, x.track_id
`
type GetTopTracksByArtistPaginatedParams struct {
@@ -240,32 +237,30 @@ func (q *Queries) GetTopTracksByArtistPaginated(ctx context.Context, arg GetTopT
const getTopTracksInReleasePaginated = `-- name: GetTopTracksInReleasePaginated :many
SELECT
- x.id,
- x.title,
- x.musicbrainz_id,
- x.release_id,
- x.image,
+ x.track_id AS id,
+ t.title,
+ t.musicbrainz_id,
+ t.release_id,
+ r.image,
x.listen_count,
- x.artists,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
+ get_artists_for_track(x.track_id) AS artists,
+ x.rank
FROM (
SELECT
- t.id,
- t.title,
- t.musicbrainz_id,
- t.release_id,
- r.image,
+ l.track_id,
COUNT(*) AS listen_count,
- get_artists_for_track(t.id) AS artists
+ RANK() OVER (ORDER BY COUNT(*) DESC) as rank
FROM listens l
- JOIN tracks_with_title t ON l.track_id = t.id
- JOIN releases r ON t.release_id = r.id
+ JOIN tracks t ON l.track_id = t.id
WHERE l.listened_at BETWEEN $1 AND $2
- AND t.release_id = $5
- GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image
+ AND t.release_id = $5
+ GROUP BY l.track_id
+ ORDER BY listen_count DESC
+ LIMIT $3 OFFSET $4
) x
-ORDER BY x.listen_count DESC, x.id
-LIMIT $3 OFFSET $4
+JOIN tracks_with_title t ON x.track_id = t.id
+JOIN releases r ON t.release_id = r.id
+ORDER BY x.listen_count DESC, x.track_id
`
type GetTopTracksInReleasePaginatedParams struct {
@@ -324,31 +319,28 @@ func (q *Queries) GetTopTracksInReleasePaginated(ctx context.Context, arg GetTop
const getTopTracksPaginated = `-- name: GetTopTracksPaginated :many
SELECT
- x.id,
- x.title,
- x.musicbrainz_id,
- x.release_id,
- x.image,
+ x.track_id AS id,
+ t.title,
+ t.musicbrainz_id,
+ t.release_id,
+ r.image,
x.listen_count,
- x.artists,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
+ get_artists_for_track(x.track_id) AS artists,
+ x.rank
FROM (
SELECT
- t.id,
- t.title,
- t.musicbrainz_id,
- t.release_id,
- r.image,
+ track_id,
COUNT(*) AS listen_count,
- get_artists_for_track(t.id) AS artists
- FROM listens l
- JOIN tracks_with_title t ON l.track_id = t.id
- JOIN releases r ON t.release_id = r.id
- WHERE l.listened_at BETWEEN $1 AND $2
- GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image
+ RANK() OVER (ORDER BY COUNT(*) DESC) as rank
+ FROM listens
+ WHERE listened_at BETWEEN $1 AND $2
+ GROUP BY track_id
+ ORDER BY listen_count DESC
+ LIMIT $3 OFFSET $4
) x
-ORDER BY x.listen_count DESC, x.id
-LIMIT $3 OFFSET $4
+JOIN tracks_with_title t ON x.track_id = t.id
+JOIN releases r ON t.release_id = r.id
+ORDER BY x.listen_count DESC, x.track_id
`
type GetTopTracksPaginatedParams struct {
From cb4d17787597f5a3826adb8c37cff8c836ebdfda Mon Sep 17 00:00:00 2001
From: Gabe Farrell <90876006+gabehf@users.noreply.github.com>
Date: Thu, 22 Jan 2026 15:33:38 -0500
Subject: [PATCH 20/31] fix: release associations and add cleanup migration
(#168)
* fix: release associations and add cleanup migration
* fix: incorrect test
---
.../000005_rm_orphan_artist_releases.sql | 9 ++++++
db/queries/etc.sql | 10 +++++--
engine/import_test.go | 7 +++++
internal/db/psql/merge.go | 2 +-
internal/db/psql/merge_test.go | 22 +++++++-------
internal/db/psql/track.go | 30 ++++++++++++++++++-
internal/db/psql/track_test.go | 26 +++++++++++++++-
internal/repository/etc.sql.go | 10 +++++--
test_assets/koito_export_test.json | 16 +++++-----
9 files changed, 106 insertions(+), 26 deletions(-)
create mode 100644 db/migrations/000005_rm_orphan_artist_releases.sql
diff --git a/db/migrations/000005_rm_orphan_artist_releases.sql b/db/migrations/000005_rm_orphan_artist_releases.sql
new file mode 100644
index 0000000..bfb361f
--- /dev/null
+++ b/db/migrations/000005_rm_orphan_artist_releases.sql
@@ -0,0 +1,9 @@
+-- +goose Up
+DELETE FROM artist_releases ar
+WHERE NOT EXISTS (
+ SELECT 1
+ FROM artist_tracks at
+ JOIN tracks t ON at.track_id = t.id
+ WHERE at.artist_id = ar.artist_id
+ AND t.release_id = ar.release_id
+);
diff --git a/db/queries/etc.sql b/db/queries/etc.sql
index 44139b8..38465f2 100644
--- a/db/queries/etc.sql
+++ b/db/queries/etc.sql
@@ -3,7 +3,13 @@ DO $$
BEGIN
DELETE FROM tracks WHERE id NOT IN (SELECT l.track_id FROM listens l);
DELETE FROM releases WHERE id NOT IN (SELECT t.release_id FROM tracks t);
--- DELETE FROM releases WHERE release_group_id NOT IN (SELECT t.release_group_id FROM tracks t);
--- DELETE FROM releases WHERE release_group_id NOT IN (SELECT rg.id FROM release_groups rg);
DELETE FROM artists WHERE id NOT IN (SELECT at.artist_id FROM artist_tracks at);
+ DELETE FROM artist_releases ar
+ WHERE NOT EXISTS (
+ SELECT 1
+ FROM artist_tracks at
+ JOIN tracks t ON at.track_id = t.id
+ WHERE at.artist_id = ar.artist_id
+ AND t.release_id = ar.release_id
+ );
END $$;
diff --git a/engine/import_test.go b/engine/import_test.go
index 2a802aa..bb5c18e 100644
--- a/engine/import_test.go
+++ b/engine/import_test.go
@@ -276,6 +276,7 @@ func TestImportKoito(t *testing.T) {
giriReleaseMBID := uuid.MustParse("ac1f8da0-21d7-426e-83b0-befff06f0871")
suzukiMBID := uuid.MustParse("30f851bb-dba3-4e9b-811c-5f27f595c86a")
nijinoTrackMBID := uuid.MustParse("a4f26836-3894-46c1-acac-227808308687")
+ lp3MBID := uuid.MustParse("d0ec30bd-7cdc-417c-979d-5a0631b8a161")
input, err := os.ReadFile(src)
require.NoError(t, err)
@@ -312,6 +313,12 @@ func TestImportKoito(t *testing.T) {
aliases, err := store.GetAllAlbumAliases(ctx, album.ID)
require.NoError(t, err)
assert.Contains(t, utils.FlattenAliases(aliases), "Nijinoiroyo Azayakadeare (NELKE ver.)")
+ // ensure album associations are saved
+ album, err = store.GetAlbum(ctx, db.GetAlbumOpts{MusicBrainzID: lp3MBID})
+ require.NoError(t, err)
+ assert.Contains(t, utils.FlattenSimpleArtistNames(album.Artists), "Elizabeth Powell")
+ assert.Contains(t, utils.FlattenSimpleArtistNames(album.Artists), "Rachel Goswell")
+ assert.Contains(t, utils.FlattenSimpleArtistNames(album.Artists), "American Football")
// ensure all tracks are saved
track, err := store.GetTrack(ctx, db.GetTrackOpts{MusicBrainzID: nijinoTrackMBID})
diff --git a/internal/db/psql/merge.go b/internal/db/psql/merge.go
index d9e24b6..dd375c5 100644
--- a/internal/db/psql/merge.go
+++ b/internal/db/psql/merge.go
@@ -52,7 +52,7 @@ func (d *Psql) MergeTracks(ctx context.Context, fromId, toId int32) error {
}
err = qtx.CleanOrphanedEntries(ctx)
if err != nil {
- l.Err(err).Msg("Failed to clean orphaned entries")
+ l.Err(err).Msg("MergeTracks: Failed to clean orphaned entries")
return err
}
return tx.Commit(ctx)
diff --git a/internal/db/psql/merge_test.go b/internal/db/psql/merge_test.go
index 08169fb..38e843a 100644
--- a/internal/db/psql/merge_test.go
+++ b/internal/db/psql/merge_test.go
@@ -12,27 +12,27 @@ func setupTestDataForMerge(t *testing.T) {
truncateTestData(t)
// Insert artists
err := store.Exec(context.Background(),
- `INSERT INTO artists (musicbrainz_id, image, image_source)
+ `INSERT INTO artists (musicbrainz_id, image, image_source)
VALUES ('00000000-0000-0000-0000-000000000001', '10000000-0000-0000-0000-000000000000', 'source.com'),
('00000000-0000-0000-0000-000000000002', NULL, NULL)`)
require.NoError(t, err)
err = store.Exec(context.Background(),
- `INSERT INTO artist_aliases (artist_id, alias, source, is_primary)
+ `INSERT INTO artist_aliases (artist_id, alias, source, is_primary)
VALUES (1, 'Artist One', 'Testing', true),
(2, 'Artist Two', 'Testing', true)`)
require.NoError(t, err)
// Insert albums
err = store.Exec(context.Background(),
- `INSERT INTO releases (musicbrainz_id, image, image_source)
+ `INSERT INTO releases (musicbrainz_id, image, image_source)
VALUES ('11111111-1111-1111-1111-111111111111', '20000000-0000-0000-0000-000000000000', 'source.com'),
('22222222-2222-2222-2222-222222222222', NULL, NULL),
(NULL, NULL, NULL)`)
require.NoError(t, err)
err = store.Exec(context.Background(),
- `INSERT INTO release_aliases (release_id, alias, source, is_primary)
+ `INSERT INTO release_aliases (release_id, alias, source, is_primary)
VALUES (1, 'Album One', 'Testing', true),
(2, 'Album Two', 'Testing', true),
(3, 'Album Three', 'Testing', true)`)
@@ -40,7 +40,7 @@ func setupTestDataForMerge(t *testing.T) {
// Insert tracks
err = store.Exec(context.Background(),
- `INSERT INTO tracks (musicbrainz_id, release_id)
+ `INSERT INTO tracks (musicbrainz_id, release_id)
VALUES ('33333333-3333-3333-3333-333333333333', 1),
('44444444-4444-4444-4444-444444444444', 2),
('55555555-5555-5555-5555-555555555555', 1),
@@ -48,7 +48,7 @@ func setupTestDataForMerge(t *testing.T) {
require.NoError(t, err)
err = store.Exec(context.Background(),
- `INSERT INTO track_aliases (track_id, alias, source, is_primary)
+ `INSERT INTO track_aliases (track_id, alias, source, is_primary)
VALUES (1, 'Track One', 'Testing', true),
(2, 'Track Two', 'Testing', true),
(3, 'Track Three', 'Testing', true),
@@ -57,18 +57,18 @@ func setupTestDataForMerge(t *testing.T) {
// Associate artists with albums and tracks
err = store.Exec(context.Background(),
- `INSERT INTO artist_releases (artist_id, release_id)
+ `INSERT INTO artist_releases (artist_id, release_id)
VALUES (1, 1), (2, 2), (1, 3)`)
require.NoError(t, err)
err = store.Exec(context.Background(),
- `INSERT INTO artist_tracks (artist_id, track_id)
+ `INSERT INTO artist_tracks (artist_id, track_id)
VALUES (1, 1), (2, 2), (1, 3), (1, 4)`)
require.NoError(t, err)
// Insert listens
err = store.Exec(context.Background(),
- `INSERT INTO listens (user_id, track_id, listened_at)
+ `INSERT INTO listens (user_id, track_id, listened_at)
VALUES (1, 1, NOW() - INTERVAL '1 day'),
(1, 2, NOW() - INTERVAL '2 days'),
(1, 3, NOW() - INTERVAL '3 days'),
@@ -90,14 +90,14 @@ func TestMergeTracks(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, 2, count, "expected all listens to be merged into Track 2")
- // Verify artist is associated with album
+ // Verify old artist is not associated with album
exists, err := store.RowExists(ctx, `
SELECT EXISTS (
SELECT 1 FROM artist_releases
WHERE release_id = $1 AND artist_id = $2
)`, 2, 1)
require.NoError(t, err)
- assert.True(t, exists, "expected old artist to be associated with album")
+ assert.False(t, exists)
truncateTestData(t)
}
diff --git a/internal/db/psql/track.go b/internal/db/psql/track.go
index 743a20e..d4cc616 100644
--- a/internal/db/psql/track.go
+++ b/internal/db/psql/track.go
@@ -137,6 +137,13 @@ func (d *Psql) SaveTrack(ctx context.Context, opts db.SaveTrackOpts) (*models.Tr
if err != nil {
return nil, fmt.Errorf("SaveTrack: AssociateArtistToTrack: %w", err)
}
+ err = qtx.AssociateArtistToRelease(ctx, repository.AssociateArtistToReleaseParams{
+ ArtistID: aid,
+ ReleaseID: trackRow.ReleaseID,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("SaveTrack: AssociateArtistToRelease: %w", err)
+ }
}
// insert primary alias
err = qtx.InsertTrackAlias(ctx, repository.InsertTrackAliasParams{
@@ -233,7 +240,28 @@ func (d *Psql) SaveTrackAliases(ctx context.Context, id int32, aliases []string,
}
func (d *Psql) DeleteTrack(ctx context.Context, id int32) error {
- return d.q.DeleteTrack(ctx, id)
+ l := logger.FromContext(ctx)
+ tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
+ if err != nil {
+ l.Err(err).Msg("Failed to begin transaction")
+ return fmt.Errorf("DeleteTrack: %w", err)
+ }
+ defer tx.Rollback(ctx)
+ qtx := d.q.WithTx(tx)
+
+ err = qtx.DeleteTrack(ctx, id)
+ if err != nil {
+ return fmt.Errorf("DeleteTrack: DeleteTrack: %w", err)
+ }
+
+ // also clean orphaned entries to ensure artists are disassociated with releases where
+ // they no longer have any tracks on the release
+ err = qtx.CleanOrphanedEntries(ctx)
+ if err != nil {
+ return fmt.Errorf("DeleteTrack: CleanOrphanedEntries: %w", err)
+ }
+
+ return tx.Commit(ctx)
}
func (d *Psql) DeleteTrackAlias(ctx context.Context, id int32, alias string) error {
diff --git a/internal/db/psql/track_test.go b/internal/db/psql/track_test.go
index 7fa58d4..f0ecd09 100644
--- a/internal/db/psql/track_test.go
+++ b/internal/db/psql/track_test.go
@@ -62,7 +62,7 @@ func testDataForTracks(t *testing.T) {
VALUES (1, 1), (2, 2)`)
require.NoError(t, err)
- // Associate tracks with artists
+ // Insert listens
err = store.Exec(context.Background(),
`INSERT INTO listens (user_id, track_id, listened_at)
VALUES (1, 1, NOW()), (1, 2, NOW())`)
@@ -228,3 +228,27 @@ func TestDeleteTrack(t *testing.T) {
_, err = store.Count(ctx, `SELECT * FROM tracks WHERE id = 2`)
require.ErrorIs(t, err, pgx.ErrNoRows) // no rows error
}
+
+func TestReleaseAssociations(t *testing.T) {
+ testDataForTracks(t)
+ ctx := context.Background()
+
+ track, err := store.SaveTrack(ctx, db.SaveTrackOpts{
+ Title: "Track Three",
+ AlbumID: 2,
+ ArtistIDs: []int32{2, 1}, // Artist Two feat. Artist One
+ Duration: 100,
+ })
+ require.NoError(t, err)
+ count, err := store.Count(ctx, `SELECT COUNT(*) FROM artist_releases WHERE release_id = 2`)
+ require.NoError(t, err)
+ require.Equal(t, 2, count, "expected release to be associated with artist from inserted track")
+
+ err = store.DeleteTrack(ctx, track.ID)
+ require.NoError(t, err)
+
+ count, err = store.Count(ctx, `SELECT COUNT(*) FROM artist_releases WHERE release_id = 2`)
+ require.NoError(t, err)
+ require.Equal(t, 1, count, "expected artist no longer on release to be disassociated from release")
+
+}
diff --git a/internal/repository/etc.sql.go b/internal/repository/etc.sql.go
index ed902ea..484f5c4 100644
--- a/internal/repository/etc.sql.go
+++ b/internal/repository/etc.sql.go
@@ -15,11 +15,17 @@ BEGIN
DELETE FROM tracks WHERE id NOT IN (SELECT l.track_id FROM listens l);
DELETE FROM releases WHERE id NOT IN (SELECT t.release_id FROM tracks t);
DELETE FROM artists WHERE id NOT IN (SELECT at.artist_id FROM artist_tracks at);
+ DELETE FROM artist_releases ar
+ WHERE NOT EXISTS (
+ SELECT 1
+ FROM artist_tracks at
+ JOIN tracks t ON at.track_id = t.id
+ WHERE at.artist_id = ar.artist_id
+ AND t.release_id = ar.release_id
+ );
END $$
`
-// DELETE FROM releases WHERE release_group_id NOT IN (SELECT t.release_group_id FROM tracks t);
-// DELETE FROM releases WHERE release_group_id NOT IN (SELECT rg.id FROM release_groups rg);
func (q *Queries) CleanOrphanedEntries(ctx context.Context) error {
_, err := q.db.Exec(ctx, cleanOrphanedEntries)
return err
diff --git a/test_assets/koito_export_test.json b/test_assets/koito_export_test.json
index b7ce463..e2cd8ea 100644
--- a/test_assets/koito_export_test.json
+++ b/test_assets/koito_export_test.json
@@ -18,7 +18,7 @@
},
"album": {
"image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg",
- "mbid": null,
+ "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161",
"aliases": [
{
"alias": "American Football (LP3)",
@@ -70,7 +70,7 @@
},
"album": {
"image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg",
- "mbid": null,
+ "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161",
"aliases": [
{
"alias": "American Football (LP3)",
@@ -122,7 +122,7 @@
},
"album": {
"image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg",
- "mbid": null,
+ "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161",
"aliases": [
{
"alias": "American Football (LP3)",
@@ -174,7 +174,7 @@
},
"album": {
"image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg",
- "mbid": null,
+ "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161",
"aliases": [
{
"alias": "American Football (LP3)",
@@ -226,7 +226,7 @@
},
"album": {
"image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg",
- "mbid": null,
+ "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161",
"aliases": [
{
"alias": "American Football (LP3)",
@@ -278,7 +278,7 @@
},
"album": {
"image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg",
- "mbid": null,
+ "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161",
"aliases": [
{
"alias": "American Football (LP3)",
@@ -330,7 +330,7 @@
},
"album": {
"image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg",
- "mbid": null,
+ "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161",
"aliases": [
{
"alias": "American Football (LP3)",
@@ -703,4 +703,4 @@
]
}
]
-}
\ No newline at end of file
+}
From 08fc9eed86d657bab524c4a52cb8b4569df172b4 Mon Sep 17 00:00:00 2001
From: Gabe Farrell <90876006+gabehf@users.noreply.github.com>
Date: Thu, 22 Jan 2026 17:01:46 -0500
Subject: [PATCH 21/31] fix: correct interest bucket queries (#169)
---
db/queries/interest.sql | 211 ++++++++++++---------------
internal/db/psql/interest.go | 26 ++--
internal/repository/interest.sql.go | 217 +++++++++++++---------------
3 files changed, 204 insertions(+), 250 deletions(-)
diff --git a/db/queries/interest.sql b/db/queries/interest.sql
index 389c75b..874f4cd 100644
--- a/db/queries/interest.sql
+++ b/db/queries/interest.sql
@@ -1,162 +1,139 @@
-- name: GetGroupedListensFromArtist :many
-WITH artist_listens AS (
+WITH bounds AS (
SELECT
- l.listened_at
+ MIN(l.listened_at) AS start_time,
+ NOW() AS end_time
FROM listens l
JOIN tracks t ON t.id = l.track_id
JOIN artist_tracks at ON at.track_id = t.id
WHERE at.artist_id = $1
),
-bounds AS (
+stats AS (
SELECT
- MIN(listened_at) AS start_time,
- MAX(listened_at) AS end_time
- FROM artist_listens
+ start_time,
+ end_time,
+ EXTRACT(EPOCH FROM (end_time - start_time)) AS total_seconds,
+ ((end_time - start_time) / sqlc.arg(bucket_count)::int) AS bucket_interval
+ FROM bounds
),
-bucketed AS (
+bucket_series AS (
+ SELECT generate_series(0, sqlc.arg(bucket_count)::int - 1) AS idx
+),
+listen_indices AS (
SELECT
LEAST(
- sqlc.arg(bucket_count) - 1,
+ sqlc.arg(bucket_count)::int - 1,
FLOOR(
- (
- EXTRACT(EPOCH FROM (al.listened_at - b.start_time))
- /
- NULLIF(EXTRACT(EPOCH FROM (b.end_time - b.start_time)), 0)
- ) * sqlc.arg(bucket_count)
+ (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0))
+ * sqlc.arg(bucket_count)::int
)::int
- ) AS bucket_idx,
- b.start_time,
- b.end_time
- FROM artist_listens al
- CROSS JOIN bounds b
-),
-aggregated AS (
- SELECT
- start_time
- + (
- bucket_idx * (end_time - start_time)
- / sqlc.arg(bucket_count)
- ) AS bucket_start,
- start_time
- + (
- (bucket_idx + 1) * (end_time - start_time)
- / sqlc.arg(bucket_count)
- ) AS bucket_end,
- COUNT(*) AS listen_count
- FROM bucketed
- GROUP BY bucket_idx, start_time, end_time
+ ) AS bucket_idx
+ FROM listens l
+ JOIN tracks t ON t.id = l.track_id
+ JOIN artist_tracks at ON at.track_id = t.id
+ CROSS JOIN stats s
+ WHERE at.artist_id = $1
+ AND s.start_time IS NOT NULL
)
SELECT
- bucket_start::timestamptz,
- bucket_end::timestamptz,
- listen_count
-FROM aggregated
-ORDER BY bucket_start;
+ (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS bucket_start,
+ (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end,
+ COUNT(li.bucket_idx) AS listen_count
+FROM bucket_series bs
+CROSS JOIN stats s
+LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx
+WHERE s.start_time IS NOT NULL
+GROUP BY bs.idx, s.start_time, s.bucket_interval
+ORDER BY bs.idx;
-- name: GetGroupedListensFromRelease :many
-WITH artist_listens AS (
+WITH bounds AS (
SELECT
- l.listened_at
+ MIN(l.listened_at) AS start_time,
+ NOW() AS end_time
FROM listens l
JOIN tracks t ON t.id = l.track_id
WHERE t.release_id = $1
),
-bounds AS (
+stats AS (
SELECT
- MIN(listened_at) AS start_time,
- MAX(listened_at) AS end_time
- FROM artist_listens
+ start_time,
+ end_time,
+ EXTRACT(EPOCH FROM (end_time - start_time)) AS total_seconds,
+ ((end_time - start_time) / sqlc.arg(bucket_count)::int) AS bucket_interval
+ FROM bounds
),
-bucketed AS (
+bucket_series AS (
+ SELECT generate_series(0, sqlc.arg(bucket_count)::int - 1) AS idx
+),
+listen_indices AS (
SELECT
LEAST(
- sqlc.arg(bucket_count) - 1,
+ sqlc.arg(bucket_count)::int - 1,
FLOOR(
- (
- EXTRACT(EPOCH FROM (al.listened_at - b.start_time))
- /
- NULLIF(EXTRACT(EPOCH FROM (b.end_time - b.start_time)), 0)
- ) * sqlc.arg(bucket_count)
+ (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0))
+ * sqlc.arg(bucket_count)::int
)::int
- ) AS bucket_idx,
- b.start_time,
- b.end_time
- FROM artist_listens al
- CROSS JOIN bounds b
-),
-aggregated AS (
- SELECT
- start_time
- + (
- bucket_idx * (end_time - start_time)
- / sqlc.arg(bucket_count)
- ) AS bucket_start,
- start_time
- + (
- (bucket_idx + 1) * (end_time - start_time)
- / sqlc.arg(bucket_count)
- ) AS bucket_end,
- COUNT(*) AS listen_count
- FROM bucketed
- GROUP BY bucket_idx, start_time, end_time
+ ) AS bucket_idx
+ FROM listens l
+ JOIN tracks t ON t.id = l.track_id
+ CROSS JOIN stats s
+ WHERE t.release_id = $1
+ AND s.start_time IS NOT NULL
)
SELECT
- bucket_start::timestamptz,
- bucket_end::timestamptz,
- listen_count
-FROM aggregated
-ORDER BY bucket_start;
+ (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS bucket_start,
+ (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end,
+ COUNT(li.bucket_idx) AS listen_count
+FROM bucket_series bs
+CROSS JOIN stats s
+LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx
+WHERE s.start_time IS NOT NULL
+GROUP BY bs.idx, s.start_time, s.bucket_interval
+ORDER BY bs.idx;
-- name: GetGroupedListensFromTrack :many
-WITH artist_listens AS (
+WITH bounds AS (
SELECT
- l.listened_at
+ MIN(l.listened_at) AS start_time,
+ NOW() AS end_time
FROM listens l
JOIN tracks t ON t.id = l.track_id
WHERE t.id = $1
),
-bounds AS (
+stats AS (
SELECT
- MIN(listened_at) AS start_time,
- MAX(listened_at) AS end_time
- FROM artist_listens
+ start_time,
+ end_time,
+ EXTRACT(EPOCH FROM (end_time - start_time)) AS total_seconds,
+ ((end_time - start_time) / sqlc.arg(bucket_count)::int) AS bucket_interval
+ FROM bounds
),
-bucketed AS (
+bucket_series AS (
+ SELECT generate_series(0, sqlc.arg(bucket_count)::int - 1) AS idx
+),
+listen_indices AS (
SELECT
LEAST(
- sqlc.arg(bucket_count) - 1,
+ sqlc.arg(bucket_count)::int - 1,
FLOOR(
- (
- EXTRACT(EPOCH FROM (al.listened_at - b.start_time))
- /
- NULLIF(EXTRACT(EPOCH FROM (b.end_time - b.start_time)), 0)
- ) * sqlc.arg(bucket_count)
+ (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0))
+ * sqlc.arg(bucket_count)::int
)::int
- ) AS bucket_idx,
- b.start_time,
- b.end_time
- FROM artist_listens al
- CROSS JOIN bounds b
-),
-aggregated AS (
- SELECT
- start_time
- + (
- bucket_idx * (end_time - start_time)
- / sqlc.arg(bucket_count)
- ) AS bucket_start,
- start_time
- + (
- (bucket_idx + 1) * (end_time - start_time)
- / sqlc.arg(bucket_count)
- ) AS bucket_end,
- COUNT(*) AS listen_count
- FROM bucketed
- GROUP BY bucket_idx, start_time, end_time
+ ) AS bucket_idx
+ FROM listens l
+ JOIN tracks t ON t.id = l.track_id
+ CROSS JOIN stats s
+ WHERE t.id = $1
+ AND s.start_time IS NOT NULL
)
SELECT
- bucket_start::timestamptz,
- bucket_end::timestamptz,
- listen_count
-FROM aggregated
-ORDER BY bucket_start;
+ (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS bucket_start,
+ (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end,
+ COUNT(li.bucket_idx) AS listen_count
+FROM bucket_series bs
+CROSS JOIN stats s
+LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx
+WHERE s.start_time IS NOT NULL
+GROUP BY bs.idx, s.start_time, s.bucket_interval
+ORDER BY bs.idx;
diff --git a/internal/db/psql/interest.go b/internal/db/psql/interest.go
index 9e8a623..0c8f4eb 100644
--- a/internal/db/psql/interest.go
+++ b/internal/db/psql/interest.go
@@ -14,54 +14,54 @@ func (d *Psql) GetInterest(ctx context.Context, opts db.GetInterestOpts) ([]db.I
return nil, errors.New("GetInterest: bucket count must be provided")
}
- ret := make([]db.InterestBucket, opts.Buckets)
+ ret := make([]db.InterestBucket, 0)
if opts.ArtistID != 0 {
resp, err := d.q.GetGroupedListensFromArtist(ctx, repository.GetGroupedListensFromArtistParams{
ArtistID: opts.ArtistID,
- BucketCount: opts.Buckets,
+ BucketCount: int32(opts.Buckets),
})
if err != nil {
return nil, fmt.Errorf("GetInterest: GetGroupedListensFromArtist: %w", err)
}
- for i, v := range resp {
- ret[i] = db.InterestBucket{
+ for _, v := range resp {
+ ret = append(ret, db.InterestBucket{
BucketStart: v.BucketStart,
BucketEnd: v.BucketEnd,
ListenCount: v.ListenCount,
- }
+ })
}
return ret, nil
} else if opts.AlbumID != 0 {
resp, err := d.q.GetGroupedListensFromRelease(ctx, repository.GetGroupedListensFromReleaseParams{
ReleaseID: opts.AlbumID,
- BucketCount: opts.Buckets,
+ BucketCount: int32(opts.Buckets),
})
if err != nil {
return nil, fmt.Errorf("GetInterest: GetGroupedListensFromRelease: %w", err)
}
- for i, v := range resp {
- ret[i] = db.InterestBucket{
+ for _, v := range resp {
+ ret = append(ret, db.InterestBucket{
BucketStart: v.BucketStart,
BucketEnd: v.BucketEnd,
ListenCount: v.ListenCount,
- }
+ })
}
return ret, nil
} else if opts.TrackID != 0 {
resp, err := d.q.GetGroupedListensFromTrack(ctx, repository.GetGroupedListensFromTrackParams{
ID: opts.TrackID,
- BucketCount: opts.Buckets,
+ BucketCount: int32(opts.Buckets),
})
if err != nil {
return nil, fmt.Errorf("GetInterest: GetGroupedListensFromTrack: %w", err)
}
- for i, v := range resp {
- ret[i] = db.InterestBucket{
+ for _, v := range resp {
+ ret = append(ret, db.InterestBucket{
BucketStart: v.BucketStart,
BucketEnd: v.BucketEnd,
ListenCount: v.ListenCount,
- }
+ })
}
return ret, nil
} else {
diff --git a/internal/repository/interest.sql.go b/internal/repository/interest.sql.go
index 27c1920..ae77764 100644
--- a/internal/repository/interest.sql.go
+++ b/internal/repository/interest.sql.go
@@ -11,64 +11,57 @@ import (
)
const getGroupedListensFromArtist = `-- name: GetGroupedListensFromArtist :many
-WITH artist_listens AS (
+WITH bounds AS (
SELECT
- l.listened_at
+ MIN(l.listened_at) AS start_time,
+ NOW() AS end_time
FROM listens l
JOIN tracks t ON t.id = l.track_id
JOIN artist_tracks at ON at.track_id = t.id
WHERE at.artist_id = $1
),
-bounds AS (
+stats AS (
SELECT
- MIN(listened_at) AS start_time,
- MAX(listened_at) AS end_time
- FROM artist_listens
+ start_time,
+ end_time,
+ EXTRACT(EPOCH FROM (end_time - start_time)) AS total_seconds,
+ ((end_time - start_time) / $2::int) AS bucket_interval
+ FROM bounds
),
-bucketed AS (
+bucket_series AS (
+ SELECT generate_series(0, $2::int - 1) AS idx
+),
+listen_indices AS (
SELECT
LEAST(
- $2 - 1,
+ $2::int - 1,
FLOOR(
- (
- EXTRACT(EPOCH FROM (al.listened_at - b.start_time))
- /
- NULLIF(EXTRACT(EPOCH FROM (b.end_time - b.start_time)), 0)
- ) * $2
+ (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0))
+ * $2::int
)::int
- ) AS bucket_idx,
- b.start_time,
- b.end_time
- FROM artist_listens al
- CROSS JOIN bounds b
-),
-aggregated AS (
- SELECT
- start_time
- + (
- bucket_idx * (end_time - start_time)
- / $2
- ) AS bucket_start,
- start_time
- + (
- (bucket_idx + 1) * (end_time - start_time)
- / $2
- ) AS bucket_end,
- COUNT(*) AS listen_count
- FROM bucketed
- GROUP BY bucket_idx, start_time, end_time
+ ) AS bucket_idx
+ FROM listens l
+ JOIN tracks t ON t.id = l.track_id
+ JOIN artist_tracks at ON at.track_id = t.id
+ CROSS JOIN stats s
+ WHERE at.artist_id = $1
+ AND s.start_time IS NOT NULL
)
SELECT
- bucket_start::timestamptz,
- bucket_end::timestamptz,
- listen_count
-FROM aggregated
-ORDER BY bucket_start
+ (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS bucket_start,
+ (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end,
+ COUNT(li.bucket_idx) AS listen_count
+FROM bucket_series bs
+CROSS JOIN stats s
+LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx
+WHERE s.start_time IS NOT NULL
+GROUP BY bs.idx, s.start_time, s.bucket_interval
+ORDER BY bs.idx
`
type GetGroupedListensFromArtistParams struct {
ArtistID int32
- BucketCount interface{}
+ BucketCount int32
}
type GetGroupedListensFromArtistRow struct {
@@ -98,63 +91,55 @@ func (q *Queries) GetGroupedListensFromArtist(ctx context.Context, arg GetGroupe
}
const getGroupedListensFromRelease = `-- name: GetGroupedListensFromRelease :many
-WITH artist_listens AS (
+WITH bounds AS (
SELECT
- l.listened_at
+ MIN(l.listened_at) AS start_time,
+ NOW() AS end_time
FROM listens l
JOIN tracks t ON t.id = l.track_id
WHERE t.release_id = $1
),
-bounds AS (
+stats AS (
SELECT
- MIN(listened_at) AS start_time,
- MAX(listened_at) AS end_time
- FROM artist_listens
+ start_time,
+ end_time,
+ EXTRACT(EPOCH FROM (end_time - start_time)) AS total_seconds,
+ ((end_time - start_time) / $2::int) AS bucket_interval
+ FROM bounds
),
-bucketed AS (
+bucket_series AS (
+ SELECT generate_series(0, $2::int - 1) AS idx
+),
+listen_indices AS (
SELECT
LEAST(
- $2 - 1,
+ $2::int - 1,
FLOOR(
- (
- EXTRACT(EPOCH FROM (al.listened_at - b.start_time))
- /
- NULLIF(EXTRACT(EPOCH FROM (b.end_time - b.start_time)), 0)
- ) * $2
+ (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0))
+ * $2::int
)::int
- ) AS bucket_idx,
- b.start_time,
- b.end_time
- FROM artist_listens al
- CROSS JOIN bounds b
-),
-aggregated AS (
- SELECT
- start_time
- + (
- bucket_idx * (end_time - start_time)
- / $2
- ) AS bucket_start,
- start_time
- + (
- (bucket_idx + 1) * (end_time - start_time)
- / $2
- ) AS bucket_end,
- COUNT(*) AS listen_count
- FROM bucketed
- GROUP BY bucket_idx, start_time, end_time
+ ) AS bucket_idx
+ FROM listens l
+ JOIN tracks t ON t.id = l.track_id
+ CROSS JOIN stats s
+ WHERE t.release_id = $1
+ AND s.start_time IS NOT NULL
)
SELECT
- bucket_start::timestamptz,
- bucket_end::timestamptz,
- listen_count
-FROM aggregated
-ORDER BY bucket_start
+ (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS bucket_start,
+ (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end,
+ COUNT(li.bucket_idx) AS listen_count
+FROM bucket_series bs
+CROSS JOIN stats s
+LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx
+WHERE s.start_time IS NOT NULL
+GROUP BY bs.idx, s.start_time, s.bucket_interval
+ORDER BY bs.idx
`
type GetGroupedListensFromReleaseParams struct {
ReleaseID int32
- BucketCount interface{}
+ BucketCount int32
}
type GetGroupedListensFromReleaseRow struct {
@@ -184,63 +169,55 @@ func (q *Queries) GetGroupedListensFromRelease(ctx context.Context, arg GetGroup
}
const getGroupedListensFromTrack = `-- name: GetGroupedListensFromTrack :many
-WITH artist_listens AS (
+WITH bounds AS (
SELECT
- l.listened_at
+ MIN(l.listened_at) AS start_time,
+ NOW() AS end_time
FROM listens l
JOIN tracks t ON t.id = l.track_id
WHERE t.id = $1
),
-bounds AS (
+stats AS (
SELECT
- MIN(listened_at) AS start_time,
- MAX(listened_at) AS end_time
- FROM artist_listens
+ start_time,
+ end_time,
+ EXTRACT(EPOCH FROM (end_time - start_time)) AS total_seconds,
+ ((end_time - start_time) / $2::int) AS bucket_interval
+ FROM bounds
),
-bucketed AS (
+bucket_series AS (
+ SELECT generate_series(0, $2::int - 1) AS idx
+),
+listen_indices AS (
SELECT
LEAST(
- $2 - 1,
+ $2::int - 1,
FLOOR(
- (
- EXTRACT(EPOCH FROM (al.listened_at - b.start_time))
- /
- NULLIF(EXTRACT(EPOCH FROM (b.end_time - b.start_time)), 0)
- ) * $2
+ (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0))
+ * $2::int
)::int
- ) AS bucket_idx,
- b.start_time,
- b.end_time
- FROM artist_listens al
- CROSS JOIN bounds b
-),
-aggregated AS (
- SELECT
- start_time
- + (
- bucket_idx * (end_time - start_time)
- / $2
- ) AS bucket_start,
- start_time
- + (
- (bucket_idx + 1) * (end_time - start_time)
- / $2
- ) AS bucket_end,
- COUNT(*) AS listen_count
- FROM bucketed
- GROUP BY bucket_idx, start_time, end_time
+ ) AS bucket_idx
+ FROM listens l
+ JOIN tracks t ON t.id = l.track_id
+ CROSS JOIN stats s
+ WHERE t.id = $1
+ AND s.start_time IS NOT NULL
)
SELECT
- bucket_start::timestamptz,
- bucket_end::timestamptz,
- listen_count
-FROM aggregated
-ORDER BY bucket_start
+ (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS bucket_start,
+ (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end,
+ COUNT(li.bucket_idx) AS listen_count
+FROM bucket_series bs
+CROSS JOIN stats s
+LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx
+WHERE s.start_time IS NOT NULL
+GROUP BY bs.idx, s.start_time, s.bucket_interval
+ORDER BY bs.idx
`
type GetGroupedListensFromTrackParams struct {
ID int32
- BucketCount interface{}
+ BucketCount int32
}
type GetGroupedListensFromTrackRow struct {
From 1ed055d0986e1c3c15424a501a544f4b88a51542 Mon Sep 17 00:00:00 2001
From: Gabe Farrell <90876006+gabehf@users.noreply.github.com>
Date: Thu, 22 Jan 2026 21:31:14 -0500
Subject: [PATCH 22/31] fix: ui tweaks and fixes (#170)
* add subtle gradient to home page
* tweak autumn theme primary color
* reduce home page top margin on mobile
* use focus-active instead of focus for outline
* fix gradient on rewind page
* align checkbox on login form
* i forgot what the pseudo class was called
---
client/app/app.css | 10 +++++-----
client/app/components/modals/LoginForm.tsx | 2 +-
client/app/routes/Home.tsx | 16 ++++++++--------
client/app/routes/RewindPage.tsx | 2 +-
client/app/styles/themes.css.ts | 2 +-
5 files changed, 16 insertions(+), 16 deletions(-)
diff --git a/client/app/app.css b/client/app/app.css
index eb5e7f6..bc60042 100644
--- a/client/app/app.css
+++ b/client/app/app.css
@@ -138,11 +138,11 @@ textarea {
input[type="checkbox"] {
height: fit-content;
}
-input:focus,
-button:focus,
-a:focus,
-select:focus,
-textarea:focus {
+input:focus-visible,
+button:focus-visible,
+a:focus-visible,
+select:focus-visible,
+textarea:focus-visible {
border-color: transparent;
outline: 2px solid var(--color-fg-tertiary);
}
diff --git a/client/app/components/modals/LoginForm.tsx b/client/app/components/modals/LoginForm.tsx
index 66ae6cb..1078476 100644
--- a/client/app/components/modals/LoginForm.tsx
+++ b/client/app/components/modals/LoginForm.tsx
@@ -54,7 +54,7 @@ export default function LoginForm() {
className="w-full mx-auto fg bg rounded p-2"
onChange={(e) => setPassword(e.target.value)}
/>
-
+
-
+
+
@@ -33,7 +30,10 @@ export default function Home() {
-
+
diff --git a/client/app/routes/RewindPage.tsx b/client/app/routes/RewindPage.tsx
index 71a1ef6..4d60065 100644
--- a/client/app/routes/RewindPage.tsx
+++ b/client/app/routes/RewindPage.tsx
@@ -59,7 +59,7 @@ export default function RewindPage() {
useEffect(() => {
if (!stats.top_artists[0]) return;
- const img = (stats.top_artists[0] as any)?.image;
+ const img = (stats.top_artists[0] as any)?.item.image;
if (!img) return;
average(imageUrl(img, "small"), { amount: 1 }).then((color) => {
diff --git a/client/app/styles/themes.css.ts b/client/app/styles/themes.css.ts
index d5390ae..1a3a57d 100644
--- a/client/app/styles/themes.css.ts
+++ b/client/app/styles/themes.css.ts
@@ -92,7 +92,7 @@ export const themes: Record
= {
fg: "#fef9f3",
fgSecondary: "#dbc6b0",
fgTertiary: "#a3917a",
- primary: "#d97706",
+ primary: "#F0850A",
primaryDim: "#b45309",
accent: "#8c4c28",
accentDim: "#6b3b1f",
From 937f9062b546a68bc045cf7749a0810a02a06fd8 Mon Sep 17 00:00:00 2001
From: Gabe Farrell <90876006+gabehf@users.noreply.github.com>
Date: Sat, 24 Jan 2026 13:19:04 -0500
Subject: [PATCH 23/31] fix: include time zone name overrides and add
KOITO_FORCE_TZ cfg option (#176)
* timezone overrides and force_tz option
* docs for force_tz
* add link to time zone names in docs
---
.../content/docs/reference/configuration.md | 2 +
engine/engine.go | 4 +
engine/handlers/handlers.go | 133 +++++++++++++++++-
internal/cfg/cfg.go | 15 ++
4 files changed, 153 insertions(+), 1 deletion(-)
diff --git a/docs/src/content/docs/reference/configuration.md b/docs/src/content/docs/reference/configuration.md
index 6eae82b..2af573c 100644
--- a/docs/src/content/docs/reference/configuration.md
+++ b/docs/src/content/docs/reference/configuration.md
@@ -64,6 +64,8 @@ If the environment variable is defined without **and** with the suffix at the sa
##### KOITO_CONFIG_DIR
- Default: `/etc/koito`
- Description: The location where import folders and image caches are stored.
+##### KOITO_FORCE_TZ
+- Description: A canonical IANA database time zone name (https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) that Koito will use to serve all clients. Overrides any timezones requested via a `tz` cookie or `tz` query parameter. Koito will fail to start if this value is invalid.
##### KOITO_DISABLE_DEEZER
- Default: `false`
- Description: Disables Deezer as a source for finding artist and album images.
diff --git a/engine/engine.go b/engine/engine.go
index 7de9254..979667e 100644
--- a/engine/engine.go
+++ b/engine/engine.go
@@ -96,6 +96,10 @@ func Run(
defer store.Close(ctx)
l.Info().Msg("Engine: Database connection established")
+ if cfg.ForceTZ() != nil {
+ l.Debug().Msgf("Engine: Forcing the use of timezone '%s'", cfg.ForceTZ().String())
+ }
+
l.Debug().Msg("Engine: Initializing MusicBrainz client")
var mbzC mbz.MusicBrainzCaller
if !cfg.MusicBrainzDisabled() {
diff --git a/engine/handlers/handlers.go b/engine/handlers/handlers.go
index 06127aa..78bc228 100644
--- a/engine/handlers/handlers.go
+++ b/engine/handlers/handlers.go
@@ -6,7 +6,9 @@ import (
"strconv"
"strings"
"time"
+ _ "time/tzdata"
+ "github.com/gabehf/koito/internal/cfg"
"github.com/gabehf/koito/internal/db"
"github.com/gabehf/koito/internal/logger"
)
@@ -107,14 +109,143 @@ func TimeframeFromRequest(r *http.Request) db.Timeframe {
func parseTZ(r *http.Request) *time.Location {
+ // this map is obviously AI.
+ // i manually referenced as many links as I could and couldn't find any
+ // incorrect entries here so hopefully it is all correct.
+ overrides := map[string]string{
+ // --- North America ---
+ "America/Indianapolis": "America/Indiana/Indianapolis",
+ "America/Knoxville": "America/Indiana/Knoxville",
+ "America/Louisville": "America/Kentucky/Louisville",
+ "America/Montreal": "America/Toronto",
+ "America/Shiprock": "America/Denver",
+ "America/Fort_Wayne": "America/Indiana/Indianapolis",
+ "America/Virgin": "America/Port_of_Spain",
+ "America/Santa_Isabel": "America/Tijuana",
+ "America/Ensenada": "America/Tijuana",
+ "America/Rosario": "America/Argentina/Cordoba",
+ "America/Jujuy": "America/Argentina/Jujuy",
+ "America/Mendoza": "America/Argentina/Mendoza",
+ "America/Catamarca": "America/Argentina/Catamarca",
+ "America/Cordoba": "America/Argentina/Cordoba",
+ "America/Buenos_Aires": "America/Argentina/Buenos_Aires",
+ "America/Coral_Harbour": "America/Atikokan",
+ "America/Atka": "America/Adak",
+ "US/Alaska": "America/Anchorage",
+ "US/Aleutian": "America/Adak",
+ "US/Arizona": "America/Phoenix",
+ "US/Central": "America/Chicago",
+ "US/Eastern": "America/New_York",
+ "US/East-Indiana": "America/Indiana/Indianapolis",
+ "US/Hawaii": "Pacific/Honolulu",
+ "US/Indiana-Starke": "America/Indiana/Knoxville",
+ "US/Michigan": "America/Detroit",
+ "US/Mountain": "America/Denver",
+ "US/Pacific": "America/Los_Angeles",
+ "US/Samoa": "Pacific/Pago_Pago",
+ "Canada/Atlantic": "America/Halifax",
+ "Canada/Central": "America/Winnipeg",
+ "Canada/Eastern": "America/Toronto",
+ "Canada/Mountain": "America/Edmonton",
+ "Canada/Newfoundland": "America/St_Johns",
+ "Canada/Pacific": "America/Vancouver",
+
+ // --- Asia ---
+ "Asia/Calcutta": "Asia/Kolkata",
+ "Asia/Saigon": "Asia/Ho_Chi_Minh",
+ "Asia/Katmandu": "Asia/Kathmandu",
+ "Asia/Rangoon": "Asia/Yangon",
+ "Asia/Ulan_Bator": "Asia/Ulaanbaatar",
+ "Asia/Macao": "Asia/Macau",
+ "Asia/Tel_Aviv": "Asia/Jerusalem",
+ "Asia/Ashkhabad": "Asia/Ashgabat",
+ "Asia/Chungking": "Asia/Chongqing",
+ "Asia/Dacca": "Asia/Dhaka",
+ "Asia/Istanbul": "Europe/Istanbul",
+ "Asia/Kashgar": "Asia/Urumqi",
+ "Asia/Thimbu": "Asia/Thimphu",
+ "Asia/Ujung_Pandang": "Asia/Makassar",
+ "ROC": "Asia/Taipei",
+ "Iran": "Asia/Tehran",
+ "Israel": "Asia/Jerusalem",
+ "Japan": "Asia/Tokyo",
+ "Singapore": "Asia/Singapore",
+ "Hongkong": "Asia/Hong_Kong",
+
+ // --- Europe ---
+ "Europe/Kiev": "Europe/Kyiv",
+ "Europe/Belfast": "Europe/London",
+ "Europe/Tiraspol": "Europe/Chisinau",
+ "Europe/Nicosia": "Asia/Nicosia",
+ "Europe/Moscow": "Europe/Moscow",
+ "W-SU": "Europe/Moscow",
+ "GB": "Europe/London",
+ "GB-Eire": "Europe/London",
+ "Eire": "Europe/Dublin",
+ "Poland": "Europe/Warsaw",
+ "Portugal": "Europe/Lisbon",
+ "Turkey": "Europe/Istanbul",
+
+ // --- Australia / Pacific ---
+ "Australia/ACT": "Australia/Sydney",
+ "Australia/Canberra": "Australia/Sydney",
+ "Australia/LHI": "Australia/Lord_Howe",
+ "Australia/North": "Australia/Darwin",
+ "Australia/NSW": "Australia/Sydney",
+ "Australia/Queensland": "Australia/Brisbane",
+ "Australia/South": "Australia/Adelaide",
+ "Australia/Tasmania": "Australia/Hobart",
+ "Australia/Victoria": "Australia/Melbourne",
+ "Australia/West": "Australia/Perth",
+ "Australia/Yancowinna": "Australia/Broken_Hill",
+ "Pacific/Samoa": "Pacific/Pago_Pago",
+ "Pacific/Yap": "Pacific/Chuuk",
+ "Pacific/Truk": "Pacific/Chuuk",
+ "Pacific/Ponape": "Pacific/Pohnpei",
+ "NZ": "Pacific/Auckland",
+ "NZ-CHAT": "Pacific/Chatham",
+
+ // --- Africa ---
+ "Africa/Asmera": "Africa/Asmara",
+ "Africa/Timbuktu": "Africa/Bamako",
+ "Egypt": "Africa/Cairo",
+ "Libya": "Africa/Tripoli",
+
+ // --- Atlantic ---
+ "Atlantic/Faeroe": "Atlantic/Faroe",
+ "Atlantic/Jan_Mayen": "Europe/Oslo",
+ "Iceland": "Atlantic/Reykjavik",
+
+ // --- Etc / Misc ---
+ "UTC": "UTC",
+ "Etc/UTC": "UTC",
+ "Etc/GMT": "UTC",
+ "GMT": "UTC",
+ "Zulu": "UTC",
+ "Universal": "UTC",
+ }
+
+ if cfg.ForceTZ() != nil {
+ return cfg.ForceTZ()
+ }
+
if tz := r.URL.Query().Get("tz"); tz != "" {
+ if fixedTz, exists := overrides[tz]; exists {
+ tz = fixedTz
+ }
if loc, err := time.LoadLocation(tz); err == nil {
return loc
}
}
if c, err := r.Cookie("tz"); err == nil {
- if loc, err := time.LoadLocation(c.Value); err == nil {
+ var tz string
+ if fixedTz, exists := overrides[c.Value]; exists {
+ tz = fixedTz
+ } else {
+ tz = c.Value
+ }
+ if loc, err := time.LoadLocation(tz); err == nil {
return loc
}
}
diff --git a/internal/cfg/cfg.go b/internal/cfg/cfg.go
index 36478b1..e74d6b9 100644
--- a/internal/cfg/cfg.go
+++ b/internal/cfg/cfg.go
@@ -49,6 +49,7 @@ const (
FETCH_IMAGES_DURING_IMPORT_ENV = "KOITO_FETCH_IMAGES_DURING_IMPORT"
ARTIST_SEPARATORS_ENV = "KOITO_ARTIST_SEPARATORS_REGEX"
LOGIN_GATE_ENV = "KOITO_LOGIN_GATE"
+ FORCE_TZ = "KOITO_FORCE_TZ"
)
type config struct {
@@ -87,6 +88,7 @@ type config struct {
importAfter time.Time
artistSeparators []*regexp.Regexp
loginGate bool
+ forceTZ *time.Location
}
var (
@@ -213,6 +215,13 @@ func loadConfig(getenv func(string) string, version string) (*config, error) {
cfg.loginGate = true
}
+ if getenv(FORCE_TZ) != "" {
+ cfg.forceTZ, err = time.LoadLocation(getenv(FORCE_TZ))
+ if err != nil {
+ return nil, fmt.Errorf("forced timezone '%s' is not a valid timezone", getenv(FORCE_TZ))
+ }
+ }
+
switch strings.ToLower(getenv(LOG_LEVEL_ENV)) {
case "debug":
cfg.logLevel = 0
@@ -430,3 +439,9 @@ func LoginGate() bool {
defer lock.RUnlock()
return globalConfig.loginGate
}
+
+func ForceTZ() *time.Location {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.forceTZ
+}
From c8a11ef018461f14a5e7c9fcf8fb69cfed0d7d0f Mon Sep 17 00:00:00 2001
From: Gabe Farrell <90876006+gabehf@users.noreply.github.com>
Date: Sun, 25 Jan 2026 15:51:07 -0500
Subject: [PATCH 24/31] fix: ensure mbids in mbidmapping are discovered (#180)
---
engine/import_test.go | 28 ++++++++++++++++++
internal/importer/listenbrainz.go | 19 ++++++++++--
test_assets/listenbrainz_shoko1_123456789.zip | Bin 0 -> 3184 bytes
3 files changed, 44 insertions(+), 3 deletions(-)
create mode 100644 test_assets/listenbrainz_shoko1_123456789.zip
diff --git a/engine/import_test.go b/engine/import_test.go
index bb5c18e..fa69e73 100644
--- a/engine/import_test.go
+++ b/engine/import_test.go
@@ -264,6 +264,34 @@ func TestImportListenBrainz_MbzDisabled(t *testing.T) {
truncateTestData(t)
}
+func TestImportListenBrainz_MBIDMapping(t *testing.T) {
+
+ src := path.Join("..", "test_assets", "listenbrainz_shoko1_123456789.zip")
+ destDir := filepath.Join(cfg.ConfigDir(), "import")
+ dest := filepath.Join(destDir, "listenbrainz_shoko1_123456789.zip")
+
+ // not going to make the dest dir because engine should make it already
+
+ input, err := os.ReadFile(src)
+ require.NoError(t, err)
+
+ require.NoError(t, os.WriteFile(dest, input, os.ModePerm))
+
+ engine.RunImporter(logger.Get(), store, &mbz.MbzErrorCaller{})
+
+ album, err := store.GetAlbum(context.Background(), db.GetAlbumOpts{MusicBrainzID: uuid.MustParse("177ebc28-0115-3897-8eb3-ebf74ce23790")})
+ require.NoError(t, err)
+ assert.Equal(t, "Zombie", album.Title)
+ artist, err := store.GetArtist(context.Background(), db.GetArtistOpts{MusicBrainzID: uuid.MustParse("c98d40fd-f6cf-4b26-883e-eaa515ee2851")})
+ require.NoError(t, err)
+ assert.Equal(t, "The Cranberries", artist.Name)
+ track, err := store.GetTrack(context.Background(), db.GetTrackOpts{MusicBrainzID: uuid.MustParse("3bbeb4e3-ab6d-460d-bfc5-de49e4251061")})
+ require.NoError(t, err)
+ assert.Equal(t, "Zombie", track.Title)
+
+ truncateTestData(t)
+}
+
func TestImportKoito(t *testing.T) {
src := path.Join("..", "test_assets", "koito_export_test.json")
diff --git a/internal/importer/listenbrainz.go b/internal/importer/listenbrainz.go
index 4187bbb..7c1a8bb 100644
--- a/internal/importer/listenbrainz.go
+++ b/internal/importer/listenbrainz.go
@@ -85,7 +85,14 @@ func ImportListenBrainzFile(ctx context.Context, store db.DB, mbzc mbz.MusicBrai
}
artistMbzIDs, err := utils.ParseUUIDSlice(payload.TrackMeta.AdditionalInfo.ArtistMBIDs)
if err != nil {
- l.Debug().Err(err).Msg("Failed to parse one or more uuids")
+ l.Debug().AnErr("error", err).Msg("ImportListenBrainzFile: Failed to parse one or more UUIDs")
+ }
+ if len(artistMbzIDs) < 1 {
+ l.Debug().AnErr("error", err).Msg("ImportListenBrainzFile: Attempting to parse artist UUIDs from mbid_mapping")
+ utils.ParseUUIDSlice(payload.TrackMeta.MBIDMapping.ArtistMBIDs)
+ if err != nil {
+ l.Debug().AnErr("error", err).Msg("ImportListenBrainzFile: Failed to parse one or more UUIDs")
+ }
}
rgMbzID, err := uuid.Parse(payload.TrackMeta.AdditionalInfo.ReleaseGroupMBID)
if err != nil {
@@ -93,11 +100,17 @@ func ImportListenBrainzFile(ctx context.Context, store db.DB, mbzc mbz.MusicBrai
}
releaseMbzID, err := uuid.Parse(payload.TrackMeta.AdditionalInfo.ReleaseMBID)
if err != nil {
- releaseMbzID = uuid.Nil
+ releaseMbzID, err = uuid.Parse(payload.TrackMeta.MBIDMapping.ReleaseMBID)
+ if err != nil {
+ releaseMbzID = uuid.Nil
+ }
}
recordingMbzID, err := uuid.Parse(payload.TrackMeta.AdditionalInfo.RecordingMBID)
if err != nil {
- recordingMbzID = uuid.Nil
+ recordingMbzID, err = uuid.Parse(payload.TrackMeta.MBIDMapping.RecordingMBID)
+ if err != nil {
+ recordingMbzID = uuid.Nil
+ }
}
var client string
diff --git a/test_assets/listenbrainz_shoko1_123456789.zip b/test_assets/listenbrainz_shoko1_123456789.zip
new file mode 100644
index 0000000000000000000000000000000000000000..14c97a2a104dca68ccb2e810fe24cd5039754322
GIT binary patch
literal 3184
zcmZ|RXIK;28o==g1nD*OuF@fZlu#8A5D1V^LX{#Nq-jD?0T-l-NSDxS=%6&|(gf+f
zccldoLAnIw;_g0seY|^To^$5RhxyI>@ehaM6OaIYUTt17_x?Qmy%7Oe0WMBRl${$=
z1Ytl7z@I~TISpPePajgi6#^_i06?&DxxxPtI(`$#e-Unq-jw`{`E*H){W}A?{526E
z`;zfLAsr99oJ(8~008~Yh=HAv?rtuB4wZQ+{an8dwVgce#`=*iWM>~lZlMAx%RK7v
zjuldUSFHs4{6R@Eb*Afe$h;OTik|ez@tHy{JygLqid&k@nlU<5e6%`sRqmoN8FJON
z2HbAEM;&=}QL1g4vuRT8LN%N-9+xL$T)kC}+G);e6!ki#Tjj26C)A0q#?XYf4R?RS=OV*>AyP&`E;302;r_^~ySuMSKlZ*&I=2m;svRg_
zw*Roiy&=M%6Eep|E{+^I9`3S?)7UXT6$t^e4--*6;%4cUeZceb)F_XaGXwRQyASsRswpU92psdOGs+cb)
zo$#LfXQu`Wo35NHXDFlt*Lv=HvdCPp?e8=Y>OM1C$*v(TA+mBX*OA*S_5+;$V{GZ2
zfQ9s=mV2p{phJ9}-N2R=sR@U+f)DO!J7eK&g7s)lr2A8YS^hl6E%$J14Y!lR0>dSm
z@1q=-?S&x8cWY93SCw$iJf$2*4i2{mSCBO(rS|3-amBmtwuQMZsPY)!S8X0hZjzK|6`6zMe$m-Po#H32wnwg!>vU&T_>er4XVig5%8vCU5S<+pwh}oZNb4%g%fep2t4(8#)-{Y
zn7nkkmX>RftV%rmn-O9w-<7DXa-y3j?Ub~VCYC?YQwQC
zrJwW1$$fa7Y4MjZ^dWFr;7U4IM0wPTCO=L4$EHlRIBLN+LXRm~WQ|AjO_Snc?kejl
ze)0D!5GH@a1~;>x7wOi(KIfKsd=)2KCrOvR);U486?aRa8DtMJN*R4OoBFlAlFCna
zID5}_BS~tmGE?!HA7NQKSaSXTF*0lI#%Ayu^->@RzDC_ejIe!k)V8VbScdaU#FfbinTg!t#KcvElzK0D
zZG(egg+S!K1P
zg#$L}$b=Q{iTRcox<%WDARiPU#V#$dH@qRJ=5Wsw?erUl$Qe!drL?Q*ak)&c$nuP?CKUT6S~
zGGl=K;=Z7!(q4FNLMlc!TrkW|4UcDx@6E!A6_?IY_I}g;YCqCv*ahi}0cnPFiE^T~j@y%qxL@*W6AlM0}#G!ga5K|1@s=Tlda(sW|$5_F^)
z67}C{hU|IhqKE1Xw1;eG1~=#LhQe56-grig#;eosw(8)6H}bU)+3hk#3ezyycT+NafCS{P~bL$Q(lVVw^}C6eJb~rHH&A4_7!+iO=Qd&
ztR*P(MfI;IjkNHi{2zdZSB`HKK@{0q&H+^#30i$K7982q+==LK{xaa`FMh86;9{*s
zDM%Jm{d$}ix@X>|gv2wNP@TvZKA}CUWL~w5k=!-hNT<&ALq2efikie(nawb&a(5Kn
z;mx~#H$*ciWl~RCdBm`?vKV(BNhTqz2a<@Vu-lmk>K^PrG&;M;5pRTUA8v*DDLw#=
z7Cz%xw~;dHjzB&hvB!cXnN?8{J_&pe!bgnTB#Q!DMT!DLm*FG_!huJ`Y>8^w%R#pI
zcsvixnUii06&o3ise|uO1%z(j6e*4w&@jm+B9m3P(xa5~gy4X>^+P89H1n#3-=nFApd@hzPY}0SO5OBuw83YKn7}@9Up+rZZ9GC7
zZiccPEi81eI^3IM?waqMU|6X4B#FLw4f{pyx9`c2TONs|x>_zbd^3vL1i{AXW!kY)
z+$Yi;n{xN|E#L1@!*47|_q!y|bj#BXYlL>91FI3nO1SxB?7b%O`W7KBu~i#9#mt~w
zL267c*2h0$_ri0L)~ZYOa5L!9#U8qIWUf}B@fJ%8uMt~hdfEZzf#vEt&XG5O^E1ZO
zzHyOVKWi1y37FM1%PJY>GG{pfmVT#MKA*F}A&Z=3cN!Gn485XNb~+M%LGr?>;zXPs
zuyF9?lHKo3PcJUCMC2fRoq
zFb*HzFvxxMY1DtsT#(^|(STNM
zk`5yB4uo=snBaNe0?XHxS9`*h+ID&u@#R4C`~?|Ul}e}JKpb@L;+EgnYtMXj&&=3g
zf4FG(cS(003f7&hsvFYcVSSByxa248_Gs2JhE$Iq^vtd?+Gs_21#HGb%{r=pKH4P?
zCnM>#y3zX4QU0hWc+jYRnH|;rp>o>Fe3jxfN|RIG^+t~9xG=ROOn!33d+xcc$qQ~S
zG@7F2lgC&e=*F14K}VrL(2!;n4lVNTj?lZcWOovi>ZTDJs;Tb;i49Ji=WZiE8WVDN;i{aHU??4me!Hu#@+e@C&Mr0e@SpS6i9ga2>}1{#$7%w
INq@fm2g3obEdT%j
literal 0
HcmV?d00001
From 35e104c97e586e11fa36c1cc1094677f754c1acd Mon Sep 17 00:00:00 2001
From: Gabe Farrell <90876006+gabehf@users.noreply.github.com>
Date: Mon, 26 Jan 2026 13:03:27 -0500
Subject: [PATCH 25/31] fix: gradient background on top charts (#181)
---
client/app/routes/Charts/ChartLayout.tsx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/client/app/routes/Charts/ChartLayout.tsx b/client/app/routes/Charts/ChartLayout.tsx
index 02ee9bd..446f7d1 100644
--- a/client/app/routes/Charts/ChartLayout.tsx
+++ b/client/app/routes/Charts/ChartLayout.tsx
@@ -40,7 +40,7 @@ export default function ChartLayout({
useEffect(() => {
if ((data?.items?.length ?? 0) === 0) return;
- const img = (data.items[0] as any)?.image;
+ const img = (data.items[0] as any)?.item.image;
if (!img) return;
average(imageUrl(img, "small"), { amount: 1 }).then((color) => {
From bf1c03e9fdfbeafc3c954ee151ca714b945d4e59 Mon Sep 17 00:00:00 2001
From: PythonGermany <97847597+PythonGermany@users.noreply.github.com>
Date: Mon, 26 Jan 2026 19:43:01 +0100
Subject: [PATCH 26/31] docs: fix typo in index.mdx (#182)
---
docs/src/content/docs/index.mdx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/src/content/docs/index.mdx b/docs/src/content/docs/index.mdx
index a4d1858..f590ebb 100644
--- a/docs/src/content/docs/index.mdx
+++ b/docs/src/content/docs/index.mdx
@@ -28,7 +28,7 @@ import { Card, CardGrid } from '@astrojs/starlight/components';
Koito can be connected to any music server or client that allows for custom ListenBrainz URLs.
- Automatically relay listens submitted to your Koito instance to other ListenBrainz compatble servers.
+ Automatically relay listens submitted to your Koito instance to other ListenBrainz compatible servers.
Koito automatically fetches data from MusicBrainz and images from Deezer and Cover Art Archive to compliment what is provided by your music server.
From 42b32c79201260a5b251841cf371b2c5b58b9a52 Mon Sep 17 00:00:00 2001
From: Gabe Farrell <90876006+gabehf@users.noreply.github.com>
Date: Mon, 26 Jan 2026 13:48:43 -0500
Subject: [PATCH 27/31] feat: add api key auth to web api (#183)
---
engine/long_test.go | 32 +++++
engine/middleware/authenticate.go | 167 ++++++++++++++++++++++++
engine/middleware/validate.go | 125 ------------------
engine/routes.go | 12 +-
internal/cfg/cfg.go | 201 -----------------------------
internal/cfg/getters.go | 206 ++++++++++++++++++++++++++++++
internal/cfg/setters.go | 7 +
7 files changed, 418 insertions(+), 332 deletions(-)
create mode 100644 engine/middleware/authenticate.go
delete mode 100644 engine/middleware/validate.go
create mode 100644 internal/cfg/getters.go
create mode 100644 internal/cfg/setters.go
diff --git a/engine/long_test.go b/engine/long_test.go
index 2ef5d4b..d916117 100644
--- a/engine/long_test.go
+++ b/engine/long_test.go
@@ -356,6 +356,38 @@ func TestDelete(t *testing.T) {
truncateTestData(t)
}
+func TestLoginGate(t *testing.T) {
+
+ t.Run("Submit Listens", doSubmitListens)
+
+ req, err := http.NewRequest("DELETE", host()+"/apis/web/v1/artist?id=1", nil)
+ require.NoError(t, err)
+ req.Header.Add("Authorization", "Token "+apikey)
+ resp, err := http.DefaultClient.Do(req)
+ assert.NoError(t, err)
+ assert.Equal(t, 204, resp.StatusCode)
+
+ cfg.SetLoginGate(true)
+
+ req, err = http.NewRequest("GET", host()+"/apis/web/v1/artist?id=3", nil)
+ require.NoError(t, err)
+ // req.Header.Add("Authorization", "Token "+apikey)
+ resp, err = http.DefaultClient.Do(req)
+ assert.NoError(t, err)
+ assert.Equal(t, 401, resp.StatusCode)
+
+ req, err = http.NewRequest("GET", host()+"/apis/web/v1/artist?id=3", nil)
+ require.NoError(t, err)
+ req.Header.Add("Authorization", "Token "+apikey)
+ resp, err = http.DefaultClient.Do(req)
+ assert.NoError(t, err)
+ assert.Equal(t, 200, resp.StatusCode)
+
+ cfg.SetLoginGate(false)
+
+ truncateTestData(t)
+}
+
func TestAliasesAndSearch(t *testing.T) {
t.Run("Submit Listens", doSubmitListens)
diff --git a/engine/middleware/authenticate.go b/engine/middleware/authenticate.go
new file mode 100644
index 0000000..a435473
--- /dev/null
+++ b/engine/middleware/authenticate.go
@@ -0,0 +1,167 @@
+package middleware
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "net/http"
+ "strings"
+ "time"
+
+ "github.com/gabehf/koito/internal/cfg"
+ "github.com/gabehf/koito/internal/db"
+ "github.com/gabehf/koito/internal/logger"
+ "github.com/gabehf/koito/internal/models"
+ "github.com/gabehf/koito/internal/utils"
+ "github.com/google/uuid"
+)
+
+type MiddlwareContextKey string
+
+const (
+ UserContextKey MiddlwareContextKey = "user"
+ apikeyContextKey MiddlwareContextKey = "apikeyID"
+)
+
+type AuthMode int
+
+const (
+ AuthModeSessionCookie AuthMode = iota
+ AuthModeAPIKey
+ AuthModeSessionOrAPIKey
+ AuthModeLoginGate
+)
+
+func Authenticate(store db.DB, mode AuthMode) func(http.Handler) http.Handler {
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ctx := r.Context()
+ l := logger.FromContext(ctx)
+
+ var user *models.User
+ var err error
+
+ switch mode {
+ case AuthModeSessionCookie:
+ user, err = validateSession(ctx, store, r)
+
+ case AuthModeAPIKey:
+ user, err = validateAPIKey(ctx, store, r)
+
+ case AuthModeSessionOrAPIKey:
+ user, err = validateSession(ctx, store, r)
+ if err != nil || user == nil {
+ user, err = validateAPIKey(ctx, store, r)
+ }
+
+ case AuthModeLoginGate:
+ if cfg.LoginGate() {
+ user, err = validateSession(ctx, store, r)
+ if err != nil || user == nil {
+ user, err = validateAPIKey(ctx, store, r)
+ }
+ } else {
+ next.ServeHTTP(w, r)
+ }
+ }
+
+ if err != nil {
+ l.Err(err).Msg("authentication failed")
+ utils.WriteError(w, "unauthorized", http.StatusUnauthorized)
+ return
+ }
+
+ if user == nil {
+ utils.WriteError(w, "unauthorized", http.StatusUnauthorized)
+ return
+ }
+
+ if user != nil {
+ ctx = context.WithValue(ctx, UserContextKey, user)
+ r = r.WithContext(ctx)
+ }
+
+ next.ServeHTTP(w, r)
+ })
+ }
+}
+
+func validateSession(ctx context.Context, store db.DB, r *http.Request) (*models.User, error) {
+ l := logger.FromContext(r.Context())
+
+ l.Debug().Msgf("ValidateSession: Checking user authentication via session cookie")
+
+ cookie, err := r.Cookie("koito_session")
+ var sid uuid.UUID
+ if err == nil {
+ sid, err = uuid.Parse(cookie.Value)
+ if err != nil {
+ l.Err(err).Msg("ValidateSession: Could not parse UUID from session cookie")
+ return nil, errors.New("session cookie is invalid")
+ }
+ } else {
+ l.Debug().Msgf("ValidateSession: No session cookie found; attempting API key authentication")
+ return nil, errors.New("session cookie is missing")
+ }
+
+ l.Debug().Msg("ValidateSession: Retrieved login cookie from request")
+
+ u, err := store.GetUserBySession(r.Context(), sid)
+ if err != nil {
+ l.Err(fmt.Errorf("ValidateSession: %w", err)).Msg("Error accessing database")
+ return nil, errors.New("internal server error")
+ }
+ if u == nil {
+ l.Debug().Msg("ValidateSession: No user with session id found")
+ return nil, errors.New("no user with session id found")
+ }
+
+ ctx = context.WithValue(r.Context(), UserContextKey, u)
+ r = r.WithContext(ctx)
+
+ l.Debug().Msgf("ValidateSession: Refreshing session for user '%s'", u.Username)
+
+ store.RefreshSession(r.Context(), sid, time.Now().Add(30*24*time.Hour))
+
+ l.Debug().Msgf("ValidateSession: Refreshed session for user '%s'", u.Username)
+
+ return u, nil
+}
+
+func validateAPIKey(ctx context.Context, store db.DB, r *http.Request) (*models.User, error) {
+ l := logger.FromContext(ctx)
+
+ l.Debug().Msg("ValidateApiKey: Checking if user is already authenticated")
+
+ authH := r.Header.Get("Authorization")
+ var token string
+ if strings.HasPrefix(strings.ToLower(authH), "token ") {
+ token = strings.TrimSpace(authH[6:]) // strip "Token "
+ } else {
+ l.Error().Msg("ValidateApiKey: Authorization header must be formatted 'Token {token}'")
+ return nil, errors.New("authorization header is invalid")
+ }
+
+ u, err := store.GetUserByApiKey(ctx, token)
+ if err != nil {
+ l.Err(err).Msg("ValidateApiKey: Failed to get user from database using api key")
+ return nil, errors.New("internal server error")
+ }
+ if u == nil {
+ l.Debug().Msg("ValidateApiKey: API key does not exist")
+ return nil, errors.New("authorization token is invalid")
+ }
+
+ ctx = context.WithValue(r.Context(), UserContextKey, u)
+ r = r.WithContext(ctx)
+
+ return u, nil
+}
+
+func GetUserFromContext(ctx context.Context) *models.User {
+ user, ok := ctx.Value(UserContextKey).(*models.User)
+ if !ok {
+ return nil
+ }
+ return user
+}
diff --git a/engine/middleware/validate.go b/engine/middleware/validate.go
deleted file mode 100644
index b3e1369..0000000
--- a/engine/middleware/validate.go
+++ /dev/null
@@ -1,125 +0,0 @@
-package middleware
-
-import (
- "context"
- "fmt"
- "net/http"
- "strings"
- "time"
-
- "github.com/gabehf/koito/internal/db"
- "github.com/gabehf/koito/internal/logger"
- "github.com/gabehf/koito/internal/models"
- "github.com/gabehf/koito/internal/utils"
- "github.com/google/uuid"
-)
-
-type MiddlwareContextKey string
-
-const (
- UserContextKey MiddlwareContextKey = "user"
- apikeyContextKey MiddlwareContextKey = "apikeyID"
-)
-
-func ValidateSession(store db.DB) func(next http.Handler) http.Handler {
- return func(next http.Handler) http.Handler {
- return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- l := logger.FromContext(r.Context())
-
- l.Debug().Msgf("ValidateSession: Checking user authentication via session cookie")
-
- cookie, err := r.Cookie("koito_session")
- var sid uuid.UUID
- if err == nil {
- sid, err = uuid.Parse(cookie.Value)
- if err != nil {
- l.Err(err).Msg("ValidateSession: Could not parse UUID from session cookie")
- utils.WriteError(w, "session cookie is invalid", http.StatusUnauthorized)
- return
- }
- } else {
- l.Debug().Msgf("ValidateSession: No session cookie found; attempting API key authentication")
- utils.WriteError(w, "session cookie is missing", http.StatusUnauthorized)
- return
- }
-
- l.Debug().Msg("ValidateSession: Retrieved login cookie from request")
-
- u, err := store.GetUserBySession(r.Context(), sid)
- if err != nil {
- l.Err(fmt.Errorf("ValidateSession: %w", err)).Msg("Error accessing database")
- utils.WriteError(w, "internal server error", http.StatusInternalServerError)
- return
- }
- if u == nil {
- l.Debug().Msg("ValidateSession: No user with session id found")
- utils.WriteError(w, "unauthorized", http.StatusUnauthorized)
- return
- }
-
- ctx := context.WithValue(r.Context(), UserContextKey, u)
- r = r.WithContext(ctx)
-
- l.Debug().Msgf("ValidateSession: Refreshing session for user '%s'", u.Username)
-
- store.RefreshSession(r.Context(), sid, time.Now().Add(30*24*time.Hour))
-
- l.Debug().Msgf("ValidateSession: Refreshed session for user '%s'", u.Username)
-
- next.ServeHTTP(w, r)
- })
- }
-}
-
-func ValidateApiKey(store db.DB) func(next http.Handler) http.Handler {
- return func(next http.Handler) http.Handler {
- return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- ctx := r.Context()
- l := logger.FromContext(ctx)
-
- l.Debug().Msg("ValidateApiKey: Checking if user is already authenticated")
-
- u := GetUserFromContext(ctx)
- if u != nil {
- l.Debug().Msg("ValidateApiKey: User is already authenticated; skipping API key authentication")
- next.ServeHTTP(w, r)
- return
- }
-
- authh := r.Header.Get("Authorization")
- var token string
- if strings.HasPrefix(strings.ToLower(authh), "token ") {
- token = strings.TrimSpace(authh[6:]) // strip "Token "
- } else {
- l.Error().Msg("ValidateApiKey: Authorization header must be formatted 'Token {token}'")
- utils.WriteError(w, "unauthorized", http.StatusUnauthorized)
- return
- }
-
- u, err := store.GetUserByApiKey(ctx, token)
- if err != nil {
- l.Err(err).Msg("Failed to get user from database using api key")
- utils.WriteError(w, "internal server error", http.StatusInternalServerError)
- return
- }
- if u == nil {
- l.Debug().Msg("Api key does not exist")
- utils.WriteError(w, "unauthorized", http.StatusUnauthorized)
- return
- }
-
- ctx = context.WithValue(r.Context(), UserContextKey, u)
- r = r.WithContext(ctx)
-
- next.ServeHTTP(w, r)
- })
- }
-}
-
-func GetUserFromContext(ctx context.Context) *models.User {
- user, ok := ctx.Value(UserContextKey).(*models.User)
- if !ok {
- return nil
- }
- return user
-}
diff --git a/engine/routes.go b/engine/routes.go
index e1c5fda..c62edf5 100644
--- a/engine/routes.go
+++ b/engine/routes.go
@@ -38,9 +38,7 @@ func bindRoutes(
r.Get("/config", handlers.GetCfgHandler())
r.Group(func(r chi.Router) {
- if cfg.LoginGate() {
- r.Use(middleware.ValidateSession(db))
- }
+ r.Use(middleware.Authenticate(db, middleware.AuthModeLoginGate))
r.Get("/artist", handlers.GetArtistHandler(db))
r.Get("/artists", handlers.GetArtistsForItemHandler(db))
r.Get("/album", handlers.GetAlbumHandler(db))
@@ -79,7 +77,7 @@ func bindRoutes(
})
r.Group(func(r chi.Router) {
- r.Use(middleware.ValidateSession(db))
+ r.Use(middleware.Authenticate(db, middleware.AuthModeSessionOrAPIKey))
r.Get("/export", handlers.ExportHandler(db))
r.Post("/replace-image", handlers.ReplaceImageHandler(db))
r.Patch("/album", handlers.UpdateAlbumHandler(db))
@@ -111,8 +109,10 @@ func bindRoutes(
AllowedHeaders: []string{"Content-Type", "Authorization"},
}))
- r.With(middleware.ValidateApiKey(db)).Post("/submit-listens", handlers.LbzSubmitListenHandler(db, mbz))
- r.With(middleware.ValidateApiKey(db)).Get("/validate-token", handlers.LbzValidateTokenHandler(db))
+ r.With(middleware.Authenticate(db, middleware.AuthModeAPIKey)).
+ Post("/submit-listens", handlers.LbzSubmitListenHandler(db, mbz))
+ r.With(middleware.Authenticate(db, middleware.AuthModeAPIKey)).
+ Get("/validate-token", handlers.LbzValidateTokenHandler(db))
})
// serve react client
diff --git a/internal/cfg/cfg.go b/internal/cfg/cfg.go
index e74d6b9..0cfc7bb 100644
--- a/internal/cfg/cfg.go
+++ b/internal/cfg/cfg.go
@@ -244,204 +244,3 @@ func parseBool(s string) bool {
return false
}
}
-
-// Global accessors for configuration values
-
-func UserAgent() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.userAgent
-}
-
-func ListenAddr() string {
- lock.RLock()
- defer lock.RUnlock()
- return fmt.Sprintf("%s:%d", globalConfig.bindAddr, globalConfig.listenPort)
-}
-
-func ConfigDir() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.configDir
-}
-
-func DatabaseUrl() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.databaseUrl
-}
-
-func MusicBrainzUrl() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.musicBrainzUrl
-}
-
-func MusicBrainzRateLimit() int {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.musicBrainzRateLimit
-}
-
-func LogLevel() int {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.logLevel
-}
-
-func StructuredLogging() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.structuredLogging
-}
-
-func LbzRelayEnabled() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.lbzRelayEnabled
-}
-
-func LbzRelayUrl() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.lbzRelayUrl
-}
-
-func LbzRelayToken() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.lbzRelayToken
-}
-
-func DefaultPassword() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.defaultPw
-}
-
-func DefaultUsername() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.defaultUsername
-}
-
-func DefaultTheme() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.defaultTheme
-}
-
-func FullImageCacheEnabled() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.enableFullImageCache
-}
-
-func DeezerDisabled() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.disableDeezer
-}
-
-func CoverArtArchiveDisabled() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.disableCAA
-}
-
-func MusicBrainzDisabled() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.disableMusicBrainz
-}
-
-func SubsonicEnabled() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.subsonicEnabled
-}
-
-func SubsonicUrl() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.subsonicUrl
-}
-
-func SubsonicParams() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.subsonicParams
-}
-
-func LastFMApiKey() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.lastfmApiKey
-}
-
-func SkipImport() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.skipImport
-}
-
-func AllowedHosts() []string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.allowedHosts
-}
-
-func AllowAllHosts() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.allowAllHosts
-}
-
-func AllowedOrigins() []string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.allowedOrigins
-}
-
-func RateLimitDisabled() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.disableRateLimit
-}
-
-func ThrottleImportMs() int {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.importThrottleMs
-}
-
-// returns the before, after times, in that order
-func ImportWindow() (time.Time, time.Time) {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.importBefore, globalConfig.importAfter
-}
-
-func FetchImagesDuringImport() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.fetchImageDuringImport
-}
-
-func ArtistSeparators() []*regexp.Regexp {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.artistSeparators
-}
-
-func LoginGate() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.loginGate
-}
-
-func ForceTZ() *time.Location {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.forceTZ
-}
diff --git a/internal/cfg/getters.go b/internal/cfg/getters.go
new file mode 100644
index 0000000..596ca9d
--- /dev/null
+++ b/internal/cfg/getters.go
@@ -0,0 +1,206 @@
+package cfg
+
+import (
+ "fmt"
+ "regexp"
+ "time"
+)
+
+func UserAgent() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.userAgent
+}
+
+func ListenAddr() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return fmt.Sprintf("%s:%d", globalConfig.bindAddr, globalConfig.listenPort)
+}
+
+func ConfigDir() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.configDir
+}
+
+func DatabaseUrl() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.databaseUrl
+}
+
+func MusicBrainzUrl() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.musicBrainzUrl
+}
+
+func MusicBrainzRateLimit() int {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.musicBrainzRateLimit
+}
+
+func LogLevel() int {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.logLevel
+}
+
+func StructuredLogging() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.structuredLogging
+}
+
+func LbzRelayEnabled() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.lbzRelayEnabled
+}
+
+func LbzRelayUrl() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.lbzRelayUrl
+}
+
+func LbzRelayToken() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.lbzRelayToken
+}
+
+func DefaultPassword() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.defaultPw
+}
+
+func DefaultUsername() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.defaultUsername
+}
+
+func DefaultTheme() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.defaultTheme
+}
+
+func FullImageCacheEnabled() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.enableFullImageCache
+}
+
+func DeezerDisabled() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.disableDeezer
+}
+
+func CoverArtArchiveDisabled() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.disableCAA
+}
+
+func MusicBrainzDisabled() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.disableMusicBrainz
+}
+
+func SubsonicEnabled() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.subsonicEnabled
+}
+
+func SubsonicUrl() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.subsonicUrl
+}
+
+func SubsonicParams() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.subsonicParams
+}
+
+func LastFMApiKey() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.lastfmApiKey
+}
+
+func SkipImport() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.skipImport
+}
+
+func AllowedHosts() []string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.allowedHosts
+}
+
+func AllowAllHosts() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.allowAllHosts
+}
+
+func AllowedOrigins() []string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.allowedOrigins
+}
+
+func RateLimitDisabled() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.disableRateLimit
+}
+
+func ThrottleImportMs() int {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.importThrottleMs
+}
+
+// returns the before, after times, in that order
+func ImportWindow() (time.Time, time.Time) {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.importBefore, globalConfig.importAfter
+}
+
+func FetchImagesDuringImport() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.fetchImageDuringImport
+}
+
+func ArtistSeparators() []*regexp.Regexp {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.artistSeparators
+}
+
+func LoginGate() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.loginGate
+}
+
+func ForceTZ() *time.Location {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.forceTZ
+}
diff --git a/internal/cfg/setters.go b/internal/cfg/setters.go
new file mode 100644
index 0000000..8458780
--- /dev/null
+++ b/internal/cfg/setters.go
@@ -0,0 +1,7 @@
+package cfg
+
+func SetLoginGate(val bool) {
+ lock.Lock()
+ defer lock.Unlock()
+ globalConfig.loginGate = val
+}
From 64236c99c9fe2493b997be9bcbd4b748bf15a379 Mon Sep 17 00:00:00 2001
From: Gabe Farrell <90876006+gabehf@users.noreply.github.com>
Date: Mon, 26 Jan 2026 14:49:30 -0500
Subject: [PATCH 28/31] fix: invalid json response when login gate is disabled
(#184)
---
engine/long_test.go | 13 +++++++++++++
engine/middleware/authenticate.go | 7 +++----
2 files changed, 16 insertions(+), 4 deletions(-)
diff --git a/engine/long_test.go b/engine/long_test.go
index d916117..db86ac2 100644
--- a/engine/long_test.go
+++ b/engine/long_test.go
@@ -367,6 +367,16 @@ func TestLoginGate(t *testing.T) {
assert.NoError(t, err)
assert.Equal(t, 204, resp.StatusCode)
+ req, err = http.NewRequest("GET", host()+"/apis/web/v1/artist?id=3", nil)
+ require.NoError(t, err)
+ resp, err = http.DefaultClient.Do(req)
+ assert.NoError(t, err)
+ assert.Equal(t, 200, resp.StatusCode)
+ var artist models.Artist
+ err = json.NewDecoder(resp.Body).Decode(&artist)
+ require.NoError(t, err)
+ assert.Equal(t, "ネクライトーキー", artist.Name)
+
cfg.SetLoginGate(true)
req, err = http.NewRequest("GET", host()+"/apis/web/v1/artist?id=3", nil)
@@ -382,6 +392,9 @@ func TestLoginGate(t *testing.T) {
resp, err = http.DefaultClient.Do(req)
assert.NoError(t, err)
assert.Equal(t, 200, resp.StatusCode)
+ err = json.NewDecoder(resp.Body).Decode(&artist)
+ require.NoError(t, err)
+ assert.Equal(t, "ネクライトーキー", artist.Name)
cfg.SetLoginGate(false)
diff --git a/engine/middleware/authenticate.go b/engine/middleware/authenticate.go
index a435473..830fb78 100644
--- a/engine/middleware/authenticate.go
+++ b/engine/middleware/authenticate.go
@@ -62,6 +62,7 @@ func Authenticate(store db.DB, mode AuthMode) func(http.Handler) http.Handler {
}
} else {
next.ServeHTTP(w, r)
+ return
}
}
@@ -76,10 +77,8 @@ func Authenticate(store db.DB, mode AuthMode) func(http.Handler) http.Handler {
return
}
- if user != nil {
- ctx = context.WithValue(ctx, UserContextKey, user)
- r = r.WithContext(ctx)
- }
+ ctx = context.WithValue(ctx, UserContextKey, user)
+ r = r.WithContext(ctx)
next.ServeHTTP(w, r)
})
From b06685c1afe2d635e819b96646419be4ae81dab3 Mon Sep 17 00:00:00 2001
From: Gabe Farrell <90876006+gabehf@users.noreply.github.com>
Date: Mon, 2 Feb 2026 15:06:13 -0500
Subject: [PATCH 29/31] fix: rewind navigation (#191)
---
client/app/routes/RewindPage.tsx | 32 ++++++++++++++++++++------------
1 file changed, 20 insertions(+), 12 deletions(-)
diff --git a/client/app/routes/RewindPage.tsx b/client/app/routes/RewindPage.tsx
index 4d60065..ad92497 100644
--- a/client/app/routes/RewindPage.tsx
+++ b/client/app/routes/RewindPage.tsx
@@ -29,10 +29,12 @@ const months = [
export async function clientLoader({ request }: LoaderFunctionArgs) {
const url = new URL(request.url);
- const year =
- parseInt(url.searchParams.get("year") || "0") || getRewindParams().year;
- const month =
- parseInt(url.searchParams.get("month") || "0") || getRewindParams().month;
+ const year = parseInt(
+ url.searchParams.get("year") || getRewindParams().year.toString()
+ );
+ const month = parseInt(
+ url.searchParams.get("month") || getRewindParams().month.toString()
+ );
const res = await fetch(`/apis/web/v1/summary?year=${year}&month=${month}`);
if (!res.ok) {
@@ -46,10 +48,12 @@ export async function clientLoader({ request }: LoaderFunctionArgs) {
export default function RewindPage() {
const currentParams = new URLSearchParams(location.search);
- let year =
- parseInt(currentParams.get("year") || "0") || getRewindParams().year;
- let month =
- parseInt(currentParams.get("month") || "0") || getRewindParams().month;
+ let year = parseInt(
+ currentParams.get("year") || getRewindParams().year.toString()
+ );
+ let month = parseInt(
+ currentParams.get("month") || getRewindParams().month.toString()
+ );
const navigate = useNavigate();
const [showTime, setShowTime] = useState(false);
const { stats: stats } = useLoaderData<{ stats: RewindStats }>();
@@ -73,10 +77,8 @@ export default function RewindPage() {
for (const key in params) {
const val = params[key];
- if (val !== null && val !== "0") {
+ if (val !== null) {
nextParams.set(key, val);
- } else {
- nextParams.delete(key);
}
}
@@ -99,6 +101,7 @@ export default function RewindPage() {
month -= 1;
}
}
+ console.log(`Month: ${month}`);
updateParams({
year: year.toString(),
@@ -154,7 +157,12 @@ export default function RewindPage() {
From 531c72899cf5b2cbc90a22cf22709d4b382f412b Mon Sep 17 00:00:00 2001
From: Gabe Farrell <90876006+gabehf@users.noreply.github.com>
Date: Tue, 3 Feb 2026 11:23:30 -0500
Subject: [PATCH 30/31] fix: add null check for top charts bg gradient (#193)
---
client/app/routes/Charts/ChartLayout.tsx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/client/app/routes/Charts/ChartLayout.tsx b/client/app/routes/Charts/ChartLayout.tsx
index 446f7d1..90858bd 100644
--- a/client/app/routes/Charts/ChartLayout.tsx
+++ b/client/app/routes/Charts/ChartLayout.tsx
@@ -40,7 +40,7 @@ export default function ChartLayout({
useEffect(() => {
if ((data?.items?.length ?? 0) === 0) return;
- const img = (data.items[0] as any)?.item.image;
+ const img = (data.items[0] as any)?.item?.image;
if (!img) return;
average(imageUrl(img, "small"), { amount: 1 }).then((color) => {
From 0ec7b458ccf29bed07e5cd53ac407c117a29f2b2 Mon Sep 17 00:00:00 2001
From: Gabe Farrell <90876006+gabehf@users.noreply.github.com>
Date: Wed, 4 Feb 2026 13:41:12 -0500
Subject: [PATCH 31/31] ui: tweaks and fixes (#194)
* reduce min width of top chart on mobile
* adjust error page style
* adjust h1 line height
---
client/app/app.css | 3 +++
client/app/root.tsx | 8 ++++----
client/app/routes/Charts/AlbumChart.tsx | 4 ++--
client/app/routes/Charts/ArtistChart.tsx | 4 ++--
client/app/routes/Charts/TrackChart.tsx | 4 ++--
5 files changed, 13 insertions(+), 10 deletions(-)
diff --git a/client/app/app.css b/client/app/app.css
index bc60042..15cfbc0 100644
--- a/client/app/app.css
+++ b/client/app/app.css
@@ -58,6 +58,7 @@
--header-sm: 16px;
--header-xl-weight: 600;
--header-weight: 600;
+ --header-line-height: 3rem;
}
@media (min-width: 60rem) {
@@ -68,6 +69,7 @@
--header-sm: 16px;
--header-xl-weight: 600;
--header-weight: 600;
+ --header-line-height: 1.3em;
}
}
@@ -98,6 +100,7 @@ h1 {
font-family: "League Spartan";
font-weight: var(--header-weight);
font-size: var(--header-xl);
+ line-height: var(--header-line-height);
}
h2 {
font-family: "League Spartan";
diff --git a/client/app/root.tsx b/client/app/root.tsx
index 077d09e..cb0723f 100644
--- a/client/app/root.tsx
+++ b/client/app/root.tsx
@@ -116,12 +116,12 @@ export function ErrorBoundary() {
{title}
+
-
-
-
-

+
+
+
{message}
{details}
diff --git a/client/app/routes/Charts/AlbumChart.tsx b/client/app/routes/Charts/AlbumChart.tsx
index 7bc4eea..7a157a8 100644
--- a/client/app/routes/Charts/AlbumChart.tsx
+++ b/client/app/routes/Charts/AlbumChart.tsx
@@ -30,7 +30,7 @@ export default function AlbumChart() {
initialData={initialData}
endpoint="chart/top-albums"
render={({ data, page, onNext, onPrev }) => (
-
+