diff --git a/client/app/routes/Home.tsx b/client/app/routes/Home.tsx
index 597c563..55c62bf 100644
--- a/client/app/routes/Home.tsx
+++ b/client/app/routes/Home.tsx
@@ -10,17 +10,20 @@ import PeriodSelector from "~/components/PeriodSelector";
import { useAppContext } from "~/providers/AppProvider";
export function meta({}: Route.MetaArgs) {
- return [{ title: "Koito" }, { name: "description", content: "Koito" }];
+ return [
+ { title: "Koito" },
+ { name: "description", content: "Koito" },
+ ];
}
export default function Home() {
- const [period, setPeriod] = useState("week");
+ const [period, setPeriod] = useState('week')
const { homeItems } = useAppContext();
return (
-
-
+
+
@@ -30,10 +33,7 @@ export default function Home() {
-
+
diff --git a/client/app/routes/MediaItems/Album.tsx b/client/app/routes/MediaItems/Album.tsx
index e6f413e..afba6f7 100644
--- a/client/app/routes/MediaItems/Album.tsx
+++ b/client/app/routes/MediaItems/Album.tsx
@@ -30,7 +30,6 @@ export default function Album() {
title={album.title}
img={album.image}
id={album.id}
- rank={album.all_time_rank}
musicbrainzId={album.musicbrainz_id}
imgItemId={album.id}
mergeFunc={mergeAlbums}
@@ -46,17 +45,17 @@ export default function Album() {
}}
subContent={
- {album.listen_count !== 0 && (
+ {album.listen_count && (
{album.listen_count} play{album.listen_count > 1 ? "s" : ""}
)}
- {album.time_listened !== 0 && (
+ {album.time_listened && (
{timeListenedString(album.time_listened)}
)}
- {album.first_listen > 0 && (
+ {album.first_listen && (
Listening since{" "}
{new Date(album.first_listen * 1000).toLocaleDateString()}
diff --git a/client/app/routes/MediaItems/Artist.tsx b/client/app/routes/MediaItems/Artist.tsx
index a23e4cd..00334c1 100644
--- a/client/app/routes/MediaItems/Artist.tsx
+++ b/client/app/routes/MediaItems/Artist.tsx
@@ -36,7 +36,6 @@ export default function Artist() {
title={artist.name}
img={artist.image}
id={artist.id}
- rank={artist.all_time_rank}
musicbrainzId={artist.musicbrainz_id}
imgItemId={artist.id}
mergeFunc={mergeArtists}
@@ -57,17 +56,17 @@ export default function Artist() {
{artist.listen_count} play{artist.listen_count > 1 ? "s" : ""}
)}
- {artist.time_listened !== 0 && (
+ {
{timeListenedString(artist.time_listened)}
- )}
- {artist.first_listen > 0 && (
+ }
+ {
Listening since{" "}
{new Date(artist.first_listen * 1000).toLocaleDateString()}
- )}
+ }
}
>
diff --git a/client/app/routes/MediaItems/MediaLayout.tsx b/client/app/routes/MediaItems/MediaLayout.tsx
index eaf100b..c675fc6 100644
--- a/client/app/routes/MediaItems/MediaLayout.tsx
+++ b/client/app/routes/MediaItems/MediaLayout.tsx
@@ -28,7 +28,6 @@ interface Props {
title: string;
img: string;
id: number;
- rank: number;
musicbrainzId: string;
imgItemId: number;
mergeFunc: MergeFunc;
@@ -97,15 +96,7 @@ export default function MediaLayout(props: Props) {
{props.type}
-
-
- {props.title}
-
- {" "}
- #{props.rank}
-
-
-
+
{props.title}
{props.subContent}
diff --git a/client/app/routes/MediaItems/Track.tsx b/client/app/routes/MediaItems/Track.tsx
index 6b6690e..20258c1 100644
--- a/client/app/routes/MediaItems/Track.tsx
+++ b/client/app/routes/MediaItems/Track.tsx
@@ -34,7 +34,6 @@ export default function Track() {
title={track.title}
img={track.image}
id={track.id}
- rank={track.all_time_rank}
musicbrainzId={track.musicbrainz_id}
imgItemId={track.album_id}
mergeFunc={mergeTracks}
diff --git a/client/app/routes/RewindPage.tsx b/client/app/routes/RewindPage.tsx
index ad92497..71a1ef6 100644
--- a/client/app/routes/RewindPage.tsx
+++ b/client/app/routes/RewindPage.tsx
@@ -29,12 +29,10 @@ const months = [
export async function clientLoader({ request }: LoaderFunctionArgs) {
const url = new URL(request.url);
- const year = parseInt(
- url.searchParams.get("year") || getRewindParams().year.toString()
- );
- const month = parseInt(
- url.searchParams.get("month") || getRewindParams().month.toString()
- );
+ const year =
+ parseInt(url.searchParams.get("year") || "0") || getRewindParams().year;
+ const month =
+ parseInt(url.searchParams.get("month") || "0") || getRewindParams().month;
const res = await fetch(`/apis/web/v1/summary?year=${year}&month=${month}`);
if (!res.ok) {
@@ -48,12 +46,10 @@ export async function clientLoader({ request }: LoaderFunctionArgs) {
export default function RewindPage() {
const currentParams = new URLSearchParams(location.search);
- let year = parseInt(
- currentParams.get("year") || getRewindParams().year.toString()
- );
- let month = parseInt(
- currentParams.get("month") || getRewindParams().month.toString()
- );
+ let year =
+ parseInt(currentParams.get("year") || "0") || getRewindParams().year;
+ let month =
+ parseInt(currentParams.get("month") || "0") || getRewindParams().month;
const navigate = useNavigate();
const [showTime, setShowTime] = useState(false);
const { stats: stats } = useLoaderData<{ stats: RewindStats }>();
@@ -63,7 +59,7 @@ export default function RewindPage() {
useEffect(() => {
if (!stats.top_artists[0]) return;
- const img = (stats.top_artists[0] as any)?.item.image;
+ const img = (stats.top_artists[0] as any)?.image;
if (!img) return;
average(imageUrl(img, "small"), { amount: 1 }).then((color) => {
@@ -77,8 +73,10 @@ export default function RewindPage() {
for (const key in params) {
const val = params[key];
- if (val !== null) {
+ if (val !== null && val !== "0") {
nextParams.set(key, val);
+ } else {
+ nextParams.delete(key);
}
}
@@ -101,7 +99,6 @@ export default function RewindPage() {
month -= 1;
}
}
- console.log(`Month: ${month}`);
updateParams({
year: year.toString(),
@@ -157,12 +154,7 @@ export default function RewindPage() {
navigateMonth("next")}
className="p-2 disabled:text-(--color-fg-tertiary)"
- disabled={
- // next month is current or future month and
- month >= new Date().getMonth() &&
- // we are looking at current (or future) year
- year >= new Date().getFullYear()
- }
+ disabled={new Date(year, month) > new Date()}
>
diff --git a/client/app/styles/themes.css.ts b/client/app/styles/themes.css.ts
index 1a3a57d..d5390ae 100644
--- a/client/app/styles/themes.css.ts
+++ b/client/app/styles/themes.css.ts
@@ -92,7 +92,7 @@ export const themes: Record = {
fg: "#fef9f3",
fgSecondary: "#dbc6b0",
fgTertiary: "#a3917a",
- primary: "#F0850A",
+ primary: "#d97706",
primaryDim: "#b45309",
accent: "#8c4c28",
accentDim: "#6b3b1f",
diff --git a/db/migrations/000005_rm_orphan_artist_releases.sql b/db/migrations/000005_rm_orphan_artist_releases.sql
deleted file mode 100644
index bfb361f..0000000
--- a/db/migrations/000005_rm_orphan_artist_releases.sql
+++ /dev/null
@@ -1,9 +0,0 @@
--- +goose Up
-DELETE FROM artist_releases ar
-WHERE NOT EXISTS (
- SELECT 1
- FROM artist_tracks at
- JOIN tracks t ON at.track_id = t.id
- WHERE at.artist_id = ar.artist_id
- AND t.release_id = ar.release_id
-);
diff --git a/db/queries/artist.sql b/db/queries/artist.sql
index 70a2fdd..e20326d 100644
--- a/db/queries/artist.sql
+++ b/db/queries/artist.sql
@@ -56,60 +56,22 @@ LEFT JOIN artist_aliases aa ON a.id = aa.artist_id
WHERE a.musicbrainz_id = $1
GROUP BY a.id, a.musicbrainz_id, a.image, a.image_source, a.name;
--- name: GetArtistsWithoutImages :many
-SELECT
- *
-FROM artists_with_name
-WHERE image IS NULL
- AND id > $2
-ORDER BY id ASC
-LIMIT $1;
-
-- name: GetTopArtistsPaginated :many
SELECT
- x.id,
- x.name,
- x.musicbrainz_id,
- x.image,
- x.listen_count,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
-FROM (
- SELECT
a.id,
a.name,
a.musicbrainz_id,
a.image,
COUNT(*) AS listen_count
- FROM listens l
- JOIN tracks t ON l.track_id = t.id
- JOIN artist_tracks at ON at.track_id = t.id
- JOIN artists_with_name a ON a.id = at.artist_id
- WHERE l.listened_at BETWEEN $1 AND $2
- GROUP BY a.id, a.name, a.musicbrainz_id, a.image
-) x
-ORDER BY x.listen_count DESC, x.id
+FROM listens l
+JOIN tracks t ON l.track_id = t.id
+JOIN artist_tracks at ON at.track_id = t.id
+JOIN artists_with_name a ON a.id = at.artist_id
+WHERE l.listened_at BETWEEN $1 AND $2
+GROUP BY a.id, a.name, a.musicbrainz_id, a.image, a.image_source, a.name
+ORDER BY listen_count DESC, a.id
LIMIT $3 OFFSET $4;
--- name: GetArtistAllTimeRank :one
-SELECT
- artist_id,
- rank
-FROM (
- SELECT
- x.artist_id,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
- FROM (
- SELECT
- at.artist_id,
- COUNT(*) AS listen_count
- FROM listens l
- JOIN tracks t ON l.track_id = t.id
- JOIN artist_tracks at ON t.id = at.track_id
- GROUP BY at.artist_id
- ) x
- )
-WHERE artist_id = $1;
-
-- name: CountTopArtists :one
SELECT COUNT(DISTINCT at.artist_id) AS total_count
FROM listens l
diff --git a/db/queries/etc.sql b/db/queries/etc.sql
index 38465f2..44139b8 100644
--- a/db/queries/etc.sql
+++ b/db/queries/etc.sql
@@ -3,13 +3,7 @@ DO $$
BEGIN
DELETE FROM tracks WHERE id NOT IN (SELECT l.track_id FROM listens l);
DELETE FROM releases WHERE id NOT IN (SELECT t.release_id FROM tracks t);
+-- DELETE FROM releases WHERE release_group_id NOT IN (SELECT t.release_group_id FROM tracks t);
+-- DELETE FROM releases WHERE release_group_id NOT IN (SELECT rg.id FROM release_groups rg);
DELETE FROM artists WHERE id NOT IN (SELECT at.artist_id FROM artist_tracks at);
- DELETE FROM artist_releases ar
- WHERE NOT EXISTS (
- SELECT 1
- FROM artist_tracks at
- JOIN tracks t ON at.track_id = t.id
- WHERE at.artist_id = ar.artist_id
- AND t.release_id = ar.release_id
- );
END $$;
diff --git a/db/queries/interest.sql b/db/queries/interest.sql
index 874f4cd..389c75b 100644
--- a/db/queries/interest.sql
+++ b/db/queries/interest.sql
@@ -1,139 +1,162 @@
-- name: GetGroupedListensFromArtist :many
-WITH bounds AS (
+WITH artist_listens AS (
SELECT
- MIN(l.listened_at) AS start_time,
- NOW() AS end_time
+ l.listened_at
FROM listens l
JOIN tracks t ON t.id = l.track_id
JOIN artist_tracks at ON at.track_id = t.id
WHERE at.artist_id = $1
),
-stats AS (
+bounds AS (
SELECT
- start_time,
- end_time,
- EXTRACT(EPOCH FROM (end_time - start_time)) AS total_seconds,
- ((end_time - start_time) / sqlc.arg(bucket_count)::int) AS bucket_interval
- FROM bounds
+ MIN(listened_at) AS start_time,
+ MAX(listened_at) AS end_time
+ FROM artist_listens
),
-bucket_series AS (
- SELECT generate_series(0, sqlc.arg(bucket_count)::int - 1) AS idx
-),
-listen_indices AS (
+bucketed AS (
SELECT
LEAST(
- sqlc.arg(bucket_count)::int - 1,
+ sqlc.arg(bucket_count) - 1,
FLOOR(
- (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0))
- * sqlc.arg(bucket_count)::int
+ (
+ EXTRACT(EPOCH FROM (al.listened_at - b.start_time))
+ /
+ NULLIF(EXTRACT(EPOCH FROM (b.end_time - b.start_time)), 0)
+ ) * sqlc.arg(bucket_count)
)::int
- ) AS bucket_idx
- FROM listens l
- JOIN tracks t ON t.id = l.track_id
- JOIN artist_tracks at ON at.track_id = t.id
- CROSS JOIN stats s
- WHERE at.artist_id = $1
- AND s.start_time IS NOT NULL
+ ) AS bucket_idx,
+ b.start_time,
+ b.end_time
+ FROM artist_listens al
+ CROSS JOIN bounds b
+),
+aggregated AS (
+ SELECT
+ start_time
+ + (
+ bucket_idx * (end_time - start_time)
+ / sqlc.arg(bucket_count)
+ ) AS bucket_start,
+ start_time
+ + (
+ (bucket_idx + 1) * (end_time - start_time)
+ / sqlc.arg(bucket_count)
+ ) AS bucket_end,
+ COUNT(*) AS listen_count
+ FROM bucketed
+ GROUP BY bucket_idx, start_time, end_time
)
SELECT
- (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS bucket_start,
- (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end,
- COUNT(li.bucket_idx) AS listen_count
-FROM bucket_series bs
-CROSS JOIN stats s
-LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx
-WHERE s.start_time IS NOT NULL
-GROUP BY bs.idx, s.start_time, s.bucket_interval
-ORDER BY bs.idx;
+ bucket_start::timestamptz,
+ bucket_end::timestamptz,
+ listen_count
+FROM aggregated
+ORDER BY bucket_start;
-- name: GetGroupedListensFromRelease :many
-WITH bounds AS (
+WITH artist_listens AS (
SELECT
- MIN(l.listened_at) AS start_time,
- NOW() AS end_time
+ l.listened_at
FROM listens l
JOIN tracks t ON t.id = l.track_id
WHERE t.release_id = $1
),
-stats AS (
+bounds AS (
SELECT
- start_time,
- end_time,
- EXTRACT(EPOCH FROM (end_time - start_time)) AS total_seconds,
- ((end_time - start_time) / sqlc.arg(bucket_count)::int) AS bucket_interval
- FROM bounds
+ MIN(listened_at) AS start_time,
+ MAX(listened_at) AS end_time
+ FROM artist_listens
),
-bucket_series AS (
- SELECT generate_series(0, sqlc.arg(bucket_count)::int - 1) AS idx
-),
-listen_indices AS (
+bucketed AS (
SELECT
LEAST(
- sqlc.arg(bucket_count)::int - 1,
+ sqlc.arg(bucket_count) - 1,
FLOOR(
- (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0))
- * sqlc.arg(bucket_count)::int
+ (
+ EXTRACT(EPOCH FROM (al.listened_at - b.start_time))
+ /
+ NULLIF(EXTRACT(EPOCH FROM (b.end_time - b.start_time)), 0)
+ ) * sqlc.arg(bucket_count)
)::int
- ) AS bucket_idx
- FROM listens l
- JOIN tracks t ON t.id = l.track_id
- CROSS JOIN stats s
- WHERE t.release_id = $1
- AND s.start_time IS NOT NULL
+ ) AS bucket_idx,
+ b.start_time,
+ b.end_time
+ FROM artist_listens al
+ CROSS JOIN bounds b
+),
+aggregated AS (
+ SELECT
+ start_time
+ + (
+ bucket_idx * (end_time - start_time)
+ / sqlc.arg(bucket_count)
+ ) AS bucket_start,
+ start_time
+ + (
+ (bucket_idx + 1) * (end_time - start_time)
+ / sqlc.arg(bucket_count)
+ ) AS bucket_end,
+ COUNT(*) AS listen_count
+ FROM bucketed
+ GROUP BY bucket_idx, start_time, end_time
)
SELECT
- (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS bucket_start,
- (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end,
- COUNT(li.bucket_idx) AS listen_count
-FROM bucket_series bs
-CROSS JOIN stats s
-LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx
-WHERE s.start_time IS NOT NULL
-GROUP BY bs.idx, s.start_time, s.bucket_interval
-ORDER BY bs.idx;
+ bucket_start::timestamptz,
+ bucket_end::timestamptz,
+ listen_count
+FROM aggregated
+ORDER BY bucket_start;
-- name: GetGroupedListensFromTrack :many
-WITH bounds AS (
+WITH artist_listens AS (
SELECT
- MIN(l.listened_at) AS start_time,
- NOW() AS end_time
+ l.listened_at
FROM listens l
JOIN tracks t ON t.id = l.track_id
WHERE t.id = $1
),
-stats AS (
+bounds AS (
SELECT
- start_time,
- end_time,
- EXTRACT(EPOCH FROM (end_time - start_time)) AS total_seconds,
- ((end_time - start_time) / sqlc.arg(bucket_count)::int) AS bucket_interval
- FROM bounds
+ MIN(listened_at) AS start_time,
+ MAX(listened_at) AS end_time
+ FROM artist_listens
),
-bucket_series AS (
- SELECT generate_series(0, sqlc.arg(bucket_count)::int - 1) AS idx
-),
-listen_indices AS (
+bucketed AS (
SELECT
LEAST(
- sqlc.arg(bucket_count)::int - 1,
+ sqlc.arg(bucket_count) - 1,
FLOOR(
- (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0))
- * sqlc.arg(bucket_count)::int
+ (
+ EXTRACT(EPOCH FROM (al.listened_at - b.start_time))
+ /
+ NULLIF(EXTRACT(EPOCH FROM (b.end_time - b.start_time)), 0)
+ ) * sqlc.arg(bucket_count)
)::int
- ) AS bucket_idx
- FROM listens l
- JOIN tracks t ON t.id = l.track_id
- CROSS JOIN stats s
- WHERE t.id = $1
- AND s.start_time IS NOT NULL
+ ) AS bucket_idx,
+ b.start_time,
+ b.end_time
+ FROM artist_listens al
+ CROSS JOIN bounds b
+),
+aggregated AS (
+ SELECT
+ start_time
+ + (
+ bucket_idx * (end_time - start_time)
+ / sqlc.arg(bucket_count)
+ ) AS bucket_start,
+ start_time
+ + (
+ (bucket_idx + 1) * (end_time - start_time)
+ / sqlc.arg(bucket_count)
+ ) AS bucket_end,
+ COUNT(*) AS listen_count
+ FROM bucketed
+ GROUP BY bucket_idx, start_time, end_time
)
SELECT
- (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS bucket_start,
- (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end,
- COUNT(li.bucket_idx) AS listen_count
-FROM bucket_series bs
-CROSS JOIN stats s
-LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx
-WHERE s.start_time IS NOT NULL
-GROUP BY bs.idx, s.start_time, s.bucket_interval
-ORDER BY bs.idx;
+ bucket_start::timestamptz,
+ bucket_end::timestamptz,
+ listen_count
+FROM aggregated
+ORDER BY bucket_start;
diff --git a/db/queries/release.sql b/db/queries/release.sql
index 23bd2f2..9f54291 100644
--- a/db/queries/release.sql
+++ b/db/queries/release.sql
@@ -47,61 +47,32 @@ WHERE r.title = ANY ($1::TEXT[])
-- name: GetTopReleasesFromArtist :many
SELECT
- x.*,
- get_artists_for_release(x.id) AS artists,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
-FROM (
- SELECT
- r.*,
- COUNT(*) AS listen_count
- FROM listens l
- JOIN tracks t ON l.track_id = t.id
- JOIN releases_with_title r ON t.release_id = r.id
- JOIN artist_releases ar ON r.id = ar.release_id
- WHERE ar.artist_id = $5
- AND l.listened_at BETWEEN $1 AND $2
- GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source
-) x
-ORDER BY listen_count DESC, x.id
+ r.*,
+ COUNT(*) AS listen_count,
+ get_artists_for_release(r.id) AS artists
+FROM listens l
+JOIN tracks t ON l.track_id = t.id
+JOIN releases_with_title r ON t.release_id = r.id
+JOIN artist_releases ar ON r.id = ar.release_id
+WHERE ar.artist_id = $5
+ AND l.listened_at BETWEEN $1 AND $2
+GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source
+ORDER BY listen_count DESC, r.id
LIMIT $3 OFFSET $4;
-- name: GetTopReleasesPaginated :many
SELECT
- x.*,
- get_artists_for_release(x.id) AS artists,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
-FROM (
- SELECT
- r.*,
- COUNT(*) AS listen_count
- FROM listens l
- JOIN tracks t ON l.track_id = t.id
- JOIN releases_with_title r ON t.release_id = r.id
- WHERE l.listened_at BETWEEN $1 AND $2
- GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source
-) x
-ORDER BY listen_count DESC, x.id
+ r.*,
+ COUNT(*) AS listen_count,
+ get_artists_for_release(r.id) AS artists
+FROM listens l
+JOIN tracks t ON l.track_id = t.id
+JOIN releases_with_title r ON t.release_id = r.id
+WHERE l.listened_at BETWEEN $1 AND $2
+GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source
+ORDER BY listen_count DESC, r.id
LIMIT $3 OFFSET $4;
--- name: GetReleaseAllTimeRank :one
-SELECT
- release_id,
- rank
-FROM (
- SELECT
- x.release_id,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
- FROM (
- SELECT
- t.release_id,
- COUNT(*) AS listen_count
- FROM listens l
- JOIN tracks t ON l.track_id = t.id
- GROUP BY t.release_id
- ) x
- )
-WHERE release_id = $1;
-
-- name: CountTopReleases :one
SELECT COUNT(DISTINCT r.id) AS total_count
FROM listens l
diff --git a/db/queries/track.sql b/db/queries/track.sql
index 3be4d7e..933fcc1 100644
--- a/db/queries/track.sql
+++ b/db/queries/track.sql
@@ -39,100 +39,57 @@ HAVING COUNT(DISTINCT at.artist_id) = cardinality($3::int[]);
-- name: GetTopTracksPaginated :many
SELECT
- x.track_id AS id,
+ t.id,
t.title,
t.musicbrainz_id,
t.release_id,
r.image,
- x.listen_count,
- get_artists_for_track(x.track_id) AS artists,
- x.rank
-FROM (
- SELECT
- track_id,
- COUNT(*) AS listen_count,
- RANK() OVER (ORDER BY COUNT(*) DESC) as rank
- FROM listens
- WHERE listened_at BETWEEN $1 AND $2
- GROUP BY track_id
- ORDER BY listen_count DESC
- LIMIT $3 OFFSET $4
-) x
-JOIN tracks_with_title t ON x.track_id = t.id
+ COUNT(*) AS listen_count,
+ get_artists_for_track(t.id) AS artists
+FROM listens l
+JOIN tracks_with_title t ON l.track_id = t.id
JOIN releases r ON t.release_id = r.id
-ORDER BY x.listen_count DESC, x.track_id;
+WHERE l.listened_at BETWEEN $1 AND $2
+GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image
+ORDER BY listen_count DESC, t.id
+LIMIT $3 OFFSET $4;
-- name: GetTopTracksByArtistPaginated :many
SELECT
- x.track_id AS id,
+ t.id,
t.title,
t.musicbrainz_id,
t.release_id,
r.image,
- x.listen_count,
- get_artists_for_track(x.track_id) AS artists,
- x.rank
-FROM (
- SELECT
- l.track_id,
- COUNT(*) AS listen_count,
- RANK() OVER (ORDER BY COUNT(*) DESC) as rank
- FROM listens l
- JOIN artist_tracks at ON l.track_id = at.track_id
- WHERE l.listened_at BETWEEN $1 AND $2
- AND at.artist_id = $5
- GROUP BY l.track_id
- ORDER BY listen_count DESC
- LIMIT $3 OFFSET $4
-) x
-JOIN tracks_with_title t ON x.track_id = t.id
+ COUNT(*) AS listen_count,
+ get_artists_for_track(t.id) AS artists
+FROM listens l
+JOIN tracks_with_title t ON l.track_id = t.id
JOIN releases r ON t.release_id = r.id
-ORDER BY x.listen_count DESC, x.track_id;
+JOIN artist_tracks at ON at.track_id = t.id
+WHERE l.listened_at BETWEEN $1 AND $2
+ AND at.artist_id = $5
+GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image
+ORDER BY listen_count DESC, t.id
+LIMIT $3 OFFSET $4;
-- name: GetTopTracksInReleasePaginated :many
SELECT
- x.track_id AS id,
+ t.id,
t.title,
t.musicbrainz_id,
t.release_id,
r.image,
- x.listen_count,
- get_artists_for_track(x.track_id) AS artists,
- x.rank
-FROM (
- SELECT
- l.track_id,
- COUNT(*) AS listen_count,
- RANK() OVER (ORDER BY COUNT(*) DESC) as rank
- FROM listens l
- JOIN tracks t ON l.track_id = t.id
- WHERE l.listened_at BETWEEN $1 AND $2
- AND t.release_id = $5
- GROUP BY l.track_id
- ORDER BY listen_count DESC
- LIMIT $3 OFFSET $4
-) x
-JOIN tracks_with_title t ON x.track_id = t.id
+ COUNT(*) AS listen_count,
+ get_artists_for_track(t.id) AS artists
+FROM listens l
+JOIN tracks_with_title t ON l.track_id = t.id
JOIN releases r ON t.release_id = r.id
-ORDER BY x.listen_count DESC, x.track_id;
-
--- name: GetTrackAllTimeRank :one
-SELECT
- id,
- rank
-FROM (
- SELECT
- x.id,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
- FROM (
- SELECT
- t.id,
- COUNT(*) AS listen_count
- FROM listens l
- JOIN tracks_with_title t ON l.track_id = t.id
- GROUP BY t.id) x
- ) y
-WHERE id = $1;
+WHERE l.listened_at BETWEEN $1 AND $2
+ AND t.release_id = $5
+GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image
+ORDER BY listen_count DESC, t.id
+LIMIT $3 OFFSET $4;
-- name: CountTopTracks :one
SELECT COUNT(DISTINCT l.track_id) AS total_count
diff --git a/docs/src/content/docs/index.mdx b/docs/src/content/docs/index.mdx
index f590ebb..a4d1858 100644
--- a/docs/src/content/docs/index.mdx
+++ b/docs/src/content/docs/index.mdx
@@ -28,7 +28,7 @@ import { Card, CardGrid } from '@astrojs/starlight/components';
Koito can be connected to any music server or client that allows for custom ListenBrainz URLs.
- Automatically relay listens submitted to your Koito instance to other ListenBrainz compatible servers.
+ Automatically relay listens submitted to your Koito instance to other ListenBrainz compatble servers.
Koito automatically fetches data from MusicBrainz and images from Deezer and Cover Art Archive to compliment what is provided by your music server.
diff --git a/docs/src/content/docs/reference/configuration.md b/docs/src/content/docs/reference/configuration.md
index 2af573c..4e806a0 100644
--- a/docs/src/content/docs/reference/configuration.md
+++ b/docs/src/content/docs/reference/configuration.md
@@ -64,8 +64,6 @@ If the environment variable is defined without **and** with the suffix at the sa
##### KOITO_CONFIG_DIR
- Default: `/etc/koito`
- Description: The location where import folders and image caches are stored.
-##### KOITO_FORCE_TZ
-- Description: A canonical IANA database time zone name (https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) that Koito will use to serve all clients. Overrides any timezones requested via a `tz` cookie or `tz` query parameter. Koito will fail to start if this value is invalid.
##### KOITO_DISABLE_DEEZER
- Default: `false`
- Description: Disables Deezer as a source for finding artist and album images.
@@ -80,13 +78,6 @@ If the environment variable is defined without **and** with the suffix at the sa
##### KOITO_SUBSONIC_PARAMS
- Required: `true` if KOITO_SUBSONIC_URL is set
- Description: The `u`, `t`, and `s` authentication parameters to use for authenticated requests to your subsonic server, in the format `u=XXX&t=XXX&s=XXX`. An easy way to find them is to open the network tab in the developer tools of your browser of choice and copy them from a request.
-:::caution
-If Koito is unable to validate your Subsonic configuration, it will fail to start. If you notice your container isn't running after
-changing these parameters, check the logs!
-:::
-##### KOITO_LASTFM_API_KEY
-- Required: `false`
-- Description: Your LastFM API key, which will be used for fetching images if provided. You can get an API key [here](https://www.last.fm/api/authentication),
##### KOITO_SKIP_IMPORT
- Default: `false`
- Description: Skips running the importer on startup.
diff --git a/engine/engine.go b/engine/engine.go
index 979667e..31fe552 100644
--- a/engine/engine.go
+++ b/engine/engine.go
@@ -96,10 +96,6 @@ func Run(
defer store.Close(ctx)
l.Info().Msg("Engine: Database connection established")
- if cfg.ForceTZ() != nil {
- l.Debug().Msgf("Engine: Forcing the use of timezone '%s'", cfg.ForceTZ().String())
- }
-
l.Debug().Msg("Engine: Initializing MusicBrainz client")
var mbzC mbz.MusicBrainzCaller
if !cfg.MusicBrainzDisabled() {
@@ -142,7 +138,6 @@ func Run(
EnableCAA: !cfg.CoverArtArchiveDisabled(),
EnableDeezer: !cfg.DeezerDisabled(),
EnableSubsonic: cfg.SubsonicEnabled(),
- EnableLastFM: cfg.LastFMApiKey() != "",
})
l.Info().Msg("Engine: Image sources initialized")
@@ -216,8 +211,6 @@ func Run(
}
}()
- l.Info().Msg("Engine: Beginning startup tasks...")
-
l.Debug().Msg("Engine: Checking import configuration")
if !cfg.SkipImport() {
go func() {
@@ -225,14 +218,18 @@ func Run(
}()
}
+ // l.Info().Msg("Creating test export file")
+ // go func() {
+ // err := export.ExportData(ctx, "koito", store)
+ // if err != nil {
+ // l.Err(err).Msg("Failed to generate export file")
+ // }
+ // }()
+
l.Info().Msg("Engine: Pruning orphaned images")
go catalog.PruneOrphanedImages(logger.NewContext(l), store)
l.Info().Msg("Engine: Running duration backfill task")
go catalog.BackfillTrackDurationsFromMusicBrainz(ctx, store, mbzC)
- l.Info().Msg("Engine: Attempting to fetch missing artist images")
- go catalog.FetchMissingArtistImages(ctx, store)
- l.Info().Msg("Engine: Attempting to fetch missing album images")
- go catalog.FetchMissingAlbumImages(ctx, store)
l.Info().Msg("Engine: Initialization finished")
quit := make(chan os.Signal, 1)
diff --git a/engine/handlers/get_listen_activity.go b/engine/handlers/get_listen_activity.go
index c11ed3e..22d23fa 100644
--- a/engine/handlers/get_listen_activity.go
+++ b/engine/handlers/get_listen_activity.go
@@ -106,7 +106,7 @@ func GetListenActivityHandler(store db.DB) func(w http.ResponseWriter, r *http.R
return
}
- activity = processActivity(activity, opts)
+ activity = fillMissingActivity(activity, opts)
l.Debug().Msg("GetListenActivityHandler: Successfully retrieved listen activity")
utils.WriteJSON(w, http.StatusOK, activity)
@@ -114,55 +114,34 @@ func GetListenActivityHandler(store db.DB) func(w http.ResponseWriter, r *http.R
}
// ngl i hate this
-func processActivity(
+func fillMissingActivity(
items []db.ListenActivityItem,
opts db.ListenActivityOpts,
) []db.ListenActivityItem {
from, to := db.ListenActivityOptsToTimes(opts)
- buckets := make(map[string]int64)
-
+ existing := make(map[string]int64, len(items))
for _, item := range items {
- bucketStart := normalizeToStep(item.Start, opts.Step)
- key := bucketStart.Format("2006-01-02")
- buckets[key] += item.Listens
+ existing[item.Start.Format("2006-01-02")] = item.Listens
}
var result []db.ListenActivityItem
- for t := normalizeToStep(from, opts.Step); t.Before(to); t = addStep(t, opts.Step) {
- key := t.Format("2006-01-02")
+ for t := from; t.Before(to); t = addStep(t, opts.Step) {
+ listens := int64(0)
+ if v, ok := existing[t.Format("2006-01-02")]; ok {
+ listens = v
+ }
result = append(result, db.ListenActivityItem{
Start: t,
- Listens: buckets[key],
+ Listens: int64(listens),
})
}
return result
}
-func normalizeToStep(t time.Time, step db.StepInterval) time.Time {
- switch step {
- case db.StepDay:
- return time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location())
-
- case db.StepWeek:
- weekday := int(t.Weekday())
- if weekday == 0 {
- weekday = 7
- }
- start := t.AddDate(0, 0, -(weekday - 1))
- return time.Date(start.Year(), start.Month(), start.Day(), 0, 0, 0, 0, t.Location())
-
- case db.StepMonth:
- return time.Date(t.Year(), t.Month(), 1, 0, 0, 0, 0, t.Location())
-
- default:
- return t
- }
-}
-
func addStep(t time.Time, step db.StepInterval) time.Time {
switch step {
case db.StepDay:
diff --git a/engine/handlers/handlers.go b/engine/handlers/handlers.go
index 78bc228..06127aa 100644
--- a/engine/handlers/handlers.go
+++ b/engine/handlers/handlers.go
@@ -6,9 +6,7 @@ import (
"strconv"
"strings"
"time"
- _ "time/tzdata"
- "github.com/gabehf/koito/internal/cfg"
"github.com/gabehf/koito/internal/db"
"github.com/gabehf/koito/internal/logger"
)
@@ -109,143 +107,14 @@ func TimeframeFromRequest(r *http.Request) db.Timeframe {
func parseTZ(r *http.Request) *time.Location {
- // this map is obviously AI.
- // i manually referenced as many links as I could and couldn't find any
- // incorrect entries here so hopefully it is all correct.
- overrides := map[string]string{
- // --- North America ---
- "America/Indianapolis": "America/Indiana/Indianapolis",
- "America/Knoxville": "America/Indiana/Knoxville",
- "America/Louisville": "America/Kentucky/Louisville",
- "America/Montreal": "America/Toronto",
- "America/Shiprock": "America/Denver",
- "America/Fort_Wayne": "America/Indiana/Indianapolis",
- "America/Virgin": "America/Port_of_Spain",
- "America/Santa_Isabel": "America/Tijuana",
- "America/Ensenada": "America/Tijuana",
- "America/Rosario": "America/Argentina/Cordoba",
- "America/Jujuy": "America/Argentina/Jujuy",
- "America/Mendoza": "America/Argentina/Mendoza",
- "America/Catamarca": "America/Argentina/Catamarca",
- "America/Cordoba": "America/Argentina/Cordoba",
- "America/Buenos_Aires": "America/Argentina/Buenos_Aires",
- "America/Coral_Harbour": "America/Atikokan",
- "America/Atka": "America/Adak",
- "US/Alaska": "America/Anchorage",
- "US/Aleutian": "America/Adak",
- "US/Arizona": "America/Phoenix",
- "US/Central": "America/Chicago",
- "US/Eastern": "America/New_York",
- "US/East-Indiana": "America/Indiana/Indianapolis",
- "US/Hawaii": "Pacific/Honolulu",
- "US/Indiana-Starke": "America/Indiana/Knoxville",
- "US/Michigan": "America/Detroit",
- "US/Mountain": "America/Denver",
- "US/Pacific": "America/Los_Angeles",
- "US/Samoa": "Pacific/Pago_Pago",
- "Canada/Atlantic": "America/Halifax",
- "Canada/Central": "America/Winnipeg",
- "Canada/Eastern": "America/Toronto",
- "Canada/Mountain": "America/Edmonton",
- "Canada/Newfoundland": "America/St_Johns",
- "Canada/Pacific": "America/Vancouver",
-
- // --- Asia ---
- "Asia/Calcutta": "Asia/Kolkata",
- "Asia/Saigon": "Asia/Ho_Chi_Minh",
- "Asia/Katmandu": "Asia/Kathmandu",
- "Asia/Rangoon": "Asia/Yangon",
- "Asia/Ulan_Bator": "Asia/Ulaanbaatar",
- "Asia/Macao": "Asia/Macau",
- "Asia/Tel_Aviv": "Asia/Jerusalem",
- "Asia/Ashkhabad": "Asia/Ashgabat",
- "Asia/Chungking": "Asia/Chongqing",
- "Asia/Dacca": "Asia/Dhaka",
- "Asia/Istanbul": "Europe/Istanbul",
- "Asia/Kashgar": "Asia/Urumqi",
- "Asia/Thimbu": "Asia/Thimphu",
- "Asia/Ujung_Pandang": "Asia/Makassar",
- "ROC": "Asia/Taipei",
- "Iran": "Asia/Tehran",
- "Israel": "Asia/Jerusalem",
- "Japan": "Asia/Tokyo",
- "Singapore": "Asia/Singapore",
- "Hongkong": "Asia/Hong_Kong",
-
- // --- Europe ---
- "Europe/Kiev": "Europe/Kyiv",
- "Europe/Belfast": "Europe/London",
- "Europe/Tiraspol": "Europe/Chisinau",
- "Europe/Nicosia": "Asia/Nicosia",
- "Europe/Moscow": "Europe/Moscow",
- "W-SU": "Europe/Moscow",
- "GB": "Europe/London",
- "GB-Eire": "Europe/London",
- "Eire": "Europe/Dublin",
- "Poland": "Europe/Warsaw",
- "Portugal": "Europe/Lisbon",
- "Turkey": "Europe/Istanbul",
-
- // --- Australia / Pacific ---
- "Australia/ACT": "Australia/Sydney",
- "Australia/Canberra": "Australia/Sydney",
- "Australia/LHI": "Australia/Lord_Howe",
- "Australia/North": "Australia/Darwin",
- "Australia/NSW": "Australia/Sydney",
- "Australia/Queensland": "Australia/Brisbane",
- "Australia/South": "Australia/Adelaide",
- "Australia/Tasmania": "Australia/Hobart",
- "Australia/Victoria": "Australia/Melbourne",
- "Australia/West": "Australia/Perth",
- "Australia/Yancowinna": "Australia/Broken_Hill",
- "Pacific/Samoa": "Pacific/Pago_Pago",
- "Pacific/Yap": "Pacific/Chuuk",
- "Pacific/Truk": "Pacific/Chuuk",
- "Pacific/Ponape": "Pacific/Pohnpei",
- "NZ": "Pacific/Auckland",
- "NZ-CHAT": "Pacific/Chatham",
-
- // --- Africa ---
- "Africa/Asmera": "Africa/Asmara",
- "Africa/Timbuktu": "Africa/Bamako",
- "Egypt": "Africa/Cairo",
- "Libya": "Africa/Tripoli",
-
- // --- Atlantic ---
- "Atlantic/Faeroe": "Atlantic/Faroe",
- "Atlantic/Jan_Mayen": "Europe/Oslo",
- "Iceland": "Atlantic/Reykjavik",
-
- // --- Etc / Misc ---
- "UTC": "UTC",
- "Etc/UTC": "UTC",
- "Etc/GMT": "UTC",
- "GMT": "UTC",
- "Zulu": "UTC",
- "Universal": "UTC",
- }
-
- if cfg.ForceTZ() != nil {
- return cfg.ForceTZ()
- }
-
if tz := r.URL.Query().Get("tz"); tz != "" {
- if fixedTz, exists := overrides[tz]; exists {
- tz = fixedTz
- }
if loc, err := time.LoadLocation(tz); err == nil {
return loc
}
}
if c, err := r.Cookie("tz"); err == nil {
- var tz string
- if fixedTz, exists := overrides[c.Value]; exists {
- tz = fixedTz
- } else {
- tz = c.Value
- }
- if loc, err := time.LoadLocation(tz); err == nil {
+ if loc, err := time.LoadLocation(c.Value); err == nil {
return loc
}
}
diff --git a/engine/handlers/lbz_submit_listen.go b/engine/handlers/lbz_submit_listen.go
index daf7969..91eeaac 100644
--- a/engine/handlers/lbz_submit_listen.go
+++ b/engine/handlers/lbz_submit_listen.go
@@ -90,11 +90,6 @@ func LbzSubmitListenHandler(store db.DB, mbzc mbz.MusicBrainzCaller) func(w http
utils.WriteError(w, "failed to read request body", http.StatusBadRequest)
return
}
-
- if cfg.LbzRelayEnabled() {
- go doLbzRelay(requestBytes, l)
- }
-
if err := json.NewDecoder(bytes.NewBuffer(requestBytes)).Decode(&req); err != nil {
l.Err(err).Msg("LbzSubmitListenHandler: Failed to decode request")
utils.WriteError(w, "failed to decode request", http.StatusBadRequest)
@@ -239,6 +234,10 @@ func LbzSubmitListenHandler(store db.DB, mbzc mbz.MusicBrainzCaller) func(w http
w.WriteHeader(http.StatusOK)
w.Header().Set("Content-Type", "application/json")
w.Write([]byte("{\"status\": \"ok\"}"))
+
+ if cfg.LbzRelayEnabled() {
+ go doLbzRelay(requestBytes, l)
+ }
}
}
diff --git a/engine/handlers/replace_image.go b/engine/handlers/replace_image.go
index 9a2835d..66c0bbe 100644
--- a/engine/handlers/replace_image.go
+++ b/engine/handlers/replace_image.go
@@ -9,7 +9,6 @@ import (
"github.com/gabehf/koito/internal/catalog"
"github.com/gabehf/koito/internal/cfg"
"github.com/gabehf/koito/internal/db"
- "github.com/gabehf/koito/internal/images"
"github.com/gabehf/koito/internal/logger"
"github.com/gabehf/koito/internal/utils"
"github.com/google/uuid"
@@ -76,7 +75,7 @@ func ReplaceImageHandler(store db.DB) http.HandlerFunc {
fileUrl := r.FormValue("image_url")
if fileUrl != "" {
l.Debug().Msg("ReplaceImageHandler: Image identified as remote file")
- err = images.ValidateImageURL(fileUrl)
+ err = catalog.ValidateImageURL(fileUrl)
if err != nil {
l.Debug().AnErr("error", err).Msg("ReplaceImageHandler: Invalid image URL")
utils.WriteError(w, "url is invalid or not an image file", http.StatusBadRequest)
diff --git a/engine/import_test.go b/engine/import_test.go
index fa69e73..2a802aa 100644
--- a/engine/import_test.go
+++ b/engine/import_test.go
@@ -264,34 +264,6 @@ func TestImportListenBrainz_MbzDisabled(t *testing.T) {
truncateTestData(t)
}
-func TestImportListenBrainz_MBIDMapping(t *testing.T) {
-
- src := path.Join("..", "test_assets", "listenbrainz_shoko1_123456789.zip")
- destDir := filepath.Join(cfg.ConfigDir(), "import")
- dest := filepath.Join(destDir, "listenbrainz_shoko1_123456789.zip")
-
- // not going to make the dest dir because engine should make it already
-
- input, err := os.ReadFile(src)
- require.NoError(t, err)
-
- require.NoError(t, os.WriteFile(dest, input, os.ModePerm))
-
- engine.RunImporter(logger.Get(), store, &mbz.MbzErrorCaller{})
-
- album, err := store.GetAlbum(context.Background(), db.GetAlbumOpts{MusicBrainzID: uuid.MustParse("177ebc28-0115-3897-8eb3-ebf74ce23790")})
- require.NoError(t, err)
- assert.Equal(t, "Zombie", album.Title)
- artist, err := store.GetArtist(context.Background(), db.GetArtistOpts{MusicBrainzID: uuid.MustParse("c98d40fd-f6cf-4b26-883e-eaa515ee2851")})
- require.NoError(t, err)
- assert.Equal(t, "The Cranberries", artist.Name)
- track, err := store.GetTrack(context.Background(), db.GetTrackOpts{MusicBrainzID: uuid.MustParse("3bbeb4e3-ab6d-460d-bfc5-de49e4251061")})
- require.NoError(t, err)
- assert.Equal(t, "Zombie", track.Title)
-
- truncateTestData(t)
-}
-
func TestImportKoito(t *testing.T) {
src := path.Join("..", "test_assets", "koito_export_test.json")
@@ -304,7 +276,6 @@ func TestImportKoito(t *testing.T) {
giriReleaseMBID := uuid.MustParse("ac1f8da0-21d7-426e-83b0-befff06f0871")
suzukiMBID := uuid.MustParse("30f851bb-dba3-4e9b-811c-5f27f595c86a")
nijinoTrackMBID := uuid.MustParse("a4f26836-3894-46c1-acac-227808308687")
- lp3MBID := uuid.MustParse("d0ec30bd-7cdc-417c-979d-5a0631b8a161")
input, err := os.ReadFile(src)
require.NoError(t, err)
@@ -341,12 +312,6 @@ func TestImportKoito(t *testing.T) {
aliases, err := store.GetAllAlbumAliases(ctx, album.ID)
require.NoError(t, err)
assert.Contains(t, utils.FlattenAliases(aliases), "Nijinoiroyo Azayakadeare (NELKE ver.)")
- // ensure album associations are saved
- album, err = store.GetAlbum(ctx, db.GetAlbumOpts{MusicBrainzID: lp3MBID})
- require.NoError(t, err)
- assert.Contains(t, utils.FlattenSimpleArtistNames(album.Artists), "Elizabeth Powell")
- assert.Contains(t, utils.FlattenSimpleArtistNames(album.Artists), "Rachel Goswell")
- assert.Contains(t, utils.FlattenSimpleArtistNames(album.Artists), "American Football")
// ensure all tracks are saved
track, err := store.GetTrack(ctx, db.GetTrackOpts{MusicBrainzID: nijinoTrackMBID})
diff --git a/engine/long_test.go b/engine/long_test.go
index db86ac2..2ef5d4b 100644
--- a/engine/long_test.go
+++ b/engine/long_test.go
@@ -356,51 +356,6 @@ func TestDelete(t *testing.T) {
truncateTestData(t)
}
-func TestLoginGate(t *testing.T) {
-
- t.Run("Submit Listens", doSubmitListens)
-
- req, err := http.NewRequest("DELETE", host()+"/apis/web/v1/artist?id=1", nil)
- require.NoError(t, err)
- req.Header.Add("Authorization", "Token "+apikey)
- resp, err := http.DefaultClient.Do(req)
- assert.NoError(t, err)
- assert.Equal(t, 204, resp.StatusCode)
-
- req, err = http.NewRequest("GET", host()+"/apis/web/v1/artist?id=3", nil)
- require.NoError(t, err)
- resp, err = http.DefaultClient.Do(req)
- assert.NoError(t, err)
- assert.Equal(t, 200, resp.StatusCode)
- var artist models.Artist
- err = json.NewDecoder(resp.Body).Decode(&artist)
- require.NoError(t, err)
- assert.Equal(t, "ネクライトーキー", artist.Name)
-
- cfg.SetLoginGate(true)
-
- req, err = http.NewRequest("GET", host()+"/apis/web/v1/artist?id=3", nil)
- require.NoError(t, err)
- // req.Header.Add("Authorization", "Token "+apikey)
- resp, err = http.DefaultClient.Do(req)
- assert.NoError(t, err)
- assert.Equal(t, 401, resp.StatusCode)
-
- req, err = http.NewRequest("GET", host()+"/apis/web/v1/artist?id=3", nil)
- require.NoError(t, err)
- req.Header.Add("Authorization", "Token "+apikey)
- resp, err = http.DefaultClient.Do(req)
- assert.NoError(t, err)
- assert.Equal(t, 200, resp.StatusCode)
- err = json.NewDecoder(resp.Body).Decode(&artist)
- require.NoError(t, err)
- assert.Equal(t, "ネクライトーキー", artist.Name)
-
- cfg.SetLoginGate(false)
-
- truncateTestData(t)
-}
-
func TestAliasesAndSearch(t *testing.T) {
t.Run("Submit Listens", doSubmitListens)
diff --git a/engine/middleware/authenticate.go b/engine/middleware/authenticate.go
deleted file mode 100644
index 830fb78..0000000
--- a/engine/middleware/authenticate.go
+++ /dev/null
@@ -1,166 +0,0 @@
-package middleware
-
-import (
- "context"
- "errors"
- "fmt"
- "net/http"
- "strings"
- "time"
-
- "github.com/gabehf/koito/internal/cfg"
- "github.com/gabehf/koito/internal/db"
- "github.com/gabehf/koito/internal/logger"
- "github.com/gabehf/koito/internal/models"
- "github.com/gabehf/koito/internal/utils"
- "github.com/google/uuid"
-)
-
-type MiddlwareContextKey string
-
-const (
- UserContextKey MiddlwareContextKey = "user"
- apikeyContextKey MiddlwareContextKey = "apikeyID"
-)
-
-type AuthMode int
-
-const (
- AuthModeSessionCookie AuthMode = iota
- AuthModeAPIKey
- AuthModeSessionOrAPIKey
- AuthModeLoginGate
-)
-
-func Authenticate(store db.DB, mode AuthMode) func(http.Handler) http.Handler {
- return func(next http.Handler) http.Handler {
- return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- ctx := r.Context()
- l := logger.FromContext(ctx)
-
- var user *models.User
- var err error
-
- switch mode {
- case AuthModeSessionCookie:
- user, err = validateSession(ctx, store, r)
-
- case AuthModeAPIKey:
- user, err = validateAPIKey(ctx, store, r)
-
- case AuthModeSessionOrAPIKey:
- user, err = validateSession(ctx, store, r)
- if err != nil || user == nil {
- user, err = validateAPIKey(ctx, store, r)
- }
-
- case AuthModeLoginGate:
- if cfg.LoginGate() {
- user, err = validateSession(ctx, store, r)
- if err != nil || user == nil {
- user, err = validateAPIKey(ctx, store, r)
- }
- } else {
- next.ServeHTTP(w, r)
- return
- }
- }
-
- if err != nil {
- l.Err(err).Msg("authentication failed")
- utils.WriteError(w, "unauthorized", http.StatusUnauthorized)
- return
- }
-
- if user == nil {
- utils.WriteError(w, "unauthorized", http.StatusUnauthorized)
- return
- }
-
- ctx = context.WithValue(ctx, UserContextKey, user)
- r = r.WithContext(ctx)
-
- next.ServeHTTP(w, r)
- })
- }
-}
-
-func validateSession(ctx context.Context, store db.DB, r *http.Request) (*models.User, error) {
- l := logger.FromContext(r.Context())
-
- l.Debug().Msgf("ValidateSession: Checking user authentication via session cookie")
-
- cookie, err := r.Cookie("koito_session")
- var sid uuid.UUID
- if err == nil {
- sid, err = uuid.Parse(cookie.Value)
- if err != nil {
- l.Err(err).Msg("ValidateSession: Could not parse UUID from session cookie")
- return nil, errors.New("session cookie is invalid")
- }
- } else {
- l.Debug().Msgf("ValidateSession: No session cookie found; attempting API key authentication")
- return nil, errors.New("session cookie is missing")
- }
-
- l.Debug().Msg("ValidateSession: Retrieved login cookie from request")
-
- u, err := store.GetUserBySession(r.Context(), sid)
- if err != nil {
- l.Err(fmt.Errorf("ValidateSession: %w", err)).Msg("Error accessing database")
- return nil, errors.New("internal server error")
- }
- if u == nil {
- l.Debug().Msg("ValidateSession: No user with session id found")
- return nil, errors.New("no user with session id found")
- }
-
- ctx = context.WithValue(r.Context(), UserContextKey, u)
- r = r.WithContext(ctx)
-
- l.Debug().Msgf("ValidateSession: Refreshing session for user '%s'", u.Username)
-
- store.RefreshSession(r.Context(), sid, time.Now().Add(30*24*time.Hour))
-
- l.Debug().Msgf("ValidateSession: Refreshed session for user '%s'", u.Username)
-
- return u, nil
-}
-
-func validateAPIKey(ctx context.Context, store db.DB, r *http.Request) (*models.User, error) {
- l := logger.FromContext(ctx)
-
- l.Debug().Msg("ValidateApiKey: Checking if user is already authenticated")
-
- authH := r.Header.Get("Authorization")
- var token string
- if strings.HasPrefix(strings.ToLower(authH), "token ") {
- token = strings.TrimSpace(authH[6:]) // strip "Token "
- } else {
- l.Error().Msg("ValidateApiKey: Authorization header must be formatted 'Token {token}'")
- return nil, errors.New("authorization header is invalid")
- }
-
- u, err := store.GetUserByApiKey(ctx, token)
- if err != nil {
- l.Err(err).Msg("ValidateApiKey: Failed to get user from database using api key")
- return nil, errors.New("internal server error")
- }
- if u == nil {
- l.Debug().Msg("ValidateApiKey: API key does not exist")
- return nil, errors.New("authorization token is invalid")
- }
-
- ctx = context.WithValue(r.Context(), UserContextKey, u)
- r = r.WithContext(ctx)
-
- return u, nil
-}
-
-func GetUserFromContext(ctx context.Context) *models.User {
- user, ok := ctx.Value(UserContextKey).(*models.User)
- if !ok {
- return nil
- }
- return user
-}
diff --git a/engine/middleware/validate.go b/engine/middleware/validate.go
new file mode 100644
index 0000000..b3e1369
--- /dev/null
+++ b/engine/middleware/validate.go
@@ -0,0 +1,125 @@
+package middleware
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+ "strings"
+ "time"
+
+ "github.com/gabehf/koito/internal/db"
+ "github.com/gabehf/koito/internal/logger"
+ "github.com/gabehf/koito/internal/models"
+ "github.com/gabehf/koito/internal/utils"
+ "github.com/google/uuid"
+)
+
+type MiddlwareContextKey string
+
+const (
+ UserContextKey MiddlwareContextKey = "user"
+ apikeyContextKey MiddlwareContextKey = "apikeyID"
+)
+
+func ValidateSession(store db.DB) func(next http.Handler) http.Handler {
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ l := logger.FromContext(r.Context())
+
+ l.Debug().Msgf("ValidateSession: Checking user authentication via session cookie")
+
+ cookie, err := r.Cookie("koito_session")
+ var sid uuid.UUID
+ if err == nil {
+ sid, err = uuid.Parse(cookie.Value)
+ if err != nil {
+ l.Err(err).Msg("ValidateSession: Could not parse UUID from session cookie")
+ utils.WriteError(w, "session cookie is invalid", http.StatusUnauthorized)
+ return
+ }
+ } else {
+ l.Debug().Msgf("ValidateSession: No session cookie found; attempting API key authentication")
+ utils.WriteError(w, "session cookie is missing", http.StatusUnauthorized)
+ return
+ }
+
+ l.Debug().Msg("ValidateSession: Retrieved login cookie from request")
+
+ u, err := store.GetUserBySession(r.Context(), sid)
+ if err != nil {
+ l.Err(fmt.Errorf("ValidateSession: %w", err)).Msg("Error accessing database")
+ utils.WriteError(w, "internal server error", http.StatusInternalServerError)
+ return
+ }
+ if u == nil {
+ l.Debug().Msg("ValidateSession: No user with session id found")
+ utils.WriteError(w, "unauthorized", http.StatusUnauthorized)
+ return
+ }
+
+ ctx := context.WithValue(r.Context(), UserContextKey, u)
+ r = r.WithContext(ctx)
+
+ l.Debug().Msgf("ValidateSession: Refreshing session for user '%s'", u.Username)
+
+ store.RefreshSession(r.Context(), sid, time.Now().Add(30*24*time.Hour))
+
+ l.Debug().Msgf("ValidateSession: Refreshed session for user '%s'", u.Username)
+
+ next.ServeHTTP(w, r)
+ })
+ }
+}
+
+func ValidateApiKey(store db.DB) func(next http.Handler) http.Handler {
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ctx := r.Context()
+ l := logger.FromContext(ctx)
+
+ l.Debug().Msg("ValidateApiKey: Checking if user is already authenticated")
+
+ u := GetUserFromContext(ctx)
+ if u != nil {
+ l.Debug().Msg("ValidateApiKey: User is already authenticated; skipping API key authentication")
+ next.ServeHTTP(w, r)
+ return
+ }
+
+ authh := r.Header.Get("Authorization")
+ var token string
+ if strings.HasPrefix(strings.ToLower(authh), "token ") {
+ token = strings.TrimSpace(authh[6:]) // strip "Token "
+ } else {
+ l.Error().Msg("ValidateApiKey: Authorization header must be formatted 'Token {token}'")
+ utils.WriteError(w, "unauthorized", http.StatusUnauthorized)
+ return
+ }
+
+ u, err := store.GetUserByApiKey(ctx, token)
+ if err != nil {
+ l.Err(err).Msg("Failed to get user from database using api key")
+ utils.WriteError(w, "internal server error", http.StatusInternalServerError)
+ return
+ }
+ if u == nil {
+ l.Debug().Msg("Api key does not exist")
+ utils.WriteError(w, "unauthorized", http.StatusUnauthorized)
+ return
+ }
+
+ ctx = context.WithValue(r.Context(), UserContextKey, u)
+ r = r.WithContext(ctx)
+
+ next.ServeHTTP(w, r)
+ })
+ }
+}
+
+func GetUserFromContext(ctx context.Context) *models.User {
+ user, ok := ctx.Value(UserContextKey).(*models.User)
+ if !ok {
+ return nil
+ }
+ return user
+}
diff --git a/engine/routes.go b/engine/routes.go
index c62edf5..e1c5fda 100644
--- a/engine/routes.go
+++ b/engine/routes.go
@@ -38,7 +38,9 @@ func bindRoutes(
r.Get("/config", handlers.GetCfgHandler())
r.Group(func(r chi.Router) {
- r.Use(middleware.Authenticate(db, middleware.AuthModeLoginGate))
+ if cfg.LoginGate() {
+ r.Use(middleware.ValidateSession(db))
+ }
r.Get("/artist", handlers.GetArtistHandler(db))
r.Get("/artists", handlers.GetArtistsForItemHandler(db))
r.Get("/album", handlers.GetAlbumHandler(db))
@@ -77,7 +79,7 @@ func bindRoutes(
})
r.Group(func(r chi.Router) {
- r.Use(middleware.Authenticate(db, middleware.AuthModeSessionOrAPIKey))
+ r.Use(middleware.ValidateSession(db))
r.Get("/export", handlers.ExportHandler(db))
r.Post("/replace-image", handlers.ReplaceImageHandler(db))
r.Patch("/album", handlers.UpdateAlbumHandler(db))
@@ -109,10 +111,8 @@ func bindRoutes(
AllowedHeaders: []string{"Content-Type", "Authorization"},
}))
- r.With(middleware.Authenticate(db, middleware.AuthModeAPIKey)).
- Post("/submit-listens", handlers.LbzSubmitListenHandler(db, mbz))
- r.With(middleware.Authenticate(db, middleware.AuthModeAPIKey)).
- Get("/validate-token", handlers.LbzValidateTokenHandler(db))
+ r.With(middleware.ValidateApiKey(db)).Post("/submit-listens", handlers.LbzSubmitListenHandler(db, mbz))
+ r.With(middleware.ValidateApiKey(db)).Get("/validate-token", handlers.LbzValidateTokenHandler(db))
})
// serve react client
diff --git a/internal/catalog/associate_track.go b/internal/catalog/associate_track.go
index 3fa1fbc..bb8ebc7 100644
--- a/internal/catalog/associate_track.go
+++ b/internal/catalog/associate_track.go
@@ -74,6 +74,9 @@ func matchTrackByMbzID(ctx context.Context, d db.DB, opts AssociateTrackOpts) (*
} else {
l.Warn().Msgf("Attempted to update track %s with MusicBrainz ID, but an existing ID was already found", track.Title)
}
+ if err != nil {
+ return nil, fmt.Errorf("matchTrackByMbzID: %w", err)
+ }
track.MbzID = &opts.TrackMbzID
return track, nil
}
diff --git a/internal/catalog/duration.go b/internal/catalog/duration.go
index 6217dd6..808ebd0 100644
--- a/internal/catalog/duration.go
+++ b/internal/catalog/duration.go
@@ -21,7 +21,6 @@ func BackfillTrackDurationsFromMusicBrainz(
var from int32 = 0
for {
- l.Debug().Int32("ID", from).Msg("Fetching tracks to backfill from ID")
tracks, err := store.GetTracksWithNoDurationButHaveMbzID(ctx, from)
if err != nil {
return fmt.Errorf("BackfillTrackDurationsFromMusicBrainz: failed to fetch tracks for duration backfill: %w", err)
diff --git a/internal/catalog/duration_test.go b/internal/catalog/duration_test.go
deleted file mode 100644
index 911e345..0000000
--- a/internal/catalog/duration_test.go
+++ /dev/null
@@ -1,36 +0,0 @@
-package catalog_test
-
-import (
- "context"
- "testing"
-
- "github.com/gabehf/koito/internal/catalog"
- "github.com/gabehf/koito/internal/mbz"
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
-)
-
-func TestBackfillDuration(t *testing.T) {
- setupTestDataWithMbzIDs(t)
-
- ctx := context.Background()
- mbzc := &mbz.MbzMockCaller{
- Artists: mbzArtistData,
- Releases: mbzReleaseData,
- Tracks: mbzTrackData,
- }
-
- var err error
-
- err = catalog.BackfillTrackDurationsFromMusicBrainz(context.Background(), store, &mbz.MbzErrorCaller{})
- assert.NoError(t, err)
-
- err = catalog.BackfillTrackDurationsFromMusicBrainz(ctx, store, mbzc)
- assert.NoError(t, err)
-
- count, err := store.Count(ctx, `
- SELECT COUNT(*) FROM tracks_with_title WHERE title = $1 AND duration > 0
- `, "Tokyo Calling")
- require.NoError(t, err)
- assert.Equal(t, 1, count, "track was not updated with duration")
-}
diff --git a/internal/catalog/images.go b/internal/catalog/images.go
index 72b6efd..bf5aa26 100644
--- a/internal/catalog/images.go
+++ b/internal/catalog/images.go
@@ -13,9 +13,7 @@ import (
"github.com/gabehf/koito/internal/cfg"
"github.com/gabehf/koito/internal/db"
- "github.com/gabehf/koito/internal/images"
"github.com/gabehf/koito/internal/logger"
- "github.com/gabehf/koito/internal/utils"
"github.com/google/uuid"
"github.com/h2non/bimg"
)
@@ -80,10 +78,30 @@ func SourceImageDir() string {
}
}
+// ValidateImageURL checks if the URL points to a valid image by performing a HEAD request.
+func ValidateImageURL(url string) error {
+ resp, err := http.Head(url)
+ if err != nil {
+ return fmt.Errorf("ValidateImageURL: http.Head: %w", err)
+ }
+ defer resp.Body.Close()
+
+ if resp.StatusCode != http.StatusOK {
+ return fmt.Errorf("ValidateImageURL: HEAD request failed, status code: %d", resp.StatusCode)
+ }
+
+ contentType := resp.Header.Get("Content-Type")
+ if !strings.HasPrefix(contentType, "image/") {
+ return fmt.Errorf("ValidateImageURL: URL does not point to an image, content type: %s", contentType)
+ }
+
+ return nil
+}
+
// DownloadAndCacheImage downloads an image from the given URL, then calls CompressAndSaveImage.
func DownloadAndCacheImage(ctx context.Context, id uuid.UUID, url string, size ImageSize) error {
l := logger.FromContext(ctx)
- err := images.ValidateImageURL(url)
+ err := ValidateImageURL(url)
if err != nil {
return fmt.Errorf("DownloadAndCacheImage: %w", err)
}
@@ -267,127 +285,3 @@ func pruneDirImgs(ctx context.Context, store db.DB, path string, memo map[string
}
return count, nil
}
-
-func FetchMissingArtistImages(ctx context.Context, store db.DB) error {
- l := logger.FromContext(ctx)
- l.Info().Msg("FetchMissingArtistImages: Starting backfill of missing artist images")
-
- var from int32 = 0
-
- for {
- l.Debug().Int32("ID", from).Msg("Fetching artist images to backfill from ID")
- artists, err := store.ArtistsWithoutImages(ctx, from)
- if err != nil {
- return fmt.Errorf("FetchMissingArtistImages: failed to fetch artists for image backfill: %w", err)
- }
-
- if len(artists) == 0 {
- if from == 0 {
- l.Info().Msg("FetchMissingArtistImages: No artists with missing images found")
- } else {
- l.Info().Msg("FetchMissingArtistImages: Finished fetching missing artist images")
- }
- return nil
- }
-
- for _, artist := range artists {
- from = artist.ID
-
- l.Debug().
- Str("title", artist.Name).
- Msg("FetchMissingArtistImages: Attempting to fetch missing artist image")
-
- var aliases []string
- if aliasrow, err := store.GetAllArtistAliases(ctx, artist.ID); err != nil {
- aliases = utils.FlattenAliases(aliasrow)
- } else {
- aliases = []string{artist.Name}
- }
-
- var imgid uuid.UUID
- imgUrl, imgErr := images.GetArtistImage(ctx, images.ArtistImageOpts{
- Aliases: aliases,
- })
- if imgErr == nil && imgUrl != "" {
- imgid = uuid.New()
- err = store.UpdateArtist(ctx, db.UpdateArtistOpts{
- ID: artist.ID,
- Image: imgid,
- ImageSrc: imgUrl,
- })
- if err != nil {
- l.Err(err).
- Str("title", artist.Name).
- Msg("FetchMissingArtistImages: Failed to update artist with image in database")
- continue
- }
- l.Info().
- Str("name", artist.Name).
- Msg("FetchMissingArtistImages: Successfully fetched missing artist image")
- } else {
- l.Err(err).
- Str("name", artist.Name).
- Msg("FetchMissingArtistImages: Failed to fetch artist image")
- }
- }
- }
-}
-func FetchMissingAlbumImages(ctx context.Context, store db.DB) error {
- l := logger.FromContext(ctx)
- l.Info().Msg("FetchMissingAlbumImages: Starting backfill of missing album images")
-
- var from int32 = 0
-
- for {
- l.Debug().Int32("ID", from).Msg("Fetching album images to backfill from ID")
- albums, err := store.AlbumsWithoutImages(ctx, from)
- if err != nil {
- return fmt.Errorf("FetchMissingAlbumImages: failed to fetch albums for image backfill: %w", err)
- }
-
- if len(albums) == 0 {
- if from == 0 {
- l.Info().Msg("FetchMissingAlbumImages: No albums with missing images found")
- } else {
- l.Info().Msg("FetchMissingAlbumImages: Finished fetching missing album images")
- }
- return nil
- }
-
- for _, album := range albums {
- from = album.ID
-
- l.Debug().
- Str("title", album.Title).
- Msg("FetchMissingAlbumImages: Attempting to fetch missing album image")
-
- var imgid uuid.UUID
- imgUrl, imgErr := images.GetAlbumImage(ctx, images.AlbumImageOpts{
- Artists: utils.FlattenSimpleArtistNames(album.Artists),
- Album: album.Title,
- ReleaseMbzID: album.MbzID,
- })
- if imgErr == nil && imgUrl != "" {
- imgid = uuid.New()
- err = store.UpdateAlbum(ctx, db.UpdateAlbumOpts{
- ID: album.ID,
- Image: imgid,
- ImageSrc: imgUrl,
- })
- if err != nil {
- l.Err(err).
- Str("title", album.Title).
- Msg("FetchMissingAlbumImages: Failed to update album with image in database")
- continue
- }
- l.Info().
- Str("name", album.Title).
- Msg("FetchMissingAlbumImages: Successfully fetched missing album image")
- } else {
- l.Err(err).
- Str("name", album.Title).
- Msg("FetchMissingAlbumImages: Failed to fetch album image")
- }
- }
- }
-}
diff --git a/internal/cfg/cfg.go b/internal/cfg/cfg.go
index 0cfc7bb..9e537eb 100644
--- a/internal/cfg/cfg.go
+++ b/internal/cfg/cfg.go
@@ -38,7 +38,6 @@ const (
DISABLE_MUSICBRAINZ_ENV = "KOITO_DISABLE_MUSICBRAINZ"
SUBSONIC_URL_ENV = "KOITO_SUBSONIC_URL"
SUBSONIC_PARAMS_ENV = "KOITO_SUBSONIC_PARAMS"
- LASTFM_API_KEY_ENV = "KOITO_LASTFM_API_KEY"
SKIP_IMPORT_ENV = "KOITO_SKIP_IMPORT"
ALLOWED_HOSTS_ENV = "KOITO_ALLOWED_HOSTS"
CORS_ORIGINS_ENV = "KOITO_CORS_ALLOWED_ORIGINS"
@@ -49,7 +48,6 @@ const (
FETCH_IMAGES_DURING_IMPORT_ENV = "KOITO_FETCH_IMAGES_DURING_IMPORT"
ARTIST_SEPARATORS_ENV = "KOITO_ARTIST_SEPARATORS_REGEX"
LOGIN_GATE_ENV = "KOITO_LOGIN_GATE"
- FORCE_TZ = "KOITO_FORCE_TZ"
)
type config struct {
@@ -74,7 +72,6 @@ type config struct {
disableMusicBrainz bool
subsonicUrl string
subsonicParams string
- lastfmApiKey string
subsonicEnabled bool
skipImport bool
fetchImageDuringImport bool
@@ -88,7 +85,6 @@ type config struct {
importAfter time.Time
artistSeparators []*regexp.Regexp
loginGate bool
- forceTZ *time.Location
}
var (
@@ -169,7 +165,6 @@ func loadConfig(getenv func(string) string, version string) (*config, error) {
if cfg.subsonicEnabled && (cfg.subsonicUrl == "" || cfg.subsonicParams == "") {
return nil, fmt.Errorf("loadConfig: invalid configuration: both %s and %s must be set in order to use subsonic image fetching", SUBSONIC_URL_ENV, SUBSONIC_PARAMS_ENV)
}
- cfg.lastfmApiKey = getenv(LASTFM_API_KEY_ENV)
cfg.skipImport = parseBool(getenv(SKIP_IMPORT_ENV))
cfg.userAgent = fmt.Sprintf("Koito %s (contact@koito.io)", version)
@@ -215,13 +210,6 @@ func loadConfig(getenv func(string) string, version string) (*config, error) {
cfg.loginGate = true
}
- if getenv(FORCE_TZ) != "" {
- cfg.forceTZ, err = time.LoadLocation(getenv(FORCE_TZ))
- if err != nil {
- return nil, fmt.Errorf("forced timezone '%s' is not a valid timezone", getenv(FORCE_TZ))
- }
- }
-
switch strings.ToLower(getenv(LOG_LEVEL_ENV)) {
case "debug":
cfg.logLevel = 0
@@ -244,3 +232,192 @@ func parseBool(s string) bool {
return false
}
}
+
+// Global accessors for configuration values
+
+func UserAgent() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.userAgent
+}
+
+func ListenAddr() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return fmt.Sprintf("%s:%d", globalConfig.bindAddr, globalConfig.listenPort)
+}
+
+func ConfigDir() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.configDir
+}
+
+func DatabaseUrl() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.databaseUrl
+}
+
+func MusicBrainzUrl() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.musicBrainzUrl
+}
+
+func MusicBrainzRateLimit() int {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.musicBrainzRateLimit
+}
+
+func LogLevel() int {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.logLevel
+}
+
+func StructuredLogging() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.structuredLogging
+}
+
+func LbzRelayEnabled() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.lbzRelayEnabled
+}
+
+func LbzRelayUrl() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.lbzRelayUrl
+}
+
+func LbzRelayToken() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.lbzRelayToken
+}
+
+func DefaultPassword() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.defaultPw
+}
+
+func DefaultUsername() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.defaultUsername
+}
+
+func DefaultTheme() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.defaultTheme
+}
+
+func FullImageCacheEnabled() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.enableFullImageCache
+}
+
+func DeezerDisabled() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.disableDeezer
+}
+
+func CoverArtArchiveDisabled() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.disableCAA
+}
+
+func MusicBrainzDisabled() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.disableMusicBrainz
+}
+
+func SubsonicEnabled() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.subsonicEnabled
+}
+
+func SubsonicUrl() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.subsonicUrl
+}
+
+func SubsonicParams() string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.subsonicParams
+}
+
+func SkipImport() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.skipImport
+}
+
+func AllowedHosts() []string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.allowedHosts
+}
+
+func AllowAllHosts() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.allowAllHosts
+}
+
+func AllowedOrigins() []string {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.allowedOrigins
+}
+
+func RateLimitDisabled() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.disableRateLimit
+}
+
+func ThrottleImportMs() int {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.importThrottleMs
+}
+
+// returns the before, after times, in that order
+func ImportWindow() (time.Time, time.Time) {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.importBefore, globalConfig.importAfter
+}
+
+func FetchImagesDuringImport() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.fetchImageDuringImport
+}
+
+func ArtistSeparators() []*regexp.Regexp {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.artistSeparators
+}
+
+func LoginGate() bool {
+ lock.RLock()
+ defer lock.RUnlock()
+ return globalConfig.loginGate
+}
diff --git a/internal/cfg/getters.go b/internal/cfg/getters.go
deleted file mode 100644
index 596ca9d..0000000
--- a/internal/cfg/getters.go
+++ /dev/null
@@ -1,206 +0,0 @@
-package cfg
-
-import (
- "fmt"
- "regexp"
- "time"
-)
-
-func UserAgent() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.userAgent
-}
-
-func ListenAddr() string {
- lock.RLock()
- defer lock.RUnlock()
- return fmt.Sprintf("%s:%d", globalConfig.bindAddr, globalConfig.listenPort)
-}
-
-func ConfigDir() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.configDir
-}
-
-func DatabaseUrl() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.databaseUrl
-}
-
-func MusicBrainzUrl() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.musicBrainzUrl
-}
-
-func MusicBrainzRateLimit() int {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.musicBrainzRateLimit
-}
-
-func LogLevel() int {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.logLevel
-}
-
-func StructuredLogging() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.structuredLogging
-}
-
-func LbzRelayEnabled() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.lbzRelayEnabled
-}
-
-func LbzRelayUrl() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.lbzRelayUrl
-}
-
-func LbzRelayToken() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.lbzRelayToken
-}
-
-func DefaultPassword() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.defaultPw
-}
-
-func DefaultUsername() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.defaultUsername
-}
-
-func DefaultTheme() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.defaultTheme
-}
-
-func FullImageCacheEnabled() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.enableFullImageCache
-}
-
-func DeezerDisabled() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.disableDeezer
-}
-
-func CoverArtArchiveDisabled() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.disableCAA
-}
-
-func MusicBrainzDisabled() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.disableMusicBrainz
-}
-
-func SubsonicEnabled() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.subsonicEnabled
-}
-
-func SubsonicUrl() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.subsonicUrl
-}
-
-func SubsonicParams() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.subsonicParams
-}
-
-func LastFMApiKey() string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.lastfmApiKey
-}
-
-func SkipImport() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.skipImport
-}
-
-func AllowedHosts() []string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.allowedHosts
-}
-
-func AllowAllHosts() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.allowAllHosts
-}
-
-func AllowedOrigins() []string {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.allowedOrigins
-}
-
-func RateLimitDisabled() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.disableRateLimit
-}
-
-func ThrottleImportMs() int {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.importThrottleMs
-}
-
-// returns the before, after times, in that order
-func ImportWindow() (time.Time, time.Time) {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.importBefore, globalConfig.importAfter
-}
-
-func FetchImagesDuringImport() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.fetchImageDuringImport
-}
-
-func ArtistSeparators() []*regexp.Regexp {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.artistSeparators
-}
-
-func LoginGate() bool {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.loginGate
-}
-
-func ForceTZ() *time.Location {
- lock.RLock()
- defer lock.RUnlock()
- return globalConfig.forceTZ
-}
diff --git a/internal/cfg/setters.go b/internal/cfg/setters.go
deleted file mode 100644
index 8458780..0000000
--- a/internal/cfg/setters.go
+++ /dev/null
@@ -1,7 +0,0 @@
-package cfg
-
-func SetLoginGate(val bool) {
- lock.Lock()
- defer lock.Unlock()
- globalConfig.loginGate = val
-}
diff --git a/internal/db/db.go b/internal/db/db.go
index 97badac..4695967 100644
--- a/internal/db/db.go
+++ b/internal/db/db.go
@@ -19,9 +19,9 @@ type DB interface {
GetTracksWithNoDurationButHaveMbzID(ctx context.Context, from int32) ([]*models.Track, error)
GetArtistsForAlbum(ctx context.Context, id int32) ([]*models.Artist, error)
GetArtistsForTrack(ctx context.Context, id int32) ([]*models.Artist, error)
- GetTopTracksPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[RankedItem[*models.Track]], error)
- GetTopArtistsPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[RankedItem[*models.Artist]], error)
- GetTopAlbumsPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[RankedItem[*models.Album]], error)
+ GetTopTracksPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[*models.Track], error)
+ GetTopArtistsPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[*models.Artist], error)
+ GetTopAlbumsPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[*models.Album], error)
GetListensPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[*models.Listen], error)
GetListenActivity(ctx context.Context, opts ListenActivityOpts) ([]ListenActivityItem, error)
GetAllArtistAliases(ctx context.Context, id int32) ([]models.Alias, error)
@@ -88,7 +88,6 @@ type DB interface {
// in seconds
CountTimeListenedToItem(ctx context.Context, opts TimeListenedOpts) (int64, error)
CountUsers(ctx context.Context) (int64, error)
-
// Search
SearchArtists(ctx context.Context, q string) ([]*models.Artist, error)
@@ -106,7 +105,6 @@ type DB interface {
ImageHasAssociation(ctx context.Context, image uuid.UUID) (bool, error)
GetImageSource(ctx context.Context, image uuid.UUID) (string, error)
AlbumsWithoutImages(ctx context.Context, from int32) ([]*models.Album, error)
- ArtistsWithoutImages(ctx context.Context, from int32) ([]*models.Artist, error)
GetExportPage(ctx context.Context, opts GetExportPageOpts) ([]*ExportItem, error)
Ping(ctx context.Context) error
Close(ctx context.Context)
diff --git a/internal/db/period.go b/internal/db/period.go
index 00c4886..d28f59a 100644
--- a/internal/db/period.go
+++ b/internal/db/period.go
@@ -57,11 +57,11 @@ const (
// and end will be 23:59:59 on Saturday at the end of the current week.
// If opts.Year (or opts.Year + opts.Month) is provided, start and end will simply by the start and end times of that year/month.
func ListenActivityOptsToTimes(opts ListenActivityOpts) (start, end time.Time) {
+ now := time.Now()
loc := opts.Timezone
if loc == nil {
loc, _ = time.LoadLocation("UTC")
}
- now := time.Now().In(loc)
// If Year (and optionally Month) are specified, use calendar boundaries
if opts.Year != 0 {
@@ -91,9 +91,7 @@ func ListenActivityOptsToTimes(opts ListenActivityOpts) (start, end time.Time) {
// Align to most recent Sunday
weekday := int(now.Weekday()) // Sunday = 0
startOfThisWeek := time.Date(now.Year(), now.Month(), now.Day()-weekday, 0, 0, 0, 0, loc)
- // need to subtract 1 from range for week because we are going back from the beginning of this
- // week, so we sort of already went back a week
- start = startOfThisWeek.AddDate(0, 0, -7*(opts.Range-1))
+ start = startOfThisWeek.AddDate(0, 0, -7*opts.Range)
end = startOfThisWeek.AddDate(0, 0, 7).Add(-time.Nanosecond)
case StepMonth:
diff --git a/internal/db/psql/album.go b/internal/db/psql/album.go
index 758c287..630cf1f 100644
--- a/internal/db/psql/album.go
+++ b/internal/db/psql/album.go
@@ -23,13 +23,32 @@ func (d *Psql) GetAlbum(ctx context.Context, opts db.GetAlbumOpts) (*models.Albu
var err error
var ret = new(models.Album)
- if opts.MusicBrainzID != uuid.Nil {
+ if opts.ID != 0 {
+ l.Debug().Msgf("Fetching album from DB with id %d", opts.ID)
+ row, err := d.q.GetRelease(ctx, opts.ID)
+ if err != nil {
+ return nil, fmt.Errorf("GetAlbum: %w", err)
+ }
+ ret.ID = row.ID
+ ret.MbzID = row.MusicBrainzID
+ ret.Title = row.Title
+ ret.Image = row.Image
+ ret.VariousArtists = row.VariousArtists
+ err = json.Unmarshal(row.Artists, &ret.Artists)
+ if err != nil {
+ return nil, fmt.Errorf("GetAlbum: json.Unmarshal: %w", err)
+ }
+ } else if opts.MusicBrainzID != uuid.Nil {
l.Debug().Msgf("Fetching album from DB with MusicBrainz Release ID %s", opts.MusicBrainzID)
row, err := d.q.GetReleaseByMbzID(ctx, &opts.MusicBrainzID)
if err != nil {
return nil, fmt.Errorf("GetAlbum: %w", err)
}
- opts.ID = row.ID
+ ret.ID = row.ID
+ ret.MbzID = row.MusicBrainzID
+ ret.Title = row.Title
+ ret.Image = row.Image
+ ret.VariousArtists = row.VariousArtists
} else if opts.ArtistID != 0 && opts.Title != "" {
l.Debug().Msgf("Fetching album from DB with artist_id %d and title %s", opts.ArtistID, opts.Title)
row, err := d.q.GetReleaseByArtistAndTitle(ctx, repository.GetReleaseByArtistAndTitleParams{
@@ -39,7 +58,11 @@ func (d *Psql) GetAlbum(ctx context.Context, opts db.GetAlbumOpts) (*models.Albu
if err != nil {
return nil, fmt.Errorf("GetAlbum: %w", err)
}
- opts.ID = row.ID
+ ret.ID = row.ID
+ ret.MbzID = row.MusicBrainzID
+ ret.Title = row.Title
+ ret.Image = row.Image
+ ret.VariousArtists = row.VariousArtists
} else if opts.ArtistID != 0 && len(opts.Titles) > 0 {
l.Debug().Msgf("Fetching release group from DB with artist_id %d and titles %v", opts.ArtistID, opts.Titles)
row, err := d.q.GetReleaseByArtistAndTitles(ctx, repository.GetReleaseByArtistAndTitlesParams{
@@ -49,19 +72,19 @@ func (d *Psql) GetAlbum(ctx context.Context, opts db.GetAlbumOpts) (*models.Albu
if err != nil {
return nil, fmt.Errorf("GetAlbum: %w", err)
}
- opts.ID = row.ID
- }
-
- l.Debug().Msgf("Fetching album from DB with id %d", opts.ID)
- row, err := d.q.GetRelease(ctx, opts.ID)
- if err != nil {
- return nil, fmt.Errorf("GetAlbum: %w", err)
+ ret.ID = row.ID
+ ret.MbzID = row.MusicBrainzID
+ ret.Title = row.Title
+ ret.Image = row.Image
+ ret.VariousArtists = row.VariousArtists
+ } else {
+ return nil, errors.New("GetAlbum: insufficient information to get album")
}
count, err := d.q.CountListensFromRelease(ctx, repository.CountListensFromReleaseParams{
ListenedAt: time.Unix(0, 0),
ListenedAt_2: time.Now(),
- ReleaseID: opts.ID,
+ ReleaseID: ret.ID,
})
if err != nil {
return nil, fmt.Errorf("GetAlbum: CountListensFromRelease: %w", err)
@@ -69,32 +92,17 @@ func (d *Psql) GetAlbum(ctx context.Context, opts db.GetAlbumOpts) (*models.Albu
seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{
Timeframe: db.Timeframe{Period: db.PeriodAllTime},
- AlbumID: opts.ID,
+ AlbumID: ret.ID,
})
if err != nil {
return nil, fmt.Errorf("GetAlbum: CountTimeListenedToItem: %w", err)
}
- firstListen, err := d.q.GetFirstListenFromRelease(ctx, opts.ID)
+ firstListen, err := d.q.GetFirstListenFromRelease(ctx, ret.ID)
if err != nil && !errors.Is(err, pgx.ErrNoRows) {
return nil, fmt.Errorf("GetAlbum: GetFirstListenFromRelease: %w", err)
}
- rank, err := d.q.GetReleaseAllTimeRank(ctx, opts.ID)
- if err != nil && !errors.Is(err, pgx.ErrNoRows) {
- return nil, fmt.Errorf("GetAlbum: GetReleaseAllTimeRank: %w", err)
- }
-
- ret.ID = row.ID
- ret.MbzID = row.MusicBrainzID
- ret.Title = row.Title
- ret.Image = row.Image
- ret.VariousArtists = row.VariousArtists
- err = json.Unmarshal(row.Artists, &ret.Artists)
- if err != nil {
- return nil, fmt.Errorf("GetAlbum: json.Unmarshal: %w", err)
- }
- ret.AllTimeRank = rank.Rank
ret.ListenCount = count
ret.TimeListened = seconds
ret.FirstListen = firstListen.ListenedAt.Unix()
@@ -274,9 +282,6 @@ func (d *Psql) UpdateAlbum(ctx context.Context, opts db.UpdateAlbumOpts) error {
}
}
if opts.Image != uuid.Nil {
- if opts.ImageSrc == "" {
- return fmt.Errorf("UpdateAlbum: image source must be provided when updating an image")
- }
l.Debug().Msgf("Updating release with ID %d with image %s", opts.ID, opts.Image)
err := qtx.UpdateReleaseImage(ctx, repository.UpdateReleaseImageParams{
ID: opts.ID,
diff --git a/internal/db/psql/artist.go b/internal/db/psql/artist.go
index 859a490..a67fc4c 100644
--- a/internal/db/psql/artist.go
+++ b/internal/db/psql/artist.go
@@ -20,60 +20,114 @@ import (
// this function sucks because sqlc keeps making new types for rows that are the same
func (d *Psql) GetArtist(ctx context.Context, opts db.GetArtistOpts) (*models.Artist, error) {
l := logger.FromContext(ctx)
- if opts.MusicBrainzID != uuid.Nil {
+ if opts.ID != 0 {
+ l.Debug().Msgf("Fetching artist from DB with id %d", opts.ID)
+ row, err := d.q.GetArtist(ctx, opts.ID)
+ if err != nil {
+ return nil, fmt.Errorf("GetArtist: GetArtist by ID: %w", err)
+ }
+ count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{
+ ListenedAt: time.Unix(0, 0),
+ ListenedAt_2: time.Now(),
+ ArtistID: row.ID,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("GetArtist: CountListensFromArtist: %w", err)
+ }
+ seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{
+ Timeframe: db.Timeframe{Period: db.PeriodAllTime},
+ ArtistID: row.ID,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("GetArtist: CountTimeListenedToItem: %w", err)
+ }
+ firstListen, err := d.q.GetFirstListenFromArtist(ctx, row.ID)
+ if err != nil && !errors.Is(err, pgx.ErrNoRows) {
+ return nil, fmt.Errorf("GetAlbum: GetFirstListenFromArtist: %w", err)
+ }
+ return &models.Artist{
+ ID: row.ID,
+ MbzID: row.MusicBrainzID,
+ Name: row.Name,
+ Aliases: row.Aliases,
+ Image: row.Image,
+ ListenCount: count,
+ TimeListened: seconds,
+ FirstListen: firstListen.ListenedAt.Unix(),
+ }, nil
+ } else if opts.MusicBrainzID != uuid.Nil {
l.Debug().Msgf("Fetching artist from DB with MusicBrainz ID %s", opts.MusicBrainzID)
row, err := d.q.GetArtistByMbzID(ctx, &opts.MusicBrainzID)
if err != nil {
return nil, fmt.Errorf("GetArtist: GetArtistByMbzID: %w", err)
}
- opts.ID = row.ID
+ count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{
+ ListenedAt: time.Unix(0, 0),
+ ListenedAt_2: time.Now(),
+ ArtistID: row.ID,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("GetArtist: CountListensFromArtist: %w", err)
+ }
+ seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{
+ Timeframe: db.Timeframe{Period: db.PeriodAllTime},
+ ArtistID: row.ID,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("GetArtist: CountTimeListenedToItem: %w", err)
+ }
+ firstListen, err := d.q.GetFirstListenFromArtist(ctx, row.ID)
+ if err != nil && !errors.Is(err, pgx.ErrNoRows) {
+ return nil, fmt.Errorf("GetAlbum: GetFirstListenFromArtist: %w", err)
+ }
+ return &models.Artist{
+ ID: row.ID,
+ MbzID: row.MusicBrainzID,
+ Name: row.Name,
+ Aliases: row.Aliases,
+ Image: row.Image,
+ ListenCount: count,
+ TimeListened: seconds,
+ FirstListen: firstListen.ListenedAt.Unix(),
+ }, nil
} else if opts.Name != "" {
l.Debug().Msgf("Fetching artist from DB with name '%s'", opts.Name)
row, err := d.q.GetArtistByName(ctx, opts.Name)
if err != nil {
return nil, fmt.Errorf("GetArtist: GetArtistByName: %w", err)
}
- opts.ID = row.ID
+ count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{
+ ListenedAt: time.Unix(0, 0),
+ ListenedAt_2: time.Now(),
+ ArtistID: row.ID,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("GetArtist: CountListensFromArtist: %w", err)
+ }
+ seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{
+ Timeframe: db.Timeframe{Period: db.PeriodAllTime},
+ ArtistID: row.ID,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("GetArtist: CountTimeListenedToItem: %w", err)
+ }
+ firstListen, err := d.q.GetFirstListenFromArtist(ctx, row.ID)
+ if err != nil && !errors.Is(err, pgx.ErrNoRows) {
+ return nil, fmt.Errorf("GetAlbum: GetFirstListenFromArtist: %w", err)
+ }
+ return &models.Artist{
+ ID: row.ID,
+ MbzID: row.MusicBrainzID,
+ Name: row.Name,
+ Aliases: row.Aliases,
+ Image: row.Image,
+ ListenCount: count,
+ TimeListened: seconds,
+ FirstListen: firstListen.ListenedAt.Unix(),
+ }, nil
+ } else {
+ return nil, errors.New("insufficient information to get artist")
}
- l.Debug().Msgf("Fetching artist from DB with id %d", opts.ID)
- row, err := d.q.GetArtist(ctx, opts.ID)
- if err != nil {
- return nil, fmt.Errorf("GetArtist: GetArtist by ID: %w", err)
- }
- count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{
- ListenedAt: time.Unix(0, 0),
- ListenedAt_2: time.Now(),
- ArtistID: row.ID,
- })
- if err != nil {
- return nil, fmt.Errorf("GetArtist: CountListensFromArtist: %w", err)
- }
- seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{
- Timeframe: db.Timeframe{Period: db.PeriodAllTime},
- ArtistID: row.ID,
- })
- if err != nil {
- return nil, fmt.Errorf("GetArtist: CountTimeListenedToItem: %w", err)
- }
- firstListen, err := d.q.GetFirstListenFromArtist(ctx, row.ID)
- if err != nil && !errors.Is(err, pgx.ErrNoRows) {
- return nil, fmt.Errorf("GetAlbum: GetFirstListenFromArtist: %w", err)
- }
- rank, err := d.q.GetArtistAllTimeRank(ctx, opts.ID)
- if err != nil && !errors.Is(err, pgx.ErrNoRows) {
- return nil, fmt.Errorf("GetArtist: GetArtistAllTimeRank: %w", err)
- }
- return &models.Artist{
- ID: row.ID,
- MbzID: row.MusicBrainzID,
- Name: row.Name,
- Aliases: row.Aliases,
- Image: row.Image,
- ListenCount: count,
- TimeListened: seconds,
- AllTimeRank: rank.Rank,
- FirstListen: firstListen.ListenedAt.Unix(),
- }, nil
}
// Inserts all unique aliases into the DB with specified source
@@ -210,9 +264,6 @@ func (d *Psql) UpdateArtist(ctx context.Context, opts db.UpdateArtistOpts) error
}
}
if opts.Image != uuid.Nil {
- if opts.ImageSrc == "" {
- return fmt.Errorf("UpdateAlbum: image source must be provided when updating an image")
- }
l.Debug().Msgf("Updating artist with id %d with image %s", opts.ID, opts.Image)
err = qtx.UpdateArtistImage(ctx, repository.UpdateArtistImageParams{
ID: opts.ID,
diff --git a/internal/db/psql/images.go b/internal/db/psql/images.go
index eef0d8f..49e2850 100644
--- a/internal/db/psql/images.go
+++ b/internal/db/psql/images.go
@@ -72,26 +72,3 @@ func (d *Psql) AlbumsWithoutImages(ctx context.Context, from int32) ([]*models.A
}
return albums, nil
}
-
-// returns nil, nil on no results
-func (d *Psql) ArtistsWithoutImages(ctx context.Context, from int32) ([]*models.Artist, error) {
- rows, err := d.q.GetArtistsWithoutImages(ctx, repository.GetArtistsWithoutImagesParams{
- Limit: 20,
- ID: from,
- })
- if errors.Is(err, pgx.ErrNoRows) {
- return nil, nil
- } else if err != nil {
- return nil, fmt.Errorf("ArtistsWithoutImages: %w", err)
- }
-
- ret := make([]*models.Artist, len(rows))
- for i, row := range rows {
- ret[i] = &models.Artist{
- ID: row.ID,
- Name: row.Name,
- MbzID: row.MusicBrainzID,
- }
- }
- return ret, nil
-}
diff --git a/internal/db/psql/interest.go b/internal/db/psql/interest.go
index 0c8f4eb..9e8a623 100644
--- a/internal/db/psql/interest.go
+++ b/internal/db/psql/interest.go
@@ -14,54 +14,54 @@ func (d *Psql) GetInterest(ctx context.Context, opts db.GetInterestOpts) ([]db.I
return nil, errors.New("GetInterest: bucket count must be provided")
}
- ret := make([]db.InterestBucket, 0)
+ ret := make([]db.InterestBucket, opts.Buckets)
if opts.ArtistID != 0 {
resp, err := d.q.GetGroupedListensFromArtist(ctx, repository.GetGroupedListensFromArtistParams{
ArtistID: opts.ArtistID,
- BucketCount: int32(opts.Buckets),
+ BucketCount: opts.Buckets,
})
if err != nil {
return nil, fmt.Errorf("GetInterest: GetGroupedListensFromArtist: %w", err)
}
- for _, v := range resp {
- ret = append(ret, db.InterestBucket{
+ for i, v := range resp {
+ ret[i] = db.InterestBucket{
BucketStart: v.BucketStart,
BucketEnd: v.BucketEnd,
ListenCount: v.ListenCount,
- })
+ }
}
return ret, nil
} else if opts.AlbumID != 0 {
resp, err := d.q.GetGroupedListensFromRelease(ctx, repository.GetGroupedListensFromReleaseParams{
ReleaseID: opts.AlbumID,
- BucketCount: int32(opts.Buckets),
+ BucketCount: opts.Buckets,
})
if err != nil {
return nil, fmt.Errorf("GetInterest: GetGroupedListensFromRelease: %w", err)
}
- for _, v := range resp {
- ret = append(ret, db.InterestBucket{
+ for i, v := range resp {
+ ret[i] = db.InterestBucket{
BucketStart: v.BucketStart,
BucketEnd: v.BucketEnd,
ListenCount: v.ListenCount,
- })
+ }
}
return ret, nil
} else if opts.TrackID != 0 {
resp, err := d.q.GetGroupedListensFromTrack(ctx, repository.GetGroupedListensFromTrackParams{
ID: opts.TrackID,
- BucketCount: int32(opts.Buckets),
+ BucketCount: opts.Buckets,
})
if err != nil {
return nil, fmt.Errorf("GetInterest: GetGroupedListensFromTrack: %w", err)
}
- for _, v := range resp {
- ret = append(ret, db.InterestBucket{
+ for i, v := range resp {
+ ret[i] = db.InterestBucket{
BucketStart: v.BucketStart,
BucketEnd: v.BucketEnd,
ListenCount: v.ListenCount,
- })
+ }
}
return ret, nil
} else {
diff --git a/internal/db/psql/listen_activity.go b/internal/db/psql/listen_activity.go
index b2c7990..7a3a776 100644
--- a/internal/db/psql/listen_activity.go
+++ b/internal/db/psql/listen_activity.go
@@ -23,7 +23,7 @@ func (d *Psql) GetListenActivity(ctx context.Context, opts db.ListenActivityOpts
var listenActivity []db.ListenActivityItem
if opts.AlbumID > 0 {
l.Debug().Msgf("Fetching listen activity for %d %s(s) from %v to %v for release group %d",
- opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05 MST"), t2.Format("Jan 02, 2006 15:04:05 MST"), opts.AlbumID)
+ opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05"), t2.Format("Jan 02, 2006 15:04:05"), opts.AlbumID)
rows, err := d.q.ListenActivityForRelease(ctx, repository.ListenActivityForReleaseParams{
Column1: opts.Timezone.String(),
ListenedAt: t1,
@@ -44,7 +44,7 @@ func (d *Psql) GetListenActivity(ctx context.Context, opts db.ListenActivityOpts
l.Debug().Msgf("Database responded with %d steps", len(rows))
} else if opts.ArtistID > 0 {
l.Debug().Msgf("Fetching listen activity for %d %s(s) from %v to %v for artist %d",
- opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05 MST"), t2.Format("Jan 02, 2006 15:04:05 MST"), opts.ArtistID)
+ opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05"), t2.Format("Jan 02, 2006 15:04:05"), opts.ArtistID)
rows, err := d.q.ListenActivityForArtist(ctx, repository.ListenActivityForArtistParams{
Column1: opts.Timezone.String(),
ListenedAt: t1,
@@ -65,7 +65,7 @@ func (d *Psql) GetListenActivity(ctx context.Context, opts db.ListenActivityOpts
l.Debug().Msgf("Database responded with %d steps", len(rows))
} else if opts.TrackID > 0 {
l.Debug().Msgf("Fetching listen activity for %d %s(s) from %v to %v for track %d",
- opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05 MST"), t2.Format("Jan 02, 2006 15:04:05 MST"), opts.TrackID)
+ opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05"), t2.Format("Jan 02, 2006 15:04:05"), opts.TrackID)
rows, err := d.q.ListenActivityForTrack(ctx, repository.ListenActivityForTrackParams{
Column1: opts.Timezone.String(),
ListenedAt: t1,
@@ -86,7 +86,7 @@ func (d *Psql) GetListenActivity(ctx context.Context, opts db.ListenActivityOpts
l.Debug().Msgf("Database responded with %d steps", len(rows))
} else {
l.Debug().Msgf("Fetching listen activity for %d %s(s) from %v to %v",
- opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05 MST"), t2.Format("Jan 02, 2006 15:04:05 MST"))
+ opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05"), t2.Format("Jan 02, 2006 15:04:05"))
rows, err := d.q.ListenActivity(ctx, repository.ListenActivityParams{
Column1: opts.Timezone.String(),
ListenedAt: t1,
diff --git a/internal/db/psql/listen_activity_test.go b/internal/db/psql/listen_activity_test.go
index affc202..9b277ff 100644
--- a/internal/db/psql/listen_activity_test.go
+++ b/internal/db/psql/listen_activity_test.go
@@ -97,19 +97,20 @@ func TestListenActivity(t *testing.T) {
err = store.Exec(context.Background(),
`INSERT INTO listens (user_id, track_id, listened_at)
- VALUES (1, 1, NOW() - INTERVAL '1 month 1 day'),
- (1, 1, NOW() - INTERVAL '2 months 1 day'),
- (1, 1, NOW() - INTERVAL '3 months 1 day'),
- (1, 2, NOW() - INTERVAL '1 month 1 day'),
- (1, 2, NOW() - INTERVAL '1 second'),
- (1, 2, NOW() - INTERVAL '2 seconds'),
- (1, 2, NOW() - INTERVAL '2 months 1 day')`)
+ VALUES (1, 1, NOW() - INTERVAL '1 month'),
+ (1, 1, NOW() - INTERVAL '2 months'),
+ (1, 1, NOW() - INTERVAL '3 months'),
+ (1, 2, NOW() - INTERVAL '1 month'),
+ (1, 2, NOW() - INTERVAL '2 months')`)
require.NoError(t, err)
+ // This test is bad, and I think it's because of daylight savings.
+ // I need to find a better test.
+
activity, err = store.GetListenActivity(ctx, db.ListenActivityOpts{Step: db.StepMonth, Range: 8})
require.NoError(t, err)
- require.Len(t, activity, 4)
- assert.Equal(t, []int64{1, 2, 2, 2}, flattenListenCounts(activity))
+ // require.Len(t, activity, 8)
+ // assert.Equal(t, []int64{0, 0, 0, 0, 1, 2, 2, 0}, flattenListenCounts(activity))
// Truncate listens table and insert specific dates for testing opts.Step = db.StepYear
err = store.Exec(context.Background(), `TRUNCATE TABLE listens RESTART IDENTITY`)
diff --git a/internal/db/psql/merge.go b/internal/db/psql/merge.go
index dd375c5..d9e24b6 100644
--- a/internal/db/psql/merge.go
+++ b/internal/db/psql/merge.go
@@ -52,7 +52,7 @@ func (d *Psql) MergeTracks(ctx context.Context, fromId, toId int32) error {
}
err = qtx.CleanOrphanedEntries(ctx)
if err != nil {
- l.Err(err).Msg("MergeTracks: Failed to clean orphaned entries")
+ l.Err(err).Msg("Failed to clean orphaned entries")
return err
}
return tx.Commit(ctx)
diff --git a/internal/db/psql/merge_test.go b/internal/db/psql/merge_test.go
index 38e843a..08169fb 100644
--- a/internal/db/psql/merge_test.go
+++ b/internal/db/psql/merge_test.go
@@ -12,27 +12,27 @@ func setupTestDataForMerge(t *testing.T) {
truncateTestData(t)
// Insert artists
err := store.Exec(context.Background(),
- `INSERT INTO artists (musicbrainz_id, image, image_source)
+ `INSERT INTO artists (musicbrainz_id, image, image_source)
VALUES ('00000000-0000-0000-0000-000000000001', '10000000-0000-0000-0000-000000000000', 'source.com'),
('00000000-0000-0000-0000-000000000002', NULL, NULL)`)
require.NoError(t, err)
err = store.Exec(context.Background(),
- `INSERT INTO artist_aliases (artist_id, alias, source, is_primary)
+ `INSERT INTO artist_aliases (artist_id, alias, source, is_primary)
VALUES (1, 'Artist One', 'Testing', true),
(2, 'Artist Two', 'Testing', true)`)
require.NoError(t, err)
// Insert albums
err = store.Exec(context.Background(),
- `INSERT INTO releases (musicbrainz_id, image, image_source)
+ `INSERT INTO releases (musicbrainz_id, image, image_source)
VALUES ('11111111-1111-1111-1111-111111111111', '20000000-0000-0000-0000-000000000000', 'source.com'),
('22222222-2222-2222-2222-222222222222', NULL, NULL),
(NULL, NULL, NULL)`)
require.NoError(t, err)
err = store.Exec(context.Background(),
- `INSERT INTO release_aliases (release_id, alias, source, is_primary)
+ `INSERT INTO release_aliases (release_id, alias, source, is_primary)
VALUES (1, 'Album One', 'Testing', true),
(2, 'Album Two', 'Testing', true),
(3, 'Album Three', 'Testing', true)`)
@@ -40,7 +40,7 @@ func setupTestDataForMerge(t *testing.T) {
// Insert tracks
err = store.Exec(context.Background(),
- `INSERT INTO tracks (musicbrainz_id, release_id)
+ `INSERT INTO tracks (musicbrainz_id, release_id)
VALUES ('33333333-3333-3333-3333-333333333333', 1),
('44444444-4444-4444-4444-444444444444', 2),
('55555555-5555-5555-5555-555555555555', 1),
@@ -48,7 +48,7 @@ func setupTestDataForMerge(t *testing.T) {
require.NoError(t, err)
err = store.Exec(context.Background(),
- `INSERT INTO track_aliases (track_id, alias, source, is_primary)
+ `INSERT INTO track_aliases (track_id, alias, source, is_primary)
VALUES (1, 'Track One', 'Testing', true),
(2, 'Track Two', 'Testing', true),
(3, 'Track Three', 'Testing', true),
@@ -57,18 +57,18 @@ func setupTestDataForMerge(t *testing.T) {
// Associate artists with albums and tracks
err = store.Exec(context.Background(),
- `INSERT INTO artist_releases (artist_id, release_id)
+ `INSERT INTO artist_releases (artist_id, release_id)
VALUES (1, 1), (2, 2), (1, 3)`)
require.NoError(t, err)
err = store.Exec(context.Background(),
- `INSERT INTO artist_tracks (artist_id, track_id)
+ `INSERT INTO artist_tracks (artist_id, track_id)
VALUES (1, 1), (2, 2), (1, 3), (1, 4)`)
require.NoError(t, err)
// Insert listens
err = store.Exec(context.Background(),
- `INSERT INTO listens (user_id, track_id, listened_at)
+ `INSERT INTO listens (user_id, track_id, listened_at)
VALUES (1, 1, NOW() - INTERVAL '1 day'),
(1, 2, NOW() - INTERVAL '2 days'),
(1, 3, NOW() - INTERVAL '3 days'),
@@ -90,14 +90,14 @@ func TestMergeTracks(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, 2, count, "expected all listens to be merged into Track 2")
- // Verify old artist is not associated with album
+ // Verify artist is associated with album
exists, err := store.RowExists(ctx, `
SELECT EXISTS (
SELECT 1 FROM artist_releases
WHERE release_id = $1 AND artist_id = $2
)`, 2, 1)
require.NoError(t, err)
- assert.False(t, exists)
+ assert.True(t, exists, "expected old artist to be associated with album")
truncateTestData(t)
}
diff --git a/internal/db/psql/top_albums.go b/internal/db/psql/top_albums.go
index 652b790..8610ce5 100644
--- a/internal/db/psql/top_albums.go
+++ b/internal/db/psql/top_albums.go
@@ -11,7 +11,7 @@ import (
"github.com/gabehf/koito/internal/repository"
)
-func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[db.RankedItem[*models.Album]], error) {
+func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[*models.Album], error) {
l := logger.FromContext(ctx)
offset := (opts.Page - 1) * opts.Limit
t1, t2 := db.TimeframeToTimeRange(opts.Timeframe)
@@ -19,7 +19,7 @@ func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts)
opts.Limit = DefaultItemsPerPage
}
- var rgs []db.RankedItem[*models.Album]
+ var rgs []*models.Album
var count int64
if opts.ArtistID != 0 {
@@ -36,7 +36,7 @@ func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts)
if err != nil {
return nil, fmt.Errorf("GetTopAlbumsPaginated: GetTopReleasesFromArtist: %w", err)
}
- rgs = make([]db.RankedItem[*models.Album], len(rows))
+ rgs = make([]*models.Album, len(rows))
l.Debug().Msgf("Database responded with %d items", len(rows))
for i, v := range rows {
artists := make([]models.SimpleArtist, 0)
@@ -45,7 +45,7 @@ func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts)
l.Err(err).Msgf("Error unmarshalling artists for release group with id %d", v.ID)
return nil, fmt.Errorf("GetTopAlbumsPaginated: Unmarshal: %w", err)
}
- rgs[i].Item = &models.Album{
+ rgs[i] = &models.Album{
ID: v.ID,
MbzID: v.MusicBrainzID,
Title: v.Title,
@@ -54,7 +54,6 @@ func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts)
VariousArtists: v.VariousArtists,
ListenCount: v.ListenCount,
}
- rgs[i].Rank = v.Rank
}
count, err = d.q.CountReleasesFromArtist(ctx, int32(opts.ArtistID))
if err != nil {
@@ -72,7 +71,7 @@ func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts)
if err != nil {
return nil, fmt.Errorf("GetTopAlbumsPaginated: GetTopReleasesPaginated: %w", err)
}
- rgs = make([]db.RankedItem[*models.Album], len(rows))
+ rgs = make([]*models.Album, len(rows))
l.Debug().Msgf("Database responded with %d items", len(rows))
for i, row := range rows {
artists := make([]models.SimpleArtist, 0)
@@ -81,16 +80,16 @@ func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts)
l.Err(err).Msgf("Error unmarshalling artists for release group with id %d", row.ID)
return nil, fmt.Errorf("GetTopAlbumsPaginated: Unmarshal: %w", err)
}
- rgs[i].Item = &models.Album{
- ID: row.ID,
- MbzID: row.MusicBrainzID,
+ t := &models.Album{
Title: row.Title,
+ MbzID: row.MusicBrainzID,
+ ID: row.ID,
Image: row.Image,
Artists: artists,
VariousArtists: row.VariousArtists,
ListenCount: row.ListenCount,
}
- rgs[i].Rank = row.Rank
+ rgs[i] = t
}
count, err = d.q.CountTopReleases(ctx, repository.CountTopReleasesParams{
ListenedAt: t1,
@@ -101,7 +100,7 @@ func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts)
}
l.Debug().Msgf("Database responded with %d albums out of a total %d", len(rows), count)
}
- return &db.PaginatedResponse[db.RankedItem[*models.Album]]{
+ return &db.PaginatedResponse[*models.Album]{
Items: rgs,
TotalCount: count,
ItemsPerPage: int32(opts.Limit),
diff --git a/internal/db/psql/top_albums_test.go b/internal/db/psql/top_albums_test.go
index eb4efde..ff0efef 100644
--- a/internal/db/psql/top_albums_test.go
+++ b/internal/db/psql/top_albums_test.go
@@ -18,16 +18,16 @@ func TestGetTopAlbumsPaginated(t *testing.T) {
require.NoError(t, err)
require.Len(t, resp.Items, 4)
assert.Equal(t, int64(4), resp.TotalCount)
- assert.Equal(t, "Release One", resp.Items[0].Item.Title)
- assert.Equal(t, "Release Two", resp.Items[1].Item.Title)
- assert.Equal(t, "Release Three", resp.Items[2].Item.Title)
- assert.Equal(t, "Release Four", resp.Items[3].Item.Title)
+ assert.Equal(t, "Release One", resp.Items[0].Title)
+ assert.Equal(t, "Release Two", resp.Items[1].Title)
+ assert.Equal(t, "Release Three", resp.Items[2].Title)
+ assert.Equal(t, "Release Four", resp.Items[3].Title)
// Test pagination
resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Timeframe: db.Timeframe{Period: db.PeriodAllTime}})
require.NoError(t, err)
require.Len(t, resp.Items, 1)
- assert.Equal(t, "Release Two", resp.Items[0].Item.Title)
+ assert.Equal(t, "Release Two", resp.Items[0].Title)
// Test page out of range
resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 10, Timeframe: db.Timeframe{Period: db.PeriodAllTime}})
@@ -57,29 +57,29 @@ func TestGetTopAlbumsPaginated(t *testing.T) {
require.NoError(t, err)
require.Len(t, resp.Items, 1)
assert.Equal(t, int64(1), resp.TotalCount)
- assert.Equal(t, "Release Four", resp.Items[0].Item.Title)
+ assert.Equal(t, "Release Four", resp.Items[0].Title)
resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodMonth}})
require.NoError(t, err)
require.Len(t, resp.Items, 2)
assert.Equal(t, int64(2), resp.TotalCount)
- assert.Equal(t, "Release Three", resp.Items[0].Item.Title)
- assert.Equal(t, "Release Four", resp.Items[1].Item.Title)
+ assert.Equal(t, "Release Three", resp.Items[0].Title)
+ assert.Equal(t, "Release Four", resp.Items[1].Title)
resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodYear}})
require.NoError(t, err)
require.Len(t, resp.Items, 3)
assert.Equal(t, int64(3), resp.TotalCount)
- assert.Equal(t, "Release Two", resp.Items[0].Item.Title)
- assert.Equal(t, "Release Three", resp.Items[1].Item.Title)
- assert.Equal(t, "Release Four", resp.Items[2].Item.Title)
+ assert.Equal(t, "Release Two", resp.Items[0].Title)
+ assert.Equal(t, "Release Three", resp.Items[1].Title)
+ assert.Equal(t, "Release Four", resp.Items[2].Title)
// test specific artist
resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodYear}, ArtistID: 2})
require.NoError(t, err)
require.Len(t, resp.Items, 1)
assert.Equal(t, int64(1), resp.TotalCount)
- assert.Equal(t, "Release Two", resp.Items[0].Item.Title)
+ assert.Equal(t, "Release Two", resp.Items[0].Title)
// Test specify dates
@@ -89,11 +89,11 @@ func TestGetTopAlbumsPaginated(t *testing.T) {
require.NoError(t, err)
require.Len(t, resp.Items, 1)
assert.Equal(t, int64(1), resp.TotalCount)
- assert.Equal(t, "Release One", resp.Items[0].Item.Title)
+ assert.Equal(t, "Release One", resp.Items[0].Title)
resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Month: 6, Year: 2024}})
require.NoError(t, err)
require.Len(t, resp.Items, 1)
assert.Equal(t, int64(1), resp.TotalCount)
- assert.Equal(t, "Release Two", resp.Items[0].Item.Title)
+ assert.Equal(t, "Release Two", resp.Items[0].Title)
}
diff --git a/internal/db/psql/top_artists.go b/internal/db/psql/top_artists.go
index 497efbd..f66f082 100644
--- a/internal/db/psql/top_artists.go
+++ b/internal/db/psql/top_artists.go
@@ -10,7 +10,7 @@ import (
"github.com/gabehf/koito/internal/repository"
)
-func (d *Psql) GetTopArtistsPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[db.RankedItem[*models.Artist]], error) {
+func (d *Psql) GetTopArtistsPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[*models.Artist], error) {
l := logger.FromContext(ctx)
offset := (opts.Page - 1) * opts.Limit
t1, t2 := db.TimeframeToTimeRange(opts.Timeframe)
@@ -28,7 +28,7 @@ func (d *Psql) GetTopArtistsPaginated(ctx context.Context, opts db.GetItemsOpts)
if err != nil {
return nil, fmt.Errorf("GetTopArtistsPaginated: GetTopArtistsPaginated: %w", err)
}
- rgs := make([]db.RankedItem[*models.Artist], len(rows))
+ rgs := make([]*models.Artist, len(rows))
for i, row := range rows {
t := &models.Artist{
Name: row.Name,
@@ -37,8 +37,7 @@ func (d *Psql) GetTopArtistsPaginated(ctx context.Context, opts db.GetItemsOpts)
Image: row.Image,
ListenCount: row.ListenCount,
}
- rgs[i].Item = t
- rgs[i].Rank = row.Rank
+ rgs[i] = t
}
count, err := d.q.CountTopArtists(ctx, repository.CountTopArtistsParams{
ListenedAt: t1,
@@ -49,7 +48,7 @@ func (d *Psql) GetTopArtistsPaginated(ctx context.Context, opts db.GetItemsOpts)
}
l.Debug().Msgf("Database responded with %d artists out of a total %d", len(rows), count)
- return &db.PaginatedResponse[db.RankedItem[*models.Artist]]{
+ return &db.PaginatedResponse[*models.Artist]{
Items: rgs,
TotalCount: count,
ItemsPerPage: int32(opts.Limit),
diff --git a/internal/db/psql/top_artists_test.go b/internal/db/psql/top_artists_test.go
index 7a69ab5..182d96e 100644
--- a/internal/db/psql/top_artists_test.go
+++ b/internal/db/psql/top_artists_test.go
@@ -18,16 +18,16 @@ func TestGetTopArtistsPaginated(t *testing.T) {
require.NoError(t, err)
require.Len(t, resp.Items, 4)
assert.Equal(t, int64(4), resp.TotalCount)
- assert.Equal(t, "Artist One", resp.Items[0].Item.Name)
- assert.Equal(t, "Artist Two", resp.Items[1].Item.Name)
- assert.Equal(t, "Artist Three", resp.Items[2].Item.Name)
- assert.Equal(t, "Artist Four", resp.Items[3].Item.Name)
+ assert.Equal(t, "Artist One", resp.Items[0].Name)
+ assert.Equal(t, "Artist Two", resp.Items[1].Name)
+ assert.Equal(t, "Artist Three", resp.Items[2].Name)
+ assert.Equal(t, "Artist Four", resp.Items[3].Name)
// Test pagination
resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Timeframe: db.Timeframe{Period: db.PeriodAllTime}})
require.NoError(t, err)
require.Len(t, resp.Items, 1)
- assert.Equal(t, "Artist Two", resp.Items[0].Item.Name)
+ assert.Equal(t, "Artist Two", resp.Items[0].Name)
// Test page out of range
resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 10, Timeframe: db.Timeframe{Period: db.PeriodAllTime}})
@@ -57,22 +57,22 @@ func TestGetTopArtistsPaginated(t *testing.T) {
require.NoError(t, err)
require.Len(t, resp.Items, 1)
assert.Equal(t, int64(1), resp.TotalCount)
- assert.Equal(t, "Artist Four", resp.Items[0].Item.Name)
+ assert.Equal(t, "Artist Four", resp.Items[0].Name)
resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodMonth}})
require.NoError(t, err)
require.Len(t, resp.Items, 2)
assert.Equal(t, int64(2), resp.TotalCount)
- assert.Equal(t, "Artist Three", resp.Items[0].Item.Name)
- assert.Equal(t, "Artist Four", resp.Items[1].Item.Name)
+ assert.Equal(t, "Artist Three", resp.Items[0].Name)
+ assert.Equal(t, "Artist Four", resp.Items[1].Name)
resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodYear}})
require.NoError(t, err)
require.Len(t, resp.Items, 3)
assert.Equal(t, int64(3), resp.TotalCount)
- assert.Equal(t, "Artist Two", resp.Items[0].Item.Name)
- assert.Equal(t, "Artist Three", resp.Items[1].Item.Name)
- assert.Equal(t, "Artist Four", resp.Items[2].Item.Name)
+ assert.Equal(t, "Artist Two", resp.Items[0].Name)
+ assert.Equal(t, "Artist Three", resp.Items[1].Name)
+ assert.Equal(t, "Artist Four", resp.Items[2].Name)
// Test specify dates
@@ -82,11 +82,11 @@ func TestGetTopArtistsPaginated(t *testing.T) {
require.NoError(t, err)
require.Len(t, resp.Items, 1)
assert.Equal(t, int64(1), resp.TotalCount)
- assert.Equal(t, "Artist One", resp.Items[0].Item.Name)
+ assert.Equal(t, "Artist One", resp.Items[0].Name)
resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Month: 6, Year: 2024}})
require.NoError(t, err)
require.Len(t, resp.Items, 1)
assert.Equal(t, int64(1), resp.TotalCount)
- assert.Equal(t, "Artist Two", resp.Items[0].Item.Name)
+ assert.Equal(t, "Artist Two", resp.Items[0].Name)
}
diff --git a/internal/db/psql/top_tracks.go b/internal/db/psql/top_tracks.go
index 89960e8..da34efc 100644
--- a/internal/db/psql/top_tracks.go
+++ b/internal/db/psql/top_tracks.go
@@ -11,14 +11,14 @@ import (
"github.com/gabehf/koito/internal/repository"
)
-func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[db.RankedItem[*models.Track]], error) {
+func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[*models.Track], error) {
l := logger.FromContext(ctx)
offset := (opts.Page - 1) * opts.Limit
t1, t2 := db.TimeframeToTimeRange(opts.Timeframe)
if opts.Limit == 0 {
opts.Limit = DefaultItemsPerPage
}
- var tracks []db.RankedItem[*models.Track]
+ var tracks []*models.Track
var count int64
if opts.AlbumID > 0 {
l.Debug().Msgf("Fetching top %d tracks on page %d from range %v to %v",
@@ -33,7 +33,7 @@ func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts)
if err != nil {
return nil, fmt.Errorf("GetTopTracksPaginated: GetTopTracksInReleasePaginated: %w", err)
}
- tracks = make([]db.RankedItem[*models.Track], len(rows))
+ tracks = make([]*models.Track, len(rows))
for i, row := range rows {
artists := make([]models.SimpleArtist, 0)
err = json.Unmarshal(row.Artists, &artists)
@@ -50,8 +50,7 @@ func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts)
AlbumID: row.ReleaseID,
Artists: artists,
}
- tracks[i].Item = t
- tracks[i].Rank = row.Rank
+ tracks[i] = t
}
count, err = d.q.CountTopTracksByRelease(ctx, repository.CountTopTracksByReleaseParams{
ListenedAt: t1,
@@ -74,7 +73,7 @@ func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts)
if err != nil {
return nil, fmt.Errorf("GetTopTracksPaginated: GetTopTracksByArtistPaginated: %w", err)
}
- tracks = make([]db.RankedItem[*models.Track], len(rows))
+ tracks = make([]*models.Track, len(rows))
for i, row := range rows {
artists := make([]models.SimpleArtist, 0)
err = json.Unmarshal(row.Artists, &artists)
@@ -91,8 +90,7 @@ func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts)
AlbumID: row.ReleaseID,
Artists: artists,
}
- tracks[i].Item = t
- tracks[i].Rank = row.Rank
+ tracks[i] = t
}
count, err = d.q.CountTopTracksByArtist(ctx, repository.CountTopTracksByArtistParams{
ListenedAt: t1,
@@ -114,7 +112,7 @@ func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts)
if err != nil {
return nil, fmt.Errorf("GetTopTracksPaginated: GetTopTracksPaginated: %w", err)
}
- tracks = make([]db.RankedItem[*models.Track], len(rows))
+ tracks = make([]*models.Track, len(rows))
for i, row := range rows {
artists := make([]models.SimpleArtist, 0)
err = json.Unmarshal(row.Artists, &artists)
@@ -131,8 +129,7 @@ func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts)
AlbumID: row.ReleaseID,
Artists: artists,
}
- tracks[i].Item = t
- tracks[i].Rank = row.Rank
+ tracks[i] = t
}
count, err = d.q.CountTopTracks(ctx, repository.CountTopTracksParams{
ListenedAt: t1,
@@ -144,7 +141,7 @@ func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts)
l.Debug().Msgf("Database responded with %d tracks out of a total %d", len(rows), count)
}
- return &db.PaginatedResponse[db.RankedItem[*models.Track]]{
+ return &db.PaginatedResponse[*models.Track]{
Items: tracks,
TotalCount: count,
ItemsPerPage: int32(opts.Limit),
diff --git a/internal/db/psql/top_tracks_test.go b/internal/db/psql/top_tracks_test.go
index 934d9b7..15f898f 100644
--- a/internal/db/psql/top_tracks_test.go
+++ b/internal/db/psql/top_tracks_test.go
@@ -18,19 +18,19 @@ func TestGetTopTracksPaginated(t *testing.T) {
require.NoError(t, err)
require.Len(t, resp.Items, 4)
assert.Equal(t, int64(4), resp.TotalCount)
- assert.Equal(t, "Track One", resp.Items[0].Item.Title)
- assert.Equal(t, "Track Two", resp.Items[1].Item.Title)
- assert.Equal(t, "Track Three", resp.Items[2].Item.Title)
- assert.Equal(t, "Track Four", resp.Items[3].Item.Title)
+ assert.Equal(t, "Track One", resp.Items[0].Title)
+ assert.Equal(t, "Track Two", resp.Items[1].Title)
+ assert.Equal(t, "Track Three", resp.Items[2].Title)
+ assert.Equal(t, "Track Four", resp.Items[3].Title)
// ensure artists are included
- require.Len(t, resp.Items[0].Item.Artists, 1)
- assert.Equal(t, "Artist One", resp.Items[0].Item.Artists[0].Name)
+ require.Len(t, resp.Items[0].Artists, 1)
+ assert.Equal(t, "Artist One", resp.Items[0].Artists[0].Name)
// Test pagination
resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Timeframe: db.Timeframe{Period: db.PeriodAllTime}})
require.NoError(t, err)
require.Len(t, resp.Items, 1)
- assert.Equal(t, "Track Two", resp.Items[0].Item.Title)
+ assert.Equal(t, "Track Two", resp.Items[0].Title)
// Test page out of range
resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 10, Timeframe: db.Timeframe{Period: db.PeriodAllTime}})
@@ -60,41 +60,41 @@ func TestGetTopTracksPaginated(t *testing.T) {
require.NoError(t, err)
require.Len(t, resp.Items, 1)
assert.Equal(t, int64(1), resp.TotalCount)
- assert.Equal(t, "Track Four", resp.Items[0].Item.Title)
+ assert.Equal(t, "Track Four", resp.Items[0].Title)
resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodMonth}})
require.NoError(t, err)
require.Len(t, resp.Items, 2)
assert.Equal(t, int64(2), resp.TotalCount)
- assert.Equal(t, "Track Three", resp.Items[0].Item.Title)
- assert.Equal(t, "Track Four", resp.Items[1].Item.Title)
+ assert.Equal(t, "Track Three", resp.Items[0].Title)
+ assert.Equal(t, "Track Four", resp.Items[1].Title)
resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodYear}})
require.NoError(t, err)
require.Len(t, resp.Items, 3)
assert.Equal(t, int64(3), resp.TotalCount)
- assert.Equal(t, "Track Two", resp.Items[0].Item.Title)
- assert.Equal(t, "Track Three", resp.Items[1].Item.Title)
- assert.Equal(t, "Track Four", resp.Items[2].Item.Title)
+ assert.Equal(t, "Track Two", resp.Items[0].Title)
+ assert.Equal(t, "Track Three", resp.Items[1].Title)
+ assert.Equal(t, "Track Four", resp.Items[2].Title)
// Test filter by artists and releases
resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodAllTime}, ArtistID: 1})
require.NoError(t, err)
require.Len(t, resp.Items, 1)
assert.Equal(t, int64(1), resp.TotalCount)
- assert.Equal(t, "Track One", resp.Items[0].Item.Title)
+ assert.Equal(t, "Track One", resp.Items[0].Title)
resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodAllTime}, AlbumID: 2})
require.NoError(t, err)
require.Len(t, resp.Items, 1)
assert.Equal(t, int64(1), resp.TotalCount)
- assert.Equal(t, "Track Two", resp.Items[0].Item.Title)
+ assert.Equal(t, "Track Two", resp.Items[0].Title)
// when both artistID and albumID are specified, artist id is ignored
resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodAllTime}, AlbumID: 2, ArtistID: 1})
require.NoError(t, err)
require.Len(t, resp.Items, 1)
assert.Equal(t, int64(1), resp.TotalCount)
- assert.Equal(t, "Track Two", resp.Items[0].Item.Title)
+ assert.Equal(t, "Track Two", resp.Items[0].Title)
// Test specify dates
@@ -104,11 +104,11 @@ func TestGetTopTracksPaginated(t *testing.T) {
require.NoError(t, err)
require.Len(t, resp.Items, 1)
assert.Equal(t, int64(1), resp.TotalCount)
- assert.Equal(t, "Track One", resp.Items[0].Item.Title)
+ assert.Equal(t, "Track One", resp.Items[0].Title)
resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Month: 6, Year: 2024}})
require.NoError(t, err)
require.Len(t, resp.Items, 1)
assert.Equal(t, int64(1), resp.TotalCount)
- assert.Equal(t, "Track Two", resp.Items[0].Item.Title)
+ assert.Equal(t, "Track Two", resp.Items[0].Title)
}
diff --git a/internal/db/psql/track.go b/internal/db/psql/track.go
index d4cc616..f20263a 100644
--- a/internal/db/psql/track.go
+++ b/internal/db/psql/track.go
@@ -21,13 +21,37 @@ func (d *Psql) GetTrack(ctx context.Context, opts db.GetTrackOpts) (*models.Trac
l := logger.FromContext(ctx)
var track models.Track
- if opts.MusicBrainzID != uuid.Nil {
+ if opts.ID != 0 {
+ l.Debug().Msgf("Fetching track from DB with id %d", opts.ID)
+ t, err := d.q.GetTrack(ctx, opts.ID)
+ if err != nil {
+ return nil, fmt.Errorf("GetTrack: GetTrack By ID: %w", err)
+ }
+ track = models.Track{
+ ID: t.ID,
+ MbzID: t.MusicBrainzID,
+ Title: t.Title,
+ AlbumID: t.ReleaseID,
+ Image: t.Image,
+ Duration: t.Duration,
+ }
+ err = json.Unmarshal(t.Artists, &track.Artists)
+ if err != nil {
+ return nil, fmt.Errorf("GetTrack: json.Unmarshal: %w", err)
+ }
+ } else if opts.MusicBrainzID != uuid.Nil {
l.Debug().Msgf("Fetching track from DB with MusicBrainz ID %s", opts.MusicBrainzID)
t, err := d.q.GetTrackByMbzID(ctx, &opts.MusicBrainzID)
if err != nil {
return nil, fmt.Errorf("GetTrack: GetTrackByMbzID: %w", err)
}
- opts.ID = t.ID
+ track = models.Track{
+ ID: t.ID,
+ MbzID: t.MusicBrainzID,
+ Title: t.Title,
+ AlbumID: t.ReleaseID,
+ Duration: t.Duration,
+ }
} else if len(opts.ArtistIDs) > 0 && opts.ReleaseID != 0 {
l.Debug().Msgf("Fetching track from DB from release id %d with title '%s' and artist id(s) '%v'", opts.ReleaseID, opts.Title, opts.ArtistIDs)
t, err := d.q.GetTrackByTrackInfo(ctx, repository.GetTrackByTrackInfoParams{
@@ -38,19 +62,21 @@ func (d *Psql) GetTrack(ctx context.Context, opts db.GetTrackOpts) (*models.Trac
if err != nil {
return nil, fmt.Errorf("GetTrack: GetTrackByTrackInfo: %w", err)
}
- opts.ID = t.ID
- }
-
- l.Debug().Msgf("Fetching track from DB with id %d", opts.ID)
- t, err := d.q.GetTrack(ctx, opts.ID)
- if err != nil {
- return nil, fmt.Errorf("GetTrack: GetTrack By ID: %w", err)
+ track = models.Track{
+ ID: t.ID,
+ MbzID: t.MusicBrainzID,
+ Title: t.Title,
+ AlbumID: t.ReleaseID,
+ Duration: t.Duration,
+ }
+ } else {
+ return nil, errors.New("GetTrack: insufficient information to get track")
}
count, err := d.q.CountListensFromTrack(ctx, repository.CountListensFromTrackParams{
ListenedAt: time.Unix(0, 0),
ListenedAt_2: time.Now(),
- TrackID: opts.ID,
+ TrackID: track.ID,
})
if err != nil {
return nil, fmt.Errorf("GetTrack: CountListensFromTrack: %w", err)
@@ -58,37 +84,20 @@ func (d *Psql) GetTrack(ctx context.Context, opts db.GetTrackOpts) (*models.Trac
seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{
Timeframe: db.Timeframe{Period: db.PeriodAllTime},
- TrackID: opts.ID,
+ TrackID: track.ID,
})
if err != nil {
return nil, fmt.Errorf("GetTrack: CountTimeListenedToItem: %w", err)
}
- firstListen, err := d.q.GetFirstListenFromTrack(ctx, opts.ID)
+ firstListen, err := d.q.GetFirstListenFromTrack(ctx, track.ID)
if err != nil && !errors.Is(err, pgx.ErrNoRows) {
return nil, fmt.Errorf("GetAlbum: GetFirstListenFromRelease: %w", err)
}
- rank, err := d.q.GetTrackAllTimeRank(ctx, opts.ID)
- if err != nil && !errors.Is(err, pgx.ErrNoRows) {
- return nil, fmt.Errorf("GetAlbum: GetTrackAllTimeRank: %w", err)
- }
- track = models.Track{
- ID: t.ID,
- MbzID: t.MusicBrainzID,
- Title: t.Title,
- AlbumID: t.ReleaseID,
- Image: t.Image,
- Duration: t.Duration,
- AllTimeRank: rank.Rank,
- ListenCount: count,
- TimeListened: seconds,
- FirstListen: firstListen.ListenedAt.Unix(),
- }
- err = json.Unmarshal(t.Artists, &track.Artists)
- if err != nil {
- return nil, fmt.Errorf("GetTrack: json.Unmarshal: %w", err)
- }
+ track.ListenCount = count
+ track.TimeListened = seconds
+ track.FirstListen = firstListen.ListenedAt.Unix()
return &track, nil
}
@@ -137,13 +146,6 @@ func (d *Psql) SaveTrack(ctx context.Context, opts db.SaveTrackOpts) (*models.Tr
if err != nil {
return nil, fmt.Errorf("SaveTrack: AssociateArtistToTrack: %w", err)
}
- err = qtx.AssociateArtistToRelease(ctx, repository.AssociateArtistToReleaseParams{
- ArtistID: aid,
- ReleaseID: trackRow.ReleaseID,
- })
- if err != nil {
- return nil, fmt.Errorf("SaveTrack: AssociateArtistToTrack: %w", err)
- }
}
// insert primary alias
err = qtx.InsertTrackAlias(ctx, repository.InsertTrackAliasParams{
@@ -240,28 +242,7 @@ func (d *Psql) SaveTrackAliases(ctx context.Context, id int32, aliases []string,
}
func (d *Psql) DeleteTrack(ctx context.Context, id int32) error {
- l := logger.FromContext(ctx)
- tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
- if err != nil {
- l.Err(err).Msg("Failed to begin transaction")
- return fmt.Errorf("DeleteTrack: %w", err)
- }
- defer tx.Rollback(ctx)
- qtx := d.q.WithTx(tx)
-
- err = qtx.DeleteTrack(ctx, id)
- if err != nil {
- return fmt.Errorf("DeleteTrack: DeleteTrack: %w", err)
- }
-
- // also clean orphaned entries to ensure artists are disassociated with releases where
- // they no longer have any tracks on the release
- err = qtx.CleanOrphanedEntries(ctx)
- if err != nil {
- return fmt.Errorf("DeleteTrack: CleanOrphanedEntries: %w", err)
- }
-
- return tx.Commit(ctx)
+ return d.q.DeleteTrack(ctx, id)
}
func (d *Psql) DeleteTrackAlias(ctx context.Context, id int32, alias string) error {
@@ -399,7 +380,7 @@ func (d *Psql) SetPrimaryTrackArtist(ctx context.Context, id int32, artistId int
func (d *Psql) GetTracksWithNoDurationButHaveMbzID(ctx context.Context, from int32) ([]*models.Track, error) {
results, err := d.q.GetTracksWithNoDurationButHaveMbzID(ctx, repository.GetTracksWithNoDurationButHaveMbzIDParams{
Limit: 20,
- ID: from,
+ ID: 0,
})
if errors.Is(err, pgx.ErrNoRows) {
return nil, nil
diff --git a/internal/db/psql/track_test.go b/internal/db/psql/track_test.go
index f0ecd09..7fa58d4 100644
--- a/internal/db/psql/track_test.go
+++ b/internal/db/psql/track_test.go
@@ -62,7 +62,7 @@ func testDataForTracks(t *testing.T) {
VALUES (1, 1), (2, 2)`)
require.NoError(t, err)
- // Insert listens
+ // Associate tracks with artists
err = store.Exec(context.Background(),
`INSERT INTO listens (user_id, track_id, listened_at)
VALUES (1, 1, NOW()), (1, 2, NOW())`)
@@ -228,27 +228,3 @@ func TestDeleteTrack(t *testing.T) {
_, err = store.Count(ctx, `SELECT * FROM tracks WHERE id = 2`)
require.ErrorIs(t, err, pgx.ErrNoRows) // no rows error
}
-
-func TestReleaseAssociations(t *testing.T) {
- testDataForTracks(t)
- ctx := context.Background()
-
- track, err := store.SaveTrack(ctx, db.SaveTrackOpts{
- Title: "Track Three",
- AlbumID: 2,
- ArtistIDs: []int32{2, 1}, // Artist Two feat. Artist One
- Duration: 100,
- })
- require.NoError(t, err)
- count, err := store.Count(ctx, `SELECT COUNT(*) FROM artist_releases WHERE release_id = 2`)
- require.NoError(t, err)
- require.Equal(t, 2, count, "expected release to be associated with artist from inserted track")
-
- err = store.DeleteTrack(ctx, track.ID)
- require.NoError(t, err)
-
- count, err = store.Count(ctx, `SELECT COUNT(*) FROM artist_releases WHERE release_id = 2`)
- require.NoError(t, err)
- require.Equal(t, 1, count, "expected artist no longer on release to be disassociated from release")
-
-}
diff --git a/internal/db/types.go b/internal/db/types.go
index 46d3c01..93ff031 100644
--- a/internal/db/types.go
+++ b/internal/db/types.go
@@ -28,11 +28,6 @@ type PaginatedResponse[T any] struct {
CurrentPage int32 `json:"current_page"`
}
-type RankedItem[T any] struct {
- Item T `json:"item"`
- Rank int64 `json:"rank"`
-}
-
type ExportItem struct {
ListenedAt time.Time
UserID int32
diff --git a/internal/images/deezer.go b/internal/images/deezer.go
index 2ced676..8fb7b27 100644
--- a/internal/images/deezer.go
+++ b/internal/images/deezer.go
@@ -110,9 +110,6 @@ func (c *DeezerClient) getEntity(ctx context.Context, endpoint string, result an
return nil
}
-// Deezer behavior is that it serves a default image when it can't find one for an artist, so
-// this function will just download the default image thinking that it is an actual artist image.
-// I don't know how to fix this yet.
func (c *DeezerClient) GetArtistImages(ctx context.Context, aliases []string) (string, error) {
l := logger.FromContext(ctx)
resp := new(DeezerArtistResponse)
diff --git a/internal/images/imagesrc.go b/internal/images/imagesrc.go
index 46fe87a..21eec65 100644
--- a/internal/images/imagesrc.go
+++ b/internal/images/imagesrc.go
@@ -5,7 +5,6 @@ import (
"context"
"fmt"
"net/http"
- "strings"
"sync"
"github.com/gabehf/koito/internal/logger"
@@ -17,8 +16,6 @@ type ImageSource struct {
deezerC *DeezerClient
subsonicEnabled bool
subsonicC *SubsonicClient
- lastfmEnabled bool
- lastfmC *LastFMClient
caaEnabled bool
}
type ImageSourceOpts struct {
@@ -26,7 +23,6 @@ type ImageSourceOpts struct {
EnableCAA bool
EnableDeezer bool
EnableSubsonic bool
- EnableLastFM bool
}
var once sync.Once
@@ -34,7 +30,6 @@ var imgsrc ImageSource
type ArtistImageOpts struct {
Aliases []string
- MBID *uuid.UUID
}
type AlbumImageOpts struct {
@@ -60,10 +55,6 @@ func Initialize(opts ImageSourceOpts) {
imgsrc.subsonicEnabled = true
imgsrc.subsonicC = NewSubsonicClient()
}
- if opts.EnableLastFM {
- imgsrc.lastfmEnabled = true
- imgsrc.lastfmC = NewLastFMClient()
- }
})
}
@@ -74,46 +65,31 @@ func Shutdown() {
func GetArtistImage(ctx context.Context, opts ArtistImageOpts) (string, error) {
l := logger.FromContext(ctx)
if imgsrc.subsonicEnabled {
- img, err := imgsrc.subsonicC.GetArtistImage(ctx, opts.MBID, opts.Aliases[0])
+ img, err := imgsrc.subsonicC.GetArtistImage(ctx, opts.Aliases[0])
if err != nil {
- l.Debug().Err(err).Msg("GetArtistImage: Could not find artist image from Subsonic")
- } else if img != "" {
+ return "", err
+ }
+ if img != "" {
return img, nil
}
- } else {
- l.Debug().Msg("GetArtistImage: Subsonic image fetching is disabled")
+ l.Debug().Msg("Could not find artist image from Subsonic")
}
- if imgsrc.lastfmEnabled {
- img, err := imgsrc.lastfmC.GetArtistImage(ctx, opts.MBID, opts.Aliases[0])
- if err != nil {
- l.Debug().Err(err).Msg("GetArtistImage: Could not find artist image from LastFM")
- } else if img != "" {
- return img, nil
- }
- } else {
- l.Debug().Msg("GetArtistImage: LastFM image fetching is disabled")
- }
- if imgsrc.deezerEnabled {
+ if imgsrc.deezerC != nil {
img, err := imgsrc.deezerC.GetArtistImages(ctx, opts.Aliases)
if err != nil {
- l.Debug().Err(err).Msg("GetArtistImage: Could not find artist image from Deezer")
return "", err
- } else if img != "" {
- return img, nil
}
- } else {
- l.Debug().Msg("GetArtistImage: Deezer image fetching is disabled")
+ return img, nil
}
l.Warn().Msg("GetArtistImage: No image providers are enabled")
return "", nil
}
-
func GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) {
l := logger.FromContext(ctx)
if imgsrc.subsonicEnabled {
- img, err := imgsrc.subsonicC.GetAlbumImage(ctx, opts.ReleaseMbzID, opts.Artists[0], opts.Album)
+ img, err := imgsrc.subsonicC.GetAlbumImage(ctx, opts.Artists[0], opts.Album)
if err != nil {
- l.Debug().Err(err).Msg("GetAlbumImage: Could not find artist image from Subsonic")
+ return "", err
}
if img != "" {
return img, nil
@@ -126,41 +102,29 @@ func GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) {
url := fmt.Sprintf(caaBaseUrl+"/release/%s/front", opts.ReleaseMbzID.String())
resp, err := http.DefaultClient.Head(url)
if err != nil {
- l.Debug().Err(err).Msg("GetAlbumImage: Could not find artist image from CoverArtArchive with Release MBID")
- } else {
- if resp.StatusCode == 200 {
- return url, nil
- } else {
- l.Debug().Int("status", resp.StatusCode).Msg("GetAlbumImage: Got non-OK response from CoverArtArchive")
- }
+ return "", err
}
+ if resp.StatusCode == 200 {
+ return url, nil
+ }
+ l.Debug().Str("url", url).Str("status", resp.Status).Msg("Could not find album cover from CoverArtArchive with MusicBrainz release ID")
}
if opts.ReleaseGroupMbzID != nil && *opts.ReleaseGroupMbzID != uuid.Nil {
url := fmt.Sprintf(caaBaseUrl+"/release-group/%s/front", opts.ReleaseGroupMbzID.String())
resp, err := http.DefaultClient.Head(url)
if err != nil {
- l.Debug().Err(err).Msg("GetAlbumImage: Could not find artist image from CoverArtArchive with Release Group MBID")
+ return "", err
}
if resp.StatusCode == 200 {
return url, nil
}
+ l.Debug().Str("url", url).Str("status", resp.Status).Msg("Could not find album cover from CoverArtArchive with MusicBrainz release group ID")
}
}
- if imgsrc.lastfmEnabled {
- img, err := imgsrc.lastfmC.GetAlbumImage(ctx, opts.ReleaseMbzID, opts.Artists[0], opts.Album)
- if err != nil {
- l.Debug().Err(err).Msg("GetAlbumImage: Could not find artist image from Subsonic")
- }
- if img != "" {
- return img, nil
- }
- l.Debug().Msg("Could not find album cover from Subsonic")
- }
if imgsrc.deezerEnabled {
l.Debug().Msg("Attempting to find album image from Deezer")
img, err := imgsrc.deezerC.GetAlbumImages(ctx, opts.Artists, opts.Album)
if err != nil {
- l.Debug().Err(err).Msg("GetAlbumImage: Could not find artist image from Deezer")
return "", err
}
return img, nil
@@ -168,23 +132,3 @@ func GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) {
l.Warn().Msg("GetAlbumImage: No image providers are enabled")
return "", nil
}
-
-// ValidateImageURL checks if the URL points to a valid image by performing a HEAD request.
-func ValidateImageURL(url string) error {
- resp, err := http.Head(url)
- if err != nil {
- return fmt.Errorf("ValidateImageURL: http.Head: %w", err)
- }
- defer resp.Body.Close()
-
- if resp.StatusCode != http.StatusOK {
- return fmt.Errorf("ValidateImageURL: HEAD request failed, status code: %d", resp.StatusCode)
- }
-
- contentType := resp.Header.Get("Content-Type")
- if !strings.HasPrefix(contentType, "image/") {
- return fmt.Errorf("ValidateImageURL: URL does not point to an image, content type: %s", contentType)
- }
-
- return nil
-}
diff --git a/internal/images/lastfm.go b/internal/images/lastfm.go
deleted file mode 100644
index f35f6a3..0000000
--- a/internal/images/lastfm.go
+++ /dev/null
@@ -1,298 +0,0 @@
-package images
-
-import (
- "context"
- "encoding/json"
- "fmt"
- "io"
- "net/http"
- "net/url"
- "strings"
-
- "github.com/gabehf/koito/internal/cfg"
- "github.com/gabehf/koito/internal/logger"
- "github.com/gabehf/koito/queue"
- "github.com/google/uuid"
-)
-
-// i told gemini to write this cuz i figured it would be simple enough and
-// it looks like it just works? maybe ai is actually worth one quintillion gallons of water
-
-type LastFMClient struct {
- apiKey string
- baseUrl string
- userAgent string
- requestQueue *queue.RequestQueue
-}
-
-// LastFM JSON structures use "#text" for the value of XML-mapped fields
-type lastFMImage struct {
- URL string `json:"#text"`
- Size string `json:"size"`
-}
-
-type lastFMAlbumResponse struct {
- Album struct {
- Name string `json:"name"`
- Image []lastFMImage `json:"image"`
- } `json:"album"`
- Error int `json:"error"`
- Message string `json:"message"`
-}
-
-type lastFMArtistResponse struct {
- Artist struct {
- Name string `json:"name"`
- Image []lastFMImage `json:"image"`
- } `json:"artist"`
- Error int `json:"error"`
- Message string `json:"message"`
-}
-
-const (
- lastFMApiBaseUrl = "http://ws.audioscrobbler.com/2.0/"
-)
-
-func NewLastFMClient() *LastFMClient {
- ret := new(LastFMClient)
- ret.apiKey = cfg.LastFMApiKey()
- ret.baseUrl = lastFMApiBaseUrl
- ret.userAgent = cfg.UserAgent()
- ret.requestQueue = queue.NewRequestQueue(5, 5)
- return ret
-}
-
-func (c *LastFMClient) queue(ctx context.Context, req *http.Request) ([]byte, error) {
- l := logger.FromContext(ctx)
- req.Header.Set("User-Agent", c.userAgent)
- req.Header.Set("Accept", "application/json")
-
- resultChan := c.requestQueue.Enqueue(func(client *http.Client, done chan<- queue.RequestResult) {
- resp, err := client.Do(req)
- if err != nil {
- l.Debug().Err(err).Str("url", req.URL.String()).Msg("Failed to contact LastFM")
- done <- queue.RequestResult{Err: err}
- return
- }
- defer resp.Body.Close()
-
- // LastFM might return 200 OK even for API errors (like "Artist not found"),
- // so we rely on parsing the JSON body for logic errors later,
- // but we still check for HTTP protocol failures here.
- if resp.StatusCode >= 500 {
- err = fmt.Errorf("received server error from LastFM: %s", resp.Status)
- done <- queue.RequestResult{Body: nil, Err: err}
- return
- }
-
- body, err := io.ReadAll(resp.Body)
- done <- queue.RequestResult{Body: body, Err: err}
- })
-
- result := <-resultChan
- return result.Body, result.Err
-}
-
-func (c *LastFMClient) getEntity(ctx context.Context, params url.Values, result any) error {
- l := logger.FromContext(ctx)
-
- // Add standard parameters
- params.Set("api_key", c.apiKey)
- params.Set("format", "json")
-
- // Construct URL
- reqUrl, _ := url.Parse(c.baseUrl)
- reqUrl.RawQuery = params.Encode()
-
- l.Debug().Msgf("Sending request to LastFM: GET %s", reqUrl.String())
-
- req, err := http.NewRequest("GET", reqUrl.String(), nil)
- if err != nil {
- return fmt.Errorf("getEntity: %w", err)
- }
-
- l.Debug().Msg("Adding LastFM request to queue")
- body, err := c.queue(ctx, req)
- if err != nil {
- l.Err(err).Msg("LastFM request failed")
- return fmt.Errorf("getEntity: %w", err)
- }
-
- err = json.Unmarshal(body, result)
- if err != nil {
- l.Err(err).Msg("Failed to unmarshal LastFM response")
- return fmt.Errorf("getEntity: %w", err)
- }
-
- return nil
-}
-
-// selectBestImage picks the largest available image from the LastFM slice
-func (c *LastFMClient) selectBestImage(images []lastFMImage) string {
- // Rank preference: mega > extralarge > large > medium > small
- // Since LastFM usually returns them in order of size, we could take the last one,
- // but a map lookup is safer against API changes.
-
- imgMap := make(map[string]string)
- for _, img := range images {
- if img.URL != "" {
- imgMap[img.Size] = img.URL
- }
- }
-
- if url, ok := imgMap["mega"]; ok {
- if err := ValidateImageURL(overrideImgSize(url)); err == nil {
- return overrideImgSize(url)
- } else {
- return url
- }
- }
- if url, ok := imgMap["extralarge"]; ok {
- if err := ValidateImageURL(overrideImgSize(url)); err == nil {
- return overrideImgSize(url)
- } else {
- return url
- }
- }
- if url, ok := imgMap["large"]; ok {
- if err := ValidateImageURL(overrideImgSize(url)); err == nil {
- return overrideImgSize(url)
- } else {
- return url
- }
- }
- if url, ok := imgMap["medium"]; ok {
- return url
- }
- if url, ok := imgMap["small"]; ok {
- return url
- }
-
- return ""
-}
-
-// lastfm seems to only return a 300x300 image even for "mega" and "extralarge" images, so I'm cheating
-func overrideImgSize(url string) string {
- return strings.Replace(url, "300x300", "600x600", 1)
-}
-
-func (c *LastFMClient) GetAlbumImage(ctx context.Context, mbid *uuid.UUID, artist, album string) (string, error) {
- l := logger.FromContext(ctx)
- resp := new(lastFMAlbumResponse)
- l.Debug().Msgf("Finding album image for %s from artist %s", album, artist)
-
- // Helper to run the fetch
- fetch := func(query paramsBuilder) error {
- params := url.Values{}
- params.Set("method", "album.getInfo")
- query(params)
- return c.getEntity(ctx, params, resp)
- }
-
- // 1. Try MBID search first
- if mbid != nil {
- l.Debug().Str("mbid", mbid.String()).Msg("Searching album image by MBID")
- err := fetch(func(p url.Values) {
- p.Set("mbid", mbid.String())
- })
-
- // If success and no API error code
- if err == nil && resp.Error == 0 && len(resp.Album.Image) > 0 {
- best := c.selectBestImage(resp.Album.Image)
- if best != "" {
- return best, nil
- }
- } else if resp.Error != 0 {
- l.Debug().Int("api_error", resp.Error).Msg("LastFM MBID lookup failed, falling back to name")
- }
- }
-
- // 2. Fallback to Artist + Album name match
- l.Debug().Str("title", album).Str("artist", artist).Msg("Searching album image by title and artist")
-
- // Clear previous response structure just in case
- resp = new(lastFMAlbumResponse)
-
- err := fetch(func(p url.Values) {
- p.Set("artist", artist)
- p.Set("album", album)
- // Auto-correct spelling is useful for name lookups
- p.Set("autocorrect", "1")
- })
-
- if err != nil {
- return "", fmt.Errorf("GetAlbumImage: %v", err)
- }
-
- if resp.Error != 0 {
- return "", fmt.Errorf("GetAlbumImage: LastFM API error %d: %s", resp.Error, resp.Message)
- }
-
- best := c.selectBestImage(resp.Album.Image)
- if best == "" {
- return "", fmt.Errorf("GetAlbumImage: no suitable image found")
- }
-
- return best, nil
-}
-
-func (c *LastFMClient) GetArtistImage(ctx context.Context, mbid *uuid.UUID, artist string) (string, error) {
- l := logger.FromContext(ctx)
- resp := new(lastFMArtistResponse)
- l.Debug().Msgf("Finding artist image for %s", artist)
-
- fetch := func(query paramsBuilder) error {
- params := url.Values{}
- params.Set("method", "artist.getInfo")
- query(params)
- return c.getEntity(ctx, params, resp)
- }
-
- // 1. Try MBID search
- if mbid != nil {
- l.Debug().Str("mbid", mbid.String()).Msg("Searching artist image by MBID")
- err := fetch(func(p url.Values) {
- p.Set("mbid", mbid.String())
- })
-
- if err == nil && resp.Error == 0 && len(resp.Artist.Image) > 0 {
- best := c.selectBestImage(resp.Artist.Image)
- if best != "" {
- // Validate to match Subsonic implementation behavior
- if err := ValidateImageURL(best); err == nil {
- return best, nil
- }
- }
- }
- }
-
- // 2. Fallback to Artist name
- l.Debug().Str("artist", artist).Msg("Searching artist image by name")
- resp = new(lastFMArtistResponse)
-
- err := fetch(func(p url.Values) {
- p.Set("artist", artist)
- p.Set("autocorrect", "1")
- })
-
- if err != nil {
- return "", fmt.Errorf("GetArtistImage: %v", err)
- }
-
- if resp.Error != 0 {
- return "", fmt.Errorf("GetArtistImage: LastFM API error %d: %s", resp.Error, resp.Message)
- }
-
- best := c.selectBestImage(resp.Artist.Image)
- if best == "" {
- return "", fmt.Errorf("GetArtistImage: no suitable image found")
- }
-
- if err := ValidateImageURL(best); err != nil {
- return "", fmt.Errorf("GetArtistImage: failed to validate image url")
- }
-
- return best, nil
-}
-
-type paramsBuilder func(url.Values)
diff --git a/internal/images/subsonic.go b/internal/images/subsonic.go
index 4fd55c0..961b4c2 100644
--- a/internal/images/subsonic.go
+++ b/internal/images/subsonic.go
@@ -11,7 +11,6 @@ import (
"github.com/gabehf/koito/internal/cfg"
"github.com/gabehf/koito/internal/logger"
"github.com/gabehf/koito/queue"
- "github.com/google/uuid"
)
type SubsonicClient struct {
@@ -27,8 +26,6 @@ type SubsonicAlbumResponse struct {
SearchResult3 struct {
Album []struct {
CoverArt string `json:"coverArt"`
- Artist string `json:"artist"`
- MBID string `json:"musicBrainzId"`
} `json:"album"`
} `json:"searchResult3"`
} `json:"subsonic-response"`
@@ -46,7 +43,7 @@ type SubsonicArtistResponse struct {
}
const (
- subsonicAlbumSearchFmtStr = "/rest/search3?%s&f=json&query=%s&v=1.13.0&c=koito&artistCount=0&songCount=0&albumCount=10"
+ subsonicAlbumSearchFmtStr = "/rest/search3?%s&f=json&query=%s&v=1.13.0&c=koito&artistCount=0&songCount=0&albumCount=1"
subsonicArtistSearchFmtStr = "/rest/search3?%s&f=json&query=%s&v=1.13.0&c=koito&artistCount=1&songCount=0&albumCount=0"
subsonicCoverArtFmtStr = "/rest/getCoverArt?%s&id=%s&v=1.13.0&c=koito"
)
@@ -109,72 +106,32 @@ func (c *SubsonicClient) getEntity(ctx context.Context, endpoint string, result
return nil
}
-func (c *SubsonicClient) GetAlbumImage(ctx context.Context, mbid *uuid.UUID, artist, album string) (string, error) {
+func (c *SubsonicClient) GetAlbumImage(ctx context.Context, artist, album string) (string, error) {
l := logger.FromContext(ctx)
resp := new(SubsonicAlbumResponse)
l.Debug().Msgf("Finding album image for %s from artist %s", album, artist)
- // first try mbid search
- if mbid != nil {
- l.Debug().Str("mbid", mbid.String()).Msg("Searching album image by MBID")
- err := c.getEntity(ctx, fmt.Sprintf(subsonicAlbumSearchFmtStr, c.authParams, url.QueryEscape(mbid.String())), resp)
- if err != nil {
- return "", fmt.Errorf("GetAlbumImage: %v", err)
- }
- l.Debug().Any("subsonic_response", resp).Msg("")
- if len(resp.SubsonicResponse.SearchResult3.Album) >= 1 {
- return cfg.SubsonicUrl() + fmt.Sprintf(subsonicCoverArtFmtStr, c.authParams, url.QueryEscape(resp.SubsonicResponse.SearchResult3.Album[0].CoverArt)), nil
- }
- }
- // else do artist match
- l.Debug().Str("title", album).Str("artist", artist).Msg("Searching album image by title and artist")
- err := c.getEntity(ctx, fmt.Sprintf(subsonicAlbumSearchFmtStr, c.authParams, url.QueryEscape(album)), resp)
+ err := c.getEntity(ctx, fmt.Sprintf(subsonicAlbumSearchFmtStr, c.authParams, url.QueryEscape(artist+" "+album)), resp)
if err != nil {
return "", fmt.Errorf("GetAlbumImage: %v", err)
}
- l.Debug().Any("subsonic_response", resp).Msg("")
- if len(resp.SubsonicResponse.SearchResult3.Album) < 1 {
- return "", fmt.Errorf("GetAlbumImage: failed to get album art from subsonic")
+ l.Debug().Any("subsonic_response", resp).Send()
+ if len(resp.SubsonicResponse.SearchResult3.Album) < 1 || resp.SubsonicResponse.SearchResult3.Album[0].CoverArt == "" {
+ return "", fmt.Errorf("GetAlbumImage: failed to get album art")
}
- for _, album := range resp.SubsonicResponse.SearchResult3.Album {
- if album.Artist == artist {
- return cfg.SubsonicUrl() + fmt.Sprintf(subsonicCoverArtFmtStr, c.authParams, url.QueryEscape(resp.SubsonicResponse.SearchResult3.Album[0].CoverArt)), nil
- }
- }
- return "", fmt.Errorf("GetAlbumImage: failed to get album art from subsonic")
+ return cfg.SubsonicUrl() + fmt.Sprintf(subsonicCoverArtFmtStr, c.authParams, url.QueryEscape(resp.SubsonicResponse.SearchResult3.Album[0].CoverArt)), nil
}
-func (c *SubsonicClient) GetArtistImage(ctx context.Context, mbid *uuid.UUID, artist string) (string, error) {
+func (c *SubsonicClient) GetArtistImage(ctx context.Context, artist string) (string, error) {
l := logger.FromContext(ctx)
resp := new(SubsonicArtistResponse)
l.Debug().Msgf("Finding artist image for %s", artist)
- // first try mbid search
- if mbid != nil {
- l.Debug().Str("mbid", mbid.String()).Msg("Searching artist image by MBID")
- err := c.getEntity(ctx, fmt.Sprintf(subsonicArtistSearchFmtStr, c.authParams, url.QueryEscape(mbid.String())), resp)
- if err != nil {
- return "", fmt.Errorf("GetArtistImage: %v", err)
- }
- l.Debug().Any("subsonic_response", resp).Msg("")
- if len(resp.SubsonicResponse.SearchResult3.Artist) < 1 || resp.SubsonicResponse.SearchResult3.Artist[0].ArtistImageUrl == "" {
- return "", fmt.Errorf("GetArtistImage: failed to get artist art")
- }
- // Subsonic seems to have a tendency to return an artist image even though the url is a 404
- if err = ValidateImageURL(resp.SubsonicResponse.SearchResult3.Artist[0].ArtistImageUrl); err != nil {
- return "", fmt.Errorf("GetArtistImage: failed to get validate image url")
- }
- }
- l.Debug().Str("artist", artist).Msg("Searching artist image by name")
err := c.getEntity(ctx, fmt.Sprintf(subsonicArtistSearchFmtStr, c.authParams, url.QueryEscape(artist)), resp)
if err != nil {
return "", fmt.Errorf("GetArtistImage: %v", err)
}
- l.Debug().Any("subsonic_response", resp).Msg("")
+ l.Debug().Any("subsonic_response", resp).Send()
if len(resp.SubsonicResponse.SearchResult3.Artist) < 1 || resp.SubsonicResponse.SearchResult3.Artist[0].ArtistImageUrl == "" {
return "", fmt.Errorf("GetArtistImage: failed to get artist art")
}
- // Subsonic seems to have a tendency to return an artist image even though the url is a 404
- if err = ValidateImageURL(resp.SubsonicResponse.SearchResult3.Artist[0].ArtistImageUrl); err != nil {
- return "", fmt.Errorf("GetArtistImage: failed to get validate image url")
- }
return resp.SubsonicResponse.SearchResult3.Artist[0].ArtistImageUrl, nil
}
diff --git a/internal/importer/listenbrainz.go b/internal/importer/listenbrainz.go
index 7c1a8bb..4187bbb 100644
--- a/internal/importer/listenbrainz.go
+++ b/internal/importer/listenbrainz.go
@@ -85,14 +85,7 @@ func ImportListenBrainzFile(ctx context.Context, store db.DB, mbzc mbz.MusicBrai
}
artistMbzIDs, err := utils.ParseUUIDSlice(payload.TrackMeta.AdditionalInfo.ArtistMBIDs)
if err != nil {
- l.Debug().AnErr("error", err).Msg("ImportListenBrainzFile: Failed to parse one or more UUIDs")
- }
- if len(artistMbzIDs) < 1 {
- l.Debug().AnErr("error", err).Msg("ImportListenBrainzFile: Attempting to parse artist UUIDs from mbid_mapping")
- utils.ParseUUIDSlice(payload.TrackMeta.MBIDMapping.ArtistMBIDs)
- if err != nil {
- l.Debug().AnErr("error", err).Msg("ImportListenBrainzFile: Failed to parse one or more UUIDs")
- }
+ l.Debug().Err(err).Msg("Failed to parse one or more uuids")
}
rgMbzID, err := uuid.Parse(payload.TrackMeta.AdditionalInfo.ReleaseGroupMBID)
if err != nil {
@@ -100,17 +93,11 @@ func ImportListenBrainzFile(ctx context.Context, store db.DB, mbzc mbz.MusicBrai
}
releaseMbzID, err := uuid.Parse(payload.TrackMeta.AdditionalInfo.ReleaseMBID)
if err != nil {
- releaseMbzID, err = uuid.Parse(payload.TrackMeta.MBIDMapping.ReleaseMBID)
- if err != nil {
- releaseMbzID = uuid.Nil
- }
+ releaseMbzID = uuid.Nil
}
recordingMbzID, err := uuid.Parse(payload.TrackMeta.AdditionalInfo.RecordingMBID)
if err != nil {
- recordingMbzID, err = uuid.Parse(payload.TrackMeta.MBIDMapping.RecordingMBID)
- if err != nil {
- recordingMbzID = uuid.Nil
- }
+ recordingMbzID = uuid.Nil
}
var client string
diff --git a/internal/models/album.go b/internal/models/album.go
index a295fe9..24948f9 100644
--- a/internal/models/album.go
+++ b/internal/models/album.go
@@ -12,5 +12,11 @@ type Album struct {
ListenCount int64 `json:"listen_count"`
TimeListened int64 `json:"time_listened"`
FirstListen int64 `json:"first_listen"`
- AllTimeRank int64 `json:"all_time_rank"`
}
+
+// type SimpleAlbum struct {
+// ID int32 `json:"id"`
+// Title string `json:"title"`
+// VariousArtists bool `json:"is_various_artists"`
+// Image uuid.UUID `json:"image"`
+// }
diff --git a/internal/models/artist.go b/internal/models/artist.go
index 07f09e6..7784e51 100644
--- a/internal/models/artist.go
+++ b/internal/models/artist.go
@@ -12,7 +12,6 @@ type Artist struct {
TimeListened int64 `json:"time_listened"`
FirstListen int64 `json:"first_listen"`
IsPrimary bool `json:"is_primary,omitempty"`
- AllTimeRank int64 `json:"all_time_rank"`
}
type SimpleArtist struct {
diff --git a/internal/models/track.go b/internal/models/track.go
index 4cb5b04..8eb802c 100644
--- a/internal/models/track.go
+++ b/internal/models/track.go
@@ -13,5 +13,4 @@ type Track struct {
AlbumID int32 `json:"album_id"`
TimeListened int64 `json:"time_listened"`
FirstListen int64 `json:"first_listen"`
- AllTimeRank int64 `json:"all_time_rank"`
}
diff --git a/internal/repository/artist.sql.go b/internal/repository/artist.sql.go
index 8506975..3d33446 100644
--- a/internal/repository/artist.sql.go
+++ b/internal/repository/artist.sql.go
@@ -134,39 +134,6 @@ func (q *Queries) GetArtist(ctx context.Context, id int32) (GetArtistRow, error)
return i, err
}
-const getArtistAllTimeRank = `-- name: GetArtistAllTimeRank :one
-SELECT
- artist_id,
- rank
-FROM (
- SELECT
- x.artist_id,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
- FROM (
- SELECT
- at.artist_id,
- COUNT(*) AS listen_count
- FROM listens l
- JOIN tracks t ON l.track_id = t.id
- JOIN artist_tracks at ON t.id = at.track_id
- GROUP BY at.artist_id
- ) x
- )
-WHERE artist_id = $1
-`
-
-type GetArtistAllTimeRankRow struct {
- ArtistID int32
- Rank int64
-}
-
-func (q *Queries) GetArtistAllTimeRank(ctx context.Context, artistID int32) (GetArtistAllTimeRankRow, error) {
- row := q.db.QueryRow(ctx, getArtistAllTimeRank, artistID)
- var i GetArtistAllTimeRankRow
- err := row.Scan(&i.ArtistID, &i.Rank)
- return i, err
-}
-
const getArtistByImage = `-- name: GetArtistByImage :one
SELECT id, musicbrainz_id, image, image_source FROM artists WHERE image = $1 LIMIT 1
`
@@ -254,47 +221,6 @@ func (q *Queries) GetArtistByName(ctx context.Context, alias string) (GetArtistB
return i, err
}
-const getArtistsWithoutImages = `-- name: GetArtistsWithoutImages :many
-SELECT
- id, musicbrainz_id, image, image_source, name
-FROM artists_with_name
-WHERE image IS NULL
- AND id > $2
-ORDER BY id ASC
-LIMIT $1
-`
-
-type GetArtistsWithoutImagesParams struct {
- Limit int32
- ID int32
-}
-
-func (q *Queries) GetArtistsWithoutImages(ctx context.Context, arg GetArtistsWithoutImagesParams) ([]ArtistsWithName, error) {
- rows, err := q.db.Query(ctx, getArtistsWithoutImages, arg.Limit, arg.ID)
- if err != nil {
- return nil, err
- }
- defer rows.Close()
- var items []ArtistsWithName
- for rows.Next() {
- var i ArtistsWithName
- if err := rows.Scan(
- &i.ID,
- &i.MusicBrainzID,
- &i.Image,
- &i.ImageSource,
- &i.Name,
- ); err != nil {
- return nil, err
- }
- items = append(items, i)
- }
- if err := rows.Err(); err != nil {
- return nil, err
- }
- return items, nil
-}
-
const getReleaseArtists = `-- name: GetReleaseArtists :many
SELECT
a.id, a.musicbrainz_id, a.image, a.image_source, a.name,
@@ -343,27 +269,18 @@ func (q *Queries) GetReleaseArtists(ctx context.Context, releaseID int32) ([]Get
const getTopArtistsPaginated = `-- name: GetTopArtistsPaginated :many
SELECT
- x.id,
- x.name,
- x.musicbrainz_id,
- x.image,
- x.listen_count,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
-FROM (
- SELECT
a.id,
a.name,
a.musicbrainz_id,
a.image,
COUNT(*) AS listen_count
- FROM listens l
- JOIN tracks t ON l.track_id = t.id
- JOIN artist_tracks at ON at.track_id = t.id
- JOIN artists_with_name a ON a.id = at.artist_id
- WHERE l.listened_at BETWEEN $1 AND $2
- GROUP BY a.id, a.name, a.musicbrainz_id, a.image
-) x
-ORDER BY x.listen_count DESC, x.id
+FROM listens l
+JOIN tracks t ON l.track_id = t.id
+JOIN artist_tracks at ON at.track_id = t.id
+JOIN artists_with_name a ON a.id = at.artist_id
+WHERE l.listened_at BETWEEN $1 AND $2
+GROUP BY a.id, a.name, a.musicbrainz_id, a.image, a.image_source, a.name
+ORDER BY listen_count DESC, a.id
LIMIT $3 OFFSET $4
`
@@ -380,7 +297,6 @@ type GetTopArtistsPaginatedRow struct {
MusicBrainzID *uuid.UUID
Image *uuid.UUID
ListenCount int64
- Rank int64
}
func (q *Queries) GetTopArtistsPaginated(ctx context.Context, arg GetTopArtistsPaginatedParams) ([]GetTopArtistsPaginatedRow, error) {
@@ -403,7 +319,6 @@ func (q *Queries) GetTopArtistsPaginated(ctx context.Context, arg GetTopArtistsP
&i.MusicBrainzID,
&i.Image,
&i.ListenCount,
- &i.Rank,
); err != nil {
return nil, err
}
diff --git a/internal/repository/etc.sql.go b/internal/repository/etc.sql.go
index 484f5c4..ed902ea 100644
--- a/internal/repository/etc.sql.go
+++ b/internal/repository/etc.sql.go
@@ -15,17 +15,11 @@ BEGIN
DELETE FROM tracks WHERE id NOT IN (SELECT l.track_id FROM listens l);
DELETE FROM releases WHERE id NOT IN (SELECT t.release_id FROM tracks t);
DELETE FROM artists WHERE id NOT IN (SELECT at.artist_id FROM artist_tracks at);
- DELETE FROM artist_releases ar
- WHERE NOT EXISTS (
- SELECT 1
- FROM artist_tracks at
- JOIN tracks t ON at.track_id = t.id
- WHERE at.artist_id = ar.artist_id
- AND t.release_id = ar.release_id
- );
END $$
`
+// DELETE FROM releases WHERE release_group_id NOT IN (SELECT t.release_group_id FROM tracks t);
+// DELETE FROM releases WHERE release_group_id NOT IN (SELECT rg.id FROM release_groups rg);
func (q *Queries) CleanOrphanedEntries(ctx context.Context) error {
_, err := q.db.Exec(ctx, cleanOrphanedEntries)
return err
diff --git a/internal/repository/interest.sql.go b/internal/repository/interest.sql.go
index ae77764..27c1920 100644
--- a/internal/repository/interest.sql.go
+++ b/internal/repository/interest.sql.go
@@ -11,57 +11,64 @@ import (
)
const getGroupedListensFromArtist = `-- name: GetGroupedListensFromArtist :many
-WITH bounds AS (
+WITH artist_listens AS (
SELECT
- MIN(l.listened_at) AS start_time,
- NOW() AS end_time
+ l.listened_at
FROM listens l
JOIN tracks t ON t.id = l.track_id
JOIN artist_tracks at ON at.track_id = t.id
WHERE at.artist_id = $1
),
-stats AS (
+bounds AS (
SELECT
- start_time,
- end_time,
- EXTRACT(EPOCH FROM (end_time - start_time)) AS total_seconds,
- ((end_time - start_time) / $2::int) AS bucket_interval
- FROM bounds
+ MIN(listened_at) AS start_time,
+ MAX(listened_at) AS end_time
+ FROM artist_listens
),
-bucket_series AS (
- SELECT generate_series(0, $2::int - 1) AS idx
-),
-listen_indices AS (
+bucketed AS (
SELECT
LEAST(
- $2::int - 1,
+ $2 - 1,
FLOOR(
- (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0))
- * $2::int
+ (
+ EXTRACT(EPOCH FROM (al.listened_at - b.start_time))
+ /
+ NULLIF(EXTRACT(EPOCH FROM (b.end_time - b.start_time)), 0)
+ ) * $2
)::int
- ) AS bucket_idx
- FROM listens l
- JOIN tracks t ON t.id = l.track_id
- JOIN artist_tracks at ON at.track_id = t.id
- CROSS JOIN stats s
- WHERE at.artist_id = $1
- AND s.start_time IS NOT NULL
+ ) AS bucket_idx,
+ b.start_time,
+ b.end_time
+ FROM artist_listens al
+ CROSS JOIN bounds b
+),
+aggregated AS (
+ SELECT
+ start_time
+ + (
+ bucket_idx * (end_time - start_time)
+ / $2
+ ) AS bucket_start,
+ start_time
+ + (
+ (bucket_idx + 1) * (end_time - start_time)
+ / $2
+ ) AS bucket_end,
+ COUNT(*) AS listen_count
+ FROM bucketed
+ GROUP BY bucket_idx, start_time, end_time
)
SELECT
- (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS bucket_start,
- (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end,
- COUNT(li.bucket_idx) AS listen_count
-FROM bucket_series bs
-CROSS JOIN stats s
-LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx
-WHERE s.start_time IS NOT NULL
-GROUP BY bs.idx, s.start_time, s.bucket_interval
-ORDER BY bs.idx
+ bucket_start::timestamptz,
+ bucket_end::timestamptz,
+ listen_count
+FROM aggregated
+ORDER BY bucket_start
`
type GetGroupedListensFromArtistParams struct {
ArtistID int32
- BucketCount int32
+ BucketCount interface{}
}
type GetGroupedListensFromArtistRow struct {
@@ -91,55 +98,63 @@ func (q *Queries) GetGroupedListensFromArtist(ctx context.Context, arg GetGroupe
}
const getGroupedListensFromRelease = `-- name: GetGroupedListensFromRelease :many
-WITH bounds AS (
+WITH artist_listens AS (
SELECT
- MIN(l.listened_at) AS start_time,
- NOW() AS end_time
+ l.listened_at
FROM listens l
JOIN tracks t ON t.id = l.track_id
WHERE t.release_id = $1
),
-stats AS (
+bounds AS (
SELECT
- start_time,
- end_time,
- EXTRACT(EPOCH FROM (end_time - start_time)) AS total_seconds,
- ((end_time - start_time) / $2::int) AS bucket_interval
- FROM bounds
+ MIN(listened_at) AS start_time,
+ MAX(listened_at) AS end_time
+ FROM artist_listens
),
-bucket_series AS (
- SELECT generate_series(0, $2::int - 1) AS idx
-),
-listen_indices AS (
+bucketed AS (
SELECT
LEAST(
- $2::int - 1,
+ $2 - 1,
FLOOR(
- (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0))
- * $2::int
+ (
+ EXTRACT(EPOCH FROM (al.listened_at - b.start_time))
+ /
+ NULLIF(EXTRACT(EPOCH FROM (b.end_time - b.start_time)), 0)
+ ) * $2
)::int
- ) AS bucket_idx
- FROM listens l
- JOIN tracks t ON t.id = l.track_id
- CROSS JOIN stats s
- WHERE t.release_id = $1
- AND s.start_time IS NOT NULL
+ ) AS bucket_idx,
+ b.start_time,
+ b.end_time
+ FROM artist_listens al
+ CROSS JOIN bounds b
+),
+aggregated AS (
+ SELECT
+ start_time
+ + (
+ bucket_idx * (end_time - start_time)
+ / $2
+ ) AS bucket_start,
+ start_time
+ + (
+ (bucket_idx + 1) * (end_time - start_time)
+ / $2
+ ) AS bucket_end,
+ COUNT(*) AS listen_count
+ FROM bucketed
+ GROUP BY bucket_idx, start_time, end_time
)
SELECT
- (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS bucket_start,
- (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end,
- COUNT(li.bucket_idx) AS listen_count
-FROM bucket_series bs
-CROSS JOIN stats s
-LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx
-WHERE s.start_time IS NOT NULL
-GROUP BY bs.idx, s.start_time, s.bucket_interval
-ORDER BY bs.idx
+ bucket_start::timestamptz,
+ bucket_end::timestamptz,
+ listen_count
+FROM aggregated
+ORDER BY bucket_start
`
type GetGroupedListensFromReleaseParams struct {
ReleaseID int32
- BucketCount int32
+ BucketCount interface{}
}
type GetGroupedListensFromReleaseRow struct {
@@ -169,55 +184,63 @@ func (q *Queries) GetGroupedListensFromRelease(ctx context.Context, arg GetGroup
}
const getGroupedListensFromTrack = `-- name: GetGroupedListensFromTrack :many
-WITH bounds AS (
+WITH artist_listens AS (
SELECT
- MIN(l.listened_at) AS start_time,
- NOW() AS end_time
+ l.listened_at
FROM listens l
JOIN tracks t ON t.id = l.track_id
WHERE t.id = $1
),
-stats AS (
+bounds AS (
SELECT
- start_time,
- end_time,
- EXTRACT(EPOCH FROM (end_time - start_time)) AS total_seconds,
- ((end_time - start_time) / $2::int) AS bucket_interval
- FROM bounds
+ MIN(listened_at) AS start_time,
+ MAX(listened_at) AS end_time
+ FROM artist_listens
),
-bucket_series AS (
- SELECT generate_series(0, $2::int - 1) AS idx
-),
-listen_indices AS (
+bucketed AS (
SELECT
LEAST(
- $2::int - 1,
+ $2 - 1,
FLOOR(
- (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0))
- * $2::int
+ (
+ EXTRACT(EPOCH FROM (al.listened_at - b.start_time))
+ /
+ NULLIF(EXTRACT(EPOCH FROM (b.end_time - b.start_time)), 0)
+ ) * $2
)::int
- ) AS bucket_idx
- FROM listens l
- JOIN tracks t ON t.id = l.track_id
- CROSS JOIN stats s
- WHERE t.id = $1
- AND s.start_time IS NOT NULL
+ ) AS bucket_idx,
+ b.start_time,
+ b.end_time
+ FROM artist_listens al
+ CROSS JOIN bounds b
+),
+aggregated AS (
+ SELECT
+ start_time
+ + (
+ bucket_idx * (end_time - start_time)
+ / $2
+ ) AS bucket_start,
+ start_time
+ + (
+ (bucket_idx + 1) * (end_time - start_time)
+ / $2
+ ) AS bucket_end,
+ COUNT(*) AS listen_count
+ FROM bucketed
+ GROUP BY bucket_idx, start_time, end_time
)
SELECT
- (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS bucket_start,
- (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end,
- COUNT(li.bucket_idx) AS listen_count
-FROM bucket_series bs
-CROSS JOIN stats s
-LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx
-WHERE s.start_time IS NOT NULL
-GROUP BY bs.idx, s.start_time, s.bucket_interval
-ORDER BY bs.idx
+ bucket_start::timestamptz,
+ bucket_end::timestamptz,
+ listen_count
+FROM aggregated
+ORDER BY bucket_start
`
type GetGroupedListensFromTrackParams struct {
ID int32
- BucketCount int32
+ BucketCount interface{}
}
type GetGroupedListensFromTrackRow struct {
diff --git a/internal/repository/release.sql.go b/internal/repository/release.sql.go
index f62e086..3d77eef 100644
--- a/internal/repository/release.sql.go
+++ b/internal/repository/release.sql.go
@@ -141,38 +141,6 @@ func (q *Queries) GetRelease(ctx context.Context, id int32) (GetReleaseRow, erro
return i, err
}
-const getReleaseAllTimeRank = `-- name: GetReleaseAllTimeRank :one
-SELECT
- release_id,
- rank
-FROM (
- SELECT
- x.release_id,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
- FROM (
- SELECT
- t.release_id,
- COUNT(*) AS listen_count
- FROM listens l
- JOIN tracks t ON l.track_id = t.id
- GROUP BY t.release_id
- ) x
- )
-WHERE release_id = $1
-`
-
-type GetReleaseAllTimeRankRow struct {
- ReleaseID int32
- Rank int64
-}
-
-func (q *Queries) GetReleaseAllTimeRank(ctx context.Context, releaseID int32) (GetReleaseAllTimeRankRow, error) {
- row := q.db.QueryRow(ctx, getReleaseAllTimeRank, releaseID)
- var i GetReleaseAllTimeRankRow
- err := row.Scan(&i.ReleaseID, &i.Rank)
- return i, err
-}
-
const getReleaseByArtistAndTitle = `-- name: GetReleaseByArtistAndTitle :one
SELECT r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title
FROM releases_with_title r
@@ -353,22 +321,17 @@ func (q *Queries) GetReleasesWithoutImages(ctx context.Context, arg GetReleasesW
const getTopReleasesFromArtist = `-- name: GetTopReleasesFromArtist :many
SELECT
- x.id, x.musicbrainz_id, x.image, x.various_artists, x.image_source, x.title, x.listen_count,
- get_artists_for_release(x.id) AS artists,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
-FROM (
- SELECT
- r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title,
- COUNT(*) AS listen_count
- FROM listens l
- JOIN tracks t ON l.track_id = t.id
- JOIN releases_with_title r ON t.release_id = r.id
- JOIN artist_releases ar ON r.id = ar.release_id
- WHERE ar.artist_id = $5
- AND l.listened_at BETWEEN $1 AND $2
- GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source
-) x
-ORDER BY listen_count DESC, x.id
+ r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title,
+ COUNT(*) AS listen_count,
+ get_artists_for_release(r.id) AS artists
+FROM listens l
+JOIN tracks t ON l.track_id = t.id
+JOIN releases_with_title r ON t.release_id = r.id
+JOIN artist_releases ar ON r.id = ar.release_id
+WHERE ar.artist_id = $5
+ AND l.listened_at BETWEEN $1 AND $2
+GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source
+ORDER BY listen_count DESC, r.id
LIMIT $3 OFFSET $4
`
@@ -389,7 +352,6 @@ type GetTopReleasesFromArtistRow struct {
Title string
ListenCount int64
Artists []byte
- Rank int64
}
func (q *Queries) GetTopReleasesFromArtist(ctx context.Context, arg GetTopReleasesFromArtistParams) ([]GetTopReleasesFromArtistRow, error) {
@@ -416,7 +378,6 @@ func (q *Queries) GetTopReleasesFromArtist(ctx context.Context, arg GetTopReleas
&i.Title,
&i.ListenCount,
&i.Artists,
- &i.Rank,
); err != nil {
return nil, err
}
@@ -430,20 +391,15 @@ func (q *Queries) GetTopReleasesFromArtist(ctx context.Context, arg GetTopReleas
const getTopReleasesPaginated = `-- name: GetTopReleasesPaginated :many
SELECT
- x.id, x.musicbrainz_id, x.image, x.various_artists, x.image_source, x.title, x.listen_count,
- get_artists_for_release(x.id) AS artists,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
-FROM (
- SELECT
- r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title,
- COUNT(*) AS listen_count
- FROM listens l
- JOIN tracks t ON l.track_id = t.id
- JOIN releases_with_title r ON t.release_id = r.id
- WHERE l.listened_at BETWEEN $1 AND $2
- GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source
-) x
-ORDER BY listen_count DESC, x.id
+ r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title,
+ COUNT(*) AS listen_count,
+ get_artists_for_release(r.id) AS artists
+FROM listens l
+JOIN tracks t ON l.track_id = t.id
+JOIN releases_with_title r ON t.release_id = r.id
+WHERE l.listened_at BETWEEN $1 AND $2
+GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source
+ORDER BY listen_count DESC, r.id
LIMIT $3 OFFSET $4
`
@@ -463,7 +419,6 @@ type GetTopReleasesPaginatedRow struct {
Title string
ListenCount int64
Artists []byte
- Rank int64
}
func (q *Queries) GetTopReleasesPaginated(ctx context.Context, arg GetTopReleasesPaginatedParams) ([]GetTopReleasesPaginatedRow, error) {
@@ -489,7 +444,6 @@ func (q *Queries) GetTopReleasesPaginated(ctx context.Context, arg GetTopRelease
&i.Title,
&i.ListenCount,
&i.Artists,
- &i.Rank,
); err != nil {
return nil, err
}
diff --git a/internal/repository/track.sql.go b/internal/repository/track.sql.go
index b376198..6b11b01 100644
--- a/internal/repository/track.sql.go
+++ b/internal/repository/track.sql.go
@@ -155,30 +155,22 @@ func (q *Queries) GetAllTracksFromArtist(ctx context.Context, artistID int32) ([
const getTopTracksByArtistPaginated = `-- name: GetTopTracksByArtistPaginated :many
SELECT
- x.track_id AS id,
+ t.id,
t.title,
t.musicbrainz_id,
t.release_id,
r.image,
- x.listen_count,
- get_artists_for_track(x.track_id) AS artists,
- x.rank
-FROM (
- SELECT
- l.track_id,
- COUNT(*) AS listen_count,
- RANK() OVER (ORDER BY COUNT(*) DESC) as rank
- FROM listens l
- JOIN artist_tracks at ON l.track_id = at.track_id
- WHERE l.listened_at BETWEEN $1 AND $2
- AND at.artist_id = $5
- GROUP BY l.track_id
- ORDER BY listen_count DESC
- LIMIT $3 OFFSET $4
-) x
-JOIN tracks_with_title t ON x.track_id = t.id
+ COUNT(*) AS listen_count,
+ get_artists_for_track(t.id) AS artists
+FROM listens l
+JOIN tracks_with_title t ON l.track_id = t.id
JOIN releases r ON t.release_id = r.id
-ORDER BY x.listen_count DESC, x.track_id
+JOIN artist_tracks at ON at.track_id = t.id
+WHERE l.listened_at BETWEEN $1 AND $2
+ AND at.artist_id = $5
+GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image
+ORDER BY listen_count DESC, t.id
+LIMIT $3 OFFSET $4
`
type GetTopTracksByArtistPaginatedParams struct {
@@ -197,7 +189,6 @@ type GetTopTracksByArtistPaginatedRow struct {
Image *uuid.UUID
ListenCount int64
Artists []byte
- Rank int64
}
func (q *Queries) GetTopTracksByArtistPaginated(ctx context.Context, arg GetTopTracksByArtistPaginatedParams) ([]GetTopTracksByArtistPaginatedRow, error) {
@@ -223,7 +214,6 @@ func (q *Queries) GetTopTracksByArtistPaginated(ctx context.Context, arg GetTopT
&i.Image,
&i.ListenCount,
&i.Artists,
- &i.Rank,
); err != nil {
return nil, err
}
@@ -237,30 +227,21 @@ func (q *Queries) GetTopTracksByArtistPaginated(ctx context.Context, arg GetTopT
const getTopTracksInReleasePaginated = `-- name: GetTopTracksInReleasePaginated :many
SELECT
- x.track_id AS id,
+ t.id,
t.title,
t.musicbrainz_id,
t.release_id,
r.image,
- x.listen_count,
- get_artists_for_track(x.track_id) AS artists,
- x.rank
-FROM (
- SELECT
- l.track_id,
- COUNT(*) AS listen_count,
- RANK() OVER (ORDER BY COUNT(*) DESC) as rank
- FROM listens l
- JOIN tracks t ON l.track_id = t.id
- WHERE l.listened_at BETWEEN $1 AND $2
- AND t.release_id = $5
- GROUP BY l.track_id
- ORDER BY listen_count DESC
- LIMIT $3 OFFSET $4
-) x
-JOIN tracks_with_title t ON x.track_id = t.id
+ COUNT(*) AS listen_count,
+ get_artists_for_track(t.id) AS artists
+FROM listens l
+JOIN tracks_with_title t ON l.track_id = t.id
JOIN releases r ON t.release_id = r.id
-ORDER BY x.listen_count DESC, x.track_id
+WHERE l.listened_at BETWEEN $1 AND $2
+ AND t.release_id = $5
+GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image
+ORDER BY listen_count DESC, t.id
+LIMIT $3 OFFSET $4
`
type GetTopTracksInReleasePaginatedParams struct {
@@ -279,7 +260,6 @@ type GetTopTracksInReleasePaginatedRow struct {
Image *uuid.UUID
ListenCount int64
Artists []byte
- Rank int64
}
func (q *Queries) GetTopTracksInReleasePaginated(ctx context.Context, arg GetTopTracksInReleasePaginatedParams) ([]GetTopTracksInReleasePaginatedRow, error) {
@@ -305,7 +285,6 @@ func (q *Queries) GetTopTracksInReleasePaginated(ctx context.Context, arg GetTop
&i.Image,
&i.ListenCount,
&i.Artists,
- &i.Rank,
); err != nil {
return nil, err
}
@@ -319,28 +298,20 @@ func (q *Queries) GetTopTracksInReleasePaginated(ctx context.Context, arg GetTop
const getTopTracksPaginated = `-- name: GetTopTracksPaginated :many
SELECT
- x.track_id AS id,
+ t.id,
t.title,
t.musicbrainz_id,
t.release_id,
r.image,
- x.listen_count,
- get_artists_for_track(x.track_id) AS artists,
- x.rank
-FROM (
- SELECT
- track_id,
- COUNT(*) AS listen_count,
- RANK() OVER (ORDER BY COUNT(*) DESC) as rank
- FROM listens
- WHERE listened_at BETWEEN $1 AND $2
- GROUP BY track_id
- ORDER BY listen_count DESC
- LIMIT $3 OFFSET $4
-) x
-JOIN tracks_with_title t ON x.track_id = t.id
+ COUNT(*) AS listen_count,
+ get_artists_for_track(t.id) AS artists
+FROM listens l
+JOIN tracks_with_title t ON l.track_id = t.id
JOIN releases r ON t.release_id = r.id
-ORDER BY x.listen_count DESC, x.track_id
+WHERE l.listened_at BETWEEN $1 AND $2
+GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image
+ORDER BY listen_count DESC, t.id
+LIMIT $3 OFFSET $4
`
type GetTopTracksPaginatedParams struct {
@@ -358,7 +329,6 @@ type GetTopTracksPaginatedRow struct {
Image *uuid.UUID
ListenCount int64
Artists []byte
- Rank int64
}
func (q *Queries) GetTopTracksPaginated(ctx context.Context, arg GetTopTracksPaginatedParams) ([]GetTopTracksPaginatedRow, error) {
@@ -383,7 +353,6 @@ func (q *Queries) GetTopTracksPaginated(ctx context.Context, arg GetTopTracksPag
&i.Image,
&i.ListenCount,
&i.Artists,
- &i.Rank,
); err != nil {
return nil, err
}
@@ -430,37 +399,6 @@ func (q *Queries) GetTrack(ctx context.Context, id int32) (GetTrackRow, error) {
return i, err
}
-const getTrackAllTimeRank = `-- name: GetTrackAllTimeRank :one
-SELECT
- id,
- rank
-FROM (
- SELECT
- x.id,
- RANK() OVER (ORDER BY x.listen_count DESC) AS rank
- FROM (
- SELECT
- t.id,
- COUNT(*) AS listen_count
- FROM listens l
- JOIN tracks_with_title t ON l.track_id = t.id
- GROUP BY t.id) x
- ) y
-WHERE id = $1
-`
-
-type GetTrackAllTimeRankRow struct {
- ID int32
- Rank int64
-}
-
-func (q *Queries) GetTrackAllTimeRank(ctx context.Context, id int32) (GetTrackAllTimeRankRow, error) {
- row := q.db.QueryRow(ctx, getTrackAllTimeRank, id)
- var i GetTrackAllTimeRankRow
- err := row.Scan(&i.ID, &i.Rank)
- return i, err
-}
-
const getTrackByMbzID = `-- name: GetTrackByMbzID :one
SELECT id, musicbrainz_id, duration, release_id, title FROM tracks_with_title
WHERE musicbrainz_id = $1 LIMIT 1
diff --git a/internal/summary/summary.go b/internal/summary/summary.go
index 7a2b9d7..518121f 100644
--- a/internal/summary/summary.go
+++ b/internal/summary/summary.go
@@ -9,20 +9,20 @@ import (
)
type Summary struct {
- Title string `json:"title,omitempty"`
- TopArtists []db.RankedItem[*models.Artist] `json:"top_artists"` // ListenCount and TimeListened are overriden with stats from timeframe
- TopAlbums []db.RankedItem[*models.Album] `json:"top_albums"` // ListenCount and TimeListened are overriden with stats from timeframe
- TopTracks []db.RankedItem[*models.Track] `json:"top_tracks"` // ListenCount and TimeListened are overriden with stats from timeframe
- MinutesListened int `json:"minutes_listened"`
- AvgMinutesPerDay int `json:"avg_minutes_listened_per_day"`
- Plays int `json:"plays"`
- AvgPlaysPerDay float32 `json:"avg_plays_per_day"`
- UniqueTracks int `json:"unique_tracks"`
- UniqueAlbums int `json:"unique_albums"`
- UniqueArtists int `json:"unique_artists"`
- NewTracks int `json:"new_tracks"`
- NewAlbums int `json:"new_albums"`
- NewArtists int `json:"new_artists"`
+ Title string `json:"title,omitempty"`
+ TopArtists []*models.Artist `json:"top_artists"` // ListenCount and TimeListened are overriden with stats from timeframe
+ TopAlbums []*models.Album `json:"top_albums"` // ListenCount and TimeListened are overriden with stats from timeframe
+ TopTracks []*models.Track `json:"top_tracks"` // ListenCount and TimeListened are overriden with stats from timeframe
+ MinutesListened int `json:"minutes_listened"`
+ AvgMinutesPerDay int `json:"avg_minutes_listened_per_day"`
+ Plays int `json:"plays"`
+ AvgPlaysPerDay float32 `json:"avg_plays_per_day"`
+ UniqueTracks int `json:"unique_tracks"`
+ UniqueAlbums int `json:"unique_albums"`
+ UniqueArtists int `json:"unique_artists"`
+ NewTracks int `json:"new_tracks"`
+ NewAlbums int `json:"new_albums"`
+ NewArtists int `json:"new_artists"`
}
func GenerateSummary(ctx context.Context, store db.DB, userId int32, timeframe db.Timeframe, title string) (summary *Summary, err error) {
@@ -37,16 +37,16 @@ func GenerateSummary(ctx context.Context, store db.DB, userId int32, timeframe d
summary.TopArtists = topArtists.Items
// replace ListenCount and TimeListened with stats from timeframe
for i, artist := range summary.TopArtists {
- timelistened, err := store.CountTimeListenedToItem(ctx, db.TimeListenedOpts{ArtistID: artist.Item.ID, Timeframe: timeframe})
+ timelistened, err := store.CountTimeListenedToItem(ctx, db.TimeListenedOpts{ArtistID: artist.ID, Timeframe: timeframe})
if err != nil {
return nil, fmt.Errorf("GenerateSummary: %w", err)
}
- listens, err := store.CountListensToItem(ctx, db.TimeListenedOpts{ArtistID: artist.Item.ID, Timeframe: timeframe})
+ listens, err := store.CountListensToItem(ctx, db.TimeListenedOpts{ArtistID: artist.ID, Timeframe: timeframe})
if err != nil {
return nil, fmt.Errorf("GenerateSummary: %w", err)
}
- summary.TopArtists[i].Item.TimeListened = timelistened
- summary.TopArtists[i].Item.ListenCount = listens
+ summary.TopArtists[i].TimeListened = timelistened
+ summary.TopArtists[i].ListenCount = listens
}
topAlbums, err := store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Page: 1, Limit: 5, Timeframe: timeframe})
@@ -56,16 +56,16 @@ func GenerateSummary(ctx context.Context, store db.DB, userId int32, timeframe d
summary.TopAlbums = topAlbums.Items
// replace ListenCount and TimeListened with stats from timeframe
for i, album := range summary.TopAlbums {
- timelistened, err := store.CountTimeListenedToItem(ctx, db.TimeListenedOpts{AlbumID: album.Item.ID, Timeframe: timeframe})
+ timelistened, err := store.CountTimeListenedToItem(ctx, db.TimeListenedOpts{AlbumID: album.ID, Timeframe: timeframe})
if err != nil {
return nil, fmt.Errorf("GenerateSummary: %w", err)
}
- listens, err := store.CountListensToItem(ctx, db.TimeListenedOpts{AlbumID: album.Item.ID, Timeframe: timeframe})
+ listens, err := store.CountListensToItem(ctx, db.TimeListenedOpts{AlbumID: album.ID, Timeframe: timeframe})
if err != nil {
return nil, fmt.Errorf("GenerateSummary: %w", err)
}
- summary.TopAlbums[i].Item.TimeListened = timelistened
- summary.TopAlbums[i].Item.ListenCount = listens
+ summary.TopAlbums[i].TimeListened = timelistened
+ summary.TopAlbums[i].ListenCount = listens
}
topTracks, err := store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Page: 1, Limit: 5, Timeframe: timeframe})
@@ -75,16 +75,16 @@ func GenerateSummary(ctx context.Context, store db.DB, userId int32, timeframe d
summary.TopTracks = topTracks.Items
// replace ListenCount and TimeListened with stats from timeframe
for i, track := range summary.TopTracks {
- timelistened, err := store.CountTimeListenedToItem(ctx, db.TimeListenedOpts{TrackID: track.Item.ID, Timeframe: timeframe})
+ timelistened, err := store.CountTimeListenedToItem(ctx, db.TimeListenedOpts{TrackID: track.ID, Timeframe: timeframe})
if err != nil {
return nil, fmt.Errorf("GenerateSummary: %w", err)
}
- listens, err := store.CountListensToItem(ctx, db.TimeListenedOpts{TrackID: track.Item.ID, Timeframe: timeframe})
+ listens, err := store.CountListensToItem(ctx, db.TimeListenedOpts{TrackID: track.ID, Timeframe: timeframe})
if err != nil {
return nil, fmt.Errorf("GenerateSummary: %w", err)
}
- summary.TopTracks[i].Item.TimeListened = timelistened
- summary.TopTracks[i].Item.ListenCount = listens
+ summary.TopTracks[i].TimeListened = timelistened
+ summary.TopTracks[i].ListenCount = listens
}
t1, t2 := db.TimeframeToTimeRange(timeframe)
diff --git a/test_assets/koito_export_test.json b/test_assets/koito_export_test.json
index e2cd8ea..b7ce463 100644
--- a/test_assets/koito_export_test.json
+++ b/test_assets/koito_export_test.json
@@ -18,7 +18,7 @@
},
"album": {
"image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg",
- "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161",
+ "mbid": null,
"aliases": [
{
"alias": "American Football (LP3)",
@@ -70,7 +70,7 @@
},
"album": {
"image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg",
- "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161",
+ "mbid": null,
"aliases": [
{
"alias": "American Football (LP3)",
@@ -122,7 +122,7 @@
},
"album": {
"image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg",
- "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161",
+ "mbid": null,
"aliases": [
{
"alias": "American Football (LP3)",
@@ -174,7 +174,7 @@
},
"album": {
"image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg",
- "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161",
+ "mbid": null,
"aliases": [
{
"alias": "American Football (LP3)",
@@ -226,7 +226,7 @@
},
"album": {
"image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg",
- "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161",
+ "mbid": null,
"aliases": [
{
"alias": "American Football (LP3)",
@@ -278,7 +278,7 @@
},
"album": {
"image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg",
- "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161",
+ "mbid": null,
"aliases": [
{
"alias": "American Football (LP3)",
@@ -330,7 +330,7 @@
},
"album": {
"image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg",
- "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161",
+ "mbid": null,
"aliases": [
{
"alias": "American Football (LP3)",
@@ -703,4 +703,4 @@
]
}
]
-}
+}
\ No newline at end of file
diff --git a/test_assets/listenbrainz_shoko1_123456789.zip b/test_assets/listenbrainz_shoko1_123456789.zip
deleted file mode 100644
index 14c97a2..0000000
Binary files a/test_assets/listenbrainz_shoko1_123456789.zip and /dev/null differ