diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..d5ed451 --- /dev/null +++ b/.env.example @@ -0,0 +1,5 @@ +KOITO_ALLOWED_HOSTS=* +KOITO_LOG_LEVEL=debug +KOITO_CONFIG_DIR=test_config_dir +KOITO_DATABASE_URL=postgres://postgres:secret@localhost:5432?sslmode=disable +TZ=Etc/UTC diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 0000000..fbf205d --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,3 @@ +# These are supported funding model platforms + +ko_fi: gabehf diff --git a/.github/workflows/astro.yml b/.github/workflows/astro.yml index 2da5fc4..428b7b8 100644 --- a/.github/workflows/astro.yml +++ b/.github/workflows/astro.yml @@ -2,10 +2,13 @@ name: Deploy to GitHub Pages on: push: - branches: [main] + tags: + - "v*" paths: - - 'docs/**' - - '.github/workflows/**' + - "docs/**" + - ".github/workflows/**" + + workflow_dispatch: permissions: contents: read @@ -21,9 +24,9 @@ jobs: - name: Install, build, and upload your site output uses: withastro/action@v4 with: - path: ./docs # The root location of your Astro project inside the repository. (optional) - node-version: 20 # The specific version of Node that should be used to build your site. Defaults to 22. (optional) - package-manager: yarn@1.22.22 # The Node package manager that should be used to install dependencies and build your site. Automatically detected based on your lockfile. (optional) + path: ./docs # The root location of your Astro project inside the repository. (optional) + node-version: 20 # The specific version of Node that should be used to build your site. Defaults to 22. (optional) + package-manager: yarn@1.22.22 # The Node package manager that should be used to install dependencies and build your site. Automatically detected based on your lockfile. 
(optional) deploy: needs: build @@ -34,4 +37,4 @@ jobs: steps: - name: Deploy to GitHub Pages id: deployment - uses: actions/deploy-pages@v4 \ No newline at end of file + uses: actions/deploy-pages@v4 diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 95d893e..466a4f6 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -17,6 +17,7 @@ on: - main paths-ignore: - "docs/**" + - "README.md" workflow_dispatch: diff --git a/.gitignore b/.gitignore index bade026..083bb78 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,2 @@ test_config_dir +.env diff --git a/Makefile b/Makefile index fbca22e..99455ac 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,8 @@ +ifneq (,$(wildcard ./.env)) + include .env + export +endif + .PHONY: all test clean client postgres.schemadump: @@ -10,7 +15,7 @@ postgres.schemadump: -v --dbname="koitodb" -f "/tmp/dump/schema.sql" postgres.run: - docker run --name koito-db -p 5432:5432 -e POSTGRES_PASSWORD=secret -d postgres + docker run --name koito-db -p 5432:5432 -v koito_dev_db:/var/lib/postgresql -e POSTGRES_PASSWORD=secret -d postgres postgres.run-scratch: docker run --name koito-scratch -p 5433:5432 -e POSTGRES_PASSWORD=secret -d postgres @@ -28,10 +33,10 @@ postgres.remove-scratch: docker stop koito-scratch && docker rm koito-scratch api.debug: postgres.start - KOITO_ALLOWED_HOSTS=* KOITO_LOG_LEVEL=debug KOITO_CONFIG_DIR=test_config_dir KOITO_DATABASE_URL=postgres://postgres:secret@localhost:5432?sslmode=disable go run cmd/api/main.go + go run cmd/api/main.go api.scratch: postgres.run-scratch - KOITO_ALLOWED_HOSTS=* KOITO_LOG_LEVEL=debug KOITO_CONFIG_DIR=test_config_dir/scratch KOITO_DATABASE_URL=postgres://postgres:secret@localhost:5433?sslmode=disable go run cmd/api/main.go + KOITO_DATABASE_URL=postgres://postgres:secret@localhost:5433?sslmode=disable go run cmd/api/main.go api.test: go test ./... 
-timeout 60s diff --git a/README.md b/README.md index 2bc10ce..b51b2ff 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,21 @@ -# Koito +
+ +![Koito logo](https://github.com/user-attachments/assets/bd69a050-b40f-4da7-8ff1-4607554bfd6d) + +*Koito (小糸) is a Japanese surname. It is also homophonous with the words 恋と (koi to), meaning "and/with love".* + +
+ +
+ + [![Ko-Fi](https://img.shields.io/badge/Ko--fi-F16061?style=for-the-badge&logo=ko-fi&logoColor=white)](https://ko-fi.com/gabehf) + +
Koito is a modern, themeable ListenBrainz-compatible scrobbler for self-hosters who want control over their data and insights into their listening habits. It supports relaying to other compatible scrobblers, so you can try it safely without replacing your current setup. -> This project is currently pre-release, and therefore you can expect rapid development and some bugs. If you don't want to replace your current scrobbler +> This project is under active development and still considered "unstable", and therefore you can expect some bugs. If you don't want to replace your current scrobbler with Koito quite yet, you can [set up a relay](https://koito.io/guides/scrobbler/#set-up-a-relay) from Koito to another ListenBrainz-compatible scrobbler. This is what I've been doing for the entire development of this app and it hasn't failed me once. Or, you can always use something like [multi-scrobbler](https://github.com/FoxxMD/multi-scrobbler). @@ -23,8 +35,9 @@ You can view my public instance with my listening data at https://koito.mnrva.de ## Screenshots ![screenshot one](assets/screenshot1.png) -![screenshot two](assets/screenshot2.png) -![screenshot three](assets/screenshot3.png) +image +image + ## Installation @@ -75,6 +88,16 @@ There are currently some known issues that I am actively working on, in addition If you have any feature ideas, open a GitHub issue to let me know. I'm sorting through ideas to decide which data visualizations and customization options to add next. +## Star History + + + + + + Star History Chart + + + ## Albums that fueled development + notes More relevant here than any of my other projects... @@ -84,5 +107,4 @@ Not just during development, you can see my complete listening data on my [live #### Random notes - I find it a little annoying when READMEs use emoji but everyone else is doing it so I felt like I had to... 
-- It's funny how you can see the days in my listening history when I was just working on this project because they have way more listens than other days. -- About 50% of the reason I built this was minor/not-so-minor greivances with Maloja. Could I have just contributed to Maloja? Maybe, but I like building stuff and I like Koito's UI a lot more anyways. \ No newline at end of file +- About 50% of the reason I built this was minor/not-so-minor greivances with Maloja. Could I have just contributed to Maloja? Maybe, but I like building stuff and I like Koito's UI a lot more anyways. diff --git a/client/api/api.ts b/client/api/api.ts index 27d631a..bd2430b 100644 --- a/client/api/api.ts +++ b/client/api/api.ts @@ -23,6 +23,12 @@ interface timeframe { to?: number; period?: string; } +interface getInterestArgs { + buckets: number; + artist_id: number; + album_id: number; + track_id: number; +} async function handleJson(r: Response): Promise { if (!r.ok) { @@ -42,32 +48,32 @@ async function getLastListens( async function getTopTracks( args: getItemsArgs -): Promise> { +): Promise>> { let url = `/apis/web/v1/top-tracks?period=${args.period}&limit=${args.limit}&page=${args.page}`; if (args.artist_id) url += `&artist_id=${args.artist_id}`; else if (args.album_id) url += `&album_id=${args.album_id}`; const r = await fetch(url); - return handleJson>(r); + return handleJson>>(r); } async function getTopAlbums( args: getItemsArgs -): Promise> { +): Promise>> { let url = `/apis/web/v1/top-albums?period=${args.period}&limit=${args.limit}&page=${args.page}`; if (args.artist_id) url += `&artist_id=${args.artist_id}`; const r = await fetch(url); - return handleJson>(r); + return handleJson>>(r); } async function getTopArtists( args: getItemsArgs -): Promise> { +): Promise>> { const url = `/apis/web/v1/top-artists?period=${args.period}&limit=${args.limit}&page=${args.page}`; const r = await fetch(url); - return handleJson>(r); + return handleJson>>(r); } async function getActivity( 
@@ -79,6 +85,13 @@ async function getActivity( return handleJson(r); } +async function getInterest(args: getInterestArgs): Promise { + const r = await fetch( + `/apis/web/v1/interest?buckets=${args.buckets}&album_id=${args.album_id}&artist_id=${args.artist_id}&track_id=${args.track_id}` + ); + return handleJson(r); +} + async function getStats(period: string): Promise { const r = await fetch(`/apis/web/v1/stats?period=${period}`); @@ -270,6 +283,19 @@ function setPrimaryAlias( body: form, }); } +function updateMbzId( + type: string, + id: number, + mbzid: string +): Promise { + const form = new URLSearchParams(); + form.append(`${type}_id`, String(id)); + form.append("mbz_id", mbzid); + return fetch(`/apis/web/v1/mbzid`, { + method: "PATCH", + body: form, + }); +} function getAlbum(id: number): Promise { return fetch(`/apis/web/v1/album?id=${id}`).then( (r) => r.json() as Promise @@ -302,6 +328,7 @@ export { getTopAlbums, getTopArtists, getActivity, + getInterest, getStats, search, replaceImage, @@ -318,6 +345,7 @@ export { createAlias, deleteAlias, setPrimaryAlias, + updateMbzId, getApiKeys, createApiKey, deleteApiKey, @@ -339,6 +367,7 @@ type Track = { musicbrainz_id: string; time_listened: number; first_listen: number; + all_time_rank: number; }; type Artist = { id: number; @@ -350,6 +379,7 @@ type Artist = { time_listened: number; first_listen: number; is_primary: boolean; + all_time_rank: number; }; type Album = { id: number; @@ -361,6 +391,7 @@ type Album = { musicbrainz_id: string; time_listened: number; first_listen: number; + all_time_rank: number; }; type Alias = { id: number; @@ -379,10 +410,19 @@ type PaginatedResponse = { current_page: number; items_per_page: number; }; +type Ranked = { + item: T; + rank: number; +}; type ListenActivityItem = { start_time: Date; listens: number; }; +type InterestBucket = { + bucket_start: Date; + bucket_end: Date; + listen_count: number; +}; type SimpleArtists = { name: string; id: number; @@ -422,9 +462,9 @@ type 
NowPlaying = { }; type RewindStats = { title: string; - top_artists: Artist[]; - top_albums: Album[]; - top_tracks: Track[]; + top_artists: Ranked[]; + top_albums: Ranked[]; + top_tracks: Ranked[]; minutes_listened: number; avg_minutes_listened_per_day: number; plays: number; @@ -440,13 +480,16 @@ type RewindStats = { export type { getItemsArgs, getActivityArgs, + getInterestArgs, Track, Artist, Album, Listen, SearchResponse, PaginatedResponse, + Ranked, ListenActivityItem, + InterestBucket, User, Alias, ApiKey, diff --git a/client/app/app.css b/client/app/app.css index 217e955..15cfbc0 100644 --- a/client/app/app.css +++ b/client/app/app.css @@ -58,6 +58,7 @@ --header-sm: 16px; --header-xl-weight: 600; --header-weight: 600; + --header-line-height: 3rem; } @media (min-width: 60rem) { @@ -68,6 +69,7 @@ --header-sm: 16px; --header-xl-weight: 600; --header-weight: 600; + --header-line-height: 1.3em; } } @@ -98,6 +100,7 @@ h1 { font-family: "League Spartan"; font-weight: var(--header-weight); font-size: var(--header-xl); + line-height: var(--header-line-height); } h2 { font-family: "League Spartan"; @@ -130,30 +133,21 @@ h4 { text-decoration: underline; } -input[type="text"] { - border: 1px solid var(--color-bg); -} -input[type="text"]:focus { - outline: none; - border: 1px solid var(--color-fg-tertiary); -} +input[type="text"], +input[type="password"], textarea { border: 1px solid var(--color-bg); } -textarea:focus { - outline: none; - border: 1px solid var(--color-fg-tertiary); +input[type="checkbox"] { + height: fit-content; } -input[type="password"] { - border: 1px solid var(--color-bg); -} -input[type="password"]:focus { - outline: none; - border: 1px solid var(--color-fg-tertiary); -} -input[type="checkbox"]:focus { - outline: none; - border: 1px solid var(--color-fg-tertiary); +input:focus-visible, +button:focus-visible, +a:focus-visible, +select:focus-visible, +textarea:focus-visible { + border-color: transparent; + outline: 2px solid var(--color-fg-tertiary); 
} button:hover { diff --git a/client/app/components/ActivityGrid.tsx b/client/app/components/ActivityGrid.tsx index 7706694..0d39e2c 100644 --- a/client/app/components/ActivityGrid.tsx +++ b/client/app/components/ActivityGrid.tsx @@ -63,19 +63,19 @@ export default function ActivityGrid({ queryFn: ({ queryKey }) => getActivity(queryKey[1] as getActivityArgs), }); - const { theme, themeName } = useTheme(); + const { theme } = useTheme(); const color = getPrimaryColor(theme); if (isPending) { return ( -
+

Activity

Loading...

); } else if (isError) { return ( -
+

Activity

Error: {error.message}

@@ -129,14 +129,7 @@ export default function ActivityGrid({ } v = Math.min(v, t); - if (themeName === "pearl") { - // special case for the only light theme lol - // could be generalized by pragmatically comparing the - // lightness of the bg vs the primary but eh - return (t - v) / t; - } else { - return ((v - t) / t) * 0.8; - } + return ((v - t) / t) * 0.8; }; const CHUNK_SIZE = 26 * 7; diff --git a/client/app/components/AllTimeStats.tsx b/client/app/components/AllTimeStats.tsx index 8f1bc40..6a3ebac 100644 --- a/client/app/components/AllTimeStats.tsx +++ b/client/app/components/AllTimeStats.tsx @@ -7,10 +7,12 @@ export default function AllTimeStats() { queryFn: ({ queryKey }) => getStats(queryKey[1]), }); + const header = "All time stats"; + if (isPending) { return ( -
-

All Time Stats

+
+

{header}

Loading...

); @@ -18,7 +20,7 @@ export default function AllTimeStats() { return ( <>
-

All Time Stats

+

{header}

Error: {error.message}

@@ -29,7 +31,7 @@ export default function AllTimeStats() { return (
-

All Time Stats

+

{header}

getTopAlbums(queryKey[1] as getItemsArgs), }); @@ -39,16 +39,20 @@ export default function ArtistAlbums({ artistId, name, period }: Props) {

Albums featuring {name}

{data.items.map((item) => ( - + {item.title}
-

{item.title}

+

{item.item.title}

- {item.listen_count} play{item.listen_count > 1 ? "s" : ""} + {item.item.listen_count} play + {item.item.listen_count > 1 ? "s" : ""}

diff --git a/client/app/components/InterestGraph.tsx b/client/app/components/InterestGraph.tsx new file mode 100644 index 0000000..9e2baaf --- /dev/null +++ b/client/app/components/InterestGraph.tsx @@ -0,0 +1,112 @@ +import { useQuery } from "@tanstack/react-query"; +import { getInterest, type getInterestArgs } from "api/api"; +import { useTheme } from "~/hooks/useTheme"; +import type { Theme } from "~/styles/themes.css"; +import { Area, AreaChart } from "recharts"; +import { RechartsDevtools } from "@recharts/devtools"; + +function getPrimaryColor(theme: Theme): string { + const value = theme.primary; + const rgbMatch = value.match( + /^rgb\(\s*(\d{1,3})\s*,\s*(\d{1,3})\s*,\s*(\d{1,3})\s*\)$/ + ); + if (rgbMatch) { + const [, r, g, b] = rgbMatch.map(Number); + return "#" + [r, g, b].map((n) => n.toString(16).padStart(2, "0")).join(""); + } + + return value; +} +interface Props { + buckets?: number; + artistId?: number; + albumId?: number; + trackId?: number; +} + +export default function InterestGraph({ + buckets = 16, + artistId = 0, + albumId = 0, + trackId = 0, +}: Props) { + const { isPending, isError, data, error } = useQuery({ + queryKey: [ + "interest", + { + buckets: buckets, + artist_id: artistId, + album_id: albumId, + track_id: trackId, + }, + ], + queryFn: ({ queryKey }) => getInterest(queryKey[1] as getInterestArgs), + }); + + const { theme } = useTheme(); + const color = getPrimaryColor(theme); + + if (isPending) { + return ( +
+

Interest over time

+

Loading...

+
+ ); + } else if (isError) { + return ( +
+

Interest over time

+

Error: {error.message}

+
+ ); + } + + // Note: I would really like to have the animation for the graph, however + // the line graph can get weirdly clipped before the animation is done + // so I think I just have to remove it for now. + + return ( +
+

Interest over time

+ + + + + + + + + + +
+ ); +} diff --git a/client/app/components/LastPlays.tsx b/client/app/components/LastPlays.tsx index 9a719d0..ace86fd 100644 --- a/client/app/components/LastPlays.tsx +++ b/client/app/components/LastPlays.tsx @@ -42,6 +42,8 @@ export default function LastPlays(props: Props) { queryFn: () => getNowPlaying(), }); + const header = "Last played"; + const [items, setItems] = useState(null); const handleDelete = async (listen: Listen) => { @@ -63,14 +65,14 @@ export default function LastPlays(props: Props) { if (isPending) { return (
-

Last Played

+

{header}

Loading...

); } else if (isError) { return (
-

Last Played

+

{header}

Error: {error.message}

); @@ -86,7 +88,7 @@ export default function LastPlays(props: Props) { return (

- Last Played + {header}

diff --git a/client/app/components/TopAlbums.tsx b/client/app/components/TopAlbums.tsx index 052e76a..d8a3b00 100644 --- a/client/app/components/TopAlbums.tsx +++ b/client/app/components/TopAlbums.tsx @@ -30,17 +30,19 @@ export default function TopAlbums(props: Props) { queryFn: ({ queryKey }) => getTopAlbums(queryKey[1] as getItemsArgs), }); + const header = "Top albums"; + if (isPending) { return (
-

Top Albums

+

{header}

Loading...

); } else if (isError) { return (
-

Top Albums

+

{header}

Error: {error.message}

); @@ -54,7 +56,7 @@ export default function TopAlbums(props: Props) { props.artistId ? `&artist_id=${props.artistId}` : "" }`} > - Top Albums + {header}
diff --git a/client/app/components/TopArtists.tsx b/client/app/components/TopArtists.tsx index c169448..a1db871 100644 --- a/client/app/components/TopArtists.tsx +++ b/client/app/components/TopArtists.tsx @@ -21,17 +21,19 @@ export default function TopArtists(props: Props) { queryFn: ({ queryKey }) => getTopArtists(queryKey[1] as getItemsArgs), }); + const header = "Top artists"; + if (isPending) { return (
-

Top Artists

+

{header}

Loading...

); } else if (isError) { return (
-

Top Artists

+

{header}

Error: {error.message}

); @@ -40,9 +42,7 @@ export default function TopArtists(props: Props) { return (

- - Top Artists - + {header}

diff --git a/client/app/components/TopItemList.tsx b/client/app/components/TopItemList.tsx index 5b20d39..4d355b7 100644 --- a/client/app/components/TopItemList.tsx +++ b/client/app/components/TopItemList.tsx @@ -1,102 +1,171 @@ import { Link, useNavigate } from "react-router"; import ArtistLinks from "./ArtistLinks"; -import { imageUrl, type Album, type Artist, type Track, type PaginatedResponse } from "api/api"; +import { + imageUrl, + type Album, + type Artist, + type Track, + type PaginatedResponse, + type Ranked, +} from "api/api"; type Item = Album | Track | Artist; -interface Props { - data: PaginatedResponse - separators?: ConstrainBoolean - type: "album" | "track" | "artist"; - className?: string, +interface Props> { + data: PaginatedResponse; + separators?: ConstrainBoolean; + ranked?: boolean; + type: "album" | "track" | "artist"; + className?: string; } -export default function TopItemList({ data, separators, type, className }: Props) { +export default function TopItemList>({ + data, + separators, + type, + className, + ranked, +}: Props) { + return ( +
+ {data.items.map((item, index) => { + const key = `${type}-${item.item.id}`; + return ( +
+ +
+ ); + })} +
+ ); +} - return ( -
- {data.items.map((item, index) => { - const key = `${type}-${item.id}`; - return ( -
- -
- ); - })} +function ItemCard({ + item, + type, + rank, + ranked, +}: { + item: Item; + type: "album" | "track" | "artist"; + rank: number; + ranked?: boolean; +}) { + const itemClasses = `flex items-center gap-2`; + + switch (type) { + case "album": { + const album = item as Album; + + return ( +
+ {ranked &&
{rank}
} + + {album.title} + +
+ + {album.title} + +
+ {album.is_various_artists ? ( + Various Artists + ) : ( +
+ +
+ )} +
{album.listen_count} plays
+
- ); -} - -function ItemCard({ item, type }: { item: Item; type: "album" | "track" | "artist" }) { - - const itemClasses = `flex items-center gap-2` - - switch (type) { - case "album": { - const album = item as Album; - - return ( -
- - {album.title} - -
- - {album.title} - -
- {album.is_various_artists ? - Various Artists - : -
- -
- } -
{album.listen_count} plays
-
-
- ); - } - case "track": { - const track = item as Track; - - return ( -
- - {track.title} - -
- - {track.title} - -
-
- -
-
{track.listen_count} plays
-
-
- ); - } - case "artist": { - const artist = item as Artist; - return ( -
- - {artist.name} -
- {artist.name} -
{artist.listen_count} plays
-
- -
- ); - } + ); } + case "track": { + const track = item as Track; + + return ( +
+ {ranked &&
{rank}
} + + {track.title} + +
+ + {track.title} + +
+
+ +
+
{track.listen_count} plays
+
+
+ ); + } + case "artist": { + const artist = item as Artist; + return ( +
+ {ranked &&
{rank}
} + + {artist.name} +
+ {artist.name} +
+ {artist.listen_count} plays +
+
+ +
+ ); + } + } } diff --git a/client/app/components/TopTracks.tsx b/client/app/components/TopTracks.tsx index 85fef79..bfe31ca 100644 --- a/client/app/components/TopTracks.tsx +++ b/client/app/components/TopTracks.tsx @@ -28,17 +28,19 @@ const TopTracks = (props: Props) => { queryFn: ({ queryKey }) => getTopTracks(queryKey[1] as getItemsArgs), }); + const header = "Top tracks"; + if (isPending) { return (
-

Top Tracks

+

{header}

Loading...

); } else if (isError) { return (
-

Top Tracks

+

{header}

Error: {error.message}

); @@ -53,7 +55,7 @@ const TopTracks = (props: Props) => {

- Top Tracks + {header}

diff --git a/client/app/components/icons/MbzIcon.tsx b/client/app/components/icons/MbzIcon.tsx new file mode 100644 index 0000000..1ce66ad --- /dev/null +++ b/client/app/components/icons/MbzIcon.tsx @@ -0,0 +1,23 @@ +interface Props { + size: number; + hover?: boolean; +} +export default function MbzIcon({ size, hover }: Props) { + let classNames = ""; + if (hover) { + classNames += "icon-hover-fill"; + } + return ( +
+ + + +
+ ); +} diff --git a/client/app/components/modals/DeleteModal.tsx b/client/app/components/modals/DeleteModal.tsx index 06bfdaf..227951e 100644 --- a/client/app/components/modals/DeleteModal.tsx +++ b/client/app/components/modals/DeleteModal.tsx @@ -20,7 +20,7 @@ export default function DeleteModal({ open, setOpen, title, id, type }: Props) { setLoading(true); deleteItem(type.toLowerCase(), id).then((r) => { if (r.ok) { - navigate("/"); + navigate(-1); } else { console.log(r); } diff --git a/client/app/components/modals/EditModal/EditModal.tsx b/client/app/components/modals/EditModal/EditModal.tsx index cbced25..a5c981e 100644 --- a/client/app/components/modals/EditModal/EditModal.tsx +++ b/client/app/components/modals/EditModal/EditModal.tsx @@ -4,6 +4,7 @@ import { deleteAlias, getAliases, setPrimaryAlias, + updateMbzId, type Alias, } from "api/api"; import { Modal } from "../Modal"; @@ -12,6 +13,7 @@ import { useEffect, useState } from "react"; import { Trash } from "lucide-react"; import SetVariousArtists from "./SetVariousArtist"; import SetPrimaryArtist from "./SetPrimaryArtist"; +import UpdateMbzID from "./UpdateMbzID"; interface Props { type: string; @@ -69,7 +71,7 @@ export default function EditModal({ open, setOpen, type, id }: Props) { const handleNewAlias = () => { setError(undefined); if (input === "") { - setError("alias must be provided"); + setError("no input"); return; } setLoading(true); @@ -156,6 +158,7 @@ export default function EditModal({ open, setOpen, type, id }: Props) { {type.toLowerCase() === "track" && ( )} +
); diff --git a/client/app/components/modals/EditModal/UpdateMbzID.tsx b/client/app/components/modals/EditModal/UpdateMbzID.tsx new file mode 100644 index 0000000..0654cc1 --- /dev/null +++ b/client/app/components/modals/EditModal/UpdateMbzID.tsx @@ -0,0 +1,53 @@ +import { updateMbzId } from "api/api"; +import { useState } from "react"; +import { AsyncButton } from "~/components/AsyncButton"; + +interface Props { + type: string; + id: number; +} + +export default function UpdateMbzID({ type, id }: Props) { + const [err, setError] = useState(); + const [input, setInput] = useState(""); + const [loading, setLoading] = useState(false); + const [mbzid, setMbzid] = useState<"">(); + const [success, setSuccess] = useState(""); + + const handleUpdateMbzID = () => { + setError(undefined); + if (input === "") { + setError("no input"); + return; + } + setLoading(true); + updateMbzId(type, id, input).then((r) => { + if (r.ok) { + setSuccess("successfully updated MusicBrainz ID"); + } else { + r.json().then((r) => setError(r.error)); + } + }); + setLoading(false); + }; + + return ( +
+

Update MusicBrainz ID

+
+ setInput(e.target.value)} + /> + + Submit + +
+ {err &&

{err}

} + {success &&

{success}

} +
+ ); +} diff --git a/client/app/components/modals/LoginForm.tsx b/client/app/components/modals/LoginForm.tsx index 66ae6cb..1078476 100644 --- a/client/app/components/modals/LoginForm.tsx +++ b/client/app/components/modals/LoginForm.tsx @@ -54,7 +54,7 @@ export default function LoginForm() { className="w-full mx-auto fg bg rounded p-2" onChange={(e) => setPassword(e.target.value)} /> -
+
(); const [debouncedQuery, setDebouncedQuery] = useState(query); const [mergeTarget, setMergeTarget] = useState<{ title: string; id: number }>( @@ -101,11 +101,12 @@ export default function MergeModal(props: Props) { { setQuery(e.target.value); e.target.select()}} onChange={(e) => setQuery(e.target.value)} /> @@ -128,7 +129,7 @@ export default function MergeModal(props: Props) { > Merge Items -
+
{(props.type.toLowerCase() === "album" || props.type.toLowerCase() === "artist") && ( -
+
{ const handleKeyDown = (e: KeyboardEvent) => { - if (e.key === 'Escape') onClose(); + // Close on Escape key + if (e.key === 'Escape') { + onClose() + // Trap tab navigation to the modal + } else if (e.key === 'Tab') { + if (modalRef.current) { + const focusableEls = modalRef.current.querySelectorAll( + 'button:not(:disabled), [href], input:not(:disabled), select:not(:disabled), textarea:not(:disabled), [tabindex]:not([tabindex="-1"])' + ); + const firstEl = focusableEls[0]; + const lastEl = focusableEls[focusableEls.length - 1]; + const activeEl = document.activeElement + + if (e.shiftKey && activeEl === firstEl) { + e.preventDefault(); + lastEl.focus(); + } else if (!e.shiftKey && activeEl === lastEl) { + e.preventDefault(); + firstEl.focus(); + } else if (!Array.from(focusableEls).find(node => node.isEqualNode(activeEl))) { + e.preventDefault(); + firstEl.focus(); + } + } + }; }; if (isOpen) document.addEventListener('keydown', handleKeyDown); return () => document.removeEventListener('keydown', handleKeyDown); @@ -70,13 +94,13 @@ export function Modal({ }`} style={{ maxWidth: maxW ?? 600, height: h ?? '' }} > + {children} - {children}
, document.body diff --git a/client/app/components/rewind/Rewind.tsx b/client/app/components/rewind/Rewind.tsx index 8e1908c..a22fe15 100644 --- a/client/app/components/rewind/Rewind.tsx +++ b/client/app/components/rewind/Rewind.tsx @@ -8,9 +8,16 @@ interface Props { } export default function Rewind(props: Props) { - const artistimg = props.stats.top_artists[0].image; - const albumimg = props.stats.top_albums[0].image; - const trackimg = props.stats.top_tracks[0].image; + const artistimg = props.stats.top_artists[0]?.item.image; + const albumimg = props.stats.top_albums[0]?.item.image; + const trackimg = props.stats.top_tracks[0]?.item.image; + if ( + !props.stats.top_artists[0] || + !props.stats.top_albums[0] || + !props.stats.top_tracks[0] + ) { + return

Not enough data exists to create a Rewind for this period :(

; + } return (

{props.stats.title}

diff --git a/client/app/components/rewind/RewindTopItem.tsx b/client/app/components/rewind/RewindTopItem.tsx index ffbe488..5093768 100644 --- a/client/app/components/rewind/RewindTopItem.tsx +++ b/client/app/components/rewind/RewindTopItem.tsx @@ -1,7 +1,9 @@ +import type { Ranked } from "api/api"; + type TopItemProps = { title: string; imageSrc: string; - items: T[]; + items: Ranked[]; getLabel: (item: T) => string; includeTime?: boolean; }; @@ -28,23 +30,23 @@ export function RewindTopItem<
-

{getLabel(top)}

+

{getLabel(top.item)}

- {`${top.listen_count} plays`} + {`${top.item.listen_count} plays`} {includeTime - ? ` (${Math.floor(top.time_listened / 60)} minutes)` + ? ` (${Math.floor(top.item.time_listened / 60)} minutes)` : ``}
{rest.map((e) => ( -
- {getLabel(e)} +
+ {getLabel(e.item)} - {` - ${e.listen_count} plays`} + {` - ${e.item.listen_count} plays`} {includeTime - ? ` (${Math.floor(e.time_listened / 60)} minutes)` + ? ` (${Math.floor(e.item.time_listened / 60)} minutes)` : ``}
diff --git a/client/app/components/sidebar/Sidebar.tsx b/client/app/components/sidebar/Sidebar.tsx index 15ac8b5..2bd88f3 100644 --- a/client/app/components/sidebar/Sidebar.tsx +++ b/client/app/components/sidebar/Sidebar.tsx @@ -2,7 +2,7 @@ import { ExternalLink, History, Home, Info } from "lucide-react"; import SidebarSearch from "./SidebarSearch"; import SidebarItem from "./SidebarItem"; import SidebarSettings from "./SidebarSettings"; -import { getRewindYear } from "~/utils/utils"; +import { getRewindParams, getRewindYear } from "~/utils/utils"; export default function Sidebar() { const iconSize = 20; @@ -45,7 +45,7 @@ export default function Sidebar() { {}} modal={<>} diff --git a/client/app/components/themeSwitcher/ThemeOption.tsx b/client/app/components/themeSwitcher/ThemeOption.tsx index 51b9acf..7c0166b 100644 --- a/client/app/components/themeSwitcher/ThemeOption.tsx +++ b/client/app/components/themeSwitcher/ThemeOption.tsx @@ -1,23 +1,43 @@ import type { Theme } from "~/styles/themes.css"; interface Props { - theme: Theme - themeName: string - setTheme: Function + theme: Theme; + themeName: string; + setTheme: Function; } export default function ThemeOption({ theme, themeName, setTheme }: Props) { + const capitalizeFirstLetter = (s: string) => { + return s.charAt(0).toUpperCase() + s.slice(1); + }; - const capitalizeFirstLetter = (s: string) => { - return s.charAt(0).toUpperCase() + s.slice(1); - } - - return ( -
setTheme(themeName)} className="rounded-md p-3 sm:p-5 hover:cursor-pointer flex gap-4 items-center border-2" style={{background: theme.bg, color: theme.fg, borderColor: theme.bgSecondary}}> -
{capitalizeFirstLetter(themeName)}
-
-
-
-
- ) -} \ No newline at end of file + return ( +
setTheme(themeName)} + className="rounded-md p-3 sm:p-5 hover:cursor-pointer flex gap-3 items-center border-2 justify-between" + style={{ + background: theme.bg, + color: theme.fg, + borderColor: theme.bgSecondary, + }} + > +
+ {capitalizeFirstLetter(themeName)} +
+
+
+
+
+
+
+ ); +} diff --git a/client/app/components/themeSwitcher/ThemeSwitcher.tsx b/client/app/components/themeSwitcher/ThemeSwitcher.tsx index 62374be..f27d41c 100644 --- a/client/app/components/themeSwitcher/ThemeSwitcher.tsx +++ b/client/app/components/themeSwitcher/ThemeSwitcher.tsx @@ -49,7 +49,7 @@ export function ThemeSwitcher() { Reset
-
+
{Object.entries(themes).map(([name, themeData]) => ( [ { rel: "preconnect", href: "https://fonts.googleapis.com" }, @@ -35,14 +38,23 @@ export const links: Route.LinksFunction = () => [ export function Layout({ children }: { children: React.ReactNode }) { return ( - + - + - + @@ -60,71 +72,71 @@ export function Layout({ children }: { children: React.ReactNode }) { export default function App() { return ( <> - - - -
- -
- -
-
-
-
-
-
+ + + +
+ +
+ +
+
+
+
+
+
); } export function HydrateFallback() { - return null + return null; } export function ErrorBoundary() { - const error = useRouteError(); - let message = "Oops!"; - let details = "An unexpected error occurred."; - let stack: string | undefined; + const error = useRouteError(); + let message = "Oops!"; + let details = "An unexpected error occurred."; + let stack: string | undefined; - if (isRouteErrorResponse(error)) { - message = error.status === 404 ? "404" : "Error"; - details = error.status === 404 + if (isRouteErrorResponse(error)) { + message = error.status === 404 ? "404" : "Error"; + details = + error.status === 404 ? "The requested page could not be found." : error.statusText || details; - } else if (import.meta.env.DEV && error instanceof Error) { - details = error.message; - stack = error.stack; - } + } else if (import.meta.env.DEV && error instanceof Error) { + details = error.message; + stack = error.stack; + } + const title = `${message} - Koito`; - const title = `${message} - Koito` - - return ( - - - {title} -
- -
-
-
- -
-

{message}

-

{details}

-
-
- {stack && ( -
-                                {stack}
-                                
- )} -
-
-
+ return ( + + + {title} + +
+
+
+
+ +
+

{message}

+

{details}

- - - ); +
+ {stack && ( +
+                  {stack}
+                
+ )} +
+
+
+
+
+
+ ); } diff --git a/client/app/routes/Charts/AlbumChart.tsx b/client/app/routes/Charts/AlbumChart.tsx index ba323bf..7a157a8 100644 --- a/client/app/routes/Charts/AlbumChart.tsx +++ b/client/app/routes/Charts/AlbumChart.tsx @@ -1,12 +1,12 @@ import TopItemList from "~/components/TopItemList"; import ChartLayout from "./ChartLayout"; import { useLoaderData, type LoaderFunctionArgs } from "react-router"; -import { type Album, type PaginatedResponse } from "api/api"; +import { type Album, type PaginatedResponse, type Ranked } from "api/api"; export async function clientLoader({ request }: LoaderFunctionArgs) { const url = new URL(request.url); const page = url.searchParams.get("page") || "0"; - url.searchParams.set('page', page) + url.searchParams.set("page", page); const res = await fetch( `/apis/web/v1/top-albums?${url.searchParams.toString()}` @@ -20,7 +20,9 @@ export async function clientLoader({ request }: LoaderFunctionArgs) { } export default function AlbumChart() { - const { top_albums: initialData } = useLoaderData<{ top_albums: PaginatedResponse }>(); + const { top_albums: initialData } = useLoaderData<{ + top_albums: PaginatedResponse>; + }>(); return ( ( -
-
- - -
+
+
+ + +
-
diff --git a/client/app/routes/Charts/ArtistChart.tsx b/client/app/routes/Charts/ArtistChart.tsx index ec3dfd8..8bc2935 100644 --- a/client/app/routes/Charts/ArtistChart.tsx +++ b/client/app/routes/Charts/ArtistChart.tsx @@ -1,12 +1,12 @@ import TopItemList from "~/components/TopItemList"; import ChartLayout from "./ChartLayout"; import { useLoaderData, type LoaderFunctionArgs } from "react-router"; -import { type Album, type PaginatedResponse } from "api/api"; +import { type Album, type PaginatedResponse, type Ranked } from "api/api"; export async function clientLoader({ request }: LoaderFunctionArgs) { const url = new URL(request.url); const page = url.searchParams.get("page") || "0"; - url.searchParams.set('page', page) + url.searchParams.set("page", page); const res = await fetch( `/apis/web/v1/top-artists?${url.searchParams.toString()}` @@ -20,7 +20,9 @@ export async function clientLoader({ request }: LoaderFunctionArgs) { } export default function Artist() { - const { top_artists: initialData } = useLoaderData<{ top_artists: PaginatedResponse }>(); + const { top_artists: initialData } = useLoaderData<{ + top_artists: PaginatedResponse>; + }>(); return ( ( -
-
- - -
+
+
+ + +
-
diff --git a/client/app/routes/Charts/ChartLayout.tsx b/client/app/routes/Charts/ChartLayout.tsx index ee5ef59..90858bd 100644 --- a/client/app/routes/Charts/ChartLayout.tsx +++ b/client/app/routes/Charts/ChartLayout.tsx @@ -1,264 +1,272 @@ -import { - useFetcher, - useLocation, - useNavigate, -} from "react-router" -import { useEffect, useState } from "react" -import { average } from "color.js" -import { imageUrl, type PaginatedResponse } from "api/api" -import PeriodSelector from "~/components/PeriodSelector" +import { useFetcher, useLocation, useNavigate } from "react-router"; +import { useEffect, useState } from "react"; +import { average } from "color.js"; +import { imageUrl, type PaginatedResponse } from "api/api"; +import PeriodSelector from "~/components/PeriodSelector"; interface ChartLayoutProps { - title: "Top Albums" | "Top Tracks" | "Top Artists" | "Last Played" - initialData: PaginatedResponse - endpoint: string - render: (opts: { - data: PaginatedResponse - page: number - onNext: () => void - onPrev: () => void - }) => React.ReactNode + title: "Top Albums" | "Top Tracks" | "Top Artists" | "Last Played"; + initialData: PaginatedResponse; + endpoint: string; + render: (opts: { + data: PaginatedResponse; + page: number; + onNext: () => void; + onPrev: () => void; + }) => React.ReactNode; } export default function ChartLayout({ - title, - initialData, - endpoint, - render, + title, + initialData, + endpoint, + render, }: ChartLayoutProps) { - const pgTitle = `${title} - Koito` + const pgTitle = `${title} - Koito`; - const fetcher = useFetcher() - const location = useLocation() - const navigate = useNavigate() + const fetcher = useFetcher(); + const location = useLocation(); + const navigate = useNavigate(); - const currentParams = new URLSearchParams(location.search) - const currentPage = parseInt(currentParams.get("page") || "1", 10) + const currentParams = new URLSearchParams(location.search); + const currentPage = parseInt(currentParams.get("page") || 
"1", 10); - const data: PaginatedResponse = fetcher.data?.[endpoint] - ? fetcher.data[endpoint] - : initialData + const data: PaginatedResponse = fetcher.data?.[endpoint] + ? fetcher.data[endpoint] + : initialData; - const [bgColor, setBgColor] = useState("(--color-bg)") + const [bgColor, setBgColor] = useState("(--color-bg)"); - useEffect(() => { - if ((data?.items?.length ?? 0) === 0) return + useEffect(() => { + if ((data?.items?.length ?? 0) === 0) return; - const img = (data.items[0] as any)?.image - if (!img) return + const img = (data.items[0] as any)?.item?.image; + if (!img) return; - average(imageUrl(img, "small"), { amount: 1 }).then((color) => { - setBgColor(`rgba(${color[0]},${color[1]},${color[2]},0.4)`) - }) - }, [data]) + average(imageUrl(img, "small"), { amount: 1 }).then((color) => { + setBgColor(`rgba(${color[0]},${color[1]},${color[2]},0.4)`); + }); + }, [data]); - const period = currentParams.get("period") ?? "day" - const year = currentParams.get("year") - const month = currentParams.get("month") - const week = currentParams.get("week") + const period = currentParams.get("period") ?? 
"day"; + const year = currentParams.get("year"); + const month = currentParams.get("month"); + const week = currentParams.get("week"); - const updateParams = (params: Record) => { - const nextParams = new URLSearchParams(location.search) - - for (const key in params) { - const val = params[key] - if (val !== null) { - nextParams.set(key, val) - } else { - nextParams.delete(key) - } - } - - const url = `/${endpoint}?${nextParams.toString()}` - navigate(url, { replace: false }) + const updateParams = (params: Record) => { + const nextParams = new URLSearchParams(location.search); + + for (const key in params) { + const val = params[key]; + if (val !== null) { + nextParams.set(key, val); + } else { + nextParams.delete(key); + } } - - const handleSetPeriod = (p: string) => { - updateParams({ - period: p, - page: "1", - year: null, - month: null, - week: null, - }) - } - const handleSetYear = (val: string) => { - if (val == "") { - updateParams({ - period: period, - page: "1", - year: null, - month: null, - week: null - }) - return - } - updateParams({ - period: null, - page: "1", - year: val, - }) - } - const handleSetMonth = (val: string) => { - updateParams({ - period: null, - page: "1", - year: year ?? new Date().getFullYear().toString(), - month: val, - }) - } - const handleSetWeek = (val: string) => { - updateParams({ - period: null, - page: "1", - year: year ?? 
new Date().getFullYear().toString(), - month: null, - week: val, - }) - } - useEffect(() => { - fetcher.load(`/${endpoint}?${currentParams.toString()}`) - }, [location.search]) + const url = `/${endpoint}?${nextParams.toString()}`; + navigate(url, { replace: false }); + }; - const setPage = (nextPage: number) => { - const nextParams = new URLSearchParams(location.search) - nextParams.set("page", String(nextPage)) - const url = `/${endpoint}?${nextParams.toString()}` - fetcher.load(url) - navigate(url, { replace: false }) - } - - const handleNextPage = () => setPage(currentPage + 1) - const handlePrevPage = () => setPage(currentPage - 1) - - const yearOptions = Array.from({ length: 10 }, (_, i) => `${new Date().getFullYear() - i}`) - const monthOptions = Array.from({ length: 12 }, (_, i) => `${i + 1}`) - const weekOptions = Array.from({ length: 53 }, (_, i) => `${i + 1}`) - - const getDateRange = (): string => { - let from: Date - let to: Date - - const now = new Date() - const currentYear = now.getFullYear() - const currentMonth = now.getMonth() // 0-indexed - const currentDate = now.getDate() - - if (year && month) { - from = new Date(parseInt(year), parseInt(month) - 1, 1) - to = new Date(from) - to.setMonth(from.getMonth() + 1) - to.setDate(0) - } else if (year && week) { - const base = new Date(parseInt(year), 0, 1) // Jan 1 of the year - const weekNumber = parseInt(week) - from = new Date(base) - from.setDate(base.getDate() + (weekNumber - 1) * 7) - to = new Date(from) - to.setDate(from.getDate() + 6) - } else if (year) { - from = new Date(parseInt(year), 0, 1) - to = new Date(parseInt(year), 11, 31) - } else { - switch (period) { - case "day": - from = new Date(now) - to = new Date(now) - break - case "week": - to = new Date(now) - from = new Date(now) - from.setDate(to.getDate() - 6) - break - case "month": - to = new Date(now) - from = new Date(now) - if (currentMonth === 0) { - from = new Date(currentYear - 1, 11, currentDate) - } else { - from = new 
Date(currentYear, currentMonth - 1, currentDate) - } - break - case "year": - to = new Date(now) - from = new Date(currentYear - 1, currentMonth, currentDate) - break - case "all_time": - return "All Time" - default: - return "" - } - } - - const formatter = new Intl.DateTimeFormat(undefined, { - year: "numeric", - month: "long", - day: "numeric", - }) - - return `${formatter.format(from)} - ${formatter.format(to)}` + const handleSetPeriod = (p: string) => { + updateParams({ + period: p, + page: "1", + year: null, + month: null, + week: null, + }); + }; + const handleSetYear = (val: string) => { + if (val == "") { + updateParams({ + period: period, + page: "1", + year: null, + month: null, + week: null, + }); + return; } - + updateParams({ + period: null, + page: "1", + year: val, + }); + }; + const handleSetMonth = (val: string) => { + updateParams({ + period: null, + page: "1", + year: year ?? new Date().getFullYear().toString(), + month: val, + }); + }; + const handleSetWeek = (val: string) => { + updateParams({ + period: null, + page: "1", + year: year ?? new Date().getFullYear().toString(), + month: null, + week: val, + }); + }; - return ( -
- {pgTitle} - - -
-

{title}

-
- -
- - - -
-
-

{getDateRange()}

-
- {render({ - data, - page: currentPage, - onNext: handleNextPage, - onPrev: handlePrevPage, - })} -
-
-
- ) + useEffect(() => { + fetcher.load(`/${endpoint}?${currentParams.toString()}`); + }, [location.search]); + + const setPage = (nextPage: number) => { + const nextParams = new URLSearchParams(location.search); + nextParams.set("page", String(nextPage)); + const url = `/${endpoint}?${nextParams.toString()}`; + fetcher.load(url); + navigate(url, { replace: false }); + }; + + const handleNextPage = () => setPage(currentPage + 1); + const handlePrevPage = () => setPage(currentPage - 1); + + const yearOptions = Array.from( + { length: 10 }, + (_, i) => `${new Date().getFullYear() - i}` + ); + const monthOptions = Array.from({ length: 12 }, (_, i) => `${i + 1}`); + const weekOptions = Array.from({ length: 53 }, (_, i) => `${i + 1}`); + + const getDateRange = (): string => { + let from: Date; + let to: Date; + + const now = new Date(); + const currentYear = now.getFullYear(); + const currentMonth = now.getMonth(); // 0-indexed + const currentDate = now.getDate(); + + if (year && month) { + from = new Date(parseInt(year), parseInt(month) - 1, 1); + to = new Date(from); + to.setMonth(from.getMonth() + 1); + to.setDate(0); + } else if (year && week) { + const base = new Date(parseInt(year), 0, 1); // Jan 1 of the year + const weekNumber = parseInt(week); + from = new Date(base); + from.setDate(base.getDate() + (weekNumber - 1) * 7); + to = new Date(from); + to.setDate(from.getDate() + 6); + } else if (year) { + from = new Date(parseInt(year), 0, 1); + to = new Date(parseInt(year), 11, 31); + } else { + switch (period) { + case "day": + from = new Date(now); + to = new Date(now); + break; + case "week": + to = new Date(now); + from = new Date(now); + from.setDate(to.getDate() - 6); + break; + case "month": + to = new Date(now); + from = new Date(now); + if (currentMonth === 0) { + from = new Date(currentYear - 1, 11, currentDate); + } else { + from = new Date(currentYear, currentMonth - 1, currentDate); + } + break; + case "year": + to = new Date(now); + from = new 
Date(currentYear - 1, currentMonth, currentDate); + break; + case "all_time": + return "All Time"; + default: + return ""; + } + } + + const formatter = new Intl.DateTimeFormat(undefined, { + year: "numeric", + month: "long", + day: "numeric", + }); + + return `${formatter.format(from)} - ${formatter.format(to)}`; + }; + + return ( +
+ {pgTitle} + + +
+

{title}

+
+ +
+ + + +
+
+

{getDateRange()}

+
+ {render({ + data, + page: currentPage, + onNext: handleNextPage, + onPrev: handlePrevPage, + })} +
+
+
+ ); } diff --git a/client/app/routes/Charts/TrackChart.tsx b/client/app/routes/Charts/TrackChart.tsx index eeeb145..450d022 100644 --- a/client/app/routes/Charts/TrackChart.tsx +++ b/client/app/routes/Charts/TrackChart.tsx @@ -1,12 +1,12 @@ import TopItemList from "~/components/TopItemList"; import ChartLayout from "./ChartLayout"; import { useLoaderData, type LoaderFunctionArgs } from "react-router"; -import { type Album, type PaginatedResponse } from "api/api"; +import { type Track, type PaginatedResponse, type Ranked } from "api/api"; export async function clientLoader({ request }: LoaderFunctionArgs) { const url = new URL(request.url); const page = url.searchParams.get("page") || "0"; - url.searchParams.set('page', page) + url.searchParams.set("page", page); const res = await fetch( `/apis/web/v1/top-tracks?${url.searchParams.toString()}` @@ -15,12 +15,14 @@ export async function clientLoader({ request }: LoaderFunctionArgs) { throw new Response("Failed to load top tracks", { status: 500 }); } - const top_tracks: PaginatedResponse = await res.json(); + const top_tracks: PaginatedResponse = await res.json(); return { top_tracks }; } export default function TrackChart() { - const { top_tracks: initialData } = useLoaderData<{ top_tracks: PaginatedResponse }>(); + const { top_tracks: initialData } = useLoaderData<{ + top_tracks: PaginatedResponse>; + }>(); return ( ( -
-
- - -
+
+
+ + +
-
diff --git a/client/app/routes/Home.tsx b/client/app/routes/Home.tsx index 55c62bf..597c563 100644 --- a/client/app/routes/Home.tsx +++ b/client/app/routes/Home.tsx @@ -10,20 +10,17 @@ import PeriodSelector from "~/components/PeriodSelector"; import { useAppContext } from "~/providers/AppProvider"; export function meta({}: Route.MetaArgs) { - return [ - { title: "Koito" }, - { name: "description", content: "Koito" }, - ]; + return [{ title: "Koito" }, { name: "description", content: "Koito" }]; } export default function Home() { - const [period, setPeriod] = useState('week') + const [period, setPeriod] = useState("week"); const { homeItems } = useAppContext(); return ( -
-
+
+
@@ -33,7 +30,10 @@ export default function Home() { - +
diff --git a/client/app/routes/MediaItems/Album.tsx b/client/app/routes/MediaItems/Album.tsx index 4806384..e6f413e 100644 --- a/client/app/routes/MediaItems/Album.tsx +++ b/client/app/routes/MediaItems/Album.tsx @@ -7,6 +7,7 @@ import PeriodSelector from "~/components/PeriodSelector"; import MediaLayout from "./MediaLayout"; import ActivityGrid from "~/components/ActivityGrid"; import { timeListenedString } from "~/utils/utils"; +import InterestGraph from "~/components/InterestGraph"; export async function clientLoader({ params }: LoaderFunctionArgs) { const res = await fetch(`/apis/web/v1/album?id=${params.id}`); @@ -29,6 +30,7 @@ export default function Album() { title={album.title} img={album.image} id={album.id} + rank={album.all_time_rank} musicbrainzId={album.musicbrainz_id} imgItemId={album.id} mergeFunc={mergeAlbums} @@ -44,22 +46,22 @@ export default function Album() { }} subContent={
- {album.listen_count && ( + {album.listen_count !== 0 && (

{album.listen_count} play{album.listen_count > 1 ? "s" : ""}

)} - { + {album.time_listened !== 0 && (

{timeListenedString(album.time_listened)}

- } - { + )} + {album.first_listen > 0 && (

Listening since{" "} {new Date(album.first_listen * 1000).toLocaleDateString()}

- } + )}
} > @@ -69,7 +71,10 @@ export default function Album() {
- +
+ + +
); diff --git a/client/app/routes/MediaItems/Artist.tsx b/client/app/routes/MediaItems/Artist.tsx index 3f9485c..a23e4cd 100644 --- a/client/app/routes/MediaItems/Artist.tsx +++ b/client/app/routes/MediaItems/Artist.tsx @@ -8,6 +8,7 @@ import MediaLayout from "./MediaLayout"; import ArtistAlbums from "~/components/ArtistAlbums"; import ActivityGrid from "~/components/ActivityGrid"; import { timeListenedString } from "~/utils/utils"; +import InterestGraph from "~/components/InterestGraph"; export async function clientLoader({ params }: LoaderFunctionArgs) { const res = await fetch(`/apis/web/v1/artist?id=${params.id}`); @@ -35,6 +36,7 @@ export default function Artist() { title={artist.name} img={artist.image} id={artist.id} + rank={artist.all_time_rank} musicbrainzId={artist.musicbrainz_id} imgItemId={artist.id} mergeFunc={mergeArtists} @@ -55,17 +57,17 @@ export default function Artist() { {artist.listen_count} play{artist.listen_count > 1 ? "s" : ""}

)} - { + {artist.time_listened !== 0 && (

{timeListenedString(artist.time_listened)}

- } - { + )} + {artist.first_listen > 0 && (

Listening since{" "} {new Date(artist.first_listen * 1000).toLocaleDateString()}

- } + )}
} > @@ -76,7 +78,10 @@ export default function Artist() {
- +
+ + +
diff --git a/client/app/routes/MediaItems/MediaLayout.tsx b/client/app/routes/MediaItems/MediaLayout.tsx index 93c25e1..eaf100b 100644 --- a/client/app/routes/MediaItems/MediaLayout.tsx +++ b/client/app/routes/MediaItems/MediaLayout.tsx @@ -10,97 +10,200 @@ import DeleteModal from "~/components/modals/DeleteModal"; import RenameModal from "~/components/modals/EditModal/EditModal"; import EditModal from "~/components/modals/EditModal/EditModal"; import AddListenModal from "~/components/modals/AddListenModal"; +import MbzIcon from "~/components/icons/MbzIcon"; +import { Link } from "react-router"; -export type MergeFunc = (from: number, to: number, replaceImage: boolean) => Promise -export type MergeSearchCleanerFunc = (r: SearchResponse, id: number) => SearchResponse +export type MergeFunc = ( + from: number, + to: number, + replaceImage: boolean +) => Promise; +export type MergeSearchCleanerFunc = ( + r: SearchResponse, + id: number +) => SearchResponse; interface Props { - type: "Track" | "Album" | "Artist" - title: string - img: string - id: number - musicbrainzId: string - imgItemId: number - mergeFunc: MergeFunc - mergeCleanerFunc: MergeSearchCleanerFunc - children: React.ReactNode - subContent: React.ReactNode + type: "Track" | "Album" | "Artist"; + title: string; + img: string; + id: number; + rank: number; + musicbrainzId: string; + imgItemId: number; + mergeFunc: MergeFunc; + mergeCleanerFunc: MergeSearchCleanerFunc; + children: React.ReactNode; + subContent: React.ReactNode; } export default function MediaLayout(props: Props) { - const [bgColor, setBgColor] = useState("(--color-bg)"); - const [mergeModalOpen, setMergeModalOpen] = useState(false); - const [deleteModalOpen, setDeleteModalOpen] = useState(false); - const [imageModalOpen, setImageModalOpen] = useState(false); - const [renameModalOpen, setRenameModalOpen] = useState(false); - const [addListenModalOpen, setAddListenModalOpen] = useState(false); - const { user } = useAppContext(); + const 
[bgColor, setBgColor] = useState("(--color-bg)"); + const [mergeModalOpen, setMergeModalOpen] = useState(false); + const [deleteModalOpen, setDeleteModalOpen] = useState(false); + const [imageModalOpen, setImageModalOpen] = useState(false); + const [renameModalOpen, setRenameModalOpen] = useState(false); + const [addListenModalOpen, setAddListenModalOpen] = useState(false); + const { user } = useAppContext(); - useEffect(() => { - average(imageUrl(props.img, 'small'), { amount: 1 }).then((color) => { - setBgColor(`rgba(${color[0]},${color[1]},${color[2]},0.4)`); - }); - }, [props.img]); + useEffect(() => { + average(imageUrl(props.img, "small"), { amount: 1 }).then((color) => { + setBgColor(`rgba(${color[0]},${color[1]},${color[2]},0.4)`); + }); + }, [props.img]); - const replaceImageCallback = () => { - window.location.reload() - } + const replaceImageCallback = () => { + window.location.reload(); + }; - const title = `${props.title} - Koito` + const title = `${props.title} - Koito`; - const mobileIconSize = 22 - const normalIconSize = 30 + const mobileIconSize = 22; + const normalIconSize = 30; - let vw = Math.max(document.documentElement.clientWidth || 0, window.innerWidth || 0) + let vw = Math.max( + document.documentElement.clientWidth || 0, + window.innerWidth || 0 + ); - let iconSize = vw > 768 ? normalIconSize : mobileIconSize + let iconSize = vw > 768 ? normalIconSize : mobileIconSize; - return ( -
- - {title} - - -
-
-
- {props.title} -
-
-

{props.type}

-

{props.title}

- {props.subContent} -
- { user && -
- { props.type === "Track" && - <> - - - - } - - - - - - - - -
- } -
- {props.children} + console.log("MBZ:", props.musicbrainzId); + + return ( +
+ + {title} + + +
+
+
+ {props.title} +
+
+

{props.type}

+
+

+ {props.title} + + {" "} + #{props.rank} + +

-
- ); + {props.subContent} +
+
+ {props.musicbrainzId && ( + + + + )} + {user && ( + <> + {props.type === "Track" && ( + <> + + + + )} + + + {props.type !== "Track" && ( + + )} + + + + + + + + )} +
+
+ {props.children} +
+ + ); } diff --git a/client/app/routes/MediaItems/Track.tsx b/client/app/routes/MediaItems/Track.tsx index 5690232..6b6690e 100644 --- a/client/app/routes/MediaItems/Track.tsx +++ b/client/app/routes/MediaItems/Track.tsx @@ -6,6 +6,7 @@ import PeriodSelector from "~/components/PeriodSelector"; import MediaLayout from "./MediaLayout"; import ActivityGrid from "~/components/ActivityGrid"; import { timeListenedString } from "~/utils/utils"; +import InterestGraph from "~/components/InterestGraph"; export async function clientLoader({ params }: LoaderFunctionArgs) { let res = await fetch(`/apis/web/v1/track?id=${params.id}`); @@ -33,7 +34,8 @@ export default function Track() { title={track.title} img={track.image} id={track.id} - musicbrainzId={album.musicbrainz_id} + rank={track.all_time_rank} + musicbrainzId={track.musicbrainz_id} imgItemId={track.album_id} mergeFunc={mergeTracks} mergeCleanerFunc={(r, id) => { @@ -48,23 +50,28 @@ export default function Track() { }} subContent={
- appears on {album.title} - {track.listen_count && ( +

+ Appears on{" "} + + {album.title} + +

+ {track.listen_count !== 0 && (

{track.listen_count} play{track.listen_count > 1 ? "s" : ""}

)} - { + {track.time_listened !== 0 && (

{timeListenedString(track.time_listened)}

- } - { + )} + {track.first_listen > 0 && (

Listening since{" "} {new Date(track.first_listen * 1000).toLocaleDateString()}

- } + )}
} > @@ -73,7 +80,10 @@ export default function Track() {
- +
+ + +
); diff --git a/client/app/routes/RewindPage.tsx b/client/app/routes/RewindPage.tsx index b14e5fc..ad92497 100644 --- a/client/app/routes/RewindPage.tsx +++ b/client/app/routes/RewindPage.tsx @@ -1,52 +1,213 @@ import Rewind from "~/components/rewind/Rewind"; import type { Route } from "./+types/Home"; -import { type RewindStats } from "api/api"; -import { useState } from "react"; +import { imageUrl, type RewindStats } from "api/api"; +import { useEffect, useState } from "react"; import type { LoaderFunctionArgs } from "react-router"; import { useLoaderData } from "react-router"; -import { getRewindYear } from "~/utils/utils"; +import { getRewindParams, getRewindYear } from "~/utils/utils"; +import { useNavigate } from "react-router"; +import { average } from "color.js"; +import { ChevronLeft, ChevronRight } from "lucide-react"; + +// TODO: Bind year and month selectors to what data actually exists + +const months = [ + "Full Year", + "January", + "February", + "March", + "April", + "May", + "June", + "July", + "August", + "September", + "October", + "November", + "December", +]; export async function clientLoader({ request }: LoaderFunctionArgs) { const url = new URL(request.url); - const year = url.searchParams.get("year") || getRewindYear(); + const year = parseInt( + url.searchParams.get("year") || getRewindParams().year.toString() + ); + const month = parseInt( + url.searchParams.get("month") || getRewindParams().month.toString() + ); - const res = await fetch(`/apis/web/v1/summary?year=${year}`); + const res = await fetch(`/apis/web/v1/summary?year=${year}&month=${month}`); if (!res.ok) { throw new Response("Failed to load summary", { status: 500 }); } const stats: RewindStats = await res.json(); - stats.title = `Your ${year} Rewind`; + stats.title = `Your ${month === 0 ? 
"" : months[month]} ${year} Rewind`; return { stats }; } -export function meta({}: Route.MetaArgs) { - return [ - { title: `Rewind - Koito` }, - { name: "description", content: "Rewind - Koito" }, - ]; -} - export default function RewindPage() { + const currentParams = new URLSearchParams(location.search); + let year = parseInt( + currentParams.get("year") || getRewindParams().year.toString() + ); + let month = parseInt( + currentParams.get("month") || getRewindParams().month.toString() + ); + const navigate = useNavigate(); const [showTime, setShowTime] = useState(false); const { stats: stats } = useLoaderData<{ stats: RewindStats }>(); + + const [bgColor, setBgColor] = useState("(--color-bg)"); + + useEffect(() => { + if (!stats.top_artists[0]) return; + + const img = (stats.top_artists[0] as any)?.item.image; + if (!img) return; + + average(imageUrl(img, "small"), { amount: 1 }).then((color) => { + setBgColor(`rgba(${color[0]},${color[1]},${color[2]},0.4)`); + }); + }, [stats]); + + const updateParams = (params: Record) => { + const nextParams = new URLSearchParams(location.search); + + for (const key in params) { + const val = params[key]; + + if (val !== null) { + nextParams.set(key, val); + } + } + + const url = `/rewind?${nextParams.toString()}`; + + navigate(url, { replace: false }); + }; + + const navigateMonth = (direction: "prev" | "next") => { + if (direction === "next") { + if (month === 12) { + month = 0; + } else { + month += 1; + } + } else { + if (month === 0) { + month = 12; + } else { + month -= 1; + } + } + console.log(`Month: ${month}`); + + updateParams({ + year: year.toString(), + month: month.toString(), + }); + }; + const navigateYear = (direction: "prev" | "next") => { + if (direction === "next") { + year += 1; + } else { + year -= 1; + } + + updateParams({ + year: year.toString(), + month: month.toString(), + }); + }; + + const pgTitle = `${stats.title} - Koito`; + return ( -
- {stats.title} - Koito - - -
-
- - setShowTime(!showTime)} - > +
+
+ {pgTitle} + + +
+
+
+
+ +

+ {months[month]} +

+ +
+
+ +

{year}

+ +
+
+
+ + setShowTime(!showTime)} + > +
+
+ {stats !== undefined && ( + + )}
- {stats !== undefined && }
-
+
); } diff --git a/client/app/styles/themes.css.ts b/client/app/styles/themes.css.ts index d5390ae..1a3a57d 100644 --- a/client/app/styles/themes.css.ts +++ b/client/app/styles/themes.css.ts @@ -92,7 +92,7 @@ export const themes: Record = { fg: "#fef9f3", fgSecondary: "#dbc6b0", fgTertiary: "#a3917a", - primary: "#d97706", + primary: "#F0850A", primaryDim: "#b45309", accent: "#8c4c28", accentDim: "#6b3b1f", diff --git a/client/app/tz.ts b/client/app/tz.ts new file mode 100644 index 0000000..3d82e0c --- /dev/null +++ b/client/app/tz.ts @@ -0,0 +1,10 @@ +export function initTimezoneCookie() { + if (typeof window === "undefined") return; + + if (document.cookie.includes("tz=")) return; + + const tz = Intl.DateTimeFormat().resolvedOptions().timeZone; + if (!tz) return; + + document.cookie = `tz=${tz}; Path=/; Max-Age=31536000; SameSite=Lax`; +} diff --git a/client/app/utils/utils.ts b/client/app/utils/utils.ts index 50c0c16..4acbad5 100644 --- a/client/app/utils/utils.ts +++ b/client/app/utils/utils.ts @@ -16,12 +16,15 @@ const timeframeToInterval = (timeframe: Timeframe): string => { }; const getRewindYear = (): number => { + return new Date().getFullYear() - 1; +}; + +const getRewindParams = (): { month: number; year: number } => { const today = new Date(); - if (today.getMonth() > 10 && today.getDate() >= 30) { - // if we are in december 30/31, just serve current year - return today.getFullYear(); + if (today.getMonth() == 0) { + return { month: 0, year: today.getFullYear() - 1 }; } else { - return today.getFullYear() - 1; + return { month: today.getMonth(), year: today.getFullYear() }; } }; @@ -114,5 +117,5 @@ const timeListenedString = (seconds: number) => { return `${minutes} minutes listened`; }; -export { hexToHSL, timeListenedString, getRewindYear }; +export { hexToHSL, timeListenedString, getRewindYear, getRewindParams }; export type { hsl }; diff --git a/client/package.json b/client/package.json index ce0614f..eee0653 100644 --- a/client/package.json +++ 
b/client/package.json @@ -13,6 +13,7 @@ "@radix-ui/react-tabs": "^1.1.12", "@react-router/node": "^7.5.3", "@react-router/serve": "^7.5.3", + "@recharts/devtools": "^0.0.7", "@tanstack/react-query": "^5.80.6", "@vanilla-extract/css": "^1.17.4", "color.js": "^1.2.0", @@ -20,7 +21,9 @@ "lucide-react": "^0.513.0", "react": "^19.1.0", "react-dom": "^19.1.0", - "react-router": "^7.5.3" + "react-is": "^19.2.3", + "react-router": "^7.5.3", + "recharts": "^3.6.0" }, "devDependencies": { "@react-router/dev": "^7.5.3", diff --git a/client/yarn.lock b/client/yarn.lock index 552e8f9..48a33db 100644 --- a/client/yarn.lock +++ b/client/yarn.lock @@ -689,6 +689,23 @@ morgan "^1.10.0" source-map-support "^0.5.21" +"@recharts/devtools@^0.0.7": + version "0.0.7" + resolved "https://registry.yarnpkg.com/@recharts/devtools/-/devtools-0.0.7.tgz#a909d102efd76fc45bc2b7a150e67a02da04b4c1" + integrity sha512-ud66rUf3FYf1yQLGSCowI50EQyC/rcZblvDgNvfUIVaEXyQtr5K2DFgwegziqbVclsVBQLTxyntVViJN5H4oWQ== + +"@reduxjs/toolkit@1.x.x || 2.x.x": + version "2.11.2" + resolved "https://registry.yarnpkg.com/@reduxjs/toolkit/-/toolkit-2.11.2.tgz#582225acea567329ca6848583e7dd72580d38e82" + integrity sha512-Kd6kAHTA6/nUpp8mySPqj3en3dm0tdMIgbttnQ1xFMVpufoj+ADi8pXLBsd4xzTRHQa7t/Jv8W5UnCuW4kuWMQ== + dependencies: + "@standard-schema/spec" "^1.0.0" + "@standard-schema/utils" "^0.3.0" + immer "^11.0.0" + redux "^5.0.1" + redux-thunk "^3.1.0" + reselect "^5.1.0" + "@rollup/rollup-android-arm-eabi@4.42.0": version "4.42.0" resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.42.0.tgz#8baae15a6a27f18b7c5be420e00ab08c7d3dd6f4" @@ -789,6 +806,16 @@ resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.42.0.tgz#516c6770ba15fe6aef369d217a9747492c01e8b7" integrity sha512-LpHiJRwkaVz/LqjHjK8LCi8osq7elmpwujwbXKNW88bM8eeGxavJIKKjkjpMHAh/2xfnrt1ZSnhTv41WYUHYmA== +"@standard-schema/spec@^1.0.0": + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/@standard-schema/spec/-/spec-1.1.0.tgz#a79b55dbaf8604812f52d140b2c9ab41bc150bb8" + integrity sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w== + +"@standard-schema/utils@^0.3.0": + version "0.3.0" + resolved "https://registry.yarnpkg.com/@standard-schema/utils/-/utils-0.3.0.tgz#3d5e608f16c2390c10528e98e59aef6bf73cae7b" + integrity sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g== + "@tailwindcss/node@4.1.8": version "4.1.8" resolved "https://registry.yarnpkg.com/@tailwindcss/node/-/node-4.1.8.tgz#e29187abec6194ce1e9f072208c62116a79a129b" @@ -918,6 +945,57 @@ dependencies: tslib "^2.4.0" +"@types/d3-array@^3.0.3": + version "3.2.2" + resolved "https://registry.yarnpkg.com/@types/d3-array/-/d3-array-3.2.2.tgz#e02151464d02d4a1b44646d0fcdb93faf88fde8c" + integrity sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw== + +"@types/d3-color@*": + version "3.1.3" + resolved "https://registry.yarnpkg.com/@types/d3-color/-/d3-color-3.1.3.tgz#368c961a18de721da8200e80bf3943fb53136af2" + integrity sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A== + +"@types/d3-ease@^3.0.0": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-ease/-/d3-ease-3.0.2.tgz#e28db1bfbfa617076f7770dd1d9a48eaa3b6c51b" + integrity sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA== + +"@types/d3-interpolate@^3.0.1": + version "3.0.4" + resolved "https://registry.yarnpkg.com/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz#412b90e84870285f2ff8a846c6eb60344f12a41c" + integrity sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA== + dependencies: + "@types/d3-color" "*" + +"@types/d3-path@*": + version "3.1.1" + resolved 
"https://registry.yarnpkg.com/@types/d3-path/-/d3-path-3.1.1.tgz#f632b380c3aca1dba8e34aa049bcd6a4af23df8a" + integrity sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg== + +"@types/d3-scale@^4.0.2": + version "4.0.9" + resolved "https://registry.yarnpkg.com/@types/d3-scale/-/d3-scale-4.0.9.tgz#57a2f707242e6fe1de81ad7bfcccaaf606179afb" + integrity sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw== + dependencies: + "@types/d3-time" "*" + +"@types/d3-shape@^3.1.0": + version "3.1.8" + resolved "https://registry.yarnpkg.com/@types/d3-shape/-/d3-shape-3.1.8.tgz#d1516cc508753be06852cd06758e3bb54a22b0e3" + integrity sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w== + dependencies: + "@types/d3-path" "*" + +"@types/d3-time@*", "@types/d3-time@^3.0.0": + version "3.0.4" + resolved "https://registry.yarnpkg.com/@types/d3-time/-/d3-time-3.0.4.tgz#8472feecd639691450dd8000eb33edd444e1323f" + integrity sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g== + +"@types/d3-timer@^3.0.0": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-timer/-/d3-timer-3.0.2.tgz#70bbda77dc23aa727413e22e214afa3f0e852f70" + integrity sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw== + "@types/estree@1.0.7": version "1.0.7" resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.7.tgz#4158d3105276773d5b7695cd4834b1722e4f37a8" @@ -949,6 +1027,11 @@ dependencies: csstype "^3.0.2" +"@types/use-sync-external-store@^0.0.6": + version "0.0.6" + resolved "https://registry.yarnpkg.com/@types/use-sync-external-store/-/use-sync-external-store-0.0.6.tgz#60be8d21baab8c305132eb9cb912ed497852aadc" + integrity sha512-zFDAD+tlpf2r4asuHEj0XH6pY6i0g5NeAHPn+15wk3BV6JA69eERFXC1gyGThDkVa1zCyKr5jox1+2LbV/AMLg== + "@vanilla-extract/babel-plugin-debug-ids@^1.2.2": version 
"1.2.2" resolved "https://registry.yarnpkg.com/@vanilla-extract/babel-plugin-debug-ids/-/babel-plugin-debug-ids-1.2.2.tgz#0bcb26614d8c6c4c0d95f8f583d838ce71294633" @@ -1163,6 +1246,11 @@ chownr@^3.0.0: resolved "https://registry.yarnpkg.com/chownr/-/chownr-3.0.0.tgz#9855e64ecd240a9cc4267ce8a4aa5d24a1da15e4" integrity sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g== +clsx@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/clsx/-/clsx-2.1.1.tgz#eed397c9fd8bd882bfb18deab7102049a2f32999" + integrity sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA== + color-convert@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" @@ -1261,6 +1349,77 @@ csstype@^3.0.2, csstype@^3.0.7: resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.3.tgz#d80ff294d114fb0e6ac500fbf85b60137d7eff81" integrity sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw== +"d3-array@2 - 3", "d3-array@2.10.0 - 3", d3-array@^3.1.6: + version "3.2.4" + resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-3.2.4.tgz#15fec33b237f97ac5d7c986dc77da273a8ed0bb5" + integrity sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg== + dependencies: + internmap "1 - 2" + +"d3-color@1 - 3": + version "3.1.0" + resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-3.1.0.tgz#395b2833dfac71507f12ac2f7af23bf819de24e2" + integrity sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA== + +d3-ease@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-ease/-/d3-ease-3.0.1.tgz#9658ac38a2140d59d346160f1f6c30fda0bd12f4" + integrity sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w== + +"d3-format@1 - 3": + version "3.1.0" + resolved 
"https://registry.yarnpkg.com/d3-format/-/d3-format-3.1.0.tgz#9260e23a28ea5cb109e93b21a06e24e2ebd55641" + integrity sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA== + +"d3-interpolate@1.2.0 - 3", d3-interpolate@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-3.0.1.tgz#3c47aa5b32c5b3dfb56ef3fd4342078a632b400d" + integrity sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g== + dependencies: + d3-color "1 - 3" + +d3-path@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/d3-path/-/d3-path-3.1.0.tgz#22df939032fb5a71ae8b1800d61ddb7851c42526" + integrity sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ== + +d3-scale@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-4.0.2.tgz#82b38e8e8ff7080764f8dcec77bd4be393689396" + integrity sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ== + dependencies: + d3-array "2.10.0 - 3" + d3-format "1 - 3" + d3-interpolate "1.2.0 - 3" + d3-time "2.1.1 - 3" + d3-time-format "2 - 4" + +d3-shape@^3.1.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/d3-shape/-/d3-shape-3.2.0.tgz#a1a839cbd9ba45f28674c69d7f855bcf91dfc6a5" + integrity sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA== + dependencies: + d3-path "^3.1.0" + +"d3-time-format@2 - 4": + version "4.1.0" + resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-4.1.0.tgz#7ab5257a5041d11ecb4fe70a5c7d16a195bb408a" + integrity sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg== + dependencies: + d3-time "1 - 3" + +"d3-time@1 - 3", "d3-time@2.1.1 - 3", d3-time@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-3.1.0.tgz#9310db56e992e3c0175e1ef385e545e48a9bb5c7" + 
integrity sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q== + dependencies: + d3-array "2 - 3" + +d3-timer@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-timer/-/d3-timer-3.0.1.tgz#6284d2a2708285b1abb7e201eda4380af35e63b0" + integrity sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA== + debug@2.6.9: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" @@ -1275,6 +1434,11 @@ debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.4.1: dependencies: ms "^2.1.3" +decimal.js-light@^2.5.1: + version "2.5.1" + resolved "https://registry.yarnpkg.com/decimal.js-light/-/decimal.js-light-2.5.1.tgz#134fd32508f19e208f4fb2f8dac0d2626a867934" + integrity sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg== + dedent@^1.5.3: version "1.6.0" resolved "https://registry.yarnpkg.com/dedent/-/dedent-1.6.0.tgz#79d52d6389b1ffa67d2bcef59ba51847a9d503b2" @@ -1384,6 +1548,11 @@ es-object-atoms@^1.0.0, es-object-atoms@^1.1.1: dependencies: es-errors "^1.3.0" +es-toolkit@^1.39.3: + version "1.43.0" + resolved "https://registry.yarnpkg.com/es-toolkit/-/es-toolkit-1.43.0.tgz#2c278d55ffeb30421e6e73a009738ed37b10ef61" + integrity sha512-SKCT8AsWvYzBBuUqMk4NPwFlSdqLpJwmy6AP322ERn8W2YLIB6JBXnwMI2Qsh2gfphT3q7EKAxKb23cvFHFwKA== + esbuild@^0.25.0, "esbuild@npm:esbuild@>=0.17.6 <0.26.0": version "0.25.5" resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.25.5.tgz#71075054993fdfae76c66586f9b9c1f8d7edd430" @@ -1438,6 +1607,11 @@ eval@0.1.8: "@types/node" "*" require-like ">= 0.1.1" +eventemitter3@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-5.0.1.tgz#53f5ffd0a492ac800721bb42c66b841de96423c4" + integrity sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA== + exit-hook@2.2.1: version 
"2.2.1" resolved "https://registry.yarnpkg.com/exit-hook/-/exit-hook-2.2.1.tgz#007b2d92c6428eda2b76e7016a34351586934593" @@ -1646,11 +1820,26 @@ iconv-lite@0.4.24: dependencies: safer-buffer ">= 2.1.2 < 3" +immer@^10.1.1: + version "10.2.0" + resolved "https://registry.yarnpkg.com/immer/-/immer-10.2.0.tgz#88a4ce06a1af64172d254b70f7cb04df51c871b1" + integrity sha512-d/+XTN3zfODyjr89gM3mPq1WNX2B8pYsu7eORitdwyA2sBubnTl3laYlBk4sXY5FUa5qTZGBDPJICVbvqzjlbw== + +immer@^11.0.0: + version "11.1.3" + resolved "https://registry.yarnpkg.com/immer/-/immer-11.1.3.tgz#78681e1deb6cec39753acf04eb16d7576c04f4d6" + integrity sha512-6jQTc5z0KJFtr1UgFpIL3N9XSC3saRaI9PwWtzM2pSqkNGtiNkYY2OSwkOGDK2XcTRcLb1pi/aNkKZz0nxVH4Q== + inherits@2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== +"internmap@1 - 2": + version "2.0.3" + resolved "https://registry.yarnpkg.com/internmap/-/internmap-2.0.3.tgz#6685f23755e43c524e251d29cbc97248e3061009" + integrity sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg== + ipaddr.js@1.9.1: version "1.9.1" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" @@ -2180,6 +2369,19 @@ react-dom@^19.1.0: dependencies: scheduler "^0.26.0" +react-is@^19.2.3: + version "19.2.3" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-19.2.3.tgz#eec2feb69c7fb31f77d0b5c08c10ae1c88886b29" + integrity sha512-qJNJfu81ByyabuG7hPFEbXqNcWSU3+eVus+KJs+0ncpGfMyYdvSmxiJxbWR65lYi1I+/0HBcliO029gc4F+PnA== + +"react-redux@8.x.x || 9.x.x": + version "9.2.0" + resolved "https://registry.yarnpkg.com/react-redux/-/react-redux-9.2.0.tgz#96c3ab23fb9a3af2cb4654be4b51c989e32366f5" + integrity sha512-ROY9fvHhwOD9ySfrF0wmvu//bKCQ6AeZZq1nJNtbDC+kk5DuSuNX/n6YWYF/SYy7bSba4D4FSz8DJeKY/S/r+g== + dependencies: 
+ "@types/use-sync-external-store" "^0.0.6" + use-sync-external-store "^1.4.0" + react-refresh@^0.14.0: version "0.14.2" resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.14.2.tgz#3833da01ce32da470f1f936b9d477da5c7028bf9" @@ -2203,11 +2405,43 @@ readdirp@^4.0.1: resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-4.1.2.tgz#eb85801435fbf2a7ee58f19e0921b068fc69948d" integrity sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg== +recharts@^3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/recharts/-/recharts-3.6.0.tgz#403f0606581153601857e46733277d1411633df3" + integrity sha512-L5bjxvQRAe26RlToBAziKUB7whaGKEwD3znoM6fz3DrTowCIC/FnJYnuq1GEzB8Zv2kdTfaxQfi5GoH0tBinyg== + dependencies: + "@reduxjs/toolkit" "1.x.x || 2.x.x" + clsx "^2.1.1" + decimal.js-light "^2.5.1" + es-toolkit "^1.39.3" + eventemitter3 "^5.0.1" + immer "^10.1.1" + react-redux "8.x.x || 9.x.x" + reselect "5.1.1" + tiny-invariant "^1.3.3" + use-sync-external-store "^1.2.2" + victory-vendor "^37.0.2" + +redux-thunk@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/redux-thunk/-/redux-thunk-3.1.0.tgz#94aa6e04977c30e14e892eae84978c1af6058ff3" + integrity sha512-NW2r5T6ksUKXCabzhL9z+h206HQw/NJkcLm1GPImRQ8IzfXwRGqjVhKJGauHirT0DAuyy6hjdnMZaRoAcy0Klw== + +redux@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/redux/-/redux-5.0.1.tgz#97fa26881ce5746500125585d5642c77b6e9447b" + integrity sha512-M9/ELqF6fy8FwmkpnF0S3YKOqMyoWJ4+CS5Efg2ct3oY9daQvd/Pc71FpGZsVsbl3Cpb+IIcjBDUnnyBdQbq4w== + "require-like@>= 0.1.1": version "0.1.2" resolved "https://registry.yarnpkg.com/require-like/-/require-like-0.1.2.tgz#ad6f30c13becd797010c468afa775c0c0a6b47fa" integrity sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A== +reselect@5.1.1, reselect@^5.1.0: + version "5.1.1" + resolved 
"https://registry.yarnpkg.com/reselect/-/reselect-5.1.1.tgz#c766b1eb5d558291e5e550298adb0becc24bb72e" + integrity sha512-K/BG6eIky/SBpzfHZv/dd+9JBFiS4SWV7FIujVyJRux6e45+73RaUHXLmIR1f7WOMaQ0U1km6qwklRQxpJJY0w== + retry@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" @@ -2492,6 +2726,11 @@ tar@^7.4.3: mkdirp "^3.0.1" yallist "^5.0.0" +tiny-invariant@^1.3.3: + version "1.3.3" + resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.3.3.tgz#46680b7a873a0d5d10005995eb90a70d74d60127" + integrity sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg== + tinyglobby@^0.2.13: version "0.2.14" resolved "https://registry.yarnpkg.com/tinyglobby/-/tinyglobby-0.2.14.tgz#5280b0cf3f972b050e74ae88406c0a6a58f4079d" @@ -2566,6 +2805,11 @@ update-browserslist-db@^1.1.3: escalade "^3.2.0" picocolors "^1.1.1" +use-sync-external-store@^1.2.2, use-sync-external-store@^1.4.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/use-sync-external-store/-/use-sync-external-store-1.6.0.tgz#b174bfa65cb2b526732d9f2ac0a408027876f32d" + integrity sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w== + utils-merge@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" @@ -2594,6 +2838,26 @@ vary@~1.1.2: resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== +victory-vendor@^37.0.2: + version "37.3.6" + resolved "https://registry.yarnpkg.com/victory-vendor/-/victory-vendor-37.3.6.tgz#401ac4b029a0b3d33e0cba8e8a1d765c487254da" + integrity sha512-SbPDPdDBYp+5MJHhBCAyI7wKM3d5ivekigc2Dk2s7pgbZ9wIgIBYGVw4zGHBml/qTFbexrofXW6Gu4noGxrOwQ== + dependencies: + "@types/d3-array" "^3.0.3" + 
"@types/d3-ease" "^3.0.0" + "@types/d3-interpolate" "^3.0.1" + "@types/d3-scale" "^4.0.2" + "@types/d3-shape" "^3.1.0" + "@types/d3-time" "^3.0.0" + "@types/d3-timer" "^3.0.0" + d3-array "^3.1.6" + d3-ease "^3.0.1" + d3-interpolate "^3.0.1" + d3-scale "^4.0.2" + d3-shape "^3.1.0" + d3-time "^3.0.0" + d3-timer "^3.0.1" + vite-node@^3.1.4, vite-node@^3.2.2: version "3.2.3" resolved "https://registry.yarnpkg.com/vite-node/-/vite-node-3.2.3.tgz#1c5a2282fe100114c26fd221daf506e69d392a36" diff --git a/db/migrations/000005_rm_orphan_artist_releases.sql b/db/migrations/000005_rm_orphan_artist_releases.sql new file mode 100644 index 0000000..bfb361f --- /dev/null +++ b/db/migrations/000005_rm_orphan_artist_releases.sql @@ -0,0 +1,9 @@ +-- +goose Up +DELETE FROM artist_releases ar +WHERE NOT EXISTS ( + SELECT 1 + FROM artist_tracks at + JOIN tracks t ON at.track_id = t.id + WHERE at.artist_id = ar.artist_id + AND t.release_id = ar.release_id +); diff --git a/db/queries/artist.sql b/db/queries/artist.sql index e20326d..70a2fdd 100644 --- a/db/queries/artist.sql +++ b/db/queries/artist.sql @@ -56,22 +56,60 @@ LEFT JOIN artist_aliases aa ON a.id = aa.artist_id WHERE a.musicbrainz_id = $1 GROUP BY a.id, a.musicbrainz_id, a.image, a.image_source, a.name; +-- name: GetArtistsWithoutImages :many +SELECT + * +FROM artists_with_name +WHERE image IS NULL + AND id > $2 +ORDER BY id ASC +LIMIT $1; + -- name: GetTopArtistsPaginated :many SELECT + x.id, + x.name, + x.musicbrainz_id, + x.image, + x.listen_count, + RANK() OVER (ORDER BY x.listen_count DESC) AS rank +FROM ( + SELECT a.id, a.name, a.musicbrainz_id, a.image, COUNT(*) AS listen_count -FROM listens l -JOIN tracks t ON l.track_id = t.id -JOIN artist_tracks at ON at.track_id = t.id -JOIN artists_with_name a ON a.id = at.artist_id -WHERE l.listened_at BETWEEN $1 AND $2 -GROUP BY a.id, a.name, a.musicbrainz_id, a.image, a.image_source, a.name -ORDER BY listen_count DESC, a.id + FROM listens l + JOIN tracks t ON l.track_id = t.id + 
JOIN artist_tracks at ON at.track_id = t.id + JOIN artists_with_name a ON a.id = at.artist_id + WHERE l.listened_at BETWEEN $1 AND $2 + GROUP BY a.id, a.name, a.musicbrainz_id, a.image +) x +ORDER BY x.listen_count DESC, x.id LIMIT $3 OFFSET $4; +-- name: GetArtistAllTimeRank :one +SELECT + artist_id, + rank +FROM ( + SELECT + x.artist_id, + RANK() OVER (ORDER BY x.listen_count DESC) AS rank + FROM ( + SELECT + at.artist_id, + COUNT(*) AS listen_count + FROM listens l + JOIN tracks t ON l.track_id = t.id + JOIN artist_tracks at ON t.id = at.track_id + GROUP BY at.artist_id + ) x + ) +WHERE artist_id = $1; + -- name: CountTopArtists :one SELECT COUNT(DISTINCT at.artist_id) AS total_count FROM listens l diff --git a/db/queries/etc.sql b/db/queries/etc.sql index 44139b8..38465f2 100644 --- a/db/queries/etc.sql +++ b/db/queries/etc.sql @@ -3,7 +3,13 @@ DO $$ BEGIN DELETE FROM tracks WHERE id NOT IN (SELECT l.track_id FROM listens l); DELETE FROM releases WHERE id NOT IN (SELECT t.release_id FROM tracks t); --- DELETE FROM releases WHERE release_group_id NOT IN (SELECT t.release_group_id FROM tracks t); --- DELETE FROM releases WHERE release_group_id NOT IN (SELECT rg.id FROM release_groups rg); DELETE FROM artists WHERE id NOT IN (SELECT at.artist_id FROM artist_tracks at); + DELETE FROM artist_releases ar + WHERE NOT EXISTS ( + SELECT 1 + FROM artist_tracks at + JOIN tracks t ON at.track_id = t.id + WHERE at.artist_id = ar.artist_id + AND t.release_id = ar.release_id + ); END $$; diff --git a/db/queries/interest.sql b/db/queries/interest.sql new file mode 100644 index 0000000..874f4cd --- /dev/null +++ b/db/queries/interest.sql @@ -0,0 +1,139 @@ +-- name: GetGroupedListensFromArtist :many +WITH bounds AS ( + SELECT + MIN(l.listened_at) AS start_time, + NOW() AS end_time + FROM listens l + JOIN tracks t ON t.id = l.track_id + JOIN artist_tracks at ON at.track_id = t.id + WHERE at.artist_id = $1 +), +stats AS ( + SELECT + start_time, + end_time, + EXTRACT(EPOCH FROM 
(end_time - start_time)) AS total_seconds, + ((end_time - start_time) / sqlc.arg(bucket_count)::int) AS bucket_interval + FROM bounds +), +bucket_series AS ( + SELECT generate_series(0, sqlc.arg(bucket_count)::int - 1) AS idx +), +listen_indices AS ( + SELECT + LEAST( + sqlc.arg(bucket_count)::int - 1, + FLOOR( + (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0)) + * sqlc.arg(bucket_count)::int + )::int + ) AS bucket_idx + FROM listens l + JOIN tracks t ON t.id = l.track_id + JOIN artist_tracks at ON at.track_id = t.id + CROSS JOIN stats s + WHERE at.artist_id = $1 + AND s.start_time IS NOT NULL +) +SELECT + (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS bucket_start, + (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end, + COUNT(li.bucket_idx) AS listen_count +FROM bucket_series bs +CROSS JOIN stats s +LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx +WHERE s.start_time IS NOT NULL +GROUP BY bs.idx, s.start_time, s.bucket_interval +ORDER BY bs.idx; + +-- name: GetGroupedListensFromRelease :many +WITH bounds AS ( + SELECT + MIN(l.listened_at) AS start_time, + NOW() AS end_time + FROM listens l + JOIN tracks t ON t.id = l.track_id + WHERE t.release_id = $1 +), +stats AS ( + SELECT + start_time, + end_time, + EXTRACT(EPOCH FROM (end_time - start_time)) AS total_seconds, + ((end_time - start_time) / sqlc.arg(bucket_count)::int) AS bucket_interval + FROM bounds +), +bucket_series AS ( + SELECT generate_series(0, sqlc.arg(bucket_count)::int - 1) AS idx +), +listen_indices AS ( + SELECT + LEAST( + sqlc.arg(bucket_count)::int - 1, + FLOOR( + (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0)) + * sqlc.arg(bucket_count)::int + )::int + ) AS bucket_idx + FROM listens l + JOIN tracks t ON t.id = l.track_id + CROSS JOIN stats s + WHERE t.release_id = $1 + AND s.start_time IS NOT NULL +) +SELECT + (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS 
bucket_start, + (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end, + COUNT(li.bucket_idx) AS listen_count +FROM bucket_series bs +CROSS JOIN stats s +LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx +WHERE s.start_time IS NOT NULL +GROUP BY bs.idx, s.start_time, s.bucket_interval +ORDER BY bs.idx; + +-- name: GetGroupedListensFromTrack :many +WITH bounds AS ( + SELECT + MIN(l.listened_at) AS start_time, + NOW() AS end_time + FROM listens l + JOIN tracks t ON t.id = l.track_id + WHERE t.id = $1 +), +stats AS ( + SELECT + start_time, + end_time, + EXTRACT(EPOCH FROM (end_time - start_time)) AS total_seconds, + ((end_time - start_time) / sqlc.arg(bucket_count)::int) AS bucket_interval + FROM bounds +), +bucket_series AS ( + SELECT generate_series(0, sqlc.arg(bucket_count)::int - 1) AS idx +), +listen_indices AS ( + SELECT + LEAST( + sqlc.arg(bucket_count)::int - 1, + FLOOR( + (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0)) + * sqlc.arg(bucket_count)::int + )::int + ) AS bucket_idx + FROM listens l + JOIN tracks t ON t.id = l.track_id + CROSS JOIN stats s + WHERE t.id = $1 + AND s.start_time IS NOT NULL +) +SELECT + (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS bucket_start, + (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end, + COUNT(li.bucket_idx) AS listen_count +FROM bucket_series bs +CROSS JOIN stats s +LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx +WHERE s.start_time IS NOT NULL +GROUP BY bs.idx, s.start_time, s.bucket_interval +ORDER BY bs.idx; diff --git a/db/queries/listen.sql b/db/queries/listen.sql index fc8c502..fab9687 100644 --- a/db/queries/listen.sql +++ b/db/queries/listen.sql @@ -4,7 +4,7 @@ VALUES ($1, $2, $3, $4) ON CONFLICT DO NOTHING; -- name: GetLastListensPaginated :many -SELECT +SELECT l.*, t.title AS track_title, t.release_id AS release_id, @@ -16,31 +16,31 @@ ORDER BY l.listened_at DESC LIMIT $3 OFFSET $4; -- name: 
GetLastListensFromArtistPaginated :many -SELECT +SELECT l.*, t.title AS track_title, t.release_id AS release_id, get_artists_for_track(t.id) AS artists FROM listens l JOIN tracks_with_title t ON l.track_id = t.id -JOIN artist_tracks at ON t.id = at.track_id +JOIN artist_tracks at ON t.id = at.track_id WHERE at.artist_id = $5 AND l.listened_at BETWEEN $1 AND $2 ORDER BY l.listened_at DESC LIMIT $3 OFFSET $4; -- name: GetFirstListenFromArtist :one -SELECT +SELECT l.* FROM listens l JOIN tracks_with_title t ON l.track_id = t.id -JOIN artist_tracks at ON t.id = at.track_id +JOIN artist_tracks at ON t.id = at.track_id WHERE at.artist_id = $1 ORDER BY l.listened_at ASC LIMIT 1; -- name: GetLastListensFromReleasePaginated :many -SELECT +SELECT l.*, t.title AS track_title, t.release_id AS release_id, @@ -53,7 +53,7 @@ ORDER BY l.listened_at DESC LIMIT $3 OFFSET $4; -- name: GetFirstListenFromRelease :one -SELECT +SELECT l.* FROM listens l JOIN tracks t ON l.track_id = t.id @@ -62,7 +62,7 @@ ORDER BY l.listened_at ASC LIMIT 1; -- name: GetLastListensFromTrackPaginated :many -SELECT +SELECT l.*, t.title AS track_title, t.release_id AS release_id, @@ -75,7 +75,7 @@ ORDER BY l.listened_at DESC LIMIT $3 OFFSET $4; -- name: GetFirstListenFromTrack :one -SELECT +SELECT l.* FROM listens l JOIN tracks t ON l.track_id = t.id @@ -83,6 +83,13 @@ WHERE t.id = $1 ORDER BY l.listened_at ASC LIMIT 1; +-- name: GetFirstListen :one +SELECT + * +FROM listens +ORDER BY listened_at ASC +LIMIT 1; + -- name: CountListens :one SELECT COUNT(*) AS total_count FROM listens l @@ -137,90 +144,51 @@ WHERE l.listened_at BETWEEN $1 AND $2 AND t.id = $3; -- name: ListenActivity :many -WITH buckets AS ( - SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start -), -bucketed_listens AS ( - SELECT - b.bucket_start, - COUNT(l.listened_at) AS listen_count - FROM buckets b - LEFT JOIN listens l - ON l.listened_at >= b.bucket_start - AND l.listened_at < b.bucket_start + 
$3::interval - GROUP BY b.bucket_start - ORDER BY b.bucket_start -) -SELECT * FROM bucketed_listens; +SELECT + (listened_at AT TIME ZONE $1::text)::date as day, + COUNT(*) AS listen_count +FROM listens +WHERE listened_at >= $2 +AND listened_at < $3 +GROUP BY day +ORDER BY day; -- name: ListenActivityForArtist :many -WITH buckets AS ( - SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start -), -filtered_listens AS ( - SELECT l.* - FROM listens l - JOIN artist_tracks t ON l.track_id = t.track_id - WHERE t.artist_id = $4 -), -bucketed_listens AS ( - SELECT - b.bucket_start, - COUNT(l.listened_at) AS listen_count - FROM buckets b - LEFT JOIN filtered_listens l - ON l.listened_at >= b.bucket_start - AND l.listened_at < b.bucket_start + $3::interval - GROUP BY b.bucket_start - ORDER BY b.bucket_start -) -SELECT * FROM bucketed_listens; +SELECT + (listened_at AT TIME ZONE $1::text)::date as day, + COUNT(*) AS listen_count +FROM listens l +JOIN tracks t ON l.track_id = t.id +JOIN artist_tracks at ON t.id = at.track_id +WHERE l.listened_at >= $2 +AND l.listened_at < $3 +AND at.artist_id = $4 +GROUP BY day +ORDER BY day; -- name: ListenActivityForRelease :many -WITH buckets AS ( - SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start -), -filtered_listens AS ( - SELECT l.* - FROM listens l - JOIN tracks t ON l.track_id = t.id - WHERE t.release_id = $4 -), -bucketed_listens AS ( - SELECT - b.bucket_start, - COUNT(l.listened_at) AS listen_count - FROM buckets b - LEFT JOIN filtered_listens l - ON l.listened_at >= b.bucket_start - AND l.listened_at < b.bucket_start + $3::interval - GROUP BY b.bucket_start - ORDER BY b.bucket_start -) -SELECT * FROM bucketed_listens; +SELECT + (listened_at AT TIME ZONE $1::text)::date as day, + COUNT(*) AS listen_count +FROM listens l +JOIN tracks t ON l.track_id = t.id +WHERE l.listened_at >= $2 +AND l.listened_at < $3 +AND t.release_id = $4 +GROUP BY day +ORDER BY day; -- name: 
ListenActivityForTrack :many -WITH buckets AS ( - SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start -), -filtered_listens AS ( - SELECT l.* - FROM listens l - JOIN tracks t ON l.track_id = t.id - WHERE t.id = $4 -), -bucketed_listens AS ( - SELECT - b.bucket_start, - COUNT(l.listened_at) AS listen_count - FROM buckets b - LEFT JOIN filtered_listens l - ON l.listened_at >= b.bucket_start - AND l.listened_at < b.bucket_start + $3::interval - GROUP BY b.bucket_start - ORDER BY b.bucket_start -) -SELECT * FROM bucketed_listens; +SELECT + (listened_at AT TIME ZONE $1::text)::date as day, + COUNT(*) AS listen_count +FROM listens l +JOIN tracks t ON l.track_id = t.id +WHERE l.listened_at >= $2 +AND l.listened_at < $3 +AND t.id = $4 +GROUP BY day +ORDER BY day; -- name: UpdateTrackIdForListens :exec UPDATE listens SET track_id = $2 diff --git a/db/queries/release.sql b/db/queries/release.sql index 86727f4..23bd2f2 100644 --- a/db/queries/release.sql +++ b/db/queries/release.sql @@ -32,34 +32,76 @@ JOIN artist_releases ar ON r.id = ar.release_id WHERE r.title = ANY ($1::TEXT[]) AND ar.artist_id = $2 LIMIT 1; +-- name: GetReleaseByArtistAndTitlesNoMbzID :one +SELECT r.* +FROM releases_with_title r +JOIN artist_releases ar ON r.id = ar.release_id +WHERE r.title = ANY ($1::TEXT[]) + AND ar.artist_id = $2 + AND EXISTS ( + SELECT 1 + FROM releases r2 + WHERE r2.id = r.id + AND r2.musicbrainz_id IS NULL + ); + -- name: GetTopReleasesFromArtist :many SELECT - r.*, - COUNT(*) AS listen_count, - get_artists_for_release(r.id) AS artists -FROM listens l -JOIN tracks t ON l.track_id = t.id -JOIN releases_with_title r ON t.release_id = r.id -JOIN artist_releases ar ON r.id = ar.release_id -WHERE ar.artist_id = $5 - AND l.listened_at BETWEEN $1 AND $2 -GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source -ORDER BY listen_count DESC, r.id + x.*, + get_artists_for_release(x.id) AS artists, + RANK() OVER (ORDER BY 
x.listen_count DESC) AS rank +FROM ( + SELECT + r.*, + COUNT(*) AS listen_count + FROM listens l + JOIN tracks t ON l.track_id = t.id + JOIN releases_with_title r ON t.release_id = r.id + JOIN artist_releases ar ON r.id = ar.release_id + WHERE ar.artist_id = $5 + AND l.listened_at BETWEEN $1 AND $2 + GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source +) x +ORDER BY listen_count DESC, x.id LIMIT $3 OFFSET $4; -- name: GetTopReleasesPaginated :many SELECT - r.*, - COUNT(*) AS listen_count, - get_artists_for_release(r.id) AS artists -FROM listens l -JOIN tracks t ON l.track_id = t.id -JOIN releases_with_title r ON t.release_id = r.id -WHERE l.listened_at BETWEEN $1 AND $2 -GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source -ORDER BY listen_count DESC, r.id + x.*, + get_artists_for_release(x.id) AS artists, + RANK() OVER (ORDER BY x.listen_count DESC) AS rank +FROM ( + SELECT + r.*, + COUNT(*) AS listen_count + FROM listens l + JOIN tracks t ON l.track_id = t.id + JOIN releases_with_title r ON t.release_id = r.id + WHERE l.listened_at BETWEEN $1 AND $2 + GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source +) x +ORDER BY listen_count DESC, x.id LIMIT $3 OFFSET $4; +-- name: GetReleaseAllTimeRank :one +SELECT + release_id, + rank +FROM ( + SELECT + x.release_id, + RANK() OVER (ORDER BY x.listen_count DESC) AS rank + FROM ( + SELECT + t.release_id, + COUNT(*) AS listen_count + FROM listens l + JOIN tracks t ON l.track_id = t.id + GROUP BY t.release_id + ) x + ) +WHERE release_id = $1; + -- name: CountTopReleases :one SELECT COUNT(DISTINCT r.id) AS total_count FROM listens l diff --git a/db/queries/track.sql b/db/queries/track.sql index a9fc425..3be4d7e 100644 --- a/db/queries/track.sql +++ b/db/queries/track.sql @@ -27,68 +27,112 @@ FROM tracks_with_title t JOIN artist_tracks at ON t.id = at.track_id WHERE at.artist_id = $1; --- name: GetTrackByTitleAndArtists :one +-- 
name: GetTrackByTrackInfo :one SELECT t.* FROM tracks_with_title t JOIN artist_tracks at ON at.track_id = t.id WHERE t.title = $1 - AND at.artist_id = ANY($2::int[]) + AND at.artist_id = ANY($3::int[]) + AND t.release_id = $2 GROUP BY t.id, t.title, t.musicbrainz_id, t.duration, t.release_id -HAVING COUNT(DISTINCT at.artist_id) = cardinality($2::int[]); +HAVING COUNT(DISTINCT at.artist_id) = cardinality($3::int[]); -- name: GetTopTracksPaginated :many SELECT - t.id, + x.track_id AS id, t.title, t.musicbrainz_id, t.release_id, r.image, - COUNT(*) AS listen_count, - get_artists_for_track(t.id) AS artists -FROM listens l -JOIN tracks_with_title t ON l.track_id = t.id + x.listen_count, + get_artists_for_track(x.track_id) AS artists, + x.rank +FROM ( + SELECT + track_id, + COUNT(*) AS listen_count, + RANK() OVER (ORDER BY COUNT(*) DESC) as rank + FROM listens + WHERE listened_at BETWEEN $1 AND $2 + GROUP BY track_id + ORDER BY listen_count DESC + LIMIT $3 OFFSET $4 +) x +JOIN tracks_with_title t ON x.track_id = t.id JOIN releases r ON t.release_id = r.id -WHERE l.listened_at BETWEEN $1 AND $2 -GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image -ORDER BY listen_count DESC, t.id -LIMIT $3 OFFSET $4; +ORDER BY x.listen_count DESC, x.track_id; -- name: GetTopTracksByArtistPaginated :many SELECT - t.id, + x.track_id AS id, t.title, t.musicbrainz_id, t.release_id, r.image, - COUNT(*) AS listen_count, - get_artists_for_track(t.id) AS artists -FROM listens l -JOIN tracks_with_title t ON l.track_id = t.id + x.listen_count, + get_artists_for_track(x.track_id) AS artists, + x.rank +FROM ( + SELECT + l.track_id, + COUNT(*) AS listen_count, + RANK() OVER (ORDER BY COUNT(*) DESC) as rank + FROM listens l + JOIN artist_tracks at ON l.track_id = at.track_id + WHERE l.listened_at BETWEEN $1 AND $2 + AND at.artist_id = $5 + GROUP BY l.track_id + ORDER BY listen_count DESC + LIMIT $3 OFFSET $4 +) x +JOIN tracks_with_title t ON x.track_id = t.id JOIN releases r ON t.release_id 
= r.id -JOIN artist_tracks at ON at.track_id = t.id -WHERE l.listened_at BETWEEN $1 AND $2 - AND at.artist_id = $5 -GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image -ORDER BY listen_count DESC, t.id -LIMIT $3 OFFSET $4; +ORDER BY x.listen_count DESC, x.track_id; -- name: GetTopTracksInReleasePaginated :many SELECT - t.id, + x.track_id AS id, t.title, t.musicbrainz_id, t.release_id, r.image, - COUNT(*) AS listen_count, - get_artists_for_track(t.id) AS artists -FROM listens l -JOIN tracks_with_title t ON l.track_id = t.id + x.listen_count, + get_artists_for_track(x.track_id) AS artists, + x.rank +FROM ( + SELECT + l.track_id, + COUNT(*) AS listen_count, + RANK() OVER (ORDER BY COUNT(*) DESC) as rank + FROM listens l + JOIN tracks t ON l.track_id = t.id + WHERE l.listened_at BETWEEN $1 AND $2 + AND t.release_id = $5 + GROUP BY l.track_id + ORDER BY listen_count DESC + LIMIT $3 OFFSET $4 +) x +JOIN tracks_with_title t ON x.track_id = t.id JOIN releases r ON t.release_id = r.id -WHERE l.listened_at BETWEEN $1 AND $2 - AND t.release_id = $5 -GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image -ORDER BY listen_count DESC, t.id -LIMIT $3 OFFSET $4; +ORDER BY x.listen_count DESC, x.track_id; + +-- name: GetTrackAllTimeRank :one +SELECT + id, + rank +FROM ( + SELECT + x.id, + RANK() OVER (ORDER BY x.listen_count DESC) AS rank + FROM ( + SELECT + t.id, + COUNT(*) AS listen_count + FROM listens l + JOIN tracks_with_title t ON l.track_id = t.id + GROUP BY t.id) x + ) y +WHERE id = $1; -- name: CountTopTracks :one SELECT COUNT(DISTINCT l.track_id) AS total_count @@ -136,3 +180,13 @@ WHERE artist_id = $1 AND track_id = $2; -- name: DeleteTrack :exec DELETE FROM tracks WHERE id = $1; + +-- name: GetTracksWithNoDurationButHaveMbzID :many +SELECT + * +FROM tracks_with_title +WHERE duration = 0 + AND musicbrainz_id IS NOT NULL + AND id > $2 +ORDER BY id ASC +LIMIT $1; diff --git a/docs/astro.config.mjs b/docs/astro.config.mjs index 845acb4..7875016 100644 
--- a/docs/astro.config.mjs +++ b/docs/astro.config.mjs @@ -1,57 +1,69 @@ // @ts-check -import { defineConfig } from 'astro/config'; -import starlight from '@astrojs/starlight'; +import { defineConfig } from "astro/config"; +import starlight from "@astrojs/starlight"; -import tailwindcss from '@tailwindcss/vite'; +import tailwindcss from "@tailwindcss/vite"; // https://astro.build/config export default defineConfig({ integrations: [ - starlight({ - head: [ - { - tag: 'script', - attrs: { - src: 'https://static.cloudflareinsights.com/beacon.min.js', - 'data-cf-beacon': '{"token": "1948caaaba10463fa1d310ee02b0951c"}', - defer: true, - } - } - ], - title: 'Koito', - logo: { - src: './src/assets/logo_text.png', - replacesTitle: true, + starlight({ + head: [ + { + tag: "script", + attrs: { + src: "https://static.cloudflareinsights.com/beacon.min.js", + "data-cf-beacon": '{"token": "1948caaaba10463fa1d310ee02b0951c"}', + defer: true, }, - social: [{ icon: 'github', label: 'GitHub', href: 'https://github.com/gabehf/koito' }], - sidebar: [ - { - label: 'Guides', - items: [ - // Each item here is one entry in the navigation menu. - { label: 'Installation', slug: 'guides/installation' }, - { label: 'Importing Data', slug: 'guides/importing' }, - { label: 'Setting up the Scrobbler', slug: 'guides/scrobbler' }, - { label: 'Editing Data', slug: 'guides/editing' }, - ], - }, - { - label: 'Reference', - items: [ - { label: 'Configuration Options', slug: 'reference/configuration' }, - ] - }, + }, + ], + title: "Koito", + logo: { + src: "./src/assets/logo_text.png", + replacesTitle: true, + }, + social: [ + { + icon: "github", + label: "GitHub", + href: "https://github.com/gabehf/koito", + }, + ], + sidebar: [ + { + label: "Guides", + items: [ + // Each item here is one entry in the navigation menu. 
+ { label: "Installation", slug: "guides/installation" }, + { label: "Importing Data", slug: "guides/importing" }, + { label: "Setting up the Scrobbler", slug: "guides/scrobbler" }, + { label: "Editing Data", slug: "guides/editing" }, ], - customCss: [ - // Path to your Tailwind base styles: - './src/styles/global.css', - ], - }), - ], + }, + { + label: "Quickstart", + items: [ + { label: "Setup with Navidrome", slug: "quickstart/navidrome" }, + ], + }, + { + label: "Reference", + items: [ + { label: "Configuration Options", slug: "reference/configuration" }, + ], + }, + ], + customCss: [ + // Path to your Tailwind base styles: + "./src/styles/global.css", + ], + }), + ], site: "https://koito.io", vite: { plugins: [tailwindcss()], }, -}); \ No newline at end of file +}); diff --git a/docs/src/assets/navidrome_lbz_switch.png b/docs/src/assets/navidrome_lbz_switch.png new file mode 100644 index 0000000..a8b44be Binary files /dev/null and b/docs/src/assets/navidrome_lbz_switch.png differ diff --git a/docs/src/content/docs/index.mdx b/docs/src/content/docs/index.mdx index a4d1858..f590ebb 100644 --- a/docs/src/content/docs/index.mdx +++ b/docs/src/content/docs/index.mdx @@ -28,7 +28,7 @@ import { Card, CardGrid } from '@astrojs/starlight/components'; Koito can be connected to any music server or client that allows for custom ListenBrainz URLs. - Automatically relay listens submitted to your Koito instance to other ListenBrainz compatble servers. + Automatically relay listens submitted to your Koito instance to other ListenBrainz compatible servers. Koito automatically fetches data from MusicBrainz and images from Deezer and Cover Art Archive to compliment what is provided by your music server. 
diff --git a/docs/src/content/docs/quickstart/navidrome.md b/docs/src/content/docs/quickstart/navidrome.md new file mode 100644 index 0000000..b24bdb8 --- /dev/null +++ b/docs/src/content/docs/quickstart/navidrome.md @@ -0,0 +1,68 @@ +--- +title: Navidrome Quickstart +description: How to set up Koito to work with your Navidrome instance. +--- + +## Configure Koito +This quickstart assumes you are using Docker compose. Below is an example file, adjusted from the actual file I use personally. +```yaml title="compose.yaml" +services: + koito: + image: gabehf/koito:latest + container_name: koito + depends_on: + - db + user: 1000:1000 + environment: + - KOITO_DATABASE_URL=postgres://postgres:@db:5432/koitodb + - KOITO_ALLOWED_HOSTS=koito.mydomain.com,192.168.1.100 + - KOITO_SUBSONIC_URL=https://navidrome.mydomain.com # the url to your navidrome instance + - KOITO_SUBSONIC_PARAMS=u=&t=&s= + - KOITO_DEFAULT_THEME=black # i like this theme, use whatever you want + ports: + - "4110:4110" + volumes: + - ./koito-data:/etc/koito + restart: unless-stopped + + db: + user: 1000:1000 + image: postgres:16 + container_name: psql + restart: unless-stopped + environment: + POSTGRES_DB: koitodb + POSTGRES_USER: postgres + POSTGRES_PASSWORD: + volumes: + - ./db-data:/var/lib/postgresql/data +``` + +### How do I get the Subsonic params? +The easiest way to get your Subsonic parameters is to open your browser and sign into Navidrome, then press F12 to get to +the developer options and navigate to the **Network** tab. Find a `getCoverArt` request (there should be a lot on the home +page) and look for the part of the URL that looks like `u=&t=&s=`. This +is what you need to copy and provide to Koito. +:::note +If you don't want to use Navidrome to provide images to Koito, you can skip the `KOITO_SUBSONIC_URL` and `KOITO_SUBSONIC_PARAMS` +variables entirely.
+::: + +## Configure Navidrome +You have to provide Navidrome with the environment variables `ND_LISTENBRAINZ_ENABLED=true` and +`ND_LISTENBRAINZ_BASEURL=/apis/listenbrainz/1`. The place where you edit these environment variables will change +depending on how you have chosen to deploy Navidrome. + +## Enable ListenBrainz in Navidrome +In Navidrome, click on **Settings** in the top right, then click **Personal**. + +Here, you will see that **Scrobble to ListenBrainz** is turned off. Flip that switch on. +![navidrome listenbrainz switch screenshot](../../../assets/navidrome_lbz_switch.png) + +When you flip it on, Navidrome will prompt you for a ListenBrainz token. To get this token, open your Koito page and sign in. +Press the settings button (or hit `\`) and go to the **API Keys** tab. Copy the autogenerated API key by either clicking the +copy button, or clicking on the key itself and copying with ctrl+c. + +After hitting **Save** in Navidrome, your listen activity will start being sent to Koito as you listen to tracks. + +Happy scrobbling! diff --git a/docs/src/content/docs/reference/configuration.md b/docs/src/content/docs/reference/configuration.md index 4e806a0..2af573c 100644 --- a/docs/src/content/docs/reference/configuration.md +++ b/docs/src/content/docs/reference/configuration.md @@ -64,6 +64,8 @@ If the environment variable is defined without **and** with the suffix at the sa ##### KOITO_CONFIG_DIR - Default: `/etc/koito` - Description: The location where import folders and image caches are stored. +##### KOITO_FORCE_TZ +- Description: A canonical IANA database time zone name (https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) that Koito will use to serve all clients. Overrides any timezones requested via a `tz` cookie or `tz` query parameter. Koito will fail to start if this value is invalid. ##### KOITO_DISABLE_DEEZER - Default: `false` - Description: Disables Deezer as a source for finding artist and album images.
@@ -78,6 +80,13 @@ If the environment variable is defined without **and** with the suffix at the sa ##### KOITO_SUBSONIC_PARAMS - Required: `true` if KOITO_SUBSONIC_URL is set - Description: The `u`, `t`, and `s` authentication parameters to use for authenticated requests to your subsonic server, in the format `u=XXX&t=XXX&s=XXX`. An easy way to find them is to open the network tab in the developer tools of your browser of choice and copy them from a request. +:::caution +If Koito is unable to validate your Subsonic configuration, it will fail to start. If you notice your container isn't running after +changing these parameters, check the logs! +::: +##### KOITO_LASTFM_API_KEY +- Required: `false` +- Description: Your LastFM API key, which will be used for fetching images if provided. You can get an API key [here](https://www.last.fm/api/authentication). ##### KOITO_SKIP_IMPORT - Default: `false` - Description: Skips running the importer on startup. diff --git a/engine/engine.go b/engine/engine.go index b8e01b8..979667e 100644 --- a/engine/engine.go +++ b/engine/engine.go @@ -2,6 +2,7 @@ package engine import ( "context" + "encoding/json" "fmt" "io" "net/http" @@ -95,6 +96,10 @@ func Run( defer store.Close(ctx) l.Info().Msg("Engine: Database connection established") + if cfg.ForceTZ() != nil { + l.Debug().Msgf("Engine: Forcing the use of timezone '%s'", cfg.ForceTZ().String()) + } + l.Debug().Msg("Engine: Initializing MusicBrainz client") var mbzC mbz.MusicBrainzCaller if !cfg.MusicBrainzDisabled() { @@ -105,12 +110,39 @@ func Run( l.Warn().Msg("Engine: MusicBrainz client disabled") } + if cfg.SubsonicEnabled() { + l.Debug().Msg("Engine: Checking Subsonic configuration") + pingURL := cfg.SubsonicUrl() + "/rest/ping.view?" + cfg.SubsonicParams() + "&f=json&v=1&c=koito" + + resp, err := http.Get(pingURL) + if err != nil { + l.Fatal().Err(err).Msg("Engine: Failed to contact Subsonic server!
Ensure the provided URL is correct") + } else { + defer resp.Body.Close() + + var result struct { + Response struct { + Status string `json:"status"` + } `json:"subsonic-response"` + } + + if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { + l.Fatal().Err(err).Msg("Engine: Failed to parse Subsonic response") + } else if result.Response.Status != "ok" { + l.Fatal().Msg("Engine: Provided Subsonic credentials are invalid") + } else { + l.Info().Msg("Engine: Subsonic credentials validated successfully") + } + } + } + l.Debug().Msg("Engine: Initializing image sources") images.Initialize(images.ImageSourceOpts{ UserAgent: cfg.UserAgent(), EnableCAA: !cfg.CoverArtArchiveDisabled(), EnableDeezer: !cfg.DeezerDisabled(), EnableSubsonic: cfg.SubsonicEnabled(), + EnableLastFM: cfg.LastFMApiKey() != "", }) l.Info().Msg("Engine: Image sources initialized") @@ -184,6 +216,8 @@ func Run( } }() + l.Info().Msg("Engine: Beginning startup tasks...") + l.Debug().Msg("Engine: Checking import configuration") if !cfg.SkipImport() { go func() { @@ -191,16 +225,14 @@ func Run( }() } - // l.Info().Msg("Creating test export file") - // go func() { - // err := export.ExportData(ctx, "koito", store) - // if err != nil { - // l.Err(err).Msg("Failed to generate export file") - // } - // }() - l.Info().Msg("Engine: Pruning orphaned images") go catalog.PruneOrphanedImages(logger.NewContext(l), store) + l.Info().Msg("Engine: Running duration backfill task") + go catalog.BackfillTrackDurationsFromMusicBrainz(ctx, store, mbzC) + l.Info().Msg("Engine: Attempting to fetch missing artist images") + go catalog.FetchMissingArtistImages(ctx, store) + l.Info().Msg("Engine: Attempting to fetch missing album images") + go catalog.FetchMissingAlbumImages(ctx, store) l.Info().Msg("Engine: Initialization finished") quit := make(chan os.Signal, 1) @@ -221,19 +253,19 @@ func Run( } func RunImporter(l *zerolog.Logger, store db.DB, mbzc mbz.MusicBrainzCaller) { - l.Debug().Msg("Checking for import 
files...") + l.Debug().Msg("Importer: Checking for import files...") files, err := os.ReadDir(path.Join(cfg.ConfigDir(), "import")) if err != nil { - l.Err(err).Msg("Failed to read files from import dir") + l.Err(err).Msg("Importer: Failed to read files from import dir") } if len(files) > 0 { - l.Info().Msg("Files found in import directory. Attempting to import...") + l.Info().Msg("Importer: Files found in import directory. Attempting to import...") } else { return } defer func() { if r := recover(); r != nil { - l.Error().Interface("recover", r).Msg("Panic when importing files") + l.Error().Interface("recover", r).Msg("Importer: Panic when importing files") } }() for _, file := range files { @@ -241,37 +273,37 @@ func RunImporter(l *zerolog.Logger, store db.DB, mbzc mbz.MusicBrainzCaller) { continue } if strings.Contains(file.Name(), "Streaming_History_Audio") { - l.Info().Msgf("Import file %s detecting as being Spotify export", file.Name()) + l.Info().Msgf("Importer: Import file %s detecting as being Spotify export", file.Name()) err := importer.ImportSpotifyFile(logger.NewContext(l), store, file.Name()) if err != nil { - l.Err(err).Msgf("Failed to import file: %s", file.Name()) + l.Err(err).Msgf("Importer: Failed to import file: %s", file.Name()) } } else if strings.Contains(file.Name(), "maloja") { - l.Info().Msgf("Import file %s detecting as being Maloja export", file.Name()) + l.Info().Msgf("Importer: Import file %s detecting as being Maloja export", file.Name()) err := importer.ImportMalojaFile(logger.NewContext(l), store, file.Name()) if err != nil { - l.Err(err).Msgf("Failed to import file: %s", file.Name()) + l.Err(err).Msgf("Importer: Failed to import file: %s", file.Name()) } } else if strings.Contains(file.Name(), "recenttracks") { - l.Info().Msgf("Import file %s detecting as being ghan.nl LastFM export", file.Name()) + l.Info().Msgf("Importer: Import file %s detecting as being ghan.nl LastFM export", file.Name()) err := 
importer.ImportLastFMFile(logger.NewContext(l), store, mbzc, file.Name()) if err != nil { - l.Err(err).Msgf("Failed to import file: %s", file.Name()) + l.Err(err).Msgf("Importer: Failed to import file: %s", file.Name()) } } else if strings.Contains(file.Name(), "listenbrainz") { - l.Info().Msgf("Import file %s detecting as being ListenBrainz export", file.Name()) + l.Info().Msgf("Importer: Import file %s detecting as being ListenBrainz export", file.Name()) err := importer.ImportListenBrainzExport(logger.NewContext(l), store, mbzc, file.Name()) if err != nil { - l.Err(err).Msgf("Failed to import file: %s", file.Name()) + l.Err(err).Msgf("Importer: Failed to import file: %s", file.Name()) } } else if strings.Contains(file.Name(), "koito") { - l.Info().Msgf("Import file %s detecting as being Koito export", file.Name()) + l.Info().Msgf("Importer: Import file %s detecting as being Koito export", file.Name()) err := importer.ImportKoitoFile(logger.NewContext(l), store, file.Name()) if err != nil { - l.Err(err).Msgf("Failed to import file: %s", file.Name()) + l.Err(err).Msgf("Importer: Failed to import file: %s", file.Name()) } } else { - l.Warn().Msgf("File %s not recognized as a valid import file; make sure it is valid and named correctly", file.Name()) + l.Warn().Msgf("Importer: File %s not recognized as a valid import file; make sure it is valid and named correctly", file.Name()) } } } diff --git a/engine/handlers/get_listen_activity.go b/engine/handlers/get_listen_activity.go index 86cf71a..c11ed3e 100644 --- a/engine/handlers/get_listen_activity.go +++ b/engine/handlers/get_listen_activity.go @@ -4,6 +4,7 @@ import ( "net/http" "strconv" "strings" + "time" "github.com/gabehf/koito/internal/db" "github.com/gabehf/koito/internal/logger" @@ -19,7 +20,7 @@ func GetListenActivityHandler(store db.DB) func(w http.ResponseWriter, r *http.R rangeStr := r.URL.Query().Get("range") _range, err := strconv.Atoi(rangeStr) - if err != nil { + if err != nil && rangeStr != "" { 
l.Debug().AnErr("error", err).Msg("GetListenActivityHandler: Invalid range parameter") utils.WriteError(w, "invalid range parameter", http.StatusBadRequest) return @@ -27,7 +28,7 @@ func GetListenActivityHandler(store db.DB) func(w http.ResponseWriter, r *http.R monthStr := r.URL.Query().Get("month") month, err := strconv.Atoi(monthStr) - if err != nil { + if err != nil && monthStr != "" { l.Debug().AnErr("error", err).Msg("GetListenActivityHandler: Invalid month parameter") utils.WriteError(w, "invalid month parameter", http.StatusBadRequest) return @@ -35,7 +36,7 @@ func GetListenActivityHandler(store db.DB) func(w http.ResponseWriter, r *http.R yearStr := r.URL.Query().Get("year") year, err := strconv.Atoi(yearStr) - if err != nil { + if err != nil && yearStr != "" { l.Debug().AnErr("error", err).Msg("GetListenActivityHandler: Invalid year parameter") utils.WriteError(w, "invalid year parameter", http.StatusBadRequest) return @@ -43,7 +44,7 @@ func GetListenActivityHandler(store db.DB) func(w http.ResponseWriter, r *http.R artistIdStr := r.URL.Query().Get("artist_id") artistId, err := strconv.Atoi(artistIdStr) - if err != nil { + if err != nil && artistIdStr != "" { l.Debug().AnErr("error", err).Msg("GetListenActivityHandler: Invalid artist ID parameter") utils.WriteError(w, "invalid artist ID parameter", http.StatusBadRequest) return @@ -51,7 +52,7 @@ func GetListenActivityHandler(store db.DB) func(w http.ResponseWriter, r *http.R albumIdStr := r.URL.Query().Get("album_id") albumId, err := strconv.Atoi(albumIdStr) - if err != nil { + if err != nil && albumIdStr != "" { l.Debug().AnErr("error", err).Msg("GetListenActivityHandler: Invalid album ID parameter") utils.WriteError(w, "invalid album ID parameter", http.StatusBadRequest) return @@ -59,7 +60,7 @@ func GetListenActivityHandler(store db.DB) func(w http.ResponseWriter, r *http.R trackIdStr := r.URL.Query().Get("track_id") trackId, err := strconv.Atoi(trackIdStr) - if err != nil { + if err != nil && 
trackIdStr != "" { l.Debug().AnErr("error", err).Msg("GetListenActivityHandler: Invalid track ID parameter") utils.WriteError(w, "invalid track ID parameter", http.StatusBadRequest) return @@ -85,11 +86,17 @@ func GetListenActivityHandler(store db.DB) func(w http.ResponseWriter, r *http.R Range: _range, Month: month, Year: year, + Timezone: parseTZ(r), AlbumID: int32(albumId), ArtistID: int32(artistId), TrackID: int32(trackId), } + if strings.ToLower(opts.Timezone.String()) == "local" { + opts.Timezone, _ = time.LoadLocation("UTC") + l.Warn().Msg("GetListenActivityHandler: Timezone is unset, using UTC") + } + l.Debug().Msgf("GetListenActivityHandler: Retrieving listen activity with options: %+v", opts) activity, err := store.GetListenActivity(ctx, opts) @@ -99,7 +106,72 @@ func GetListenActivityHandler(store db.DB) func(w http.ResponseWriter, r *http.R return } + activity = processActivity(activity, opts) + l.Debug().Msg("GetListenActivityHandler: Successfully retrieved listen activity") utils.WriteJSON(w, http.StatusOK, activity) } } + +// ngl i hate this +func processActivity( + items []db.ListenActivityItem, + opts db.ListenActivityOpts, +) []db.ListenActivityItem { + from, to := db.ListenActivityOptsToTimes(opts) + + buckets := make(map[string]int64) + + for _, item := range items { + bucketStart := normalizeToStep(item.Start, opts.Step) + key := bucketStart.Format("2006-01-02") + buckets[key] += item.Listens + } + + var result []db.ListenActivityItem + + for t := normalizeToStep(from, opts.Step); t.Before(to); t = addStep(t, opts.Step) { + key := t.Format("2006-01-02") + + result = append(result, db.ListenActivityItem{ + Start: t, + Listens: buckets[key], + }) + } + + return result +} + +func normalizeToStep(t time.Time, step db.StepInterval) time.Time { + switch step { + case db.StepDay: + return time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location()) + + case db.StepWeek: + weekday := int(t.Weekday()) + if weekday == 0 { + weekday = 7 + } + start 
:= t.AddDate(0, 0, -(weekday - 1)) + return time.Date(start.Year(), start.Month(), start.Day(), 0, 0, 0, 0, t.Location()) + + case db.StepMonth: + return time.Date(t.Year(), t.Month(), 1, 0, 0, 0, 0, t.Location()) + + default: + return t + } +} + +func addStep(t time.Time, step db.StepInterval) time.Time { + switch step { + case db.StepDay: + return t.AddDate(0, 0, 1) + case db.StepWeek: + return t.AddDate(0, 0, 7) + case db.StepMonth: + return t.AddDate(0, 1, 0) + default: + return t.AddDate(0, 0, 1) + } +} diff --git a/engine/handlers/get_summary.go b/engine/handlers/get_summary.go index 614a48d..ec15f19 100644 --- a/engine/handlers/get_summary.go +++ b/engine/handlers/get_summary.go @@ -13,7 +13,7 @@ func SummaryHandler(store db.DB) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() l := logger.FromContext(ctx) - l.Debug().Msg("GetTopAlbumsHandler: Received request to retrieve top albums") + l.Debug().Msg("SummaryHandler: Received request to retrieve summary") timeframe := TimeframeFromRequest(r) summary, err := summary.GenerateSummary(ctx, store, 1, timeframe, "") diff --git a/engine/handlers/handlers.go b/engine/handlers/handlers.go index 6364363..78bc228 100644 --- a/engine/handlers/handlers.go +++ b/engine/handlers/handlers.go @@ -6,7 +6,9 @@ import ( "strconv" "strings" "time" + _ "time/tzdata" + "github.com/gabehf/koito/internal/cfg" "github.com/gabehf/koito/internal/db" "github.com/gabehf/koito/internal/logger" ) @@ -37,17 +39,6 @@ func OptsFromRequest(r *http.Request) db.GetItemsOpts { page = 1 } - weekStr := r.URL.Query().Get("week") - week, _ := strconv.Atoi(weekStr) - monthStr := r.URL.Query().Get("month") - month, _ := strconv.Atoi(monthStr) - yearStr := r.URL.Query().Get("year") - year, _ := strconv.Atoi(yearStr) - fromStr := r.URL.Query().Get("from") - from, _ := strconv.Atoi(fromStr) - toStr := r.URL.Query().Get("to") - to, _ := strconv.Atoi(toStr) - artistIdStr := r.URL.Query().Get("artist_id") artistId, _ 
:= strconv.Atoi(artistIdStr) albumIdStr := r.URL.Query().Get("album_id") @@ -55,6 +46,8 @@ func OptsFromRequest(r *http.Request) db.GetItemsOpts { trackIdStr := r.URL.Query().Get("track_id") trackId, _ := strconv.Atoi(trackIdStr) + tf := TimeframeFromRequest(r) + var period db.Period switch strings.ToLower(r.URL.Query().Get("period")) { case "day": @@ -67,108 +60,195 @@ func OptsFromRequest(r *http.Request) db.GetItemsOpts { period = db.PeriodYear case "all_time": period = db.PeriodAllTime - default: - l.Debug().Msgf("OptsFromRequest: Using default value '%s' for period", db.PeriodDay) - period = db.PeriodDay } l.Debug().Msgf("OptsFromRequest: Parsed options: limit=%d, page=%d, week=%d, month=%d, year=%d, from=%d, to=%d, artist_id=%d, album_id=%d, track_id=%d, period=%s", - limit, page, week, month, year, from, to, artistId, albumId, trackId, period) + limit, page, tf.Week, tf.Month, tf.Year, tf.FromUnix, tf.ToUnix, artistId, albumId, trackId, period) return db.GetItemsOpts{ - Limit: limit, - Period: period, - Page: page, - Week: week, - Month: month, - Year: year, - From: int64(from), - To: int64(to), - ArtistID: artistId, - AlbumID: albumId, - TrackID: trackId, + Limit: limit, + Page: page, + Timeframe: tf, + ArtistID: artistId, + AlbumID: albumId, + TrackID: trackId, } } -// Takes a request and returns a db.Timeframe representing the week, month, year, period, or unix -// time range specified by the request parameters func TimeframeFromRequest(r *http.Request) db.Timeframe { - opts := OptsFromRequest(r) - now := time.Now() - loc := now.Location() + q := r.URL.Query() - // if 'from' is set, but 'to' is not set, assume 'to' should be now - if opts.From != 0 && opts.To == 0 { - opts.To = now.Unix() - } - - // YEAR - if opts.Year != 0 && opts.Month == 0 && opts.Week == 0 { - start := time.Date(opts.Year, 1, 1, 0, 0, 0, 0, loc) - end := time.Date(opts.Year+1, 1, 1, 0, 0, 0, 0, loc).Add(-time.Second) - - opts.From = start.Unix() - opts.To = end.Unix() - } - - // MONTH 
(+ optional year) - if opts.Month != 0 { - year := opts.Year - if year == 0 { - year = now.Year() - if int(now.Month()) < opts.Month { - year-- - } + parseInt := func(key string) int { + v := q.Get(key) + if v == "" { + return 0 } - - start := time.Date(year, time.Month(opts.Month), 1, 0, 0, 0, 0, loc) - end := endOfMonth(year, time.Month(opts.Month), loc) - - opts.From = start.Unix() - opts.To = end.Unix() + i, _ := strconv.Atoi(v) + return i } - // WEEK (+ optional year) - if opts.Week != 0 { - year := opts.Year - if year == 0 { - year = now.Year() - - _, currentWeek := now.ISOWeek() - if currentWeek < opts.Week { - year-- - } + parseInt64 := func(key string) int64 { + v := q.Get(key) + if v == "" { + return 0 } - - // ISO week 1 is defined as the week with Jan 4 in it - jan4 := time.Date(year, 1, 4, 0, 0, 0, 0, loc) - week1Start := startOfWeek(jan4) - - start := week1Start.AddDate(0, 0, (opts.Week-1)*7) - end := endOfWeek(start) - - opts.From = start.Unix() - opts.To = end.Unix() + i, _ := strconv.ParseInt(v, 10, 64) + return i } return db.Timeframe{ - Period: opts.Period, - T1u: opts.From, - T2u: opts.To, + Period: db.Period(q.Get("period")), + Year: parseInt("year"), + Month: parseInt("month"), + Week: parseInt("week"), + FromUnix: parseInt64("from"), + ToUnix: parseInt64("to"), + Timezone: parseTZ(r), } } -func startOfWeek(t time.Time) time.Time { - // ISO week: Monday = 1 - weekday := int(t.Weekday()) - if weekday == 0 { // Sunday - weekday = 7 + +func parseTZ(r *http.Request) *time.Location { + + // this map is obviously AI. + // i manually referenced as many links as I could and couldn't find any + // incorrect entries here so hopefully it is all correct. 
 overrides := map[string]string{ + // --- North America --- + "America/Indianapolis": "America/Indiana/Indianapolis", + "America/Knoxville": "America/Indiana/Knox", + "America/Louisville": "America/Kentucky/Louisville", + "America/Montreal": "America/Toronto", + "America/Shiprock": "America/Denver", + "America/Fort_Wayne": "America/Indiana/Indianapolis", + "America/Virgin": "America/Port_of_Spain", + "America/Santa_Isabel": "America/Tijuana", + "America/Ensenada": "America/Tijuana", + "America/Rosario": "America/Argentina/Cordoba", + "America/Jujuy": "America/Argentina/Jujuy", + "America/Mendoza": "America/Argentina/Mendoza", + "America/Catamarca": "America/Argentina/Catamarca", + "America/Cordoba": "America/Argentina/Cordoba", + "America/Buenos_Aires": "America/Argentina/Buenos_Aires", + "America/Coral_Harbour": "America/Atikokan", + "America/Atka": "America/Adak", + "US/Alaska": "America/Anchorage", + "US/Aleutian": "America/Adak", + "US/Arizona": "America/Phoenix", + "US/Central": "America/Chicago", + "US/Eastern": "America/New_York", + "US/East-Indiana": "America/Indiana/Indianapolis", + "US/Hawaii": "Pacific/Honolulu", + "US/Indiana-Starke": "America/Indiana/Knox", + "US/Michigan": "America/Detroit", + "US/Mountain": "America/Denver", + "US/Pacific": "America/Los_Angeles", + "US/Samoa": "Pacific/Pago_Pago", + "Canada/Atlantic": "America/Halifax", + "Canada/Central": "America/Winnipeg", + "Canada/Eastern": "America/Toronto", + "Canada/Mountain": "America/Edmonton", + "Canada/Newfoundland": "America/St_Johns", + "Canada/Pacific": "America/Vancouver", + + // --- Asia --- + "Asia/Calcutta": "Asia/Kolkata", + "Asia/Saigon": "Asia/Ho_Chi_Minh", + "Asia/Katmandu": "Asia/Kathmandu", + "Asia/Rangoon": "Asia/Yangon", + "Asia/Ulan_Bator": "Asia/Ulaanbaatar", + "Asia/Macao": "Asia/Macau", + "Asia/Tel_Aviv": "Asia/Jerusalem", + "Asia/Ashkhabad": "Asia/Ashgabat", + "Asia/Chungking": "Asia/Chongqing", + "Asia/Dacca": "Asia/Dhaka", + "Asia/Istanbul":
"Europe/Istanbul", + "Asia/Kashgar": "Asia/Urumqi", + "Asia/Thimbu": "Asia/Thimphu", + "Asia/Ujung_Pandang": "Asia/Makassar", + "ROC": "Asia/Taipei", + "Iran": "Asia/Tehran", + "Israel": "Asia/Jerusalem", + "Japan": "Asia/Tokyo", + "Singapore": "Asia/Singapore", + "Hongkong": "Asia/Hong_Kong", + + // --- Europe --- + "Europe/Kiev": "Europe/Kyiv", + "Europe/Belfast": "Europe/London", + "Europe/Tiraspol": "Europe/Chisinau", + "Europe/Nicosia": "Asia/Nicosia", + "Europe/Moscow": "Europe/Moscow", + "W-SU": "Europe/Moscow", + "GB": "Europe/London", + "GB-Eire": "Europe/London", + "Eire": "Europe/Dublin", + "Poland": "Europe/Warsaw", + "Portugal": "Europe/Lisbon", + "Turkey": "Europe/Istanbul", + + // --- Australia / Pacific --- + "Australia/ACT": "Australia/Sydney", + "Australia/Canberra": "Australia/Sydney", + "Australia/LHI": "Australia/Lord_Howe", + "Australia/North": "Australia/Darwin", + "Australia/NSW": "Australia/Sydney", + "Australia/Queensland": "Australia/Brisbane", + "Australia/South": "Australia/Adelaide", + "Australia/Tasmania": "Australia/Hobart", + "Australia/Victoria": "Australia/Melbourne", + "Australia/West": "Australia/Perth", + "Australia/Yancowinna": "Australia/Broken_Hill", + "Pacific/Samoa": "Pacific/Pago_Pago", + "Pacific/Yap": "Pacific/Chuuk", + "Pacific/Truk": "Pacific/Chuuk", + "Pacific/Ponape": "Pacific/Pohnpei", + "NZ": "Pacific/Auckland", + "NZ-CHAT": "Pacific/Chatham", + + // --- Africa --- + "Africa/Asmera": "Africa/Asmara", + "Africa/Timbuktu": "Africa/Bamako", + "Egypt": "Africa/Cairo", + "Libya": "Africa/Tripoli", + + // --- Atlantic --- + "Atlantic/Faeroe": "Atlantic/Faroe", + "Atlantic/Jan_Mayen": "Europe/Oslo", + "Iceland": "Atlantic/Reykjavik", + + // --- Etc / Misc --- + "UTC": "UTC", + "Etc/UTC": "UTC", + "Etc/GMT": "UTC", + "GMT": "UTC", + "Zulu": "UTC", + "Universal": "UTC", } - return time.Date(t.Year(), t.Month(), t.Day()-weekday+1, 0, 0, 0, 0, t.Location()) -} -func endOfWeek(t time.Time) time.Time { - return 
startOfWeek(t).AddDate(0, 0, 7).Add(-time.Second) -} -func endOfMonth(year int, month time.Month, loc *time.Location) time.Time { - startNextMonth := time.Date(year, month+1, 1, 0, 0, 0, 0, loc) - return startNextMonth.Add(-time.Second) + + if cfg.ForceTZ() != nil { + return cfg.ForceTZ() + } + + if tz := r.URL.Query().Get("tz"); tz != "" { + if fixedTz, exists := overrides[tz]; exists { + tz = fixedTz + } + if loc, err := time.LoadLocation(tz); err == nil { + return loc + } + } + + if c, err := r.Cookie("tz"); err == nil { + var tz string + if fixedTz, exists := overrides[c.Value]; exists { + tz = fixedTz + } else { + tz = c.Value + } + if loc, err := time.LoadLocation(tz); err == nil { + return loc + } + } + + return time.Now().Location() } diff --git a/engine/handlers/interest.go b/engine/handlers/interest.go new file mode 100644 index 0000000..9787c45 --- /dev/null +++ b/engine/handlers/interest.go @@ -0,0 +1,47 @@ +package handlers + +import ( + "net/http" + "strconv" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" +) + +func GetInterestHandler(store db.DB) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + + l.Debug().Msg("GetInterestHandler: Received request to retrieve interest") + + // im just using this to parse the artist/album/track id, which is bad + parsed := OptsFromRequest(r) + + bucketCountStr := r.URL.Query().Get("buckets") + var buckets = 0 + var err error + if buckets, err = strconv.Atoi(bucketCountStr); err != nil { + l.Debug().Msg("GetInterestHandler: Buckets is not an integer") + utils.WriteError(w, "parameter 'buckets' must be an integer", http.StatusBadRequest) + return + } + + opts := db.GetInterestOpts{ + Buckets: buckets, + AlbumID: int32(parsed.AlbumID), + ArtistID: int32(parsed.ArtistID), + TrackID: int32(parsed.TrackID), + } + + interest, err := 
store.GetInterest(ctx, opts) + if err != nil { + l.Err(err).Msg("GetInterestHandler: Failed to query interest") + utils.WriteError(w, "Failed to retrieve interest: "+err.Error(), http.StatusInternalServerError) + return + } + + utils.WriteJSON(w, http.StatusOK, interest) + } +} diff --git a/engine/handlers/lbz_submit_listen.go b/engine/handlers/lbz_submit_listen.go index e92eb48..daf7969 100644 --- a/engine/handlers/lbz_submit_listen.go +++ b/engine/handlers/lbz_submit_listen.go @@ -90,6 +90,11 @@ func LbzSubmitListenHandler(store db.DB, mbzc mbz.MusicBrainzCaller) func(w http utils.WriteError(w, "failed to read request body", http.StatusBadRequest) return } + + if cfg.LbzRelayEnabled() { + go doLbzRelay(requestBytes, l) + } + if err := json.NewDecoder(bytes.NewBuffer(requestBytes)).Decode(&req); err != nil { l.Err(err).Msg("LbzSubmitListenHandler: Failed to decode request") utils.WriteError(w, "failed to decode request", http.StatusBadRequest) @@ -103,7 +108,7 @@ func LbzSubmitListenHandler(store db.DB, mbzc mbz.MusicBrainzCaller) func(w http return } - l.Debug().Any("request_body", req).Msg("LbzSubmitListenHandler: Parsed request body") + l.Info().Any("request_body", req).Msg("LbzSubmitListenHandler: Parsed request body") if len(req.Payload) < 1 { l.Debug().Msg("LbzSubmitListenHandler: Payload is empty") @@ -234,10 +239,6 @@ func LbzSubmitListenHandler(store db.DB, mbzc mbz.MusicBrainzCaller) func(w http w.WriteHeader(http.StatusOK) w.Header().Set("Content-Type", "application/json") w.Write([]byte("{\"status\": \"ok\"}")) - - if cfg.LbzRelayEnabled() { - go doLbzRelay(requestBytes, l) - } } } diff --git a/engine/handlers/mbzid.go b/engine/handlers/mbzid.go new file mode 100644 index 0000000..e7aafd8 --- /dev/null +++ b/engine/handlers/mbzid.go @@ -0,0 +1,105 @@ +package handlers + +import ( + "net/http" + "strconv" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" + 
"github.com/google/uuid" +) + +func UpdateMbzIdHandler(store db.DB) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + + l.Debug().Msg("UpdateMbzIdHandler: Received request to set update MusicBrainz ID") + + err := r.ParseForm() + if err != nil { + l.Debug().Msg("UpdateMbzIdHandler: Failed to parse form") + utils.WriteError(w, "form is invalid", http.StatusBadRequest) + return + } + + // Parse query parameters + artistIDStr := r.FormValue("artist_id") + albumIDStr := r.FormValue("album_id") + trackIDStr := r.FormValue("track_id") + mbzidStr := r.FormValue("mbz_id") + + if mbzidStr == "" || (artistIDStr == "" && albumIDStr == "" && trackIDStr == "") { + l.Debug().Msg("UpdateMbzIdHandler: Request is missing required parameters") + utils.WriteError(w, "mbzid and artist_id, album_id, or track_id must be provided", http.StatusBadRequest) + return + } + if utils.MoreThanOneString(artistIDStr, albumIDStr, trackIDStr) { + l.Debug().Msg("UpdateMbzIdHandler: Request has more than one of artist_id, album_id, and track_id") + utils.WriteError(w, "only one of artist_id, album_id, or track_id can be provided at a time", http.StatusBadRequest) + return + } + var mbzid uuid.UUID + if mbzid, err = uuid.Parse(mbzidStr); err != nil { + l.Debug().Msg("UpdateMbzIdHandler: Provided MusicBrainz ID is invalid") + utils.WriteError(w, "provided musicbrainz id is invalid", http.StatusBadRequest) + return + } + + if artistIDStr != "" { + var artistID int + artistID, err = strconv.Atoi(artistIDStr) + if err != nil { + l.Debug().AnErr("error", err).Msg("UpdateMbzIdHandler: Invalid artist id") + utils.WriteError(w, "invalid artist_id", http.StatusBadRequest) + return + } + err = store.UpdateArtist(ctx, db.UpdateArtistOpts{ + ID: int32(artistID), + MusicBrainzID: mbzid, + }) + if err != nil { + l.Error().Err(err).Msg("UpdateMbzIdHandler: Failed to update musicbrainz id") + utils.WriteError(w, 
"failed to update musicbrainz id", http.StatusInternalServerError) + return + } + } else if albumIDStr != "" { + var albumID int + albumID, err = strconv.Atoi(albumIDStr) + if err != nil { + l.Debug().AnErr("error", err).Msg("UpdateMbzIdHandler: Invalid album id") + utils.WriteError(w, "invalid artist_id", http.StatusBadRequest) + return + } + err = store.UpdateAlbum(ctx, db.UpdateAlbumOpts{ + ID: int32(albumID), + MusicBrainzID: mbzid, + }) + if err != nil { + l.Error().Err(err).Msg("UpdateMbzIdHandler: Failed to update musicbrainz id") + utils.WriteError(w, "failed to update musicbrainz id", http.StatusInternalServerError) + return + } + } else if trackIDStr != "" { + var trackID int + trackID, err = strconv.Atoi(trackIDStr) + if err != nil { + l.Debug().AnErr("error", err).Msg("UpdateMbzIdHandler: Invalid track id") + utils.WriteError(w, "invalid artist_id", http.StatusBadRequest) + return + } + err = store.UpdateTrack(ctx, db.UpdateTrackOpts{ + ID: int32(trackID), + MusicBrainzID: mbzid, + }) + if err != nil { + l.Error().Err(err).Msg("UpdateMbzIdHandler: Failed to update musicbrainz id") + utils.WriteError(w, "failed to update musicbrainz id", http.StatusInternalServerError) + return + } + } + + w.WriteHeader(http.StatusNoContent) + } +} diff --git a/engine/handlers/replace_image.go b/engine/handlers/replace_image.go index 66c0bbe..9a2835d 100644 --- a/engine/handlers/replace_image.go +++ b/engine/handlers/replace_image.go @@ -9,6 +9,7 @@ import ( "github.com/gabehf/koito/internal/catalog" "github.com/gabehf/koito/internal/cfg" "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/images" "github.com/gabehf/koito/internal/logger" "github.com/gabehf/koito/internal/utils" "github.com/google/uuid" @@ -75,7 +76,7 @@ func ReplaceImageHandler(store db.DB) http.HandlerFunc { fileUrl := r.FormValue("image_url") if fileUrl != "" { l.Debug().Msg("ReplaceImageHandler: Image identified as remote file") - err = catalog.ValidateImageURL(fileUrl) + err = 
images.ValidateImageURL(fileUrl) if err != nil { l.Debug().AnErr("error", err).Msg("ReplaceImageHandler: Invalid image URL") utils.WriteError(w, "url is invalid or not an image file", http.StatusBadRequest) diff --git a/engine/handlers/stats.go b/engine/handlers/stats.go index 0bc7c8a..accd9e7 100644 --- a/engine/handlers/stats.go +++ b/engine/handlers/stats.go @@ -2,7 +2,6 @@ package handlers import ( "net/http" - "strings" "github.com/gabehf/koito/internal/db" "github.com/gabehf/koito/internal/logger" @@ -23,54 +22,39 @@ func StatsHandler(store db.DB) http.HandlerFunc { l.Debug().Msg("StatsHandler: Received request to retrieve statistics") - var period db.Period - switch strings.ToLower(r.URL.Query().Get("period")) { - case "day": - period = db.PeriodDay - case "week": - period = db.PeriodWeek - case "month": - period = db.PeriodMonth - case "year": - period = db.PeriodYear - case "all_time": - period = db.PeriodAllTime - default: - l.Debug().Msgf("StatsHandler: Using default value '%s' for period", db.PeriodDay) - period = db.PeriodDay - } + tf := TimeframeFromRequest(r) - l.Debug().Msgf("StatsHandler: Fetching statistics for period '%s'", period) + l.Debug().Msg("StatsHandler: Fetching statistics") - listens, err := store.CountListens(r.Context(), db.Timeframe{Period: period}) + listens, err := store.CountListens(r.Context(), tf) if err != nil { l.Err(err).Msg("StatsHandler: Failed to fetch listen count") utils.WriteError(w, "failed to get listens: "+err.Error(), http.StatusInternalServerError) return } - tracks, err := store.CountTracks(r.Context(), db.Timeframe{Period: period}) + tracks, err := store.CountTracks(r.Context(), tf) if err != nil { l.Err(err).Msg("StatsHandler: Failed to fetch track count") utils.WriteError(w, "failed to get tracks: "+err.Error(), http.StatusInternalServerError) return } - albums, err := store.CountAlbums(r.Context(), db.Timeframe{Period: period}) + albums, err := store.CountAlbums(r.Context(), tf) if err != nil { 
l.Err(err).Msg("StatsHandler: Failed to fetch album count") utils.WriteError(w, "failed to get albums: "+err.Error(), http.StatusInternalServerError) return } - artists, err := store.CountArtists(r.Context(), db.Timeframe{Period: period}) + artists, err := store.CountArtists(r.Context(), tf) if err != nil { l.Err(err).Msg("StatsHandler: Failed to fetch artist count") utils.WriteError(w, "failed to get artists: "+err.Error(), http.StatusInternalServerError) return } - timeListenedS, err := store.CountTimeListened(r.Context(), db.Timeframe{Period: period}) + timeListenedS, err := store.CountTimeListened(r.Context(), tf) if err != nil { l.Err(err).Msg("StatsHandler: Failed to fetch time listened") utils.WriteError(w, "failed to get time listened: "+err.Error(), http.StatusInternalServerError) diff --git a/engine/import_test.go b/engine/import_test.go index 6a84d7b..fa69e73 100644 --- a/engine/import_test.go +++ b/engine/import_test.go @@ -61,7 +61,9 @@ func TestImportSpotify(t *testing.T) { a, err := store.GetArtist(context.Background(), db.GetArtistOpts{Name: "The Story So Far"}) require.NoError(t, err) - track, err := store.GetTrack(context.Background(), db.GetTrackOpts{Title: "Clairvoyant", ArtistIDs: []int32{a.ID}}) + r, err := store.GetAlbum(context.Background(), db.GetAlbumOpts{ArtistID: a.ID, Title: "The Story So Far / Stick To Your Guns Split"}) + require.NoError(t, err) + track, err := store.GetTrack(context.Background(), db.GetTrackOpts{Title: "Clairvoyant", ReleaseID: r.ID, ArtistIDs: []int32{a.ID}}) require.NoError(t, err) t.Log(track) assert.Equal(t, "Clairvoyant", track.Title) @@ -107,15 +109,15 @@ func TestImportLastFM(t *testing.T) { artist, err := store.GetArtist(context.Background(), db.GetArtistOpts{MusicBrainzID: uuid.MustParse("4b00640f-3be6-43f8-9b34-ff81bd89320a")}) require.NoError(t, err) assert.Equal(t, "OurR", artist.Name) - artist, err = store.GetArtist(context.Background(), db.GetArtistOpts{Name: "CHUU"}) + artist, err = 
store.GetArtist(context.Background(), db.GetArtistOpts{Name: "Necry Talkie"}) require.NoError(t, err) - track, err := store.GetTrack(context.Background(), db.GetTrackOpts{Title: "because I'm stupid?", ArtistIDs: []int32{artist.ID}}) + track, err := store.GetTrack(context.Background(), db.GetTrackOpts{Title: "放課後の記憶", ReleaseID: album.ID, ArtistIDs: []int32{artist.ID}}) require.NoError(t, err) t.Log(track) - listens, err := store.GetListensPaginated(context.Background(), db.GetItemsOpts{TrackID: int(track.ID), Period: db.PeriodAllTime}) + listens, err := store.GetListensPaginated(context.Background(), db.GetItemsOpts{TrackID: int(track.ID), Timeframe: db.Timeframe{Period: db.PeriodAllTime}}) require.NoError(t, err) require.Len(t, listens.Items, 1) - assert.WithinDuration(t, time.Unix(1749776100, 0), listens.Items[0].Time, 1*time.Second) + assert.WithinDuration(t, time.Unix(1749774900, 0), listens.Items[0].Time, 1*time.Second) truncateTestData(t) } @@ -141,15 +143,15 @@ func TestImportLastFM_MbzDisabled(t *testing.T) { artist, err := store.GetArtist(context.Background(), db.GetArtistOpts{MusicBrainzID: uuid.MustParse("4b00640f-3be6-43f8-9b34-ff81bd89320a")}) require.NoError(t, err) assert.Equal(t, "OurR", artist.Name) - artist, err = store.GetArtist(context.Background(), db.GetArtistOpts{Name: "CHUU"}) + artist, err = store.GetArtist(context.Background(), db.GetArtistOpts{Name: "Necry Talkie"}) require.NoError(t, err) - track, err := store.GetTrack(context.Background(), db.GetTrackOpts{Title: "because I'm stupid?", ArtistIDs: []int32{artist.ID}}) + track, err := store.GetTrack(context.Background(), db.GetTrackOpts{Title: "放課後の記憶", ReleaseID: album.ID, ArtistIDs: []int32{artist.ID}}) require.NoError(t, err) t.Log(track) - listens, err := store.GetListensPaginated(context.Background(), db.GetItemsOpts{TrackID: int(track.ID), Period: db.PeriodAllTime}) + listens, err := store.GetListensPaginated(context.Background(), db.GetItemsOpts{TrackID: int(track.ID), Timeframe: 
db.Timeframe{Period: db.PeriodAllTime}}) require.NoError(t, err) require.Len(t, listens.Items, 1) - assert.WithinDuration(t, time.Unix(1749776100, 0), listens.Items[0].Time, 1*time.Second) + assert.WithinDuration(t, time.Unix(1749774900, 0), listens.Items[0].Time, 1*time.Second) truncateTestData(t) } @@ -216,7 +218,7 @@ func TestImportListenBrainz(t *testing.T) { track, err := store.GetTrack(context.Background(), db.GetTrackOpts{MusicBrainzID: uuid.MustParse("08e8f55b-f1a4-46b8-b2d1-fab4c592165c")}) require.NoError(t, err) assert.Equal(t, "Desert", track.Title) - listens, err := store.GetListensPaginated(context.Background(), db.GetItemsOpts{TrackID: int(track.ID), Period: db.PeriodAllTime}) + listens, err := store.GetListensPaginated(context.Background(), db.GetItemsOpts{TrackID: int(track.ID), Timeframe: db.Timeframe{Period: db.PeriodAllTime}}) require.NoError(t, err) assert.Len(t, listens.Items, 1) assert.WithinDuration(t, time.Unix(1749780612, 0), listens.Items[0].Time, 1*time.Second) @@ -254,7 +256,7 @@ func TestImportListenBrainz_MbzDisabled(t *testing.T) { track, err := store.GetTrack(context.Background(), db.GetTrackOpts{MusicBrainzID: uuid.MustParse("08e8f55b-f1a4-46b8-b2d1-fab4c592165c")}) require.NoError(t, err) assert.Equal(t, "Desert", track.Title) - listens, err := store.GetListensPaginated(context.Background(), db.GetItemsOpts{TrackID: int(track.ID), Period: db.PeriodAllTime}) + listens, err := store.GetListensPaginated(context.Background(), db.GetItemsOpts{TrackID: int(track.ID), Timeframe: db.Timeframe{Period: db.PeriodAllTime}}) require.NoError(t, err) assert.Len(t, listens.Items, 1) assert.WithinDuration(t, time.Unix(1749780612, 0), listens.Items[0].Time, 1*time.Second) @@ -262,6 +264,34 @@ func TestImportListenBrainz_MbzDisabled(t *testing.T) { truncateTestData(t) } +func TestImportListenBrainz_MBIDMapping(t *testing.T) { + + src := path.Join("..", "test_assets", "listenbrainz_shoko1_123456789.zip") + destDir := filepath.Join(cfg.ConfigDir(), 
"import") + dest := filepath.Join(destDir, "listenbrainz_shoko1_123456789.zip") + + // not going to make the dest dir because engine should make it already + + input, err := os.ReadFile(src) + require.NoError(t, err) + + require.NoError(t, os.WriteFile(dest, input, os.ModePerm)) + + engine.RunImporter(logger.Get(), store, &mbz.MbzErrorCaller{}) + + album, err := store.GetAlbum(context.Background(), db.GetAlbumOpts{MusicBrainzID: uuid.MustParse("177ebc28-0115-3897-8eb3-ebf74ce23790")}) + require.NoError(t, err) + assert.Equal(t, "Zombie", album.Title) + artist, err := store.GetArtist(context.Background(), db.GetArtistOpts{MusicBrainzID: uuid.MustParse("c98d40fd-f6cf-4b26-883e-eaa515ee2851")}) + require.NoError(t, err) + assert.Equal(t, "The Cranberries", artist.Name) + track, err := store.GetTrack(context.Background(), db.GetTrackOpts{MusicBrainzID: uuid.MustParse("3bbeb4e3-ab6d-460d-bfc5-de49e4251061")}) + require.NoError(t, err) + assert.Equal(t, "Zombie", track.Title) + + truncateTestData(t) +} + func TestImportKoito(t *testing.T) { src := path.Join("..", "test_assets", "koito_export_test.json") @@ -274,6 +304,7 @@ func TestImportKoito(t *testing.T) { giriReleaseMBID := uuid.MustParse("ac1f8da0-21d7-426e-83b0-befff06f0871") suzukiMBID := uuid.MustParse("30f851bb-dba3-4e9b-811c-5f27f595c86a") nijinoTrackMBID := uuid.MustParse("a4f26836-3894-46c1-acac-227808308687") + lp3MBID := uuid.MustParse("d0ec30bd-7cdc-417c-979d-5a0631b8a161") input, err := os.ReadFile(src) require.NoError(t, err) @@ -284,11 +315,11 @@ func TestImportKoito(t *testing.T) { // ensure all artists are saved _, err = store.GetArtist(ctx, db.GetArtistOpts{Name: "American Football"}) - require.NoError(t, err) + assert.NoError(t, err) _, err = store.GetArtist(ctx, db.GetArtistOpts{Name: "Rachel Goswell"}) - require.NoError(t, err) + assert.NoError(t, err) _, err = store.GetArtist(ctx, db.GetArtistOpts{Name: "Elizabeth Powell"}) - require.NoError(t, err) + assert.NoError(t, err) // ensure artist 
aliases are saved artist, err := store.GetArtist(ctx, db.GetArtistOpts{MusicBrainzID: suzukiMBID}) @@ -310,6 +341,12 @@ func TestImportKoito(t *testing.T) { aliases, err := store.GetAllAlbumAliases(ctx, album.ID) require.NoError(t, err) assert.Contains(t, utils.FlattenAliases(aliases), "Nijinoiroyo Azayakadeare (NELKE ver.)") + // ensure album associations are saved + album, err = store.GetAlbum(ctx, db.GetAlbumOpts{MusicBrainzID: lp3MBID}) + require.NoError(t, err) + assert.Contains(t, utils.FlattenSimpleArtistNames(album.Artists), "Elizabeth Powell") + assert.Contains(t, utils.FlattenSimpleArtistNames(album.Artists), "Rachel Goswell") + assert.Contains(t, utils.FlattenSimpleArtistNames(album.Artists), "American Football") // ensure all tracks are saved track, err := store.GetTrack(ctx, db.GetTrackOpts{MusicBrainzID: nijinoTrackMBID}) @@ -323,7 +360,9 @@ func TestImportKoito(t *testing.T) { artist, err = store.GetArtist(ctx, db.GetArtistOpts{MusicBrainzID: suzukiMBID}) require.NoError(t, err) - _, err = store.GetTrack(ctx, db.GetTrackOpts{Title: "GIRI GIRI", ArtistIDs: []int32{artist.ID}}) + album, err = store.GetAlbum(ctx, db.GetAlbumOpts{ArtistID: artist.ID, Title: "GIRI GIRI"}) + require.NoError(t, err) + _, err = store.GetTrack(ctx, db.GetTrackOpts{Title: "GIRI GIRI", ReleaseID: album.ID, ArtistIDs: []int32{artist.ID}}) require.NoError(t, err) count, err := store.CountTracks(ctx, db.Timeframe{Period: db.PeriodAllTime}) diff --git a/engine/long_test.go b/engine/long_test.go index 6b90a22..db86ac2 100644 --- a/engine/long_test.go +++ b/engine/long_test.go @@ -74,15 +74,15 @@ func getApiKey(t *testing.T, session string) { func truncateTestData(t *testing.T) { err := store.Exec(context.Background(), - `TRUNCATE - artists, + `TRUNCATE + artists, artist_aliases, - tracks, - artist_tracks, - releases, - artist_releases, - release_aliases, - listens + tracks, + artist_tracks, + releases, + artist_releases, + release_aliases, + listens RESTART IDENTITY CASCADE`) 
require.NoError(t, err) } @@ -211,7 +211,7 @@ func TestGetters(t *testing.T) { assert.Equal(t, "花の塔", track.Title) // Listen was saved - resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/listens") + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/listens?period=all_time") assert.NoError(t, err) var listens db.PaginatedResponse[models.Listen] err = json.NewDecoder(resp.Body).Decode(&listens) @@ -220,21 +220,21 @@ func TestGetters(t *testing.T) { assert.EqualValues(t, 2, listens.Items[0].Track.ID) assert.Equal(t, "Where Our Blue Is", listens.Items[0].Track.Title) - resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/top-artists") + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/top-artists?period=all_time") assert.NoError(t, err) var artists db.PaginatedResponse[models.Artist] err = json.NewDecoder(resp.Body).Decode(&artists) require.NoError(t, err) require.Len(t, artists.Items, 3) - resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/top-albums") + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/top-albums?period=all_time") assert.NoError(t, err) var albums db.PaginatedResponse[models.Album] err = json.NewDecoder(resp.Body).Decode(&albums) require.NoError(t, err) require.Len(t, albums.Items, 3) - resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/top-tracks") + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/top-tracks?period=all_time") assert.NoError(t, err) var tracks db.PaginatedResponse[models.Track] err = json.NewDecoder(resp.Body).Decode(&tracks) @@ -356,6 +356,51 @@ func TestDelete(t *testing.T) { truncateTestData(t) } +func TestLoginGate(t *testing.T) { + + t.Run("Submit Listens", doSubmitListens) + + req, err := http.NewRequest("DELETE", host()+"/apis/web/v1/artist?id=1", nil) + require.NoError(t, err) + req.Header.Add("Authorization", "Token "+apikey) + resp, err := http.DefaultClient.Do(req) + assert.NoError(t, err) + assert.Equal(t, 204, resp.StatusCode) + + req, err = 
http.NewRequest("GET", host()+"/apis/web/v1/artist?id=3", nil) + require.NoError(t, err) + resp, err = http.DefaultClient.Do(req) + assert.NoError(t, err) + assert.Equal(t, 200, resp.StatusCode) + var artist models.Artist + err = json.NewDecoder(resp.Body).Decode(&artist) + require.NoError(t, err) + assert.Equal(t, "ネクライトーキー", artist.Name) + + cfg.SetLoginGate(true) + + req, err = http.NewRequest("GET", host()+"/apis/web/v1/artist?id=3", nil) + require.NoError(t, err) + // req.Header.Add("Authorization", "Token "+apikey) + resp, err = http.DefaultClient.Do(req) + assert.NoError(t, err) + assert.Equal(t, 401, resp.StatusCode) + + req, err = http.NewRequest("GET", host()+"/apis/web/v1/artist?id=3", nil) + require.NoError(t, err) + req.Header.Add("Authorization", "Token "+apikey) + resp, err = http.DefaultClient.Do(req) + assert.NoError(t, err) + assert.Equal(t, 200, resp.StatusCode) + err = json.NewDecoder(resp.Body).Decode(&artist) + require.NoError(t, err) + assert.Equal(t, "ネクライトーキー", artist.Name) + + cfg.SetLoginGate(false) + + truncateTestData(t) +} + func TestAliasesAndSearch(t *testing.T) { t.Run("Submit Listens", doSubmitListens) @@ -439,7 +484,7 @@ func TestStats(t *testing.T) { t.Run("Submit Listens", doSubmitListens) - resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/stats") + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/stats?period=all_time") t.Log(resp) require.NoError(t, err) var actual handlers.StatsResponse diff --git a/engine/middleware/authenticate.go b/engine/middleware/authenticate.go new file mode 100644 index 0000000..830fb78 --- /dev/null +++ b/engine/middleware/authenticate.go @@ -0,0 +1,166 @@ +package middleware + +import ( + "context" + "errors" + "fmt" + "net/http" + "strings" + "time" + + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/models" + "github.com/gabehf/koito/internal/utils" + 
"github.com/google/uuid" +) + +type MiddlwareContextKey string + +const ( + UserContextKey MiddlwareContextKey = "user" + apikeyContextKey MiddlwareContextKey = "apikeyID" +) + +type AuthMode int + +const ( + AuthModeSessionCookie AuthMode = iota + AuthModeAPIKey + AuthModeSessionOrAPIKey + AuthModeLoginGate +) + +func Authenticate(store db.DB, mode AuthMode) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + + var user *models.User + var err error + + switch mode { + case AuthModeSessionCookie: + user, err = validateSession(ctx, store, r) + + case AuthModeAPIKey: + user, err = validateAPIKey(ctx, store, r) + + case AuthModeSessionOrAPIKey: + user, err = validateSession(ctx, store, r) + if err != nil || user == nil { + user, err = validateAPIKey(ctx, store, r) + } + + case AuthModeLoginGate: + if cfg.LoginGate() { + user, err = validateSession(ctx, store, r) + if err != nil || user == nil { + user, err = validateAPIKey(ctx, store, r) + } + } else { + next.ServeHTTP(w, r) + return + } + } + + if err != nil { + l.Err(err).Msg("authentication failed") + utils.WriteError(w, "unauthorized", http.StatusUnauthorized) + return + } + + if user == nil { + utils.WriteError(w, "unauthorized", http.StatusUnauthorized) + return + } + + ctx = context.WithValue(ctx, UserContextKey, user) + r = r.WithContext(ctx) + + next.ServeHTTP(w, r) + }) + } +} + +func validateSession(ctx context.Context, store db.DB, r *http.Request) (*models.User, error) { + l := logger.FromContext(r.Context()) + + l.Debug().Msgf("ValidateSession: Checking user authentication via session cookie") + + cookie, err := r.Cookie("koito_session") + var sid uuid.UUID + if err == nil { + sid, err = uuid.Parse(cookie.Value) + if err != nil { + l.Err(err).Msg("ValidateSession: Could not parse UUID from session cookie") + return nil, errors.New("session cookie is 
invalid") + } + } else { + l.Debug().Msgf("ValidateSession: No session cookie found; attempting API key authentication") + return nil, errors.New("session cookie is missing") + } + + l.Debug().Msg("ValidateSession: Retrieved login cookie from request") + + u, err := store.GetUserBySession(r.Context(), sid) + if err != nil { + l.Err(fmt.Errorf("ValidateSession: %w", err)).Msg("Error accessing database") + return nil, errors.New("internal server error") + } + if u == nil { + l.Debug().Msg("ValidateSession: No user with session id found") + return nil, errors.New("no user with session id found") + } + + ctx = context.WithValue(r.Context(), UserContextKey, u) + r = r.WithContext(ctx) + + l.Debug().Msgf("ValidateSession: Refreshing session for user '%s'", u.Username) + + store.RefreshSession(r.Context(), sid, time.Now().Add(30*24*time.Hour)) + + l.Debug().Msgf("ValidateSession: Refreshed session for user '%s'", u.Username) + + return u, nil +} + +func validateAPIKey(ctx context.Context, store db.DB, r *http.Request) (*models.User, error) { + l := logger.FromContext(ctx) + + l.Debug().Msg("ValidateApiKey: Checking if user is already authenticated") + + authH := r.Header.Get("Authorization") + var token string + if strings.HasPrefix(strings.ToLower(authH), "token ") { + token = strings.TrimSpace(authH[6:]) // strip "Token " + } else { + l.Error().Msg("ValidateApiKey: Authorization header must be formatted 'Token {token}'") + return nil, errors.New("authorization header is invalid") + } + + u, err := store.GetUserByApiKey(ctx, token) + if err != nil { + l.Err(err).Msg("ValidateApiKey: Failed to get user from database using api key") + return nil, errors.New("internal server error") + } + if u == nil { + l.Debug().Msg("ValidateApiKey: API key does not exist") + return nil, errors.New("authorization token is invalid") + } + + ctx = context.WithValue(r.Context(), UserContextKey, u) + r = r.WithContext(ctx) + + return u, nil +} + +func GetUserFromContext(ctx context.Context) 
*models.User { + user, ok := ctx.Value(UserContextKey).(*models.User) + if !ok { + return nil + } + return user +} diff --git a/engine/middleware/validate.go b/engine/middleware/validate.go deleted file mode 100644 index b3e1369..0000000 --- a/engine/middleware/validate.go +++ /dev/null @@ -1,125 +0,0 @@ -package middleware - -import ( - "context" - "fmt" - "net/http" - "strings" - "time" - - "github.com/gabehf/koito/internal/db" - "github.com/gabehf/koito/internal/logger" - "github.com/gabehf/koito/internal/models" - "github.com/gabehf/koito/internal/utils" - "github.com/google/uuid" -) - -type MiddlwareContextKey string - -const ( - UserContextKey MiddlwareContextKey = "user" - apikeyContextKey MiddlwareContextKey = "apikeyID" -) - -func ValidateSession(store db.DB) func(next http.Handler) http.Handler { - return func(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - l := logger.FromContext(r.Context()) - - l.Debug().Msgf("ValidateSession: Checking user authentication via session cookie") - - cookie, err := r.Cookie("koito_session") - var sid uuid.UUID - if err == nil { - sid, err = uuid.Parse(cookie.Value) - if err != nil { - l.Err(err).Msg("ValidateSession: Could not parse UUID from session cookie") - utils.WriteError(w, "session cookie is invalid", http.StatusUnauthorized) - return - } - } else { - l.Debug().Msgf("ValidateSession: No session cookie found; attempting API key authentication") - utils.WriteError(w, "session cookie is missing", http.StatusUnauthorized) - return - } - - l.Debug().Msg("ValidateSession: Retrieved login cookie from request") - - u, err := store.GetUserBySession(r.Context(), sid) - if err != nil { - l.Err(fmt.Errorf("ValidateSession: %w", err)).Msg("Error accessing database") - utils.WriteError(w, "internal server error", http.StatusInternalServerError) - return - } - if u == nil { - l.Debug().Msg("ValidateSession: No user with session id found") - utils.WriteError(w, 
"unauthorized", http.StatusUnauthorized) - return - } - - ctx := context.WithValue(r.Context(), UserContextKey, u) - r = r.WithContext(ctx) - - l.Debug().Msgf("ValidateSession: Refreshing session for user '%s'", u.Username) - - store.RefreshSession(r.Context(), sid, time.Now().Add(30*24*time.Hour)) - - l.Debug().Msgf("ValidateSession: Refreshed session for user '%s'", u.Username) - - next.ServeHTTP(w, r) - }) - } -} - -func ValidateApiKey(store db.DB) func(next http.Handler) http.Handler { - return func(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - l := logger.FromContext(ctx) - - l.Debug().Msg("ValidateApiKey: Checking if user is already authenticated") - - u := GetUserFromContext(ctx) - if u != nil { - l.Debug().Msg("ValidateApiKey: User is already authenticated; skipping API key authentication") - next.ServeHTTP(w, r) - return - } - - authh := r.Header.Get("Authorization") - var token string - if strings.HasPrefix(strings.ToLower(authh), "token ") { - token = strings.TrimSpace(authh[6:]) // strip "Token " - } else { - l.Error().Msg("ValidateApiKey: Authorization header must be formatted 'Token {token}'") - utils.WriteError(w, "unauthorized", http.StatusUnauthorized) - return - } - - u, err := store.GetUserByApiKey(ctx, token) - if err != nil { - l.Err(err).Msg("Failed to get user from database using api key") - utils.WriteError(w, "internal server error", http.StatusInternalServerError) - return - } - if u == nil { - l.Debug().Msg("Api key does not exist") - utils.WriteError(w, "unauthorized", http.StatusUnauthorized) - return - } - - ctx = context.WithValue(r.Context(), UserContextKey, u) - r = r.WithContext(ctx) - - next.ServeHTTP(w, r) - }) - } -} - -func GetUserFromContext(ctx context.Context) *models.User { - user, ok := ctx.Value(UserContextKey).(*models.User) - if !ok { - return nil - } - return user -} diff --git a/engine/routes.go b/engine/routes.go index 
caff228..c62edf5 100644 --- a/engine/routes.go +++ b/engine/routes.go @@ -38,9 +38,7 @@ func bindRoutes( r.Get("/config", handlers.GetCfgHandler()) r.Group(func(r chi.Router) { - if cfg.LoginGate() { - r.Use(middleware.ValidateSession(db)) - } + r.Use(middleware.Authenticate(db, middleware.AuthModeLoginGate)) r.Get("/artist", handlers.GetArtistHandler(db)) r.Get("/artists", handlers.GetArtistsForItemHandler(db)) r.Get("/album", handlers.GetAlbumHandler(db)) @@ -55,6 +53,7 @@ func bindRoutes( r.Get("/search", handlers.SearchHandler(db)) r.Get("/aliases", handlers.GetAliasesHandler(db)) r.Get("/summary", handlers.SummaryHandler(db)) + r.Get("/interest", handlers.GetInterestHandler(db)) }) r.Post("/logout", handlers.LogoutHandler(db)) if !cfg.RateLimitDisabled() { @@ -78,7 +77,7 @@ func bindRoutes( }) r.Group(func(r chi.Router) { - r.Use(middleware.ValidateSession(db)) + r.Use(middleware.Authenticate(db, middleware.AuthModeSessionOrAPIKey)) r.Get("/export", handlers.ExportHandler(db)) r.Post("/replace-image", handlers.ReplaceImageHandler(db)) r.Patch("/album", handlers.UpdateAlbumHandler(db)) @@ -94,6 +93,7 @@ func bindRoutes( r.Post("/aliases", handlers.CreateAliasHandler(db)) r.Post("/aliases/delete", handlers.DeleteAliasHandler(db)) r.Post("/aliases/primary", handlers.SetPrimaryAliasHandler(db)) + r.Patch("/mbzid", handlers.UpdateMbzIdHandler(db)) r.Get("/user/apikeys", handlers.GetApiKeysHandler(db)) r.Post("/user/apikeys", handlers.GenerateApiKeyHandler(db)) r.Patch("/user/apikeys", handlers.UpdateApiKeyLabelHandler(db)) @@ -109,8 +109,10 @@ func bindRoutes( AllowedHeaders: []string{"Content-Type", "Authorization"}, })) - r.With(middleware.ValidateApiKey(db)).Post("/submit-listens", handlers.LbzSubmitListenHandler(db, mbz)) - r.With(middleware.ValidateApiKey(db)).Get("/validate-token", handlers.LbzValidateTokenHandler(db)) + r.With(middleware.Authenticate(db, middleware.AuthModeAPIKey)). 
+ Post("/submit-listens", handlers.LbzSubmitListenHandler(db, mbz)) + r.With(middleware.Authenticate(db, middleware.AuthModeAPIKey)). + Get("/validate-token", handlers.LbzValidateTokenHandler(db)) }) // serve react client diff --git a/internal/catalog/associate_album.go b/internal/catalog/associate_album.go index 55bc44c..3a63c58 100644 --- a/internal/catalog/associate_album.go +++ b/internal/catalog/associate_album.go @@ -82,11 +82,8 @@ func createOrUpdateAlbumWithMbzReleaseID(ctx context.Context, d db.DB, opts Asso titles := []string{release.Title, opts.ReleaseName} utils.Unique(&titles) - l.Debug().Msgf("Searching for albums '%v' from artist id %d in DB", titles, opts.Artists[0].ID) - album, err = d.GetAlbum(ctx, db.GetAlbumOpts{ - ArtistID: opts.Artists[0].ID, - Titles: titles, - }) + l.Debug().Msgf("Searching for albums '%v' from artist id %d and no associated MusicBrainz ID in DB", titles, opts.Artists[0].ID) + album, err = d.GetAlbumWithNoMbzIDByTitles(ctx, opts.Artists[0].ID, titles) if err == nil { l.Debug().Msgf("Found album %s, updating with MusicBrainz Release ID...", album.Title) err := d.UpdateAlbum(ctx, db.UpdateAlbumOpts{ diff --git a/internal/catalog/associate_artists.go b/internal/catalog/associate_artists.go index 6387d4b..15b91c9 100644 --- a/internal/catalog/associate_artists.go +++ b/internal/catalog/associate_artists.go @@ -96,6 +96,19 @@ func matchArtistsByMBIDMappings(ctx context.Context, d db.DB, opts AssociateArti }) if err == nil { l.Debug().Msgf("Artist '%s' found by Name", a.Artist) + if artist.MbzID == nil { + err := d.UpdateArtist(ctx, db.UpdateArtistOpts{ + ID: artist.ID, + MusicBrainzID: a.Mbid, + }) + if err != nil { + l.Err(err).Msg("matchArtistsByMBIDMappings: failed to update artist with MusicBrainz ID") + return nil, fmt.Errorf("matchArtistsByMBIDMappings: %w", err) + } + l.Debug().Msgf("Updated artist '%s' with MusicBrainz ID", artist.Name) + } else { + l.Warn().Msgf("Attempted to update artist %s with MusicBrainz ID, but an 
existing ID was already found", artist.Name) + } err = d.UpdateArtist(ctx, db.UpdateArtistOpts{ID: artist.ID, MusicBrainzID: a.Mbid}) if err != nil { l.Err(err).Msgf("matchArtistsByMBIDMappings: Failed to associate artist '%s' with MusicBrainz ID", artist.Name) diff --git a/internal/catalog/associate_track.go b/internal/catalog/associate_track.go index 635bdb1..3fa1fbc 100644 --- a/internal/catalog/associate_track.go +++ b/internal/catalog/associate_track.go @@ -39,7 +39,7 @@ func AssociateTrack(ctx context.Context, d db.DB, opts AssociateTrackOpts) (*mod return matchTrackByMbzID(ctx, d, opts) } else { l.Debug().Msgf("Associating track '%s' by title and artist", opts.TrackName) - return matchTrackByTitleAndArtist(ctx, d, opts) + return matchTrackByTrackInfo(ctx, d, opts) } } @@ -56,45 +56,53 @@ func matchTrackByMbzID(ctx context.Context, d db.DB, opts AssociateTrackOpts) (* return nil, fmt.Errorf("matchTrackByMbzID: %w", err) } else { l.Debug().Msgf("Track '%s' could not be found by MusicBrainz ID", opts.TrackName) - track, err := matchTrackByTitleAndArtist(ctx, d, opts) + track, err := matchTrackByTrackInfo(ctx, d, opts) if err != nil { return nil, fmt.Errorf("matchTrackByMbzID: %w", err) } l.Debug().Msgf("Updating track '%s' with MusicBrainz ID %s", opts.TrackName, opts.TrackMbzID) - err = d.UpdateTrack(ctx, db.UpdateTrackOpts{ - ID: track.ID, - MusicBrainzID: opts.TrackMbzID, - }) - if err != nil { - return nil, fmt.Errorf("matchTrackByMbzID: %w", err) + if track.MbzID == nil || *track.MbzID == uuid.Nil { + err := d.UpdateTrack(ctx, db.UpdateTrackOpts{ + ID: track.ID, + MusicBrainzID: opts.TrackMbzID, + }) + if err != nil { + l.Err(err).Msg("matchArtistsByMBIDMappings: failed to update track with MusicBrainz ID") + return nil, fmt.Errorf("matchArtistsByMBIDMappings: %w", err) + } + l.Debug().Msgf("Updated track '%s' with MusicBrainz ID", track.Title) + } else { + l.Warn().Msgf("Attempted to update track %s with MusicBrainz ID, but an existing ID was already 
found", track.Title) } track.MbzID = &opts.TrackMbzID return track, nil } } -func matchTrackByTitleAndArtist(ctx context.Context, d db.DB, opts AssociateTrackOpts) (*models.Track, error) { +func matchTrackByTrackInfo(ctx context.Context, d db.DB, opts AssociateTrackOpts) (*models.Track, error) { l := logger.FromContext(ctx) // try provided track title track, err := d.GetTrack(ctx, db.GetTrackOpts{ Title: opts.TrackName, + ReleaseID: opts.AlbumID, ArtistIDs: opts.ArtistIDs, }) if err == nil { - l.Debug().Msgf("Track '%s' found by title and artist match", track.Title) + l.Debug().Msgf("Track '%s' found by title, release and artist match", track.Title) return track, nil } else if !errors.Is(err, pgx.ErrNoRows) { - return nil, fmt.Errorf("matchTrackByTitleAndArtist: %w", err) + return nil, fmt.Errorf("matchTrackByTrackInfo: %w", err) } else { if opts.TrackMbzID != uuid.Nil { mbzTrack, err := opts.Mbzc.GetTrack(ctx, opts.TrackMbzID) if err == nil { track, err := d.GetTrack(ctx, db.GetTrackOpts{ Title: mbzTrack.Title, + ReleaseID: opts.AlbumID, ArtistIDs: opts.ArtistIDs, }) if err == nil { - l.Debug().Msgf("Track '%s' found by MusicBrainz title and artist match", opts.TrackName) + l.Debug().Msgf("Track '%s' found by MusicBrainz title, release and artist match", opts.TrackName) return track, nil } } @@ -108,7 +116,7 @@ func matchTrackByTitleAndArtist(ctx context.Context, d db.DB, opts AssociateTrac Duration: opts.Duration, }) if err != nil { - return nil, fmt.Errorf("matchTrackByTitleAndArtist: %w", err) + return nil, fmt.Errorf("matchTrackByTrackInfo: %w", err) } if opts.TrackMbzID == uuid.Nil { l.Info().Msgf("Created track '%s' with title and artist", opts.TrackName) diff --git a/internal/catalog/duration.go b/internal/catalog/duration.go new file mode 100644 index 0000000..6217dd6 --- /dev/null +++ b/internal/catalog/duration.go @@ -0,0 +1,85 @@ +package catalog + +import ( + "context" + "fmt" + + "github.com/gabehf/koito/internal/db" + 
"github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/mbz" + "github.com/google/uuid" +) + +func BackfillTrackDurationsFromMusicBrainz( + ctx context.Context, + store db.DB, + mbzCaller mbz.MusicBrainzCaller, +) error { + l := logger.FromContext(ctx) + l.Info().Msg("BackfillTrackDurationsFromMusicBrainz: Starting backfill of track durations from MusicBrainz") + + var from int32 = 0 + + for { + l.Debug().Int32("ID", from).Msg("Fetching tracks to backfill from ID") + tracks, err := store.GetTracksWithNoDurationButHaveMbzID(ctx, from) + if err != nil { + return fmt.Errorf("BackfillTrackDurationsFromMusicBrainz: failed to fetch tracks for duration backfill: %w", err) + } + + // nil, nil means no more results + if len(tracks) == 0 { + if from == 0 { + l.Info().Msg("BackfillTrackDurationsFromMusicBrainz: No tracks need updating. Skipping backfill...") + } else { + l.Info().Msg("BackfillTrackDurationsFromMusicBrainz: Backfill complete") + } + return nil + } + + for _, track := range tracks { + from = track.ID + + if track.MbzID == nil || *track.MbzID == uuid.Nil { + continue + } + + l.Debug(). + Str("title", track.Title). + Str("mbz_id", track.MbzID.String()). + Msg("BackfillTrackDurationsFromMusicBrainz: Backfilling duration from MusicBrainz") + + mbzTrack, err := mbzCaller.GetTrack(ctx, *track.MbzID) + if err != nil { + l.Err(err). + Str("title", track.Title). + Msg("BackfillTrackDurationsFromMusicBrainz: Failed to fetch track from MusicBrainz") + continue + } + + if mbzTrack.LengthMs <= 0 { + l.Debug(). + Str("title", track.Title). + Msg("BackfillTrackDurationsFromMusicBrainz: MusicBrainz track has no duration") + continue + } + + durationSeconds := int32(mbzTrack.LengthMs / 1000) + + err = store.UpdateTrack(ctx, db.UpdateTrackOpts{ + ID: track.ID, + Duration: durationSeconds, + }) + if err != nil { + l.Err(err). + Str("title", track.Title). + Msg("BackfillTrackDurationsFromMusicBrainz: Failed to update track duration") + } else { + l.Info(). 
+ Str("title", track.Title). + Int32("duration_seconds", durationSeconds). + Msg("BackfillTrackDurationsFromMusicBrainz: Track duration backfilled successfully") + } + } + } +} diff --git a/internal/catalog/duration_test.go b/internal/catalog/duration_test.go new file mode 100644 index 0000000..911e345 --- /dev/null +++ b/internal/catalog/duration_test.go @@ -0,0 +1,36 @@ +package catalog_test + +import ( + "context" + "testing" + + "github.com/gabehf/koito/internal/catalog" + "github.com/gabehf/koito/internal/mbz" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestBackfillDuration(t *testing.T) { + setupTestDataWithMbzIDs(t) + + ctx := context.Background() + mbzc := &mbz.MbzMockCaller{ + Artists: mbzArtistData, + Releases: mbzReleaseData, + Tracks: mbzTrackData, + } + + var err error + + err = catalog.BackfillTrackDurationsFromMusicBrainz(context.Background(), store, &mbz.MbzErrorCaller{}) + assert.NoError(t, err) + + err = catalog.BackfillTrackDurationsFromMusicBrainz(ctx, store, mbzc) + assert.NoError(t, err) + + count, err := store.Count(ctx, ` + SELECT COUNT(*) FROM tracks_with_title WHERE title = $1 AND duration > 0 + `, "Tokyo Calling") + require.NoError(t, err) + assert.Equal(t, 1, count, "track was not updated with duration") +} diff --git a/internal/catalog/images.go b/internal/catalog/images.go index bf5aa26..72b6efd 100644 --- a/internal/catalog/images.go +++ b/internal/catalog/images.go @@ -13,7 +13,9 @@ import ( "github.com/gabehf/koito/internal/cfg" "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/images" "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" "github.com/google/uuid" "github.com/h2non/bimg" ) @@ -78,30 +80,10 @@ func SourceImageDir() string { } } -// ValidateImageURL checks if the URL points to a valid image by performing a HEAD request. 
-func ValidateImageURL(url string) error { - resp, err := http.Head(url) - if err != nil { - return fmt.Errorf("ValidateImageURL: http.Head: %w", err) - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return fmt.Errorf("ValidateImageURL: HEAD request failed, status code: %d", resp.StatusCode) - } - - contentType := resp.Header.Get("Content-Type") - if !strings.HasPrefix(contentType, "image/") { - return fmt.Errorf("ValidateImageURL: URL does not point to an image, content type: %s", contentType) - } - - return nil -} - // DownloadAndCacheImage downloads an image from the given URL, then calls CompressAndSaveImage. func DownloadAndCacheImage(ctx context.Context, id uuid.UUID, url string, size ImageSize) error { l := logger.FromContext(ctx) - err := ValidateImageURL(url) + err := images.ValidateImageURL(url) if err != nil { return fmt.Errorf("DownloadAndCacheImage: %w", err) } @@ -285,3 +267,127 @@ func pruneDirImgs(ctx context.Context, store db.DB, path string, memo map[string } return count, nil } + +func FetchMissingArtistImages(ctx context.Context, store db.DB) error { + l := logger.FromContext(ctx) + l.Info().Msg("FetchMissingArtistImages: Starting backfill of missing artist images") + + var from int32 = 0 + + for { + l.Debug().Int32("ID", from).Msg("Fetching artist images to backfill from ID") + artists, err := store.ArtistsWithoutImages(ctx, from) + if err != nil { + return fmt.Errorf("FetchMissingArtistImages: failed to fetch artists for image backfill: %w", err) + } + + if len(artists) == 0 { + if from == 0 { + l.Info().Msg("FetchMissingArtistImages: No artists with missing images found") + } else { + l.Info().Msg("FetchMissingArtistImages: Finished fetching missing artist images") + } + return nil + } + + for _, artist := range artists { + from = artist.ID + + l.Debug(). + Str("title", artist.Name). 
+ Msg("FetchMissingArtistImages: Attempting to fetch missing artist image") + + var aliases []string + if aliasrow, err := store.GetAllArtistAliases(ctx, artist.ID); err != nil { + aliases = utils.FlattenAliases(aliasrow) + } else { + aliases = []string{artist.Name} + } + + var imgid uuid.UUID + imgUrl, imgErr := images.GetArtistImage(ctx, images.ArtistImageOpts{ + Aliases: aliases, + }) + if imgErr == nil && imgUrl != "" { + imgid = uuid.New() + err = store.UpdateArtist(ctx, db.UpdateArtistOpts{ + ID: artist.ID, + Image: imgid, + ImageSrc: imgUrl, + }) + if err != nil { + l.Err(err). + Str("title", artist.Name). + Msg("FetchMissingArtistImages: Failed to update artist with image in database") + continue + } + l.Info(). + Str("name", artist.Name). + Msg("FetchMissingArtistImages: Successfully fetched missing artist image") + } else { + l.Err(err). + Str("name", artist.Name). + Msg("FetchMissingArtistImages: Failed to fetch artist image") + } + } + } +} +func FetchMissingAlbumImages(ctx context.Context, store db.DB) error { + l := logger.FromContext(ctx) + l.Info().Msg("FetchMissingAlbumImages: Starting backfill of missing album images") + + var from int32 = 0 + + for { + l.Debug().Int32("ID", from).Msg("Fetching album images to backfill from ID") + albums, err := store.AlbumsWithoutImages(ctx, from) + if err != nil { + return fmt.Errorf("FetchMissingAlbumImages: failed to fetch albums for image backfill: %w", err) + } + + if len(albums) == 0 { + if from == 0 { + l.Info().Msg("FetchMissingAlbumImages: No albums with missing images found") + } else { + l.Info().Msg("FetchMissingAlbumImages: Finished fetching missing album images") + } + return nil + } + + for _, album := range albums { + from = album.ID + + l.Debug(). + Str("title", album.Title). 
+ Msg("FetchMissingAlbumImages: Attempting to fetch missing album image") + + var imgid uuid.UUID + imgUrl, imgErr := images.GetAlbumImage(ctx, images.AlbumImageOpts{ + Artists: utils.FlattenSimpleArtistNames(album.Artists), + Album: album.Title, + ReleaseMbzID: album.MbzID, + }) + if imgErr == nil && imgUrl != "" { + imgid = uuid.New() + err = store.UpdateAlbum(ctx, db.UpdateAlbumOpts{ + ID: album.ID, + Image: imgid, + ImageSrc: imgUrl, + }) + if err != nil { + l.Err(err). + Str("title", album.Title). + Msg("FetchMissingAlbumImages: Failed to update album with image in database") + continue + } + l.Info(). + Str("name", album.Title). + Msg("FetchMissingAlbumImages: Successfully fetched missing album image") + } else { + l.Err(err). + Str("name", album.Title). + Msg("FetchMissingAlbumImages: Failed to fetch album image") + } + } + } +} diff --git a/internal/catalog/submit_listen_test.go b/internal/catalog/submit_listen_test.go index c1ff2f7..1548776 100644 --- a/internal/catalog/submit_listen_test.go +++ b/internal/catalog/submit_listen_test.go @@ -63,7 +63,7 @@ func TestSubmitListen_CreateAllMbzIDs(t *testing.T) { assert.True(t, exists, "expected listen row to exist") // Verify that listen time is correct - p, err := store.GetListensPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 1}) + p, err := store.GetListensPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 1, Timeframe: db.Timeframe{Period: db.PeriodAllTime}}) require.NoError(t, err) require.Len(t, p.Items, 1) l := p.Items[0] @@ -282,6 +282,73 @@ func TestSubmitListen_MatchAllMbzIDs(t *testing.T) { assert.Equal(t, 1, count, "duplicate artist created") } +func TestSubmitListen_DoNotOverwriteMbzIDs(t *testing.T) { + setupTestDataWithMbzIDs(t) + + // artist gets matched with musicbrainz id + // release gets matched with mbz id + // track gets matched with mbz id + + ctx := context.Background() + mbzc := &mbz.MbzMockCaller{ + Artists: mbzArtistData, + Releases: mbzReleaseData, + Tracks: mbzTrackData, + } + artistMbzID 
:= uuid.MustParse("10000000-0000-0000-0000-000000000000") + releaseMbzID := uuid.MustParse("01000000-0000-0000-0000-000000000000") + existingReleaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101") + trackMbzID := uuid.MustParse("00100000-0000-0000-0000-000000000000") + opts := catalog.SubmitListenOpts{ + MbzCaller: mbzc, + ArtistNames: []string{"ATARASHII GAKKO!"}, + Artist: "ATARASHII GAKKO!", + ArtistMbzIDs: []uuid.UUID{ + artistMbzID, + }, + TrackTitle: "Tokyo Calling", + RecordingMbzID: trackMbzID, + ReleaseTitle: "AG! Calling", + ReleaseMbzID: releaseMbzID, + Time: time.Now(), + UserID: 1, + } + + err := catalog.SubmitListen(ctx, store, opts) + require.NoError(t, err) + + // Verify that the listen was saved + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.True(t, exists, "expected listen row to exist") + + // verify that track, release group, and artist are existing ones and not duplicates + count, err := store.Count(ctx, ` + SELECT COUNT(*) FROM tracks_with_title WHERE musicbrainz_id = $1 + `, trackMbzID) + require.NoError(t, err) + assert.Equal(t, 0, count, "duplicate track created") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM releases_with_title WHERE musicbrainz_id = $1 + `, releaseMbzID) + require.NoError(t, err) + assert.Equal(t, 0, count, "duplicate release group created") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM releases_with_title WHERE musicbrainz_id = $1 + `, existingReleaseMbzID) + require.NoError(t, err) + assert.Equal(t, 1, count, "existing release group should not be overwritten") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM artists_with_name WHERE musicbrainz_id = $1 + `, artistMbzID) + require.NoError(t, err) + assert.Equal(t, 0, count, "duplicate artist created") +} + func TestSubmitListen_MatchTrackFromMbzTitle(t *testing.T) { setupTestDataSansMbzIDs(t) diff --git a/internal/cfg/cfg.go 
b/internal/cfg/cfg.go index 9e537eb..0cfc7bb 100644 --- a/internal/cfg/cfg.go +++ b/internal/cfg/cfg.go @@ -38,6 +38,7 @@ const ( DISABLE_MUSICBRAINZ_ENV = "KOITO_DISABLE_MUSICBRAINZ" SUBSONIC_URL_ENV = "KOITO_SUBSONIC_URL" SUBSONIC_PARAMS_ENV = "KOITO_SUBSONIC_PARAMS" + LASTFM_API_KEY_ENV = "KOITO_LASTFM_API_KEY" SKIP_IMPORT_ENV = "KOITO_SKIP_IMPORT" ALLOWED_HOSTS_ENV = "KOITO_ALLOWED_HOSTS" CORS_ORIGINS_ENV = "KOITO_CORS_ALLOWED_ORIGINS" @@ -48,6 +49,7 @@ const ( FETCH_IMAGES_DURING_IMPORT_ENV = "KOITO_FETCH_IMAGES_DURING_IMPORT" ARTIST_SEPARATORS_ENV = "KOITO_ARTIST_SEPARATORS_REGEX" LOGIN_GATE_ENV = "KOITO_LOGIN_GATE" + FORCE_TZ = "KOITO_FORCE_TZ" ) type config struct { @@ -72,6 +74,7 @@ type config struct { disableMusicBrainz bool subsonicUrl string subsonicParams string + lastfmApiKey string subsonicEnabled bool skipImport bool fetchImageDuringImport bool @@ -85,6 +88,7 @@ type config struct { importAfter time.Time artistSeparators []*regexp.Regexp loginGate bool + forceTZ *time.Location } var ( @@ -165,6 +169,7 @@ func loadConfig(getenv func(string) string, version string) (*config, error) { if cfg.subsonicEnabled && (cfg.subsonicUrl == "" || cfg.subsonicParams == "") { return nil, fmt.Errorf("loadConfig: invalid configuration: both %s and %s must be set in order to use subsonic image fetching", SUBSONIC_URL_ENV, SUBSONIC_PARAMS_ENV) } + cfg.lastfmApiKey = getenv(LASTFM_API_KEY_ENV) cfg.skipImport = parseBool(getenv(SKIP_IMPORT_ENV)) cfg.userAgent = fmt.Sprintf("Koito %s (contact@koito.io)", version) @@ -210,6 +215,13 @@ func loadConfig(getenv func(string) string, version string) (*config, error) { cfg.loginGate = true } + if getenv(FORCE_TZ) != "" { + cfg.forceTZ, err = time.LoadLocation(getenv(FORCE_TZ)) + if err != nil { + return nil, fmt.Errorf("forced timezone '%s' is not a valid timezone", getenv(FORCE_TZ)) + } + } + switch strings.ToLower(getenv(LOG_LEVEL_ENV)) { case "debug": cfg.logLevel = 0 @@ -232,192 +244,3 @@ func parseBool(s string) bool { 
return false } } - -// Global accessors for configuration values - -func UserAgent() string { - lock.RLock() - defer lock.RUnlock() - return globalConfig.userAgent -} - -func ListenAddr() string { - lock.RLock() - defer lock.RUnlock() - return fmt.Sprintf("%s:%d", globalConfig.bindAddr, globalConfig.listenPort) -} - -func ConfigDir() string { - lock.RLock() - defer lock.RUnlock() - return globalConfig.configDir -} - -func DatabaseUrl() string { - lock.RLock() - defer lock.RUnlock() - return globalConfig.databaseUrl -} - -func MusicBrainzUrl() string { - lock.RLock() - defer lock.RUnlock() - return globalConfig.musicBrainzUrl -} - -func MusicBrainzRateLimit() int { - lock.RLock() - defer lock.RUnlock() - return globalConfig.musicBrainzRateLimit -} - -func LogLevel() int { - lock.RLock() - defer lock.RUnlock() - return globalConfig.logLevel -} - -func StructuredLogging() bool { - lock.RLock() - defer lock.RUnlock() - return globalConfig.structuredLogging -} - -func LbzRelayEnabled() bool { - lock.RLock() - defer lock.RUnlock() - return globalConfig.lbzRelayEnabled -} - -func LbzRelayUrl() string { - lock.RLock() - defer lock.RUnlock() - return globalConfig.lbzRelayUrl -} - -func LbzRelayToken() string { - lock.RLock() - defer lock.RUnlock() - return globalConfig.lbzRelayToken -} - -func DefaultPassword() string { - lock.RLock() - defer lock.RUnlock() - return globalConfig.defaultPw -} - -func DefaultUsername() string { - lock.RLock() - defer lock.RUnlock() - return globalConfig.defaultUsername -} - -func DefaultTheme() string { - lock.RLock() - defer lock.RUnlock() - return globalConfig.defaultTheme -} - -func FullImageCacheEnabled() bool { - lock.RLock() - defer lock.RUnlock() - return globalConfig.enableFullImageCache -} - -func DeezerDisabled() bool { - lock.RLock() - defer lock.RUnlock() - return globalConfig.disableDeezer -} - -func CoverArtArchiveDisabled() bool { - lock.RLock() - defer lock.RUnlock() - return globalConfig.disableCAA -} - -func 
MusicBrainzDisabled() bool { - lock.RLock() - defer lock.RUnlock() - return globalConfig.disableMusicBrainz -} - -func SubsonicEnabled() bool { - lock.RLock() - defer lock.RUnlock() - return globalConfig.subsonicEnabled -} - -func SubsonicUrl() string { - lock.RLock() - defer lock.RUnlock() - return globalConfig.subsonicUrl -} - -func SubsonicParams() string { - lock.RLock() - defer lock.RUnlock() - return globalConfig.subsonicParams -} - -func SkipImport() bool { - lock.RLock() - defer lock.RUnlock() - return globalConfig.skipImport -} - -func AllowedHosts() []string { - lock.RLock() - defer lock.RUnlock() - return globalConfig.allowedHosts -} - -func AllowAllHosts() bool { - lock.RLock() - defer lock.RUnlock() - return globalConfig.allowAllHosts -} - -func AllowedOrigins() []string { - lock.RLock() - defer lock.RUnlock() - return globalConfig.allowedOrigins -} - -func RateLimitDisabled() bool { - lock.RLock() - defer lock.RUnlock() - return globalConfig.disableRateLimit -} - -func ThrottleImportMs() int { - lock.RLock() - defer lock.RUnlock() - return globalConfig.importThrottleMs -} - -// returns the before, after times, in that order -func ImportWindow() (time.Time, time.Time) { - lock.RLock() - defer lock.RUnlock() - return globalConfig.importBefore, globalConfig.importAfter -} - -func FetchImagesDuringImport() bool { - lock.RLock() - defer lock.RUnlock() - return globalConfig.fetchImageDuringImport -} - -func ArtistSeparators() []*regexp.Regexp { - lock.RLock() - defer lock.RUnlock() - return globalConfig.artistSeparators -} - -func LoginGate() bool { - lock.RLock() - defer lock.RUnlock() - return globalConfig.loginGate -} diff --git a/internal/cfg/getters.go b/internal/cfg/getters.go new file mode 100644 index 0000000..596ca9d --- /dev/null +++ b/internal/cfg/getters.go @@ -0,0 +1,206 @@ +package cfg + +import ( + "fmt" + "regexp" + "time" +) + +func UserAgent() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.userAgent +} + +func 
ListenAddr() string { + lock.RLock() + defer lock.RUnlock() + return fmt.Sprintf("%s:%d", globalConfig.bindAddr, globalConfig.listenPort) +} + +func ConfigDir() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.configDir +} + +func DatabaseUrl() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.databaseUrl +} + +func MusicBrainzUrl() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.musicBrainzUrl +} + +func MusicBrainzRateLimit() int { + lock.RLock() + defer lock.RUnlock() + return globalConfig.musicBrainzRateLimit +} + +func LogLevel() int { + lock.RLock() + defer lock.RUnlock() + return globalConfig.logLevel +} + +func StructuredLogging() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.structuredLogging +} + +func LbzRelayEnabled() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.lbzRelayEnabled +} + +func LbzRelayUrl() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.lbzRelayUrl +} + +func LbzRelayToken() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.lbzRelayToken +} + +func DefaultPassword() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.defaultPw +} + +func DefaultUsername() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.defaultUsername +} + +func DefaultTheme() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.defaultTheme +} + +func FullImageCacheEnabled() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.enableFullImageCache +} + +func DeezerDisabled() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.disableDeezer +} + +func CoverArtArchiveDisabled() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.disableCAA +} + +func MusicBrainzDisabled() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.disableMusicBrainz +} + +func SubsonicEnabled() bool { + lock.RLock() + defer lock.RUnlock() + return 
globalConfig.subsonicEnabled +} + +func SubsonicUrl() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.subsonicUrl +} + +func SubsonicParams() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.subsonicParams +} + +func LastFMApiKey() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.lastfmApiKey +} + +func SkipImport() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.skipImport +} + +func AllowedHosts() []string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.allowedHosts +} + +func AllowAllHosts() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.allowAllHosts +} + +func AllowedOrigins() []string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.allowedOrigins +} + +func RateLimitDisabled() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.disableRateLimit +} + +func ThrottleImportMs() int { + lock.RLock() + defer lock.RUnlock() + return globalConfig.importThrottleMs +} + +// returns the before, after times, in that order +func ImportWindow() (time.Time, time.Time) { + lock.RLock() + defer lock.RUnlock() + return globalConfig.importBefore, globalConfig.importAfter +} + +func FetchImagesDuringImport() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.fetchImageDuringImport +} + +func ArtistSeparators() []*regexp.Regexp { + lock.RLock() + defer lock.RUnlock() + return globalConfig.artistSeparators +} + +func LoginGate() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.loginGate +} + +func ForceTZ() *time.Location { + lock.RLock() + defer lock.RUnlock() + return globalConfig.forceTZ +} diff --git a/internal/cfg/setters.go b/internal/cfg/setters.go new file mode 100644 index 0000000..8458780 --- /dev/null +++ b/internal/cfg/setters.go @@ -0,0 +1,7 @@ +package cfg + +func SetLoginGate(val bool) { + lock.Lock() + defer lock.Unlock() + globalConfig.loginGate = val +} diff --git a/internal/db/db.go 
b/internal/db/db.go index a4f1b43..97badac 100644 --- a/internal/db/db.go +++ b/internal/db/db.go @@ -14,12 +14,14 @@ type DB interface { GetArtist(ctx context.Context, opts GetArtistOpts) (*models.Artist, error) GetAlbum(ctx context.Context, opts GetAlbumOpts) (*models.Album, error) + GetAlbumWithNoMbzIDByTitles(ctx context.Context, artistId int32, titles []string) (*models.Album, error) GetTrack(ctx context.Context, opts GetTrackOpts) (*models.Track, error) + GetTracksWithNoDurationButHaveMbzID(ctx context.Context, from int32) ([]*models.Track, error) GetArtistsForAlbum(ctx context.Context, id int32) ([]*models.Artist, error) GetArtistsForTrack(ctx context.Context, id int32) ([]*models.Artist, error) - GetTopTracksPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[*models.Track], error) - GetTopArtistsPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[*models.Artist], error) - GetTopAlbumsPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[*models.Album], error) + GetTopTracksPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[RankedItem[*models.Track]], error) + GetTopArtistsPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[RankedItem[*models.Artist]], error) + GetTopAlbumsPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[RankedItem[*models.Album]], error) GetListensPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[*models.Listen], error) GetListenActivity(ctx context.Context, opts ListenActivityOpts) ([]ListenActivityItem, error) GetAllArtistAliases(ctx context.Context, id int32) ([]models.Alias, error) @@ -29,6 +31,7 @@ type DB interface { GetUserBySession(ctx context.Context, sessionId uuid.UUID) (*models.User, error) GetUserByUsername(ctx context.Context, username string) (*models.User, error) GetUserByApiKey(ctx context.Context, key string) (*models.User, error) + GetInterest(ctx context.Context, opts GetInterestOpts) 
([]InterestBucket, error) // Save @@ -85,6 +88,7 @@ type DB interface { // in seconds CountTimeListenedToItem(ctx context.Context, opts TimeListenedOpts) (int64, error) CountUsers(ctx context.Context) (int64, error) + // Search SearchArtists(ctx context.Context, q string) ([]*models.Artist, error) @@ -102,6 +106,7 @@ type DB interface { ImageHasAssociation(ctx context.Context, image uuid.UUID) (bool, error) GetImageSource(ctx context.Context, image uuid.UUID) (string, error) AlbumsWithoutImages(ctx context.Context, from int32) ([]*models.Album, error) + ArtistsWithoutImages(ctx context.Context, from int32) ([]*models.Artist, error) GetExportPage(ctx context.Context, opts GetExportPageOpts) ([]*ExportItem, error) Ping(ctx context.Context) error Close(ctx context.Context) diff --git a/internal/db/opts.go b/internal/db/opts.go index 4ee59c9..cb23bd3 100644 --- a/internal/db/opts.go +++ b/internal/db/opts.go @@ -27,6 +27,7 @@ type GetTrackOpts struct { ID int32 MusicBrainzID uuid.UUID Title string + ReleaseID int32 ArtistIDs []int32 } @@ -116,14 +117,9 @@ type AddArtistsToAlbumOpts struct { } type GetItemsOpts struct { - Limit int - Period Period - Page int - Week int // 1-52 - Month int // 1-12 - Year int - From int64 // unix timestamp - To int64 // unix timestamp + Limit int + Page int + Timeframe Timeframe // Used only for getting top tracks ArtistID int @@ -138,6 +134,7 @@ type ListenActivityOpts struct { Range int Month int Year int + Timezone *time.Location AlbumID int32 ArtistID int32 TrackID int32 @@ -156,3 +153,10 @@ type GetExportPageOpts struct { TrackID int32 Limit int32 } + +type GetInterestOpts struct { + Buckets int + AlbumID int32 + ArtistID int32 + TrackID int32 +} diff --git a/internal/db/period.go b/internal/db/period.go index e6f38a3..00c4886 100644 --- a/internal/db/period.go +++ b/internal/db/period.go @@ -6,23 +6,6 @@ import ( // should this be in db package ??? 
-type Timeframe struct { - Period Period - T1u int64 - T2u int64 -} - -func TimeframeToTimeRange(timeframe Timeframe) (t1, t2 time.Time) { - if timeframe.T1u == 0 && timeframe.T2u == 0 { - t2 = time.Now() - t1 = StartTimeFromPeriod(timeframe.Period) - } else { - t1 = time.Unix(timeframe.T1u, 0) - t2 = time.Unix(timeframe.T2u, 0) - } - return -} - type Period string const ( @@ -31,9 +14,12 @@ const ( PeriodMonth Period = "month" PeriodYear Period = "year" PeriodAllTime Period = "all_time" - PeriodDefault Period = "day" ) +func (p Period) IsZero() bool { + return p == "" +} + func StartTimeFromPeriod(p Period) time.Time { now := time.Now() switch p { @@ -71,17 +57,21 @@ const ( // and end will be 23:59:59 on Saturday at the end of the current week. // If opts.Year (or opts.Year + opts.Month) is provided, start and end will simply by the start and end times of that year/month. func ListenActivityOptsToTimes(opts ListenActivityOpts) (start, end time.Time) { - now := time.Now() + loc := opts.Timezone + if loc == nil { + loc, _ = time.LoadLocation("UTC") + } + now := time.Now().In(loc) // If Year (and optionally Month) are specified, use calendar boundaries if opts.Year != 0 { if opts.Month != 0 { // Specific month of a specific year - start = time.Date(opts.Year, time.Month(opts.Month), 1, 0, 0, 0, 0, now.Location()) + start = time.Date(opts.Year, time.Month(opts.Month), 1, 0, 0, 0, 0, loc) end = start.AddDate(0, 1, 0).Add(-time.Nanosecond) } else { // Whole year - start = time.Date(opts.Year, 1, 1, 0, 0, 0, 0, now.Location()) + start = time.Date(opts.Year, 1, 1, 0, 0, 0, 0, loc) end = start.AddDate(1, 0, 0).Add(-time.Nanosecond) } return start, end @@ -93,30 +83,32 @@ func ListenActivityOptsToTimes(opts ListenActivityOpts) (start, end time.Time) { // Determine step and align accordingly switch opts.Step { case StepDay: - today := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, now.Location()) + today := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 
0, loc) start = today.AddDate(0, 0, -opts.Range) end = today.AddDate(0, 0, 1).Add(-time.Nanosecond) case StepWeek: // Align to most recent Sunday weekday := int(now.Weekday()) // Sunday = 0 - startOfThisWeek := time.Date(now.Year(), now.Month(), now.Day()-weekday, 0, 0, 0, 0, now.Location()) - start = startOfThisWeek.AddDate(0, 0, -7*opts.Range) + startOfThisWeek := time.Date(now.Year(), now.Month(), now.Day()-weekday, 0, 0, 0, 0, loc) + // need to subtract 1 from range for week because we are going back from the beginning of this + // week, so we sort of already went back a week + start = startOfThisWeek.AddDate(0, 0, -7*(opts.Range-1)) end = startOfThisWeek.AddDate(0, 0, 7).Add(-time.Nanosecond) case StepMonth: - firstOfThisMonth := time.Date(now.Year(), now.Month(), 1, 0, 0, 0, 0, now.Location()) + firstOfThisMonth := time.Date(now.Year(), now.Month(), 1, 0, 0, 0, 0, loc) start = firstOfThisMonth.AddDate(0, -opts.Range, 0) end = firstOfThisMonth.AddDate(0, 1, 0).Add(-time.Nanosecond) case StepYear: - firstOfThisYear := time.Date(now.Year(), 1, 1, 0, 0, 0, 0, now.Location()) + firstOfThisYear := time.Date(now.Year(), 1, 1, 0, 0, 0, 0, loc) start = firstOfThisYear.AddDate(-opts.Range, 0, 0) end = firstOfThisYear.AddDate(1, 0, 0).Add(-time.Nanosecond) default: // Default to daily - today := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, now.Location()) + today := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, loc) start = today.AddDate(0, 0, -opts.Range) end = today.AddDate(0, 0, 1).Add(-time.Nanosecond) } diff --git a/internal/db/period_test.go b/internal/db/period_test.go index 8705ce7..0878637 100644 --- a/internal/db/period_test.go +++ b/internal/db/period_test.go @@ -3,6 +3,9 @@ package db_test import ( "testing" "time" + + "github.com/gabehf/koito/internal/db" + "github.com/stretchr/testify/require" ) func TestListenActivityOptsToTimes(t *testing.T) { @@ -21,6 +24,11 @@ func eod(t time.Time) time.Time { return time.Date(year, month, day, 
23, 59, 59, 0, loc) } +func TestPeriodUnset(t *testing.T) { + var p db.Period + require.True(t, p.IsZero()) +} + func bod(t time.Time) time.Time { year, month, day := t.Date() loc := t.Location() diff --git a/internal/db/psql/album.go b/internal/db/psql/album.go index 5343e08..758c287 100644 --- a/internal/db/psql/album.go +++ b/internal/db/psql/album.go @@ -23,32 +23,13 @@ func (d *Psql) GetAlbum(ctx context.Context, opts db.GetAlbumOpts) (*models.Albu var err error var ret = new(models.Album) - if opts.ID != 0 { - l.Debug().Msgf("Fetching album from DB with id %d", opts.ID) - row, err := d.q.GetRelease(ctx, opts.ID) - if err != nil { - return nil, fmt.Errorf("GetAlbum: %w", err) - } - ret.ID = row.ID - ret.MbzID = row.MusicBrainzID - ret.Title = row.Title - ret.Image = row.Image - ret.VariousArtists = row.VariousArtists - err = json.Unmarshal(row.Artists, &ret.Artists) - if err != nil { - return nil, fmt.Errorf("GetAlbum: json.Unmarshal: %w", err) - } - } else if opts.MusicBrainzID != uuid.Nil { + if opts.MusicBrainzID != uuid.Nil { l.Debug().Msgf("Fetching album from DB with MusicBrainz Release ID %s", opts.MusicBrainzID) row, err := d.q.GetReleaseByMbzID(ctx, &opts.MusicBrainzID) if err != nil { return nil, fmt.Errorf("GetAlbum: %w", err) } - ret.ID = row.ID - ret.MbzID = row.MusicBrainzID - ret.Title = row.Title - ret.Image = row.Image - ret.VariousArtists = row.VariousArtists + opts.ID = row.ID } else if opts.ArtistID != 0 && opts.Title != "" { l.Debug().Msgf("Fetching album from DB with artist_id %d and title %s", opts.ArtistID, opts.Title) row, err := d.q.GetReleaseByArtistAndTitle(ctx, repository.GetReleaseByArtistAndTitleParams{ @@ -58,11 +39,7 @@ func (d *Psql) GetAlbum(ctx context.Context, opts db.GetAlbumOpts) (*models.Albu if err != nil { return nil, fmt.Errorf("GetAlbum: %w", err) } - ret.ID = row.ID - ret.MbzID = row.MusicBrainzID - ret.Title = row.Title - ret.Image = row.Image - ret.VariousArtists = row.VariousArtists + opts.ID = row.ID } else if 
opts.ArtistID != 0 && len(opts.Titles) > 0 { l.Debug().Msgf("Fetching release group from DB with artist_id %d and titles %v", opts.ArtistID, opts.Titles) row, err := d.q.GetReleaseByArtistAndTitles(ctx, repository.GetReleaseByArtistAndTitlesParams{ @@ -72,22 +49,87 @@ func (d *Psql) GetAlbum(ctx context.Context, opts db.GetAlbumOpts) (*models.Albu if err != nil { return nil, fmt.Errorf("GetAlbum: %w", err) } + opts.ID = row.ID + } + + l.Debug().Msgf("Fetching album from DB with id %d", opts.ID) + row, err := d.q.GetRelease(ctx, opts.ID) + if err != nil { + return nil, fmt.Errorf("GetAlbum: %w", err) + } + + count, err := d.q.CountListensFromRelease(ctx, repository.CountListensFromReleaseParams{ + ListenedAt: time.Unix(0, 0), + ListenedAt_2: time.Now(), + ReleaseID: opts.ID, + }) + if err != nil { + return nil, fmt.Errorf("GetAlbum: CountListensFromRelease: %w", err) + } + + seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{ + Timeframe: db.Timeframe{Period: db.PeriodAllTime}, + AlbumID: opts.ID, + }) + if err != nil { + return nil, fmt.Errorf("GetAlbum: CountTimeListenedToItem: %w", err) + } + + firstListen, err := d.q.GetFirstListenFromRelease(ctx, opts.ID) + if err != nil && !errors.Is(err, pgx.ErrNoRows) { + return nil, fmt.Errorf("GetAlbum: GetFirstListenFromRelease: %w", err) + } + + rank, err := d.q.GetReleaseAllTimeRank(ctx, opts.ID) + if err != nil && !errors.Is(err, pgx.ErrNoRows) { + return nil, fmt.Errorf("GetAlbum: GetReleaseAllTimeRank: %w", err) + } + + ret.ID = row.ID + ret.MbzID = row.MusicBrainzID + ret.Title = row.Title + ret.Image = row.Image + ret.VariousArtists = row.VariousArtists + err = json.Unmarshal(row.Artists, &ret.Artists) + if err != nil { + return nil, fmt.Errorf("GetAlbum: json.Unmarshal: %w", err) + } + ret.AllTimeRank = rank.Rank + ret.ListenCount = count + ret.TimeListened = seconds + ret.FirstListen = firstListen.ListenedAt.Unix() + + return ret, nil +} + +func (d *Psql) GetAlbumWithNoMbzIDByTitles(ctx 
context.Context, artistId int32, titles []string) (*models.Album, error) { + l := logger.FromContext(ctx) + ret := new(models.Album) + + if artistId != 0 && len(titles) > 0 { + l.Debug().Msgf("GetAlbumWithNoMbzIDByTitles: Fetching release group from DB with artist_id %d and titles %v and no associated MusicBrainz ID", artistId, titles) + row, err := d.q.GetReleaseByArtistAndTitlesNoMbzID(ctx, repository.GetReleaseByArtistAndTitlesNoMbzIDParams{ + ArtistID: artistId, + Column1: titles, + }) + if err != nil { + return nil, fmt.Errorf("GetAlbum: %w", err) + } ret.ID = row.ID ret.MbzID = row.MusicBrainzID ret.Title = row.Title ret.Image = row.Image ret.VariousArtists = row.VariousArtists } else { - return nil, errors.New("GetAlbum: insufficient information to get album") + return nil, errors.New("GetAlbumWithNoMbzIDByTitles: insufficient information to get album") } - count, err := d.q.CountListensFromRelease(ctx, repository.CountListensFromReleaseParams{ ListenedAt: time.Unix(0, 0), ListenedAt_2: time.Now(), ReleaseID: ret.ID, }) if err != nil { - return nil, fmt.Errorf("GetAlbum: CountListensFromRelease: %w", err) + return nil, fmt.Errorf("GetAlbumWithNoMbzIDByTitles: CountListensFromRelease: %w", err) } seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{ @@ -95,12 +137,12 @@ func (d *Psql) GetAlbum(ctx context.Context, opts db.GetAlbumOpts) (*models.Albu AlbumID: ret.ID, }) if err != nil { - return nil, fmt.Errorf("GetAlbum: CountTimeListenedToItem: %w", err) + return nil, fmt.Errorf("GetAlbumWithNoMbzIDByTitles: CountTimeListenedToItem: %w", err) } firstListen, err := d.q.GetFirstListenFromRelease(ctx, ret.ID) if err != nil && !errors.Is(err, pgx.ErrNoRows) { - return nil, fmt.Errorf("GetAlbum: GetFirstListenFromRelease: %w", err) + return nil, fmt.Errorf("GetAlbumWithNoMbzIDByTitles: GetFirstListenFromRelease: %w", err) } ret.ListenCount = count @@ -232,6 +274,9 @@ func (d *Psql) UpdateAlbum(ctx context.Context, opts db.UpdateAlbumOpts) error { } } 
if opts.Image != uuid.Nil { + if opts.ImageSrc == "" { + return fmt.Errorf("UpdateAlbum: image source must be provided when updating an image") + } l.Debug().Msgf("Updating release with ID %d with image %s", opts.ID, opts.Image) err := qtx.UpdateReleaseImage(ctx, repository.UpdateReleaseImageParams{ ID: opts.ID, diff --git a/internal/db/psql/artist.go b/internal/db/psql/artist.go index a67fc4c..859a490 100644 --- a/internal/db/psql/artist.go +++ b/internal/db/psql/artist.go @@ -20,114 +20,60 @@ import ( // this function sucks because sqlc keeps making new types for rows that are the same func (d *Psql) GetArtist(ctx context.Context, opts db.GetArtistOpts) (*models.Artist, error) { l := logger.FromContext(ctx) - if opts.ID != 0 { - l.Debug().Msgf("Fetching artist from DB with id %d", opts.ID) - row, err := d.q.GetArtist(ctx, opts.ID) - if err != nil { - return nil, fmt.Errorf("GetArtist: GetArtist by ID: %w", err) - } - count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{ - ListenedAt: time.Unix(0, 0), - ListenedAt_2: time.Now(), - ArtistID: row.ID, - }) - if err != nil { - return nil, fmt.Errorf("GetArtist: CountListensFromArtist: %w", err) - } - seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{ - Timeframe: db.Timeframe{Period: db.PeriodAllTime}, - ArtistID: row.ID, - }) - if err != nil { - return nil, fmt.Errorf("GetArtist: CountTimeListenedToItem: %w", err) - } - firstListen, err := d.q.GetFirstListenFromArtist(ctx, row.ID) - if err != nil && !errors.Is(err, pgx.ErrNoRows) { - return nil, fmt.Errorf("GetAlbum: GetFirstListenFromArtist: %w", err) - } - return &models.Artist{ - ID: row.ID, - MbzID: row.MusicBrainzID, - Name: row.Name, - Aliases: row.Aliases, - Image: row.Image, - ListenCount: count, - TimeListened: seconds, - FirstListen: firstListen.ListenedAt.Unix(), - }, nil - } else if opts.MusicBrainzID != uuid.Nil { + if opts.MusicBrainzID != uuid.Nil { l.Debug().Msgf("Fetching artist from DB with 
MusicBrainz ID %s", opts.MusicBrainzID) row, err := d.q.GetArtistByMbzID(ctx, &opts.MusicBrainzID) if err != nil { return nil, fmt.Errorf("GetArtist: GetArtistByMbzID: %w", err) } - count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{ - ListenedAt: time.Unix(0, 0), - ListenedAt_2: time.Now(), - ArtistID: row.ID, - }) - if err != nil { - return nil, fmt.Errorf("GetArtist: CountListensFromArtist: %w", err) - } - seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{ - Timeframe: db.Timeframe{Period: db.PeriodAllTime}, - ArtistID: row.ID, - }) - if err != nil { - return nil, fmt.Errorf("GetArtist: CountTimeListenedToItem: %w", err) - } - firstListen, err := d.q.GetFirstListenFromArtist(ctx, row.ID) - if err != nil && !errors.Is(err, pgx.ErrNoRows) { - return nil, fmt.Errorf("GetAlbum: GetFirstListenFromArtist: %w", err) - } - return &models.Artist{ - ID: row.ID, - MbzID: row.MusicBrainzID, - Name: row.Name, - Aliases: row.Aliases, - Image: row.Image, - ListenCount: count, - TimeListened: seconds, - FirstListen: firstListen.ListenedAt.Unix(), - }, nil + opts.ID = row.ID } else if opts.Name != "" { l.Debug().Msgf("Fetching artist from DB with name '%s'", opts.Name) row, err := d.q.GetArtistByName(ctx, opts.Name) if err != nil { return nil, fmt.Errorf("GetArtist: GetArtistByName: %w", err) } - count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{ - ListenedAt: time.Unix(0, 0), - ListenedAt_2: time.Now(), - ArtistID: row.ID, - }) - if err != nil { - return nil, fmt.Errorf("GetArtist: CountListensFromArtist: %w", err) - } - seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{ - Timeframe: db.Timeframe{Period: db.PeriodAllTime}, - ArtistID: row.ID, - }) - if err != nil { - return nil, fmt.Errorf("GetArtist: CountTimeListenedToItem: %w", err) - } - firstListen, err := d.q.GetFirstListenFromArtist(ctx, row.ID) - if err != nil && !errors.Is(err, pgx.ErrNoRows) { - return nil, 
fmt.Errorf("GetAlbum: GetFirstListenFromArtist: %w", err) - } - return &models.Artist{ - ID: row.ID, - MbzID: row.MusicBrainzID, - Name: row.Name, - Aliases: row.Aliases, - Image: row.Image, - ListenCount: count, - TimeListened: seconds, - FirstListen: firstListen.ListenedAt.Unix(), - }, nil - } else { - return nil, errors.New("insufficient information to get artist") + opts.ID = row.ID } + l.Debug().Msgf("Fetching artist from DB with id %d", opts.ID) + row, err := d.q.GetArtist(ctx, opts.ID) + if err != nil { + return nil, fmt.Errorf("GetArtist: GetArtist by ID: %w", err) + } + count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{ + ListenedAt: time.Unix(0, 0), + ListenedAt_2: time.Now(), + ArtistID: row.ID, + }) + if err != nil { + return nil, fmt.Errorf("GetArtist: CountListensFromArtist: %w", err) + } + seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{ + Timeframe: db.Timeframe{Period: db.PeriodAllTime}, + ArtistID: row.ID, + }) + if err != nil { + return nil, fmt.Errorf("GetArtist: CountTimeListenedToItem: %w", err) + } + firstListen, err := d.q.GetFirstListenFromArtist(ctx, row.ID) + if err != nil && !errors.Is(err, pgx.ErrNoRows) { + return nil, fmt.Errorf("GetAlbum: GetFirstListenFromArtist: %w", err) + } + rank, err := d.q.GetArtistAllTimeRank(ctx, opts.ID) + if err != nil && !errors.Is(err, pgx.ErrNoRows) { + return nil, fmt.Errorf("GetArtist: GetArtistAllTimeRank: %w", err) + } + return &models.Artist{ + ID: row.ID, + MbzID: row.MusicBrainzID, + Name: row.Name, + Aliases: row.Aliases, + Image: row.Image, + ListenCount: count, + TimeListened: seconds, + AllTimeRank: rank.Rank, + FirstListen: firstListen.ListenedAt.Unix(), + }, nil } // Inserts all unique aliases into the DB with specified source @@ -264,6 +210,9 @@ func (d *Psql) UpdateArtist(ctx context.Context, opts db.UpdateArtistOpts) error } } if opts.Image != uuid.Nil { + if opts.ImageSrc == "" { + return fmt.Errorf("UpdateAlbum: image source must 
be provided when updating an image") + } l.Debug().Msgf("Updating artist with id %d with image %s", opts.ID, opts.Image) err = qtx.UpdateArtistImage(ctx, repository.UpdateArtistImageParams{ ID: opts.ID, diff --git a/internal/db/psql/counts_test.go b/internal/db/psql/counts_test.go index 688fdf4..d94003e 100644 --- a/internal/db/psql/counts_test.go +++ b/internal/db/psql/counts_test.go @@ -46,7 +46,7 @@ func TestCountNewTracks(t *testing.T) { t1u := t1.Unix() t2, _ := time.Parse(time.DateOnly, "2025-12-31") t2u := t2.Unix() - count, err := store.CountNewTracks(ctx, db.Timeframe{T1u: t1u, T2u: t2u}) + count, err := store.CountNewTracks(ctx, db.Timeframe{FromUnix: t1u, ToUnix: t2u}) require.NoError(t, err) assert.Equal(t, int64(1), count, "expected tracks count to match inserted data") @@ -76,7 +76,7 @@ func TestCountNewAlbums(t *testing.T) { t1u := t1.Unix() t2, _ := time.Parse(time.DateOnly, "2025-12-31") t2u := t2.Unix() - count, err := store.CountNewAlbums(ctx, db.Timeframe{T1u: t1u, T2u: t2u}) + count, err := store.CountNewAlbums(ctx, db.Timeframe{FromUnix: t1u, ToUnix: t2u}) require.NoError(t, err) assert.Equal(t, int64(1), count, "expected albums count to match inserted data") @@ -106,7 +106,7 @@ func TestCountNewArtists(t *testing.T) { t1u := t1.Unix() t2, _ := time.Parse(time.DateOnly, "2025-12-31") t2u := t2.Unix() - count, err := store.CountNewArtists(ctx, db.Timeframe{T1u: t1u, T2u: t2u}) + count, err := store.CountNewArtists(ctx, db.Timeframe{FromUnix: t1u, ToUnix: t2u}) require.NoError(t, err) assert.Equal(t, int64(1), count, "expected artists count to match inserted data") diff --git a/internal/db/psql/images.go b/internal/db/psql/images.go index 49e2850..eef0d8f 100644 --- a/internal/db/psql/images.go +++ b/internal/db/psql/images.go @@ -72,3 +72,26 @@ func (d *Psql) AlbumsWithoutImages(ctx context.Context, from int32) ([]*models.A } return albums, nil } + +// returns nil, nil on no results +func (d *Psql) ArtistsWithoutImages(ctx context.Context, from 
int32) ([]*models.Artist, error) { + rows, err := d.q.GetArtistsWithoutImages(ctx, repository.GetArtistsWithoutImagesParams{ + Limit: 20, + ID: from, + }) + if errors.Is(err, pgx.ErrNoRows) { + return nil, nil + } else if err != nil { + return nil, fmt.Errorf("ArtistsWithoutImages: %w", err) + } + + ret := make([]*models.Artist, len(rows)) + for i, row := range rows { + ret[i] = &models.Artist{ + ID: row.ID, + Name: row.Name, + MbzID: row.MusicBrainzID, + } + } + return ret, nil +} diff --git a/internal/db/psql/interest.go b/internal/db/psql/interest.go new file mode 100644 index 0000000..0c8f4eb --- /dev/null +++ b/internal/db/psql/interest.go @@ -0,0 +1,70 @@ +package psql + +import ( + "context" + "errors" + "fmt" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/repository" +) + +func (d *Psql) GetInterest(ctx context.Context, opts db.GetInterestOpts) ([]db.InterestBucket, error) { + if opts.Buckets == 0 { + return nil, errors.New("GetInterest: bucket count must be provided") + } + + ret := make([]db.InterestBucket, 0) + + if opts.ArtistID != 0 { + resp, err := d.q.GetGroupedListensFromArtist(ctx, repository.GetGroupedListensFromArtistParams{ + ArtistID: opts.ArtistID, + BucketCount: int32(opts.Buckets), + }) + if err != nil { + return nil, fmt.Errorf("GetInterest: GetGroupedListensFromArtist: %w", err) + } + for _, v := range resp { + ret = append(ret, db.InterestBucket{ + BucketStart: v.BucketStart, + BucketEnd: v.BucketEnd, + ListenCount: v.ListenCount, + }) + } + return ret, nil + } else if opts.AlbumID != 0 { + resp, err := d.q.GetGroupedListensFromRelease(ctx, repository.GetGroupedListensFromReleaseParams{ + ReleaseID: opts.AlbumID, + BucketCount: int32(opts.Buckets), + }) + if err != nil { + return nil, fmt.Errorf("GetInterest: GetGroupedListensFromRelease: %w", err) + } + for _, v := range resp { + ret = append(ret, db.InterestBucket{ + BucketStart: v.BucketStart, + BucketEnd: v.BucketEnd, + ListenCount: v.ListenCount, + }) + 
} + return ret, nil + } else if opts.TrackID != 0 { + resp, err := d.q.GetGroupedListensFromTrack(ctx, repository.GetGroupedListensFromTrackParams{ + ID: opts.TrackID, + BucketCount: int32(opts.Buckets), + }) + if err != nil { + return nil, fmt.Errorf("GetInterest: GetGroupedListensFromTrack: %w", err) + } + for _, v := range resp { + ret = append(ret, db.InterestBucket{ + BucketStart: v.BucketStart, + BucketEnd: v.BucketEnd, + ListenCount: v.ListenCount, + }) + } + return ret, nil + } else { + return nil, errors.New("GetInterest: artist id, album id, or track id must be provided") + } +} diff --git a/internal/db/psql/interest_test.go b/internal/db/psql/interest_test.go new file mode 100644 index 0000000..a00e796 --- /dev/null +++ b/internal/db/psql/interest_test.go @@ -0,0 +1,112 @@ +package psql_test + +import ( + "context" + "testing" + + "github.com/gabehf/koito/internal/db" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// an llm wrote this because i didn't feel like it. 
it looks like it works, although +// it could stand to be more thorough +func TestGetInterest(t *testing.T) { + truncateTestData(t) + + ctx := context.Background() + + // --- Setup Data --- + + // Insert Artists + err := store.Exec(ctx, ` + INSERT INTO artists (musicbrainz_id) + VALUES ('00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002')`) + require.NoError(t, err) + + // Insert Releases (Albums) + err = store.Exec(ctx, ` + INSERT INTO releases (musicbrainz_id) + VALUES ('00000000-0000-0000-0000-000000000011')`) + require.NoError(t, err) + + // Insert Tracks (Both on Release 1) + err = store.Exec(ctx, ` + INSERT INTO tracks (musicbrainz_id, release_id) + VALUES ('11111111-1111-1111-1111-111111111111', 1), + ('22222222-2222-2222-2222-222222222222', 1)`) + require.NoError(t, err) + + // Link Artists to Tracks + // Artist 1 -> Track 1 + // Artist 2 -> Track 2 + err = store.Exec(ctx, ` + INSERT INTO artist_tracks (artist_id, track_id) + VALUES (1, 1), (2, 2)`) + require.NoError(t, err) + + // Insert Listens + // Track 1 (Artist 1, Release 1): 3 Listens + // Track 2 (Artist 2, Release 1): 2 Listens + err = store.Exec(ctx, ` + INSERT INTO listens (user_id, track_id, listened_at) VALUES + (1, 1, NOW() - INTERVAL '1 hour'), + (1, 1, NOW() - INTERVAL '2 hours'), + (1, 1, NOW() - INTERVAL '3 hours'), + (1, 2, NOW() - INTERVAL '1 hour'), + (1, 2, NOW() - INTERVAL '2 hours') + `) + require.NoError(t, err) + + // --- Test Validation --- + + t.Run("Validation", func(t *testing.T) { + // Error: Missing Buckets + _, err := store.GetInterest(ctx, db.GetInterestOpts{ArtistID: 1}) + assert.Error(t, err) + assert.Contains(t, err.Error(), "bucket count must be provided") + + // Error: Missing ID + _, err = store.GetInterest(ctx, db.GetInterestOpts{Buckets: 10}) + assert.Error(t, err) + assert.Contains(t, err.Error(), "must be provided") + }) + + // --- Test Data Retrieval --- + // Note: We use Buckets: 1 to ensure all listens are aggregated into a single 
result + // for easier assertion, avoiding complex date/time math in the test. + + t.Run("Artist Interest", func(t *testing.T) { + // Artist 1 should have 3 listens (from Track 1) + buckets, err := store.GetInterest(ctx, db.GetInterestOpts{ + ArtistID: 1, + Buckets: 1, + }) + require.NoError(t, err) + require.Len(t, buckets, 1) + assert.EqualValues(t, 3, buckets[0].ListenCount, "Artist 1 should have 3 listens") + }) + + t.Run("Album Interest", func(t *testing.T) { + // Album 1 contains Track 1 (3 listens) and Track 2 (2 listens) = 5 Total + buckets, err := store.GetInterest(ctx, db.GetInterestOpts{ + AlbumID: 1, + Buckets: 1, + }) + require.NoError(t, err) + require.Len(t, buckets, 1) + assert.EqualValues(t, 5, buckets[0].ListenCount, "Album 1 should have 5 listens total") + }) + + t.Run("Track Interest", func(t *testing.T) { + // Track 2 should have 2 listens + buckets, err := store.GetInterest(ctx, db.GetInterestOpts{ + TrackID: 2, + Buckets: 1, + }) + require.NoError(t, err) + require.Len(t, buckets, 1) + assert.EqualValues(t, 2, buckets[0].ListenCount, "Track 2 should have 2 listens") + }) +} diff --git a/internal/db/psql/listen.go b/internal/db/psql/listen.go index 24aac20..add6b33 100644 --- a/internal/db/psql/listen.go +++ b/internal/db/psql/listen.go @@ -11,38 +11,20 @@ import ( "github.com/gabehf/koito/internal/logger" "github.com/gabehf/koito/internal/models" "github.com/gabehf/koito/internal/repository" - "github.com/gabehf/koito/internal/utils" ) func (d *Psql) GetListensPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[*models.Listen], error) { l := logger.FromContext(ctx) offset := (opts.Page - 1) * opts.Limit - var t1 time.Time - var t2 time.Time - if opts.From != 0 && opts.To != 0 { - t1 = time.Unix(int64(opts.From), 0) - t2 = time.Unix(int64(opts.To), 0) - } else { - t1R, t2R, err := utils.DateRange(opts.Week, opts.Month, opts.Year) - if err != nil { - return nil, fmt.Errorf("GetListensPaginated: %w", err) - } - t1 = t1R - 
t2 = t2R - if opts.Month == 0 && opts.Year == 0 { - // use period, not date range - t2 = time.Now() - t1 = db.StartTimeFromPeriod(opts.Period) - } - } + t1, t2 := db.TimeframeToTimeRange(opts.Timeframe) if opts.Limit == 0 { opts.Limit = DefaultItemsPerPage } var listens []*models.Listen var count int64 if opts.TrackID > 0 { - l.Debug().Msgf("Fetching %d listens with period %s on page %d from range %v to %v", - opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + l.Debug().Msgf("Fetching %d listens on page %d from range %v to %v", + opts.Limit, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) rows, err := d.q.GetLastListensFromTrackPaginated(ctx, repository.GetLastListensFromTrackPaginatedParams{ ListenedAt: t1, ListenedAt_2: t2, @@ -77,8 +59,8 @@ func (d *Psql) GetListensPaginated(ctx context.Context, opts db.GetItemsOpts) (* return nil, fmt.Errorf("GetListensPaginated: CountListensFromTrack: %w", err) } } else if opts.AlbumID > 0 { - l.Debug().Msgf("Fetching %d listens with period %s on page %d from range %v to %v", - opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + l.Debug().Msgf("Fetching %d listens on page %d from range %v to %v", + opts.Limit, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) rows, err := d.q.GetLastListensFromReleasePaginated(ctx, repository.GetLastListensFromReleasePaginatedParams{ ListenedAt: t1, ListenedAt_2: t2, @@ -113,8 +95,8 @@ func (d *Psql) GetListensPaginated(ctx context.Context, opts db.GetItemsOpts) (* return nil, fmt.Errorf("GetListensPaginated: CountListensFromRelease: %w", err) } } else if opts.ArtistID > 0 { - l.Debug().Msgf("Fetching %d listens with period %s on page %d from range %v to %v", - opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + l.Debug().Msgf("Fetching %d listens on page %d from range %v to %v", + opts.Limit, opts.Page, t1.Format("Jan 02, 2006"), 
t2.Format("Jan 02, 2006")) rows, err := d.q.GetLastListensFromArtistPaginated(ctx, repository.GetLastListensFromArtistPaginatedParams{ ListenedAt: t1, ListenedAt_2: t2, @@ -149,8 +131,8 @@ func (d *Psql) GetListensPaginated(ctx context.Context, opts db.GetItemsOpts) (* return nil, fmt.Errorf("GetListensPaginated: CountListensFromArtist: %w", err) } } else { - l.Debug().Msgf("Fetching %d listens with period %s on page %d from range %v to %v", - opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + l.Debug().Msgf("Fetching %d listens on page %d from range %v to %v", + opts.Limit, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) rows, err := d.q.GetLastListensPaginated(ctx, repository.GetLastListensPaginatedParams{ ListenedAt: t1, ListenedAt_2: t2, diff --git a/internal/db/psql/listen_activity.go b/internal/db/psql/listen_activity.go index 47b1a13..b2c7990 100644 --- a/internal/db/psql/listen_activity.go +++ b/internal/db/psql/listen_activity.go @@ -23,12 +23,12 @@ func (d *Psql) GetListenActivity(ctx context.Context, opts db.ListenActivityOpts var listenActivity []db.ListenActivityItem if opts.AlbumID > 0 { l.Debug().Msgf("Fetching listen activity for %d %s(s) from %v to %v for release group %d", - opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05"), t2.Format("Jan 02, 2006 15:04:05"), opts.AlbumID) + opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05 MST"), t2.Format("Jan 02, 2006 15:04:05 MST"), opts.AlbumID) rows, err := d.q.ListenActivityForRelease(ctx, repository.ListenActivityForReleaseParams{ - Column1: t1, - Column2: t2, - Column3: stepToInterval(opts.Step), - ReleaseID: opts.AlbumID, + Column1: opts.Timezone.String(), + ListenedAt: t1, + ListenedAt_2: t2, + ReleaseID: opts.AlbumID, }) if err != nil { return nil, fmt.Errorf("GetListenActivity: ListenActivityForRelease: %w", err) @@ -36,7 +36,7 @@ func (d *Psql) GetListenActivity(ctx context.Context, opts db.ListenActivityOpts listenActivity = 
make([]db.ListenActivityItem, len(rows)) for i, row := range rows { t := db.ListenActivityItem{ - Start: row.BucketStart, + Start: row.Day.Time, Listens: row.ListenCount, } listenActivity[i] = t @@ -44,12 +44,12 @@ func (d *Psql) GetListenActivity(ctx context.Context, opts db.ListenActivityOpts l.Debug().Msgf("Database responded with %d steps", len(rows)) } else if opts.ArtistID > 0 { l.Debug().Msgf("Fetching listen activity for %d %s(s) from %v to %v for artist %d", - opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05"), t2.Format("Jan 02, 2006 15:04:05"), opts.ArtistID) + opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05 MST"), t2.Format("Jan 02, 2006 15:04:05 MST"), opts.ArtistID) rows, err := d.q.ListenActivityForArtist(ctx, repository.ListenActivityForArtistParams{ - Column1: t1, - Column2: t2, - Column3: stepToInterval(opts.Step), - ArtistID: opts.ArtistID, + Column1: opts.Timezone.String(), + ListenedAt: t1, + ListenedAt_2: t2, + ArtistID: opts.ArtistID, }) if err != nil { return nil, fmt.Errorf("GetListenActivity: ListenActivityForArtist: %w", err) @@ -57,7 +57,7 @@ func (d *Psql) GetListenActivity(ctx context.Context, opts db.ListenActivityOpts listenActivity = make([]db.ListenActivityItem, len(rows)) for i, row := range rows { t := db.ListenActivityItem{ - Start: row.BucketStart, + Start: row.Day.Time, Listens: row.ListenCount, } listenActivity[i] = t @@ -65,12 +65,12 @@ func (d *Psql) GetListenActivity(ctx context.Context, opts db.ListenActivityOpts l.Debug().Msgf("Database responded with %d steps", len(rows)) } else if opts.TrackID > 0 { l.Debug().Msgf("Fetching listen activity for %d %s(s) from %v to %v for track %d", - opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05"), t2.Format("Jan 02, 2006 15:04:05"), opts.TrackID) + opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05 MST"), t2.Format("Jan 02, 2006 15:04:05 MST"), opts.TrackID) rows, err := d.q.ListenActivityForTrack(ctx, repository.ListenActivityForTrackParams{ - Column1: 
t1, - Column2: t2, - Column3: stepToInterval(opts.Step), - ID: opts.TrackID, + Column1: opts.Timezone.String(), + ListenedAt: t1, + ListenedAt_2: t2, + ID: opts.TrackID, }) if err != nil { return nil, fmt.Errorf("GetListenActivity: ListenActivityForTrack: %w", err) @@ -78,7 +78,7 @@ func (d *Psql) GetListenActivity(ctx context.Context, opts db.ListenActivityOpts listenActivity = make([]db.ListenActivityItem, len(rows)) for i, row := range rows { t := db.ListenActivityItem{ - Start: row.BucketStart, + Start: row.Day.Time, Listens: row.ListenCount, } listenActivity[i] = t @@ -86,11 +86,11 @@ func (d *Psql) GetListenActivity(ctx context.Context, opts db.ListenActivityOpts l.Debug().Msgf("Database responded with %d steps", len(rows)) } else { l.Debug().Msgf("Fetching listen activity for %d %s(s) from %v to %v", - opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05"), t2.Format("Jan 02, 2006 15:04:05")) + opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05 MST"), t2.Format("Jan 02, 2006 15:04:05 MST")) rows, err := d.q.ListenActivity(ctx, repository.ListenActivityParams{ - Column1: t1, - Column2: t2, - Column3: stepToInterval(opts.Step), + Column1: opts.Timezone.String(), + ListenedAt: t1, + ListenedAt_2: t2, }) if err != nil { return nil, fmt.Errorf("GetListenActivity: ListenActivity: %w", err) @@ -98,7 +98,7 @@ func (d *Psql) GetListenActivity(ctx context.Context, opts db.ListenActivityOpts listenActivity = make([]db.ListenActivityItem, len(rows)) for i, row := range rows { t := db.ListenActivityItem{ - Start: row.BucketStart, + Start: row.Day.Time, Listens: row.ListenCount, } listenActivity[i] = t diff --git a/internal/db/psql/listen_activity_test.go b/internal/db/psql/listen_activity_test.go index 1041823..affc202 100644 --- a/internal/db/psql/listen_activity_test.go +++ b/internal/db/psql/listen_activity_test.go @@ -22,55 +22,55 @@ func TestListenActivity(t *testing.T) { truncateTestData(t) err := store.Exec(context.Background(), - `INSERT INTO artists 
(musicbrainz_id) + `INSERT INTO artists (musicbrainz_id) VALUES ('00000000-0000-0000-0000-000000000001'), ('00000000-0000-0000-0000-000000000002')`) require.NoError(t, err) // Move artist names into artist_aliases err = store.Exec(context.Background(), - `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) + `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) VALUES (1, 'Artist One', 'Testing', true), (2, 'Artist Two', 'Testing', true)`) require.NoError(t, err) // Insert release groups err = store.Exec(context.Background(), - `INSERT INTO releases (musicbrainz_id) + `INSERT INTO releases (musicbrainz_id) VALUES ('00000000-0000-0000-0000-000000000011'), ('00000000-0000-0000-0000-000000000022')`) require.NoError(t, err) // Move release titles into release_aliases err = store.Exec(context.Background(), - `INSERT INTO release_aliases (release_id, alias, source, is_primary) + `INSERT INTO release_aliases (release_id, alias, source, is_primary) VALUES (1, 'Release One', 'Testing', true), (2, 'Release Two', 'Testing', true)`) require.NoError(t, err) // Insert tracks err = store.Exec(context.Background(), - `INSERT INTO tracks (musicbrainz_id, release_id) + `INSERT INTO tracks (musicbrainz_id, release_id) VALUES ('11111111-1111-1111-1111-111111111111', 1), ('22222222-2222-2222-2222-222222222222', 2)`) require.NoError(t, err) // Move track titles into track_aliases err = store.Exec(context.Background(), - `INSERT INTO track_aliases (track_id, alias, source, is_primary) + `INSERT INTO track_aliases (track_id, alias, source, is_primary) VALUES (1, 'Track One', 'Testing', true), (2, 'Track Two', 'Testing', true)`) require.NoError(t, err) // Associate tracks with artists err = store.Exec(context.Background(), - `INSERT INTO artist_tracks (artist_id, track_id) + `INSERT INTO artist_tracks (artist_id, track_id) VALUES (1, 1), (2, 2)`) require.NoError(t, err) // Insert listens err = store.Exec(context.Background(), - `INSERT INTO listens (user_id, 
track_id, listened_at) + `INSERT INTO listens (user_id, track_id, listened_at) VALUES (1, 1, NOW() - INTERVAL '1 day'), (1, 1, NOW() - INTERVAL '2 days'), (1, 1, NOW() - INTERVAL '1 week 1 day'), @@ -88,33 +88,35 @@ func TestListenActivity(t *testing.T) { // Test for opts.Step = db.StepDay activity, err := store.GetListenActivity(ctx, db.ListenActivityOpts{Step: db.StepDay}) require.NoError(t, err) - require.Len(t, activity, db.DefaultRange) - assert.Equal(t, []int64{0, 0, 0, 2, 0, 0, 0, 0, 0, 2, 2, 0}, flattenListenCounts(activity)) + require.Len(t, activity, 3) + assert.Equal(t, []int64{2, 2, 2}, flattenListenCounts(activity)) // Truncate listens table and insert specific dates for testing opts.Step = db.StepMonth err = store.Exec(context.Background(), `TRUNCATE TABLE listens`) require.NoError(t, err) err = store.Exec(context.Background(), - `INSERT INTO listens (user_id, track_id, listened_at) - VALUES (1, 1, NOW() - INTERVAL '1 month'), - (1, 1, NOW() - INTERVAL '2 months'), - (1, 1, NOW() - INTERVAL '3 months'), - (1, 2, NOW() - INTERVAL '1 month'), - (1, 2, NOW() - INTERVAL '2 months')`) + `INSERT INTO listens (user_id, track_id, listened_at) + VALUES (1, 1, NOW() - INTERVAL '1 month 1 day'), + (1, 1, NOW() - INTERVAL '2 months 1 day'), + (1, 1, NOW() - INTERVAL '3 months 1 day'), + (1, 2, NOW() - INTERVAL '1 month 1 day'), + (1, 2, NOW() - INTERVAL '1 second'), + (1, 2, NOW() - INTERVAL '2 seconds'), + (1, 2, NOW() - INTERVAL '2 months 1 day')`) require.NoError(t, err) activity, err = store.GetListenActivity(ctx, db.ListenActivityOpts{Step: db.StepMonth, Range: 8}) require.NoError(t, err) - require.Len(t, activity, 8) - assert.Equal(t, []int64{0, 0, 0, 0, 1, 2, 2, 0}, flattenListenCounts(activity)) + require.Len(t, activity, 4) + assert.Equal(t, []int64{1, 2, 2, 2}, flattenListenCounts(activity)) // Truncate listens table and insert specific dates for testing opts.Step = db.StepYear err = store.Exec(context.Background(), `TRUNCATE TABLE listens RESTART 
IDENTITY`) require.NoError(t, err) err = store.Exec(context.Background(), - `INSERT INTO listens (user_id, track_id, listened_at) + `INSERT INTO listens (user_id, track_id, listened_at) VALUES (1, 1, NOW() - INTERVAL '1 year'), (1, 1, NOW() - INTERVAL '2 years'), (1, 2, NOW() - INTERVAL '1 year'), @@ -123,8 +125,8 @@ func TestListenActivity(t *testing.T) { activity, err = store.GetListenActivity(ctx, db.ListenActivityOpts{Step: db.StepYear}) require.NoError(t, err) - require.Len(t, activity, db.DefaultRange) - assert.Equal(t, []int64{0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 0}, flattenListenCounts(activity)) + require.Len(t, activity, 3) + assert.Equal(t, []int64{1, 1, 2}, flattenListenCounts(activity)) // Truncate and insert data for a specific month/year err = store.Exec(context.Background(), `TRUNCATE TABLE listens RESTART IDENTITY`) require.NoError(t, err) @@ -141,10 +143,10 @@ func TestListenActivity(t *testing.T) { Year: 2024, }) require.NoError(t, err) - require.Len(t, activity, 31) // number of days in march + require.Len(t, activity, 2) // number of days in march t.Log(activity) - assert.EqualValues(t, 1, activity[9].Listens) - assert.EqualValues(t, 1, activity[19].Listens) + assert.EqualValues(t, 1, activity[0].Listens) + assert.EqualValues(t, 1, activity[1].Listens) // Truncate and insert listens associated with two different albums err = store.Exec(context.Background(), `TRUNCATE TABLE listens RESTART IDENTITY`) @@ -161,53 +163,29 @@ func TestListenActivity(t *testing.T) { AlbumID: 1, // Track 1 only }) require.NoError(t, err) - require.Len(t, activity, db.DefaultRange) - assert.Equal(t, []int64{0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0}, flattenListenCounts(activity)) + require.Len(t, activity, 2) + assert.Equal(t, []int64{1, 1}, flattenListenCounts(activity)) activity, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ Step: db.StepDay, TrackID: 1, // Track 1 only }) require.NoError(t, err) - require.Len(t, activity, db.DefaultRange) - assert.Equal(t, 
[]int64{0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0}, flattenListenCounts(activity)) + require.Len(t, activity, 2) + assert.Equal(t, []int64{1, 1}, flattenListenCounts(activity)) activity, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ Step: db.StepDay, ArtistID: 2, // Should only include listens to Track 2 }) require.NoError(t, err) - require.Len(t, activity, db.DefaultRange) - assert.Equal(t, []int64{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0}, flattenListenCounts(activity)) + require.Len(t, activity, 1) + assert.Equal(t, []int64{1}, flattenListenCounts(activity)) // month without year is disallowed _, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ Step: db.StepDay, Month: 5, }) - require.Error(t, err) - - // invalid options - _, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ - Year: -10, - }) - require.Error(t, err) - _, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ - Year: 2025, - Month: -10, - }) - require.Error(t, err) - _, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ - Range: -1, - }) - require.Error(t, err) - _, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ - AlbumID: -1, - }) - require.Error(t, err) - _, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ - ArtistID: -1, - }) - require.Error(t, err) - + assert.Error(t, err) } diff --git a/internal/db/psql/listen_test.go b/internal/db/psql/listen_test.go index b0fbd96..a687a43 100644 --- a/internal/db/psql/listen_test.go +++ b/internal/db/psql/listen_test.go @@ -14,49 +14,49 @@ func testDataForListens(t *testing.T) { truncateTestData(t) // Insert artists err := store.Exec(context.Background(), - `INSERT INTO artists (musicbrainz_id) + `INSERT INTO artists (musicbrainz_id) VALUES ('00000000-0000-0000-0000-000000000001'), ('00000000-0000-0000-0000-000000000002')`) require.NoError(t, err) // Insert artist aliases err = store.Exec(context.Background(), - `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) + `INSERT INTO artist_aliases 
(artist_id, alias, source, is_primary) VALUES (1, 'Artist One', 'Testing', true), (2, 'Artist Two', 'Testing', true)`) require.NoError(t, err) // Insert release groups err = store.Exec(context.Background(), - `INSERT INTO releases (musicbrainz_id) + `INSERT INTO releases (musicbrainz_id) VALUES ('00000000-0000-0000-0000-000000000011'), ('00000000-0000-0000-0000-000000000022')`) require.NoError(t, err) // Insert release aliases err = store.Exec(context.Background(), - `INSERT INTO release_aliases (release_id, alias, source, is_primary) + `INSERT INTO release_aliases (release_id, alias, source, is_primary) VALUES (1, 'Release One', 'Testing', true), (2, 'Release Two', 'Testing', true)`) require.NoError(t, err) // Insert tracks err = store.Exec(context.Background(), - `INSERT INTO tracks (musicbrainz_id, release_id) + `INSERT INTO tracks (musicbrainz_id, release_id) VALUES ('11111111-1111-1111-1111-111111111111', 1), ('22222222-2222-2222-2222-222222222222', 2)`) require.NoError(t, err) // Insert track aliases err = store.Exec(context.Background(), - `INSERT INTO track_aliases (track_id, alias, source, is_primary) + `INSERT INTO track_aliases (track_id, alias, source, is_primary) VALUES (1, 'Track One', 'Testing', true), (2, 'Track Two', 'Testing', true)`) require.NoError(t, err) // Insert artist track associations err = store.Exec(context.Background(), - `INSERT INTO artist_tracks (track_id, artist_id) + `INSERT INTO artist_tracks (track_id, artist_id) VALUES (1, 1), (2, 2)`) require.NoError(t, err) @@ -67,7 +67,7 @@ func TestGetListens(t *testing.T) { ctx := context.Background() // Test valid - resp, err := store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime}) + resp, err := store.GetListensPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodAllTime}}) require.NoError(t, err) require.Len(t, resp.Items, 10) assert.Equal(t, int64(10), resp.TotalCount) @@ -78,7 +78,7 @@ func TestGetListens(t *testing.T) { assert.Equal(t, "Artist 
Three", resp.Items[1].Track.Artists[0].Name) // Test pagination - resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Period: db.PeriodAllTime}) + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Timeframe: db.Timeframe{Period: db.PeriodAllTime}}) require.NoError(t, err) require.Len(t, resp.Items, 1) require.Len(t, resp.Items[0].Track.Artists, 1) @@ -89,7 +89,7 @@ func TestGetListens(t *testing.T) { assert.Equal(t, "Artist Three", resp.Items[0].Track.Artists[0].Name) // Test page out of range - resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Limit: 10, Page: 10, Period: db.PeriodAllTime}) + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Limit: 10, Page: 10, Timeframe: db.Timeframe{Period: db.PeriodAllTime}}) require.NoError(t, err) assert.Empty(t, resp.Items) assert.False(t, resp.HasNextPage) @@ -102,7 +102,7 @@ func TestGetListens(t *testing.T) { assert.Error(t, err) // Test specify period - resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodDay}) + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodDay}}) require.NoError(t, err) require.Len(t, resp.Items, 0) // empty assert.Equal(t, int64(0), resp.TotalCount) @@ -112,38 +112,38 @@ func TestGetListens(t *testing.T) { require.Len(t, resp.Items, 0) // empty assert.Equal(t, int64(0), resp.TotalCount) - resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodWeek}) + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodWeek}}) require.NoError(t, err) require.Len(t, resp.Items, 1) assert.Equal(t, int64(1), resp.TotalCount) - resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodMonth}) + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodMonth}}) require.NoError(t, err) require.Len(t, resp.Items, 3) assert.Equal(t, int64(3), 
resp.TotalCount) - resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodYear}) + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodYear}}) require.NoError(t, err) require.Len(t, resp.Items, 6) assert.Equal(t, int64(6), resp.TotalCount) // Test filter by artists, releases, and tracks - resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, ArtistID: 1}) + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodAllTime}, ArtistID: 1}) require.NoError(t, err) require.Len(t, resp.Items, 4) assert.Equal(t, int64(4), resp.TotalCount) - resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, AlbumID: 2}) + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodAllTime}, AlbumID: 2}) require.NoError(t, err) require.Len(t, resp.Items, 3) assert.Equal(t, int64(3), resp.TotalCount) - resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, TrackID: 3}) + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodAllTime}, TrackID: 3}) require.NoError(t, err) require.Len(t, resp.Items, 2) assert.Equal(t, int64(2), resp.TotalCount) // when both artistID and albumID are specified, artist id is ignored - resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, AlbumID: 2, ArtistID: 1}) + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodAllTime}, AlbumID: 2, ArtistID: 1}) require.NoError(t, err) require.Len(t, resp.Items, 3) assert.Equal(t, int64(3), resp.TotalCount) @@ -152,20 +152,16 @@ func TestGetListens(t *testing.T) { testDataAbsoluteListenTimes(t) - resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Year: 2023}) + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Year: 
2023}}) require.NoError(t, err) require.Len(t, resp.Items, 4) assert.Equal(t, int64(4), resp.TotalCount) - resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Month: 6, Year: 2024}) + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Month: 6, Year: 2024}}) require.NoError(t, err) require.Len(t, resp.Items, 3) assert.Equal(t, int64(3), resp.TotalCount) - // invalid, year required with month - _, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Month: 10}) - require.Error(t, err) - } func TestSaveListen(t *testing.T) { diff --git a/internal/db/psql/merge.go b/internal/db/psql/merge.go index d9e24b6..dd375c5 100644 --- a/internal/db/psql/merge.go +++ b/internal/db/psql/merge.go @@ -52,7 +52,7 @@ func (d *Psql) MergeTracks(ctx context.Context, fromId, toId int32) error { } err = qtx.CleanOrphanedEntries(ctx) if err != nil { - l.Err(err).Msg("Failed to clean orphaned entries") + l.Err(err).Msg("MergeTracks: Failed to clean orphaned entries") return err } return tx.Commit(ctx) diff --git a/internal/db/psql/merge_test.go b/internal/db/psql/merge_test.go index 08169fb..38e843a 100644 --- a/internal/db/psql/merge_test.go +++ b/internal/db/psql/merge_test.go @@ -12,27 +12,27 @@ func setupTestDataForMerge(t *testing.T) { truncateTestData(t) // Insert artists err := store.Exec(context.Background(), - `INSERT INTO artists (musicbrainz_id, image, image_source) + `INSERT INTO artists (musicbrainz_id, image, image_source) VALUES ('00000000-0000-0000-0000-000000000001', '10000000-0000-0000-0000-000000000000', 'source.com'), ('00000000-0000-0000-0000-000000000002', NULL, NULL)`) require.NoError(t, err) err = store.Exec(context.Background(), - `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) + `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) VALUES (1, 'Artist One', 'Testing', true), (2, 'Artist Two', 'Testing', true)`) require.NoError(t, err) // Insert albums err = store.Exec(context.Background(), - 
`INSERT INTO releases (musicbrainz_id, image, image_source) + `INSERT INTO releases (musicbrainz_id, image, image_source) VALUES ('11111111-1111-1111-1111-111111111111', '20000000-0000-0000-0000-000000000000', 'source.com'), ('22222222-2222-2222-2222-222222222222', NULL, NULL), (NULL, NULL, NULL)`) require.NoError(t, err) err = store.Exec(context.Background(), - `INSERT INTO release_aliases (release_id, alias, source, is_primary) + `INSERT INTO release_aliases (release_id, alias, source, is_primary) VALUES (1, 'Album One', 'Testing', true), (2, 'Album Two', 'Testing', true), (3, 'Album Three', 'Testing', true)`) @@ -40,7 +40,7 @@ func setupTestDataForMerge(t *testing.T) { // Insert tracks err = store.Exec(context.Background(), - `INSERT INTO tracks (musicbrainz_id, release_id) + `INSERT INTO tracks (musicbrainz_id, release_id) VALUES ('33333333-3333-3333-3333-333333333333', 1), ('44444444-4444-4444-4444-444444444444', 2), ('55555555-5555-5555-5555-555555555555', 1), @@ -48,7 +48,7 @@ func setupTestDataForMerge(t *testing.T) { require.NoError(t, err) err = store.Exec(context.Background(), - `INSERT INTO track_aliases (track_id, alias, source, is_primary) + `INSERT INTO track_aliases (track_id, alias, source, is_primary) VALUES (1, 'Track One', 'Testing', true), (2, 'Track Two', 'Testing', true), (3, 'Track Three', 'Testing', true), @@ -57,18 +57,18 @@ func setupTestDataForMerge(t *testing.T) { // Associate artists with albums and tracks err = store.Exec(context.Background(), - `INSERT INTO artist_releases (artist_id, release_id) + `INSERT INTO artist_releases (artist_id, release_id) VALUES (1, 1), (2, 2), (1, 3)`) require.NoError(t, err) err = store.Exec(context.Background(), - `INSERT INTO artist_tracks (artist_id, track_id) + `INSERT INTO artist_tracks (artist_id, track_id) VALUES (1, 1), (2, 2), (1, 3), (1, 4)`) require.NoError(t, err) // Insert listens err = store.Exec(context.Background(), - `INSERT INTO listens (user_id, track_id, listened_at) + `INSERT INTO 
listens (user_id, track_id, listened_at) VALUES (1, 1, NOW() - INTERVAL '1 day'), (1, 2, NOW() - INTERVAL '2 days'), (1, 3, NOW() - INTERVAL '3 days'), @@ -90,14 +90,14 @@ func TestMergeTracks(t *testing.T) { require.NoError(t, err) assert.Equal(t, 2, count, "expected all listens to be merged into Track 2") - // Verify artist is associated with album + // Verify old artist is not associated with album exists, err := store.RowExists(ctx, ` SELECT EXISTS ( SELECT 1 FROM artist_releases WHERE release_id = $1 AND artist_id = $2 )`, 2, 1) require.NoError(t, err) - assert.True(t, exists, "expected old artist to be associated with album") + assert.False(t, exists) truncateTestData(t) } diff --git a/internal/db/psql/top_albums.go b/internal/db/psql/top_albums.go index f10d705..652b790 100644 --- a/internal/db/psql/top_albums.go +++ b/internal/db/psql/top_albums.go @@ -4,41 +4,27 @@ import ( "context" "encoding/json" "fmt" - "time" "github.com/gabehf/koito/internal/db" "github.com/gabehf/koito/internal/logger" "github.com/gabehf/koito/internal/models" "github.com/gabehf/koito/internal/repository" - "github.com/gabehf/koito/internal/utils" ) -func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[*models.Album], error) { +func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[db.RankedItem[*models.Album]], error) { l := logger.FromContext(ctx) offset := (opts.Page - 1) * opts.Limit - t1, t2, err := utils.DateRange(opts.Week, opts.Month, opts.Year) - if err != nil { - return nil, fmt.Errorf("GetTopAlbumsPaginated: %w", err) - } - if opts.Month == 0 && opts.Year == 0 { - // use period, not date range - t2 = time.Now() - t1 = db.StartTimeFromPeriod(opts.Period) - } - if opts.From != 0 || opts.To != 0 { - t1 = time.Unix(opts.From, 0) - t2 = time.Unix(opts.To, 0) - } + t1, t2 := db.TimeframeToTimeRange(opts.Timeframe) if opts.Limit == 0 { opts.Limit = DefaultItemsPerPage } - var rgs 
[]*models.Album + var rgs []db.RankedItem[*models.Album] var count int64 if opts.ArtistID != 0 { - l.Debug().Msgf("Fetching top %d albums from artist id %d with period %s on page %d from range %v to %v", - opts.Limit, opts.ArtistID, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + l.Debug().Msgf("Fetching top %d albums from artist id %d on page %d from range %v to %v", + opts.Limit, opts.ArtistID, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) rows, err := d.q.GetTopReleasesFromArtist(ctx, repository.GetTopReleasesFromArtistParams{ ArtistID: int32(opts.ArtistID), @@ -50,7 +36,7 @@ func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts) if err != nil { return nil, fmt.Errorf("GetTopAlbumsPaginated: GetTopReleasesFromArtist: %w", err) } - rgs = make([]*models.Album, len(rows)) + rgs = make([]db.RankedItem[*models.Album], len(rows)) l.Debug().Msgf("Database responded with %d items", len(rows)) for i, v := range rows { artists := make([]models.SimpleArtist, 0) @@ -59,7 +45,7 @@ func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts) l.Err(err).Msgf("Error unmarshalling artists for release group with id %d", v.ID) return nil, fmt.Errorf("GetTopAlbumsPaginated: Unmarshal: %w", err) } - rgs[i] = &models.Album{ + rgs[i].Item = &models.Album{ ID: v.ID, MbzID: v.MusicBrainzID, Title: v.Title, @@ -68,14 +54,15 @@ func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts) VariousArtists: v.VariousArtists, ListenCount: v.ListenCount, } + rgs[i].Rank = v.Rank } count, err = d.q.CountReleasesFromArtist(ctx, int32(opts.ArtistID)) if err != nil { return nil, fmt.Errorf("GetTopAlbumsPaginated: CountReleasesFromArtist: %w", err) } } else { - l.Debug().Msgf("Fetching top %d albums with period %s on page %d from range %v to %v", - opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + l.Debug().Msgf("Fetching top %d albums on page %d 
from range %v to %v", + opts.Limit, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) rows, err := d.q.GetTopReleasesPaginated(ctx, repository.GetTopReleasesPaginatedParams{ ListenedAt: t1, ListenedAt_2: t2, @@ -85,7 +72,7 @@ func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts) if err != nil { return nil, fmt.Errorf("GetTopAlbumsPaginated: GetTopReleasesPaginated: %w", err) } - rgs = make([]*models.Album, len(rows)) + rgs = make([]db.RankedItem[*models.Album], len(rows)) l.Debug().Msgf("Database responded with %d items", len(rows)) for i, row := range rows { artists := make([]models.SimpleArtist, 0) @@ -94,16 +81,16 @@ func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts) l.Err(err).Msgf("Error unmarshalling artists for release group with id %d", row.ID) return nil, fmt.Errorf("GetTopAlbumsPaginated: Unmarshal: %w", err) } - t := &models.Album{ - Title: row.Title, - MbzID: row.MusicBrainzID, + rgs[i].Item = &models.Album{ ID: row.ID, + MbzID: row.MusicBrainzID, + Title: row.Title, Image: row.Image, Artists: artists, VariousArtists: row.VariousArtists, ListenCount: row.ListenCount, } - rgs[i] = t + rgs[i].Rank = row.Rank } count, err = d.q.CountTopReleases(ctx, repository.CountTopReleasesParams{ ListenedAt: t1, @@ -114,7 +101,7 @@ func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts) } l.Debug().Msgf("Database responded with %d albums out of a total %d", len(rows), count) } - return &db.PaginatedResponse[*models.Album]{ + return &db.PaginatedResponse[db.RankedItem[*models.Album]]{ Items: rgs, TotalCount: count, ItemsPerPage: int32(opts.Limit), diff --git a/internal/db/psql/top_albums_test.go b/internal/db/psql/top_albums_test.go index d698be6..eb4efde 100644 --- a/internal/db/psql/top_albums_test.go +++ b/internal/db/psql/top_albums_test.go @@ -14,23 +14,23 @@ func TestGetTopAlbumsPaginated(t *testing.T) { ctx := context.Background() // Test valid - resp, err := 
store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime}) + resp, err := store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodAllTime}}) require.NoError(t, err) require.Len(t, resp.Items, 4) assert.Equal(t, int64(4), resp.TotalCount) - assert.Equal(t, "Release One", resp.Items[0].Title) - assert.Equal(t, "Release Two", resp.Items[1].Title) - assert.Equal(t, "Release Three", resp.Items[2].Title) - assert.Equal(t, "Release Four", resp.Items[3].Title) + assert.Equal(t, "Release One", resp.Items[0].Item.Title) + assert.Equal(t, "Release Two", resp.Items[1].Item.Title) + assert.Equal(t, "Release Three", resp.Items[2].Item.Title) + assert.Equal(t, "Release Four", resp.Items[3].Item.Title) // Test pagination - resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Period: db.PeriodAllTime}) + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Timeframe: db.Timeframe{Period: db.PeriodAllTime}}) require.NoError(t, err) require.Len(t, resp.Items, 1) - assert.Equal(t, "Release Two", resp.Items[0].Title) + assert.Equal(t, "Release Two", resp.Items[0].Item.Title) // Test page out of range - resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 10, Period: db.PeriodAllTime}) + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 10, Timeframe: db.Timeframe{Period: db.PeriodAllTime}}) require.NoError(t, err) require.Empty(t, resp.Items) assert.False(t, resp.HasNextPage) @@ -43,7 +43,7 @@ func TestGetTopAlbumsPaginated(t *testing.T) { assert.Error(t, err) // Test specify period - resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodDay}) + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodDay}}) require.NoError(t, err) require.Len(t, resp.Items, 0) // empty assert.Equal(t, int64(0), resp.TotalCount) @@ -53,51 +53,47 @@ func TestGetTopAlbumsPaginated(t 
*testing.T) { require.Len(t, resp.Items, 0) // empty assert.Equal(t, int64(0), resp.TotalCount) - resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodWeek}) + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodWeek}}) require.NoError(t, err) require.Len(t, resp.Items, 1) assert.Equal(t, int64(1), resp.TotalCount) - assert.Equal(t, "Release Four", resp.Items[0].Title) + assert.Equal(t, "Release Four", resp.Items[0].Item.Title) - resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodMonth}) + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodMonth}}) require.NoError(t, err) require.Len(t, resp.Items, 2) assert.Equal(t, int64(2), resp.TotalCount) - assert.Equal(t, "Release Three", resp.Items[0].Title) - assert.Equal(t, "Release Four", resp.Items[1].Title) + assert.Equal(t, "Release Three", resp.Items[0].Item.Title) + assert.Equal(t, "Release Four", resp.Items[1].Item.Title) - resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodYear}) + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodYear}}) require.NoError(t, err) require.Len(t, resp.Items, 3) assert.Equal(t, int64(3), resp.TotalCount) - assert.Equal(t, "Release Two", resp.Items[0].Title) - assert.Equal(t, "Release Three", resp.Items[1].Title) - assert.Equal(t, "Release Four", resp.Items[2].Title) + assert.Equal(t, "Release Two", resp.Items[0].Item.Title) + assert.Equal(t, "Release Three", resp.Items[1].Item.Title) + assert.Equal(t, "Release Four", resp.Items[2].Item.Title) // test specific artist - resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodYear, ArtistID: 2}) + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodYear}, ArtistID: 2}) require.NoError(t, err) require.Len(t, resp.Items, 1) assert.Equal(t, 
int64(1), resp.TotalCount) - assert.Equal(t, "Release Two", resp.Items[0].Title) + assert.Equal(t, "Release Two", resp.Items[0].Item.Title) // Test specify dates testDataAbsoluteListenTimes(t) - resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Year: 2023}) + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Year: 2023}}) require.NoError(t, err) require.Len(t, resp.Items, 1) assert.Equal(t, int64(1), resp.TotalCount) - assert.Equal(t, "Release One", resp.Items[0].Title) + assert.Equal(t, "Release One", resp.Items[0].Item.Title) - resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Month: 6, Year: 2024}) + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Month: 6, Year: 2024}}) require.NoError(t, err) require.Len(t, resp.Items, 1) assert.Equal(t, int64(1), resp.TotalCount) - assert.Equal(t, "Release Two", resp.Items[0].Title) - - // invalid, year required with month - _, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Month: 10}) - require.Error(t, err) + assert.Equal(t, "Release Two", resp.Items[0].Item.Title) } diff --git a/internal/db/psql/top_artists.go b/internal/db/psql/top_artists.go index 9201f82..497efbd 100644 --- a/internal/db/psql/top_artists.go +++ b/internal/db/psql/top_artists.go @@ -3,36 +3,22 @@ package psql import ( "context" "fmt" - "time" "github.com/gabehf/koito/internal/db" "github.com/gabehf/koito/internal/logger" "github.com/gabehf/koito/internal/models" "github.com/gabehf/koito/internal/repository" - "github.com/gabehf/koito/internal/utils" ) -func (d *Psql) GetTopArtistsPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[*models.Artist], error) { +func (d *Psql) GetTopArtistsPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[db.RankedItem[*models.Artist]], error) { l := logger.FromContext(ctx) offset := (opts.Page - 1) * opts.Limit - t1, t2, err := utils.DateRange(opts.Week, opts.Month, 
opts.Year) - if err != nil { - return nil, fmt.Errorf("GetTopArtistsPaginated: %w", err) - } - if opts.Month == 0 && opts.Year == 0 { - // use period, not date range - t2 = time.Now() - t1 = db.StartTimeFromPeriod(opts.Period) - } - if opts.From != 0 || opts.To != 0 { - t1 = time.Unix(opts.From, 0) - t2 = time.Unix(opts.To, 0) - } + t1, t2 := db.TimeframeToTimeRange(opts.Timeframe) if opts.Limit == 0 { opts.Limit = DefaultItemsPerPage } - l.Debug().Msgf("Fetching top %d artists with period %s on page %d from range %v to %v", - opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + l.Debug().Msgf("Fetching top %d artists on page %d from range %v to %v", + opts.Limit, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) rows, err := d.q.GetTopArtistsPaginated(ctx, repository.GetTopArtistsPaginatedParams{ ListenedAt: t1, ListenedAt_2: t2, @@ -42,7 +28,7 @@ func (d *Psql) GetTopArtistsPaginated(ctx context.Context, opts db.GetItemsOpts) if err != nil { return nil, fmt.Errorf("GetTopArtistsPaginated: GetTopArtistsPaginated: %w", err) } - rgs := make([]*models.Artist, len(rows)) + rgs := make([]db.RankedItem[*models.Artist], len(rows)) for i, row := range rows { t := &models.Artist{ Name: row.Name, @@ -51,7 +37,8 @@ func (d *Psql) GetTopArtistsPaginated(ctx context.Context, opts db.GetItemsOpts) Image: row.Image, ListenCount: row.ListenCount, } - rgs[i] = t + rgs[i].Item = t + rgs[i].Rank = row.Rank } count, err := d.q.CountTopArtists(ctx, repository.CountTopArtistsParams{ ListenedAt: t1, @@ -62,7 +49,7 @@ func (d *Psql) GetTopArtistsPaginated(ctx context.Context, opts db.GetItemsOpts) } l.Debug().Msgf("Database responded with %d artists out of a total %d", len(rows), count) - return &db.PaginatedResponse[*models.Artist]{ + return &db.PaginatedResponse[db.RankedItem[*models.Artist]]{ Items: rgs, TotalCount: count, ItemsPerPage: int32(opts.Limit), diff --git a/internal/db/psql/top_artists_test.go 
b/internal/db/psql/top_artists_test.go index 2f261a0..7a69ab5 100644 --- a/internal/db/psql/top_artists_test.go +++ b/internal/db/psql/top_artists_test.go @@ -14,23 +14,23 @@ func TestGetTopArtistsPaginated(t *testing.T) { ctx := context.Background() // Test valid - resp, err := store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime}) + resp, err := store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodAllTime}}) require.NoError(t, err) require.Len(t, resp.Items, 4) assert.Equal(t, int64(4), resp.TotalCount) - assert.Equal(t, "Artist One", resp.Items[0].Name) - assert.Equal(t, "Artist Two", resp.Items[1].Name) - assert.Equal(t, "Artist Three", resp.Items[2].Name) - assert.Equal(t, "Artist Four", resp.Items[3].Name) + assert.Equal(t, "Artist One", resp.Items[0].Item.Name) + assert.Equal(t, "Artist Two", resp.Items[1].Item.Name) + assert.Equal(t, "Artist Three", resp.Items[2].Item.Name) + assert.Equal(t, "Artist Four", resp.Items[3].Item.Name) // Test pagination - resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Period: db.PeriodAllTime}) + resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Timeframe: db.Timeframe{Period: db.PeriodAllTime}}) require.NoError(t, err) require.Len(t, resp.Items, 1) - assert.Equal(t, "Artist Two", resp.Items[0].Name) + assert.Equal(t, "Artist Two", resp.Items[0].Item.Name) // Test page out of range - resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 10, Period: db.PeriodAllTime}) + resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 10, Timeframe: db.Timeframe{Period: db.PeriodAllTime}}) require.NoError(t, err) assert.Empty(t, resp.Items) assert.False(t, resp.HasNextPage) @@ -43,7 +43,7 @@ func TestGetTopArtistsPaginated(t *testing.T) { assert.Error(t, err) // Test specify period - resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodDay}) + 
resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodDay}}) require.NoError(t, err) require.Len(t, resp.Items, 0) // empty assert.Equal(t, int64(0), resp.TotalCount) @@ -53,44 +53,40 @@ func TestGetTopArtistsPaginated(t *testing.T) { require.Len(t, resp.Items, 0) // empty assert.Equal(t, int64(0), resp.TotalCount) - resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodWeek}) + resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodWeek}}) require.NoError(t, err) require.Len(t, resp.Items, 1) assert.Equal(t, int64(1), resp.TotalCount) - assert.Equal(t, "Artist Four", resp.Items[0].Name) + assert.Equal(t, "Artist Four", resp.Items[0].Item.Name) - resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodMonth}) + resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodMonth}}) require.NoError(t, err) require.Len(t, resp.Items, 2) assert.Equal(t, int64(2), resp.TotalCount) - assert.Equal(t, "Artist Three", resp.Items[0].Name) - assert.Equal(t, "Artist Four", resp.Items[1].Name) + assert.Equal(t, "Artist Three", resp.Items[0].Item.Name) + assert.Equal(t, "Artist Four", resp.Items[1].Item.Name) - resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodYear}) + resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodYear}}) require.NoError(t, err) require.Len(t, resp.Items, 3) assert.Equal(t, int64(3), resp.TotalCount) - assert.Equal(t, "Artist Two", resp.Items[0].Name) - assert.Equal(t, "Artist Three", resp.Items[1].Name) - assert.Equal(t, "Artist Four", resp.Items[2].Name) + assert.Equal(t, "Artist Two", resp.Items[0].Item.Name) + assert.Equal(t, "Artist Three", resp.Items[1].Item.Name) + assert.Equal(t, "Artist Four", resp.Items[2].Item.Name) // Test specify dates testDataAbsoluteListenTimes(t) - resp, err = 
store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Year: 2023}) + resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Year: 2023}}) require.NoError(t, err) require.Len(t, resp.Items, 1) assert.Equal(t, int64(1), resp.TotalCount) - assert.Equal(t, "Artist One", resp.Items[0].Name) + assert.Equal(t, "Artist One", resp.Items[0].Item.Name) - resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Month: 6, Year: 2024}) + resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Month: 6, Year: 2024}}) require.NoError(t, err) require.Len(t, resp.Items, 1) assert.Equal(t, int64(1), resp.TotalCount) - assert.Equal(t, "Artist Two", resp.Items[0].Name) - - // invalid, year required with month - _, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Month: 10}) - require.Error(t, err) + assert.Equal(t, "Artist Two", resp.Items[0].Item.Name) } diff --git a/internal/db/psql/top_tracks.go b/internal/db/psql/top_tracks.go index 326ef77..89960e8 100644 --- a/internal/db/psql/top_tracks.go +++ b/internal/db/psql/top_tracks.go @@ -4,39 +4,25 @@ import ( "context" "encoding/json" "fmt" - "time" "github.com/gabehf/koito/internal/db" "github.com/gabehf/koito/internal/logger" "github.com/gabehf/koito/internal/models" "github.com/gabehf/koito/internal/repository" - "github.com/gabehf/koito/internal/utils" ) -func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[*models.Track], error) { +func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[db.RankedItem[*models.Track]], error) { l := logger.FromContext(ctx) offset := (opts.Page - 1) * opts.Limit - t1, t2, err := utils.DateRange(opts.Week, opts.Month, opts.Year) - if err != nil { - return nil, fmt.Errorf("GetTopTracksPaginated: %w", err) - } - if opts.Month == 0 && opts.Year == 0 { - // use period, not date range - t2 = time.Now() - t1 = 
db.StartTimeFromPeriod(opts.Period) - } - if opts.From != 0 || opts.To != 0 { - t1 = time.Unix(opts.From, 0) - t2 = time.Unix(opts.To, 0) - } + t1, t2 := db.TimeframeToTimeRange(opts.Timeframe) if opts.Limit == 0 { opts.Limit = DefaultItemsPerPage } - var tracks []*models.Track + var tracks []db.RankedItem[*models.Track] var count int64 if opts.AlbumID > 0 { - l.Debug().Msgf("Fetching top %d tracks with period %s on page %d from range %v to %v", - opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + l.Debug().Msgf("Fetching top %d tracks on page %d from range %v to %v", + opts.Limit, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) rows, err := d.q.GetTopTracksInReleasePaginated(ctx, repository.GetTopTracksInReleasePaginatedParams{ ListenedAt: t1, ListenedAt_2: t2, @@ -47,7 +33,7 @@ func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts) if err != nil { return nil, fmt.Errorf("GetTopTracksPaginated: GetTopTracksInReleasePaginated: %w", err) } - tracks = make([]*models.Track, len(rows)) + tracks = make([]db.RankedItem[*models.Track], len(rows)) for i, row := range rows { artists := make([]models.SimpleArtist, 0) err = json.Unmarshal(row.Artists, &artists) @@ -64,7 +50,8 @@ func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts) AlbumID: row.ReleaseID, Artists: artists, } - tracks[i] = t + tracks[i].Item = t + tracks[i].Rank = row.Rank } count, err = d.q.CountTopTracksByRelease(ctx, repository.CountTopTracksByReleaseParams{ ListenedAt: t1, @@ -75,8 +62,8 @@ func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts) return nil, err } } else if opts.ArtistID > 0 { - l.Debug().Msgf("Fetching top %d tracks with period %s on page %d from range %v to %v", - opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + l.Debug().Msgf("Fetching top %d tracks on page %d from range %v to %v", + opts.Limit, opts.Page, 
t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) rows, err := d.q.GetTopTracksByArtistPaginated(ctx, repository.GetTopTracksByArtistPaginatedParams{ ListenedAt: t1, ListenedAt_2: t2, @@ -87,7 +74,7 @@ func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts) if err != nil { return nil, fmt.Errorf("GetTopTracksPaginated: GetTopTracksByArtistPaginated: %w", err) } - tracks = make([]*models.Track, len(rows)) + tracks = make([]db.RankedItem[*models.Track], len(rows)) for i, row := range rows { artists := make([]models.SimpleArtist, 0) err = json.Unmarshal(row.Artists, &artists) @@ -104,7 +91,8 @@ func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts) AlbumID: row.ReleaseID, Artists: artists, } - tracks[i] = t + tracks[i].Item = t + tracks[i].Rank = row.Rank } count, err = d.q.CountTopTracksByArtist(ctx, repository.CountTopTracksByArtistParams{ ListenedAt: t1, @@ -115,8 +103,8 @@ func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts) return nil, fmt.Errorf("GetTopTracksPaginated: CountTopTracksByArtist: %w", err) } } else { - l.Debug().Msgf("Fetching top %d tracks with period %s on page %d from range %v to %v", - opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + l.Debug().Msgf("Fetching top %d tracks on page %d from range %v to %v", + opts.Limit, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) rows, err := d.q.GetTopTracksPaginated(ctx, repository.GetTopTracksPaginatedParams{ ListenedAt: t1, ListenedAt_2: t2, @@ -126,7 +114,7 @@ func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts) if err != nil { return nil, fmt.Errorf("GetTopTracksPaginated: GetTopTracksPaginated: %w", err) } - tracks = make([]*models.Track, len(rows)) + tracks = make([]db.RankedItem[*models.Track], len(rows)) for i, row := range rows { artists := make([]models.SimpleArtist, 0) err = json.Unmarshal(row.Artists, &artists) @@ -143,7 +131,8 @@ 
func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts) AlbumID: row.ReleaseID, Artists: artists, } - tracks[i] = t + tracks[i].Item = t + tracks[i].Rank = row.Rank } count, err = d.q.CountTopTracks(ctx, repository.CountTopTracksParams{ ListenedAt: t1, @@ -155,7 +144,7 @@ func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts) l.Debug().Msgf("Database responded with %d tracks out of a total %d", len(rows), count) } - return &db.PaginatedResponse[*models.Track]{ + return &db.PaginatedResponse[db.RankedItem[*models.Track]]{ Items: tracks, TotalCount: count, ItemsPerPage: int32(opts.Limit), diff --git a/internal/db/psql/top_tracks_test.go b/internal/db/psql/top_tracks_test.go index 89e63f1..934d9b7 100644 --- a/internal/db/psql/top_tracks_test.go +++ b/internal/db/psql/top_tracks_test.go @@ -14,26 +14,26 @@ func TestGetTopTracksPaginated(t *testing.T) { ctx := context.Background() // Test valid - resp, err := store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime}) + resp, err := store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodAllTime}}) require.NoError(t, err) require.Len(t, resp.Items, 4) assert.Equal(t, int64(4), resp.TotalCount) - assert.Equal(t, "Track One", resp.Items[0].Title) - assert.Equal(t, "Track Two", resp.Items[1].Title) - assert.Equal(t, "Track Three", resp.Items[2].Title) - assert.Equal(t, "Track Four", resp.Items[3].Title) + assert.Equal(t, "Track One", resp.Items[0].Item.Title) + assert.Equal(t, "Track Two", resp.Items[1].Item.Title) + assert.Equal(t, "Track Three", resp.Items[2].Item.Title) + assert.Equal(t, "Track Four", resp.Items[3].Item.Title) // ensure artists are included - require.Len(t, resp.Items[0].Artists, 1) - assert.Equal(t, "Artist One", resp.Items[0].Artists[0].Name) + require.Len(t, resp.Items[0].Item.Artists, 1) + assert.Equal(t, "Artist One", resp.Items[0].Item.Artists[0].Name) // Test pagination - resp, err = 
store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Period: db.PeriodAllTime}) + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Timeframe: db.Timeframe{Period: db.PeriodAllTime}}) require.NoError(t, err) require.Len(t, resp.Items, 1) - assert.Equal(t, "Track Two", resp.Items[0].Title) + assert.Equal(t, "Track Two", resp.Items[0].Item.Title) // Test page out of range - resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 10, Period: db.PeriodAllTime}) + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 10, Timeframe: db.Timeframe{Period: db.PeriodAllTime}}) require.NoError(t, err) assert.Empty(t, resp.Items) assert.False(t, resp.HasNextPage) @@ -46,7 +46,7 @@ func TestGetTopTracksPaginated(t *testing.T) { assert.Error(t, err) // Test specify period - resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodDay}) + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodDay}}) require.NoError(t, err) require.Len(t, resp.Items, 0) // empty assert.Equal(t, int64(0), resp.TotalCount) @@ -56,63 +56,59 @@ func TestGetTopTracksPaginated(t *testing.T) { require.Len(t, resp.Items, 0) // empty assert.Equal(t, int64(0), resp.TotalCount) - resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodWeek}) + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodWeek}}) require.NoError(t, err) require.Len(t, resp.Items, 1) assert.Equal(t, int64(1), resp.TotalCount) - assert.Equal(t, "Track Four", resp.Items[0].Title) + assert.Equal(t, "Track Four", resp.Items[0].Item.Title) - resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodMonth}) + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodMonth}}) require.NoError(t, err) require.Len(t, resp.Items, 2) assert.Equal(t, int64(2), 
resp.TotalCount) - assert.Equal(t, "Track Three", resp.Items[0].Title) - assert.Equal(t, "Track Four", resp.Items[1].Title) + assert.Equal(t, "Track Three", resp.Items[0].Item.Title) + assert.Equal(t, "Track Four", resp.Items[1].Item.Title) - resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodYear}) + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodYear}}) require.NoError(t, err) require.Len(t, resp.Items, 3) assert.Equal(t, int64(3), resp.TotalCount) - assert.Equal(t, "Track Two", resp.Items[0].Title) - assert.Equal(t, "Track Three", resp.Items[1].Title) - assert.Equal(t, "Track Four", resp.Items[2].Title) + assert.Equal(t, "Track Two", resp.Items[0].Item.Title) + assert.Equal(t, "Track Three", resp.Items[1].Item.Title) + assert.Equal(t, "Track Four", resp.Items[2].Item.Title) // Test filter by artists and releases - resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, ArtistID: 1}) + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodAllTime}, ArtistID: 1}) require.NoError(t, err) require.Len(t, resp.Items, 1) assert.Equal(t, int64(1), resp.TotalCount) - assert.Equal(t, "Track One", resp.Items[0].Title) + assert.Equal(t, "Track One", resp.Items[0].Item.Title) - resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, AlbumID: 2}) + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodAllTime}, AlbumID: 2}) require.NoError(t, err) require.Len(t, resp.Items, 1) assert.Equal(t, int64(1), resp.TotalCount) - assert.Equal(t, "Track Two", resp.Items[0].Title) + assert.Equal(t, "Track Two", resp.Items[0].Item.Title) // when both artistID and albumID are specified, artist id is ignored - resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, AlbumID: 2, ArtistID: 1}) + resp, err = 
store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Period: db.PeriodAllTime}, AlbumID: 2, ArtistID: 1}) require.NoError(t, err) require.Len(t, resp.Items, 1) assert.Equal(t, int64(1), resp.TotalCount) - assert.Equal(t, "Track Two", resp.Items[0].Title) + assert.Equal(t, "Track Two", resp.Items[0].Item.Title) // Test specify dates testDataAbsoluteListenTimes(t) - resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Year: 2023}) + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Year: 2023}}) require.NoError(t, err) require.Len(t, resp.Items, 1) assert.Equal(t, int64(1), resp.TotalCount) - assert.Equal(t, "Track One", resp.Items[0].Title) + assert.Equal(t, "Track One", resp.Items[0].Item.Title) - resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Month: 6, Year: 2024}) + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Timeframe: db.Timeframe{Month: 6, Year: 2024}}) require.NoError(t, err) require.Len(t, resp.Items, 1) assert.Equal(t, int64(1), resp.TotalCount) - assert.Equal(t, "Track Two", resp.Items[0].Title) - - // invalid, year required with month - _, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Month: 10}) - require.Error(t, err) + assert.Equal(t, "Track Two", resp.Items[0].Item.Title) } diff --git a/internal/db/psql/track.go b/internal/db/psql/track.go index 2da852d..d4cc616 100644 --- a/internal/db/psql/track.go +++ b/internal/db/psql/track.go @@ -21,61 +21,36 @@ func (d *Psql) GetTrack(ctx context.Context, opts db.GetTrackOpts) (*models.Trac l := logger.FromContext(ctx) var track models.Track - if opts.ID != 0 { - l.Debug().Msgf("Fetching track from DB with id %d", opts.ID) - t, err := d.q.GetTrack(ctx, opts.ID) - if err != nil { - return nil, fmt.Errorf("GetTrack: GetTrack By ID: %w", err) - } - track = models.Track{ - ID: t.ID, - MbzID: t.MusicBrainzID, - Title: t.Title, - AlbumID: t.ReleaseID, - Image: t.Image, - Duration: t.Duration, - } - err = 
json.Unmarshal(t.Artists, &track.Artists) - if err != nil { - return nil, fmt.Errorf("GetTrack: json.Unmarshal: %w", err) - } - } else if opts.MusicBrainzID != uuid.Nil { + if opts.MusicBrainzID != uuid.Nil { l.Debug().Msgf("Fetching track from DB with MusicBrainz ID %s", opts.MusicBrainzID) t, err := d.q.GetTrackByMbzID(ctx, &opts.MusicBrainzID) if err != nil { return nil, fmt.Errorf("GetTrack: GetTrackByMbzID: %w", err) } - track = models.Track{ - ID: t.ID, - MbzID: t.MusicBrainzID, - Title: t.Title, - AlbumID: t.ReleaseID, - Duration: t.Duration, - } - } else if len(opts.ArtistIDs) > 0 { - l.Debug().Msgf("Fetching track from DB with title '%s' and artist id(s) '%v'", opts.Title, opts.ArtistIDs) - t, err := d.q.GetTrackByTitleAndArtists(ctx, repository.GetTrackByTitleAndArtistsParams{ - Title: opts.Title, - Column2: opts.ArtistIDs, + opts.ID = t.ID + } else if len(opts.ArtistIDs) > 0 && opts.ReleaseID != 0 { + l.Debug().Msgf("Fetching track from DB from release id %d with title '%s' and artist id(s) '%v'", opts.ReleaseID, opts.Title, opts.ArtistIDs) + t, err := d.q.GetTrackByTrackInfo(ctx, repository.GetTrackByTrackInfoParams{ + Title: opts.Title, + ReleaseID: opts.ReleaseID, + Column3: opts.ArtistIDs, }) if err != nil { - return nil, fmt.Errorf("GetTrack: GetTrackByTitleAndArtists: %w", err) + return nil, fmt.Errorf("GetTrack: GetTrackByTrackInfo: %w", err) } - track = models.Track{ - ID: t.ID, - MbzID: t.MusicBrainzID, - Title: t.Title, - AlbumID: t.ReleaseID, - Duration: t.Duration, - } - } else { - return nil, errors.New("GetTrack: insufficient information to get track") + opts.ID = t.ID + } + + l.Debug().Msgf("Fetching track from DB with id %d", opts.ID) + t, err := d.q.GetTrack(ctx, opts.ID) + if err != nil { + return nil, fmt.Errorf("GetTrack: GetTrack By ID: %w", err) } count, err := d.q.CountListensFromTrack(ctx, repository.CountListensFromTrackParams{ ListenedAt: time.Unix(0, 0), ListenedAt_2: time.Now(), - TrackID: track.ID, + TrackID: opts.ID, }) if 
err != nil { return nil, fmt.Errorf("GetTrack: CountListensFromTrack: %w", err) @@ -83,20 +58,37 @@ func (d *Psql) GetTrack(ctx context.Context, opts db.GetTrackOpts) (*models.Trac seconds, err := d.CountTimeListenedToItem(ctx, db.TimeListenedOpts{ Timeframe: db.Timeframe{Period: db.PeriodAllTime}, - TrackID: track.ID, + TrackID: opts.ID, }) if err != nil { return nil, fmt.Errorf("GetTrack: CountTimeListenedToItem: %w", err) } - firstListen, err := d.q.GetFirstListenFromTrack(ctx, track.ID) + firstListen, err := d.q.GetFirstListenFromTrack(ctx, opts.ID) if err != nil && !errors.Is(err, pgx.ErrNoRows) { return nil, fmt.Errorf("GetAlbum: GetFirstListenFromRelease: %w", err) } + rank, err := d.q.GetTrackAllTimeRank(ctx, opts.ID) + if err != nil && !errors.Is(err, pgx.ErrNoRows) { + return nil, fmt.Errorf("GetAlbum: GetTrackAllTimeRank: %w", err) + } - track.ListenCount = count - track.TimeListened = seconds - track.FirstListen = firstListen.ListenedAt.Unix() + track = models.Track{ + ID: t.ID, + MbzID: t.MusicBrainzID, + Title: t.Title, + AlbumID: t.ReleaseID, + Image: t.Image, + Duration: t.Duration, + AllTimeRank: rank.Rank, + ListenCount: count, + TimeListened: seconds, + FirstListen: firstListen.ListenedAt.Unix(), + } + err = json.Unmarshal(t.Artists, &track.Artists) + if err != nil { + return nil, fmt.Errorf("GetTrack: json.Unmarshal: %w", err) + } return &track, nil } @@ -145,6 +137,13 @@ func (d *Psql) SaveTrack(ctx context.Context, opts db.SaveTrackOpts) (*models.Tr if err != nil { return nil, fmt.Errorf("SaveTrack: AssociateArtistToTrack: %w", err) } + err = qtx.AssociateArtistToRelease(ctx, repository.AssociateArtistToReleaseParams{ + ArtistID: aid, + ReleaseID: trackRow.ReleaseID, + }) + if err != nil { + return nil, fmt.Errorf("SaveTrack: AssociateArtistToTrack: %w", err) + } } // insert primary alias err = qtx.InsertTrackAlias(ctx, repository.InsertTrackAliasParams{ @@ -241,7 +240,28 @@ func (d *Psql) SaveTrackAliases(ctx context.Context, id int32, 
aliases []string, } func (d *Psql) DeleteTrack(ctx context.Context, id int32) error { - return d.q.DeleteTrack(ctx, id) + l := logger.FromContext(ctx) + tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return fmt.Errorf("DeleteTrack: %w", err) + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + + err = qtx.DeleteTrack(ctx, id) + if err != nil { + return fmt.Errorf("DeleteTrack: DeleteTrack: %w", err) + } + + // also clean orphaned entries to ensure artists are disassociated with releases where + // they no longer have any tracks on the release + err = qtx.CleanOrphanedEntries(ctx) + if err != nil { + return fmt.Errorf("DeleteTrack: CleanOrphanedEntries: %w", err) + } + + return tx.Commit(ctx) } func (d *Psql) DeleteTrackAlias(ctx context.Context, id int32, alias string) error { @@ -374,3 +394,29 @@ func (d *Psql) SetPrimaryTrackArtist(ctx context.Context, id int32, artistId int } return tx.Commit(ctx) } + +// returns nil, nil when no results +func (d *Psql) GetTracksWithNoDurationButHaveMbzID(ctx context.Context, from int32) ([]*models.Track, error) { + results, err := d.q.GetTracksWithNoDurationButHaveMbzID(ctx, repository.GetTracksWithNoDurationButHaveMbzIDParams{ + Limit: 20, + ID: from, + }) + if errors.Is(err, pgx.ErrNoRows) { + return nil, nil + } else if err != nil { + return nil, fmt.Errorf("GetTracksWithNoDurationButHaveMbzID: %w", err) + } + + ret := make([]*models.Track, 0) + + for _, v := range results { + ret = append(ret, &models.Track{ + ID: v.ID, + Duration: v.Duration, + MbzID: v.MusicBrainzID, + Title: v.Title, + }) + } + + return ret, nil +} diff --git a/internal/db/psql/track_test.go b/internal/db/psql/track_test.go index 777b22c..f0ecd09 100644 --- a/internal/db/psql/track_test.go +++ b/internal/db/psql/track_test.go @@ -16,55 +16,55 @@ func testDataForTracks(t *testing.T) { // Insert artists err := store.Exec(context.Background(), - `INSERT INTO artists (musicbrainz_id) + 
`INSERT INTO artists (musicbrainz_id) VALUES ('00000000-0000-0000-0000-000000000001'), ('00000000-0000-0000-0000-000000000002')`) require.NoError(t, err) // Insert artist aliases err = store.Exec(context.Background(), - `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) + `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) VALUES (1, 'Artist One', 'Testing', true), (2, 'Artist Two', 'Testing', true)`) require.NoError(t, err) // Insert release groups err = store.Exec(context.Background(), - `INSERT INTO releases (musicbrainz_id) + `INSERT INTO releases (musicbrainz_id) VALUES ('00000000-0000-0000-0000-000000000011'), ('00000000-0000-0000-0000-000000000022')`) require.NoError(t, err) // Insert release aliases err = store.Exec(context.Background(), - `INSERT INTO release_aliases (release_id, alias, source, is_primary) + `INSERT INTO release_aliases (release_id, alias, source, is_primary) VALUES (1, 'Release Group One', 'Testing', true), (2, 'Release Group Two', 'Testing', true)`) require.NoError(t, err) // Insert tracks err = store.Exec(context.Background(), - `INSERT INTO tracks (musicbrainz_id, release_id, duration) + `INSERT INTO tracks (musicbrainz_id, release_id, duration) VALUES ('11111111-1111-1111-1111-111111111111', 1, 100), ('22222222-2222-2222-2222-222222222222', 2, 100)`) require.NoError(t, err) // Insert track aliases err = store.Exec(context.Background(), - `INSERT INTO track_aliases (track_id, alias, source, is_primary) + `INSERT INTO track_aliases (track_id, alias, source, is_primary) VALUES (1, 'Track One', 'Testing', true), (2, 'Track Two', 'Testing', true)`) require.NoError(t, err) // Associate tracks with artists err = store.Exec(context.Background(), - `INSERT INTO artist_tracks (artist_id, track_id) + `INSERT INTO artist_tracks (artist_id, track_id) VALUES (1, 1), (2, 2)`) require.NoError(t, err) - // Associate tracks with artists + // Insert listens err = store.Exec(context.Background(), - `INSERT INTO listens 
(user_id, track_id, listened_at) + `INSERT INTO listens (user_id, track_id, listened_at) VALUES (1, 1, NOW()), (1, 2, NOW())`) require.NoError(t, err) } @@ -88,9 +88,10 @@ func TestGetTrack(t *testing.T) { assert.Equal(t, "Track Two", track.Title) assert.EqualValues(t, 100, track.TimeListened) - // Test GetTrack by Title and ArtistIDs + // Test GetTrack by Title, Release and ArtistIDs track, err = store.GetTrack(ctx, db.GetTrackOpts{ Title: "Track One", + ReleaseID: 1, ArtistIDs: []int32{1}, }) require.NoError(t, err) @@ -99,7 +100,7 @@ func TestGetTrack(t *testing.T) { assert.EqualValues(t, 100, track.TimeListened) // Test GetTrack with insufficient information - _, err = store.GetTrack(ctx, db.GetTrackOpts{}) + _, err = store.GetTrack(ctx, db.GetTrackOpts{Title: "Track One"}) assert.Error(t, err) } func TestSaveTrack(t *testing.T) { @@ -227,3 +228,27 @@ func TestDeleteTrack(t *testing.T) { _, err = store.Count(ctx, `SELECT * FROM tracks WHERE id = 2`) require.ErrorIs(t, err, pgx.ErrNoRows) // no rows error } + +func TestReleaseAssociations(t *testing.T) { + testDataForTracks(t) + ctx := context.Background() + + track, err := store.SaveTrack(ctx, db.SaveTrackOpts{ + Title: "Track Three", + AlbumID: 2, + ArtistIDs: []int32{2, 1}, // Artist Two feat. 
Artist One + Duration: 100, + }) + require.NoError(t, err) + count, err := store.Count(ctx, `SELECT COUNT(*) FROM artist_releases WHERE release_id = 2`) + require.NoError(t, err) + require.Equal(t, 2, count, "expected release to be associated with artist from inserted track") + + err = store.DeleteTrack(ctx, track.ID) + require.NoError(t, err) + + count, err = store.Count(ctx, `SELECT COUNT(*) FROM artist_releases WHERE release_id = 2`) + require.NoError(t, err) + require.Equal(t, 1, count, "expected artist no longer on release to be disassociated from release") + +} diff --git a/internal/db/timeframe.go b/internal/db/timeframe.go new file mode 100644 index 0000000..ebc3508 --- /dev/null +++ b/internal/db/timeframe.go @@ -0,0 +1,122 @@ +package db + +import ( + "time" +) + +type Timeframe struct { + Period Period + Year int + Month int + Week int + FromUnix int64 + ToUnix int64 + From time.Time + To time.Time + Timezone *time.Location +} + +func TimeframeToTimeRange(tf Timeframe) (t1, t2 time.Time) { + now := time.Now() + loc := tf.Timezone + if loc == nil { + loc, _ = time.LoadLocation("UTC") + } + + // --------------------------------------------------------------------- + // 1. Explicit From / To (time.Time) — highest precedence + // --------------------------------------------------------------------- + if !tf.From.IsZero() { + if tf.To.IsZero() { + return tf.From, now + } + return tf.From, tf.To + } + + // --------------------------------------------------------------------- + // 2. Unix timestamps + // --------------------------------------------------------------------- + if tf.FromUnix != 0 { + t1 = time.Unix(tf.FromUnix, 0).In(loc) + if tf.ToUnix == 0 { + return t1, now + } + t2 = time.Unix(tf.ToUnix, 0).In(loc) + return t1, t2 + } + + // --------------------------------------------------------------------- + // 3. 
Derived ranges (Year / Month / Week) + // --------------------------------------------------------------------- + + // YEAR only + if tf.Year != 0 && tf.Month == 0 && tf.Week == 0 { + start := time.Date(tf.Year, 1, 1, 0, 0, 0, 0, loc) + end := time.Date(tf.Year+1, 1, 1, 0, 0, 0, 0, loc).Add(-time.Second) + return start, end + } + + // MONTH (+ optional year) + if tf.Month != 0 { + year := tf.Year + if year == 0 { + year = now.Year() + if int(now.Month()) < tf.Month { + year-- + } + } + + start := time.Date(year, time.Month(tf.Month), 1, 0, 0, 0, 0, loc) + end := endOfMonth(year, time.Month(tf.Month), loc) + return start, end + } + + // WEEK (+ optional year) + if tf.Week != 0 { + year := tf.Year + if year == 0 { + year = now.Year() + _, currentWeek := now.ISOWeek() + if currentWeek < tf.Week { + year-- + } + } + + // ISO week 1 contains Jan 4 + jan4 := time.Date(year, 1, 4, 0, 0, 0, 0, loc) + week1Start := startOfWeek(jan4) + + start := week1Start.AddDate(0, 0, (tf.Week-1)*7) + end := endOfWeek(start) + return start, end + } + + // --------------------------------------------------------------------- + // 4. Period + // --------------------------------------------------------------------- + + if !tf.Period.IsZero() { + return StartTimeFromPeriod(tf.Period), now + } + + // --------------------------------------------------------------------- + // 5. 
Fallback: empty timeframe → zero values + // --------------------------------------------------------------------- + return time.Time{}, time.Time{} +} + +func startOfWeek(t time.Time) time.Time { + // ISO week: Monday = 1 + weekday := int(t.Weekday()) + if weekday == 0 { // Sunday + weekday = 7 + } + return time.Date(t.Year(), t.Month(), t.Day()-weekday+1, 0, 0, 0, 0, t.Location()) +} +func endOfWeek(t time.Time) time.Time { + return startOfWeek(t).AddDate(0, 0, 7).Add(-time.Second) +} +func endOfMonth(year int, month time.Month, loc *time.Location) time.Time { + startNextMonth := time.Date(year, month+1, 1, 0, 0, 0, 0, loc) + return startNextMonth.Add(-time.Second) +} diff --git a/internal/db/types.go b/internal/db/types.go index 421832f..46d3c01 100644 --- a/internal/db/types.go +++ b/internal/db/types.go @@ -28,6 +28,11 @@ type PaginatedResponse[T any] struct { CurrentPage int32 `json:"current_page"` } +type RankedItem[T any] struct { + Item T `json:"item"` + Rank int64 `json:"rank"` +} + type ExportItem struct { ListenedAt time.Time UserID int32 @@ -44,3 +49,9 @@ type ExportItem struct { ReleaseAliases []models.Alias Artists []models.ArtistWithFullAliases } + +type InterestBucket struct { + BucketStart time.Time `json:"bucket_start"` + BucketEnd time.Time `json:"bucket_end"` + ListenCount int64 `json:"listen_count"` +} diff --git a/internal/images/deezer.go b/internal/images/deezer.go index 8fb7b27..2ced676 100644 --- a/internal/images/deezer.go +++ b/internal/images/deezer.go @@ -110,6 +110,9 @@ func (c *DeezerClient) getEntity(ctx context.Context, endpoint string, result an return nil } +// Deezer behavior is that it serves a default image when it can't find one for an artist, so +// this function will just download the default image thinking that it is an actual artist image. +// I don't know how to fix this yet. 
func (c *DeezerClient) GetArtistImages(ctx context.Context, aliases []string) (string, error) { l := logger.FromContext(ctx) resp := new(DeezerArtistResponse) diff --git a/internal/images/imagesrc.go b/internal/images/imagesrc.go index 21eec65..46fe87a 100644 --- a/internal/images/imagesrc.go +++ b/internal/images/imagesrc.go @@ -5,6 +5,7 @@ import ( "context" "fmt" "net/http" + "strings" "sync" "github.com/gabehf/koito/internal/logger" @@ -16,6 +17,8 @@ type ImageSource struct { deezerC *DeezerClient subsonicEnabled bool subsonicC *SubsonicClient + lastfmEnabled bool + lastfmC *LastFMClient caaEnabled bool } type ImageSourceOpts struct { @@ -23,6 +26,7 @@ type ImageSourceOpts struct { EnableCAA bool EnableDeezer bool EnableSubsonic bool + EnableLastFM bool } var once sync.Once @@ -30,6 +34,7 @@ var imgsrc ImageSource type ArtistImageOpts struct { Aliases []string + MBID *uuid.UUID } type AlbumImageOpts struct { @@ -55,6 +60,10 @@ func Initialize(opts ImageSourceOpts) { imgsrc.subsonicEnabled = true imgsrc.subsonicC = NewSubsonicClient() } + if opts.EnableLastFM { + imgsrc.lastfmEnabled = true + imgsrc.lastfmC = NewLastFMClient() + } }) } @@ -65,31 +74,46 @@ func Shutdown() { func GetArtistImage(ctx context.Context, opts ArtistImageOpts) (string, error) { l := logger.FromContext(ctx) if imgsrc.subsonicEnabled { - img, err := imgsrc.subsonicC.GetArtistImage(ctx, opts.Aliases[0]) + img, err := imgsrc.subsonicC.GetArtistImage(ctx, opts.MBID, opts.Aliases[0]) if err != nil { - return "", err - } - if img != "" { + l.Debug().Err(err).Msg("GetArtistImage: Could not find artist image from Subsonic") + } else if img != "" { return img, nil } - l.Debug().Msg("Could not find artist image from Subsonic") + } else { + l.Debug().Msg("GetArtistImage: Subsonic image fetching is disabled") } - if imgsrc.deezerC != nil { + if imgsrc.lastfmEnabled { + img, err := imgsrc.lastfmC.GetArtistImage(ctx, opts.MBID, opts.Aliases[0]) + if err != nil { + 
l.Debug().Err(err).Msg("GetArtistImage: Could not find artist image from LastFM") + } else if img != "" { + return img, nil + } + } else { + l.Debug().Msg("GetArtistImage: LastFM image fetching is disabled") + } + if imgsrc.deezerEnabled { img, err := imgsrc.deezerC.GetArtistImages(ctx, opts.Aliases) if err != nil { + l.Debug().Err(err).Msg("GetArtistImage: Could not find artist image from Deezer") return "", err + } else if img != "" { + return img, nil } - return img, nil + } else { + l.Debug().Msg("GetArtistImage: Deezer image fetching is disabled") } l.Warn().Msg("GetArtistImage: No image providers are enabled") return "", nil } + func GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) { l := logger.FromContext(ctx) if imgsrc.subsonicEnabled { - img, err := imgsrc.subsonicC.GetAlbumImage(ctx, opts.Artists[0], opts.Album) + img, err := imgsrc.subsonicC.GetAlbumImage(ctx, opts.ReleaseMbzID, opts.Artists[0], opts.Album) if err != nil { - return "", err + l.Debug().Err(err).Msg("GetAlbumImage: Could not find artist image from Subsonic") } if img != "" { return img, nil @@ -102,29 +126,41 @@ func GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) { url := fmt.Sprintf(caaBaseUrl+"/release/%s/front", opts.ReleaseMbzID.String()) resp, err := http.DefaultClient.Head(url) if err != nil { - return "", err + l.Debug().Err(err).Msg("GetAlbumImage: Could not find artist image from CoverArtArchive with Release MBID") + } else { + if resp.StatusCode == 200 { + return url, nil + } else { + l.Debug().Int("status", resp.StatusCode).Msg("GetAlbumImage: Got non-OK response from CoverArtArchive") + } } - if resp.StatusCode == 200 { - return url, nil - } - l.Debug().Str("url", url).Str("status", resp.Status).Msg("Could not find album cover from CoverArtArchive with MusicBrainz release ID") } if opts.ReleaseGroupMbzID != nil && *opts.ReleaseGroupMbzID != uuid.Nil { url := fmt.Sprintf(caaBaseUrl+"/release-group/%s/front", 
opts.ReleaseGroupMbzID.String()) resp, err := http.DefaultClient.Head(url) if err != nil { - return "", err + l.Debug().Err(err).Msg("GetAlbumImage: Could not find artist image from CoverArtArchive with Release Group MBID") } if resp.StatusCode == 200 { return url, nil } - l.Debug().Str("url", url).Str("status", resp.Status).Msg("Could not find album cover from CoverArtArchive with MusicBrainz release group ID") } } + if imgsrc.lastfmEnabled { + img, err := imgsrc.lastfmC.GetAlbumImage(ctx, opts.ReleaseMbzID, opts.Artists[0], opts.Album) + if err != nil { + l.Debug().Err(err).Msg("GetAlbumImage: Could not find artist image from Subsonic") + } + if img != "" { + return img, nil + } + l.Debug().Msg("Could not find album cover from Subsonic") + } if imgsrc.deezerEnabled { l.Debug().Msg("Attempting to find album image from Deezer") img, err := imgsrc.deezerC.GetAlbumImages(ctx, opts.Artists, opts.Album) if err != nil { + l.Debug().Err(err).Msg("GetAlbumImage: Could not find artist image from Deezer") return "", err } return img, nil @@ -132,3 +168,23 @@ func GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) { l.Warn().Msg("GetAlbumImage: No image providers are enabled") return "", nil } + +// ValidateImageURL checks if the URL points to a valid image by performing a HEAD request. 
+func ValidateImageURL(url string) error { + resp, err := http.Head(url) + if err != nil { + return fmt.Errorf("ValidateImageURL: http.Head: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("ValidateImageURL: HEAD request failed, status code: %d", resp.StatusCode) + } + + contentType := resp.Header.Get("Content-Type") + if !strings.HasPrefix(contentType, "image/") { + return fmt.Errorf("ValidateImageURL: URL does not point to an image, content type: %s", contentType) + } + + return nil +} diff --git a/internal/images/lastfm.go b/internal/images/lastfm.go new file mode 100644 index 0000000..f35f6a3 --- /dev/null +++ b/internal/images/lastfm.go @@ -0,0 +1,298 @@ +package images + +import ( + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/queue" + "github.com/google/uuid" +) + +// i told gemini to write this cuz i figured it would be simple enough and +// it looks like it just works? 
maybe ai is actually worth one quintillion gallons of water + +type LastFMClient struct { + apiKey string + baseUrl string + userAgent string + requestQueue *queue.RequestQueue +} + +// LastFM JSON structures use "#text" for the value of XML-mapped fields +type lastFMImage struct { + URL string `json:"#text"` + Size string `json:"size"` +} + +type lastFMAlbumResponse struct { + Album struct { + Name string `json:"name"` + Image []lastFMImage `json:"image"` + } `json:"album"` + Error int `json:"error"` + Message string `json:"message"` +} + +type lastFMArtistResponse struct { + Artist struct { + Name string `json:"name"` + Image []lastFMImage `json:"image"` + } `json:"artist"` + Error int `json:"error"` + Message string `json:"message"` +} + +const ( + lastFMApiBaseUrl = "http://ws.audioscrobbler.com/2.0/" +) + +func NewLastFMClient() *LastFMClient { + ret := new(LastFMClient) + ret.apiKey = cfg.LastFMApiKey() + ret.baseUrl = lastFMApiBaseUrl + ret.userAgent = cfg.UserAgent() + ret.requestQueue = queue.NewRequestQueue(5, 5) + return ret +} + +func (c *LastFMClient) queue(ctx context.Context, req *http.Request) ([]byte, error) { + l := logger.FromContext(ctx) + req.Header.Set("User-Agent", c.userAgent) + req.Header.Set("Accept", "application/json") + + resultChan := c.requestQueue.Enqueue(func(client *http.Client, done chan<- queue.RequestResult) { + resp, err := client.Do(req) + if err != nil { + l.Debug().Err(err).Str("url", req.URL.String()).Msg("Failed to contact LastFM") + done <- queue.RequestResult{Err: err} + return + } + defer resp.Body.Close() + + // LastFM might return 200 OK even for API errors (like "Artist not found"), + // so we rely on parsing the JSON body for logic errors later, + // but we still check for HTTP protocol failures here. 
+ if resp.StatusCode >= 500 { + err = fmt.Errorf("received server error from LastFM: %s", resp.Status) + done <- queue.RequestResult{Body: nil, Err: err} + return + } + + body, err := io.ReadAll(resp.Body) + done <- queue.RequestResult{Body: body, Err: err} + }) + + result := <-resultChan + return result.Body, result.Err +} + +func (c *LastFMClient) getEntity(ctx context.Context, params url.Values, result any) error { + l := logger.FromContext(ctx) + + // Add standard parameters + params.Set("api_key", c.apiKey) + params.Set("format", "json") + + // Construct URL + reqUrl, _ := url.Parse(c.baseUrl) + reqUrl.RawQuery = params.Encode() + + l.Debug().Msgf("Sending request to LastFM: GET %s", reqUrl.String()) + + req, err := http.NewRequest("GET", reqUrl.String(), nil) + if err != nil { + return fmt.Errorf("getEntity: %w", err) + } + + l.Debug().Msg("Adding LastFM request to queue") + body, err := c.queue(ctx, req) + if err != nil { + l.Err(err).Msg("LastFM request failed") + return fmt.Errorf("getEntity: %w", err) + } + + err = json.Unmarshal(body, result) + if err != nil { + l.Err(err).Msg("Failed to unmarshal LastFM response") + return fmt.Errorf("getEntity: %w", err) + } + + return nil +} + +// selectBestImage picks the largest available image from the LastFM slice +func (c *LastFMClient) selectBestImage(images []lastFMImage) string { + // Rank preference: mega > extralarge > large > medium > small + // Since LastFM usually returns them in order of size, we could take the last one, + // but a map lookup is safer against API changes. 
+ + imgMap := make(map[string]string) + for _, img := range images { + if img.URL != "" { + imgMap[img.Size] = img.URL + } + } + + if url, ok := imgMap["mega"]; ok { + if err := ValidateImageURL(overrideImgSize(url)); err == nil { + return overrideImgSize(url) + } else { + return url + } + } + if url, ok := imgMap["extralarge"]; ok { + if err := ValidateImageURL(overrideImgSize(url)); err == nil { + return overrideImgSize(url) + } else { + return url + } + } + if url, ok := imgMap["large"]; ok { + if err := ValidateImageURL(overrideImgSize(url)); err == nil { + return overrideImgSize(url) + } else { + return url + } + } + if url, ok := imgMap["medium"]; ok { + return url + } + if url, ok := imgMap["small"]; ok { + return url + } + + return "" +} + +// lastfm seems to only return a 300x300 image even for "mega" and "extralarge" images, so I'm cheating +func overrideImgSize(url string) string { + return strings.Replace(url, "300x300", "600x600", 1) +} + +func (c *LastFMClient) GetAlbumImage(ctx context.Context, mbid *uuid.UUID, artist, album string) (string, error) { + l := logger.FromContext(ctx) + resp := new(lastFMAlbumResponse) + l.Debug().Msgf("Finding album image for %s from artist %s", album, artist) + + // Helper to run the fetch + fetch := func(query paramsBuilder) error { + params := url.Values{} + params.Set("method", "album.getInfo") + query(params) + return c.getEntity(ctx, params, resp) + } + + // 1. Try MBID search first + if mbid != nil { + l.Debug().Str("mbid", mbid.String()).Msg("Searching album image by MBID") + err := fetch(func(p url.Values) { + p.Set("mbid", mbid.String()) + }) + + // If success and no API error code + if err == nil && resp.Error == 0 && len(resp.Album.Image) > 0 { + best := c.selectBestImage(resp.Album.Image) + if best != "" { + return best, nil + } + } else if resp.Error != 0 { + l.Debug().Int("api_error", resp.Error).Msg("LastFM MBID lookup failed, falling back to name") + } + } + + // 2. 
Fallback to Artist + Album name match + l.Debug().Str("title", album).Str("artist", artist).Msg("Searching album image by title and artist") + + // Clear previous response structure just in case + resp = new(lastFMAlbumResponse) + + err := fetch(func(p url.Values) { + p.Set("artist", artist) + p.Set("album", album) + // Auto-correct spelling is useful for name lookups + p.Set("autocorrect", "1") + }) + + if err != nil { + return "", fmt.Errorf("GetAlbumImage: %v", err) + } + + if resp.Error != 0 { + return "", fmt.Errorf("GetAlbumImage: LastFM API error %d: %s", resp.Error, resp.Message) + } + + best := c.selectBestImage(resp.Album.Image) + if best == "" { + return "", fmt.Errorf("GetAlbumImage: no suitable image found") + } + + return best, nil +} + +func (c *LastFMClient) GetArtistImage(ctx context.Context, mbid *uuid.UUID, artist string) (string, error) { + l := logger.FromContext(ctx) + resp := new(lastFMArtistResponse) + l.Debug().Msgf("Finding artist image for %s", artist) + + fetch := func(query paramsBuilder) error { + params := url.Values{} + params.Set("method", "artist.getInfo") + query(params) + return c.getEntity(ctx, params, resp) + } + + // 1. Try MBID search + if mbid != nil { + l.Debug().Str("mbid", mbid.String()).Msg("Searching artist image by MBID") + err := fetch(func(p url.Values) { + p.Set("mbid", mbid.String()) + }) + + if err == nil && resp.Error == 0 && len(resp.Artist.Image) > 0 { + best := c.selectBestImage(resp.Artist.Image) + if best != "" { + // Validate to match Subsonic implementation behavior + if err := ValidateImageURL(best); err == nil { + return best, nil + } + } + } + } + + // 2. 
Fallback to Artist name + l.Debug().Str("artist", artist).Msg("Searching artist image by name") + resp = new(lastFMArtistResponse) + + err := fetch(func(p url.Values) { + p.Set("artist", artist) + p.Set("autocorrect", "1") + }) + + if err != nil { + return "", fmt.Errorf("GetArtistImage: %v", err) + } + + if resp.Error != 0 { + return "", fmt.Errorf("GetArtistImage: LastFM API error %d: %s", resp.Error, resp.Message) + } + + best := c.selectBestImage(resp.Artist.Image) + if best == "" { + return "", fmt.Errorf("GetArtistImage: no suitable image found") + } + + if err := ValidateImageURL(best); err != nil { + return "", fmt.Errorf("GetArtistImage: failed to validate image url") + } + + return best, nil +} + +type paramsBuilder func(url.Values) diff --git a/internal/images/subsonic.go b/internal/images/subsonic.go index 961b4c2..4fd55c0 100644 --- a/internal/images/subsonic.go +++ b/internal/images/subsonic.go @@ -11,6 +11,7 @@ import ( "github.com/gabehf/koito/internal/cfg" "github.com/gabehf/koito/internal/logger" "github.com/gabehf/koito/queue" + "github.com/google/uuid" ) type SubsonicClient struct { @@ -26,6 +27,8 @@ type SubsonicAlbumResponse struct { SearchResult3 struct { Album []struct { CoverArt string `json:"coverArt"` + Artist string `json:"artist"` + MBID string `json:"musicBrainzId"` } `json:"album"` } `json:"searchResult3"` } `json:"subsonic-response"` @@ -43,7 +46,7 @@ type SubsonicArtistResponse struct { } const ( - subsonicAlbumSearchFmtStr = "/rest/search3?%s&f=json&query=%s&v=1.13.0&c=koito&artistCount=0&songCount=0&albumCount=1" + subsonicAlbumSearchFmtStr = "/rest/search3?%s&f=json&query=%s&v=1.13.0&c=koito&artistCount=0&songCount=0&albumCount=10" subsonicArtistSearchFmtStr = "/rest/search3?%s&f=json&query=%s&v=1.13.0&c=koito&artistCount=1&songCount=0&albumCount=0" subsonicCoverArtFmtStr = "/rest/getCoverArt?%s&id=%s&v=1.13.0&c=koito" ) @@ -106,32 +109,72 @@ func (c *SubsonicClient) getEntity(ctx context.Context, endpoint string, result return 
nil } -func (c *SubsonicClient) GetAlbumImage(ctx context.Context, artist, album string) (string, error) { +func (c *SubsonicClient) GetAlbumImage(ctx context.Context, mbid *uuid.UUID, artist, album string) (string, error) { l := logger.FromContext(ctx) resp := new(SubsonicAlbumResponse) l.Debug().Msgf("Finding album image for %s from artist %s", album, artist) - err := c.getEntity(ctx, fmt.Sprintf(subsonicAlbumSearchFmtStr, c.authParams, url.QueryEscape(artist+" "+album)), resp) + // first try mbid search + if mbid != nil { + l.Debug().Str("mbid", mbid.String()).Msg("Searching album image by MBID") + err := c.getEntity(ctx, fmt.Sprintf(subsonicAlbumSearchFmtStr, c.authParams, url.QueryEscape(mbid.String())), resp) + if err != nil { + return "", fmt.Errorf("GetAlbumImage: %v", err) + } + l.Debug().Any("subsonic_response", resp).Msg("") + if len(resp.SubsonicResponse.SearchResult3.Album) >= 1 { + return cfg.SubsonicUrl() + fmt.Sprintf(subsonicCoverArtFmtStr, c.authParams, url.QueryEscape(resp.SubsonicResponse.SearchResult3.Album[0].CoverArt)), nil + } + } + // else do artist match + l.Debug().Str("title", album).Str("artist", artist).Msg("Searching album image by title and artist") + err := c.getEntity(ctx, fmt.Sprintf(subsonicAlbumSearchFmtStr, c.authParams, url.QueryEscape(album)), resp) if err != nil { return "", fmt.Errorf("GetAlbumImage: %v", err) } - l.Debug().Any("subsonic_response", resp).Send() - if len(resp.SubsonicResponse.SearchResult3.Album) < 1 || resp.SubsonicResponse.SearchResult3.Album[0].CoverArt == "" { - return "", fmt.Errorf("GetAlbumImage: failed to get album art") + l.Debug().Any("subsonic_response", resp).Msg("") + if len(resp.SubsonicResponse.SearchResult3.Album) < 1 { + return "", fmt.Errorf("GetAlbumImage: failed to get album art from subsonic") } - return cfg.SubsonicUrl() + fmt.Sprintf(subsonicCoverArtFmtStr, c.authParams, url.QueryEscape(resp.SubsonicResponse.SearchResult3.Album[0].CoverArt)), nil + for _, album := range 
resp.SubsonicResponse.SearchResult3.Album { + if album.Artist == artist { + return cfg.SubsonicUrl() + fmt.Sprintf(subsonicCoverArtFmtStr, c.authParams, url.QueryEscape(resp.SubsonicResponse.SearchResult3.Album[0].CoverArt)), nil + } + } + return "", fmt.Errorf("GetAlbumImage: failed to get album art from subsonic") } -func (c *SubsonicClient) GetArtistImage(ctx context.Context, artist string) (string, error) { +func (c *SubsonicClient) GetArtistImage(ctx context.Context, mbid *uuid.UUID, artist string) (string, error) { l := logger.FromContext(ctx) resp := new(SubsonicArtistResponse) l.Debug().Msgf("Finding artist image for %s", artist) + // first try mbid search + if mbid != nil { + l.Debug().Str("mbid", mbid.String()).Msg("Searching artist image by MBID") + err := c.getEntity(ctx, fmt.Sprintf(subsonicArtistSearchFmtStr, c.authParams, url.QueryEscape(mbid.String())), resp) + if err != nil { + return "", fmt.Errorf("GetArtistImage: %v", err) + } + l.Debug().Any("subsonic_response", resp).Msg("") + if len(resp.SubsonicResponse.SearchResult3.Artist) < 1 || resp.SubsonicResponse.SearchResult3.Artist[0].ArtistImageUrl == "" { + return "", fmt.Errorf("GetArtistImage: failed to get artist art") + } + // Subsonic seems to have a tendency to return an artist image even though the url is a 404 + if err = ValidateImageURL(resp.SubsonicResponse.SearchResult3.Artist[0].ArtistImageUrl); err != nil { + return "", fmt.Errorf("GetArtistImage: failed to get validate image url") + } + } + l.Debug().Str("artist", artist).Msg("Searching artist image by name") err := c.getEntity(ctx, fmt.Sprintf(subsonicArtistSearchFmtStr, c.authParams, url.QueryEscape(artist)), resp) if err != nil { return "", fmt.Errorf("GetArtistImage: %v", err) } - l.Debug().Any("subsonic_response", resp).Send() + l.Debug().Any("subsonic_response", resp).Msg("") if len(resp.SubsonicResponse.SearchResult3.Artist) < 1 || resp.SubsonicResponse.SearchResult3.Artist[0].ArtistImageUrl == "" { return "", 
fmt.Errorf("GetArtistImage: failed to get artist art") } + // Subsonic seems to have a tendency to return an artist image even though the url is a 404 + if err = ValidateImageURL(resp.SubsonicResponse.SearchResult3.Artist[0].ArtistImageUrl); err != nil { + return "", fmt.Errorf("GetArtistImage: failed to get validate image url") + } return resp.SubsonicResponse.SearchResult3.Artist[0].ArtistImageUrl, nil } diff --git a/internal/importer/koito.go b/internal/importer/koito.go index ae74cbf..0f8df74 100644 --- a/internal/importer/koito.go +++ b/internal/importer/koito.go @@ -42,6 +42,10 @@ func ImportKoitoFile(ctx context.Context, store db.DB, filename string) error { count := 0 for i := range data.Listens { + if !inImportTimeWindow(data.Listens[i].ListenedAt) { + l.Debug().Msgf("Skipping import due to import time rules") + continue + } // use this for save/get mbid for all artist/album/track var mbid uuid.UUID @@ -126,6 +130,7 @@ func ImportKoitoFile(ctx context.Context, store db.DB, filename string) error { track, err := store.GetTrack(ctx, db.GetTrackOpts{ MusicBrainzID: mbid, Title: getPrimaryAliasFromAliasSlice(data.Listens[i].Track.Aliases), + ReleaseID: albumId, ArtistIDs: artistIds, }) if errors.Is(err, pgx.ErrNoRows) { diff --git a/internal/importer/listenbrainz.go b/internal/importer/listenbrainz.go index 4187bbb..7c1a8bb 100644 --- a/internal/importer/listenbrainz.go +++ b/internal/importer/listenbrainz.go @@ -85,7 +85,14 @@ func ImportListenBrainzFile(ctx context.Context, store db.DB, mbzc mbz.MusicBrai } artistMbzIDs, err := utils.ParseUUIDSlice(payload.TrackMeta.AdditionalInfo.ArtistMBIDs) if err != nil { - l.Debug().Err(err).Msg("Failed to parse one or more uuids") + l.Debug().AnErr("error", err).Msg("ImportListenBrainzFile: Failed to parse one or more UUIDs") + } + if len(artistMbzIDs) < 1 { + l.Debug().AnErr("error", err).Msg("ImportListenBrainzFile: Attempting to parse artist UUIDs from mbid_mapping") + 
utils.ParseUUIDSlice(payload.TrackMeta.MBIDMapping.ArtistMBIDs) + if err != nil { + l.Debug().AnErr("error", err).Msg("ImportListenBrainzFile: Failed to parse one or more UUIDs") + } } rgMbzID, err := uuid.Parse(payload.TrackMeta.AdditionalInfo.ReleaseGroupMBID) if err != nil { @@ -93,11 +100,17 @@ func ImportListenBrainzFile(ctx context.Context, store db.DB, mbzc mbz.MusicBrai } releaseMbzID, err := uuid.Parse(payload.TrackMeta.AdditionalInfo.ReleaseMBID) if err != nil { - releaseMbzID = uuid.Nil + releaseMbzID, err = uuid.Parse(payload.TrackMeta.MBIDMapping.ReleaseMBID) + if err != nil { + releaseMbzID = uuid.Nil + } } recordingMbzID, err := uuid.Parse(payload.TrackMeta.AdditionalInfo.RecordingMBID) if err != nil { - recordingMbzID = uuid.Nil + recordingMbzID, err = uuid.Parse(payload.TrackMeta.MBIDMapping.RecordingMBID) + if err != nil { + recordingMbzID = uuid.Nil + } } var client string diff --git a/internal/models/album.go b/internal/models/album.go index 24948f9..a295fe9 100644 --- a/internal/models/album.go +++ b/internal/models/album.go @@ -12,11 +12,5 @@ type Album struct { ListenCount int64 `json:"listen_count"` TimeListened int64 `json:"time_listened"` FirstListen int64 `json:"first_listen"` + AllTimeRank int64 `json:"all_time_rank"` } - -// type SimpleAlbum struct { -// ID int32 `json:"id"` -// Title string `json:"title"` -// VariousArtists bool `json:"is_various_artists"` -// Image uuid.UUID `json:"image"` -// } diff --git a/internal/models/artist.go b/internal/models/artist.go index 7784e51..07f09e6 100644 --- a/internal/models/artist.go +++ b/internal/models/artist.go @@ -12,6 +12,7 @@ type Artist struct { TimeListened int64 `json:"time_listened"` FirstListen int64 `json:"first_listen"` IsPrimary bool `json:"is_primary,omitempty"` + AllTimeRank int64 `json:"all_time_rank"` } type SimpleArtist struct { diff --git a/internal/models/track.go b/internal/models/track.go index 8eb802c..4cb5b04 100644 --- a/internal/models/track.go +++ 
b/internal/models/track.go @@ -13,4 +13,5 @@ type Track struct { AlbumID int32 `json:"album_id"` TimeListened int64 `json:"time_listened"` FirstListen int64 `json:"first_listen"` + AllTimeRank int64 `json:"all_time_rank"` } diff --git a/internal/repository/artist.sql.go b/internal/repository/artist.sql.go index 3d33446..8506975 100644 --- a/internal/repository/artist.sql.go +++ b/internal/repository/artist.sql.go @@ -134,6 +134,39 @@ func (q *Queries) GetArtist(ctx context.Context, id int32) (GetArtistRow, error) return i, err } +const getArtistAllTimeRank = `-- name: GetArtistAllTimeRank :one +SELECT + artist_id, + rank +FROM ( + SELECT + x.artist_id, + RANK() OVER (ORDER BY x.listen_count DESC) AS rank + FROM ( + SELECT + at.artist_id, + COUNT(*) AS listen_count + FROM listens l + JOIN tracks t ON l.track_id = t.id + JOIN artist_tracks at ON t.id = at.track_id + GROUP BY at.artist_id + ) x + ) +WHERE artist_id = $1 +` + +type GetArtistAllTimeRankRow struct { + ArtistID int32 + Rank int64 +} + +func (q *Queries) GetArtistAllTimeRank(ctx context.Context, artistID int32) (GetArtistAllTimeRankRow, error) { + row := q.db.QueryRow(ctx, getArtistAllTimeRank, artistID) + var i GetArtistAllTimeRankRow + err := row.Scan(&i.ArtistID, &i.Rank) + return i, err +} + const getArtistByImage = `-- name: GetArtistByImage :one SELECT id, musicbrainz_id, image, image_source FROM artists WHERE image = $1 LIMIT 1 ` @@ -221,6 +254,47 @@ func (q *Queries) GetArtistByName(ctx context.Context, alias string) (GetArtistB return i, err } +const getArtistsWithoutImages = `-- name: GetArtistsWithoutImages :many +SELECT + id, musicbrainz_id, image, image_source, name +FROM artists_with_name +WHERE image IS NULL + AND id > $2 +ORDER BY id ASC +LIMIT $1 +` + +type GetArtistsWithoutImagesParams struct { + Limit int32 + ID int32 +} + +func (q *Queries) GetArtistsWithoutImages(ctx context.Context, arg GetArtistsWithoutImagesParams) ([]ArtistsWithName, error) { + rows, err := q.db.Query(ctx, 
getArtistsWithoutImages, arg.Limit, arg.ID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ArtistsWithName + for rows.Next() { + var i ArtistsWithName + if err := rows.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.ImageSource, + &i.Name, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const getReleaseArtists = `-- name: GetReleaseArtists :many SELECT a.id, a.musicbrainz_id, a.image, a.image_source, a.name, @@ -269,18 +343,27 @@ func (q *Queries) GetReleaseArtists(ctx context.Context, releaseID int32) ([]Get const getTopArtistsPaginated = `-- name: GetTopArtistsPaginated :many SELECT + x.id, + x.name, + x.musicbrainz_id, + x.image, + x.listen_count, + RANK() OVER (ORDER BY x.listen_count DESC) AS rank +FROM ( + SELECT a.id, a.name, a.musicbrainz_id, a.image, COUNT(*) AS listen_count -FROM listens l -JOIN tracks t ON l.track_id = t.id -JOIN artist_tracks at ON at.track_id = t.id -JOIN artists_with_name a ON a.id = at.artist_id -WHERE l.listened_at BETWEEN $1 AND $2 -GROUP BY a.id, a.name, a.musicbrainz_id, a.image, a.image_source, a.name -ORDER BY listen_count DESC, a.id + FROM listens l + JOIN tracks t ON l.track_id = t.id + JOIN artist_tracks at ON at.track_id = t.id + JOIN artists_with_name a ON a.id = at.artist_id + WHERE l.listened_at BETWEEN $1 AND $2 + GROUP BY a.id, a.name, a.musicbrainz_id, a.image +) x +ORDER BY x.listen_count DESC, x.id LIMIT $3 OFFSET $4 ` @@ -297,6 +380,7 @@ type GetTopArtistsPaginatedRow struct { MusicBrainzID *uuid.UUID Image *uuid.UUID ListenCount int64 + Rank int64 } func (q *Queries) GetTopArtistsPaginated(ctx context.Context, arg GetTopArtistsPaginatedParams) ([]GetTopArtistsPaginatedRow, error) { @@ -319,6 +403,7 @@ func (q *Queries) GetTopArtistsPaginated(ctx context.Context, arg GetTopArtistsP &i.MusicBrainzID, &i.Image, &i.ListenCount, + &i.Rank, ); err != nil { return nil, err } diff 
--git a/internal/repository/etc.sql.go b/internal/repository/etc.sql.go index ed902ea..484f5c4 100644 --- a/internal/repository/etc.sql.go +++ b/internal/repository/etc.sql.go @@ -15,11 +15,17 @@ BEGIN DELETE FROM tracks WHERE id NOT IN (SELECT l.track_id FROM listens l); DELETE FROM releases WHERE id NOT IN (SELECT t.release_id FROM tracks t); DELETE FROM artists WHERE id NOT IN (SELECT at.artist_id FROM artist_tracks at); + DELETE FROM artist_releases ar + WHERE NOT EXISTS ( + SELECT 1 + FROM artist_tracks at + JOIN tracks t ON at.track_id = t.id + WHERE at.artist_id = ar.artist_id + AND t.release_id = ar.release_id + ); END $$ ` -// DELETE FROM releases WHERE release_group_id NOT IN (SELECT t.release_group_id FROM tracks t); -// DELETE FROM releases WHERE release_group_id NOT IN (SELECT rg.id FROM release_groups rg); func (q *Queries) CleanOrphanedEntries(ctx context.Context) error { _, err := q.db.Exec(ctx, cleanOrphanedEntries) return err diff --git a/internal/repository/interest.sql.go b/internal/repository/interest.sql.go new file mode 100644 index 0000000..ae77764 --- /dev/null +++ b/internal/repository/interest.sql.go @@ -0,0 +1,247 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.30.0 +// source: interest.sql + +package repository + +import ( + "context" + "time" +) + +const getGroupedListensFromArtist = `-- name: GetGroupedListensFromArtist :many +WITH bounds AS ( + SELECT + MIN(l.listened_at) AS start_time, + NOW() AS end_time + FROM listens l + JOIN tracks t ON t.id = l.track_id + JOIN artist_tracks at ON at.track_id = t.id + WHERE at.artist_id = $1 +), +stats AS ( + SELECT + start_time, + end_time, + EXTRACT(EPOCH FROM (end_time - start_time)) AS total_seconds, + ((end_time - start_time) / $2::int) AS bucket_interval + FROM bounds +), +bucket_series AS ( + SELECT generate_series(0, $2::int - 1) AS idx +), +listen_indices AS ( + SELECT + LEAST( + $2::int - 1, + FLOOR( + (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0)) + * $2::int + )::int + ) AS bucket_idx + FROM listens l + JOIN tracks t ON t.id = l.track_id + JOIN artist_tracks at ON at.track_id = t.id + CROSS JOIN stats s + WHERE at.artist_id = $1 + AND s.start_time IS NOT NULL +) +SELECT + (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS bucket_start, + (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end, + COUNT(li.bucket_idx) AS listen_count +FROM bucket_series bs +CROSS JOIN stats s +LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx +WHERE s.start_time IS NOT NULL +GROUP BY bs.idx, s.start_time, s.bucket_interval +ORDER BY bs.idx +` + +type GetGroupedListensFromArtistParams struct { + ArtistID int32 + BucketCount int32 +} + +type GetGroupedListensFromArtistRow struct { + BucketStart time.Time + BucketEnd time.Time + ListenCount int64 +} + +func (q *Queries) GetGroupedListensFromArtist(ctx context.Context, arg GetGroupedListensFromArtistParams) ([]GetGroupedListensFromArtistRow, error) { + rows, err := q.db.Query(ctx, getGroupedListensFromArtist, arg.ArtistID, arg.BucketCount) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetGroupedListensFromArtistRow 
+ for rows.Next() { + var i GetGroupedListensFromArtistRow + if err := rows.Scan(&i.BucketStart, &i.BucketEnd, &i.ListenCount); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGroupedListensFromRelease = `-- name: GetGroupedListensFromRelease :many +WITH bounds AS ( + SELECT + MIN(l.listened_at) AS start_time, + NOW() AS end_time + FROM listens l + JOIN tracks t ON t.id = l.track_id + WHERE t.release_id = $1 +), +stats AS ( + SELECT + start_time, + end_time, + EXTRACT(EPOCH FROM (end_time - start_time)) AS total_seconds, + ((end_time - start_time) / $2::int) AS bucket_interval + FROM bounds +), +bucket_series AS ( + SELECT generate_series(0, $2::int - 1) AS idx +), +listen_indices AS ( + SELECT + LEAST( + $2::int - 1, + FLOOR( + (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0)) + * $2::int + )::int + ) AS bucket_idx + FROM listens l + JOIN tracks t ON t.id = l.track_id + CROSS JOIN stats s + WHERE t.release_id = $1 + AND s.start_time IS NOT NULL +) +SELECT + (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS bucket_start, + (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end, + COUNT(li.bucket_idx) AS listen_count +FROM bucket_series bs +CROSS JOIN stats s +LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx +WHERE s.start_time IS NOT NULL +GROUP BY bs.idx, s.start_time, s.bucket_interval +ORDER BY bs.idx +` + +type GetGroupedListensFromReleaseParams struct { + ReleaseID int32 + BucketCount int32 +} + +type GetGroupedListensFromReleaseRow struct { + BucketStart time.Time + BucketEnd time.Time + ListenCount int64 +} + +func (q *Queries) GetGroupedListensFromRelease(ctx context.Context, arg GetGroupedListensFromReleaseParams) ([]GetGroupedListensFromReleaseRow, error) { + rows, err := q.db.Query(ctx, getGroupedListensFromRelease, arg.ReleaseID, arg.BucketCount) + if err != nil { + 
return nil, err + } + defer rows.Close() + var items []GetGroupedListensFromReleaseRow + for rows.Next() { + var i GetGroupedListensFromReleaseRow + if err := rows.Scan(&i.BucketStart, &i.BucketEnd, &i.ListenCount); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGroupedListensFromTrack = `-- name: GetGroupedListensFromTrack :many +WITH bounds AS ( + SELECT + MIN(l.listened_at) AS start_time, + NOW() AS end_time + FROM listens l + JOIN tracks t ON t.id = l.track_id + WHERE t.id = $1 +), +stats AS ( + SELECT + start_time, + end_time, + EXTRACT(EPOCH FROM (end_time - start_time)) AS total_seconds, + ((end_time - start_time) / $2::int) AS bucket_interval + FROM bounds +), +bucket_series AS ( + SELECT generate_series(0, $2::int - 1) AS idx +), +listen_indices AS ( + SELECT + LEAST( + $2::int - 1, + FLOOR( + (EXTRACT(EPOCH FROM (l.listened_at - s.start_time)) / NULLIF(s.total_seconds, 0)) + * $2::int + )::int + ) AS bucket_idx + FROM listens l + JOIN tracks t ON t.id = l.track_id + CROSS JOIN stats s + WHERE t.id = $1 + AND s.start_time IS NOT NULL +) +SELECT + (s.start_time + (s.bucket_interval * bs.idx))::timestamptz AS bucket_start, + (s.start_time + (s.bucket_interval * (bs.idx + 1)))::timestamptz AS bucket_end, + COUNT(li.bucket_idx) AS listen_count +FROM bucket_series bs +CROSS JOIN stats s +LEFT JOIN listen_indices li ON bs.idx = li.bucket_idx +WHERE s.start_time IS NOT NULL +GROUP BY bs.idx, s.start_time, s.bucket_interval +ORDER BY bs.idx +` + +type GetGroupedListensFromTrackParams struct { + ID int32 + BucketCount int32 +} + +type GetGroupedListensFromTrackRow struct { + BucketStart time.Time + BucketEnd time.Time + ListenCount int64 +} + +func (q *Queries) GetGroupedListensFromTrack(ctx context.Context, arg GetGroupedListensFromTrackParams) ([]GetGroupedListensFromTrackRow, error) { + rows, err := q.db.Query(ctx, getGroupedListensFromTrack, arg.ID, 
arg.BucketCount) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetGroupedListensFromTrackRow + for rows.Next() { + var i GetGroupedListensFromTrackRow + if err := rows.Scan(&i.BucketStart, &i.BucketEnd, &i.ListenCount); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/internal/repository/listen.sql.go b/internal/repository/listen.sql.go index 027873a..d3db4bb 100644 --- a/internal/repository/listen.sql.go +++ b/internal/repository/listen.sql.go @@ -190,12 +190,32 @@ func (q *Queries) DeleteListen(ctx context.Context, arg DeleteListenParams) erro return err } +const getFirstListen = `-- name: GetFirstListen :one +SELECT + track_id, listened_at, client, user_id +FROM listens +ORDER BY listened_at ASC +LIMIT 1 +` + +func (q *Queries) GetFirstListen(ctx context.Context) (Listen, error) { + row := q.db.QueryRow(ctx, getFirstListen) + var i Listen + err := row.Scan( + &i.TrackID, + &i.ListenedAt, + &i.Client, + &i.UserID, + ) + return i, err +} + const getFirstListenFromArtist = `-- name: GetFirstListenFromArtist :one -SELECT +SELECT l.track_id, l.listened_at, l.client, l.user_id FROM listens l JOIN tracks_with_title t ON l.track_id = t.id -JOIN artist_tracks at ON t.id = at.track_id +JOIN artist_tracks at ON t.id = at.track_id WHERE at.artist_id = $1 ORDER BY l.listened_at ASC LIMIT 1 @@ -214,7 +234,7 @@ func (q *Queries) GetFirstListenFromArtist(ctx context.Context, artistID int32) } const getFirstListenFromRelease = `-- name: GetFirstListenFromRelease :one -SELECT +SELECT l.track_id, l.listened_at, l.client, l.user_id FROM listens l JOIN tracks t ON l.track_id = t.id @@ -236,7 +256,7 @@ func (q *Queries) GetFirstListenFromRelease(ctx context.Context, releaseID int32 } const getFirstListenFromTrack = `-- name: GetFirstListenFromTrack :one -SELECT +SELECT l.track_id, l.listened_at, l.client, l.user_id FROM listens l JOIN tracks t 
ON l.track_id = t.id @@ -258,14 +278,14 @@ func (q *Queries) GetFirstListenFromTrack(ctx context.Context, id int32) (Listen } const getLastListensFromArtistPaginated = `-- name: GetLastListensFromArtistPaginated :many -SELECT +SELECT l.track_id, l.listened_at, l.client, l.user_id, t.title AS track_title, t.release_id AS release_id, get_artists_for_track(t.id) AS artists FROM listens l JOIN tracks_with_title t ON l.track_id = t.id -JOIN artist_tracks at ON t.id = at.track_id +JOIN artist_tracks at ON t.id = at.track_id WHERE at.artist_id = $5 AND l.listened_at BETWEEN $1 AND $2 ORDER BY l.listened_at DESC @@ -325,7 +345,7 @@ func (q *Queries) GetLastListensFromArtistPaginated(ctx context.Context, arg Get } const getLastListensFromReleasePaginated = `-- name: GetLastListensFromReleasePaginated :many -SELECT +SELECT l.track_id, l.listened_at, l.client, l.user_id, t.title AS track_title, t.release_id AS release_id, @@ -391,7 +411,7 @@ func (q *Queries) GetLastListensFromReleasePaginated(ctx context.Context, arg Ge } const getLastListensFromTrackPaginated = `-- name: GetLastListensFromTrackPaginated :many -SELECT +SELECT l.track_id, l.listened_at, l.client, l.user_id, t.title AS track_title, t.release_id AS release_id, @@ -457,7 +477,7 @@ func (q *Queries) GetLastListensFromTrackPaginated(ctx context.Context, arg GetL } const getLastListensPaginated = `-- name: GetLastListensPaginated :many -SELECT +SELECT l.track_id, l.listened_at, l.client, l.user_id, t.title AS track_title, t.release_id AS release_id, @@ -675,36 +695,29 @@ func (q *Queries) InsertListen(ctx context.Context, arg InsertListenParams) erro } const listenActivity = `-- name: ListenActivity :many -WITH buckets AS ( - SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start -), -bucketed_listens AS ( - SELECT - b.bucket_start, - COUNT(l.listened_at) AS listen_count - FROM buckets b - LEFT JOIN listens l - ON l.listened_at >= b.bucket_start - AND l.listened_at < b.bucket_start + 
$3::interval - GROUP BY b.bucket_start - ORDER BY b.bucket_start -) -SELECT bucket_start, listen_count FROM bucketed_listens +SELECT + (listened_at AT TIME ZONE $1::text)::date as day, + COUNT(*) AS listen_count +FROM listens +WHERE listened_at >= $2 +AND listened_at < $3 +GROUP BY day +ORDER BY day ` type ListenActivityParams struct { - Column1 time.Time - Column2 time.Time - Column3 pgtype.Interval + Column1 string + ListenedAt time.Time + ListenedAt_2 time.Time } type ListenActivityRow struct { - BucketStart time.Time + Day pgtype.Date ListenCount int64 } func (q *Queries) ListenActivity(ctx context.Context, arg ListenActivityParams) ([]ListenActivityRow, error) { - rows, err := q.db.Query(ctx, listenActivity, arg.Column1, arg.Column2, arg.Column3) + rows, err := q.db.Query(ctx, listenActivity, arg.Column1, arg.ListenedAt, arg.ListenedAt_2) if err != nil { return nil, err } @@ -712,7 +725,7 @@ func (q *Queries) ListenActivity(ctx context.Context, arg ListenActivityParams) var items []ListenActivityRow for rows.Next() { var i ListenActivityRow - if err := rows.Scan(&i.BucketStart, &i.ListenCount); err != nil { + if err := rows.Scan(&i.Day, &i.ListenCount); err != nil { return nil, err } items = append(items, i) @@ -724,46 +737,36 @@ func (q *Queries) ListenActivity(ctx context.Context, arg ListenActivityParams) } const listenActivityForArtist = `-- name: ListenActivityForArtist :many -WITH buckets AS ( - SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start -), -filtered_listens AS ( - SELECT l.track_id, l.listened_at, l.client, l.user_id - FROM listens l - JOIN artist_tracks t ON l.track_id = t.track_id - WHERE t.artist_id = $4 -), -bucketed_listens AS ( - SELECT - b.bucket_start, - COUNT(l.listened_at) AS listen_count - FROM buckets b - LEFT JOIN filtered_listens l - ON l.listened_at >= b.bucket_start - AND l.listened_at < b.bucket_start + $3::interval - GROUP BY b.bucket_start - ORDER BY b.bucket_start -) -SELECT bucket_start, 
listen_count FROM bucketed_listens +SELECT + (listened_at AT TIME ZONE $1::text)::date as day, + COUNT(*) AS listen_count +FROM listens l +JOIN tracks t ON l.track_id = t.id +JOIN artist_tracks at ON t.id = at.track_id +WHERE l.listened_at >= $2 +AND l.listened_at < $3 +AND at.artist_id = $4 +GROUP BY day +ORDER BY day ` type ListenActivityForArtistParams struct { - Column1 time.Time - Column2 time.Time - Column3 pgtype.Interval - ArtistID int32 + Column1 string + ListenedAt time.Time + ListenedAt_2 time.Time + ArtistID int32 } type ListenActivityForArtistRow struct { - BucketStart time.Time + Day pgtype.Date ListenCount int64 } func (q *Queries) ListenActivityForArtist(ctx context.Context, arg ListenActivityForArtistParams) ([]ListenActivityForArtistRow, error) { rows, err := q.db.Query(ctx, listenActivityForArtist, arg.Column1, - arg.Column2, - arg.Column3, + arg.ListenedAt, + arg.ListenedAt_2, arg.ArtistID, ) if err != nil { @@ -773,7 +776,7 @@ func (q *Queries) ListenActivityForArtist(ctx context.Context, arg ListenActivit var items []ListenActivityForArtistRow for rows.Next() { var i ListenActivityForArtistRow - if err := rows.Scan(&i.BucketStart, &i.ListenCount); err != nil { + if err := rows.Scan(&i.Day, &i.ListenCount); err != nil { return nil, err } items = append(items, i) @@ -785,46 +788,35 @@ func (q *Queries) ListenActivityForArtist(ctx context.Context, arg ListenActivit } const listenActivityForRelease = `-- name: ListenActivityForRelease :many -WITH buckets AS ( - SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start -), -filtered_listens AS ( - SELECT l.track_id, l.listened_at, l.client, l.user_id - FROM listens l - JOIN tracks t ON l.track_id = t.id - WHERE t.release_id = $4 -), -bucketed_listens AS ( - SELECT - b.bucket_start, - COUNT(l.listened_at) AS listen_count - FROM buckets b - LEFT JOIN filtered_listens l - ON l.listened_at >= b.bucket_start - AND l.listened_at < b.bucket_start + $3::interval - GROUP BY 
b.bucket_start - ORDER BY b.bucket_start -) -SELECT bucket_start, listen_count FROM bucketed_listens +SELECT + (listened_at AT TIME ZONE $1::text)::date as day, + COUNT(*) AS listen_count +FROM listens l +JOIN tracks t ON l.track_id = t.id +WHERE l.listened_at >= $2 +AND l.listened_at < $3 +AND t.release_id = $4 +GROUP BY day +ORDER BY day ` type ListenActivityForReleaseParams struct { - Column1 time.Time - Column2 time.Time - Column3 pgtype.Interval - ReleaseID int32 + Column1 string + ListenedAt time.Time + ListenedAt_2 time.Time + ReleaseID int32 } type ListenActivityForReleaseRow struct { - BucketStart time.Time + Day pgtype.Date ListenCount int64 } func (q *Queries) ListenActivityForRelease(ctx context.Context, arg ListenActivityForReleaseParams) ([]ListenActivityForReleaseRow, error) { rows, err := q.db.Query(ctx, listenActivityForRelease, arg.Column1, - arg.Column2, - arg.Column3, + arg.ListenedAt, + arg.ListenedAt_2, arg.ReleaseID, ) if err != nil { @@ -834,7 +826,7 @@ func (q *Queries) ListenActivityForRelease(ctx context.Context, arg ListenActivi var items []ListenActivityForReleaseRow for rows.Next() { var i ListenActivityForReleaseRow - if err := rows.Scan(&i.BucketStart, &i.ListenCount); err != nil { + if err := rows.Scan(&i.Day, &i.ListenCount); err != nil { return nil, err } items = append(items, i) @@ -846,46 +838,35 @@ func (q *Queries) ListenActivityForRelease(ctx context.Context, arg ListenActivi } const listenActivityForTrack = `-- name: ListenActivityForTrack :many -WITH buckets AS ( - SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start -), -filtered_listens AS ( - SELECT l.track_id, l.listened_at, l.client, l.user_id - FROM listens l - JOIN tracks t ON l.track_id = t.id - WHERE t.id = $4 -), -bucketed_listens AS ( - SELECT - b.bucket_start, - COUNT(l.listened_at) AS listen_count - FROM buckets b - LEFT JOIN filtered_listens l - ON l.listened_at >= b.bucket_start - AND l.listened_at < b.bucket_start + 
$3::interval - GROUP BY b.bucket_start - ORDER BY b.bucket_start -) -SELECT bucket_start, listen_count FROM bucketed_listens +SELECT + (listened_at AT TIME ZONE $1::text)::date as day, + COUNT(*) AS listen_count +FROM listens l +JOIN tracks t ON l.track_id = t.id +WHERE l.listened_at >= $2 +AND l.listened_at < $3 +AND t.id = $4 +GROUP BY day +ORDER BY day ` type ListenActivityForTrackParams struct { - Column1 time.Time - Column2 time.Time - Column3 pgtype.Interval - ID int32 + Column1 string + ListenedAt time.Time + ListenedAt_2 time.Time + ID int32 } type ListenActivityForTrackRow struct { - BucketStart time.Time + Day pgtype.Date ListenCount int64 } func (q *Queries) ListenActivityForTrack(ctx context.Context, arg ListenActivityForTrackParams) ([]ListenActivityForTrackRow, error) { rows, err := q.db.Query(ctx, listenActivityForTrack, arg.Column1, - arg.Column2, - arg.Column3, + arg.ListenedAt, + arg.ListenedAt_2, arg.ID, ) if err != nil { @@ -895,7 +876,7 @@ func (q *Queries) ListenActivityForTrack(ctx context.Context, arg ListenActivity var items []ListenActivityForTrackRow for rows.Next() { var i ListenActivityForTrackRow - if err := rows.Scan(&i.BucketStart, &i.ListenCount); err != nil { + if err := rows.Scan(&i.Day, &i.ListenCount); err != nil { return nil, err } items = append(items, i) diff --git a/internal/repository/release.sql.go b/internal/repository/release.sql.go index aa791e6..f62e086 100644 --- a/internal/repository/release.sql.go +++ b/internal/repository/release.sql.go @@ -141,6 +141,38 @@ func (q *Queries) GetRelease(ctx context.Context, id int32) (GetReleaseRow, erro return i, err } +const getReleaseAllTimeRank = `-- name: GetReleaseAllTimeRank :one +SELECT + release_id, + rank +FROM ( + SELECT + x.release_id, + RANK() OVER (ORDER BY x.listen_count DESC) AS rank + FROM ( + SELECT + t.release_id, + COUNT(*) AS listen_count + FROM listens l + JOIN tracks t ON l.track_id = t.id + GROUP BY t.release_id + ) x + ) +WHERE release_id = $1 +` + +type 
GetReleaseAllTimeRankRow struct { + ReleaseID int32 + Rank int64 +} + +func (q *Queries) GetReleaseAllTimeRank(ctx context.Context, releaseID int32) (GetReleaseAllTimeRankRow, error) { + row := q.db.QueryRow(ctx, getReleaseAllTimeRank, releaseID) + var i GetReleaseAllTimeRankRow + err := row.Scan(&i.ReleaseID, &i.Rank) + return i, err +} + const getReleaseByArtistAndTitle = `-- name: GetReleaseByArtistAndTitle :one SELECT r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title FROM releases_with_title r @@ -195,6 +227,39 @@ func (q *Queries) GetReleaseByArtistAndTitles(ctx context.Context, arg GetReleas return i, err } +const getReleaseByArtistAndTitlesNoMbzID = `-- name: GetReleaseByArtistAndTitlesNoMbzID :one +SELECT r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title +FROM releases_with_title r +JOIN artist_releases ar ON r.id = ar.release_id +WHERE r.title = ANY ($1::TEXT[]) + AND ar.artist_id = $2 + AND EXISTS ( + SELECT 1 + FROM releases r2 + WHERE r2.id = r.id + AND r2.musicbrainz_id IS NULL + ) +` + +type GetReleaseByArtistAndTitlesNoMbzIDParams struct { + Column1 []string + ArtistID int32 +} + +func (q *Queries) GetReleaseByArtistAndTitlesNoMbzID(ctx context.Context, arg GetReleaseByArtistAndTitlesNoMbzIDParams) (ReleasesWithTitle, error) { + row := q.db.QueryRow(ctx, getReleaseByArtistAndTitlesNoMbzID, arg.Column1, arg.ArtistID) + var i ReleasesWithTitle + err := row.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.VariousArtists, + &i.ImageSource, + &i.Title, + ) + return i, err +} + const getReleaseByImageID = `-- name: GetReleaseByImageID :one SELECT id, musicbrainz_id, image, various_artists, image_source FROM releases WHERE image = $1 LIMIT 1 @@ -288,17 +353,22 @@ func (q *Queries) GetReleasesWithoutImages(ctx context.Context, arg GetReleasesW const getTopReleasesFromArtist = `-- name: GetTopReleasesFromArtist :many SELECT - r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title, - 
COUNT(*) AS listen_count, - get_artists_for_release(r.id) AS artists -FROM listens l -JOIN tracks t ON l.track_id = t.id -JOIN releases_with_title r ON t.release_id = r.id -JOIN artist_releases ar ON r.id = ar.release_id -WHERE ar.artist_id = $5 - AND l.listened_at BETWEEN $1 AND $2 -GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source -ORDER BY listen_count DESC, r.id + x.id, x.musicbrainz_id, x.image, x.various_artists, x.image_source, x.title, x.listen_count, + get_artists_for_release(x.id) AS artists, + RANK() OVER (ORDER BY x.listen_count DESC) AS rank +FROM ( + SELECT + r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title, + COUNT(*) AS listen_count + FROM listens l + JOIN tracks t ON l.track_id = t.id + JOIN releases_with_title r ON t.release_id = r.id + JOIN artist_releases ar ON r.id = ar.release_id + WHERE ar.artist_id = $5 + AND l.listened_at BETWEEN $1 AND $2 + GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source +) x +ORDER BY listen_count DESC, x.id LIMIT $3 OFFSET $4 ` @@ -319,6 +389,7 @@ type GetTopReleasesFromArtistRow struct { Title string ListenCount int64 Artists []byte + Rank int64 } func (q *Queries) GetTopReleasesFromArtist(ctx context.Context, arg GetTopReleasesFromArtistParams) ([]GetTopReleasesFromArtistRow, error) { @@ -345,6 +416,7 @@ func (q *Queries) GetTopReleasesFromArtist(ctx context.Context, arg GetTopReleas &i.Title, &i.ListenCount, &i.Artists, + &i.Rank, ); err != nil { return nil, err } @@ -358,15 +430,20 @@ func (q *Queries) GetTopReleasesFromArtist(ctx context.Context, arg GetTopReleas const getTopReleasesPaginated = `-- name: GetTopReleasesPaginated :many SELECT - r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title, - COUNT(*) AS listen_count, - get_artists_for_release(r.id) AS artists -FROM listens l -JOIN tracks t ON l.track_id = t.id -JOIN releases_with_title r ON t.release_id = r.id -WHERE l.listened_at BETWEEN 
$1 AND $2 -GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source -ORDER BY listen_count DESC, r.id + x.id, x.musicbrainz_id, x.image, x.various_artists, x.image_source, x.title, x.listen_count, + get_artists_for_release(x.id) AS artists, + RANK() OVER (ORDER BY x.listen_count DESC) AS rank +FROM ( + SELECT + r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title, + COUNT(*) AS listen_count + FROM listens l + JOIN tracks t ON l.track_id = t.id + JOIN releases_with_title r ON t.release_id = r.id + WHERE l.listened_at BETWEEN $1 AND $2 + GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source +) x +ORDER BY listen_count DESC, x.id LIMIT $3 OFFSET $4 ` @@ -386,6 +463,7 @@ type GetTopReleasesPaginatedRow struct { Title string ListenCount int64 Artists []byte + Rank int64 } func (q *Queries) GetTopReleasesPaginated(ctx context.Context, arg GetTopReleasesPaginatedParams) ([]GetTopReleasesPaginatedRow, error) { @@ -411,6 +489,7 @@ func (q *Queries) GetTopReleasesPaginated(ctx context.Context, arg GetTopRelease &i.Title, &i.ListenCount, &i.Artists, + &i.Rank, ); err != nil { return nil, err } diff --git a/internal/repository/track.sql.go b/internal/repository/track.sql.go index c531210..b376198 100644 --- a/internal/repository/track.sql.go +++ b/internal/repository/track.sql.go @@ -155,22 +155,30 @@ func (q *Queries) GetAllTracksFromArtist(ctx context.Context, artistID int32) ([ const getTopTracksByArtistPaginated = `-- name: GetTopTracksByArtistPaginated :many SELECT - t.id, + x.track_id AS id, t.title, t.musicbrainz_id, t.release_id, r.image, - COUNT(*) AS listen_count, - get_artists_for_track(t.id) AS artists -FROM listens l -JOIN tracks_with_title t ON l.track_id = t.id + x.listen_count, + get_artists_for_track(x.track_id) AS artists, + x.rank +FROM ( + SELECT + l.track_id, + COUNT(*) AS listen_count, + RANK() OVER (ORDER BY COUNT(*) DESC) as rank + FROM listens l + JOIN artist_tracks at ON 
l.track_id = at.track_id + WHERE l.listened_at BETWEEN $1 AND $2 + AND at.artist_id = $5 + GROUP BY l.track_id + ORDER BY listen_count DESC + LIMIT $3 OFFSET $4 +) x +JOIN tracks_with_title t ON x.track_id = t.id JOIN releases r ON t.release_id = r.id -JOIN artist_tracks at ON at.track_id = t.id -WHERE l.listened_at BETWEEN $1 AND $2 - AND at.artist_id = $5 -GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image -ORDER BY listen_count DESC, t.id -LIMIT $3 OFFSET $4 +ORDER BY x.listen_count DESC, x.track_id ` type GetTopTracksByArtistPaginatedParams struct { @@ -189,6 +197,7 @@ type GetTopTracksByArtistPaginatedRow struct { Image *uuid.UUID ListenCount int64 Artists []byte + Rank int64 } func (q *Queries) GetTopTracksByArtistPaginated(ctx context.Context, arg GetTopTracksByArtistPaginatedParams) ([]GetTopTracksByArtistPaginatedRow, error) { @@ -214,6 +223,7 @@ func (q *Queries) GetTopTracksByArtistPaginated(ctx context.Context, arg GetTopT &i.Image, &i.ListenCount, &i.Artists, + &i.Rank, ); err != nil { return nil, err } @@ -227,21 +237,30 @@ func (q *Queries) GetTopTracksByArtistPaginated(ctx context.Context, arg GetTopT const getTopTracksInReleasePaginated = `-- name: GetTopTracksInReleasePaginated :many SELECT - t.id, + x.track_id AS id, t.title, t.musicbrainz_id, t.release_id, r.image, - COUNT(*) AS listen_count, - get_artists_for_track(t.id) AS artists -FROM listens l -JOIN tracks_with_title t ON l.track_id = t.id + x.listen_count, + get_artists_for_track(x.track_id) AS artists, + x.rank +FROM ( + SELECT + l.track_id, + COUNT(*) AS listen_count, + RANK() OVER (ORDER BY COUNT(*) DESC) as rank + FROM listens l + JOIN tracks t ON l.track_id = t.id + WHERE l.listened_at BETWEEN $1 AND $2 + AND t.release_id = $5 + GROUP BY l.track_id + ORDER BY listen_count DESC + LIMIT $3 OFFSET $4 +) x +JOIN tracks_with_title t ON x.track_id = t.id JOIN releases r ON t.release_id = r.id -WHERE l.listened_at BETWEEN $1 AND $2 - AND t.release_id = $5 -GROUP BY t.id, 
t.title, t.musicbrainz_id, t.release_id, r.image -ORDER BY listen_count DESC, t.id -LIMIT $3 OFFSET $4 +ORDER BY x.listen_count DESC, x.track_id ` type GetTopTracksInReleasePaginatedParams struct { @@ -260,6 +279,7 @@ type GetTopTracksInReleasePaginatedRow struct { Image *uuid.UUID ListenCount int64 Artists []byte + Rank int64 } func (q *Queries) GetTopTracksInReleasePaginated(ctx context.Context, arg GetTopTracksInReleasePaginatedParams) ([]GetTopTracksInReleasePaginatedRow, error) { @@ -285,6 +305,7 @@ func (q *Queries) GetTopTracksInReleasePaginated(ctx context.Context, arg GetTop &i.Image, &i.ListenCount, &i.Artists, + &i.Rank, ); err != nil { return nil, err } @@ -298,20 +319,28 @@ func (q *Queries) GetTopTracksInReleasePaginated(ctx context.Context, arg GetTop const getTopTracksPaginated = `-- name: GetTopTracksPaginated :many SELECT - t.id, + x.track_id AS id, t.title, t.musicbrainz_id, t.release_id, r.image, - COUNT(*) AS listen_count, - get_artists_for_track(t.id) AS artists -FROM listens l -JOIN tracks_with_title t ON l.track_id = t.id + x.listen_count, + get_artists_for_track(x.track_id) AS artists, + x.rank +FROM ( + SELECT + track_id, + COUNT(*) AS listen_count, + RANK() OVER (ORDER BY COUNT(*) DESC) as rank + FROM listens + WHERE listened_at BETWEEN $1 AND $2 + GROUP BY track_id + ORDER BY listen_count DESC + LIMIT $3 OFFSET $4 +) x +JOIN tracks_with_title t ON x.track_id = t.id JOIN releases r ON t.release_id = r.id -WHERE l.listened_at BETWEEN $1 AND $2 -GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image -ORDER BY listen_count DESC, t.id -LIMIT $3 OFFSET $4 +ORDER BY x.listen_count DESC, x.track_id ` type GetTopTracksPaginatedParams struct { @@ -329,6 +358,7 @@ type GetTopTracksPaginatedRow struct { Image *uuid.UUID ListenCount int64 Artists []byte + Rank int64 } func (q *Queries) GetTopTracksPaginated(ctx context.Context, arg GetTopTracksPaginatedParams) ([]GetTopTracksPaginatedRow, error) { @@ -353,6 +383,7 @@ func (q *Queries) 
GetTopTracksPaginated(ctx context.Context, arg GetTopTracksPag &i.Image, &i.ListenCount, &i.Artists, + &i.Rank, ); err != nil { return nil, err } @@ -399,6 +430,37 @@ func (q *Queries) GetTrack(ctx context.Context, id int32) (GetTrackRow, error) { return i, err } +const getTrackAllTimeRank = `-- name: GetTrackAllTimeRank :one +SELECT + id, + rank +FROM ( + SELECT + x.id, + RANK() OVER (ORDER BY x.listen_count DESC) AS rank + FROM ( + SELECT + t.id, + COUNT(*) AS listen_count + FROM listens l + JOIN tracks_with_title t ON l.track_id = t.id + GROUP BY t.id) x + ) y +WHERE id = $1 +` + +type GetTrackAllTimeRankRow struct { + ID int32 + Rank int64 +} + +func (q *Queries) GetTrackAllTimeRank(ctx context.Context, id int32) (GetTrackAllTimeRankRow, error) { + row := q.db.QueryRow(ctx, getTrackAllTimeRank, id) + var i GetTrackAllTimeRankRow + err := row.Scan(&i.ID, &i.Rank) + return i, err +} + const getTrackByMbzID = `-- name: GetTrackByMbzID :one SELECT id, musicbrainz_id, duration, release_id, title FROM tracks_with_title WHERE musicbrainz_id = $1 LIMIT 1 @@ -417,23 +479,25 @@ func (q *Queries) GetTrackByMbzID(ctx context.Context, musicbrainzID *uuid.UUID) return i, err } -const getTrackByTitleAndArtists = `-- name: GetTrackByTitleAndArtists :one +const getTrackByTrackInfo = `-- name: GetTrackByTrackInfo :one SELECT t.id, t.musicbrainz_id, t.duration, t.release_id, t.title FROM tracks_with_title t JOIN artist_tracks at ON at.track_id = t.id WHERE t.title = $1 - AND at.artist_id = ANY($2::int[]) + AND at.artist_id = ANY($3::int[]) + AND t.release_id = $2 GROUP BY t.id, t.title, t.musicbrainz_id, t.duration, t.release_id -HAVING COUNT(DISTINCT at.artist_id) = cardinality($2::int[]) +HAVING COUNT(DISTINCT at.artist_id) = cardinality($3::int[]) ` -type GetTrackByTitleAndArtistsParams struct { - Title string - Column2 []int32 +type GetTrackByTrackInfoParams struct { + Title string + ReleaseID int32 + Column3 []int32 } -func (q *Queries) GetTrackByTitleAndArtists(ctx 
context.Context, arg GetTrackByTitleAndArtistsParams) (TracksWithTitle, error) { - row := q.db.QueryRow(ctx, getTrackByTitleAndArtists, arg.Title, arg.Column2) +func (q *Queries) GetTrackByTrackInfo(ctx context.Context, arg GetTrackByTrackInfoParams) (TracksWithTitle, error) { + row := q.db.QueryRow(ctx, getTrackByTrackInfo, arg.Title, arg.ReleaseID, arg.Column3) var i TracksWithTitle err := row.Scan( &i.ID, @@ -445,6 +509,48 @@ func (q *Queries) GetTrackByTitleAndArtists(ctx context.Context, arg GetTrackByT return i, err } +const getTracksWithNoDurationButHaveMbzID = `-- name: GetTracksWithNoDurationButHaveMbzID :many +SELECT + id, musicbrainz_id, duration, release_id, title +FROM tracks_with_title +WHERE duration = 0 + AND musicbrainz_id IS NOT NULL + AND id > $2 +ORDER BY id ASC +LIMIT $1 +` + +type GetTracksWithNoDurationButHaveMbzIDParams struct { + Limit int32 + ID int32 +} + +func (q *Queries) GetTracksWithNoDurationButHaveMbzID(ctx context.Context, arg GetTracksWithNoDurationButHaveMbzIDParams) ([]TracksWithTitle, error) { + rows, err := q.db.Query(ctx, getTracksWithNoDurationButHaveMbzID, arg.Limit, arg.ID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []TracksWithTitle + for rows.Next() { + var i TracksWithTitle + if err := rows.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Duration, + &i.ReleaseID, + &i.Title, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const insertTrack = `-- name: InsertTrack :one INSERT INTO tracks (musicbrainz_id, release_id, duration) VALUES ($1, $2, $3) diff --git a/internal/summary/summary.go b/internal/summary/summary.go index 5605f15..7a2b9d7 100644 --- a/internal/summary/summary.go +++ b/internal/summary/summary.go @@ -9,20 +9,20 @@ import ( ) type Summary struct { - Title string `json:"title,omitempty"` - TopArtists []*models.Artist `json:"top_artists"` // ListenCount and TimeListened are 
overriden with stats from timeframe - TopAlbums []*models.Album `json:"top_albums"` // ListenCount and TimeListened are overriden with stats from timeframe - TopTracks []*models.Track `json:"top_tracks"` // ListenCount and TimeListened are overriden with stats from timeframe - MinutesListened int `json:"minutes_listened"` - AvgMinutesPerDay int `json:"avg_minutes_listened_per_day"` - Plays int `json:"plays"` - AvgPlaysPerDay float32 `json:"avg_plays_per_day"` - UniqueTracks int `json:"unique_tracks"` - UniqueAlbums int `json:"unique_albums"` - UniqueArtists int `json:"unique_artists"` - NewTracks int `json:"new_tracks"` - NewAlbums int `json:"new_albums"` - NewArtists int `json:"new_artists"` + Title string `json:"title,omitempty"` + TopArtists []db.RankedItem[*models.Artist] `json:"top_artists"` // ListenCount and TimeListened are overriden with stats from timeframe + TopAlbums []db.RankedItem[*models.Album] `json:"top_albums"` // ListenCount and TimeListened are overriden with stats from timeframe + TopTracks []db.RankedItem[*models.Track] `json:"top_tracks"` // ListenCount and TimeListened are overriden with stats from timeframe + MinutesListened int `json:"minutes_listened"` + AvgMinutesPerDay int `json:"avg_minutes_listened_per_day"` + Plays int `json:"plays"` + AvgPlaysPerDay float32 `json:"avg_plays_per_day"` + UniqueTracks int `json:"unique_tracks"` + UniqueAlbums int `json:"unique_albums"` + UniqueArtists int `json:"unique_artists"` + NewTracks int `json:"new_tracks"` + NewAlbums int `json:"new_albums"` + NewArtists int `json:"new_artists"` } func GenerateSummary(ctx context.Context, store db.DB, userId int32, timeframe db.Timeframe, title string) (summary *Summary, err error) { @@ -30,61 +30,61 @@ func GenerateSummary(ctx context.Context, store db.DB, userId int32, timeframe d summary = new(Summary) - topArtists, err := store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Page: 1, Limit: 5, From: timeframe.T1u, To: timeframe.T2u, Period: timeframe.Period}) + 
topArtists, err := store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Page: 1, Limit: 5, Timeframe: timeframe}) if err != nil { return nil, fmt.Errorf("GenerateSummary: %w", err) } summary.TopArtists = topArtists.Items // replace ListenCount and TimeListened with stats from timeframe for i, artist := range summary.TopArtists { - timelistened, err := store.CountTimeListenedToItem(ctx, db.TimeListenedOpts{ArtistID: artist.ID, Timeframe: timeframe}) + timelistened, err := store.CountTimeListenedToItem(ctx, db.TimeListenedOpts{ArtistID: artist.Item.ID, Timeframe: timeframe}) if err != nil { return nil, fmt.Errorf("GenerateSummary: %w", err) } - listens, err := store.CountListensToItem(ctx, db.TimeListenedOpts{ArtistID: artist.ID, Timeframe: timeframe}) + listens, err := store.CountListensToItem(ctx, db.TimeListenedOpts{ArtistID: artist.Item.ID, Timeframe: timeframe}) if err != nil { return nil, fmt.Errorf("GenerateSummary: %w", err) } - summary.TopArtists[i].TimeListened = timelistened - summary.TopArtists[i].ListenCount = listens + summary.TopArtists[i].Item.TimeListened = timelistened + summary.TopArtists[i].Item.ListenCount = listens } - topAlbums, err := store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Page: 1, Limit: 5, From: timeframe.T1u, To: timeframe.T2u, Period: timeframe.Period}) + topAlbums, err := store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Page: 1, Limit: 5, Timeframe: timeframe}) if err != nil { return nil, fmt.Errorf("GenerateSummary: %w", err) } summary.TopAlbums = topAlbums.Items // replace ListenCount and TimeListened with stats from timeframe for i, album := range summary.TopAlbums { - timelistened, err := store.CountTimeListenedToItem(ctx, db.TimeListenedOpts{AlbumID: album.ID, Timeframe: timeframe}) + timelistened, err := store.CountTimeListenedToItem(ctx, db.TimeListenedOpts{AlbumID: album.Item.ID, Timeframe: timeframe}) if err != nil { return nil, fmt.Errorf("GenerateSummary: %w", err) } - listens, err := store.CountListensToItem(ctx, 
db.TimeListenedOpts{AlbumID: album.ID, Timeframe: timeframe}) + listens, err := store.CountListensToItem(ctx, db.TimeListenedOpts{AlbumID: album.Item.ID, Timeframe: timeframe}) if err != nil { return nil, fmt.Errorf("GenerateSummary: %w", err) } - summary.TopAlbums[i].TimeListened = timelistened - summary.TopAlbums[i].ListenCount = listens + summary.TopAlbums[i].Item.TimeListened = timelistened + summary.TopAlbums[i].Item.ListenCount = listens } - topTracks, err := store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Page: 1, Limit: 5, From: timeframe.T1u, To: timeframe.T2u, Period: timeframe.Period}) + topTracks, err := store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Page: 1, Limit: 5, Timeframe: timeframe}) if err != nil { return nil, fmt.Errorf("GenerateSummary: %w", err) } summary.TopTracks = topTracks.Items // replace ListenCount and TimeListened with stats from timeframe for i, track := range summary.TopTracks { - timelistened, err := store.CountTimeListenedToItem(ctx, db.TimeListenedOpts{TrackID: track.ID, Timeframe: timeframe}) + timelistened, err := store.CountTimeListenedToItem(ctx, db.TimeListenedOpts{TrackID: track.Item.ID, Timeframe: timeframe}) if err != nil { return nil, fmt.Errorf("GenerateSummary: %w", err) } - listens, err := store.CountListensToItem(ctx, db.TimeListenedOpts{TrackID: track.ID, Timeframe: timeframe}) + listens, err := store.CountListensToItem(ctx, db.TimeListenedOpts{TrackID: track.Item.ID, Timeframe: timeframe}) if err != nil { return nil, fmt.Errorf("GenerateSummary: %w", err) } - summary.TopTracks[i].TimeListened = timelistened - summary.TopTracks[i].ListenCount = listens + summary.TopTracks[i].Item.TimeListened = timelistened + summary.TopTracks[i].Item.ListenCount = listens } t1, t2 := db.TimeframeToTimeRange(timeframe) diff --git a/internal/summary/summary.png b/internal/summary/summary.png deleted file mode 100644 index feb096d..0000000 Binary files a/internal/summary/summary.png and /dev/null differ diff --git 
a/internal/utils/utils.go b/internal/utils/utils.go index 905ab41..eb56425 100644 --- a/internal/utils/utils.go +++ b/internal/utils/utils.go @@ -127,6 +127,12 @@ func DateRange(week, month, year int) (time.Time, time.Time, error) { return start, end, nil } +// Returns a time.Time that represents the first moment of the day of t. +func BeginningOfDay(t time.Time) time.Time { + year, month, day := t.Date() + return time.Date(year, month, day, 0, 0, 0, 0, t.Location()) +} + // CopyFile copies a file from src to dst. If src and dst files exist, and are // the same, then return success. Otherise, attempt to create a hard link // between the two files. If that fail, copy the file contents from src to dst. diff --git a/test_assets/koito_export_test.json b/test_assets/koito_export_test.json index b7ce463..e2cd8ea 100644 --- a/test_assets/koito_export_test.json +++ b/test_assets/koito_export_test.json @@ -18,7 +18,7 @@ }, "album": { "image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg", - "mbid": null, + "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161", "aliases": [ { "alias": "American Football (LP3)", @@ -70,7 +70,7 @@ }, "album": { "image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg", - "mbid": null, + "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161", "aliases": [ { "alias": "American Football (LP3)", @@ -122,7 +122,7 @@ }, "album": { "image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg", - "mbid": null, + "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161", "aliases": [ { "alias": "American Football (LP3)", @@ -174,7 +174,7 @@ }, "album": { "image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg", - "mbid": null, + "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161", "aliases": [ { "alias": "American Football (LP3)", @@ -226,7 +226,7 
@@ }, "album": { "image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg", - "mbid": null, + "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161", "aliases": [ { "alias": "American Football (LP3)", @@ -278,7 +278,7 @@ }, "album": { "image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg", - "mbid": null, + "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161", "aliases": [ { "alias": "American Football (LP3)", @@ -330,7 +330,7 @@ }, "album": { "image_url": "https://cdn-images.dzcdn.net/images/cover/1f54d600d0ce5c88a6b2fd75659ec796/1000x1000-000000-80-0-0.jpg", - "mbid": null, + "mbid": "d0ec30bd-7cdc-417c-979d-5a0631b8a161", "aliases": [ { "alias": "American Football (LP3)", @@ -703,4 +703,4 @@ ] } ] -} \ No newline at end of file +} diff --git a/test_assets/listenbrainz_shoko1_123456789.zip b/test_assets/listenbrainz_shoko1_123456789.zip new file mode 100644 index 0000000..14c97a2 Binary files /dev/null and b/test_assets/listenbrainz_shoko1_123456789.zip differ