commit fc9054b78cd03ca8079808ba256c704452f69106 Author: Gabe Farrell Date: Wed Jun 11 19:45:39 2025 -0400 chore: initial public commit diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml new file mode 100644 index 0000000..1e9dfc8 --- /dev/null +++ b/.github/workflows/docker.yml @@ -0,0 +1,56 @@ +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +# GitHub recommends pinning actions to a commit SHA. +# To get a newer version, you will need to update the SHA. +# You can also reference a tag or branch, but the action may change without warning. + +name: Publish Docker image + +on: + release: + types: [published] + +jobs: + push_to_registry: + name: Push Docker image to Docker Hub + runs-on: ubuntu-latest + permissions: + packages: write + contents: read + attestations: write + id-token: write + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Log in to Docker Hub + uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7 + with: + images: gabehf/koito + + - name: Build and push Docker image + id: push + uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 + with: + context: . + file: ./Dockerfile + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + + - name: Generate artifact attestation + uses: actions/attest-build-provenance@v2 + with: + subject-name: index.docker.io/gabehf/koito + subject-digest: ${{ steps.push.outputs.digest }} + push-to-registry: true diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..082b194 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "makefile.configureOnOpen": false +} \ No newline at end of file diff --git a/DEV.md b/DEV.md new file mode 100644 index 0000000..4093280 --- /dev/null +++ b/DEV.md @@ -0,0 +1,14 @@ +# Koito +## Dependencies +### libvips +``` +sudo apt install libvips +``` +## Tools +- goose +- sqlc +## Start dev env +``` +make postgres.run +make api.debug +``` \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..0ae78bb --- /dev/null +++ b/Dockerfile @@ -0,0 +1,43 @@ +FROM node AS frontend + +WORKDIR /client +COPY ./client/package.json ./client/yarn.lock ./ +RUN yarn install +COPY ./client . +ENV BUILD_TARGET=docker +RUN yarn run build + + +FROM golang:1.23 AS backend + +WORKDIR /app + +RUN apt-get update && \ + apt-get install -y libvips-dev pkg-config && \ + rm -rf /var/lib/apt/lists/* + +COPY go.mod go.sum ./ +RUN go mod download + +COPY . . 
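+# Assumption: CGO stays enabled for the build below because the image-processing
+# code links against libvips (provided by the libvips-dev install above).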
+ +RUN CGO_ENABLED=1 GOOS=linux go build -o app ./cmd/api + + +FROM debian:bookworm-slim AS final + +WORKDIR /app + +RUN apt-get update && \ + apt-get install -y libvips42 && \ + rm -rf /var/lib/apt/lists/* + +COPY --from=backend /app/app ./app +COPY --from=frontend /client/build ./client/build +COPY ./client/public ./client/public +COPY ./assets ./assets +COPY ./db ./db + +EXPOSE 4110 + +ENTRYPOINT ["./app"] diff --git a/FEATURES.md b/FEATURES.md new file mode 100644 index 0000000..68d68ee --- /dev/null +++ b/FEATURES.md @@ -0,0 +1,24 @@ +# Must-haves +- scrobble with listenbrainz api +- import from maloja +- import from spotify +- natively host on subdirectory +- good mobile ui +- replace artist/album/track art from ui +- fetch artist/album/track art from lastfm and/or spotify +- edit artist/album/track name in ui (auto-merge colliding names) +- built with being exposed to the internet in mind +- track artist aliases +- hold a cache of musicbrainz responses, with a button to clear it out +# Want +- use musibrainz ids from scrobble to automatically merge plays +- use musicbrainz ids from scrobble to automatically add AKA fields (美波 aka Minami) and sort name +- export playlist m3u8 files based on charts +- track device/player listened from +- webhooks on certain events (every scrobble, listening milestones, etc.) +- Time of day, day of week, etc. graphs +- "pause" mode that temporarily disables recieving scrobbles until turned back on +# Stretch +- "Listening Digest" wrapped-esque digestable recap of the last week/month/year +# Could explore +- Federation/ActivityPub \ No newline at end of file diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..119f70e --- /dev/null +++ b/Makefile @@ -0,0 +1,48 @@ +.PHONY: all test clean client + +db.up: + GOOSE_MIGRATION_DIR=db/migrations GOOSE_DRIVER=postgres GOOSE_DBSTRING=postgres://postgres:secret@localhost:5432 goose up + +db.down: + GOOSE_MIGRATION_DIR=db/migrations GOOSE_DRIVER=postgres GOOSE_DBSTRING=postgres://postgres:secret@localhost:5432 goose down + +db.reset: + GOOSE_MIGRATION_DIR=db/migrations GOOSE_DRIVER=postgres GOOSE_DBSTRING=postgres://postgres:secret@localhost:5432 goose down-to 0 + +db.schemadump: + docker run --rm --network=host --env PGPASSWORD=secret -v "./db:/tmp/dump" \ + postgres pg_dump \ + --schema-only \ + --host=192.168.0.153 \ + --port=5432 \ + --username=postgres \ + -v --dbname="koitodb" -f "/tmp/dump/schema.sql" + +postgres.run: + docker run --name koito-db -p 5432:5432 -e POSTGRES_PASSWORD=secret -d postgres + +postgres.start: + docker start koito-db + +postgres.stop: + docker stop koito-db + +postgres.rm: + docker rm bamsort-db + +api.debug: + KOITO_ALLOWED_HOSTS=* KOITO_LOG_LEVEL=debug KOITO_CONFIG_DIR=test_config_dir KOITO_DATABASE_URL=postgres://postgres:secret@192.168.0.153:5432/koitodb?sslmode=disable go run cmd/api/main.go + +api.test: + go test ./... 
-timeout 60s + +client.dev: + cd client && yarn run dev + +docs.dev: + cd docs && yarn dev + +client.build: + cd client && yarn run build + +test: api.test \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..04369dc --- /dev/null +++ b/README.md @@ -0,0 +1 @@ +# Koito diff --git a/assets/default_img b/assets/default_img new file mode 100644 index 0000000..5fcdd64 Binary files /dev/null and b/assets/default_img differ diff --git a/client/.dockerignore b/client/.dockerignore new file mode 100644 index 0000000..9b8d514 --- /dev/null +++ b/client/.dockerignore @@ -0,0 +1,4 @@ +.react-router +build +node_modules +README.md \ No newline at end of file diff --git a/client/.gitignore b/client/.gitignore new file mode 100644 index 0000000..9b7c041 --- /dev/null +++ b/client/.gitignore @@ -0,0 +1,6 @@ +.DS_Store +/node_modules/ + +# React Router +/.react-router/ +/build/ diff --git a/client/Dockerfile b/client/Dockerfile new file mode 100644 index 0000000..207bf93 --- /dev/null +++ b/client/Dockerfile @@ -0,0 +1,22 @@ +FROM node:20-alpine AS development-dependencies-env +COPY . /app +WORKDIR /app +RUN npm ci + +FROM node:20-alpine AS production-dependencies-env +COPY ./package.json package-lock.json /app/ +WORKDIR /app +RUN npm ci --omit=dev + +FROM node:20-alpine AS build-env +COPY . /app/ +COPY --from=development-dependencies-env /app/node_modules /app/node_modules +WORKDIR /app +RUN npm run build + +FROM node:20-alpine +COPY ./package.json package-lock.json /app/ +COPY --from=production-dependencies-env /app/node_modules /app/node_modules +COPY --from=build-env /app/build /app/build +WORKDIR /app +CMD ["npm", "run", "start"] \ No newline at end of file diff --git a/client/README.md b/client/README.md new file mode 100644 index 0000000..5c4780a --- /dev/null +++ b/client/README.md @@ -0,0 +1,87 @@ +# Welcome to React Router! + +A modern, production-ready template for building full-stack React applications using React Router. + +[![Open in StackBlitz](https://developer.stackblitz.com/img/open_in_stackblitz.svg)](https://stackblitz.com/github/remix-run/react-router-templates/tree/main/default) + +## Features + +- 🚀 Server-side rendering +- ⚡️ Hot Module Replacement (HMR) +- 📦 Asset bundling and optimization +- 🔄 Data loading and mutations +- 🔒 TypeScript by default +- 🎉 TailwindCSS for styling +- 📖 [React Router docs](https://reactrouter.com/) + +## Getting Started + +### Installation + +Install the dependencies: + +```bash +npm install +``` + +### Development + +Start the development server with HMR: + +```bash +npm run dev +``` + +Your application will be available at `http://localhost:5173`. + +## Building for Production + +Create a production build: + +```bash +npm run build +``` + +## Deployment + +### Docker Deployment + +To build and run using Docker: + +```bash +docker build -t my-app . + +# Run the container +docker run -p 3000:3000 my-app +``` + +The containerized application can be deployed to any platform that supports Docker, including: + +- AWS ECS +- Google Cloud Run +- Azure Container Apps +- Digital Ocean App Platform +- Fly.io +- Railway + +### DIY Deployment + +If you're familiar with deploying Node applications, the built-in app server is production-ready. 
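+For example, assuming the template's default `start` script (the same command the
+provided `client/Dockerfile` runs), the built server can be started with:
+
+```bash
+npm run build
+npm run start
+```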
+ +Make sure to deploy the output of `npm run build` + +``` +├── package.json +├── package-lock.json (or pnpm-lock.yaml, or bun.lockb) +├── build/ +│ ├── client/ # Static assets +│ └── server/ # Server-side code +``` + +## Styling + +This template comes with [Tailwind CSS](https://tailwindcss.com/) already configured for a simple default starting experience. You can use whatever CSS framework you prefer. + +--- + +Built with ❤️ using React Router. diff --git a/client/api/api.ts b/client/api/api.ts new file mode 100644 index 0000000..40f4d6f --- /dev/null +++ b/client/api/api.ts @@ -0,0 +1,278 @@ +interface getItemsArgs { + limit: number, + period: string, + page: number, + artist_id?: number, + album_id?: number, + track_id?: number +} +interface getActivityArgs { + step: string + range: number + month: number + year: number + artist_id: number + album_id: number + track_id: number +} + +function getLastListens(args: getItemsArgs): Promise> { + return fetch(`/apis/web/v1/listens?period=${args.period}&limit=${args.limit}&artist_id=${args.artist_id}&album_id=${args.album_id}&track_id=${args.track_id}&page=${args.page}`).then(r => r.json() as Promise>) +} + +function getTopTracks(args: getItemsArgs): Promise> { + if (args.artist_id) { + return fetch(`/apis/web/v1/top-tracks?period=${args.period}&limit=${args.limit}&artist_id=${args.artist_id}&page=${args.page}`).then(r => r.json() as Promise>) + } else if (args.album_id) { + return fetch(`/apis/web/v1/top-tracks?period=${args.period}&limit=${args.limit}&album_id=${args.album_id}&page=${args.page}`).then(r => r.json() as Promise>) + } else { + return fetch(`/apis/web/v1/top-tracks?period=${args.period}&limit=${args.limit}&page=${args.page}`).then(r => r.json() as Promise>) + } +} + +function getTopAlbums(args: getItemsArgs): Promise> { + const baseUri = `/apis/web/v1/top-albums?period=${args.period}&limit=${args.limit}&page=${args.page}` + if (args.artist_id) { + return fetch(baseUri+`&artist_id=${args.artist_id}`).then(r => r.json() as Promise>) + } else { + return fetch(baseUri).then(r => r.json() as Promise>) + } +} + +function getTopArtists(args: getItemsArgs): Promise> { + const baseUri = `/apis/web/v1/top-artists?period=${args.period}&limit=${args.limit}&page=${args.page}` + return fetch(baseUri).then(r => r.json() as Promise>) +} + +function getActivity(args: getActivityArgs): Promise { + return fetch(`/apis/web/v1/listen-activity?step=${args.step}&range=${args.range}&month=${args.month}&year=${args.year}&album_id=${args.album_id}&artist_id=${args.artist_id}&track_id=${args.track_id}`).then(r => r.json() as Promise) +} + +function getStats(period: string): Promise { + return fetch(`/apis/web/v1/stats?period=${period}`).then(r => r.json() as Promise) +} + +function search(q: string): Promise { + return fetch(`/apis/web/v1/search?q=${q}`).then(r => r.json() as Promise) +} + +function imageUrl(id: string, size: string) { + if (!id) { + id = 'default' + } + return `/images/${size}/${id}` +} +function replaceImage(form: FormData): Promise { + return fetch(`/apis/web/v1/replace-image`, { + method: "POST", + body: form, + }) +} + +function mergeTracks(from: number, to: number): Promise { + return fetch(`/apis/web/v1/merge/tracks?from_id=${from}&to_id=${to}`, { + method: "POST", + }) +} +function mergeAlbums(from: number, to: number): Promise { + return fetch(`/apis/web/v1/merge/albums?from_id=${from}&to_id=${to}`, { + method: "POST", + }) +} +function mergeArtists(from: number, to: number): Promise { + return 
fetch(`/apis/web/v1/merge/artists?from_id=${from}&to_id=${to}`, { + method: "POST", + }) +} +function login(username: string, password: string, remember: boolean): Promise { + return fetch(`/apis/web/v1/login?username=${username}&password=${password}&remember_me=${remember}`, { + method: "POST", + }) +} +function logout(): Promise { + return fetch(`/apis/web/v1/logout`, { + method: "POST", + }) +} + +function getApiKeys(): Promise { + return fetch(`/apis/web/v1/user/apikeys`).then((r) => r.json() as Promise) +} +const createApiKey = async (label: string): Promise => { + const r = await fetch(`/apis/web/v1/user/apikeys?label=${label}`, { + method: "POST" + }); + if (!r.ok) { + let errorMessage = `error: ${r.status}`; + try { + const errorData: ApiError = await r.json(); + if (errorData && typeof errorData.error === 'string') { + errorMessage = errorData.error; + } + } catch (e) { + console.error("unexpected api error:", e); + } + throw new Error(errorMessage); + } + const data: ApiKey = await r.json(); + return data; +}; +function deleteApiKey(id: number): Promise { + return fetch(`/apis/web/v1/user/apikeys?id=${id}`, { + method: "DELETE" + }) +} +function updateApiKeyLabel(id: number, label: string): Promise { + return fetch(`/apis/web/v1/user/apikeys?id=${id}&label=${label}`, { + method: "PATCH" + }) +} + +function deleteItem(itemType: string, id: number): Promise { + return fetch(`/apis/web/v1/${itemType}?id=${id}`, { + method: "DELETE" + }) +} +function updateUser(username: string, password: string) { + return fetch(`/apis/web/v1/user?username=${username}&password=${password}`, { + method: "PATCH" + }) +} +function getAliases(type: string, id: number): Promise { + return fetch(`/apis/web/v1/aliases?${type}_id=${id}`).then(r => r.json() as Promise) +} +function createAlias(type: string, id: number, alias: string): Promise { + return fetch(`/apis/web/v1/aliases?${type}_id=${id}&alias=${alias}`, { + method: 'POST' + }) +} +function deleteAlias(type: string, id: number, alias: string): Promise { + return fetch(`/apis/web/v1/aliases?${type}_id=${id}&alias=${alias}`, { + method: "DELETE" + }) +} +function setPrimaryAlias(type: string, id: number, alias: string): Promise { + return fetch(`/apis/web/v1/aliases/primary?${type}_id=${id}&alias=${alias}`, { + method: "POST" + }) +} + +export { + getLastListens, + getTopTracks, + getTopAlbums, + getTopArtists, + getActivity, + getStats, + search, + replaceImage, + mergeTracks, + mergeAlbums, + mergeArtists, + imageUrl, + login, + logout, + deleteItem, + updateUser, + getAliases, + createAlias, + deleteAlias, + setPrimaryAlias, + getApiKeys, + createApiKey, + deleteApiKey, + updateApiKeyLabel, +} +type Track = { + id: number + title: string + artists: SimpleArtists[] + listen_count: number + image: string + album_id: number + musicbrainz_id: string +} +type Artist = { + id: number + name: string + image: string, + aliases: string[] + listen_count: number + musicbrainz_id: string +} +type Album = { + id: number, + title: string + image: string + listen_count: number + is_various_artists: boolean + artists: SimpleArtists[] + musicbrainz_id: string +} +type Alias = { + id: number + alias: string + source: string + is_primary: boolean +} +type Listen = { + time: string, + track: Track, +} +type PaginatedResponse = { + items: T[], + total_record_count: number, + has_next_page: boolean, + current_page: number, + items_per_page: number, +} +type ListenActivityItem = { + start_time: Date, + listens: number +} +type SimpleArtists = { + name: string + id: 
number +} +type Stats = { + listen_count: number + track_count: number + album_count: number + artist_count: number + hours_listened: number +} +type SearchResponse = { + albums: Album[] + artists: Artist[] + tracks: Track[] +} +type User = { + id: number + username: string + role: 'user' | 'admin' +} +type ApiKey = { + id: number + key: string + label: string + created_at: Date +} +type ApiError = { + error: string +} + +export type { + getItemsArgs, + getActivityArgs, + Track, + Artist, + Album, + Listen, + SearchResponse, + PaginatedResponse, + ListenActivityItem, + User, + Alias, + ApiKey, + ApiError +} diff --git a/client/app/app.css b/client/app/app.css new file mode 100644 index 0000000..bbc1200 --- /dev/null +++ b/client/app/app.css @@ -0,0 +1,181 @@ +@import url('https://fonts.googleapis.com/css2?family=Jost:ital,wght@0,100..900;1,100..900&family=League+Spartan:wght@100..900&display=swap'); +@import "tailwindcss"; + +@theme { + --font-sans: "Jost", "Inter", ui-sans-serif, system-ui, sans-serif, + "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"; + --animate-fade-in-scale: fade-in-scale 0.1s ease forwards; + --animate-fade-out-scale: fade-out-scale 0.1s ease forwards; + + @keyframes fade-in-scale { + 0% { + opacity: 0; + transform: scale(0.95); + } + 100% { + opacity: 1; + transform: scale(1); + } + } + + @keyframes fade-out-scale { + 0% { + opacity: 1; + transform: scale(1); + } + 100% { + opacity: 0; + transform: scale(0.95); + } + } + + --animate-fade-in: fade-in 0.1s ease forwards; + --animate-fade-out: fade-out 0.1s ease forwards; + + @keyframes fade-in { + 0% { + opacity: 0; + } + 100% { + opacity: 1; + } + } + + @keyframes fade-out { + 0% { + opacity: 1; + } + 100% { + opacity: 0; + } + } + +} + + + +:root { + --header-xl: 78px; + --header-lg: 28px; + --header-md: 22px; + --header-sm: 16px; + --header-xl-weight: 600; + --header-weight: 600; +} + +html, +body { + background-color: var(--color-bg); + color: var(--color-fg); + transition: background-color 0.3s ease; +} +select option { + margin: 40px; + background: var(--color-bg); + color: var(--color-fg); + text-shadow: 0 1px 0 rgba(0, 0, 0, 0.4); +} +select { + background: var(--color-bg); +} + +/* a { + color: var(--color-fg); +} +a:hover { + color: var(--color-link-hover); +} */ + +h1 { + font-family: "League Spartan"; + font-weight: var(--header-weight); + font-size: var(--header-xl); +} +h2 { + font-family: "League Spartan"; + font-weight: var(--header-weight); + font-size: var(--header-md); + margin-bottom: 0.5em; +} +h3 { + font-family: "League Spartan"; + font-size: var(--header-sm); + font-weight: var(--header-weight); +} +h4 { + font-size: var(--header-md); +} +.header-font { + font-family: "League Spartan"; +} + +.icon-hover-fill:hover > svg > path { + fill: var(--color-fg-secondary); +} +.icon-hover-stroke:hover > svg > path { + stroke: var(--color-fg-secondary); +} + +.link-underline:hover { + text-decoration: underline; +} + +input[type="text"] { + border: 1px solid var(--color-bg); +} +input[type="text"]:focus { + outline: none; + border: 1px solid var(--color-fg-tertiary); +} +input[type="password"] { + border: 1px solid var(--color-bg); +} +input[type="password"]:focus { + outline: none; + border: 1px solid var(--color-fg-tertiary); +} +input[type="checkbox"]:focus { + outline: none; + border: 1px solid var(--color-fg-tertiary); +} + +button:hover { + cursor: pointer; +} +button:disabled:hover, +button[disabled]:hover { + cursor: default; +} + +button.large-button { + background: 
var(--color-bg); +} +button.large-button:hover { + background: var(--color-bg-tertiary); +} +button.large-button:disabled:hover, +button.large-button[disabled]:hover { + background: var(--color-bg); +} + +button.period-selector { + color: var(--color-fg-secondary); +} +button.period-selector:disabled, +button.period-selector[disabled]:hover { + color: var(--color-fg); +} +button.period-selector:hover { + color: var(--color-fg); +} + +button.default { + color: var(--color-fg); +} +button.default:disabled, +button.default[disabled]:hover { + color: var(--color-fg-secondary); +} +button.default:hover { + color: var(--color-fg-secondary); +} \ No newline at end of file diff --git a/client/app/components/ActivityGrid.tsx b/client/app/components/ActivityGrid.tsx new file mode 100644 index 0000000..e9a3cec --- /dev/null +++ b/client/app/components/ActivityGrid.tsx @@ -0,0 +1,185 @@ +import { useQuery } from "@tanstack/react-query" +import { getActivity, type getActivityArgs } from "api/api" +import Popup from "./Popup" +import { useEffect, useState } from "react" +import { useTheme } from "~/hooks/useTheme" +import ActivityOptsSelector from "./ActivityOptsSelector" + +function getPrimaryColor(): string { + const value = getComputedStyle(document.documentElement) + .getPropertyValue('--color-primary') + .trim(); + + const rgbMatch = value.match(/^rgb\(\s*(\d{1,3})\s*,\s*(\d{1,3})\s*,\s*(\d{1,3})\s*\)$/); + if (rgbMatch) { + const [, r, g, b] = rgbMatch.map(Number); + return ( + '#' + + [r, g, b] + .map((n) => n.toString(16).padStart(2, '0')) + .join('') + ); + } + + return value; +} + +interface Props { + step?: string + range?: number + month?: number + year?: number + artistId?: number + albumId?: number + trackId?: number + configurable?: boolean + autoAdjust?: boolean +} + +export default function ActivityGrid({ + step = 'day', + range = 182, + month = 0, + year = 0, + artistId = 0, + albumId = 0, + trackId = 0, + configurable = false, + autoAdjust = false, + }: Props) { + + const [color, setColor] = useState(getPrimaryColor()) + const [stepState, setStep] = useState(step) + const [rangeState, setRange] = useState(range) + + const { isPending, isError, data, error } = useQuery({ + queryKey: [ + 'listen-activity', + { + step: stepState, + range: rangeState, + month: month, + year: year, + artist_id: artistId, + album_id: albumId, + track_id: trackId + }, + ], + queryFn: ({ queryKey }) => getActivity(queryKey[1] as getActivityArgs), + }); + + + const { theme } = useTheme(); + useEffect(() => { + const raf = requestAnimationFrame(() => { + const color = getPrimaryColor() + setColor(color); + }); + + return () => cancelAnimationFrame(raf); + }, [theme]); + + if (isPending) { + return ( +
+

Activity

+

Loading...

+
+ ) + } + if (isError) return

Error: {error.message}

+ + // from https://css-tricks.com/snippets/javascript/lighten-darken-color/ + function LightenDarkenColor(hex: string, lum: number) { + // validate hex string + hex = String(hex).replace(/[^0-9a-f]/gi, ''); + if (hex.length < 6) { + hex = hex[0]+hex[0]+hex[1]+hex[1]+hex[2]+hex[2]; + } + lum = lum || 0; + + // convert to decimal and change luminosity + var rgb = "#", c, i; + for (i = 0; i < 3; i++) { + c = parseInt(hex.substring(i*2,(i*2)+2), 16); + c = Math.round(Math.min(Math.max(0, c + (c * lum)), 255)).toString(16); + rgb += ("00"+c).substring(c.length); + } + + return rgb; + } + + const getDarkenAmount = (v: number, t: number): number => { + + if (autoAdjust) { + // automatically adjust the target value based on step + // the smartest way to do this would be to have the api return the + // highest value in the range. too bad im not smart + switch (stepState) { + case 'day': + t = 10 + break; + case 'week': + t = 20 + break; + case 'month': + t = 50 + break; + case 'year': + t = 100 + break; + } + } + + v = Math.min(v, t) + if (theme === "pearl") { + // special case for the only light theme lol + // could be generalized by pragmatically comparing the + // lightness of the bg vs the primary but eh + return ((t-v) / t) + } else { + return ((v-t) / t) * .8 + } + } + + const dotSize = 12; + + return ( +
+

Activity

+ {configurable ? + + : + '' + } +
+ {data.map((item) => ( +
+ +
0 + ? LightenDarkenColor(color, getDarkenAmount(item.listens, 100)) + : 'var(--color-bg-secondary)', + }} + className={`rounded-[3px] ${item.listens > 0 ? '' : 'border-[0.5px] border-(--color-bg-tertiary)'}`} + >
+
+
+ ))} +
+
+ ); +} diff --git a/client/app/components/ActivityOptsSelector.tsx b/client/app/components/ActivityOptsSelector.tsx new file mode 100644 index 0000000..213f8a6 --- /dev/null +++ b/client/app/components/ActivityOptsSelector.tsx @@ -0,0 +1,98 @@ +import { useEffect } from "react"; + +interface Props { + stepSetter: (value: string) => void; + currentStep: string; + rangeSetter: (value: number) => void; + currentRange: number; + disableCache?: boolean; +} + +export default function ActivityOptsSelector({ + stepSetter, + currentStep, + rangeSetter, + currentRange, + disableCache = false, +}: Props) { + const stepPeriods = ['day', 'week', 'month', 'year']; + const rangePeriods = [105, 182, 365]; + + const stepDisplay = (str: string): string => { + return str.split('_').map(w => + w.split('').map((char, index) => + index === 0 ? char.toUpperCase() : char).join('') + ).join(' '); + }; + + const rangeDisplay = (r: number): string => { + return `${r}` + } + + const setStep = (val: string) => { + stepSetter(val); + if (!disableCache) { + localStorage.setItem('activity_step_' + window.location.pathname.split('/')[1], val); + } + }; + + const setRange = (val: number) => { + rangeSetter(val); + if (!disableCache) { + localStorage.setItem('activity_range_' + window.location.pathname.split('/')[1], String(val)); + } + }; + + useEffect(() => { + if (!disableCache) { + const cachedRange = parseInt(localStorage.getItem('activity_range_' + window.location.pathname.split('/')[1]) ?? '35'); + if (cachedRange) { + rangeSetter(cachedRange); + } + const cachedStep = localStorage.getItem('activity_step_' + window.location.pathname.split('/')[1]); + if (cachedStep) { + stepSetter(cachedStep); + } + } + }, []); + + return ( +
+
+

Step:

+ {stepPeriods.map((p, i) => ( +
+ + + {i !== stepPeriods.length - 1 ? '|' : ''} + +
+ ))} +
+ +
+

Range:

+ {rangePeriods.map((r, i) => ( +
+ + + {i !== rangePeriods.length - 1 ? '|' : ''} + +
+ ))} +
+
+ ); +} diff --git a/client/app/components/AlbumDisplay.tsx b/client/app/components/AlbumDisplay.tsx new file mode 100644 index 0000000..6721199 --- /dev/null +++ b/client/app/components/AlbumDisplay.tsx @@ -0,0 +1,25 @@ +import { imageUrl, type Album } from "api/api"; +import { Link } from "react-router"; + +interface Props { + album: Album + size: number +} + +export default function AlbumDisplay({ album, size }: Props) { + return ( +
+
+ + {album.title} + +
+
+ +

{album.title}

+ +

{album.listen_count} plays

+
+
+ ) +} \ No newline at end of file diff --git a/client/app/components/AllTimeStats.tsx b/client/app/components/AllTimeStats.tsx new file mode 100644 index 0000000..0a54daa --- /dev/null +++ b/client/app/components/AllTimeStats.tsx @@ -0,0 +1,45 @@ +import { useQuery } from "@tanstack/react-query" +import { getStats } from "api/api" + +export default function AllTimeStats() { + + const { isPending, isError, data, error } = useQuery({ + queryKey: ['stats', 'all_time'], + queryFn: ({ queryKey }) => getStats(queryKey[1]), + }) + + if (isPending) { + return ( +
+

All Time Stats

+

Loading...

+
+ ) + } + if (isError) { + return

Error: {error.message}

+ } + + const numberClasses = 'header-font font-bold text-xl' + + return ( +
+

All Time Stats

+
+ {data.hours_listened} Hours Listened +
+
+ {data.listen_count} Plays +
+
+ {data.artist_count} Artists +
+
+ {data.album_count} Albums +
+
+ {data.track_count} Tracks +
+
+ ) +} \ No newline at end of file diff --git a/client/app/components/ArtistAlbums.tsx b/client/app/components/ArtistAlbums.tsx new file mode 100644 index 0000000..c95155a --- /dev/null +++ b/client/app/components/ArtistAlbums.tsx @@ -0,0 +1,51 @@ +import { useQuery } from "@tanstack/react-query" +import { getTopAlbums, imageUrl, type getItemsArgs } from "api/api" +import { Link } from "react-router" + +interface Props { + artistId: number + name: string + period: string +} + +export default function ArtistAlbums({artistId, name, period}: Props) { + + const { isPending, isError, data, error } = useQuery({ + queryKey: ['top-albums', {limit: 99, period: "all_time", artist_id: artistId, page: 0}], + queryFn: ({ queryKey }) => getTopAlbums(queryKey[1] as getItemsArgs), + }) + + if (isPending) { + return ( +
+

Albums From This Artist

+

Loading...

+
+ ) + } + if (isError) { + return ( +
+

Albums From This Artist

+

Error: {error.message}

+
+ ) + } + + return ( +
+

Albums featuring {name}

+
+ {data.items.map((item) => ( + + {item.title} +
+

{item.title}

+

{item.listen_count} play{item.listen_count !== 1 ? 's' : ''}

+
+ + ))} +
+
+ ) +} \ No newline at end of file diff --git a/client/app/components/ArtistLinks.tsx b/client/app/components/ArtistLinks.tsx new file mode 100644 index 0000000..69cff8f --- /dev/null +++ b/client/app/components/ArtistLinks.tsx @@ -0,0 +1,26 @@ +import React from 'react'; +import { Link } from 'react-router'; + +type Artist = { + id: number; + name: string; +}; + +type ArtistLinksProps = { + artists: Artist[]; +}; + +const ArtistLinks: React.FC = ({ artists }) => { + return ( + <> + {artists.map((artist, index) => ( + + {artist.name} + {index < artists.length - 1 ? ', ' : ''} + + ))} + + ); +}; + +export default ArtistLinks; diff --git a/client/app/components/AsyncButton.tsx b/client/app/components/AsyncButton.tsx new file mode 100644 index 0000000..ebff4b5 --- /dev/null +++ b/client/app/components/AsyncButton.tsx @@ -0,0 +1,43 @@ +import React, { useState } from "react" + +type Props = { + children: React.ReactNode + onClick: () => void + loading?: boolean + disabled?: boolean + confirm?: boolean +} + +export function AsyncButton(props: Props) { + const [awaitingConfirm, setAwaitingConfirm] = useState(false) + + const handleClick = () => { + if (props.confirm) { + if (!awaitingConfirm) { + setAwaitingConfirm(true) + setTimeout(() => setAwaitingConfirm(false), 3000) + return + } + setAwaitingConfirm(false) + } + + props.onClick() + } + + return ( + + ) +} diff --git a/client/app/components/Footer.tsx b/client/app/components/Footer.tsx new file mode 100644 index 0000000..16d2c94 --- /dev/null +++ b/client/app/components/Footer.tsx @@ -0,0 +1,13 @@ +import { ExternalLinkIcon } from 'lucide-react' +import pkg from '../../package.json' + +export default function Footer() { + return ( +
+ +
+ ) +} \ No newline at end of file diff --git a/client/app/components/GlobalThemes.tsx b/client/app/components/GlobalThemes.tsx new file mode 100644 index 0000000..81cefe8 --- /dev/null +++ b/client/app/components/GlobalThemes.tsx @@ -0,0 +1,36 @@ +// import { css } from '@emotion/css'; +// import { themes } from '../providers/ThemeProvider'; + +// export default function GlobalThemes() { +// return ( +//
` +// [data-theme=${theme.name}] { +// --color-bg: ${theme.bg}; +// --color-bg-secondary: ${theme.bgSecondary}; +// --color-bg-tertiary:${theme.bgTertiary}; +// --color-fg: ${theme.fg}; +// --color-fg-secondary: ${theme.fgSecondary}; +// --color-fg-tertiary: ${theme.fgTertiary}; +// --color-primary: ${theme.primary}; +// --color-primary-dim: ${theme.primaryDim}; +// --color-secondary: ${theme.secondary}; +// --color-secondary-dim: ${theme.secondaryDim}; +// --color-error: ${theme.error}; +// --color-success: ${theme.success}; +// --color-warning: ${theme.warning}; +// --color-info: ${theme.info}; +// --color-border: var(--color-bg-tertiary); +// --color-shadow: rgba(0, 0, 0, 0.5); +// --color-link: var(--color-primary); +// --color-link-hover: var(--color-primary-dim); +// } +// `).join('\n') +// } +// `} +// /> +// ) +// } \ No newline at end of file diff --git a/client/app/components/ImageDropHandler.tsx b/client/app/components/ImageDropHandler.tsx new file mode 100644 index 0000000..8557ff9 --- /dev/null +++ b/client/app/components/ImageDropHandler.tsx @@ -0,0 +1,53 @@ +import { replaceImage } from 'api/api'; +import { useEffect } from 'react'; + +interface Props { + itemType: string, + id: number, + onComplete: Function +} + +export default function ImageDropHandler({ itemType, id, onComplete }: Props) { + useEffect(() => { + const handleDragOver = (e: DragEvent) => { + console.log('dragover!!') + e.preventDefault(); + }; + + const handleDrop = async (e: DragEvent) => { + e.preventDefault(); + if (!e.dataTransfer?.files.length) return; + + const imageFile = Array.from(e.dataTransfer.files).find(file => + file.type.startsWith('image/') + ); + if (!imageFile) return; + + const formData = new FormData(); + formData.append('image', imageFile); + formData.append(itemType.toLowerCase()+'_id', String(id)) + replaceImage(formData).then((r) => { + if (r.status >= 200 && r.status < 300) { + onComplete() + console.log("Replacement image uploaded successfully") + } else { + r.json().then((body) => { + console.log(`Upload failed: ${r.statusText} - ${body}`) + }) + } + }).catch((err) => { + console.log(`Upload failed: ${err}`) + }) + }; + + window.addEventListener('dragover', handleDragOver); + window.addEventListener('drop', handleDrop); + + return () => { + window.removeEventListener('dragover', handleDragOver); + window.removeEventListener('drop', handleDrop); + }; + }, []); + + return null; +} diff --git a/client/app/components/LastPlays.tsx b/client/app/components/LastPlays.tsx new file mode 100644 index 0000000..056042b --- /dev/null +++ b/client/app/components/LastPlays.tsx @@ -0,0 +1,57 @@ +import { useQuery } from "@tanstack/react-query" +import { timeSince } from "~/utils/utils" +import ArtistLinks from "./ArtistLinks" +import { getLastListens, type getItemsArgs } from "api/api" +import { Link } from "react-router" + +interface Props { + limit: number + artistId?: Number + albumId?: Number + trackId?: number + hideArtists?: boolean +} + +export default function LastPlays(props: Props) { + + const { isPending, isError, data, error } = useQuery({ + queryKey: ['last-listens', {limit: props.limit, period: 'all_time', artist_id: props.artistId, album_id: props.albumId, track_id: props.trackId}], + queryFn: ({ queryKey }) => getLastListens(queryKey[1] as getItemsArgs), + }) + + if (isPending) { + return ( +
+

Last Played

+

Loading...

+
+ ) + } + if (isError) { + return

Error: {error.message}

+ } + + let params = '' + params += props.artistId ? `&artist_id=${props.artistId}` : '' + params += props.albumId ? `&album_id=${props.albumId}` : '' + params += props.trackId ? `&track_id=${props.trackId}` : '' + + return ( +
+

Last Played

+ + + {data.items.map((item) => ( + + + + + ))} + +
{timeSince(new Date(item.time))} + {props.hideArtists ? <> : <> - } + {item.track.title} +
+
+ ) +} \ No newline at end of file diff --git a/client/app/components/PeriodSelector.tsx b/client/app/components/PeriodSelector.tsx new file mode 100644 index 0000000..91bad9a --- /dev/null +++ b/client/app/components/PeriodSelector.tsx @@ -0,0 +1,52 @@ +import { useEffect } from "react" + +interface Props { + setter: Function + current: string + disableCache?: boolean +} + +export default function PeriodSelector({ setter, current, disableCache = false }: Props) { + const periods = ['day', 'week', 'month', 'year', 'all_time'] + + const periodDisplay = (str: string) => { + return str.split('_').map(w => w.split('').map((char, index) => + index === 0 ? char.toUpperCase() : char).join('')).join(' ') + } + + const setPeriod = (val: string) => { + setter(val) + if (!disableCache) { + localStorage.setItem('period_selection_'+window.location.pathname.split('/')[1], val) + } + } + + useEffect(() => { + if (!disableCache) { + const cached = localStorage.getItem('period_selection_' + window.location.pathname.split('/')[1]); + if (cached) { + setter(cached); + } + } + }, []); + + return ( +
+

Showing stats for:

+ {periods.map((p, i) => ( +
+ + + {i !== periods.length - 1 ? '|' : ''} + +
+ ))} +
+ ) +} \ No newline at end of file diff --git a/client/app/components/Popup.tsx b/client/app/components/Popup.tsx new file mode 100644 index 0000000..3c73cb5 --- /dev/null +++ b/client/app/components/Popup.tsx @@ -0,0 +1,48 @@ +import React, { type PropsWithChildren, useState } from 'react'; + +interface Props { + inner: React.ReactNode + position: string + space: number + extraClasses?: string + hint?: string +} + +export default function Popup({ inner, position, space, extraClasses, children }: PropsWithChildren) { + const [isVisible, setIsVisible] = useState(false); + + let positionClasses + let spaceCSS = {} + if (position == "top") { + positionClasses = `top-${space} -bottom-2 -translate-y-1/2 -translate-x-1/2` + } else if (position == "right") { + positionClasses = `bottom-1 -translate-x-1/2` + spaceCSS = {left: 70 + space} + } + + return ( +
setIsVisible(true)} + onMouseLeave={() => setIsVisible(false)} + > + {children} +
+ {inner} +
+
+ ); +} diff --git a/client/app/components/SearchResultItem.tsx b/client/app/components/SearchResultItem.tsx new file mode 100644 index 0000000..fc95696 --- /dev/null +++ b/client/app/components/SearchResultItem.tsx @@ -0,0 +1,23 @@ +import { Link } from "react-router" + +interface Props { + to: string + onClick: React.MouseEventHandler + img: string + text: string + subtext?: string +} + +export default function SearchResultItem(props: Props) { + return ( + + {props.text} +
+ {props.text} + {props.subtext ? <>
+ {props.subtext} + : ''} +
+ + ) +} \ No newline at end of file diff --git a/client/app/components/SearchResultSelectorItem.tsx b/client/app/components/SearchResultSelectorItem.tsx new file mode 100644 index 0000000..5a9c773 --- /dev/null +++ b/client/app/components/SearchResultSelectorItem.tsx @@ -0,0 +1,31 @@ +import { Check } from "lucide-react" +import CheckCircleIcon from "./icons/CheckCircleIcon" + +interface Props { + id: number + onClick: React.MouseEventHandler + img: string + text: string + subtext?: string + active: boolean +} + +export default function SearchResultSelectorItem(props: Props) { + return ( + + ) +} \ No newline at end of file diff --git a/client/app/components/SearchResults.tsx b/client/app/components/SearchResults.tsx new file mode 100644 index 0000000..c0269e8 --- /dev/null +++ b/client/app/components/SearchResults.tsx @@ -0,0 +1,107 @@ +import { imageUrl, type SearchResponse } from "api/api" +import { useState } from "react" +import SearchResultItem from "./SearchResultItem" +import SearchResultSelectorItem from "./SearchResultSelectorItem" + +interface Props { + data?: SearchResponse + onSelect: Function + selectorMode?: boolean +} +export default function SearchResults({ data, onSelect, selectorMode }: Props) { + const [selected, setSelected] = useState(0) + const classes = "flex flex-col items-start bg rounded w-full" + const hClasses = "pt-4 pb-2" + + const selectItem = (title: string, id: number) => { + if (selected === id) { + setSelected(0) + onSelect({id: id, title: title}) + } else { + setSelected(id) + onSelect({id: id, title: title}) + } + } + + if (data === undefined) { + return <> + } + return ( +
+ { data.artists.length > 0 && + <> +

Artists

+
+ {data.artists.map((artist) => ( + selectorMode ? + selectItem(artist.name, artist.id)} + text={artist.name} + img={imageUrl(artist.image, "small")} + active={selected === artist.id} + /> : + onSelect(artist.id)} + text={artist.name} + img={imageUrl(artist.image, "small")} + /> + + ))} +
+ + } + { data.albums.length > 0 && + <> +

Albums

+
+ {data.albums.map((album) => ( + selectorMode ? + selectItem(album.title, album.id)} + text={album.title} + subtext={album.is_various_artists ? "Various Artists" : album.artists[0].name} + img={imageUrl(album.image, "small")} + active={selected === album.id} + /> : + onSelect(album.id)} + text={album.title} + subtext={album.is_various_artists ? "Various Artists" : album.artists[0].name} + img={imageUrl(album.image, "small")} + /> + ))} +
+ + } + { data.tracks.length > 0 && + <> +

Tracks

+
+ {data.tracks.map((track) => ( + selectorMode ? + selectItem(track.title, track.id)} + text={track.title} + subtext={track.artists.map((a) => a.name).join(', ')} + img={imageUrl(track.image, "small")} + active={selected === track.id} + /> : + onSelect(track.id)} + text={track.title} + subtext={track.artists.map((a) => a.name).join(', ')} + img={imageUrl(track.image, "small")} + /> + ))} +
+ + } +
+ ) +} \ No newline at end of file diff --git a/client/app/components/TopAlbums.tsx b/client/app/components/TopAlbums.tsx new file mode 100644 index 0000000..4ae87bd --- /dev/null +++ b/client/app/components/TopAlbums.tsx @@ -0,0 +1,42 @@ +import { useQuery } from "@tanstack/react-query" +import ArtistLinks from "./ArtistLinks" +import { getTopAlbums, getTopTracks, imageUrl, type getItemsArgs } from "api/api" +import { Link } from "react-router" +import TopListSkeleton from "./skeletons/TopListSkeleton" +import TopItemList from "./TopItemList" + +interface Props { + limit: number, + period: string, + artistId?: Number +} + +export default function TopAlbums (props: Props) { + + const { isPending, isError, data, error } = useQuery({ + queryKey: ['top-albums', {limit: props.limit, period: props.period, artistId: props.artistId, page: 0 }], + queryFn: ({ queryKey }) => getTopAlbums(queryKey[1] as getItemsArgs), + }) + + if (isPending) { + return ( +
+

Top Albums

+

Loading...

+
+ ) + } + if (isError) { + return

Error: {error.message}

+ } + + return ( +
+

Top Albums

+
+ + {data.items.length < 1 ? 'Nothing to show' : ''} +
+
+ ) +} \ No newline at end of file diff --git a/client/app/components/TopArtists.tsx b/client/app/components/TopArtists.tsx new file mode 100644 index 0000000..1c7b719 --- /dev/null +++ b/client/app/components/TopArtists.tsx @@ -0,0 +1,43 @@ +import { useQuery } from "@tanstack/react-query" +import ArtistLinks from "./ArtistLinks" +import { getTopArtists, imageUrl, type getItemsArgs } from "api/api" +import { Link } from "react-router" +import TopListSkeleton from "./skeletons/TopListSkeleton" +import TopItemList from "./TopItemList" + +interface Props { + limit: number, + period: string, + artistId?: Number + albumId?: Number +} + +export default function TopArtists (props: Props) { + + const { isPending, isError, data, error } = useQuery({ + queryKey: ['top-artists', {limit: props.limit, period: props.period, page: 0 }], + queryFn: ({ queryKey }) => getTopArtists(queryKey[1] as getItemsArgs), + }) + + if (isPending) { + return ( +
+

Top Artists

+

Loading...

+
+ ) + } + if (isError) { + return

Error: {error.message}

+ } + + return ( +
+

Top Artists

+
+ + {data.items.length < 1 ? 'Nothing to show' : ''} +
+
+ ) +} \ No newline at end of file diff --git a/client/app/components/TopItemList.tsx b/client/app/components/TopItemList.tsx new file mode 100644 index 0000000..7f68c9e --- /dev/null +++ b/client/app/components/TopItemList.tsx @@ -0,0 +1,142 @@ +import { Link, useNavigate } from "react-router"; +import ArtistLinks from "./ArtistLinks"; +import { imageUrl, type Album, type Artist, type Track, type PaginatedResponse } from "api/api"; + +type Item = Album | Track | Artist; + +interface Props { + data: PaginatedResponse + separators?: ConstrainBoolean + width?: number + type: "album" | "track" | "artist"; +} + +export default function TopItemList({ data, separators, type, width }: Props) { + + return ( +
+ {data.items.map((item, index) => { + const key = `${type}-${item.id}`; + return ( +
+ +
+ ); + })} +
+ ); +} + +function ItemCard({ item, type }: { item: Item; type: "album" | "track" | "artist" }) { + + const itemClasses = `flex items-center gap-2 hover:text-(--color-fg-secondary)` + + const navigate = useNavigate(); + + const handleItemClick = (type: string, id: number) => { + navigate(`/${type.toLowerCase()}/${id}`); + }; + + const handleArtistClick = (event: React.MouseEvent) => { + // Stop the click from navigating to the album page + event.stopPropagation(); + }; + + // Also stop keyboard events on the inner links from bubbling up + const handleArtistKeyDown = (event: React.KeyboardEvent) => { + event.stopPropagation(); + } + + switch (type) { + case "album": { + const album = item as Album; + + const handleKeyDown = (event: React.KeyboardEvent) => { + if (event.key === 'Enter') { + handleItemClick("album", album.id); + } + }; + + return ( +
+
handleItemClick("album", album.id)} + onKeyDown={handleKeyDown} + role="link" + tabIndex={0} + aria-label={`View album: ${album.title}`} + style={{ cursor: 'pointer' }} + > + {album.title} +
+ {album.title} +
+ {album.is_various_artists ? + Various Artists + : +
+ +
+ } +
{album.listen_count} plays
+
+
+
+ ); + } + case "track": { + const track = item as Track; + + const handleKeyDown = (event: React.KeyboardEvent) => { + if (event.key === 'Enter') { + handleItemClick("track", track.id); + } + }; + + return ( +
+
handleItemClick("track", track.id)} + onKeyDown={handleKeyDown} + role="link" + tabIndex={0} + aria-label={`View track: ${track.title}`} + style={{ cursor: 'pointer' }} + > + {track.title} +
+ {track.title} +
+
+ +
+
{track.listen_count} plays
+
+
+
+ ); + } + case "artist": { + const artist = item as Artist; + return ( +
+ + {artist.name} +
+ {artist.name} +
{artist.listen_count} plays
+
+ +
+ ); + } + } +} diff --git a/client/app/components/TopThreeAlbums.tsx b/client/app/components/TopThreeAlbums.tsx new file mode 100644 index 0000000..c5136e4 --- /dev/null +++ b/client/app/components/TopThreeAlbums.tsx @@ -0,0 +1,38 @@ +import { useQuery } from "@tanstack/react-query" +import { getTopAlbums, type getItemsArgs } from "api/api" +import AlbumDisplay from "./AlbumDisplay" + +interface Props { + period: string + artistId?: Number + vert?: boolean + hideTitle?: boolean +} + +export default function TopThreeAlbums(props: Props) { + + const { isPending, isError, data, error } = useQuery({ + queryKey: ['top-albums', {limit: 3, period: props.period, artist_id: props.artistId, page: 0}], + queryFn: ({ queryKey }) => getTopAlbums(queryKey[1] as getItemsArgs), + }) + + if (isPending) { + return

Loading...

+ } + if (isError) { + return

Error: {error.message}

+ } + + console.log(data) + + return ( +
+ {!props.hideTitle &&

Top Three Albums

} +
+ {data.items.map((item, index) => ( + + ))} +
+
+ ) +} \ No newline at end of file diff --git a/client/app/components/TopTracks.tsx b/client/app/components/TopTracks.tsx new file mode 100644 index 0000000..b1d14c7 --- /dev/null +++ b/client/app/components/TopTracks.tsx @@ -0,0 +1,50 @@ +import { useQuery } from "@tanstack/react-query" +import ArtistLinks from "./ArtistLinks" +import { getTopTracks, imageUrl, type getItemsArgs } from "api/api" +import { Link } from "react-router" +import TopListSkeleton from "./skeletons/TopListSkeleton" +import { useEffect } from "react" +import TopItemList from "./TopItemList" + +interface Props { + limit: number, + period: string, + artistId?: Number + albumId?: Number +} + +const TopTracks = (props: Props) => { + + const { isPending, isError, data, error } = useQuery({ + queryKey: ['top-tracks', {limit: props.limit, period: props.period, artist_id: props.artistId, album_id: props.albumId, page: 0}], + queryFn: ({ queryKey }) => getTopTracks(queryKey[1] as getItemsArgs), + }) + + if (isPending) { + return ( +
+

Top Tracks

+

Loading...

+
+ ) + } + if (isError) { + return

Error: {error.message}

+ } + + let params = '' + params += props.artistId ? `&artist_id=${props.artistId}` : '' + params += props.albumId ? `&album_id=${props.albumId}` : '' + + return ( +
+

Top Tracks

+
+ + {data.items.length < 1 ? 'Nothing to show' : ''} +
+
+ ) +} + +export default TopTracks \ No newline at end of file diff --git a/client/app/components/icons/ChartIcon.tsx b/client/app/components/icons/ChartIcon.tsx new file mode 100644 index 0000000..4695a36 --- /dev/null +++ b/client/app/components/icons/ChartIcon.tsx @@ -0,0 +1,16 @@ +interface Props { + size: number, + hover?: boolean, +} +export default function ChartIcon({size, hover}: Props) { + let classNames = "" + if (hover) { + classNames += "icon-hover-stroke" + } + return ( +
+ + +
+ ) +} \ No newline at end of file diff --git a/client/app/components/icons/CheckCircleIcon.tsx b/client/app/components/icons/CheckCircleIcon.tsx new file mode 100644 index 0000000..3340ac7 --- /dev/null +++ b/client/app/components/icons/CheckCircleIcon.tsx @@ -0,0 +1,16 @@ +interface Props { + size: number, + hover?: boolean, + color?: string +} +export default function CheckCircleIcon({size, hover, color}: Props) { + let classNames = "" + if (hover) { + classNames += "icon-hover-fill" + } + return ( +
+ +
+ ) +} \ No newline at end of file diff --git a/client/app/components/icons/GraphIcon.tsx b/client/app/components/icons/GraphIcon.tsx new file mode 100644 index 0000000..ce3be02 --- /dev/null +++ b/client/app/components/icons/GraphIcon.tsx @@ -0,0 +1,17 @@ +interface Props { + size: number, + hover?: boolean, +} +export default function GraphIcon({size, hover}: Props) { + let classNames = "" + if (hover) { + classNames += "icon-hover-stroke" + } + return ( +
+ + + +
+ ) +} \ No newline at end of file diff --git a/client/app/components/icons/HomeIcon.tsx b/client/app/components/icons/HomeIcon.tsx new file mode 100644 index 0000000..646f59f --- /dev/null +++ b/client/app/components/icons/HomeIcon.tsx @@ -0,0 +1,16 @@ +interface Props { + size: number, + hover?: boolean, +} +export default function HomeIcon({size, hover}: Props) { + let classNames = "" + if (hover) { + classNames += "icon-hover-fill" + } + return ( +
+ + +
+ ) +} \ No newline at end of file diff --git a/client/app/components/icons/ImageIcon.tsx b/client/app/components/icons/ImageIcon.tsx new file mode 100644 index 0000000..d1676eb --- /dev/null +++ b/client/app/components/icons/ImageIcon.tsx @@ -0,0 +1,16 @@ +interface Props { + size: number, + hover?: boolean, +} +export default function ImageIcon({size, hover}: Props) { + let classNames = "" + if (hover) { + classNames += "icon-hover-stroke" + } + return ( +
+ + +
+ ) +} \ No newline at end of file diff --git a/client/app/components/icons/MergeIcon.tsx b/client/app/components/icons/MergeIcon.tsx new file mode 100644 index 0000000..d1ef27a --- /dev/null +++ b/client/app/components/icons/MergeIcon.tsx @@ -0,0 +1,15 @@ +interface Props { + size: number, + hover?: boolean, +} +export default function MergeIcon({size, hover}: Props) { + let classNames = "" + if (hover) { + classNames += "icon-hover-fill" + } + return ( +
+ +
+ ) +} diff --git a/client/app/components/icons/SearchIcon.tsx b/client/app/components/icons/SearchIcon.tsx new file mode 100644 index 0000000..3e40b68 --- /dev/null +++ b/client/app/components/icons/SearchIcon.tsx @@ -0,0 +1,16 @@ +interface Props { + size: number, + hover?: boolean, +} +export default function SearchIcon({size, hover}: Props) { + let classNames = "" + if (hover) { + classNames += "icon-hover-stroke" + } + return ( +
+ + +
+ ) +} \ No newline at end of file diff --git a/client/app/components/modals/Account.tsx b/client/app/components/modals/Account.tsx new file mode 100644 index 0000000..a6cb918 --- /dev/null +++ b/client/app/components/modals/Account.tsx @@ -0,0 +1,97 @@ +import { logout, updateUser } from "api/api" +import { useState } from "react" +import { AsyncButton } from "../AsyncButton" +import { useAppContext } from "~/providers/AppProvider" + +export default function Account() { + const [username, setUsername] = useState('') + const [password, setPassword] = useState('') + const [confirmPw, setConfirmPw] = useState('') + const [loading, setLoading] = useState(false) + const [error, setError] = useState('') + const [success, setSuccess] = useState('') + const { user, setUsername: setCtxUsername } = useAppContext() + + const logoutHandler = () => { + setLoading(true) + logout() + .then(r => { + if (r.ok) { + window.location.reload() + } else { + r.json().then(r => setError(r.error)) + } + }).catch(err => setError(err)) + setLoading(false) + } + const updateHandler = () => { + if (password != "" && confirmPw === "") { + setError("confirm your password before submitting") + return + } + setError('') + setSuccess('') + setLoading(true) + updateUser(username, password) + .then(r => { + if (r.ok) { + setSuccess("sucessfully updated user") + if (username != "") { + setCtxUsername(username) + } + setUsername('') + setPassword('') + setConfirmPw('') + } else { + r.json().then((r) => setError(r.error)) + } + }).catch(err => setError(err)) + setLoading(false) + } + + return ( + <> +

Account

+
+
+

You're logged in as {user?.username}

+ Logout +
+

Update User

+
+ setUsername(e.target.value)} + /> +
+
+ setPassword(e.target.value)} + /> + setConfirmPw(e.target.value)} + /> +
+
+ Submit +
+ {success != "" &&

{success}

} + {error != "" &&

{error}

} +
+ + ) +} \ No newline at end of file diff --git a/client/app/components/modals/AccountPage.tsx b/client/app/components/modals/AccountPage.tsx new file mode 100644 index 0000000..71d5779 --- /dev/null +++ b/client/app/components/modals/AccountPage.tsx @@ -0,0 +1,17 @@ +import { useAppContext } from "~/providers/AppProvider" +import LoginForm from "./LoginForm" +import Account from "./Account" + +export default function AuthForm() { + const { user } = useAppContext() + + return ( + <> + { user ? + + : + + } + + ) +} \ No newline at end of file diff --git a/client/app/components/modals/ApiKeysModal.tsx b/client/app/components/modals/ApiKeysModal.tsx new file mode 100644 index 0000000..43e242e --- /dev/null +++ b/client/app/components/modals/ApiKeysModal.tsx @@ -0,0 +1,129 @@ +import { useQuery } from "@tanstack/react-query"; +import { createApiKey, deleteApiKey, getApiKeys, type ApiKey } from "api/api"; +import { AsyncButton } from "../AsyncButton"; +import { useEffect, useState } from "react"; +import { Copy, Trash } from "lucide-react"; + +type CopiedState = { + x: number; + y: number; + visible: boolean; +}; + +export default function ApiKeysModal() { + const [input, setInput] = useState('') + const [loading, setLoading ] = useState(false) + const [err, setError ] = useState() + const [displayData, setDisplayData] = useState([]) + const [copied, setCopied] = useState(null); + + const { isPending, isError, data, error } = useQuery({ + queryKey: [ + 'api-keys' + ], + queryFn: () => { + return getApiKeys(); + }, + }); + + useEffect(() => { + if (data) { + setDisplayData(data) + } + }, [data]) + + if (isError) { + return ( +

Error: {error.message}

+ ) + } + if (isPending) { + return ( +

Loading...

+ ) + } + + const handleCopy = (e: React.MouseEvent, text: string) => { + navigator.clipboard.writeText(text); + + const parentRect = (e.currentTarget.closest(".relative") as HTMLElement).getBoundingClientRect(); + const buttonRect = e.currentTarget.getBoundingClientRect(); + + setCopied({ + x: buttonRect.left - parentRect.left + buttonRect.width / 2, // center of button + y: buttonRect.top - parentRect.top - 8, // above the button + visible: true, + }); + + setTimeout(() => setCopied(null), 1500); + }; + + const handleCreateApiKey = () => { + setError(undefined) + if (input === "") { + setError("a label must be provided") + return + } + setLoading(true) + createApiKey(input) + .then(r => { + setDisplayData([r, ...displayData]) + setInput('') + }).catch((err) => setError(err.message)) + setLoading(false) + } + + const handleDeleteApiKey = (id: number) => { + setError(undefined) + setLoading(true) + deleteApiKey(id) + .then(r => { + if (r.ok) { + setDisplayData(displayData.filter((v) => v.id != id)) + } else { + r.json().then((r) => setError(r.error)) + } + }) + setLoading(false) + + } + + return ( +
+

API Keys

+
+ {displayData.map((v) => ( +
+
{v.key.slice(0, 8)+'...'} {v.label}
+ + handleDeleteApiKey(v.id)} confirm> +
+ ))} +
+ setInput(e.target.value)} + /> + Create +
+ {err &&

{err}

} + {copied?.visible && ( +
+ Copied! +
+ )} +
+
+ ) +} \ No newline at end of file diff --git a/client/app/components/modals/DeleteModal.tsx b/client/app/components/modals/DeleteModal.tsx new file mode 100644 index 0000000..98304ad --- /dev/null +++ b/client/app/components/modals/DeleteModal.tsx @@ -0,0 +1,40 @@ +import { deleteItem } from "api/api" +import { AsyncButton } from "../AsyncButton" +import { Modal } from "./Modal" +import { useNavigate } from "react-router" +import { useState } from "react" + +interface Props { + open: boolean + setOpen: Function + title: string, + id: number, + type: string +} + +export default function DeleteModal({ open, setOpen, title, id, type }: Props) { + const [loading, setLoading] = useState(false) + const navigate = useNavigate() + + const doDelete = () => { + setLoading(true) + deleteItem(type.toLowerCase(), id) + .then(r => { + if (r.ok) { + navigate('/') + } else { + console.log(r) + } + }) + } + + return ( + setOpen(false)}> +

Delete "{title}"?

+

This action is irreversible!

+
+ Yes, Delete It +
+
+ ) +} \ No newline at end of file diff --git a/client/app/components/modals/ImageReplaceModal.tsx b/client/app/components/modals/ImageReplaceModal.tsx new file mode 100644 index 0000000..d76dd61 --- /dev/null +++ b/client/app/components/modals/ImageReplaceModal.tsx @@ -0,0 +1,90 @@ +import { useEffect, useState } from "react"; +import { Modal } from "./Modal"; +import { replaceImage, search, type SearchResponse } from "api/api"; +import SearchResults from "../SearchResults"; +import { AsyncButton } from "../AsyncButton"; + +interface Props { + type: string + id: number + musicbrainzId?: string + open: boolean + setOpen: Function +} + +export default function ImageReplaceModal({ musicbrainzId, type, id, open, setOpen }: Props) { + const [query, setQuery] = useState(''); + const [loading, setLoading] = useState(false) + const [suggestedImgLoading, setSuggestedImgLoading] = useState(true) + + const doImageReplace = (url: string) => { + setLoading(true) + const formData = new FormData + formData.set(`${type.toLowerCase()}_id`, id.toString()) + formData.set("image_url", url) + replaceImage(formData) + .then((r) => { + if (r.ok) { + window.location.reload() + } else { + console.log(r) + setLoading(false) + } + }) + .catch((err) => console.log(err)) + } + + const closeModal = () => { + setOpen(false) + setQuery('') + } + + return ( + +

Replace Image

+
+ setQuery(e.target.value)} + /> + { query != "" ? +
+ doImageReplace(query)}>Submit +
: + ''} + { type === "Album" && musicbrainzId ? + <> +

Suggested Image (Click to Apply)

+ + + : '' + } +
+
+    )
+}
\ No newline at end of file
diff --git a/client/app/components/modals/LoginForm.tsx b/client/app/components/modals/LoginForm.tsx
new file mode 100644
index 0000000..2c2afc6
--- /dev/null
+++ b/client/app/components/modals/LoginForm.tsx
@@ -0,0 +1,59 @@
+import { login } from "api/api"
+import { useEffect, useState } from "react"
+import { AsyncButton } from "../AsyncButton"
+
+export default function LoginForm() {
+    const [loading, setLoading] = useState(false)
+    const [error, setError] = useState('')
+    const [username, setUsername] = useState('')
+    const [password, setPassword] = useState('')
+    const [remember, setRemember] = useState(false)
+
+    const loginHandler = () => {
+        if (username && password) {
+            setLoading(true)
+            login(username, password, remember)
+                .then(r => {
+                    if (r.status >= 200 && r.status < 300) {
+                        window.location.reload()
+                    } else {
+                        r.json().then(r => setError(r.error))
+                    }
+                })
+                // render a readable message rather than the Error object itself
+                .catch(err => setError(err.message))
+                // only clear the loading flag once the request has settled
+                .finally(() => setLoading(false))
+        } else {
+            // covers the all-empty case as well as a single missing field
+            setError("username and password are required")
+        }
+    }
+
+    return (
+        <>

Log In

+
+

Logging in gives you access to admin tools, such as updating images, merging items, deleting items, and more.

+
e.preventDefault()}> + setUsername(e.target.value)} + /> + setPassword(e.target.value)} + /> +
+ setRemember(!remember)} /> + +
+ Login +
+

{error}

+
+ + ) +} \ No newline at end of file diff --git a/client/app/components/modals/MergeModal.tsx b/client/app/components/modals/MergeModal.tsx new file mode 100644 index 0000000..d034a52 --- /dev/null +++ b/client/app/components/modals/MergeModal.tsx @@ -0,0 +1,125 @@ +import { useEffect, useState } from "react"; +import { Modal } from "./Modal"; +import { search, type SearchResponse } from "api/api"; +import SearchResults from "../SearchResults"; +import type { MergeFunc, MergeSearchCleanerFunc } from "~/routes/MediaItems/MediaLayout"; +import { useNavigate } from "react-router"; + +interface Props { + open: boolean + setOpen: Function + type: string + currentId: number + currentTitle: string + mergeFunc: MergeFunc + mergeCleanerFunc: MergeSearchCleanerFunc +} + +export default function MergeModal(props: Props) { + const [query, setQuery] = useState(''); + const [data, setData] = useState(); + const [debouncedQuery, setDebouncedQuery] = useState(query); + const [mergeTarget, setMergeTarget] = useState<{title: string, id: number}>({title: '', id: 0}) + const [mergeOrderReversed, setMergeOrderReversed] = useState(false) + const navigate = useNavigate() + + + const closeMergeModal = () => { + props.setOpen(false) + setQuery('') + setData(undefined) + setMergeOrderReversed(false) + setMergeTarget({title: '', id: 0}) + } + + const toggleSelect = ({title, id}: {title: string, id: number}) => { + if (mergeTarget.id === 0) { + setMergeTarget({title: title, id: id}) + } else { + setMergeTarget({title:"", id: 0}) + } + } + + useEffect(() => { + console.log(mergeTarget) + }, [mergeTarget]) + + const doMerge = () => { + let from, to + if (!mergeOrderReversed) { + from = mergeTarget + to = {id: props.currentId, title: props.currentTitle} + } else { + from = {id: props.currentId, title: props.currentTitle} + to = mergeTarget + } + props.mergeFunc(from.id, to.id) + .then(r => { + if (r.ok) { + if (mergeOrderReversed) { + navigate(`/${props.type.toLowerCase()}/${mergeTarget}`) + closeMergeModal() + } else { + window.location.reload() + } + } else { + // TODO: handle error + console.log(r) + } + }) + .catch((err) => console.log(err)) + } + + useEffect(() => { + const handler = setTimeout(() => { + setDebouncedQuery(query); + if (query === '') { + setData(undefined) + } + }, 300); + + return () => { + clearTimeout(handler); + }; + }, [query]); + + useEffect(() => { + if (debouncedQuery) { + search(debouncedQuery).then((r) => { + r = props.mergeCleanerFunc(r, props.currentId) + setData(r); + }); + } + }, [debouncedQuery]); + + return ( + +
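One thing to flag in `doMerge` above: on a reversed merge it interpolates the whole `mergeTarget` object into the redirect URL, which stringifies to `[object Object]`; it almost certainly wants the target's id. A corrected sketch of that success branch, reusing the component's own names and changing only the interpolation:

```ts
// Sketch of doMerge's success branch with the id interpolated explicitly.
if (r.ok) {
  if (mergeOrderReversed) {
    // redirect to the surviving item rather than ".../[object Object]"
    navigate(`/${props.type.toLowerCase()}/${mergeTarget.id}`);
    closeMergeModal();
  } else {
    window.location.reload();
  }
} else {
  // TODO: surface the error to the user instead of only logging it
  console.log(r);
}
```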

Merge {props.type}s

+
+ setQuery(e.target.value)} + /> + + { mergeTarget.id !== 0 ? + <> + {mergeOrderReversed ? +

{props.currentTitle} will be merged into {mergeTarget.title}

+ : +

{mergeTarget.title} will be merged into {props.currentTitle}

+ } + +
+ setMergeOrderReversed(!mergeOrderReversed)} /> + +
+ : + ''} +
+
+ ) +} diff --git a/client/app/components/modals/Modal.tsx b/client/app/components/modals/Modal.tsx new file mode 100644 index 0000000..47307b0 --- /dev/null +++ b/client/app/components/modals/Modal.tsx @@ -0,0 +1,84 @@ +import { useEffect, useRef, useState } from 'react'; +import ReactDOM from 'react-dom'; + +export function Modal({ + isOpen, + onClose, + children, + maxW, + h +}: { + isOpen: boolean; + onClose: () => void; + children: React.ReactNode; + maxW?: number; + h?: number; +}) { + const modalRef = useRef(null); + const [shouldRender, setShouldRender] = useState(isOpen); + const [isClosing, setIsClosing] = useState(false); + + // Show/hide logic + useEffect(() => { + if (isOpen) { + setShouldRender(true); + setIsClosing(false); + } else if (shouldRender) { + setIsClosing(true); + const timeout = setTimeout(() => { + setShouldRender(false); + }, 100); // Match fade-out duration + return () => clearTimeout(timeout); + } + }, [isOpen, shouldRender]); + + // Close on Escape key + useEffect(() => { + const handleKeyDown = (e: KeyboardEvent) => { + if (e.key === 'Escape') onClose(); + }; + if (isOpen) document.addEventListener('keydown', handleKeyDown); + return () => document.removeEventListener('keydown', handleKeyDown); + }, [isOpen, onClose]); + + // Close on outside click + useEffect(() => { + const handleClick = (e: MouseEvent) => { + if ( + modalRef.current && + !modalRef.current.contains(e.target as Node) + ) { + onClose(); + } + }; + if (isOpen) document.addEventListener('mousedown', handleClick); + return () => document.removeEventListener('mousedown', handleClick); + }, [isOpen, onClose]); + + if (!shouldRender) return null; + + return ReactDOM.createPortal( +
+
+ + {children} +
+
, + document.body + ); +} diff --git a/client/app/components/modals/RenameModal.tsx b/client/app/components/modals/RenameModal.tsx new file mode 100644 index 0000000..4a53ae6 --- /dev/null +++ b/client/app/components/modals/RenameModal.tsx @@ -0,0 +1,124 @@ +import { useQuery } from "@tanstack/react-query"; +import { createAlias, deleteAlias, getAliases, setPrimaryAlias, type Alias } from "api/api"; +import { Modal } from "./Modal"; +import { AsyncButton } from "../AsyncButton"; +import { useEffect, useState } from "react"; +import { Trash } from "lucide-react"; + +interface Props { + type: string + id: number + open: boolean + setOpen: Function +} + +export default function RenameModal({ open, setOpen, type, id }: Props) { + const [input, setInput] = useState('') + const [loading, setLoading ] = useState(false) + const [err, setError ] = useState() + const [displayData, setDisplayData] = useState([]) + + const { isPending, isError, data, error } = useQuery({ + queryKey: [ + 'aliases', + { + type: type, + id: id + }, + ], + queryFn: ({ queryKey }) => { + const params = queryKey[1] as { type: string; id: number }; + return getAliases(params.type, params.id); + }, + }); + + useEffect(() => { + if (data) { + setDisplayData(data) + } + }, [data]) + + + if (isError) { + return ( +

Error: {error.message}

+ ) + } + if (isPending) { + return ( +

Loading...

+        )
+    }
+
+    const handleSetPrimary = (alias: string) => {
+        setError(undefined)
+        setLoading(true)
+        setPrimaryAlias(type, id, alias)
+            .then(r => {
+                if (r.ok) {
+                    window.location.reload()
+                } else {
+                    r.json().then((r) => setError(r.error))
+                }
+            })
+            // only clear the loading flag once the request has settled
+            .finally(() => setLoading(false))
+    }
+
+    const handleNewAlias = () => {
+        setError(undefined)
+        if (input === "") {
+            setError("alias must be provided")
+            return
+        }
+        setLoading(true)
+        createAlias(type, id, input)
+            .then(r => {
+                if (r.ok) {
+                    setDisplayData([...displayData, {alias: input, source: "Manual", is_primary: false, id: id}])
+                } else {
+                    r.json().then((r) => setError(r.error))
+                }
+            })
+            .finally(() => setLoading(false))
+    }
+
+    const handleDeleteAlias = (alias: string) => {
+        setError(undefined)
+        setLoading(true)
+        deleteAlias(type, id, alias)
+            .then(r => {
+                if (r.ok) {
+                    setDisplayData(displayData.filter((v) => v.alias != alias))
+                } else {
+                    r.json().then((r) => setError(r.error))
+                }
+            })
+            .finally(() => setLoading(false))
+    }
+
+    return (
+        <Modal isOpen={open} onClose={() => setOpen(false)}>

Alias Manager

+
+ {displayData.map((v) => ( +
+
{v.alias} (source: {v.source})
+ handleSetPrimary(v.alias)} disabled={v.is_primary}>Set Primary + handleDeleteAlias(v.alias)} confirm disabled={v.is_primary}> +
+ ))} +
+ setInput(e.target.value)} + /> + Submit +
+ {err &&

{err}

} +
+
+ ) +} \ No newline at end of file diff --git a/client/app/components/modals/SearchModal.tsx b/client/app/components/modals/SearchModal.tsx new file mode 100644 index 0000000..ec056cf --- /dev/null +++ b/client/app/components/modals/SearchModal.tsx @@ -0,0 +1,60 @@ +import { useEffect, useState } from "react"; +import { Modal } from "./Modal"; +import { search, type SearchResponse } from "api/api"; +import SearchResults from "../SearchResults"; + +interface Props { + open: boolean + setOpen: Function +} + +export default function SearchModal({ open, setOpen }: Props) { + const [query, setQuery] = useState(''); + const [data, setData] = useState(); + const [debouncedQuery, setDebouncedQuery] = useState(query); + + const closeSearchModal = () => { + setOpen(false) + setQuery('') + setData(undefined) + } + + useEffect(() => { + const handler = setTimeout(() => { + setDebouncedQuery(query); + if (query === '') { + setData(undefined) + } + }, 300); + + return () => { + clearTimeout(handler); + }; + }, [query]); + + useEffect(() => { + if (debouncedQuery) { + search(debouncedQuery).then((r) => { + setData(r); + }); + } + }, [debouncedQuery]); + + return ( + +
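SearchModal above (and MergeModal earlier) wire up the same 300 ms debounce by hand: a `setTimeout` inside a `useEffect` keyed on `query`, cleared on every keystroke. If that pattern keeps spreading it folds neatly into a small hook; the sketch below is illustrative only, and `useDebouncedValue` is not a name that exists in this codebase:

```ts
import { useEffect, useState } from "react";

// Sketch of a reusable debounce hook mirroring the inline logic in
// SearchModal/MergeModal: the returned value only updates once `value`
// has been stable for `delayMs` milliseconds.
export function useDebouncedValue<T>(value: T, delayMs = 300): T {
  const [debounced, setDebounced] = useState(value);

  useEffect(() => {
    const handler = setTimeout(() => setDebounced(value), delayMs);
    return () => clearTimeout(handler); // restart the timer on every change
  }, [value, delayMs]);

  return debounced;
}

// Usage sketch inside a component:
//   const debouncedQuery = useDebouncedValue(query, 300);
```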

Search

+
+ setQuery(e.target.value)} + /> +
+ +
+
+
+ ) +} diff --git a/client/app/components/modals/SettingsModal.tsx b/client/app/components/modals/SettingsModal.tsx new file mode 100644 index 0000000..bd8a4ce --- /dev/null +++ b/client/app/components/modals/SettingsModal.tsx @@ -0,0 +1,41 @@ +import { Modal } from "./Modal" +import { Tabs, TabsContent, TabsList, TabsTrigger } from "@radix-ui/react-tabs"; +import AccountPage from "./AccountPage"; +import { ThemeSwitcher } from "../themeSwitcher/ThemeSwitcher"; +import ThemeHelper from "../../routes/ThemeHelper"; +import { useAppContext } from "~/providers/AppProvider"; +import ApiKeysModal from "./ApiKeysModal"; + +interface Props { + open: boolean + setOpen: Function +} + +export default function SettingsModal({ open, setOpen } : Props) { + + const { user } = useAppContext() + + const triggerClasses = "px-4 py-2 w-full hover-bg-secondary rounded-md text-start data-[state=active]:bg-[var(--color-bg-secondary)]" + const contentClasses = "w-full px-10 overflow-y-auto" + + return ( + setOpen(false)} maxW={900}> + + + Appearance + Account + { user && API Keys} + + + + + + + + + + + + + ) +} \ No newline at end of file diff --git a/client/app/components/sidebar/Sidebar.tsx b/client/app/components/sidebar/Sidebar.tsx new file mode 100644 index 0000000..f1927e5 --- /dev/null +++ b/client/app/components/sidebar/Sidebar.tsx @@ -0,0 +1,22 @@ +import { ExternalLink, Home, Info } from "lucide-react"; +import SidebarSearch from "./SidebarSearch"; +import SidebarItem from "./SidebarItem"; +import SidebarSettings from "./SidebarSettings"; + +export default function Sidebar() { + + const iconSize = 20; + + return ( +
+
+ {}} modal={<>}> + +
+
+ } space={22} externalLink to="https://koito.io" name="About" onClick={() => {}} modal={<>}> + +
+
+ ); +} \ No newline at end of file diff --git a/client/app/components/sidebar/SidebarItem.tsx b/client/app/components/sidebar/SidebarItem.tsx new file mode 100644 index 0000000..0e994cc --- /dev/null +++ b/client/app/components/sidebar/SidebarItem.tsx @@ -0,0 +1,48 @@ +import React, { useState } from "react"; +import Popup from "../Popup"; +import { Link } from "react-router"; + +interface Props { + name: string; + to?: string; + onClick: Function; + children: React.ReactNode; + modal: React.ReactNode; + keyHint?: React.ReactNode; + space?: number + externalLink?: boolean + /* true if the keyhint is an icon and not text */ + icon?: boolean +} + +export default function SidebarItem({ externalLink, space, keyHint, name, to, children, modal, onClick, icon }: Props) { + const classes = "hover:cursor-pointer hover:bg-(--color-bg-tertiary) transition duration-100 rounded-md p-2 inline-block"; + + const popupInner = keyHint ? ( +
+ {name} + {icon ? +
+ {keyHint} +
+ : + + {keyHint} + + } +
+ ) : name; + + return ( + <> + + {to ? ( + {children} + ) : ( + onClick()}>{children} + )} + + {modal} + + ); +} diff --git a/client/app/components/sidebar/SidebarSearch.tsx b/client/app/components/sidebar/SidebarSearch.tsx new file mode 100644 index 0000000..dca2cca --- /dev/null +++ b/client/app/components/sidebar/SidebarSearch.tsx @@ -0,0 +1,33 @@ +import { useEffect, useState } from "react"; +import SidebarItem from "./SidebarItem"; +import { Search } from "lucide-react"; +import SearchModal from "../modals/SearchModal"; + +interface Props { + size: number +} + +export default function SidebarSearch({ size } : Props) { + const [open, setModalOpen] = useState(false) + + useEffect(() => { + const handleKeyDown = (e: KeyboardEvent) => { + if (e.key === '/' && !open) { + e.preventDefault(); + setModalOpen(true); + } + }; + document.addEventListener('keydown', handleKeyDown); + return () => document.removeEventListener('keydown', handleKeyDown); + }, [open]); + + return ( + setModalOpen(true)} + name="Search" + keyHint="/" + children={} modal={} + /> + ) +} \ No newline at end of file diff --git a/client/app/components/sidebar/SidebarSettings.tsx b/client/app/components/sidebar/SidebarSettings.tsx new file mode 100644 index 0000000..d2ff503 --- /dev/null +++ b/client/app/components/sidebar/SidebarSettings.tsx @@ -0,0 +1,29 @@ +import { Settings2 } from "lucide-react"; +import SettingsModal from "../modals/SettingsModal"; +import SidebarItem from "./SidebarItem"; +import { useEffect, useState } from "react"; + +interface Props { + size: number +} + +export default function SidebarSettings({ size }: Props) { + const [open, setOpen] = useState(false); + + useEffect(() => { + const handleKeyDown= (e: KeyboardEvent) => { + if (e.key === '\\' && !open) { + e.preventDefault(); + setOpen(true); + } + }; + document.addEventListener('keydown', handleKeyDown); + return () => document.removeEventListener('keydown', handleKeyDown); + }, [open]); + + return ( + setOpen(true)} modal={}> + + + ) +} \ No newline at end of file diff --git a/client/app/components/skeletons/TopListSkeleton.tsx b/client/app/components/skeletons/TopListSkeleton.tsx new file mode 100644 index 0000000..8358ffc --- /dev/null +++ b/client/app/components/skeletons/TopListSkeleton.tsx @@ -0,0 +1,20 @@ +interface Props { + numItems: number +} + +export default function TopListSkeleton({ numItems }: Props) { + + return ( +
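SidebarSearch and SidebarSettings above register their '/' and '\' shortcuts with the same document-level `keydown` listener plus cleanup. A shared hook is one way to express that once; the hook below is an illustrative sketch, not something the repo defines:

```ts
import { useEffect } from "react";

// Sketch: register a global keyboard shortcut while `enabled` is true,
// mirroring the effect bodies in SidebarSearch and SidebarSettings.
export function useGlobalShortcut(key: string, enabled: boolean, onTrigger: () => void) {
  useEffect(() => {
    const handleKeyDown = (e: KeyboardEvent) => {
      if (e.key === key && enabled) {
        e.preventDefault();
        onTrigger();
      }
    };
    document.addEventListener("keydown", handleKeyDown);
    return () => document.removeEventListener("keydown", handleKeyDown);
  }, [key, enabled, onTrigger]);
}

// Usage sketch: useGlobalShortcut("/", !open, () => setModalOpen(true));
```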
+ {[...Array(numItems)].map(() => ( +
+
+
+
+
+
+
+ ))} +
+ ) +} \ No newline at end of file diff --git a/client/app/components/themeSwitcher/ThemeOption.tsx b/client/app/components/themeSwitcher/ThemeOption.tsx new file mode 100644 index 0000000..7cecf76 --- /dev/null +++ b/client/app/components/themeSwitcher/ThemeOption.tsx @@ -0,0 +1,22 @@ +import type { Theme } from "~/providers/ThemeProvider"; + +interface Props { + theme: Theme + setTheme: Function +} + +export default function ThemeOption({ theme, setTheme }: Props) { + + const capitalizeFirstLetter = (s: string) => { + return s.charAt(0).toUpperCase() + s.slice(1); + } + + return ( +
setTheme(theme.name)} className="rounded-md p-5 hover:cursor-pointer flex gap-4 items-center border-2" style={{background: theme.bg, color: theme.fg, borderColor: theme.bgSecondary}}> + {capitalizeFirstLetter(theme.name)} +
+
+
+
+ ) +} \ No newline at end of file diff --git a/client/app/components/themeSwitcher/ThemeSwitcher.tsx b/client/app/components/themeSwitcher/ThemeSwitcher.tsx new file mode 100644 index 0000000..e051f50 --- /dev/null +++ b/client/app/components/themeSwitcher/ThemeSwitcher.tsx @@ -0,0 +1,36 @@ +// ThemeSwitcher.tsx +import { useEffect } from 'react'; +import { useTheme } from '../../hooks/useTheme'; +import { themes } from '~/providers/ThemeProvider'; +import ThemeOption from './ThemeOption'; + +export function ThemeSwitcher() { + const { theme, setTheme } = useTheme(); + + + useEffect(() => { + const saved = localStorage.getItem('theme'); + if (saved && saved !== theme) { + setTheme(saved); + } else if (!saved) { + localStorage.setItem('theme', theme) + } + }, []); + + useEffect(() => { + if (theme) { + localStorage.setItem('theme', theme) + } + }, [theme]); + + return ( + <> +

Select Theme

+
+ {themes.map((t) => ( + + ))} +
+ + ); +} diff --git a/client/app/entry.client.tsx b/client/app/entry.client.tsx new file mode 100644 index 0000000..08ab4ec --- /dev/null +++ b/client/app/entry.client.tsx @@ -0,0 +1,12 @@ +import { startTransition, StrictMode } from "react"; +import { hydrateRoot } from "react-dom/client"; +import { HydratedRouter } from "react-router/dom"; + +startTransition(() => { + hydrateRoot( + document, + + + + ); +}); diff --git a/client/app/entry.server.tsx b/client/app/entry.server.tsx new file mode 100644 index 0000000..e7a46ee --- /dev/null +++ b/client/app/entry.server.tsx @@ -0,0 +1,70 @@ +import { PassThrough } from "node:stream"; + +import type { AppLoadContext, EntryContext } from "react-router"; +import { createReadableStreamFromReadable } from "@react-router/node"; +import { ServerRouter } from "react-router"; +import { isbot } from "isbot"; +import type { RenderToPipeableStreamOptions } from "react-dom/server"; +import { renderToPipeableStream } from "react-dom/server"; + +export const streamTimeout = 5_000; + +export default function handleRequest( + request: Request, + responseStatusCode: number, + responseHeaders: Headers, + routerContext: EntryContext, + loadContext: AppLoadContext + // If you have middleware enabled: + // loadContext: unstable_RouterContextProvider +) { + return new Promise((resolve, reject) => { + let shellRendered = false; + let userAgent = request.headers.get("user-agent"); + + // Ensure requests from bots and SPA Mode renders wait for all content to load before responding + // https://react.dev/reference/react-dom/server/renderToPipeableStream#waiting-for-all-content-to-load-for-crawlers-and-static-generation + let readyOption: keyof RenderToPipeableStreamOptions = + (userAgent && isbot(userAgent)) || routerContext.isSpaMode + ? "onAllReady" + : "onShellReady"; + + const { pipe, abort } = renderToPipeableStream( + , + { + [readyOption]() { + shellRendered = true; + const body = new PassThrough(); + const stream = createReadableStreamFromReadable(body); + + responseHeaders.set("Content-Type", "text/html"); + + resolve( + new Response(stream, { + headers: responseHeaders, + status: responseStatusCode, + }) + ); + + pipe(body); + }, + onShellError(error: unknown) { + reject(error); + }, + onError(error: unknown) { + responseStatusCode = 500; + // Log streaming rendering errors from inside the shell. Don't log + // errors encountered during initial shell rendering since they'll + // reject and get logged in handleDocumentRequest. 
+ if (shellRendered) { + console.error(error); + } + }, + } + ); + + // Abort the rendering stream after the `streamTimeout` so it has time to + // flush down the rejected boundaries + setTimeout(abort, streamTimeout + 1000); + }); +} diff --git a/client/app/hooks/useTheme.ts b/client/app/hooks/useTheme.ts new file mode 100644 index 0000000..6fd9ece --- /dev/null +++ b/client/app/hooks/useTheme.ts @@ -0,0 +1,10 @@ +import { useContext } from 'react'; +import { ThemeContext } from '../providers/ThemeProvider'; + +export function useTheme() { + const context = useContext(ThemeContext); + if (!context) { + throw new Error('useTheme must be used within a ThemeProvider'); + } + return context; +} \ No newline at end of file diff --git a/client/app/providers/AppProvider.tsx b/client/app/providers/AppProvider.tsx new file mode 100644 index 0000000..9614db8 --- /dev/null +++ b/client/app/providers/AppProvider.tsx @@ -0,0 +1,61 @@ +import type { User } from "api/api"; +import { createContext, useContext, useEffect, useState } from "react"; + +interface AppContextType { + user: User | null | undefined; + configurableHomeActivity: boolean; + homeItems: number; + setConfigurableHomeActivity: (value: boolean) => void; + setHomeItems: (value: number) => void; + setUsername: (value: string) => void; +} + +const AppContext = createContext(undefined); + +export const useAppContext = () => { + const context = useContext(AppContext); + if (context === undefined) { + throw new Error("useAppContext must be used within an AppProvider"); + } + return context; +}; + +export const AppProvider = ({ children }: { children: React.ReactNode }) => { + const [user, setUser] = useState(undefined); + const [configurableHomeActivity, setConfigurableHomeActivity] = useState(false); + const [homeItems, setHomeItems] = useState(0); + + const setUsername = (value: string) => { + if (!user) { + return + } + setUser({...user, username: value}) + } + + useEffect(() => { + fetch("/apis/web/v1/user/me") + .then((res) => res.json()) + .then((data) => { + data.error ? 
setUser(null) : setUser(data); + }) + .catch(() => setUser(null)); + + setConfigurableHomeActivity(true); + setHomeItems(12); + }, []); + + if (user === undefined) { + return null; + } + + const contextValue: AppContextType = { + user, + configurableHomeActivity, + homeItems, + setConfigurableHomeActivity, + setHomeItems, + setUsername, + }; + + return {children}; +}; \ No newline at end of file diff --git a/client/app/providers/ThemeProvider.tsx b/client/app/providers/ThemeProvider.tsx new file mode 100644 index 0000000..cbdbf72 --- /dev/null +++ b/client/app/providers/ThemeProvider.tsx @@ -0,0 +1,259 @@ +import { createContext, useEffect, useState, type ReactNode } from 'react'; + +// a fair number of colors aren't actually used, but i'm keeping +// them so that I don't have to worry about colors when adding new ui elements +export type Theme = { + name: string, + bg: string + bgSecondary: string + bgTertiary: string + fg: string + fgSecondary: string + fgTertiary: string + primary: string + primaryDim: string + accent: string + accentDim: string + error: string + warning: string + info: string + success: string +} + +export const themes: Theme[] = [ + { + name: "yuu", + bg: "#161312", + bgSecondary: "#272120", + bgTertiary: "#382F2E", + fg: "#faf5f4", + fgSecondary: "#CCC7C6", + fgTertiary: "#B0A3A1", + primary: "#ff826d", + primaryDim: "#CE6654", + accent: "#464DAE", + accentDim: "#393D74", + error: "#FF6247", + warning: "#FFC107", + success: "#3ECE5F", + info: "#41C4D8", + }, + { + name: "varia", + bg: "rgb(25, 25, 29)", + bgSecondary: "#222222", + bgTertiary: "#333333", + fg: "#eeeeee", + fgSecondary: "#aaaaaa", + fgTertiary: "#888888", + primary: "rgb(203, 110, 240)", + primaryDim: "#c28379", + accent: "#f0ad0a", + accentDim: "#d08d08", + error: "#f44336", + warning: "#ff9800", + success: "#4caf50", + info: "#2196f3", + }, + { + name: "midnight", + bg: "rgb(8, 15, 24)", + bgSecondary: "rgb(15, 27, 46)", + bgTertiary: "rgb(15, 41, 70)", + fg: "#dbdfe7", + fgSecondary: "#9ea3a8", + fgTertiary: "#74787c", + primary: "#1a97eb", + primaryDim: "#2680aa", + accent: "#f0ad0a", + accentDim: "#d08d08", + error: "#f44336", + warning: "#ff9800", + success: "#4caf50", + info: "#2196f3", + }, + { + name: "catppuccin", + bg: "#1e1e2e", + bgSecondary: "#181825", + bgTertiary: "#11111b", + fg: "#cdd6f4", + fgSecondary: "#a6adc8", + fgTertiary: "#9399b2", + primary: "#89b4fa", + primaryDim: "#739df0", + accent: "#f38ba8", + accentDim: "#d67b94", + error: "#f38ba8", + warning: "#f9e2af", + success: "#a6e3a1", + info: "#89dceb", + }, + { + name: "autumn", + bg: "rgb(44, 25, 18)", + bgSecondary: "rgb(70, 40, 18)", + bgTertiary: "#4b2f1c", + fg: "#fef9f3", + fgSecondary: "#dbc6b0", + fgTertiary: "#a3917a", + primary: "#d97706", + primaryDim: "#b45309", + accent: "#8c4c28", + accentDim: "#6b3b1f", + error: "#d1433f", + warning: "#e38b29", + success: "#6b8e23", + info: "#c084fc", + }, + { + name: "black", + bg: "#000000", + bgSecondary: "#1a1a1a", + bgTertiary: "#2a2a2a", + fg: "#dddddd", + fgSecondary: "#aaaaaa", + fgTertiary: "#888888", + primary: "#08c08c", + primaryDim: "#08c08c", + accent: "#f0ad0a", + accentDim: "#d08d08", + error: "#f44336", + warning: "#ff9800", + success: "#4caf50", + info: "#2196f3", + }, + { + name: "wine", + bg: "#23181E", + bgSecondary: "#2C1C25", + bgTertiary: "#422A37", + fg: "#FCE0B3", + fgSecondary: "#C7AC81", + fgTertiary: "#A78E64", + primary: "#EA8A64", + primaryDim: "#BD7255", + accent: "#FAE99B", + accentDim: "#C6B464", + error: "#fca5a5", + warning: "#fde68a", + 
success: "#bbf7d0", + info: "#bae6fd", + }, + { + name: "pearl", + bg: "#FFFFFF", + bgSecondary: "#EEEEEE", + bgTertiary: "#E0E0E0", + fg: "#333333", + fgSecondary: "#555555", + fgTertiary: "#777777", + primary: "#007BFF", + primaryDim: "#0056B3", + accent: "#28A745", + accentDim: "#1E7E34", + error: "#DC3545", + warning: "#FFC107", + success: "#28A745", + info: "#17A2B8", + }, + { + name: "asuka", + bg: "#3B1212", + bgSecondary: "#471B1B", + bgTertiary: "#020202", + fg: "#F1E9E6", + fgSecondary: "#CCB6AE", + fgTertiary: "#9F8176", + primary: "#F1E9E6", + primaryDim: "#CCB6AE", + accent: "#41CE41", + accentDim: "#3BA03B", + error: "#DC143C", + warning: "#FFD700", + success: "#32CD32", + info: "#1E90FF", + }, + { + name: "urim", + bg: "#101713", + bgSecondary: "#1B2921", + bgTertiary: "#273B30", + fg: "#D2E79E", + fgSecondary: "#B4DA55", + fgTertiary: "#7E9F2A", + primary: "#ead500", + primaryDim: "#C1B210", + accent: "#28A745", + accentDim: "#1E7E34", + error: "#EE5237", + warning: "#FFC107", + success: "#28A745", + info: "#17A2B8", + }, + { + name: "match", + bg: "#071014", + bgSecondary: "#0A181E", + bgTertiary: "#112A34", + fg: "#ebeaeb", + fgSecondary: "#BDBDBD", + fgTertiary: "#A2A2A2", + primary: "#fda827", + primaryDim: "#C78420", + accent: "#277CFD", + accentDim: "#1F60C1", + error: "#F14426", + warning: "#FFC107", + success: "#28A745", + info: "#17A2B8", + }, + { + name: "lemon", + bg: "#1a171a", + bgSecondary: "#2E272E", + bgTertiary: "#443844", + fg: "#E6E2DC", + fgSecondary: "#B2ACA1", + fgTertiary: "#968F82", + primary: "#f5c737", + primaryDim: "#C29D2F", + accent: "#277CFD", + accentDim: "#1F60C1", + error: "#F14426", + warning: "#FFC107", + success: "#28A745", + info: "#17A2B8", + }, +]; + +interface ThemeContextValue { + theme: string; + setTheme: (theme: string) => void; +} + +const ThemeContext = createContext(undefined); + +export function ThemeProvider({ + theme: initialTheme, + children, +}: { + theme: string; + children: ReactNode; +}) { + const [theme, setTheme] = useState(initialTheme); + + useEffect(() => { + if (theme) { + document.documentElement.setAttribute('data-theme', theme); + } + }, [theme]); + + return ( + + {children} + + ); +} + +export { ThemeContext } \ No newline at end of file diff --git a/client/app/root.tsx b/client/app/root.tsx new file mode 100644 index 0000000..7a62099 --- /dev/null +++ b/client/app/root.tsx @@ -0,0 +1,138 @@ +import { + isRouteErrorResponse, + Links, + Meta, + Outlet, + Scripts, + ScrollRestoration, + useRouteError, +} from "react-router"; + +import type { Route } from "./+types/root"; +import './themes.css' +import "./app.css"; +import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; +import { ThemeProvider } from './providers/ThemeProvider'; +import Sidebar from "./components/sidebar/Sidebar"; +import Footer from "./components/Footer"; +import { AppProvider } from "./providers/AppProvider"; + +// Create a client +const queryClient = new QueryClient() + +export const links: Route.LinksFunction = () => [ + { rel: "preconnect", href: "https://fonts.googleapis.com" }, + { + rel: "preconnect", + href: "https://fonts.gstatic.com", + crossOrigin: "anonymous", + }, + { + rel: "stylesheet", + href: "https://fonts.googleapis.com/css2?family=Inter:ital,opsz,wght@0,14..32,100..900;1,14..32,100..900&display=swap", + }, +]; + +export function Layout({ children }: { children: React.ReactNode }) { + return ( + + + + + + + + + + + + + + + {children} + + + + + ); +} + +export default function App() { + let theme = 
localStorage.getItem('theme') ?? 'midnight' + + return ( + <> + + + +
+ +
+ +
+
+
+
+
+
+ + ); +} + +export function HydrateFallback() { + return null +} + +export function ErrorBoundary() { + const error = useRouteError(); + let message = "Oops!"; + let details = "An unexpected error occurred."; + let stack: string | undefined; + + if (isRouteErrorResponse(error)) { + message = error.status === 404 ? "404" : "Error"; + details = error.status === 404 + ? "The requested page could not be found." + : error.statusText || details; + } else if (import.meta.env.DEV && error instanceof Error) { + details = error.message; + stack = error.stack; + } + + let theme = 'midnight' + try { + theme = localStorage.getItem('theme') ?? theme + } catch(err) { + console.log(err) + } + + const title = `${message} - Koito` + + return ( + + + {title} +
+ +
+
+
+ +
+

{message}

+

{details}

+
+
+ {stack && ( +
+                                {stack}
+                                
+ )} +
+
+
+
+
+
+ ); +} diff --git a/client/app/routes.ts b/client/app/routes.ts new file mode 100644 index 0000000..8909928 --- /dev/null +++ b/client/app/routes.ts @@ -0,0 +1,13 @@ +import { type RouteConfig, index, route } from "@react-router/dev/routes"; + +export default [ + index("routes/Home.tsx"), + route("/artist/:id", "routes/MediaItems/Artist.tsx"), + route("/album/:id", "routes/MediaItems/Album.tsx"), + route("/track/:id", "routes/MediaItems/Track.tsx"), + route("/chart/top-albums", "routes/Charts/AlbumChart.tsx"), + route("/chart/top-artists", "routes/Charts/ArtistChart.tsx"), + route("/chart/top-tracks", "routes/Charts/TrackChart.tsx"), + route("/listens", "routes/Charts/Listens.tsx"), + route("/theme-helper", "routes/ThemeHelper.tsx"), +] satisfies RouteConfig; \ No newline at end of file diff --git a/client/app/routes/Charts/AlbumChart.tsx b/client/app/routes/Charts/AlbumChart.tsx new file mode 100644 index 0000000..8e68186 --- /dev/null +++ b/client/app/routes/Charts/AlbumChart.tsx @@ -0,0 +1,58 @@ +import TopItemList from "~/components/TopItemList"; +import ChartLayout from "./ChartLayout"; +import { useLoaderData, type LoaderFunctionArgs } from "react-router"; +import { type Album, type PaginatedResponse } from "api/api"; + +export async function clientLoader({ request }: LoaderFunctionArgs) { + const url = new URL(request.url); + const page = url.searchParams.get("page") || "0"; + url.searchParams.set('page', page) + + const res = await fetch( + `/apis/web/v1/top-albums?${url.searchParams.toString()}` + ); + if (!res.ok) { + throw new Response("Failed to load top albums", { status: 500 }); + } + + const top_albums: PaginatedResponse = await res.json(); + return { top_albums }; +} + +export default function AlbumChart() { + const { top_albums: initialData } = useLoaderData<{ top_albums: PaginatedResponse }>(); + + return ( + ( +
+
+ + +
+ +
+ + +
+
+ )} + /> + ); +} diff --git a/client/app/routes/Charts/ArtistChart.tsx b/client/app/routes/Charts/ArtistChart.tsx new file mode 100644 index 0000000..bc3be16 --- /dev/null +++ b/client/app/routes/Charts/ArtistChart.tsx @@ -0,0 +1,58 @@ +import TopItemList from "~/components/TopItemList"; +import ChartLayout from "./ChartLayout"; +import { useLoaderData, type LoaderFunctionArgs } from "react-router"; +import { type Album, type PaginatedResponse } from "api/api"; + +export async function clientLoader({ request }: LoaderFunctionArgs) { + const url = new URL(request.url); + const page = url.searchParams.get("page") || "0"; + url.searchParams.set('page', page) + + const res = await fetch( + `/apis/web/v1/top-artists?${url.searchParams.toString()}` + ); + if (!res.ok) { + throw new Response("Failed to load top artists", { status: 500 }); + } + + const top_artists: PaginatedResponse = await res.json(); + return { top_artists }; +} + +export default function Artist() { + const { top_artists: initialData } = useLoaderData<{ top_artists: PaginatedResponse }>(); + + return ( + ( +
+
+ + +
+ +
+ + +
+
+ )} + /> + ); +} diff --git a/client/app/routes/Charts/ChartLayout.tsx b/client/app/routes/Charts/ChartLayout.tsx new file mode 100644 index 0000000..6690cd3 --- /dev/null +++ b/client/app/routes/Charts/ChartLayout.tsx @@ -0,0 +1,262 @@ +import { + useFetcher, + useLocation, + useNavigate, +} from "react-router" +import { useEffect, useState } from "react" +import { average } from "color.js" +import { imageUrl, type PaginatedResponse } from "api/api" +import PeriodSelector from "~/components/PeriodSelector" + +interface ChartLayoutProps { + title: "Top Albums" | "Top Tracks" | "Top Artists" | "Last Played" + initialData: PaginatedResponse + endpoint: string + render: (opts: { + data: PaginatedResponse + page: number + onNext: () => void + onPrev: () => void + }) => React.ReactNode +} + +export default function ChartLayout({ + title, + initialData, + endpoint, + render, +}: ChartLayoutProps) { + const pgTitle = `${title} - Koito` + + const fetcher = useFetcher() + const location = useLocation() + const navigate = useNavigate() + + const currentParams = new URLSearchParams(location.search) + const currentPage = parseInt(currentParams.get("page") || "1", 10) + + const data: PaginatedResponse = fetcher.data?.[endpoint] + ? fetcher.data[endpoint] + : initialData + + const [bgColor, setBgColor] = useState("(--color-bg)") + + useEffect(() => { + if ((data?.items?.length ?? 0) === 0) return + + const img = (data.items[0] as any)?.image + if (!img) return + + average(imageUrl(img, "small"), { amount: 1 }).then((color) => { + setBgColor(`rgba(${color[0]},${color[1]},${color[2]},0.4)`) + }) + }, [data]) + + const period = currentParams.get("period") ?? "day" + const year = currentParams.get("year") + const month = currentParams.get("month") + const week = currentParams.get("week") + + const updateParams = (params: Record) => { + const nextParams = new URLSearchParams(location.search) + + for (const key in params) { + const val = params[key] + if (val !== null) { + nextParams.set(key, val) + } else { + nextParams.delete(key) + } + } + + const url = `/${endpoint}?${nextParams.toString()}` + navigate(url, { replace: false }) + } + + const handleSetPeriod = (p: string) => { + updateParams({ + period: p, + page: "1", + year: null, + month: null, + week: null, + }) + } + const handleSetYear = (val: string) => { + if (val == "") { + updateParams({ + period: period, + page: "1", + year: null, + month: null, + week: null + }) + return + } + updateParams({ + period: null, + page: "1", + year: val, + }) + } + const handleSetMonth = (val: string) => { + updateParams({ + period: null, + page: "1", + year: year ?? new Date().getFullYear().toString(), + month: val, + }) + } + const handleSetWeek = (val: string) => { + updateParams({ + period: null, + page: "1", + year: year ?? 
new Date().getFullYear().toString(), + month: null, + week: val, + }) + } + + useEffect(() => { + fetcher.load(`/${endpoint}?${currentParams.toString()}`) + }, [location.search]) + + const setPage = (nextPage: number) => { + const nextParams = new URLSearchParams(location.search) + nextParams.set("page", String(nextPage)) + const url = `/${endpoint}?${nextParams.toString()}` + fetcher.load(url) + navigate(url, { replace: false }) + } + + const handleNextPage = () => setPage(currentPage + 1) + const handlePrevPage = () => setPage(currentPage - 1) + + const yearOptions = Array.from({ length: 10 }, (_, i) => `${new Date().getFullYear() - i}`) + const monthOptions = Array.from({ length: 12 }, (_, i) => `${i + 1}`) + const weekOptions = Array.from({ length: 53 }, (_, i) => `${i + 1}`) + + const getDateRange = (): string => { + let from: Date + let to: Date + + const now = new Date() + const currentYear = now.getFullYear() + const currentMonth = now.getMonth() // 0-indexed + const currentDate = now.getDate() + + if (year && month) { + from = new Date(parseInt(year), parseInt(month) - 1, 1) + to = new Date(from) + to.setMonth(from.getMonth() + 1) + to.setDate(0) + } else if (year && week) { + const base = new Date(parseInt(year), 0, 1) // Jan 1 of the year + const weekNumber = parseInt(week) + from = new Date(base) + from.setDate(base.getDate() + (weekNumber - 1) * 7) + to = new Date(from) + to.setDate(from.getDate() + 6) + } else if (year) { + from = new Date(parseInt(year), 0, 1) + to = new Date(parseInt(year), 11, 31) + } else { + switch (period) { + case "day": + from = new Date(now) + to = new Date(now) + break + case "week": + to = new Date(now) + from = new Date(now) + from.setDate(to.getDate() - 6) + break + case "month": + to = new Date(now) + from = new Date(now) + if (currentMonth === 0) { + from = new Date(currentYear - 1, 11, currentDate) + } else { + from = new Date(currentYear, currentMonth - 1, currentDate) + } + break + case "year": + to = new Date(now) + from = new Date(currentYear - 1, currentMonth, currentDate) + break + case "all_time": + return "All Time" + default: + return "" + } + } + + const formatter = new Intl.DateTimeFormat(undefined, { + year: "numeric", + month: "long", + day: "numeric", + }) + + return `${formatter.format(from)} - ${formatter.format(to)}` + } + + + return ( +
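A note on the `year && week` branch of `getDateRange` above: week N is simply the 7-day window starting (N-1)*7 days after January 1st, so it is not ISO-8601 week numbering. A quick worked example of that arithmetic:

```ts
// Same arithmetic as getDateRange's `year && week` branch, for year=2025, week=2.
const year = 2025;
const week = 2;

const base = new Date(year, 0, 1);               // Jan 1, 2025
const from = new Date(base);
from.setDate(base.getDate() + (week - 1) * 7);   // Jan 8, 2025
const to = new Date(from);
to.setDate(from.getDate() + 6);                  // Jan 14, 2025

console.log(`${from.toDateString()} - ${to.toDateString()}`);
// => "Wed Jan 08 2025 - Tue Jan 14 2025"
```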
+ {pgTitle} + + +
+

{title}

+
+ + + + +
+

{getDateRange()}

+
+ {render({ + data, + page: currentPage, + onNext: handleNextPage, + onPrev: handlePrevPage, + })} +
+
+
+ ) +} diff --git a/client/app/routes/Charts/Listens.tsx b/client/app/routes/Charts/Listens.tsx new file mode 100644 index 0000000..6f5efdb --- /dev/null +++ b/client/app/routes/Charts/Listens.tsx @@ -0,0 +1,66 @@ +import ChartLayout from "./ChartLayout"; +import { Link, useLoaderData, type LoaderFunctionArgs } from "react-router"; +import { type Album, type Listen, type PaginatedResponse } from "api/api"; +import { timeSince } from "~/utils/utils"; +import ArtistLinks from "~/components/ArtistLinks"; + +export async function clientLoader({ request }: LoaderFunctionArgs) { + const url = new URL(request.url); + const page = url.searchParams.get("page") || "0"; + url.searchParams.set('page', page) + + const res = await fetch( + `/apis/web/v1/listens?${url.searchParams.toString()}` + ); + if (!res.ok) { + throw new Response("Failed to load top tracks", { status: 500 }); + } + + const listens: PaginatedResponse = await res.json(); + return { listens }; +} + +export default function Listens() { + const { listens: initialData } = useLoaderData<{ listens: PaginatedResponse }>(); + + return ( + ( +
+
+ + +
+ + + {data.items.map((item) => ( + + + + + ))} + +
{timeSince(new Date(item.time))} + {' - '} + {item.track.title} +
+
+ + +
+
+ )} + /> + ); +} diff --git a/client/app/routes/Charts/TrackChart.tsx b/client/app/routes/Charts/TrackChart.tsx new file mode 100644 index 0000000..23c1531 --- /dev/null +++ b/client/app/routes/Charts/TrackChart.tsx @@ -0,0 +1,58 @@ +import TopItemList from "~/components/TopItemList"; +import ChartLayout from "./ChartLayout"; +import { useLoaderData, type LoaderFunctionArgs } from "react-router"; +import { type Album, type PaginatedResponse } from "api/api"; + +export async function clientLoader({ request }: LoaderFunctionArgs) { + const url = new URL(request.url); + const page = url.searchParams.get("page") || "0"; + url.searchParams.set('page', page) + + const res = await fetch( + `/apis/web/v1/top-tracks?${url.searchParams.toString()}` + ); + if (!res.ok) { + throw new Response("Failed to load top tracks", { status: 500 }); + } + + const top_tracks: PaginatedResponse = await res.json(); + return { top_tracks }; +} + +export default function TrackChart() { + const { top_tracks: initialData } = useLoaderData<{ top_tracks: PaginatedResponse }>(); + + return ( + ( +
+
+ + +
+ +
+ + +
+
+ )} + /> + ); +} diff --git a/client/app/routes/Home.tsx b/client/app/routes/Home.tsx new file mode 100644 index 0000000..b340caa --- /dev/null +++ b/client/app/routes/Home.tsx @@ -0,0 +1,41 @@ +import type { Route } from "./+types/Home"; +import TopTracks from "~/components/TopTracks"; +import LastPlays from "~/components/LastPlays"; +import ActivityGrid from "~/components/ActivityGrid"; +import TopAlbums from "~/components/TopAlbums"; +import TopArtists from "~/components/TopArtists"; +import AllTimeStats from "~/components/AllTimeStats"; +import { useState } from "react"; +import PeriodSelector from "~/components/PeriodSelector"; +import { useAppContext } from "~/providers/AppProvider"; + +export function meta({}: Route.MetaArgs) { + return [ + { title: "Koito" }, + { name: "description", content: "Koito" }, + ]; +} + +export default function Home() { + const [period, setPeriod] = useState('week') + + const { homeItems } = useAppContext(); + + return ( +
+
+
+ + +
+ +
+ + + + +
+
+
+ ); +} diff --git a/client/app/routes/MediaItems/Album.tsx b/client/app/routes/MediaItems/Album.tsx new file mode 100644 index 0000000..77b52df --- /dev/null +++ b/client/app/routes/MediaItems/Album.tsx @@ -0,0 +1,57 @@ +import { useState } from "react"; +import { useLoaderData, type LoaderFunctionArgs } from "react-router"; +import TopTracks from "~/components/TopTracks"; +import { mergeAlbums, type Album } from "api/api"; +import LastPlays from "~/components/LastPlays"; +import PeriodSelector from "~/components/PeriodSelector"; +import MediaLayout from "./MediaLayout"; +import ActivityGrid from "~/components/ActivityGrid"; + +export async function clientLoader({ params }: LoaderFunctionArgs) { + const res = await fetch(`/apis/web/v1/album?id=${params.id}`); + if (!res.ok) { + throw new Response("Failed to load album", { status: 500 }); + } + const album: Album = await res.json(); + return album; +} + +export default function Album() { + const album = useLoaderData() as Album; + const [period, setPeriod] = useState('week') + + console.log(album) + + return ( + { + r.artists = [] + r.tracks = [] + for (let i = 0; i < r.albums.length; i ++) { + if (r.albums[i].id === id) { + delete r.albums[i] + } + } + return r + }} + subContent={<> + {album.listen_count &&

{album.listen_count} play{ album.listen_count > 1 ? 's' : ''}

} + } + > +
+ +
+
+ + + +
+
+ ); +} diff --git a/client/app/routes/MediaItems/Artist.tsx b/client/app/routes/MediaItems/Artist.tsx new file mode 100644 index 0000000..b742f56 --- /dev/null +++ b/client/app/routes/MediaItems/Artist.tsx @@ -0,0 +1,66 @@ +import { useState } from "react"; +import { useLoaderData, type LoaderFunctionArgs } from "react-router"; +import TopTracks from "~/components/TopTracks"; +import { mergeArtists, type Artist } from "api/api"; +import LastPlays from "~/components/LastPlays"; +import PeriodSelector from "~/components/PeriodSelector"; +import MediaLayout from "./MediaLayout"; +import ArtistAlbums from "~/components/ArtistAlbums"; +import ActivityGrid from "~/components/ActivityGrid"; + +export async function clientLoader({ params }: LoaderFunctionArgs) { + const res = await fetch(`/apis/web/v1/artist?id=${params.id}`); + if (!res.ok) { + throw new Response("Failed to load artist", { status: 500 }); + } + const artist: Artist = await res.json(); + return artist; +} + +export default function Artist() { + const artist = useLoaderData() as Artist; + const [period, setPeriod] = useState('week') + + // remove canonical name from alias list + console.log(artist.aliases) + let index = artist.aliases.indexOf(artist.name); + if (index !== -1) { + artist.aliases.splice(index, 1); + } + + return ( + { + r.albums = [] + r.tracks = [] + for (let i = 0; i < r.artists.length; i ++) { + if (r.artists[i].id === id) { + delete r.artists[i] + } + } + return r + }} + subContent={<> + {artist.listen_count &&

{artist.listen_count} play{ artist.listen_count > 1 ? 's' : ''}

} + } + > +
+ +
+
+
+ + + +
+ +
+
+ ); +} diff --git a/client/app/routes/MediaItems/MediaLayout.tsx b/client/app/routes/MediaItems/MediaLayout.tsx new file mode 100644 index 0000000..a0bf2fb --- /dev/null +++ b/client/app/routes/MediaItems/MediaLayout.tsx @@ -0,0 +1,88 @@ +import React, { useEffect, useState } from "react"; +import { average } from "color.js"; +import { imageUrl, type SearchResponse } from "api/api"; +import ImageDropHandler from "~/components/ImageDropHandler"; +import { Edit, ImageIcon, Merge, Trash } from "lucide-react"; +import { useAppContext } from "~/providers/AppProvider"; +import MergeModal from "~/components/modals/MergeModal"; +import ImageReplaceModal from "~/components/modals/ImageReplaceModal"; +import DeleteModal from "~/components/modals/DeleteModal"; +import RenameModal from "~/components/modals/RenameModal"; + +export type MergeFunc = (from: number, to: number) => Promise +export type MergeSearchCleanerFunc = (r: SearchResponse, id: number) => SearchResponse + +interface Props { + type: "Track" | "Album" | "Artist" + title: string + img: string + id: number + musicbrainzId: string + imgItemId: number + mergeFunc: MergeFunc + mergeCleanerFunc: MergeSearchCleanerFunc + children: React.ReactNode + subContent: React.ReactNode +} + +export default function MediaLayout(props: Props) { + const [bgColor, setBgColor] = useState("(--color-bg)"); + const [mergeModalOpen, setMergeModalOpen] = useState(false); + const [deleteModalOpen, setDeleteModalOpen] = useState(false); + const [imageModalOpen, setImageModalOpen] = useState(false); + const [renameModalOpen, setRenameModalOpen] = useState(false); + const { user } = useAppContext(); + + useEffect(() => { + average(imageUrl(props.img, 'small'), { amount: 1 }).then((color) => { + setBgColor(`rgba(${color[0]},${color[1]},${color[2]},0.4)`); + }); + }, [props.img]); + + const replaceImageCallback = () => { + window.location.reload() + } + + const title = `${props.title} - Koito` + + return ( +
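MediaLayout above (and ChartLayout earlier) compute their header tint the same way: color.js's `average` runs over the small image variant and the result becomes a 40%-opacity rgba background. A standalone sketch of that call, using a placeholder image path and a made-up CSS variable rather than anything from the repo:

```ts
import { average } from "color.js";

// Sketch: derive a dimmed background tint from an image, as MediaLayout does.
// The path is a placeholder; in the app it comes from imageUrl(img, "small").
average("/images/small/example.jpg", { amount: 1 }).then((color) => {
  const tint = `rgba(${color[0]},${color[1]},${color[2]},0.4)`;
  document.documentElement.style.setProperty("--header-tint", tint); // illustrative only
});
```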
+ + {title} + + +
+
+ {props.title} +
+

{props.type}

+

{props.title}

+ {props.subContent} +
+ { user && +
+ + + + + + + + +
+ } +
+ {props.children} +
+
+ ); +} diff --git a/client/app/routes/MediaItems/Track.tsx b/client/app/routes/MediaItems/Track.tsx new file mode 100644 index 0000000..c74c7ef --- /dev/null +++ b/client/app/routes/MediaItems/Track.tsx @@ -0,0 +1,59 @@ +import { useState } from "react"; +import { Link, useLoaderData, type LoaderFunctionArgs } from "react-router"; +import { mergeTracks, type Album, type Track } from "api/api"; +import LastPlays from "~/components/LastPlays"; +import PeriodSelector from "~/components/PeriodSelector"; +import MediaLayout from "./MediaLayout"; +import ActivityGrid from "~/components/ActivityGrid"; + +export async function clientLoader({ params }: LoaderFunctionArgs) { + let res = await fetch(`/apis/web/v1/track?id=${params.id}`); + if (!res.ok) { + throw new Response("Failed to load track", { status: res.status }); + } + const track: Track = await res.json(); + res = await fetch(`/apis/web/v1/album?id=${track.album_id}`) + if (!res.ok) { + throw new Response("Failed to load album for track", { status: res.status }) + } + const album: Album = await res.json() + return {track: track, album: album}; +} + +export default function Track() { + const { track, album } = useLoaderData(); + const [period, setPeriod] = useState('week') + + return ( + { + r.albums = [] + r.artists = [] + for (let i = 0; i < r.tracks.length; i ++) { + if (r.tracks[i].id === id) { + delete r.tracks[i] + } + } + return r + }} + subContent={
+ appears on {album.title} + {track.listen_count &&

{track.listen_count} play{ track.listen_count > 1 ? 's' : ''}

} +
} + > +
+ +
+
+ + +
+
+ ) +} diff --git a/client/app/routes/Root.tsx b/client/app/routes/Root.tsx new file mode 100644 index 0000000..9672dd8 --- /dev/null +++ b/client/app/routes/Root.tsx @@ -0,0 +1,43 @@ +import { isRouteErrorResponse, Outlet } from "react-router"; +import Footer from "~/components/Footer"; +import type { Route } from "../+types/root"; + +export default function Root() { + + return ( +
+ +
+
+ ) +} + +export function ErrorBoundary({ error }: Route.ErrorBoundaryProps) { + let message = "Oops!"; + let details = "An unexpected error occurred."; + let stack: string | undefined; + + if (isRouteErrorResponse(error)) { + message = error.status === 404 ? "404" : "Error"; + details = + error.status === 404 + ? "The requested page could not be found." + : error.statusText || details; + } else if (import.meta.env.DEV && error && error instanceof Error) { + details = error.message; + stack = error.stack; + } + + return ( +
+

{message}

+

{details}

+ {stack && ( +
+            {stack}
+          
+ )} +
+ ); + } + \ No newline at end of file diff --git a/client/app/routes/ThemeHelper.tsx b/client/app/routes/ThemeHelper.tsx new file mode 100644 index 0000000..7c65c6a --- /dev/null +++ b/client/app/routes/ThemeHelper.tsx @@ -0,0 +1,67 @@ +import { useState } from "react" +import { useAppContext } from "~/providers/AppProvider" +import { AsyncButton } from "../components/AsyncButton" +import AllTimeStats from "~/components/AllTimeStats" +import ActivityGrid from "~/components/ActivityGrid" +import LastPlays from "~/components/LastPlays" +import TopAlbums from "~/components/TopAlbums" +import TopArtists from "~/components/TopArtists" +import TopTracks from "~/components/TopTracks" + +export default function ThemeHelper() { + + const homeItems = 3 + + return ( +
+
+ + +
+
+ + + + +
+
+
+

You're logged in as Example User

+ {}}>Logout +
+
+ + {}}>Submit +
+
+ + + {}}>Submit +
+
+ {}} /> + +
+

successfully displayed example text

+

this is an example of error text

+

here is an informational example

+

heed this warning, traveller

+
+
+ ) +} \ No newline at end of file diff --git a/client/app/themes.css b/client/app/themes.css new file mode 100644 index 0000000..b29001f --- /dev/null +++ b/client/app/themes.css @@ -0,0 +1,432 @@ +/* Theme Definitions */ + +[data-theme="varia"]{ + /* Backgrounds */ + --color-bg:rgb(25, 25, 29); + --color-bg-secondary: #222222; + --color-bg-tertiary: #333333; + + /* Foregrounds */ + --color-fg: #eeeeee; + --color-fg-secondary: #aaaaaa; + --color-fg-tertiary: #888888; + + /* Accents */ + --color-primary:rgb(203, 110, 240); + --color-primary-dim: #c28379; + --color-accent: #f0ad0a; + --color-accent-dim: #d08d08; + + /* Status Colors */ + --color-error: #f44336; + --color-warning: #ff9800; + --color-success: #4caf50; + --color-info: #2196f3; + + /* Borders and Shadows */ + --color-border: var(--color-bg-tertiary); + --color-shadow: rgba(0, 0, 0, 0.5); + + /* Interactive Elements */ + --color-link: var(--color-primary); + --color-link-hover: var(--color-primary-dim); +} + +[data-theme="wine"] { + /* Backgrounds */ + --color-bg: #23181E; + --color-bg-secondary: #2C1C25; + --color-bg-tertiary: #422A37; + + /* Foregrounds */ + --color-fg: #FCE0B3; + --color-fg-secondary:#C7AC81; + --color-fg-tertiary:#A78E64; + + /* Accents */ + --color-primary: #EA8A64; + --color-primary-dim: #BD7255; + --color-accent: #FAE99B; + --color-accent-dim: #C6B464; + + /* Status Colors */ + --color-error: #fca5a5; + --color-warning: #fde68a; + --color-success: #bbf7d0; + --color-info: #bae6fd; + + /* Borders and Shadows */ + --color-border: var(--color-bg-tertiary); + --color-shadow: rgba(0, 0, 0, 0.05); + + /* Interactive Elements */ + --color-link: var(--color-primary); + --color-link-hover: var(--color-primary-dim); +} + +[data-theme="asuka"] { + /* Backgrounds */ + --color-bg: #3B1212; + --color-bg-secondary: #471B1B; + --color-bg-tertiary: #020202; + + /* Foregrounds */ + --color-fg: #F1E9E6; + --color-fg-secondary: #CCB6AE; + --color-fg-tertiary: #9F8176; + + /* Accents */ + --color-primary: #F1E9E6; + --color-primary-dim: #CCB6AE; + --color-accent: #41CE41; + --color-accent-dim: #3BA03B; + + /* Status Colors */ + --color-error: #EB97A8; + --color-warning: #FFD700; + --color-success: #32CD32; + --color-info: #1E90FF; + + /* Borders and Shadows (derived from existing colors for consistency) */ + --color-border: var(--color-bg-tertiary); + --color-shadow: rgba(0, 0, 0, 0.1); /* Slightly more prominent shadow for contrast */ + + /* Interactive Elements */ + --color-link: var(--color-primary); + --color-link-hover: var(--color-primary-dim); +} + +[data-theme="midnight"] { + /* Backgrounds */ + --color-bg:rgb(8, 15, 24); + --color-bg-secondary:rgb(15, 27, 46); + --color-bg-tertiary:rgb(15, 41, 70); + + /* Foregrounds */ + --color-fg: #dbdfe7; + --color-fg-secondary: #9ea3a8; + --color-fg-tertiary: #74787c; + + /* Accents */ + --color-primary: #1a97eb; + --color-primary-dim: #2680aa; + --color-accent: #f0ad0a; + --color-accent-dim: #d08d08; + + /* Status Colors */ + --color-error: #f44336; + --color-warning: #ff9800; + --color-success: #4caf50; + --color-info: #2196f3; + + /* Borders and Shadows */ + --color-border: var(--color-bg-tertiary); + --color-shadow: rgba(0, 0, 0, 0.5); + + /* Interactive Elements */ + --color-link: var(--color-primary); + --color-link-hover: var(--color-primary-dim); +} + +/* TODO: Adjust */ +[data-theme="catppuccin"] { + /* Backgrounds */ + --color-bg: #1e1e2e; + --color-bg-secondary: #181825; + --color-bg-tertiary: #11111b; + + /* Foregrounds */ + --color-fg: #cdd6f4; + 
--color-fg-secondary: #a6adc8;
+  --color-fg-tertiary: #9399b2;
+
+  /* Accents */
+  --color-primary: #cba6f7;
+  --color-primary-dim: #739df0;
+  --color-accent: #f38ba8;
+  --color-accent-dim: #d67b94;
+
+  /* Status Colors */
+  --color-error: #f38ba8;
+  --color-warning: #f9e2af;
+  --color-success: #a6e3a1;
+  --color-info: #89dceb;
+
+  /* Borders and Shadows */
+  --color-border: var(--color-bg-tertiary);
+  --color-shadow: rgba(0, 0, 0, 0.5);
+
+  /* Interactive Elements */
+  --color-link: var(--color-primary);
+  --color-link-hover: var(--color-primary-dim);
+}
+
+[data-theme="pearl"] {
+  /* Backgrounds */
+  --color-bg: #FFFFFF;
+  --color-bg-secondary: #EEEEEE;
+  --color-bg-tertiary: #E0E0E0;
+
+  /* Foregrounds */
+  --color-fg: #333333;
+  --color-fg-secondary: #555555;
+  --color-fg-tertiary: #777777;
+
+  /* Accents */
+  --color-primary: #007BFF;
+  --color-primary-dim: #0056B3;
+  --color-accent: #28A745;
+  --color-accent-dim: #1E7E34;
+
+  /* Status Colors */
+  --color-error: #DC3545;
+  --color-warning: #CE9B00;
+  --color-success: #099B2B;
+  --color-info: #02B3CE;
+
+  /* Borders and Shadows */
+  --color-border: var(--color-bg-tertiary);
+  --color-shadow: rgba(0, 0, 0, 0.1);
+
+  /* Interactive Elements */
+  --color-link: var(--color-primary);
+  --color-link-hover: var(--color-primary-dim);
+}
+
+[data-theme="urim"] {
+  /* Backgrounds */
+  --color-bg: #101713;
+  --color-bg-secondary: #1B2921;
+  --color-bg-tertiary: #273B30;
+
+  /* Foregrounds */
+  --color-fg: #D2E79E;
+  --color-fg-secondary: #B4DA55;
+  --color-fg-tertiary: #7E9F2A;
+
+  /* Accents */
+  --color-primary: #ead500;
+  --color-primary-dim: #C1B210;
+  --color-accent: #28A745;
+  --color-accent-dim: #1E7E34;
+
+  /* Status Colors */
+  --color-error: #EE5237;
+  --color-warning: #FFC107;
+  --color-success: #28A745;
+  --color-info: #17A2B8;
+
+  /* Borders and Shadows */
+  --color-border: var(--color-bg-tertiary);
+  --color-shadow: rgba(0, 0, 0, 0.1);
+
+  /* Interactive Elements */
+  --color-link: var(--color-primary);
+  --color-link-hover: var(--color-primary-dim);
+}
+
+[data-theme="yuu"] {
+  /* Backgrounds */
+  --color-bg: #161312;
+  --color-bg-secondary: #272120;
+  --color-bg-tertiary: #382F2E;
+
+  /* Foregrounds */
+  --color-fg: #faf5f4;
+  --color-fg-secondary: #CCC7C6;
+  --color-fg-tertiary: #B0A3A1;
+
+  /* Accents */
+  --color-primary: #ff826d;
+  --color-primary-dim: #CE6654;
+  --color-accent: #464DAE;
+  --color-accent-dim: #393D74;
+
+  /* Status Colors */
+  --color-error: #FF6247;
+  --color-warning: #FFC107;
+  --color-success: #3ECE5F;
+  --color-info: #41C4D8;
+
+  /* Borders and Shadows */
+  --color-border: var(--color-bg-tertiary);
+  --color-shadow: rgba(0, 0, 0, 0.1);
+
+  /* Interactive Elements */
+  --color-link: var(--color-primary);
+  --color-link-hover: var(--color-primary-dim);
+}
+
+[data-theme="match"] {
+  /* Backgrounds */
+  --color-bg: #071014;
+  --color-bg-secondary: #0A181E;
+  --color-bg-tertiary: #112A34;
+
+  /* Foregrounds */
+  --color-fg: #ebeaeb;
+  --color-fg-secondary: #BDBDBD;
+  --color-fg-tertiary: #A2A2A2;
+
+  /* Accents */
+  --color-primary: #fda827;
+  --color-primary-dim: #C78420;
+  --color-accent: #277CFD;
+  --color-accent-dim: #1F60C1;
+
+  /* Status Colors */
+  --color-error: #F14426;
+  --color-warning: #FFC107;
+  --color-success: #28A745;
+  --color-info: #17A2B8;
+
+  /* Borders and Shadows */
+  --color-border: var(--color-bg-tertiary);
+  --color-shadow: rgba(0, 0, 0, 0.1);
+
+  /* Interactive Elements */
+  --color-link: var(--color-primary);
+  --color-link-hover: var(--color-primary-dim);
+}
+
+[data-theme="lemon"] {
+  /* Backgrounds */
+  --color-bg: #1a171a;
+  --color-bg-secondary: #2E272E;
+  --color-bg-tertiary: #443844;
+
+  /* Foregrounds */
+  --color-fg: #E6E2DC;
+  --color-fg-secondary: #B2ACA1;
+  --color-fg-tertiary: #968F82;
+
+  /* Accents */
+  --color-primary: #f5c737;
+  --color-primary-dim: #C29D2F;
+  --color-accent: #277CFD;
+  --color-accent-dim: #1F60C1;
+
+  /* Status Colors */
+  --color-error: #F14426;
+  --color-warning: #FFC107;
+  --color-success: #28A745;
+  --color-info: #17A2B8;
+
+  /* Borders and Shadows */
+  --color-border: var(--color-bg-tertiary);
+  --color-shadow: rgba(0, 0, 0, 0.1);
+
+  /* Interactive Elements */
+  --color-link: var(--color-primary);
+  --color-link-hover: var(--color-primary-dim);
+}
+
+[data-theme="autumn"] {
+  /* Backgrounds */
+  --color-bg:rgb(44, 25, 18);
+  --color-bg-secondary:rgb(70, 40, 18);
+  --color-bg-tertiary: #4b2f1c;
+
+  /* Foregrounds */
+  --color-fg: #fef9f3;
+  --color-fg-secondary: #dbc6b0;
+  --color-fg-tertiary: #a3917a;
+
+  /* Accents */
+  --color-primary: #d97706;
+  --color-primary-dim: #b45309;
+  --color-accent: #8c4c28;
+  --color-accent-dim: #6b3b1f;
+
+  /* Status Colors */
+  --color-error: #d1433f;
+  --color-warning: #e38b29;
+  --color-success: #6b8e23;
+  --color-info: #c084fc;
+
+  /* Borders and Shadows */
+  --color-border: var(--color-bg-tertiary);
+  --color-shadow: rgba(0, 0, 0, 0.4);
+
+  /* Interactive Elements */
+  --color-link: var(--color-primary);
+  --color-link-hover: var(--color-primary-dim);
+}
+
+[data-theme="black"] {
+  /* Backgrounds */
+  --color-bg: #000000;
+  --color-bg-secondary: #1a1a1a;
+  --color-bg-tertiary: #2a2a2a;
+
+  /* Foregrounds */
+  --color-fg: #dddddd;
+  --color-fg-secondary: #aaaaaa;
+  --color-fg-tertiary: #888888;
+
+  /* Accents */
+  --color-primary: #08c08c;
+  --color-primary-dim: #08c08c;
+  --color-accent: #f0ad0a;
+  --color-accent-dim: #d08d08;
+
+  /* Status Colors */
+  --color-error: #f44336;
+  --color-warning: #ff9800;
+  --color-success: #4caf50;
+  --color-info: #2196f3;
+
+  /* Borders and Shadows */
+  --color-border: var(--color-bg-tertiary);
+  --color-shadow: rgba(0, 0, 0, 0.5);
+
+  /* Interactive Elements */
+  --color-link: #0af0af;
+  --color-link-hover: #08c08c;
+}
+
+
+/* Theme Helper Classes */
+
+/* Foreground Text */
+.color-fg { color: var(--color-fg); }
+.color-fg-secondary { color: var(--color-fg-secondary); }
+.color-fg-tertiary { color: var(--color-fg-tertiary); }
+.hover-color-fg:hover { color: var(--color-fg); }
+.hover-color-fg-secondary:hover { color: var(--color-fg-secondary); }
+.hover-color-fg-tertiary:hover { color: var(--color-fg-tertiary); }
+
+/* Backgrounds */
+.bg { background-color: var(--color-bg); }
+.bg-secondary { background-color: var(--color-bg-secondary); }
+.bg-tertiary { background-color: var(--color-bg-tertiary); }
+.hover-bg:hover { background-color: var(--color-bg); }
+.hover-bg-secondary:hover { background-color: var(--color-bg-secondary); }
+.hover-bg-tertiary:hover { background-color: var(--color-bg-tertiary); }
+
+/* Borders */
+.border { border: 1px solid var(--color-border); }
+
+/* Accent Colors */
+.color-primary { color: var(--color-primary); }
+.bg-primary { background-color: var(--color-primary); }
+.color-accent { color: var(--color-accent); }
+.bg-secondary-accent { background-color: var(--color-accent); }
+
+/* Status Colors */
+.error { color: var(--color-error); }
+.bg-error { background-color: var(--color-error); }
+
+.warning { color: var(--color-warning); }
+.bg-warning { background-color: var(--color-warning); }
+
+.success { color: var(--color-success); }
+.bg-success { background-color: var(--color-success); }
+
+
+.info { color: var(--color-info); }
+.bg-info { background-color: var(--color-info); }
+
+/* Links */
+.link { color: var(--color-link); transition: color var(--transition-speed); }
+.link:hover { color: var(--color-link-hover); }
diff --git a/client/app/types/apiTypes.ts b/client/app/types/apiTypes.ts
new file mode 100644
index 0000000..e69de29
diff --git a/client/app/types/timeframe.ts b/client/app/types/timeframe.ts
new file mode 100644
index 0000000..2678554
--- /dev/null
+++ b/client/app/types/timeframe.ts
@@ -0,0 +1,9 @@
+enum Timeframe {
+  Day = 1,
+  Week,
+  Month,
+  Year,
+  AllTime,
+}
+
+export default Timeframe
\ No newline at end of file
diff --git a/client/app/utils/utils.ts b/client/app/utils/utils.ts
new file mode 100644
index 0000000..0cf0b33
--- /dev/null
+++ b/client/app/utils/utils.ts
@@ -0,0 +1,90 @@
+import Timeframe from "~/types/timeframe"
+
+const timeframeToInterval = (timeframe: Timeframe): string => {
+  switch (timeframe) {
+    case Timeframe.Day:
+      return "1 day"
+    case Timeframe.Week:
+      return "1 week"
+    case Timeframe.Month:
+      return "1 month"
+    case Timeframe.Year:
+      return "1 year"
+    case Timeframe.AllTime:
+      return "99 years"
+  }
+}
+
+function timeSince(date: Date) {
+  const now = new Date();
+  const seconds = Math.floor((now.getTime() - date.getTime()) / 1000);
+
+  const intervals = [
+    { label: 'year', seconds: 31536000 },
+    { label: 'month', seconds: 2592000 },
+    { label: 'week', seconds: 604800 },
+    { label: 'day', seconds: 86400 },
+    { label: 'hour', seconds: 3600 },
+    { label: 'minute', seconds: 60 },
+    { label: 'second', seconds: 1 },
+  ];
+
+  for (const interval of intervals) {
+    const count = Math.floor(seconds / interval.seconds);
+    if (count >= 1) {
+      return `${count} ${interval.label}${count !== 1 ? 's' : ''} ago`;
+    }
+  }
+
+  return 'just now';
+}
+
+export { timeSince }
+
+type hsl = {
+  h: number,
+  s: number,
+  l: number,
+}
+
+const hexToHSL = (hex: string): hsl => {
+  let r = 0, g = 0, b = 0;
+  hex = hex.replace('#', '');
+
+  if (hex.length === 3) {
+    r = parseInt(hex[0] + hex[0], 16);
+    g = parseInt(hex[1] + hex[1], 16);
+    b = parseInt(hex[2] + hex[2], 16);
+  } else if (hex.length === 6) {
+    r = parseInt(hex.substring(0, 2), 16);
+    g = parseInt(hex.substring(2, 4), 16);
+    b = parseInt(hex.substring(4, 6), 16);
+  }
+
+  r /= 255;
+  g /= 255;
+  b /= 255;
+
+  const max = Math.max(r, g, b), min = Math.min(r, g, b);
+  let h = 0, s = 0, l = (max + min) / 2;
+
+  if (max !== min) {
+    const d = max - min;
+    s = l > 0.5 ? d / (2 - max - min) : d / (max + min);
+    switch (max) {
+      case r: h = ((g - b) / d + (g < b ? 6 : 0)); break;
+      case g: h = ((b - r) / d + 2); break;
+      case b: h = ((r - g) / d + 4); break;
+    }
+    h /= 6;
+  }
+
+  return {
+    h: Math.round(h * 360),
+    s: Math.round(s * 100),
+    l: Math.round(l * 100)
+  };
+};
+
+export {hexToHSL}
+export type {hsl}
\ No newline at end of file
diff --git a/client/package.json b/client/package.json
new file mode 100644
index 0000000..8bea239
--- /dev/null
+++ b/client/package.json
@@ -0,0 +1,35 @@
+{
+  "name": "koito",
+  "version": "v0.0.1",
+  "private": true,
+  "type": "module",
+  "scripts": {
+    "build": "react-router build",
+    "dev": "react-router dev",
+    "start": "react-router-serve ./build/server/index.js",
+    "typecheck": "react-router typegen && tsc"
+  },
+  "dependencies": {
+    "@radix-ui/react-tabs": "^1.1.12",
+    "@react-router/node": "^7.5.3",
+    "@react-router/serve": "^7.5.3",
+    "@tanstack/react-query": "^5.80.6",
+    "color.js": "^1.2.0",
+    "isbot": "^5.1.27",
+    "lucide-react": "^0.513.0",
+    "react": "^19.1.0",
+    "react-dom": "^19.1.0",
+    "react-router": "^7.5.3"
+  },
+  "devDependencies": {
+    "@react-router/dev": "^7.5.3",
+    "@tailwindcss/vite": "^4.1.4",
+    "@types/node": "^20",
+    "@types/react": "^19.1.2",
+    "@types/react-dom": "^19.1.2",
+    "tailwindcss": "^4.1.4",
+    "typescript": "^5.8.3",
+    "vite": "^6.3.3",
+    "vite-tsconfig-paths": "^5.1.4"
+  }
+}
diff --git a/client/public/apple-touch-icon.png b/client/public/apple-touch-icon.png
new file mode 100644
index 0000000..e4636a5
Binary files /dev/null and b/client/public/apple-touch-icon.png differ
diff --git a/client/public/chart.svg b/client/public/chart.svg
new file mode 100644
index 0000000..bbcd75a
--- /dev/null
+++ b/client/public/chart.svg
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/client/public/favicon-96x96.png b/client/public/favicon-96x96.png
new file mode 100644
index 0000000..419f27d
Binary files /dev/null and b/client/public/favicon-96x96.png differ
diff --git a/client/public/favicon.ico b/client/public/favicon.ico
new file mode 100644
index 0000000..a722ee7
Binary files /dev/null and b/client/public/favicon.ico differ
diff --git a/client/public/favicon.svg b/client/public/favicon.svg
new file mode 100644
index 0000000..3b9ffca
--- /dev/null
+++ b/client/public/favicon.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/client/public/home.svg b/client/public/home.svg
new file mode 100644
index 0000000..657b7af
--- /dev/null
+++ b/client/public/home.svg
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/client/public/search.svg b/client/public/search.svg
new file mode 100644
index 0000000..45d48d1
--- /dev/null
+++ b/client/public/search.svg
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/client/public/site.webmanifest b/client/public/site.webmanifest
new file mode 100644
index 0000000..a3a27f4
--- /dev/null
+++ b/client/public/site.webmanifest
@@ -0,0 +1,21 @@
+{
+  "name": "Koito",
+  "short_name": "Koito",
+  "icons": [
+    {
+      "src": "/web-app-manifest-192x192.png",
+      "sizes": "192x192",
+      "type": "image/png",
+      "purpose": "maskable"
+    },
+    {
+      "src": "/web-app-manifest-512x512.png",
+      "sizes": "512x512",
+      "type": "image/png",
+      "purpose": "maskable"
+    }
+  ],
+  "theme_color": "#161312",
+  "background_color": "#161312",
+  "display": "standalone"
+}
\ No newline at end of file
diff --git a/client/public/web-app-manifest-192x192.png b/client/public/web-app-manifest-192x192.png
new file mode 100644
index 0000000..8f9470a
Binary files /dev/null and b/client/public/web-app-manifest-192x192.png differ
diff --git a/client/public/web-app-manifest-512x512.png b/client/public/web-app-manifest-512x512.png
new file mode 100644
index 0000000..f1bc483
Binary files /dev/null and b/client/public/web-app-manifest-512x512.png differ
diff --git a/client/public/yuu.jpg b/client/public/yuu.jpg
new file mode 100644
index 0000000..fdbaf3d
Binary files /dev/null and b/client/public/yuu.jpg differ
diff --git a/client/react-router.config.ts b/client/react-router.config.ts
new file mode 100644
index 0000000..b8b143a
--- /dev/null
+++ b/client/react-router.config.ts
@@ -0,0 +1,7 @@
+import type { Config } from "@react-router/dev/config";
+
+export default {
+  // Config options...
+  // Server-side render by default, to enable SPA mode set this to `false`
+  ssr: false,
+} satisfies Config;
diff --git a/client/tsconfig.json b/client/tsconfig.json
new file mode 100644
index 0000000..dc391a4
--- /dev/null
+++ b/client/tsconfig.json
@@ -0,0 +1,27 @@
+{
+  "include": [
+    "**/*",
+    "**/.server/**/*",
+    "**/.client/**/*",
+    ".react-router/types/**/*"
+  ],
+  "compilerOptions": {
+    "lib": ["DOM", "DOM.Iterable", "ES2022"],
+    "types": ["node", "vite/client"],
+    "target": "ES2022",
+    "module": "ES2022",
+    "moduleResolution": "bundler",
+    "jsx": "react-jsx",
+    "rootDirs": [".", "./.react-router/types"],
+    "baseUrl": ".",
+    "paths": {
+      "~/*": ["./app/*"]
+    },
+    "esModuleInterop": true,
+    "verbatimModuleSyntax": true,
+    "noEmit": true,
+    "resolveJsonModule": true,
+    "skipLibCheck": true,
+    "strict": true
+  }
+}
diff --git a/client/vite.config.ts b/client/vite.config.ts
new file mode 100644
index 0000000..de218ca
--- /dev/null
+++ b/client/vite.config.ts
@@ -0,0 +1,29 @@
+import { reactRouter } from "@react-router/dev/vite";
+import tailwindcss from "@tailwindcss/vite";
+import { defineConfig } from "vite";
+import tsconfigPaths from "vite-tsconfig-paths";
+
+const isDocker = process.env.BUILD_TARGET === 'docker';
+
+export default defineConfig({
+  plugins: [tailwindcss(), reactRouter(), tsconfigPaths()],
+  server: {
+    proxy: {
+      '/apis': {
+        target: 'http://localhost:4110',
+        changeOrigin: true,
+      },
+      '/images': {
+        target: 'http://192.168.0.153:4110',
+        changeOrigin: true,
+      }
+    }
+  },
+  resolve: {
+    alias: {
+      ...(isDocker
+        ? { 'react-dom/server': 'react-dom/server.node' }
+        : {}),
+    },
+  },
+});
\ No newline at end of file
diff --git a/client/yarn.lock b/client/yarn.lock
new file mode 100644
index 0000000..136000e
--- /dev/null
+++ b/client/yarn.lock
@@ -0,0 +1,2467 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1 + + +"@ampproject/remapping@^2.2.0", "@ampproject/remapping@^2.3.0": + version "2.3.0" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.3.0.tgz#ed441b6fa600072520ce18b43d2c8cc8caecc7f4" + integrity sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw== + dependencies: + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.24" + +"@babel/code-frame@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.27.1.tgz#200f715e66d52a23b221a9435534a91cc13ad5be" + integrity sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg== + dependencies: + "@babel/helper-validator-identifier" "^7.27.1" + js-tokens "^4.0.0" + picocolors "^1.1.1" + +"@babel/compat-data@^7.27.2": + version "7.27.5" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.27.5.tgz#7d0658ec1a8420fc866d1df1b03bea0e79934c82" + integrity sha512-KiRAp/VoJaWkkte84TvUd9qjdbZAdiqyvMxrGl1N6vzFogKmaLgoM3L1kgtLicp2HP5fBJS8JrZKLVIZGVJAVg== + +"@babel/core@^7.21.8", "@babel/core@^7.23.7": + version "7.27.4" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.27.4.tgz#cc1fc55d0ce140a1828d1dd2a2eba285adbfb3ce" + integrity sha512-bXYxrXFubeYdvB0NhD/NBB3Qi6aZeV20GOWVI47t2dkecCEoneR4NPVcb7abpXDEvejgrUfFtG6vG/zxAKmg+g== + dependencies: + "@ampproject/remapping" "^2.2.0" + "@babel/code-frame" "^7.27.1" + "@babel/generator" "^7.27.3" + "@babel/helper-compilation-targets" "^7.27.2" + "@babel/helper-module-transforms" "^7.27.3" + "@babel/helpers" "^7.27.4" + "@babel/parser" "^7.27.4" + "@babel/template" "^7.27.2" + "@babel/traverse" "^7.27.4" + "@babel/types" "^7.27.3" + convert-source-map "^2.0.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.3" + semver "^6.3.1" + +"@babel/generator@^7.21.5", "@babel/generator@^7.27.3": + version "7.27.5" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.27.5.tgz#3eb01866b345ba261b04911020cbe22dd4be8c8c" + integrity sha512-ZGhA37l0e/g2s1Cnzdix0O3aLYm66eF8aufiVteOgnwxgnRP8GoyMj7VWsgWnQbVKXyge7hqrFh2K2TQM6t1Hw== + dependencies: + "@babel/parser" "^7.27.5" + "@babel/types" "^7.27.3" + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.25" + jsesc "^3.0.2" + +"@babel/helper-annotate-as-pure@^7.27.1": + version "7.27.3" + resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz#f31fd86b915fc4daf1f3ac6976c59be7084ed9c5" + integrity sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg== + dependencies: + "@babel/types" "^7.27.3" + +"@babel/helper-compilation-targets@^7.27.2": + version "7.27.2" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz#46a0f6efab808d51d29ce96858dd10ce8732733d" + integrity sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ== + dependencies: + "@babel/compat-data" "^7.27.2" + "@babel/helper-validator-option" "^7.27.1" + browserslist "^4.24.0" + lru-cache "^5.1.1" + semver "^6.3.1" + +"@babel/helper-create-class-features-plugin@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.27.1.tgz#5bee4262a6ea5ddc852d0806199eb17ca3de9281" + integrity sha512-QwGAmuvM17btKU5VqXfb+Giw4JcN0hjuufz3DYnpeVDvZLAObloM77bhMXiqry3Iio+Ai4phVRDwl6WU10+r5A== + 
dependencies: + "@babel/helper-annotate-as-pure" "^7.27.1" + "@babel/helper-member-expression-to-functions" "^7.27.1" + "@babel/helper-optimise-call-expression" "^7.27.1" + "@babel/helper-replace-supers" "^7.27.1" + "@babel/helper-skip-transparent-expression-wrappers" "^7.27.1" + "@babel/traverse" "^7.27.1" + semver "^6.3.1" + +"@babel/helper-member-expression-to-functions@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.27.1.tgz#ea1211276be93e798ce19037da6f06fbb994fa44" + integrity sha512-E5chM8eWjTp/aNoVpcbfM7mLxu9XGLWYise2eBKGQomAk/Mb4XoxyqXTZbuTohbsl8EKqdlMhnDI2CCLfcs9wA== + dependencies: + "@babel/traverse" "^7.27.1" + "@babel/types" "^7.27.1" + +"@babel/helper-module-imports@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz#7ef769a323e2655e126673bb6d2d6913bbead204" + integrity sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w== + dependencies: + "@babel/traverse" "^7.27.1" + "@babel/types" "^7.27.1" + +"@babel/helper-module-transforms@^7.27.1", "@babel/helper-module-transforms@^7.27.3": + version "7.27.3" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.27.3.tgz#db0bbcfba5802f9ef7870705a7ef8788508ede02" + integrity sha512-dSOvYwvyLsWBeIRyOeHXp5vPj5l1I011r52FM1+r1jCERv+aFXYk4whgQccYEGYxK2H3ZAIA8nuPkQ0HaUo3qg== + dependencies: + "@babel/helper-module-imports" "^7.27.1" + "@babel/helper-validator-identifier" "^7.27.1" + "@babel/traverse" "^7.27.3" + +"@babel/helper-optimise-call-expression@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.27.1.tgz#c65221b61a643f3e62705e5dd2b5f115e35f9200" + integrity sha512-URMGH08NzYFhubNSGJrpUEphGKQwMQYBySzat5cAByY1/YgIRkULnIy3tAMeszlL/so2HbeilYloUmSpd7GdVw== + dependencies: + "@babel/types" "^7.27.1" + +"@babel/helper-plugin-utils@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz#ddb2f876534ff8013e6c2b299bf4d39b3c51d44c" + integrity sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw== + +"@babel/helper-replace-supers@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.27.1.tgz#b1ed2d634ce3bdb730e4b52de30f8cccfd692bc0" + integrity sha512-7EHz6qDZc8RYS5ElPoShMheWvEgERonFCs7IAonWLLUTXW59DP14bCZt89/GKyreYn8g3S83m21FelHKbeDCKA== + dependencies: + "@babel/helper-member-expression-to-functions" "^7.27.1" + "@babel/helper-optimise-call-expression" "^7.27.1" + "@babel/traverse" "^7.27.1" + +"@babel/helper-skip-transparent-expression-wrappers@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.27.1.tgz#62bb91b3abba8c7f1fec0252d9dbea11b3ee7a56" + integrity sha512-Tub4ZKEXqbPjXgWLl2+3JpQAYBJ8+ikpQ2Ocj/q/r0LwE3UhENh7EUabyHjz2kCEsrRY83ew2DQdHluuiDQFzg== + dependencies: + "@babel/traverse" "^7.27.1" + "@babel/types" "^7.27.1" + +"@babel/helper-string-parser@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz#54da796097ab19ce67ed9f88b47bb2ec49367687" + integrity 
sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA== + +"@babel/helper-validator-identifier@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz#a7054dcc145a967dd4dc8fee845a57c1316c9df8" + integrity sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow== + +"@babel/helper-validator-option@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz#fa52f5b1e7db1ab049445b421c4471303897702f" + integrity sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg== + +"@babel/helpers@^7.27.4": + version "7.27.6" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.27.6.tgz#6456fed15b2cb669d2d1fabe84b66b34991d812c" + integrity sha512-muE8Tt8M22638HU31A3CgfSUciwz1fhATfoVai05aPXGor//CdWDCbnlY1yvBPo07njuVOCNGCSp/GTt12lIug== + dependencies: + "@babel/template" "^7.27.2" + "@babel/types" "^7.27.6" + +"@babel/parser@^7.21.8", "@babel/parser@^7.23.6", "@babel/parser@^7.27.2", "@babel/parser@^7.27.4", "@babel/parser@^7.27.5": + version "7.27.5" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.27.5.tgz#ed22f871f110aa285a6fd934a0efed621d118826" + integrity sha512-OsQd175SxWkGlzbny8J3K8TnnDD0N3lrIUtB92xwyRpzaenGZhxDvxN/JgU00U3CDZNj9tPuDJ5H0WS4Nt3vKg== + dependencies: + "@babel/types" "^7.27.3" + +"@babel/plugin-syntax-decorators@^7.22.10": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-decorators/-/plugin-syntax-decorators-7.27.1.tgz#ee7dd9590aeebc05f9d4c8c0560007b05979a63d" + integrity sha512-YMq8Z87Lhl8EGkmb0MwYkt36QnxC+fzCgrl66ereamPlYToRpIk5nUjKUY3QKLWq8mwUB1BgbeXcTJhZOCDg5A== + dependencies: + "@babel/helper-plugin-utils" "^7.27.1" + +"@babel/plugin-syntax-jsx@^7.21.4", "@babel/plugin-syntax-jsx@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.27.1.tgz#2f9beb5eff30fa507c5532d107daac7b888fa34c" + integrity sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w== + dependencies: + "@babel/helper-plugin-utils" "^7.27.1" + +"@babel/plugin-syntax-typescript@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz#5147d29066a793450f220c63fa3a9431b7e6dd18" + integrity sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ== + dependencies: + "@babel/helper-plugin-utils" "^7.27.1" + +"@babel/plugin-transform-modules-commonjs@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.27.1.tgz#8e44ed37c2787ecc23bdc367f49977476614e832" + integrity sha512-OJguuwlTYlN0gBZFRPqwOGNWssZjfIUdS7HMYtN8c1KmwpwHFBwTeFZrg9XZa+DFTitWOW5iTAG7tyCUPsCCyw== + dependencies: + "@babel/helper-module-transforms" "^7.27.1" + "@babel/helper-plugin-utils" "^7.27.1" + +"@babel/plugin-transform-typescript@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.27.1.tgz#d3bb65598bece03f773111e88cc4e8e5070f1140" + integrity sha512-Q5sT5+O4QUebHdbwKedFBEwRLb02zJ7r4A5Gg2hUoLuU3FjdMcyqcywqUrLCaDsFCxzokf7u9kuy7qz51YUuAg== + dependencies: + "@babel/helper-annotate-as-pure" "^7.27.1" + 
"@babel/helper-create-class-features-plugin" "^7.27.1" + "@babel/helper-plugin-utils" "^7.27.1" + "@babel/helper-skip-transparent-expression-wrappers" "^7.27.1" + "@babel/plugin-syntax-typescript" "^7.27.1" + +"@babel/preset-typescript@^7.21.5": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.27.1.tgz#190742a6428d282306648a55b0529b561484f912" + integrity sha512-l7WfQfX0WK4M0v2RudjuQK4u99BS6yLHYEmdtVPP7lKV013zr9DygFuWNlnbvQ9LR+LS0Egz/XAvGx5U9MX0fQ== + dependencies: + "@babel/helper-plugin-utils" "^7.27.1" + "@babel/helper-validator-option" "^7.27.1" + "@babel/plugin-syntax-jsx" "^7.27.1" + "@babel/plugin-transform-modules-commonjs" "^7.27.1" + "@babel/plugin-transform-typescript" "^7.27.1" + +"@babel/template@^7.27.2": + version "7.27.2" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.27.2.tgz#fa78ceed3c4e7b63ebf6cb39e5852fca45f6809d" + integrity sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw== + dependencies: + "@babel/code-frame" "^7.27.1" + "@babel/parser" "^7.27.2" + "@babel/types" "^7.27.1" + +"@babel/traverse@^7.23.2", "@babel/traverse@^7.23.7", "@babel/traverse@^7.27.1", "@babel/traverse@^7.27.3", "@babel/traverse@^7.27.4": + version "7.27.4" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.27.4.tgz#b0045ac7023c8472c3d35effd7cc9ebd638da6ea" + integrity sha512-oNcu2QbHqts9BtOWJosOVJapWjBDSxGCpFvikNR5TGDYDQf3JwpIoMzIKrvfoti93cLfPJEG4tH9SPVeyCGgdA== + dependencies: + "@babel/code-frame" "^7.27.1" + "@babel/generator" "^7.27.3" + "@babel/parser" "^7.27.4" + "@babel/template" "^7.27.2" + "@babel/types" "^7.27.3" + debug "^4.3.1" + globals "^11.1.0" + +"@babel/types@^7.22.5", "@babel/types@^7.23.6", "@babel/types@^7.27.1", "@babel/types@^7.27.3", "@babel/types@^7.27.6": + version "7.27.6" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.27.6.tgz#a434ca7add514d4e646c80f7375c0aa2befc5535" + integrity sha512-ETyHEk2VHHvl9b9jZP5IHPavHYk57EhanlRRuae9XCpb/j5bDCbPPMOBfCWhnl/7EDJz0jEMCi/RhccCE8r1+Q== + dependencies: + "@babel/helper-string-parser" "^7.27.1" + "@babel/helper-validator-identifier" "^7.27.1" + +"@emnapi/core@^1.4.3": + version "1.4.3" + resolved "https://registry.yarnpkg.com/@emnapi/core/-/core-1.4.3.tgz#9ac52d2d5aea958f67e52c40a065f51de59b77d6" + integrity sha512-4m62DuCE07lw01soJwPiBGC0nAww0Q+RY70VZ+n49yDIO13yyinhbWCeNnaob0lakDtWQzSdtNWzJeOJt2ma+g== + dependencies: + "@emnapi/wasi-threads" "1.0.2" + tslib "^2.4.0" + +"@emnapi/runtime@^1.4.3": + version "1.4.3" + resolved "https://registry.yarnpkg.com/@emnapi/runtime/-/runtime-1.4.3.tgz#c0564665c80dc81c448adac23f9dfbed6c838f7d" + integrity sha512-pBPWdu6MLKROBX05wSNKcNb++m5Er+KQ9QkB+WVM+pW2Kx9hoSrVTnu3BdkI5eBLZoKu/J6mW/B6i6bJB2ytXQ== + dependencies: + tslib "^2.4.0" + +"@emnapi/wasi-threads@1.0.2", "@emnapi/wasi-threads@^1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@emnapi/wasi-threads/-/wasi-threads-1.0.2.tgz#977f44f844eac7d6c138a415a123818c655f874c" + integrity sha512-5n3nTJblwRi8LlXkJ9eBzu+kZR8Yxcc7ubakyQTFzPMtIhFpUBRbsnc2Dv88IZDIbCDlBiWrknhB4Lsz7mg6BA== + dependencies: + tslib "^2.4.0" + +"@esbuild/aix-ppc64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.25.5.tgz#4e0f91776c2b340e75558f60552195f6fad09f18" + integrity sha512-9o3TMmpmftaCMepOdA5k/yDw8SfInyzWWTjYTFCX3kPSDJMROQTb8jg+h9Cnwnmm1vOzvxN7gIfB5V2ewpjtGA== + +"@esbuild/android-arm64@0.25.5": + version "0.25.5" + resolved 
"https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.25.5.tgz#bc766407f1718923f6b8079c8c61bf86ac3a6a4f" + integrity sha512-VGzGhj4lJO+TVGV1v8ntCZWJktV7SGCs3Pn1GRWI1SBFtRALoomm8k5E9Pmwg3HOAal2VDc2F9+PM/rEY6oIDg== + +"@esbuild/android-arm@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.25.5.tgz#4290d6d3407bae3883ad2cded1081a234473ce26" + integrity sha512-AdJKSPeEHgi7/ZhuIPtcQKr5RQdo6OO2IL87JkianiMYMPbCtot9fxPbrMiBADOWWm3T2si9stAiVsGbTQFkbA== + +"@esbuild/android-x64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.25.5.tgz#40c11d9cbca4f2406548c8a9895d321bc3b35eff" + integrity sha512-D2GyJT1kjvO//drbRT3Hib9XPwQeWd9vZoBJn+bu/lVsOZ13cqNdDeqIF/xQ5/VmWvMduP6AmXvylO/PIc2isw== + +"@esbuild/darwin-arm64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.25.5.tgz#49d8bf8b1df95f759ac81eb1d0736018006d7e34" + integrity sha512-GtaBgammVvdF7aPIgH2jxMDdivezgFu6iKpmT+48+F8Hhg5J/sfnDieg0aeG/jfSvkYQU2/pceFPDKlqZzwnfQ== + +"@esbuild/darwin-x64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.25.5.tgz#e27a5d92a14886ef1d492fd50fc61a2d4d87e418" + integrity sha512-1iT4FVL0dJ76/q1wd7XDsXrSW+oLoquptvh4CLR4kITDtqi2e/xwXwdCVH8hVHU43wgJdsq7Gxuzcs6Iq/7bxQ== + +"@esbuild/freebsd-arm64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.5.tgz#97cede59d638840ca104e605cdb9f1b118ba0b1c" + integrity sha512-nk4tGP3JThz4La38Uy/gzyXtpkPW8zSAmoUhK9xKKXdBCzKODMc2adkB2+8om9BDYugz+uGV7sLmpTYzvmz6Sw== + +"@esbuild/freebsd-x64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.25.5.tgz#71c77812042a1a8190c3d581e140d15b876b9c6f" + integrity sha512-PrikaNjiXdR2laW6OIjlbeuCPrPaAl0IwPIaRv+SMV8CiM8i2LqVUHFC1+8eORgWyY7yhQY+2U2fA55mBzReaw== + +"@esbuild/linux-arm64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.25.5.tgz#f7b7c8f97eff8ffd2e47f6c67eb5c9765f2181b8" + integrity sha512-Z9kfb1v6ZlGbWj8EJk9T6czVEjjq2ntSYLY2cw6pAZl4oKtfgQuS4HOq41M/BcoLPzrUbNd+R4BXFyH//nHxVg== + +"@esbuild/linux-arm@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.25.5.tgz#2a0be71b6cd8201fa559aea45598dffabc05d911" + integrity sha512-cPzojwW2okgh7ZlRpcBEtsX7WBuqbLrNXqLU89GxWbNt6uIg78ET82qifUy3W6OVww6ZWobWub5oqZOVtwolfw== + +"@esbuild/linux-ia32@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.25.5.tgz#763414463cd9ea6fa1f96555d2762f9f84c61783" + integrity sha512-sQ7l00M8bSv36GLV95BVAdhJ2QsIbCuCjh/uYrWiMQSUuV+LpXwIqhgJDcvMTj+VsQmqAHL2yYaasENvJ7CDKA== + +"@esbuild/linux-loong64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.25.5.tgz#428cf2213ff786a502a52c96cf29d1fcf1eb8506" + integrity sha512-0ur7ae16hDUC4OL5iEnDb0tZHDxYmuQyhKhsPBV8f99f6Z9KQM02g33f93rNH5A30agMS46u2HP6qTdEt6Q1kg== + +"@esbuild/linux-mips64el@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.25.5.tgz#5cbcc7fd841b4cd53358afd33527cd394e325d96" + integrity sha512-kB/66P1OsHO5zLz0i6X0RxlQ+3cu0mkxS3TKFvkb5lin6uwZ/ttOkP3Z8lfR9mJOBk14ZwZ9182SIIWFGNmqmg== + +"@esbuild/linux-ppc64@0.25.5": + version "0.25.5" + resolved 
"https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.25.5.tgz#0d954ab39ce4f5e50f00c4f8c4fd38f976c13ad9" + integrity sha512-UZCmJ7r9X2fe2D6jBmkLBMQetXPXIsZjQJCjgwpVDz+YMcS6oFR27alkgGv3Oqkv07bxdvw7fyB71/olceJhkQ== + +"@esbuild/linux-riscv64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.25.5.tgz#0e7dd30730505abd8088321e8497e94b547bfb1e" + integrity sha512-kTxwu4mLyeOlsVIFPfQo+fQJAV9mh24xL+y+Bm6ej067sYANjyEw1dNHmvoqxJUCMnkBdKpvOn0Ahql6+4VyeA== + +"@esbuild/linux-s390x@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.25.5.tgz#5669af81327a398a336d7e40e320b5bbd6e6e72d" + integrity sha512-K2dSKTKfmdh78uJ3NcWFiqyRrimfdinS5ErLSn3vluHNeHVnBAFWC8a4X5N+7FgVE1EjXS1QDZbpqZBjfrqMTQ== + +"@esbuild/linux-x64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.25.5.tgz#b2357dd153aa49038967ddc1ffd90c68a9d2a0d4" + integrity sha512-uhj8N2obKTE6pSZ+aMUbqq+1nXxNjZIIjCjGLfsWvVpy7gKCOL6rsY1MhRh9zLtUtAI7vpgLMK6DxjO8Qm9lJw== + +"@esbuild/netbsd-arm64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.5.tgz#53b4dfb8fe1cee93777c9e366893bd3daa6ba63d" + integrity sha512-pwHtMP9viAy1oHPvgxtOv+OkduK5ugofNTVDilIzBLpoWAM16r7b/mxBvfpuQDpRQFMfuVr5aLcn4yveGvBZvw== + +"@esbuild/netbsd-x64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.25.5.tgz#a0206f6314ce7dc8713b7732703d0f58de1d1e79" + integrity sha512-WOb5fKrvVTRMfWFNCroYWWklbnXH0Q5rZppjq0vQIdlsQKuw6mdSihwSo4RV/YdQ5UCKKvBy7/0ZZYLBZKIbwQ== + +"@esbuild/openbsd-arm64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.5.tgz#2a796c87c44e8de78001d808c77d948a21ec22fd" + integrity sha512-7A208+uQKgTxHd0G0uqZO8UjK2R0DDb4fDmERtARjSHWxqMTye4Erz4zZafx7Di9Cv+lNHYuncAkiGFySoD+Mw== + +"@esbuild/openbsd-x64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.25.5.tgz#28d0cd8909b7fa3953af998f2b2ed34f576728f0" + integrity sha512-G4hE405ErTWraiZ8UiSoesH8DaCsMm0Cay4fsFWOOUcz8b8rC6uCvnagr+gnioEjWn0wC+o1/TAHt+It+MpIMg== + +"@esbuild/sunos-x64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.25.5.tgz#a28164f5b997e8247d407e36c90d3fd5ddbe0dc5" + integrity sha512-l+azKShMy7FxzY0Rj4RCt5VD/q8mG/e+mDivgspo+yL8zW7qEwctQ6YqKX34DTEleFAvCIUviCFX1SDZRSyMQA== + +"@esbuild/win32-arm64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.25.5.tgz#6eadbead38e8bd12f633a5190e45eff80e24007e" + integrity sha512-O2S7SNZzdcFG7eFKgvwUEZ2VG9D/sn/eIiz8XRZ1Q/DO5a3s76Xv0mdBzVM5j5R639lXQmPmSo0iRpHqUUrsxw== + +"@esbuild/win32-ia32@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.25.5.tgz#bab6288005482f9ed2adb9ded7e88eba9a62cc0d" + integrity sha512-onOJ02pqs9h1iMJ1PQphR+VZv8qBMQ77Klcsqv9CNW2w6yLqoURLcgERAIurY6QE63bbLuqgP9ATqajFLK5AMQ== + +"@esbuild/win32-x64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.25.5.tgz#7fc114af5f6563f19f73324b5d5ff36ece0803d1" + integrity sha512-TXv6YnJ8ZMVdX+SXWVBo/0p8LTcrUYngpWjvm91TMjjBQii7Oz11Lw5lbDV5Y0TzuhSJHwiH4hEtC1I42mMS0g== + +"@isaacs/cliui@^8.0.2": + version "8.0.2" + resolved 
"https://registry.yarnpkg.com/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550" + integrity sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA== + dependencies: + string-width "^5.1.2" + string-width-cjs "npm:string-width@^4.2.0" + strip-ansi "^7.0.1" + strip-ansi-cjs "npm:strip-ansi@^6.0.1" + wrap-ansi "^8.1.0" + wrap-ansi-cjs "npm:wrap-ansi@^7.0.0" + +"@isaacs/fs-minipass@^4.0.0": + version "4.0.1" + resolved "https://registry.yarnpkg.com/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz#2d59ae3ab4b38fb4270bfa23d30f8e2e86c7fe32" + integrity sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w== + dependencies: + minipass "^7.0.4" + +"@jridgewell/gen-mapping@^0.3.5": + version "0.3.8" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz#4f0e06362e01362f823d348f1872b08f666d8142" + integrity sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA== + dependencies: + "@jridgewell/set-array" "^1.2.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.24" + +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6" + integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== + +"@jridgewell/set-array@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.2.1.tgz#558fb6472ed16a4c850b889530e6b36438c49280" + integrity sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A== + +"@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14", "@jridgewell/sourcemap-codec@^1.5.0": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz#3188bcb273a414b0d215fd22a58540b989b9409a" + integrity sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ== + +"@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25": + version "0.3.25" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0" + integrity sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" + +"@mjackson/node-fetch-server@^0.2.0": + version "0.2.0" + resolved "https://registry.yarnpkg.com/@mjackson/node-fetch-server/-/node-fetch-server-0.2.0.tgz#577c0c25d8aae9f69a97738b7b0d03d1471cdc49" + integrity sha512-EMlH1e30yzmTpGLQjlFmaDAjyOeZhng1/XCd7DExR8PNAnG/G1tyruZxEoUe11ClnwGhGrtsdnyyUx1frSzjng== + +"@napi-rs/wasm-runtime@^0.2.10": + version "0.2.11" + resolved "https://registry.yarnpkg.com/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.11.tgz#192c1610e1625048089ab4e35bc0649ce478500e" + integrity sha512-9DPkXtvHydrcOsopiYpUgPHpmj0HWZKMUnL2dZqpvC42lsratuBG06V5ipyno0fUek5VlFsNQ+AcFATSrJXgMA== + dependencies: + "@emnapi/core" "^1.4.3" + "@emnapi/runtime" "^1.4.3" + "@tybys/wasm-util" "^0.9.0" + +"@npmcli/git@^4.1.0": + version "4.1.0" + resolved "https://registry.yarnpkg.com/@npmcli/git/-/git-4.1.0.tgz#ab0ad3fd82bc4d8c1351b6c62f0fa56e8fe6afa6" + integrity sha512-9hwoB3gStVfa0N31ymBmrX+GuDGdVA/QWShZVqE0HK2Af+7QGGrCTbZia/SW0ImUTjTne7SP91qxDmtXvDHRPQ== + dependencies: + 
"@npmcli/promise-spawn" "^6.0.0" + lru-cache "^7.4.4" + npm-pick-manifest "^8.0.0" + proc-log "^3.0.0" + promise-inflight "^1.0.1" + promise-retry "^2.0.1" + semver "^7.3.5" + which "^3.0.0" + +"@npmcli/package-json@^4.0.1": + version "4.0.1" + resolved "https://registry.yarnpkg.com/@npmcli/package-json/-/package-json-4.0.1.tgz#1a07bf0e086b640500791f6bf245ff43cc27fa37" + integrity sha512-lRCEGdHZomFsURroh522YvA/2cVb9oPIJrjHanCJZkiasz1BzcnLr3tBJhlV7S86MBJBuAQ33is2D60YitZL2Q== + dependencies: + "@npmcli/git" "^4.1.0" + glob "^10.2.2" + hosted-git-info "^6.1.1" + json-parse-even-better-errors "^3.0.0" + normalize-package-data "^5.0.0" + proc-log "^3.0.0" + semver "^7.5.3" + +"@npmcli/promise-spawn@^6.0.0": + version "6.0.2" + resolved "https://registry.yarnpkg.com/@npmcli/promise-spawn/-/promise-spawn-6.0.2.tgz#c8bc4fa2bd0f01cb979d8798ba038f314cfa70f2" + integrity sha512-gGq0NJkIGSwdbUt4yhdF8ZrmkGKVz9vAdVzpOfnom+V8PLSmSOVhZwbNvZZS1EYcJN5hzzKBxmmVVAInM6HQLg== + dependencies: + which "^3.0.0" + +"@pkgjs/parseargs@^0.11.0": + version "0.11.0" + resolved "https://registry.yarnpkg.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33" + integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg== + +"@radix-ui/primitive@1.1.2": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@radix-ui/primitive/-/primitive-1.1.2.tgz#83f415c4425f21e3d27914c12b3272a32e3dae65" + integrity sha512-XnbHrrprsNqZKQhStrSwgRUQzoCI1glLzdw79xiZPoofhGICeZRSQ3dIxAKH1gb3OHfNf4d6f+vAv3kil2eggA== + +"@radix-ui/react-collection@1.1.7": + version "1.1.7" + resolved "https://registry.yarnpkg.com/@radix-ui/react-collection/-/react-collection-1.1.7.tgz#d05c25ca9ac4695cc19ba91f42f686e3ea2d9aec" + integrity sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw== + dependencies: + "@radix-ui/react-compose-refs" "1.1.2" + "@radix-ui/react-context" "1.1.2" + "@radix-ui/react-primitive" "2.1.3" + "@radix-ui/react-slot" "1.2.3" + +"@radix-ui/react-compose-refs@1.1.2": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.2.tgz#a2c4c47af6337048ee78ff6dc0d090b390d2bb30" + integrity sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg== + +"@radix-ui/react-context@1.1.2": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@radix-ui/react-context/-/react-context-1.1.2.tgz#61628ef269a433382c364f6f1e3788a6dc213a36" + integrity sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA== + +"@radix-ui/react-direction@1.1.1": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-direction/-/react-direction-1.1.1.tgz#39e5a5769e676c753204b792fbe6cf508e550a14" + integrity sha512-1UEWRX6jnOA2y4H5WczZ44gOOjTEmlqv1uNW4GAJEO5+bauCBhv8snY65Iw5/VOS/ghKN9gr2KjnLKxrsvoMVw== + +"@radix-ui/react-id@1.1.1": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-id/-/react-id-1.1.1.tgz#1404002e79a03fe062b7e3864aa01e24bd1471f7" + integrity sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg== + dependencies: + "@radix-ui/react-use-layout-effect" "1.1.1" + +"@radix-ui/react-presence@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@radix-ui/react-presence/-/react-presence-1.1.4.tgz#253ac0ad4946c5b4a9c66878335f5cf07c967ced" + integrity 
sha512-ueDqRbdc4/bkaQT3GIpLQssRlFgWaL/U2z/S31qRwwLWoxHLgry3SIfCwhxeQNbirEUXFa+lq3RL3oBYXtcmIA== + dependencies: + "@radix-ui/react-compose-refs" "1.1.2" + "@radix-ui/react-use-layout-effect" "1.1.1" + +"@radix-ui/react-primitive@2.1.3": + version "2.1.3" + resolved "https://registry.yarnpkg.com/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz#db9b8bcff49e01be510ad79893fb0e4cda50f1bc" + integrity sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ== + dependencies: + "@radix-ui/react-slot" "1.2.3" + +"@radix-ui/react-roving-focus@1.1.10": + version "1.1.10" + resolved "https://registry.yarnpkg.com/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.10.tgz#46030496d2a490c4979d29a7e1252465e51e4b0b" + integrity sha512-dT9aOXUen9JSsxnMPv/0VqySQf5eDQ6LCk5Sw28kamz8wSOW2bJdlX2Bg5VUIIcV+6XlHpWTIuTPCf/UNIyq8Q== + dependencies: + "@radix-ui/primitive" "1.1.2" + "@radix-ui/react-collection" "1.1.7" + "@radix-ui/react-compose-refs" "1.1.2" + "@radix-ui/react-context" "1.1.2" + "@radix-ui/react-direction" "1.1.1" + "@radix-ui/react-id" "1.1.1" + "@radix-ui/react-primitive" "2.1.3" + "@radix-ui/react-use-callback-ref" "1.1.1" + "@radix-ui/react-use-controllable-state" "1.2.2" + +"@radix-ui/react-slot@1.2.3": + version "1.2.3" + resolved "https://registry.yarnpkg.com/@radix-ui/react-slot/-/react-slot-1.2.3.tgz#502d6e354fc847d4169c3bc5f189de777f68cfe1" + integrity sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A== + dependencies: + "@radix-ui/react-compose-refs" "1.1.2" + +"@radix-ui/react-tabs@^1.1.12": + version "1.1.12" + resolved "https://registry.yarnpkg.com/@radix-ui/react-tabs/-/react-tabs-1.1.12.tgz#99b3522c73db9263f429a6d0f5a9acb88df3b129" + integrity sha512-GTVAlRVrQrSw3cEARM0nAx73ixrWDPNZAruETn3oHCNP6SbZ/hNxdxp+u7VkIEv3/sFoLq1PfcHrl7Pnp0CDpw== + dependencies: + "@radix-ui/primitive" "1.1.2" + "@radix-ui/react-context" "1.1.2" + "@radix-ui/react-direction" "1.1.1" + "@radix-ui/react-id" "1.1.1" + "@radix-ui/react-presence" "1.1.4" + "@radix-ui/react-primitive" "2.1.3" + "@radix-ui/react-roving-focus" "1.1.10" + "@radix-ui/react-use-controllable-state" "1.2.2" + +"@radix-ui/react-use-callback-ref@1.1.1": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.1.tgz#62a4dba8b3255fdc5cc7787faeac1c6e4cc58d40" + integrity sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg== + +"@radix-ui/react-use-controllable-state@1.2.2": + version "1.2.2" + resolved "https://registry.yarnpkg.com/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.2.2.tgz#905793405de57d61a439f4afebbb17d0645f3190" + integrity sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg== + dependencies: + "@radix-ui/react-use-effect-event" "0.0.2" + "@radix-ui/react-use-layout-effect" "1.1.1" + +"@radix-ui/react-use-effect-event@0.0.2": + version "0.0.2" + resolved "https://registry.yarnpkg.com/@radix-ui/react-use-effect-event/-/react-use-effect-event-0.0.2.tgz#090cf30d00a4c7632a15548512e9152217593907" + integrity sha512-Qp8WbZOBe+blgpuUT+lw2xheLP8q0oatc9UpmiemEICxGvFLYmHm9QowVZGHtJlGbS6A6yJ3iViad/2cVjnOiA== + dependencies: + "@radix-ui/react-use-layout-effect" "1.1.1" + +"@radix-ui/react-use-layout-effect@1.1.1": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.1.1.tgz#0c4230a9eed49d4589c967e2d9c0d9d60a23971e" + 
integrity sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ== + +"@react-router/dev@^7.5.3": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@react-router/dev/-/dev-7.6.2.tgz#c30b4ea5812dc8b8c2c303229856d2a60f4cdbd8" + integrity sha512-BuG83Ug2C/P+zMYErTz/KKuXoxbOefh3oR66r13XWG9txwooC9nt2QDt2u8yt7Eo/9BATnx+TmXnOHEWqMyB8w== + dependencies: + "@babel/core" "^7.21.8" + "@babel/generator" "^7.21.5" + "@babel/parser" "^7.21.8" + "@babel/plugin-syntax-decorators" "^7.22.10" + "@babel/plugin-syntax-jsx" "^7.21.4" + "@babel/preset-typescript" "^7.21.5" + "@babel/traverse" "^7.23.2" + "@babel/types" "^7.22.5" + "@npmcli/package-json" "^4.0.1" + "@react-router/node" "7.6.2" + arg "^5.0.1" + babel-dead-code-elimination "^1.0.6" + chokidar "^4.0.0" + dedent "^1.5.3" + es-module-lexer "^1.3.1" + exit-hook "2.2.1" + fs-extra "^10.0.0" + jsesc "3.0.2" + lodash "^4.17.21" + pathe "^1.1.2" + picocolors "^1.1.1" + prettier "^2.7.1" + react-refresh "^0.14.0" + semver "^7.3.7" + set-cookie-parser "^2.6.0" + valibot "^0.41.0" + vite-node "^3.1.4" + +"@react-router/express@7.6.2": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@react-router/express/-/express-7.6.2.tgz#c91ffe73c07151180d08e9a2961bd0f1d677e389" + integrity sha512-b1XwP2ZknWG6yNl1aEAJ+yx0Alk85+iLk5y521MOhh2lCKPNyFOuX4Gw8hI3E4IXgDEPqiZ+lipmrIb7XkLNZQ== + dependencies: + "@react-router/node" "7.6.2" + +"@react-router/node@7.6.2", "@react-router/node@^7.5.3": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@react-router/node/-/node-7.6.2.tgz#1aee2344776b575d145528d166dc3b5f8ade8bb7" + integrity sha512-KrxfnfJVU1b+020VKemkxpc7ssItsAL8MOJthcoGwPyKwrgovdwc+8NKJUqw3P7yk/Si0ZmVh9QYAzi9qF96dg== + dependencies: + "@mjackson/node-fetch-server" "^0.2.0" + source-map-support "^0.5.21" + stream-slice "^0.1.2" + undici "^6.19.2" + +"@react-router/serve@^7.5.3": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@react-router/serve/-/serve-7.6.2.tgz#8c961c9c3850701e0a4e31c795c91092418809cf" + integrity sha512-VTdvB8kdZEtYeQML9TFJiIZnPefv94LfmLx5qQ0SJSesel/hQolnfpWEkLJ9WtBO+/10CulAvg6y5UwiceUFTQ== + dependencies: + "@react-router/express" "7.6.2" + "@react-router/node" "7.6.2" + compression "^1.7.4" + express "^4.19.2" + get-port "5.1.1" + morgan "^1.10.0" + source-map-support "^0.5.21" + +"@rollup/rollup-android-arm-eabi@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.42.0.tgz#8baae15a6a27f18b7c5be420e00ab08c7d3dd6f4" + integrity sha512-gldmAyS9hpj+H6LpRNlcjQWbuKUtb94lodB9uCz71Jm+7BxK1VIOo7y62tZZwxhA7j1ylv/yQz080L5WkS+LoQ== + +"@rollup/rollup-android-arm64@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.42.0.tgz#6798394241d1b26f8b44d2bbd8de9c12eb9dd6e6" + integrity sha512-bpRipfTgmGFdCZDFLRvIkSNO1/3RGS74aWkJJTFJBH7h3MRV4UijkaEUeOMbi9wxtxYmtAbVcnMtHTPBhLEkaw== + +"@rollup/rollup-darwin-arm64@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.42.0.tgz#8642482ac2d21e7747a79b1cc3293d5711fefea3" + integrity sha512-JxHtA081izPBVCHLKnl6GEA0w3920mlJPLh89NojpU2GsBSB6ypu4erFg/Wx1qbpUbepn0jY4dVWMGZM8gplgA== + +"@rollup/rollup-darwin-x64@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.42.0.tgz#e15568b2fea4fdc526e86424150df9ec511fbaaf" + integrity 
sha512-rv5UZaWVIJTDMyQ3dCEK+m0SAn6G7H3PRc2AZmExvbDvtaDc+qXkei0knQWcI3+c9tEs7iL/4I4pTQoPbNL2SA== + +"@rollup/rollup-freebsd-arm64@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.42.0.tgz#0ebdb3b470ccf6acf0eacae8177f34e27477559f" + integrity sha512-fJcN4uSGPWdpVmvLuMtALUFwCHgb2XiQjuECkHT3lWLZhSQ3MBQ9pq+WoWeJq2PrNxr9rPM1Qx+IjyGj8/c6zQ== + +"@rollup/rollup-freebsd-x64@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.42.0.tgz#675808bf4fe7c7fc454326510ab3be0857626d41" + integrity sha512-CziHfyzpp8hJpCVE/ZdTizw58gr+m7Y2Xq5VOuCSrZR++th2xWAz4Nqk52MoIIrV3JHtVBhbBsJcAxs6NammOQ== + +"@rollup/rollup-linux-arm-gnueabihf@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.42.0.tgz#05e881cc69f59415fe8c1af13554c60c7c49d114" + integrity sha512-UsQD5fyLWm2Fe5CDM7VPYAo+UC7+2Px4Y+N3AcPh/LdZu23YcuGPegQly++XEVaC8XUTFVPscl5y5Cl1twEI4A== + +"@rollup/rollup-linux-arm-musleabihf@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.42.0.tgz#eb990bf7c3c37749c3d5afed34e6adec1c927963" + integrity sha512-/i8NIrlgc/+4n1lnoWl1zgH7Uo0XK5xK3EDqVTf38KvyYgCU/Rm04+o1VvvzJZnVS5/cWSd07owkzcVasgfIkQ== + +"@rollup/rollup-linux-arm64-gnu@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.42.0.tgz#3deeacff589e7f370aca5cef29d68d4c8fa0033c" + integrity sha512-eoujJFOvoIBjZEi9hJnXAbWg+Vo1Ov8n/0IKZZcPZ7JhBzxh2A+2NFyeMZIRkY9iwBvSjloKgcvnjTbGKHE44Q== + +"@rollup/rollup-linux-arm64-musl@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.42.0.tgz#6db81ab065ef278faf83d875c77ff9cdd51abcfd" + integrity sha512-/3NrcOWFSR7RQUQIuZQChLND36aTU9IYE4j+TB40VU78S+RA0IiqHR30oSh6P1S9f9/wVOenHQnacs/Byb824g== + +"@rollup/rollup-linux-loongarch64-gnu@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.42.0.tgz#90d35336ad4cbf318648e41b0e7ce3920c28ebc9" + integrity sha512-O8AplvIeavK5ABmZlKBq9/STdZlnQo7Sle0LLhVA7QT+CiGpNVe197/t8Aph9bhJqbDVGCHpY2i7QyfEDDStDg== + +"@rollup/rollup-linux-powerpc64le-gnu@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.42.0.tgz#6d21a0f18262648ec181fc9326b8f0ac02aa744d" + integrity sha512-6Qb66tbKVN7VyQrekhEzbHRxXXFFD8QKiFAwX5v9Xt6FiJ3BnCVBuyBxa2fkFGqxOCSGGYNejxd8ht+q5SnmtA== + +"@rollup/rollup-linux-riscv64-gnu@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.42.0.tgz#e46e2d1125957694bfb5222ecd63dd6c9bd69682" + integrity sha512-KQETDSEBamQFvg/d8jajtRwLNBlGc3aKpaGiP/LvEbnmVUKlFta1vqJqTrvPtsYsfbE/DLg5CC9zyXRX3fnBiA== + +"@rollup/rollup-linux-riscv64-musl@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.42.0.tgz#478a23f0fa0d832a0a6fa858a9f3d2eb201d44de" + integrity sha512-qMvnyjcU37sCo/tuC+JqeDKSuukGAd+pVlRl/oyDbkvPJ3awk6G6ua7tyum02O3lI+fio+eM5wsVd66X0jQtxw== + +"@rollup/rollup-linux-s390x-gnu@4.42.0": + version "4.42.0" + resolved 
"https://registry.yarnpkg.com/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.42.0.tgz#4261c714cd750e3fb685a330dfca7bb8f5711469" + integrity sha512-I2Y1ZUgTgU2RLddUHXTIgyrdOwljjkmcZ/VilvaEumtS3Fkuhbw4p4hgHc39Ypwvo2o7sBFNl2MquNvGCa55Iw== + +"@rollup/rollup-linux-x64-gnu@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.42.0.tgz#45aa751bdf05ac696da417a37fdfd13f607e1fab" + integrity sha512-Gfm6cV6mj3hCUY8TqWa63DB8Mx3NADoFwiJrMpoZ1uESbK8FQV3LXkhfry+8bOniq9pqY1OdsjFWNsSbfjPugw== + +"@rollup/rollup-linux-x64-musl@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.42.0.tgz#9a0f8691dede53d1720ebb2aeef72e483cf69220" + integrity sha512-g86PF8YZ9GRqkdi0VoGlcDUb4rYtQKyTD1IVtxxN4Hpe7YqLBShA7oHMKU6oKTCi3uxwW4VkIGnOaH/El8de3w== + +"@rollup/rollup-win32-arm64-msvc@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.42.0.tgz#395ad8b6b6372a3888d2e96bf6c45392be815f4d" + integrity sha512-+axkdyDGSp6hjyzQ5m1pgcvQScfHnMCcsXkx8pTgy/6qBmWVhtRVlgxjWwDp67wEXXUr0x+vD6tp5W4x6V7u1A== + +"@rollup/rollup-win32-ia32-msvc@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.42.0.tgz#0d80305a14fff372ea5e90cd35c63c6b8efbd143" + integrity sha512-F+5J9pelstXKwRSDq92J0TEBXn2nfUrQGg+HK1+Tk7VOL09e0gBqUHugZv7SW4MGrYj41oNCUe3IKCDGVlis2g== + +"@rollup/rollup-win32-x64-msvc@4.42.0": + version "4.42.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.42.0.tgz#516c6770ba15fe6aef369d217a9747492c01e8b7" + integrity sha512-LpHiJRwkaVz/LqjHjK8LCi8osq7elmpwujwbXKNW88bM8eeGxavJIKKjkjpMHAh/2xfnrt1ZSnhTv41WYUHYmA== + +"@tailwindcss/node@4.1.8": + version "4.1.8" + resolved "https://registry.yarnpkg.com/@tailwindcss/node/-/node-4.1.8.tgz#e29187abec6194ce1e9f072208c62116a79a129b" + integrity sha512-OWwBsbC9BFAJelmnNcrKuf+bka2ZxCE2A4Ft53Tkg4uoiE67r/PMEYwCsourC26E+kmxfwE0hVzMdxqeW+xu7Q== + dependencies: + "@ampproject/remapping" "^2.3.0" + enhanced-resolve "^5.18.1" + jiti "^2.4.2" + lightningcss "1.30.1" + magic-string "^0.30.17" + source-map-js "^1.2.1" + tailwindcss "4.1.8" + +"@tailwindcss/oxide-android-arm64@4.1.8": + version "4.1.8" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.1.8.tgz#4cb4b464636fc7e3154a1bb7df38a828291b3e9a" + integrity sha512-Fbz7qni62uKYceWYvUjRqhGfZKwhZDQhlrJKGtnZfuNtHFqa8wmr+Wn74CTWERiW2hn3mN5gTpOoxWKk0jRxjg== + +"@tailwindcss/oxide-darwin-arm64@4.1.8": + version "4.1.8" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.8.tgz#b0b8c02745f76aea683c30818e249d62821864b8" + integrity sha512-RdRvedGsT0vwVVDztvyXhKpsU2ark/BjgG0huo4+2BluxdXo8NDgzl77qh0T1nUxmM11eXwR8jA39ibvSTbi7A== + +"@tailwindcss/oxide-darwin-x64@4.1.8": + version "4.1.8" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.1.8.tgz#d0f3fa4c3bde21a772e29e31c9739d91db79de12" + integrity sha512-t6PgxjEMLp5Ovf7uMb2OFmb3kqzVTPPakWpBIFzppk4JE4ix0yEtbtSjPbU8+PZETpaYMtXvss2Sdkx8Vs4XRw== + +"@tailwindcss/oxide-freebsd-x64@4.1.8": + version "4.1.8" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.1.8.tgz#545c94c941007ed1aa2e449465501b70d59cb3da" + integrity 
sha512-g8C8eGEyhHTqwPStSwZNSrOlyx0bhK/V/+zX0Y+n7DoRUzyS8eMbVshVOLJTDDC+Qn9IJnilYbIKzpB9n4aBsg== + +"@tailwindcss/oxide-linux-arm-gnueabihf@4.1.8": + version "4.1.8" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.1.8.tgz#e1bdbf63a179081669b8cd1c9523889774760eb9" + integrity sha512-Jmzr3FA4S2tHhaC6yCjac3rGf7hG9R6Gf2z9i9JFcuyy0u79HfQsh/thifbYTF2ic82KJovKKkIB6Z9TdNhCXQ== + +"@tailwindcss/oxide-linux-arm64-gnu@4.1.8": + version "4.1.8" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.1.8.tgz#8d28093bbd43bdae771a2dcca720e926baa57093" + integrity sha512-qq7jXtO1+UEtCmCeBBIRDrPFIVI4ilEQ97qgBGdwXAARrUqSn/L9fUrkb1XP/mvVtoVeR2bt/0L77xx53bPZ/Q== + +"@tailwindcss/oxide-linux-arm64-musl@4.1.8": + version "4.1.8" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.1.8.tgz#cc6cece814d813885ead9cd8b9d55aeb3db56c97" + integrity sha512-O6b8QesPbJCRshsNApsOIpzKt3ztG35gfX9tEf4arD7mwNinsoCKxkj8TgEE0YRjmjtO3r9FlJnT/ENd9EVefQ== + +"@tailwindcss/oxide-linux-x64-gnu@4.1.8": + version "4.1.8" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.1.8.tgz#4cac14fa71382574773fb7986d9f0681ad89e3de" + integrity sha512-32iEXX/pXwikshNOGnERAFwFSfiltmijMIAbUhnNyjFr3tmWmMJWQKU2vNcFX0DACSXJ3ZWcSkzNbaKTdngH6g== + +"@tailwindcss/oxide-linux-x64-musl@4.1.8": + version "4.1.8" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.1.8.tgz#e085f1ccbc8f97625773a6a3afc2a6f88edf59da" + integrity sha512-s+VSSD+TfZeMEsCaFaHTaY5YNj3Dri8rST09gMvYQKwPphacRG7wbuQ5ZJMIJXN/puxPcg/nU+ucvWguPpvBDg== + +"@tailwindcss/oxide-wasm32-wasi@4.1.8": + version "4.1.8" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.1.8.tgz#c5e19fffe67f25cabf12a357bba4e87128151ea0" + integrity sha512-CXBPVFkpDjM67sS1psWohZ6g/2/cd+cq56vPxK4JeawelxwK4YECgl9Y9TjkE2qfF+9/s1tHHJqrC4SS6cVvSg== + dependencies: + "@emnapi/core" "^1.4.3" + "@emnapi/runtime" "^1.4.3" + "@emnapi/wasi-threads" "^1.0.2" + "@napi-rs/wasm-runtime" "^0.2.10" + "@tybys/wasm-util" "^0.9.0" + tslib "^2.8.0" + +"@tailwindcss/oxide-win32-arm64-msvc@4.1.8": + version "4.1.8" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.8.tgz#77521f23f91604c587736927fd2cb526667b7344" + integrity sha512-7GmYk1n28teDHUjPlIx4Z6Z4hHEgvP5ZW2QS9ygnDAdI/myh3HTHjDqtSqgu1BpRoI4OiLx+fThAyA1JePoENA== + +"@tailwindcss/oxide-win32-x64-msvc@4.1.8": + version "4.1.8" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.1.8.tgz#55c876ab35f8779d1dceec61483cd9834d7365ac" + integrity sha512-fou+U20j+Jl0EHwK92spoWISON2OBnCazIc038Xj2TdweYV33ZRkS9nwqiUi2d/Wba5xg5UoHfvynnb/UB49cQ== + +"@tailwindcss/oxide@4.1.8": + version "4.1.8" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide/-/oxide-4.1.8.tgz#b7a3df10c6c47ac5a3ac9976ad334732c4870d16" + integrity sha512-d7qvv9PsM5N3VNKhwVUhpK6r4h9wtLkJ6lz9ZY9aeZgrUWk1Z8VPyqyDT9MZlem7GTGseRQHkeB1j3tC7W1P+A== + dependencies: + detect-libc "^2.0.4" + tar "^7.4.3" + optionalDependencies: + "@tailwindcss/oxide-android-arm64" "4.1.8" + "@tailwindcss/oxide-darwin-arm64" "4.1.8" + "@tailwindcss/oxide-darwin-x64" "4.1.8" + "@tailwindcss/oxide-freebsd-x64" "4.1.8" + "@tailwindcss/oxide-linux-arm-gnueabihf" "4.1.8" + "@tailwindcss/oxide-linux-arm64-gnu" "4.1.8" + "@tailwindcss/oxide-linux-arm64-musl" "4.1.8" + 
"@tailwindcss/oxide-linux-x64-gnu" "4.1.8" + "@tailwindcss/oxide-linux-x64-musl" "4.1.8" + "@tailwindcss/oxide-wasm32-wasi" "4.1.8" + "@tailwindcss/oxide-win32-arm64-msvc" "4.1.8" + "@tailwindcss/oxide-win32-x64-msvc" "4.1.8" + +"@tailwindcss/vite@^4.1.4": + version "4.1.8" + resolved "https://registry.yarnpkg.com/@tailwindcss/vite/-/vite-4.1.8.tgz#4da74494e2e0578767e02b96450b7f5127862698" + integrity sha512-CQ+I8yxNV5/6uGaJjiuymgw0kEQiNKRinYbZXPdx1fk5WgiyReG0VaUx/Xq6aVNSUNJFzxm6o8FNKS5aMaim5A== + dependencies: + "@tailwindcss/node" "4.1.8" + "@tailwindcss/oxide" "4.1.8" + tailwindcss "4.1.8" + +"@tanstack/query-core@5.80.6": + version "5.80.6" + resolved "https://registry.yarnpkg.com/@tanstack/query-core/-/query-core-5.80.6.tgz#d2e54fa9eabebc953c675b74b17d6cd45a2a4cb4" + integrity sha512-nl7YxT/TAU+VTf+e2zTkObGTyY8YZBMnbgeA1ee66lIVqzKlYursAII6z5t0e6rXgwUMJSV4dshBTNacNpZHbQ== + +"@tanstack/react-query@^5.80.6": + version "5.80.6" + resolved "https://registry.yarnpkg.com/@tanstack/react-query/-/react-query-5.80.6.tgz#9c77f05f3236b95693caaea155e5eaa4459f0197" + integrity sha512-izX+5CnkpON3NQGcEm3/d7LfFQNo9ZpFtX2QsINgCYK9LT2VCIdi8D3bMaMSNhrAJCznRoAkFic76uvLroALBw== + dependencies: + "@tanstack/query-core" "5.80.6" + +"@tybys/wasm-util@^0.9.0": + version "0.9.0" + resolved "https://registry.yarnpkg.com/@tybys/wasm-util/-/wasm-util-0.9.0.tgz#3e75eb00604c8d6db470bf18c37b7d984a0e3355" + integrity sha512-6+7nlbMVX/PVDCwaIQ8nTOPveOcFLSt8GcXdx8hD0bt39uWxYT88uXzqTd4fTvqta7oeUJqudepapKNt2DYJFw== + dependencies: + tslib "^2.4.0" + +"@types/estree@1.0.7": + version "1.0.7" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.7.tgz#4158d3105276773d5b7695cd4834b1722e4f37a8" + integrity sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ== + +"@types/node@^20": + version "20.19.0" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.19.0.tgz#7006b097b15dfea06695c3bbdba98b268797f65b" + integrity sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q== + dependencies: + undici-types "~6.21.0" + +"@types/react-dom@^19.1.2": + version "19.1.6" + resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-19.1.6.tgz#4af629da0e9f9c0f506fc4d1caa610399c595d64" + integrity sha512-4hOiT/dwO8Ko0gV1m/TJZYk3y0KBnY9vzDh7W+DH17b2HFSOGgdj33dhihPeuy3l0q23+4e+hoXHV6hCC4dCXw== + +"@types/react@^19.1.2": + version "19.1.7" + resolved "https://registry.yarnpkg.com/@types/react/-/react-19.1.7.tgz#9fc4ab6003a8e4f38710c83cb5f8afbdacb7d687" + integrity sha512-BnsPLV43ddr05N71gaGzyZ5hzkCmGwhMvYc8zmvI8Ci1bRkkDSzDDVfAXfN2tk748OwI7ediiPX6PfT9p0QGVg== + dependencies: + csstype "^3.0.2" + +accepts@~1.3.8: + version "1.3.8" + resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" + integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== + dependencies: + mime-types "~2.1.34" + negotiator "0.6.3" + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-regex@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.1.0.tgz#95ec409c69619d6cb1b8b34f14b660ef28ebd654" + integrity sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA== + 
+ansi-styles@^4.0.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +ansi-styles@^6.1.0: + version "6.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" + integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== + +arg@^5.0.1: + version "5.0.2" + resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" + integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== + +array-flatten@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== + +babel-dead-code-elimination@^1.0.6: + version "1.0.10" + resolved "https://registry.yarnpkg.com/babel-dead-code-elimination/-/babel-dead-code-elimination-1.0.10.tgz#e230562b57bf72ff3de4639ac763ba54f15d37b0" + integrity sha512-DV5bdJZTzZ0zn0DC24v3jD7Mnidh6xhKa4GfKCbq3sfW8kaWhDdZjP3i81geA8T33tdYqWKw4D3fVv0CwEgKVA== + dependencies: + "@babel/core" "^7.23.7" + "@babel/parser" "^7.23.6" + "@babel/traverse" "^7.23.7" + "@babel/types" "^7.23.6" + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +basic-auth@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/basic-auth/-/basic-auth-2.0.1.tgz#b998279bf47ce38344b4f3cf916d4679bbf51e3a" + integrity sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg== + dependencies: + safe-buffer "5.1.2" + +body-parser@1.20.3: + version "1.20.3" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.3.tgz#1953431221c6fb5cd63c4b36d53fab0928e548c6" + integrity sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g== + dependencies: + bytes "3.1.2" + content-type "~1.0.5" + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + http-errors "2.0.0" + iconv-lite "0.4.24" + on-finished "2.4.1" + qs "6.13.0" + raw-body "2.5.2" + type-is "~1.6.18" + unpipe "1.0.0" + +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + +browserslist@^4.24.0: + version "4.25.0" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.25.0.tgz#986aa9c6d87916885da2b50d8eb577ac8d133b2c" + integrity sha512-PJ8gYKeS5e/whHBh8xrwYK+dAvEj7JXtz6uTucnMRB8OiGTsKccFekoRrjajPBHV8oOY+2tI4uxeceSimKwMFA== + dependencies: + caniuse-lite "^1.0.30001718" + electron-to-chromium "^1.5.160" + node-releases "^2.0.19" + update-browserslist-db "^1.1.3" + +buffer-from@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + integrity 
sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== + +bytes@3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" + integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== + +cac@^6.7.14: + version "6.7.14" + resolved "https://registry.yarnpkg.com/cac/-/cac-6.7.14.tgz#804e1e6f506ee363cb0e3ccbb09cad5dd9870959" + integrity sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ== + +call-bind-apply-helpers@^1.0.1, call-bind-apply-helpers@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz#4b5428c222be985d79c3d82657479dbe0b59b2d6" + integrity sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ== + dependencies: + es-errors "^1.3.0" + function-bind "^1.1.2" + +call-bound@^1.0.2: + version "1.0.4" + resolved "https://registry.yarnpkg.com/call-bound/-/call-bound-1.0.4.tgz#238de935d2a2a692928c538c7ccfa91067fd062a" + integrity sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg== + dependencies: + call-bind-apply-helpers "^1.0.2" + get-intrinsic "^1.3.0" + +caniuse-lite@^1.0.30001718: + version "1.0.30001721" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001721.tgz#36b90cd96901f8c98dd6698bf5c8af7d4c6872d7" + integrity sha512-cOuvmUVtKrtEaoKiO0rSc29jcjwMwX5tOHDy4MgVFEWiUXj4uBMJkwI8MDySkgXidpMiHUcviogAvFi4pA2hDQ== + +chokidar@^4.0.0: + version "4.0.3" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-4.0.3.tgz#7be37a4c03c9aee1ecfe862a4a23b2c70c205d30" + integrity sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA== + dependencies: + readdirp "^4.0.1" + +chownr@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-3.0.0.tgz#9855e64ecd240a9cc4267ce8a4aa5d24a1da15e4" + integrity sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g== + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +color.js@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/color.js/-/color.js-1.2.0.tgz#18d9f55545111730d25ccf18ea8b6933c71440d7" + integrity sha512-0ajlNgWWOR7EK9N6l2h0YKsZPzMCLQG5bheCoTGpGfhkR8tB5eQNItdua1oFHDTeq9JKgSzQJqo+Gp3V/xW+Lw== + +compressible@~2.0.18: + version "2.0.18" + resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" + integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== + dependencies: + mime-db ">= 1.43.0 < 2" + +compression@^1.7.4: + version "1.8.0" + resolved "https://registry.yarnpkg.com/compression/-/compression-1.8.0.tgz#09420efc96e11a0f44f3a558de59e321364180f7" + integrity 
sha512-k6WLKfunuqCYD3t6AsuPGvQWaKwuLLh2/xHNcX4qE+vIfDNXpSqnrhwA7O53R7WVQUnt8dVAIW+YHr7xTgOgGA== + dependencies: + bytes "3.1.2" + compressible "~2.0.18" + debug "2.6.9" + negotiator "~0.6.4" + on-headers "~1.0.2" + safe-buffer "5.2.1" + vary "~1.1.2" + +content-disposition@0.5.4: + version "0.5.4" + resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" + integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== + dependencies: + safe-buffer "5.2.1" + +content-type@~1.0.4, content-type@~1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918" + integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== + +convert-source-map@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" + integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== + +cookie-signature@1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== + +cookie@0.7.1: + version "0.7.1" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.7.1.tgz#2f73c42142d5d5cf71310a74fc4ae61670e5dbc9" + integrity sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w== + +cookie@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-1.0.2.tgz#27360701532116bd3f1f9416929d176afe1e4610" + integrity sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA== + +cross-spawn@^7.0.6: + version "7.0.6" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f" + integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +csstype@^3.0.2: + version "3.1.3" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.3.tgz#d80ff294d114fb0e6ac500fbf85b60137d7eff81" + integrity sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw== + +debug@2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.4.1: + version "4.4.1" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.4.1.tgz#e5a8bc6cbc4c6cd3e64308b0693a3d4fa550189b" + integrity sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ== + dependencies: + ms "^2.1.3" + +dedent@^1.5.3: + version "1.6.0" + resolved "https://registry.yarnpkg.com/dedent/-/dedent-1.6.0.tgz#79d52d6389b1ffa67d2bcef59ba51847a9d503b2" + integrity sha512-F1Z+5UCFpmQUzJa11agbyPVMbpgT/qA3/SKyJ1jyBgm7dUcUEa8v9JwDkerSQXfakBwFljIxhOJqGkjUwZ9FSA== + +depd@2.0.0, depd@~2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" + integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== + +destroy@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" + integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== + +detect-libc@^2.0.3, detect-libc@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.4.tgz#f04715b8ba815e53b4d8109655b6508a6865a7e8" + integrity sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA== + +dunder-proto@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/dunder-proto/-/dunder-proto-1.0.1.tgz#d7ae667e1dc83482f8b70fd0f6eefc50da30f58a" + integrity sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A== + dependencies: + call-bind-apply-helpers "^1.0.1" + es-errors "^1.3.0" + gopd "^1.2.0" + +eastasianwidth@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" + integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA== + +ee-first@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== + +electron-to-chromium@^1.5.160: + version "1.5.166" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.5.166.tgz#3fff386ed473cc2169dbe2d3ace9592262601114" + integrity sha512-QPWqHL0BglzPYyJJ1zSSmwFFL6MFXhbACOCcsCdUMCkzPdS9/OIBVxg516X/Ado2qwAq8k0nJJ7phQPCqiaFAw== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +emoji-regex@^9.2.2: + version "9.2.2" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" + integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== + +encodeurl@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== + +encodeurl@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-2.0.0.tgz#7b8ea898077d7e409d3ac45474ea38eaf0857a58" + integrity sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg== + +enhanced-resolve@^5.18.1: + version "5.18.1" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.18.1.tgz#728ab082f8b7b6836de51f1637aab5d3b9568faf" + integrity sha512-ZSW3ma5GkcQBIpwZTSRAI8N71Uuwgs93IezB7mf7R60tC8ZbJideoDNKjHn2O9KIlx6rkGTTEk1xUCK2E1Y2Yg== + dependencies: + graceful-fs "^4.2.4" + tapable "^2.2.0" + +err-code@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/err-code/-/err-code-2.0.3.tgz#23c2f3b756ffdfc608d30e27c9a941024807e7f9" + integrity 
sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA== + +es-define-property@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.1.tgz#983eb2f9a6724e9303f61addf011c72e09e0b0fa" + integrity sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g== + +es-errors@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f" + integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw== + +es-module-lexer@^1.3.1, es-module-lexer@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.7.0.tgz#9159601561880a85f2734560a9099b2c31e5372a" + integrity sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA== + +es-object-atoms@^1.0.0, es-object-atoms@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/es-object-atoms/-/es-object-atoms-1.1.1.tgz#1c4f2c4837327597ce69d2ca190a7fdd172338c1" + integrity sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA== + dependencies: + es-errors "^1.3.0" + +esbuild@^0.25.0: + version "0.25.5" + resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.25.5.tgz#71075054993fdfae76c66586f9b9c1f8d7edd430" + integrity sha512-P8OtKZRv/5J5hhz0cUAdu/cLuPIKXpQl1R9pZtvmHWQvrAUVd0UNIPT4IB4W3rNOqVO0rlqHmCIbSwxh/c9yUQ== + optionalDependencies: + "@esbuild/aix-ppc64" "0.25.5" + "@esbuild/android-arm" "0.25.5" + "@esbuild/android-arm64" "0.25.5" + "@esbuild/android-x64" "0.25.5" + "@esbuild/darwin-arm64" "0.25.5" + "@esbuild/darwin-x64" "0.25.5" + "@esbuild/freebsd-arm64" "0.25.5" + "@esbuild/freebsd-x64" "0.25.5" + "@esbuild/linux-arm" "0.25.5" + "@esbuild/linux-arm64" "0.25.5" + "@esbuild/linux-ia32" "0.25.5" + "@esbuild/linux-loong64" "0.25.5" + "@esbuild/linux-mips64el" "0.25.5" + "@esbuild/linux-ppc64" "0.25.5" + "@esbuild/linux-riscv64" "0.25.5" + "@esbuild/linux-s390x" "0.25.5" + "@esbuild/linux-x64" "0.25.5" + "@esbuild/netbsd-arm64" "0.25.5" + "@esbuild/netbsd-x64" "0.25.5" + "@esbuild/openbsd-arm64" "0.25.5" + "@esbuild/openbsd-x64" "0.25.5" + "@esbuild/sunos-x64" "0.25.5" + "@esbuild/win32-arm64" "0.25.5" + "@esbuild/win32-ia32" "0.25.5" + "@esbuild/win32-x64" "0.25.5" + +escalade@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.2.0.tgz#011a3f69856ba189dffa7dc8fcce99d2a87903e5" + integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA== + +escape-html@~1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== + +etag@~1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== + +exit-hook@2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/exit-hook/-/exit-hook-2.2.1.tgz#007b2d92c6428eda2b76e7016a34351586934593" + integrity sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw== + +express@^4.19.2: + version "4.21.2" + resolved 
"https://registry.yarnpkg.com/express/-/express-4.21.2.tgz#cf250e48362174ead6cea4a566abef0162c1ec32" + integrity sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA== + dependencies: + accepts "~1.3.8" + array-flatten "1.1.1" + body-parser "1.20.3" + content-disposition "0.5.4" + content-type "~1.0.4" + cookie "0.7.1" + cookie-signature "1.0.6" + debug "2.6.9" + depd "2.0.0" + encodeurl "~2.0.0" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "1.3.1" + fresh "0.5.2" + http-errors "2.0.0" + merge-descriptors "1.0.3" + methods "~1.1.2" + on-finished "2.4.1" + parseurl "~1.3.3" + path-to-regexp "0.1.12" + proxy-addr "~2.0.7" + qs "6.13.0" + range-parser "~1.2.1" + safe-buffer "5.2.1" + send "0.19.0" + serve-static "1.16.2" + setprototypeof "1.2.0" + statuses "2.0.1" + type-is "~1.6.18" + utils-merge "1.0.1" + vary "~1.1.2" + +fdir@^6.4.4: + version "6.4.6" + resolved "https://registry.yarnpkg.com/fdir/-/fdir-6.4.6.tgz#2b268c0232697063111bbf3f64810a2a741ba281" + integrity sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w== + +finalhandler@1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.3.1.tgz#0c575f1d1d324ddd1da35ad7ece3df7d19088019" + integrity sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ== + dependencies: + debug "2.6.9" + encodeurl "~2.0.0" + escape-html "~1.0.3" + on-finished "2.4.1" + parseurl "~1.3.3" + statuses "2.0.1" + unpipe "~1.0.0" + +foreground-child@^3.1.0: + version "3.3.1" + resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-3.3.1.tgz#32e8e9ed1b68a3497befb9ac2b6adf92a638576f" + integrity sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw== + dependencies: + cross-spawn "^7.0.6" + signal-exit "^4.0.1" + +forwarded@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" + integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== + +fresh@0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== + +fs-extra@^10.0.0: + version "10.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" + integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fsevents@~2.3.2, fsevents@~2.3.3: + version "2.3.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" + integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== + +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity 
sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-intrinsic@^1.2.5, get-intrinsic@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.3.0.tgz#743f0e3b6964a93a5491ed1bffaae054d7f98d01" + integrity sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ== + dependencies: + call-bind-apply-helpers "^1.0.2" + es-define-property "^1.0.1" + es-errors "^1.3.0" + es-object-atoms "^1.1.1" + function-bind "^1.1.2" + get-proto "^1.0.1" + gopd "^1.2.0" + has-symbols "^1.1.0" + hasown "^2.0.2" + math-intrinsics "^1.1.0" + +get-port@5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/get-port/-/get-port-5.1.1.tgz#0469ed07563479de6efb986baf053dcd7d4e3193" + integrity sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ== + +get-proto@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/get-proto/-/get-proto-1.0.1.tgz#150b3f2743869ef3e851ec0c49d15b1d14d00ee1" + integrity sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g== + dependencies: + dunder-proto "^1.0.1" + es-object-atoms "^1.0.0" + +glob@^10.2.2: + version "10.4.5" + resolved "https://registry.yarnpkg.com/glob/-/glob-10.4.5.tgz#f4d9f0b90ffdbab09c9d77f5f29b4262517b0956" + integrity sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg== + dependencies: + foreground-child "^3.1.0" + jackspeak "^3.1.2" + minimatch "^9.0.4" + minipass "^7.1.2" + package-json-from-dist "^1.0.0" + path-scurry "^1.11.1" + +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globrex@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/globrex/-/globrex-0.1.2.tgz#dd5d9ec826232730cd6793a5e33a9302985e6098" + integrity sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg== + +gopd@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.2.0.tgz#89f56b8217bdbc8802bd299df6d7f1081d7e51a1" + integrity sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg== + +graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4: + version "4.2.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== + +has-symbols@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.1.0.tgz#fc9c6a783a084951d0b971fe1018de813707a338" + integrity sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ== + +hasown@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003" + integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ== + dependencies: + function-bind "^1.1.2" + +hosted-git-info@^6.0.0, hosted-git-info@^6.1.1: + version "6.1.3" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-6.1.3.tgz#2ee1a14a097a1236bddf8672c35b613c46c55946" + integrity 
sha512-HVJyzUrLIL1c0QmviVh5E8VGyUS7xCFPS6yydaVd1UegW+ibV/CohqTH9MkOLDp5o+rb82DMo77PTuc9F/8GKw== + dependencies: + lru-cache "^7.5.1" + +http-errors@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" + integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== + dependencies: + depd "2.0.0" + inherits "2.0.4" + setprototypeof "1.2.0" + statuses "2.0.1" + toidentifier "1.0.1" + +iconv-lite@0.4.24: + version "0.4.24" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +inherits@2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +ipaddr.js@1.9.1: + version "1.9.1" + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" + integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== + +is-core-module@^2.8.1: + version "2.16.1" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.16.1.tgz#2a98801a849f43e2add644fbb6bc6229b19a4ef4" + integrity sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w== + dependencies: + hasown "^2.0.2" + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +isbot@^5.1.27: + version "5.1.28" + resolved "https://registry.yarnpkg.com/isbot/-/isbot-5.1.28.tgz#a9a32e70c890cf19b76090971b1fccf6021a519b" + integrity sha512-qrOp4g3xj8YNse4biorv6O5ZShwsJM0trsoda4y7j/Su7ZtTTfVXFzbKkpgcSoDrHS8FcTuUwcU04YimZlZOxw== + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== + +jackspeak@^3.1.2: + version "3.4.3" + resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-3.4.3.tgz#8833a9d89ab4acde6188942bd1c53b6390ed5a8a" + integrity sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw== + dependencies: + "@isaacs/cliui" "^8.0.2" + optionalDependencies: + "@pkgjs/parseargs" "^0.11.0" + +jiti@^2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/jiti/-/jiti-2.4.2.tgz#d19b7732ebb6116b06e2038da74a55366faef560" + integrity sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A== + +js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +jsesc@3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-3.0.2.tgz#bb8b09a6597ba426425f2e4a07245c3d00b9343e" + integrity 
sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g== + +jsesc@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-3.1.0.tgz#74d335a234f67ed19907fdadfac7ccf9d409825d" + integrity sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA== + +json-parse-even-better-errors@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz#b43d35e89c0f3be6b5fbbe9dc6c82467b30c28da" + integrity sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ== + +json5@^2.2.3: + version "2.2.3" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" + integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== + +jsonfile@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" + integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== + dependencies: + universalify "^2.0.0" + optionalDependencies: + graceful-fs "^4.1.6" + +lightningcss-darwin-arm64@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.1.tgz#3d47ce5e221b9567c703950edf2529ca4a3700ae" + integrity sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ== + +lightningcss-darwin-x64@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.1.tgz#e81105d3fd6330860c15fe860f64d39cff5fbd22" + integrity sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA== + +lightningcss-freebsd-x64@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.1.tgz#a0e732031083ff9d625c5db021d09eb085af8be4" + integrity sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig== + +lightningcss-linux-arm-gnueabihf@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.1.tgz#1f5ecca6095528ddb649f9304ba2560c72474908" + integrity sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q== + +lightningcss-linux-arm64-gnu@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.1.tgz#eee7799726103bffff1e88993df726f6911ec009" + integrity sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw== + +lightningcss-linux-arm64-musl@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.1.tgz#f2e4b53f42892feeef8f620cbb889f7c064a7dfe" + integrity sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ== + +lightningcss-linux-x64-gnu@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.1.tgz#2fc7096224bc000ebb97eea94aea248c5b0eb157" + integrity sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw== + +lightningcss-linux-x64-musl@1.30.1: + version "1.30.1" + resolved 
"https://registry.yarnpkg.com/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.1.tgz#66dca2b159fd819ea832c44895d07e5b31d75f26" + integrity sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ== + +lightningcss-win32-arm64-msvc@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.1.tgz#7d8110a19d7c2d22bfdf2f2bb8be68e7d1b69039" + integrity sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA== + +lightningcss-win32-x64-msvc@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.1.tgz#fd7dd008ea98494b85d24b4bea016793f2e0e352" + integrity sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg== + +lightningcss@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss/-/lightningcss-1.30.1.tgz#78e979c2d595bfcb90d2a8c0eb632fe6c5bfed5d" + integrity sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg== + dependencies: + detect-libc "^2.0.3" + optionalDependencies: + lightningcss-darwin-arm64 "1.30.1" + lightningcss-darwin-x64 "1.30.1" + lightningcss-freebsd-x64 "1.30.1" + lightningcss-linux-arm-gnueabihf "1.30.1" + lightningcss-linux-arm64-gnu "1.30.1" + lightningcss-linux-arm64-musl "1.30.1" + lightningcss-linux-x64-gnu "1.30.1" + lightningcss-linux-x64-musl "1.30.1" + lightningcss-win32-arm64-msvc "1.30.1" + lightningcss-win32-x64-msvc "1.30.1" + +lodash@^4.17.21: + version "4.17.21" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +lru-cache@^10.2.0: + version "10.4.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.4.3.tgz#410fc8a17b70e598013df257c2446b7f3383f119" + integrity sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ== + +lru-cache@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" + integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== + dependencies: + yallist "^3.0.2" + +lru-cache@^7.4.4, lru-cache@^7.5.1: + version "7.18.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.18.3.tgz#f793896e0fd0e954a59dfdd82f0773808df6aa89" + integrity sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA== + +lucide-react@^0.513.0: + version "0.513.0" + resolved "https://registry.yarnpkg.com/lucide-react/-/lucide-react-0.513.0.tgz#1315c04fb1121f3be7883ff08a95d55a31a14e97" + integrity sha512-CJZKq2g8Y8yN4Aq002GahSXbG2JpFv9kXwyiOAMvUBv7pxeOFHUWKB0mO7MiY4ZVFCV4aNjv2BJFq/z3DgKPQg== + +magic-string@^0.30.17: + version "0.30.17" + resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.30.17.tgz#450a449673d2460e5bbcfba9a61916a1714c7453" + integrity sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA== + dependencies: + "@jridgewell/sourcemap-codec" "^1.5.0" + +math-intrinsics@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/math-intrinsics/-/math-intrinsics-1.1.0.tgz#a0dd74be81e2aa5c2f27e65ce283605ee4e2b7f9" + integrity 
sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g== + +media-typer@0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== + +merge-descriptors@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.3.tgz#d80319a65f3c7935351e5cfdac8f9318504dbed5" + integrity sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ== + +methods@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== + +mime-db@1.52.0: + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +"mime-db@>= 1.43.0 < 2": + version "1.54.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.54.0.tgz#cddb3ee4f9c64530dff640236661d42cb6a314f5" + integrity sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ== + +mime-types@~2.1.24, mime-types@~2.1.34: + version "2.1.35" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime@1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +minimatch@^9.0.4: + version "9.0.5" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.5.tgz#d74f9dd6b57d83d8e98cfb82133b03978bc929e5" + integrity sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow== + dependencies: + brace-expansion "^2.0.1" + +"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.0.4, minipass@^7.1.2: + version "7.1.2" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707" + integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== + +minizlib@^3.0.1: + version "3.0.2" + resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-3.0.2.tgz#f33d638eb279f664439aa38dc5f91607468cb574" + integrity sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA== + dependencies: + minipass "^7.1.2" + +mkdirp@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-3.0.1.tgz#e44e4c5607fb279c168241713cc6e0fea9adcb50" + integrity sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg== + +morgan@^1.10.0: + version "1.10.0" + resolved "https://registry.yarnpkg.com/morgan/-/morgan-1.10.0.tgz#091778abc1fc47cd3509824653dae1faab6b17d7" + integrity sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ== + dependencies: + basic-auth "~2.0.1" + debug "2.6.9" + depd "~2.0.0" + on-finished "~2.3.0" + on-headers "~1.0.2" + +ms@2.0.0: + 
version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== + +ms@2.1.3, ms@^2.1.3: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +nanoid@^3.3.11: + version "3.3.11" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.11.tgz#4f4f112cefbe303202f2199838128936266d185b" + integrity sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w== + +negotiator@0.6.3: + version "0.6.3" + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" + integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== + +negotiator@~0.6.4: + version "0.6.4" + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.4.tgz#777948e2452651c570b712dd01c23e262713fff7" + integrity sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w== + +node-releases@^2.0.19: + version "2.0.19" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.19.tgz#9e445a52950951ec4d177d843af370b411caf314" + integrity sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw== + +normalize-package-data@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-5.0.0.tgz#abcb8d7e724c40d88462b84982f7cbf6859b4588" + integrity sha512-h9iPVIfrVZ9wVYQnxFgtw1ugSvGEMOlyPWWtm8BMJhnwyEL/FLbYbTY3V3PpjI/BUK67n9PEWDu6eHzu1fB15Q== + dependencies: + hosted-git-info "^6.0.0" + is-core-module "^2.8.1" + semver "^7.3.5" + validate-npm-package-license "^3.0.4" + +npm-install-checks@^6.0.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/npm-install-checks/-/npm-install-checks-6.3.0.tgz#046552d8920e801fa9f919cad569545d60e826fe" + integrity sha512-W29RiK/xtpCGqn6f3ixfRYGk+zRyr+Ew9F2E20BfXxT5/euLdA/Nm7fO7OeTGuAmTs30cpgInyJ0cYe708YTZw== + dependencies: + semver "^7.1.1" + +npm-normalize-package-bin@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz#25447e32a9a7de1f51362c61a559233b89947832" + integrity sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ== + +npm-package-arg@^10.0.0: + version "10.1.0" + resolved "https://registry.yarnpkg.com/npm-package-arg/-/npm-package-arg-10.1.0.tgz#827d1260a683806685d17193073cc152d3c7e9b1" + integrity sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA== + dependencies: + hosted-git-info "^6.0.0" + proc-log "^3.0.0" + semver "^7.3.5" + validate-npm-package-name "^5.0.0" + +npm-pick-manifest@^8.0.0: + version "8.0.2" + resolved "https://registry.yarnpkg.com/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz#2159778d9c7360420c925c1a2287b5a884c713aa" + integrity sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg== + dependencies: + npm-install-checks "^6.0.0" + npm-normalize-package-bin "^3.0.0" + npm-package-arg "^10.0.0" + semver "^7.3.5" + +object-inspect@^1.13.3: + version "1.13.4" + resolved 
"https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.4.tgz#8375265e21bc20d0fa582c22e1b13485d6e00213" + integrity sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew== + +on-finished@2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" + integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== + dependencies: + ee-first "1.1.1" + +on-finished@~2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" + integrity sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww== + dependencies: + ee-first "1.1.1" + +on-headers@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" + integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== + +package-json-from-dist@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz#4f1471a010827a86f94cfd9b0727e36d267de505" + integrity sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw== + +parseurl@~1.3.3: + version "1.3.3" + resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" + integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== + +path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-scurry@^1.11.1: + version "1.11.1" + resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.11.1.tgz#7960a668888594a0720b12a911d1a742ab9f11d2" + integrity sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA== + dependencies: + lru-cache "^10.2.0" + minipass "^5.0.0 || ^6.0.2 || ^7.0.0" + +path-to-regexp@0.1.12: + version "0.1.12" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.12.tgz#d5e1a12e478a976d432ef3c58d534b9923164bb7" + integrity sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ== + +pathe@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/pathe/-/pathe-1.1.2.tgz#6c4cb47a945692e48a1ddd6e4094d170516437ec" + integrity sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ== + +pathe@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/pathe/-/pathe-2.0.3.tgz#3ecbec55421685b70a9da872b2cff3e1cbed1716" + integrity sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w== + +picocolors@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.1.1.tgz#3d321af3eab939b083c8f929a1d12cda81c26b6b" + integrity sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA== + +picomatch@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-4.0.2.tgz#77c742931e8f3b8820946c76cd0c1f13730d1dab" + integrity 
sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg== + +postcss@^8.5.3: + version "8.5.4" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.5.4.tgz#d61014ac00e11d5f58458ed7247d899bd65f99c0" + integrity sha512-QSa9EBe+uwlGTFmHsPKokv3B/oEMQZxfqW0QqNCyhpa6mB1afzulwn8hihglqAb2pOw+BJgNlmXQ8la2VeHB7w== + dependencies: + nanoid "^3.3.11" + picocolors "^1.1.1" + source-map-js "^1.2.1" + +prettier@^2.7.1: + version "2.8.8" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.8.tgz#e8c5d7e98a4305ffe3de2e1fc4aca1a71c28b1da" + integrity sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q== + +proc-log@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/proc-log/-/proc-log-3.0.0.tgz#fb05ef83ccd64fd7b20bbe9c8c1070fc08338dd8" + integrity sha512-++Vn7NS4Xf9NacaU9Xq3URUuqZETPsf8L4j5/ckhaRYsfPeRyzGw+iDjFhV/Jr3uNmTvvddEJFWh5R1gRgUH8A== + +promise-inflight@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" + integrity sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g== + +promise-retry@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/promise-retry/-/promise-retry-2.0.1.tgz#ff747a13620ab57ba688f5fc67855410c370da22" + integrity sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g== + dependencies: + err-code "^2.0.2" + retry "^0.12.0" + +proxy-addr@~2.0.7: + version "2.0.7" + resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" + integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== + dependencies: + forwarded "0.2.0" + ipaddr.js "1.9.1" + +qs@6.13.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.13.0.tgz#6ca3bd58439f7e245655798997787b0d88a51906" + integrity sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg== + dependencies: + side-channel "^1.0.6" + +range-parser@~1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" + integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + +raw-body@2.5.2: + version "2.5.2" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a" + integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA== + dependencies: + bytes "3.1.2" + http-errors "2.0.0" + iconv-lite "0.4.24" + unpipe "1.0.0" + +react-dom@^19.1.0: + version "19.1.0" + resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-19.1.0.tgz#133558deca37fa1d682708df8904b25186793623" + integrity sha512-Xs1hdnE+DyKgeHJeJznQmYMIBG3TKIHJJT95Q58nHLSrElKlGQqDTR2HQ9fx5CN/Gk6Vh/kupBTDLU11/nDk/g== + dependencies: + scheduler "^0.26.0" + +react-refresh@^0.14.0: + version "0.14.2" + resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.14.2.tgz#3833da01ce32da470f1f936b9d477da5c7028bf9" + integrity sha512-jCvmsr+1IUSMUyzOkRcvnVbX3ZYC6g9TDrDbFuFmRDq7PD4yaGbLKNQL6k2jnArV8hjYxh7hVhAZB6s9HDGpZA== + +react-router@^7.5.3: + version "7.6.2" + resolved "https://registry.yarnpkg.com/react-router/-/react-router-7.6.2.tgz#9f48b343bead7d0a94e28342fc4f9ae29131520e" + integrity 
sha512-U7Nv3y+bMimgWjhlT5CRdzHPu2/KVmqPwKUCChW8en5P3znxUqwlYFlbmyj8Rgp1SF6zs5X4+77kBVknkg6a0w== + dependencies: + cookie "^1.0.1" + set-cookie-parser "^2.6.0" + +react@^19.1.0: + version "19.1.0" + resolved "https://registry.yarnpkg.com/react/-/react-19.1.0.tgz#926864b6c48da7627f004795d6cce50e90793b75" + integrity sha512-FS+XFBNvn3GTAWq26joslQgWNoFu08F4kl0J4CgdNKADkdSGXQyTCnKteIAJy96Br6YbpEU1LSzV5dYtjMkMDg== + +readdirp@^4.0.1: + version "4.1.2" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-4.1.2.tgz#eb85801435fbf2a7ee58f19e0921b068fc69948d" + integrity sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg== + +retry@^0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" + integrity sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow== + +rollup@^4.34.9: + version "4.42.0" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-4.42.0.tgz#57990f059e96bbccccefd5a41d8d5aff15ac1cb8" + integrity sha512-LW+Vse3BJPyGJGAJt1j8pWDKPd73QM8cRXYK1IxOBgL2AGLu7Xd2YOW0M2sLUBCkF5MshXXtMApyEAEzMVMsnw== + dependencies: + "@types/estree" "1.0.7" + optionalDependencies: + "@rollup/rollup-android-arm-eabi" "4.42.0" + "@rollup/rollup-android-arm64" "4.42.0" + "@rollup/rollup-darwin-arm64" "4.42.0" + "@rollup/rollup-darwin-x64" "4.42.0" + "@rollup/rollup-freebsd-arm64" "4.42.0" + "@rollup/rollup-freebsd-x64" "4.42.0" + "@rollup/rollup-linux-arm-gnueabihf" "4.42.0" + "@rollup/rollup-linux-arm-musleabihf" "4.42.0" + "@rollup/rollup-linux-arm64-gnu" "4.42.0" + "@rollup/rollup-linux-arm64-musl" "4.42.0" + "@rollup/rollup-linux-loongarch64-gnu" "4.42.0" + "@rollup/rollup-linux-powerpc64le-gnu" "4.42.0" + "@rollup/rollup-linux-riscv64-gnu" "4.42.0" + "@rollup/rollup-linux-riscv64-musl" "4.42.0" + "@rollup/rollup-linux-s390x-gnu" "4.42.0" + "@rollup/rollup-linux-x64-gnu" "4.42.0" + "@rollup/rollup-linux-x64-musl" "4.42.0" + "@rollup/rollup-win32-arm64-msvc" "4.42.0" + "@rollup/rollup-win32-ia32-msvc" "4.42.0" + "@rollup/rollup-win32-x64-msvc" "4.42.0" + fsevents "~2.3.2" + +safe-buffer@5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safe-buffer@5.2.1: + version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +"safer-buffer@>= 2.1.2 < 3": + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +scheduler@^0.26.0: + version "0.26.0" + resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.26.0.tgz#4ce8a8c2a2095f13ea11bf9a445be50c555d6337" + integrity sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA== + +semver@^6.3.1: + version "6.3.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== + +semver@^7.1.1, semver@^7.3.5, semver@^7.3.7, semver@^7.5.3: + 
version "7.7.2" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.7.2.tgz#67d99fdcd35cec21e6f8b87a7fd515a33f982b58" + integrity sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA== + +send@0.19.0: + version "0.19.0" + resolved "https://registry.yarnpkg.com/send/-/send-0.19.0.tgz#bbc5a388c8ea6c048967049dbeac0e4a3f09d7f8" + integrity sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw== + dependencies: + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "2.0.0" + mime "1.6.0" + ms "2.1.3" + on-finished "2.4.1" + range-parser "~1.2.1" + statuses "2.0.1" + +serve-static@1.16.2: + version "1.16.2" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.16.2.tgz#b6a5343da47f6bdd2673848bf45754941e803296" + integrity sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw== + dependencies: + encodeurl "~2.0.0" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.19.0" + +set-cookie-parser@^2.6.0: + version "2.7.1" + resolved "https://registry.yarnpkg.com/set-cookie-parser/-/set-cookie-parser-2.7.1.tgz#3016f150072202dfbe90fadee053573cc89d2943" + integrity sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ== + +setprototypeof@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" + integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +side-channel-list@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/side-channel-list/-/side-channel-list-1.0.0.tgz#10cb5984263115d3b7a0e336591e290a830af8ad" + integrity sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA== + dependencies: + es-errors "^1.3.0" + object-inspect "^1.13.3" + +side-channel-map@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/side-channel-map/-/side-channel-map-1.0.1.tgz#d6bb6b37902c6fef5174e5f533fab4c732a26f42" + integrity sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA== + dependencies: + call-bound "^1.0.2" + es-errors "^1.3.0" + get-intrinsic "^1.2.5" + object-inspect "^1.13.3" + +side-channel-weakmap@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz#11dda19d5368e40ce9ec2bdc1fb0ecbc0790ecea" + integrity sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A== + dependencies: + call-bound "^1.0.2" + es-errors "^1.3.0" + get-intrinsic "^1.2.5" + object-inspect "^1.13.3" + side-channel-map "^1.0.1" + +side-channel@^1.0.6: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/side-channel/-/side-channel-1.1.0.tgz#c3fcff9c4da932784873335ec9765fa94ff66bc9" + integrity sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw== + dependencies: + es-errors "^1.3.0" + object-inspect "^1.13.3" + side-channel-list "^1.0.0" + side-channel-map "^1.0.1" + side-channel-weakmap "^1.0.2" + +signal-exit@^4.0.1: + version "4.1.0" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" + integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== + +source-map-js@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.1.tgz#1ce5650fddd87abc099eda37dcff024c2667ae46" + integrity sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA== + +source-map-support@^0.5.21: + version "0.5.21" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" + integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map@^0.6.0: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +spdx-correct@^3.0.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.2.0.tgz#4f5ab0668f0059e34f9c00dce331784a12de4e9c" + integrity sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA== + dependencies: + spdx-expression-parse "^3.0.0" + spdx-license-ids "^3.0.0" + +spdx-exceptions@^2.1.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz#5d607d27fc806f66d7b64a766650fa890f04ed66" + integrity sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w== + +spdx-expression-parse@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679" + integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q== + dependencies: + spdx-exceptions "^2.1.0" + spdx-license-ids "^3.0.0" + +spdx-license-ids@^3.0.0: + version "3.0.21" + resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.21.tgz#6d6e980c9df2b6fc905343a3b2d702a6239536c3" + integrity sha512-Bvg/8F5XephndSK3JffaRqdT+gyhfqIPwDHpX80tJrF8QQRYMo8sNMeaZ2Dp5+jhwKnUmIOyFFQfHRkjJm5nXg== + +statuses@2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" + integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== + +stream-slice@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/stream-slice/-/stream-slice-0.1.2.tgz#2dc4f4e1b936fb13f3eb39a2def1932798d07a4b" + integrity sha512-QzQxpoacatkreL6jsxnVb7X5R/pGw9OUv2qWTYWnmLpg4NdN31snPy/f3TdQE1ZUXaThRvj1Zw4/OGg0ZkaLMA== + +"string-width-cjs@npm:string-width@^4.2.0": + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity 
sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string-width@^4.1.0: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string-width@^5.0.1, string-width@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" + integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA== + dependencies: + eastasianwidth "^0.2.0" + emoji-regex "^9.2.2" + strip-ansi "^7.0.1" + +"strip-ansi-cjs@npm:strip-ansi@^6.0.1": + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^7.0.1: + version "7.1.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" + integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== + dependencies: + ansi-regex "^6.0.1" + +tailwindcss@4.1.8, tailwindcss@^4.1.4: + version "4.1.8" + resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-4.1.8.tgz#5d66d095ee7d82f03d6dbc6158bc248e064a5c05" + integrity sha512-kjeW8gjdxasbmFKpVGrGd5T4i40mV5J2Rasw48QARfYeQ8YS9x02ON9SFWax3Qf616rt4Cp3nVNIj6Hd1mP3og== + +tapable@^2.2.0: + version "2.2.2" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.2.tgz#ab4984340d30cb9989a490032f086dbb8b56d872" + integrity sha512-Re10+NauLTMCudc7T5WLFLAwDhQ0JWdrMK+9B2M8zR5hRExKmsRDCBA7/aV/pNJFltmBFO5BAMlQFi/vq3nKOg== + +tar@^7.4.3: + version "7.4.3" + resolved "https://registry.yarnpkg.com/tar/-/tar-7.4.3.tgz#88bbe9286a3fcd900e94592cda7a22b192e80571" + integrity sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw== + dependencies: + "@isaacs/fs-minipass" "^4.0.0" + chownr "^3.0.0" + minipass "^7.1.2" + minizlib "^3.0.1" + mkdirp "^3.0.1" + yallist "^5.0.0" + +tinyglobby@^0.2.13: + version "0.2.14" + resolved "https://registry.yarnpkg.com/tinyglobby/-/tinyglobby-0.2.14.tgz#5280b0cf3f972b050e74ae88406c0a6a58f4079d" + integrity sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ== + dependencies: + fdir "^6.4.4" + picomatch "^4.0.2" + +toidentifier@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" + integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== + +tsconfck@^3.0.3: + version "3.1.6" + resolved 
"https://registry.yarnpkg.com/tsconfck/-/tsconfck-3.1.6.tgz#da1f0b10d82237ac23422374b3fce1edb23c3ead" + integrity sha512-ks6Vjr/jEw0P1gmOVwutM3B7fWxoWBL2KRDb1JfqGVawBmO5UsvmWOQFGHBPl5yxYz4eERr19E6L7NMv+Fej4w== + +tslib@^2.4.0, tslib@^2.8.0: + version "2.8.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.8.1.tgz#612efe4ed235d567e8aba5f2a5fab70280ade83f" + integrity sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w== + +type-is@~1.6.18: + version "1.6.18" + resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" + integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + dependencies: + media-typer "0.3.0" + mime-types "~2.1.24" + +typescript@^5.8.3: + version "5.8.3" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.8.3.tgz#92f8a3e5e3cf497356f4178c34cd65a7f5e8440e" + integrity sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ== + +undici-types@~6.21.0: + version "6.21.0" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.21.0.tgz#691d00af3909be93a7faa13be61b3a5b50ef12cb" + integrity sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ== + +undici@^6.19.2: + version "6.21.3" + resolved "https://registry.yarnpkg.com/undici/-/undici-6.21.3.tgz#185752ad92c3d0efe7a7d1f6854a50f83b552d7a" + integrity sha512-gBLkYIlEnSp8pFbT64yFgGE6UIB9tAkhukC23PmMDCe5Nd+cRqKxSjw5y54MK2AZMgZfJWMaNE4nYUHgi1XEOw== + +universalify@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.1.tgz#168efc2180964e6386d061e094df61afe239b18d" + integrity sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw== + +unpipe@1.0.0, unpipe@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== + +update-browserslist-db@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz#348377dd245216f9e7060ff50b15a1b740b75420" + integrity sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw== + dependencies: + escalade "^3.2.0" + picocolors "^1.1.1" + +utils-merge@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== + +valibot@^0.41.0: + version "0.41.0" + resolved "https://registry.yarnpkg.com/valibot/-/valibot-0.41.0.tgz#5c2efd49c078e455f7862379365f6036f3cd9f96" + integrity sha512-igDBb8CTYr8YTQlOKgaN9nSS0Be7z+WRuaeYqGf3Cjz3aKmSnqEmYnkfVjzIuumGqfHpa3fLIvMEAfhrpqN8ng== + +validate-npm-package-license@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" + integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== + dependencies: + spdx-correct "^3.0.0" + spdx-expression-parse "^3.0.0" + +validate-npm-package-name@^5.0.0: + version "5.0.1" + resolved 
"https://registry.yarnpkg.com/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz#a316573e9b49f3ccd90dbb6eb52b3f06c6d604e8" + integrity sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ== + +vary@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== + +vite-node@^3.1.4: + version "3.2.3" + resolved "https://registry.yarnpkg.com/vite-node/-/vite-node-3.2.3.tgz#1c5a2282fe100114c26fd221daf506e69d392a36" + integrity sha512-gc8aAifGuDIpZHrPjuHyP4dpQmYXqWw7D1GmDnWeNWP654UEXzVfQ5IHPSK5HaHkwB/+p1atpYpSdw/2kOv8iQ== + dependencies: + cac "^6.7.14" + debug "^4.4.1" + es-module-lexer "^1.7.0" + pathe "^2.0.3" + vite "^5.0.0 || ^6.0.0 || ^7.0.0-0" + +vite-tsconfig-paths@^5.1.4: + version "5.1.4" + resolved "https://registry.yarnpkg.com/vite-tsconfig-paths/-/vite-tsconfig-paths-5.1.4.tgz#d9a71106a7ff2c1c840c6f1708042f76a9212ed4" + integrity sha512-cYj0LRuLV2c2sMqhqhGpaO3LretdtMn/BVX4cPLanIZuwwrkVl+lK84E/miEXkCHWXuq65rhNN4rXsBcOB3S4w== + dependencies: + debug "^4.1.1" + globrex "^0.1.2" + tsconfck "^3.0.3" + +"vite@^5.0.0 || ^6.0.0 || ^7.0.0-0", vite@^6.3.3: + version "6.3.5" + resolved "https://registry.yarnpkg.com/vite/-/vite-6.3.5.tgz#fec73879013c9c0128c8d284504c6d19410d12a3" + integrity sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ== + dependencies: + esbuild "^0.25.0" + fdir "^6.4.4" + picomatch "^4.0.2" + postcss "^8.5.3" + rollup "^4.34.9" + tinyglobby "^0.2.13" + optionalDependencies: + fsevents "~2.3.3" + +which@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +which@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/which/-/which-3.0.1.tgz#89f1cd0c23f629a8105ffe69b8172791c87b4be1" + integrity sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg== + dependencies: + isexe "^2.0.0" + +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrap-ansi@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" + integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ== + dependencies: + ansi-styles "^6.1.0" + string-width "^5.0.1" + strip-ansi "^7.0.1" + +yallist@^3.0.2: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" + integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== + +yallist@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-5.0.0.tgz#00e2de443639ed0d78fd87de0d27469fbcffb533" + integrity sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw== diff --git a/cmd/api/main.go b/cmd/api/main.go new 
file mode 100644 index 0000000..cbb9238 --- /dev/null +++ b/cmd/api/main.go @@ -0,0 +1,18 @@ +package main + +import ( + "fmt" + "os" + + "github.com/gabehf/koito/engine" +) + +func main() { + if err := engine.Run( + os.Getenv, + os.Stdout, + ); err != nil { + fmt.Fprintf(os.Stderr, "%s\n", err) + os.Exit(1) + } +} diff --git a/db/migrations/000001_initial_schema.sql b/db/migrations/000001_initial_schema.sql new file mode 100644 index 0000000..bfc861e --- /dev/null +++ b/db/migrations/000001_initial_schema.sql @@ -0,0 +1,282 @@ +-- +goose Up +-- +goose StatementBegin +SELECT 'up SQL query'; +-- +goose StatementEnd + +-- Extensions +CREATE EXTENSION IF NOT EXISTS pg_trgm WITH SCHEMA public; + +-- Types +CREATE TYPE role AS ENUM ( + 'admin', + 'user' +); + +-- Functions + +-- +goose StatementBegin +CREATE FUNCTION delete_orphan_releases() RETURNS trigger + LANGUAGE plpgsql + AS $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM artist_releases + WHERE release_id = OLD.release_id + ) THEN + DELETE FROM releases WHERE id = OLD.release_id; + END IF; + RETURN NULL; +END; +$$; +-- +goose StatementEnd + +-- Tables +CREATE TABLE artists ( + id integer NOT NULL GENERATED ALWAYS AS IDENTITY ( + SEQUENCE NAME artists_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1 + ), + musicbrainz_id UUID UNIQUE, + image UUID, + image_source text, + CONSTRAINT artists_pkey PRIMARY KEY (id) +); + +CREATE TABLE artist_aliases ( + artist_id integer NOT NULL, + alias text NOT NULL, + source text NOT NULL, + is_primary boolean NOT NULL, + CONSTRAINT artist_aliases_pkey PRIMARY KEY (artist_id, alias) +); + +CREATE TABLE releases ( + id integer NOT NULL GENERATED ALWAYS AS IDENTITY ( + SEQUENCE NAME releases_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1 + ), + musicbrainz_id UUID UNIQUE, + image UUID, + various_artists boolean DEFAULT false NOT NULL, + image_source text, + CONSTRAINT releases_pkey PRIMARY KEY (id) +); + +CREATE TABLE artist_releases ( + artist_id integer NOT NULL, + release_id integer NOT NULL, + CONSTRAINT artist_releases_pkey PRIMARY KEY (artist_id, release_id) +); + +CREATE TABLE tracks ( + id integer NOT NULL GENERATED ALWAYS AS IDENTITY ( + SEQUENCE NAME tracks_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1 + ), + musicbrainz_id UUID UNIQUE, + duration integer DEFAULT 0 NOT NULL, + release_id integer NOT NULL, + CONSTRAINT tracks_pkey PRIMARY KEY (id) +); + +CREATE TABLE artist_tracks ( + artist_id integer NOT NULL, + track_id integer NOT NULL, + CONSTRAINT artist_tracks_pkey PRIMARY KEY (artist_id, track_id) +); + +CREATE TABLE users ( + id integer NOT NULL GENERATED ALWAYS AS IDENTITY ( + SEQUENCE NAME users_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1 + ), + username text UNIQUE NOT NULL, + role role DEFAULT 'user'::role NOT NULL, + password bytea NOT NULL, + CONSTRAINT users_pkey PRIMARY KEY (id) +); + +CREATE TABLE api_keys ( + id integer NOT NULL GENERATED ALWAYS AS IDENTITY ( + SEQUENCE NAME api_keys_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1 + ), + key text UNIQUE NOT NULL, + user_id integer NOT NULL, + created_at timestamp without time zone DEFAULT now() NOT NULL, + label text NOT NULL, + CONSTRAINT api_keys_pkey PRIMARY KEY (id) +); + +CREATE TABLE release_aliases ( + release_id integer NOT NULL, + alias text NOT NULL, + source text NOT NULL, + is_primary boolean NOT NULL, + CONSTRAINT release_aliases_pkey PRIMARY KEY (release_id, alias) +); + 
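The tables above split naming out of the entity rows: artists, releases, and tracks carry only surrogate IDs, MusicBrainz IDs, and artwork references, while every display name lives in an *_aliases table with an is_primary flag. A minimal sketch of that flow with hypothetical data (the artist name, the 'Canonical' source string, and the id used in the lookup are illustrative placeholders, not values from this schema):

```
-- Hypothetical: create a nameless artists row, then attach its aliases.
-- Exactly one alias per artist is expected to carry is_primary = true.
WITH new_artist AS (
    INSERT INTO artists DEFAULT VALUES
    RETURNING id
)
INSERT INTO artist_aliases (artist_id, alias, source, is_primary)
SELECT id, 'Some Artist', 'Canonical', true FROM new_artist
UNION ALL
SELECT id, 'Some Artist (alt. spelling)', 'Canonical', false FROM new_artist;

-- Hypothetical: resolve the canonical display name for artist 1.
SELECT aa.alias AS name
FROM artist_aliases aa
WHERE aa.artist_id = 1
  AND aa.is_primary = true;
```

The artists_with_name, releases_with_title, and tracks_with_title views defined a little further down bake exactly this is_primary lookup into named relations.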
+CREATE TABLE sessions ( + id UUID NOT NULL, + user_id integer NOT NULL, + created_at timestamp without time zone DEFAULT now() NOT NULL, + expires_at timestamp without time zone NOT NULL, + persistent boolean DEFAULT false NOT NULL, + CONSTRAINT sessions_pkey PRIMARY KEY (id) +); + +CREATE TABLE track_aliases ( + track_id integer NOT NULL, + alias text NOT NULL, + is_primary boolean NOT NULL, + source text NOT NULL, + CONSTRAINT track_aliases_pkey PRIMARY KEY (track_id, alias) +); + +CREATE TABLE listens ( + track_id integer NOT NULL, + listened_at timestamptz NOT NULL, + client text, + user_id integer NOT NULL, + CONSTRAINT listens_pkey PRIMARY KEY (track_id, listened_at) +); + + +-- Views +CREATE VIEW artists_with_name AS + SELECT a.id, + a.musicbrainz_id, + a.image, + a.image_source, + aa.alias AS name + FROM (artists a + JOIN artist_aliases aa ON ((aa.artist_id = a.id))) + WHERE (aa.is_primary = true); + +CREATE VIEW releases_with_title AS + SELECT r.id, + r.musicbrainz_id, + r.image, + r.various_artists, + r.image_source, + ra.alias AS title + FROM (releases r + JOIN release_aliases ra ON ((ra.release_id = r.id))) + WHERE (ra.is_primary = true); + +CREATE VIEW tracks_with_title AS + SELECT t.id, + t.musicbrainz_id, + t.duration, + t.release_id, + ta.alias AS title + FROM (tracks t + JOIN track_aliases ta ON ((ta.track_id = t.id))) + WHERE (ta.is_primary = true); + +-- Foreign Key Constraints +ALTER TABLE ONLY api_keys + ADD CONSTRAINT api_keys_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; + +ALTER TABLE ONLY artist_aliases + ADD CONSTRAINT artist_aliases_artist_id_fkey FOREIGN KEY (artist_id) REFERENCES artists(id) ON DELETE CASCADE; + +ALTER TABLE ONLY artist_releases + ADD CONSTRAINT artist_releases_artist_id_fkey FOREIGN KEY (artist_id) REFERENCES artists(id) ON DELETE CASCADE; + +ALTER TABLE ONLY artist_releases + ADD CONSTRAINT artist_releases_release_id_fkey FOREIGN KEY (release_id) REFERENCES releases(id) ON DELETE CASCADE; + +ALTER TABLE ONLY artist_tracks + ADD CONSTRAINT artist_tracks_artist_id_fkey FOREIGN KEY (artist_id) REFERENCES artists(id) ON DELETE CASCADE; + +ALTER TABLE ONLY artist_tracks + ADD CONSTRAINT artist_tracks_track_id_fkey FOREIGN KEY (track_id) REFERENCES tracks(id) ON DELETE CASCADE; + +ALTER TABLE ONLY listens + ADD CONSTRAINT listens_track_id_fkey FOREIGN KEY (track_id) REFERENCES tracks(id) ON DELETE CASCADE; + +ALTER TABLE ONLY listens + ADD CONSTRAINT listens_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; + +ALTER TABLE ONLY release_aliases + ADD CONSTRAINT release_aliases_release_id_fkey FOREIGN KEY (release_id) REFERENCES releases(id) ON DELETE CASCADE; + +ALTER TABLE ONLY sessions + ADD CONSTRAINT sessions_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; + +ALTER TABLE ONLY track_aliases + ADD CONSTRAINT track_aliases_track_id_fkey FOREIGN KEY (track_id) REFERENCES tracks(id) ON DELETE CASCADE; + +ALTER TABLE ONLY tracks + ADD CONSTRAINT track_release_id_fkey FOREIGN KEY (release_id) REFERENCES releases(id) ON DELETE CASCADE; + +-- Indexes +CREATE INDEX idx_artist_aliases_alias_trgm ON artist_aliases USING gin (alias gin_trgm_ops); +CREATE INDEX idx_artist_aliases_artist_id ON artist_aliases USING btree (artist_id); +CREATE INDEX idx_artist_releases ON artist_releases USING btree (artist_id, release_id); +CREATE INDEX idx_release_aliases_alias_trgm ON release_aliases USING gin (alias gin_trgm_ops); +CREATE INDEX idx_tracks_release_id ON tracks USING btree 
(release_id); +CREATE INDEX listens_listened_at_idx ON listens USING btree (listened_at); +CREATE INDEX listens_track_id_listened_at_idx ON listens USING btree (track_id, listened_at); +CREATE INDEX release_aliases_release_id_idx ON release_aliases USING btree (release_id) WHERE (is_primary = true); +CREATE INDEX track_aliases_track_id_idx ON track_aliases USING btree (track_id) WHERE (is_primary = true); +CREATE INDEX idx_track_aliases_alias_trgm ON track_aliases USING gin (alias gin_trgm_ops); + +-- Triggers +CREATE TRIGGER trg_delete_orphan_releases AFTER DELETE ON artist_releases FOR EACH ROW EXECUTE FUNCTION delete_orphan_releases(); + +-- +goose Down +-- +goose StatementBegin +SELECT 'down SQL query'; +-- +goose StatementEnd + +-- Drop Triggers +DROP TRIGGER IF EXISTS trg_delete_orphan_releases ON artist_releases; + +-- Drop Views +DROP VIEW IF EXISTS artists_with_name; +DROP VIEW IF EXISTS releases_with_title; +DROP VIEW IF EXISTS tracks_with_title; + +-- Drop Tables (in reverse dependency order) +DROP TABLE IF EXISTS listens CASCADE; +DROP TABLE IF EXISTS api_keys CASCADE; +DROP TABLE IF EXISTS artist_tracks CASCADE; +DROP TABLE IF EXISTS artist_releases CASCADE; +DROP TABLE IF EXISTS release_aliases CASCADE; +DROP TABLE IF EXISTS track_aliases CASCADE; +DROP TABLE IF EXISTS sessions CASCADE; +DROP TABLE IF EXISTS tracks CASCADE; +DROP TABLE IF EXISTS artists CASCADE; +DROP TABLE IF EXISTS users CASCADE; +DROP TABLE IF EXISTS artist_aliases CASCADE; + +-- Drop Functions +DROP FUNCTION IF EXISTS delete_orphan_releases(); + +-- Drop Types +DROP TYPE IF EXISTS role; + +-- Drop Extensions +DROP EXTENSION IF EXISTS pg_trgm; diff --git a/db/migrations/000002_fix_api_key_fkey.sql b/db/migrations/000002_fix_api_key_fkey.sql new file mode 100644 index 0000000..30eb751 --- /dev/null +++ b/db/migrations/000002_fix_api_key_fkey.sql @@ -0,0 +1,3 @@ +-- +goose Up +ALTER TABLE api_keys DROP CONSTRAINT api_keys_user_id_fkey; +ALTER TABLE api_keys ADD CONSTRAINT api_keys_user_id_fkey FOREIGN KEY(user_id) REFERENCES users(id) ON DELETE CASCADE; \ No newline at end of file diff --git a/db/migrations/etc/dump.sql b/db/migrations/etc/dump.sql new file mode 100644 index 0000000..4d28cae --- /dev/null +++ b/db/migrations/etc/dump.sql @@ -0,0 +1,772 @@ +-- +goose Up +-- +goose StatementBegin + +-- +-- PostgreSQL database dump +-- + +-- Dumped from database version 16.9 (Debian 16.9-1.pgdg120+1) +-- Dumped by pg_dump version 16.4 (Debian 16.4-1.pgdg120+1) + +-- Started on 2025-06-11 14:30:57 UTC + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET xmloption = content; +SET client_min_messages = warning; +SET row_security = off; + +-- +-- TOC entry 2 (class 3079 OID 16511) +-- Name: pg_trgm; Type: EXTENSION; Schema: -; Owner: - +-- + +CREATE EXTENSION IF NOT EXISTS pg_trgm WITH SCHEMA public; + + +-- +-- TOC entry 3536 (class 0 OID 0) +-- Dependencies: 2 +-- Name: EXTENSION pg_trgm; Type: COMMENT; Schema: -; Owner: +-- + +COMMENT ON EXTENSION pg_trgm IS 'text similarity measurement and index searching based on trigrams'; + + +-- +-- TOC entry 921 (class 1247 OID 16885) +-- Name: role; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public.role AS ENUM ( + 'admin', + 'user' +); + + +ALTER TYPE public.role OWNER TO postgres; + +-- +-- TOC entry 269 (class 1255 OID 16963) +-- Name: 
delete_orphan_releases(); Type: FUNCTION; Schema: public; Owner: postgres +-- + +CREATE FUNCTION public.delete_orphan_releases() RETURNS trigger + LANGUAGE plpgsql + AS $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM artist_releases + WHERE release_id = OLD.release_id + ) THEN + DELETE FROM releases WHERE id = OLD.release_id; + END IF; + RETURN NULL; +END; +$$; + + +ALTER FUNCTION public.delete_orphan_releases() OWNER TO postgres; + +SET default_tablespace = ''; + +SET default_table_access_method = heap; + +-- +-- TOC entry 231 (class 1259 OID 16901) +-- Name: api_keys; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.api_keys ( + id integer NOT NULL, + key text NOT NULL, + user_id integer NOT NULL, + created_at timestamp with time zone DEFAULT now() NOT NULL, + label text +); + + +ALTER TABLE public.api_keys OWNER TO postgres; + +-- +-- TOC entry 230 (class 1259 OID 16900) +-- Name: api_keys_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +ALTER TABLE public.api_keys ALTER COLUMN id ADD GENERATED ALWAYS AS IDENTITY ( + SEQUENCE NAME public.api_keys_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1 +); + + +-- +-- TOC entry 220 (class 1259 OID 16402) +-- Name: artist_aliases; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.artist_aliases ( + artist_id integer NOT NULL, + alias text NOT NULL, + source text NOT NULL, + is_primary boolean NOT NULL +); + + +ALTER TABLE public.artist_aliases OWNER TO postgres; + +-- +-- TOC entry 227 (class 1259 OID 16839) +-- Name: artist_releases; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.artist_releases ( + artist_id integer NOT NULL, + release_id integer NOT NULL +); + + +ALTER TABLE public.artist_releases OWNER TO postgres; + +-- +-- TOC entry 223 (class 1259 OID 16469) +-- Name: artist_tracks; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.artist_tracks ( + artist_id integer NOT NULL, + track_id integer NOT NULL +); + + +ALTER TABLE public.artist_tracks OWNER TO postgres; + +-- +-- TOC entry 219 (class 1259 OID 16393) +-- Name: artists; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.artists ( + id integer NOT NULL, + musicbrainz_id uuid, + image text, + image_source text +); + + +ALTER TABLE public.artists OWNER TO postgres; + +-- +-- TOC entry 218 (class 1259 OID 16392) +-- Name: artists_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +ALTER TABLE public.artists ALTER COLUMN id ADD GENERATED ALWAYS AS IDENTITY ( + SEQUENCE NAME public.artists_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1 +); + + +-- +-- TOC entry 235 (class 1259 OID 16980) +-- Name: artists_with_name; Type: VIEW; Schema: public; Owner: postgres +-- + +CREATE VIEW public.artists_with_name AS + SELECT a.id, + a.musicbrainz_id, + a.image, + a.image_source, + aa.alias AS name + FROM (public.artists a + JOIN public.artist_aliases aa ON ((aa.artist_id = a.id))) + WHERE (aa.is_primary = true); + + +ALTER VIEW public.artists_with_name OWNER TO postgres; + +-- +-- TOC entry 217 (class 1259 OID 16386) +-- Name: goose_db_version; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.goose_db_version ( + id integer NOT NULL, + version_id bigint NOT NULL, + is_applied boolean NOT NULL, + tstamp timestamp without time zone DEFAULT now() NOT NULL +); + + +ALTER TABLE public.goose_db_version OWNER TO postgres; + +-- +-- TOC entry 216 (class 1259 OID 16385) +-- Name: 
goose_db_version_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +ALTER TABLE public.goose_db_version ALTER COLUMN id ADD GENERATED BY DEFAULT AS IDENTITY ( + SEQUENCE NAME public.goose_db_version_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1 +); + + +-- +-- TOC entry 224 (class 1259 OID 16485) +-- Name: listens; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.listens ( + track_id integer NOT NULL, + listened_at timestamp with time zone NOT NULL, + client text, + user_id integer NOT NULL +); + + +ALTER TABLE public.listens OWNER TO postgres; + +-- +-- TOC entry 232 (class 1259 OID 16916) +-- Name: release_aliases; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.release_aliases ( + release_id integer NOT NULL, + alias text NOT NULL, + source text NOT NULL, + is_primary boolean NOT NULL +); + + +ALTER TABLE public.release_aliases OWNER TO postgres; + +-- +-- TOC entry 226 (class 1259 OID 16825) +-- Name: releases; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.releases ( + id integer NOT NULL, + musicbrainz_id uuid, + image uuid, + various_artists boolean DEFAULT false NOT NULL, + image_source text +); + + +ALTER TABLE public.releases OWNER TO postgres; + +-- +-- TOC entry 225 (class 1259 OID 16824) +-- Name: releases_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +ALTER TABLE public.releases ALTER COLUMN id ADD GENERATED ALWAYS AS IDENTITY ( + SEQUENCE NAME public.releases_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1 +); + + +-- +-- TOC entry 236 (class 1259 OID 16984) +-- Name: releases_with_title; Type: VIEW; Schema: public; Owner: postgres +-- + +CREATE VIEW public.releases_with_title AS + SELECT r.id, + r.musicbrainz_id, + r.image, + r.various_artists, + r.image_source, + ra.alias AS title + FROM (public.releases r + JOIN public.release_aliases ra ON ((ra.release_id = r.id))) + WHERE (ra.is_primary = true); + + +ALTER VIEW public.releases_with_title OWNER TO postgres; + +-- +-- TOC entry 233 (class 1259 OID 16940) +-- Name: sessions; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.sessions ( + id uuid NOT NULL, + user_id integer NOT NULL, + created_at timestamp without time zone DEFAULT now() NOT NULL, + expires_at timestamp without time zone NOT NULL, + persistent boolean DEFAULT false NOT NULL +); + + +ALTER TABLE public.sessions OWNER TO postgres; + +-- +-- TOC entry 234 (class 1259 OID 16967) +-- Name: track_aliases; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.track_aliases ( + track_id integer NOT NULL, + alias text NOT NULL, + is_primary boolean NOT NULL, + source text NOT NULL +); + + +ALTER TABLE public.track_aliases OWNER TO postgres; + +-- +-- TOC entry 222 (class 1259 OID 16455) +-- Name: tracks; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.tracks ( + id integer NOT NULL, + musicbrainz_id uuid, + duration integer DEFAULT 0 NOT NULL, + release_id integer NOT NULL +); + + +ALTER TABLE public.tracks OWNER TO postgres; + +-- +-- TOC entry 221 (class 1259 OID 16454) +-- Name: tracks_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +ALTER TABLE public.tracks ALTER COLUMN id ADD GENERATED ALWAYS AS IDENTITY ( + SEQUENCE NAME public.tracks_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1 +); + + +-- +-- TOC entry 237 (class 1259 OID 16988) +-- Name: tracks_with_title; Type: VIEW; Schema: public; Owner: 
postgres +-- + +CREATE VIEW public.tracks_with_title AS + SELECT t.id, + t.musicbrainz_id, + t.duration, + t.release_id, + ta.alias AS title + FROM (public.tracks t + JOIN public.track_aliases ta ON ((ta.track_id = t.id))) + WHERE (ta.is_primary = true); + + +ALTER VIEW public.tracks_with_title OWNER TO postgres; + +-- +-- TOC entry 229 (class 1259 OID 16890) +-- Name: users; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.users ( + id integer NOT NULL, + username text NOT NULL, + role public.role DEFAULT 'user'::public.role NOT NULL, + password bytea NOT NULL +); + + +ALTER TABLE public.users OWNER TO postgres; + +-- +-- TOC entry 228 (class 1259 OID 16889) +-- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +ALTER TABLE public.users ALTER COLUMN id ADD GENERATED ALWAYS AS IDENTITY ( + SEQUENCE NAME public.users_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1 +); + + +-- +-- TOC entry 3361 (class 2606 OID 16910) +-- Name: api_keys api_keys_key_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.api_keys + ADD CONSTRAINT api_keys_key_key UNIQUE (key); + + +-- +-- TOC entry 3363 (class 2606 OID 16908) +-- Name: api_keys api_keys_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.api_keys + ADD CONSTRAINT api_keys_pkey PRIMARY KEY (id); + + +-- +-- TOC entry 3335 (class 2606 OID 16408) +-- Name: artist_aliases artist_aliases_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.artist_aliases + ADD CONSTRAINT artist_aliases_pkey PRIMARY KEY (artist_id, alias); + + +-- +-- TOC entry 3354 (class 2606 OID 16843) +-- Name: artist_releases artist_releases_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.artist_releases + ADD CONSTRAINT artist_releases_pkey PRIMARY KEY (artist_id, release_id); + + +-- +-- TOC entry 3344 (class 2606 OID 16473) +-- Name: artist_tracks artist_tracks_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.artist_tracks + ADD CONSTRAINT artist_tracks_pkey PRIMARY KEY (artist_id, track_id); + + +-- +-- TOC entry 3331 (class 2606 OID 16401) +-- Name: artists artists_musicbrainz_id_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.artists + ADD CONSTRAINT artists_musicbrainz_id_key UNIQUE (musicbrainz_id); + + +-- +-- TOC entry 3333 (class 2606 OID 16399) +-- Name: artists artists_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.artists + ADD CONSTRAINT artists_pkey PRIMARY KEY (id); + + +-- +-- TOC entry 3329 (class 2606 OID 16391) +-- Name: goose_db_version goose_db_version_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.goose_db_version + ADD CONSTRAINT goose_db_version_pkey PRIMARY KEY (id); + + +-- +-- TOC entry 3347 (class 2606 OID 16622) +-- Name: listens listens_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.listens + ADD CONSTRAINT listens_pkey PRIMARY KEY (track_id, listened_at); + + +-- +-- TOC entry 3366 (class 2606 OID 16922) +-- Name: release_aliases release_aliases_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.release_aliases + ADD CONSTRAINT release_aliases_pkey PRIMARY KEY (release_id, alias); + + +-- +-- TOC entry 3350 (class 2606 OID 16833) +-- Name: releases releases_musicbrainz_id_key; Type: CONSTRAINT; Schema: 
public; Owner: postgres +-- + +ALTER TABLE ONLY public.releases + ADD CONSTRAINT releases_musicbrainz_id_key UNIQUE (musicbrainz_id); + + +-- +-- TOC entry 3352 (class 2606 OID 16831) +-- Name: releases releases_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.releases + ADD CONSTRAINT releases_pkey PRIMARY KEY (id); + + +-- +-- TOC entry 3369 (class 2606 OID 16946) +-- Name: sessions sessions_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.sessions + ADD CONSTRAINT sessions_pkey PRIMARY KEY (id); + + +-- +-- TOC entry 3371 (class 2606 OID 16973) +-- Name: track_aliases track_aliases_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.track_aliases + ADD CONSTRAINT track_aliases_pkey PRIMARY KEY (track_id, alias); + + +-- +-- TOC entry 3340 (class 2606 OID 16463) +-- Name: tracks tracks_musicbrainz_id_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.tracks + ADD CONSTRAINT tracks_musicbrainz_id_key UNIQUE (musicbrainz_id); + + +-- +-- TOC entry 3342 (class 2606 OID 16461) +-- Name: tracks tracks_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.tracks + ADD CONSTRAINT tracks_pkey PRIMARY KEY (id); + + +-- +-- TOC entry 3357 (class 2606 OID 16897) +-- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.users + ADD CONSTRAINT users_pkey PRIMARY KEY (id); + + +-- +-- TOC entry 3359 (class 2606 OID 16899) +-- Name: users users_username_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.users + ADD CONSTRAINT users_username_key UNIQUE (username); + + +-- +-- TOC entry 3336 (class 1259 OID 16936) +-- Name: idx_artist_aliases_alias_trgm; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_artist_aliases_alias_trgm ON public.artist_aliases USING gin (alias public.gin_trgm_ops); + + +-- +-- TOC entry 3337 (class 1259 OID 16495) +-- Name: idx_artist_aliases_artist_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_artist_aliases_artist_id ON public.artist_aliases USING btree (artist_id); + + +-- +-- TOC entry 3355 (class 1259 OID 16855) +-- Name: idx_artist_releases; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_artist_releases ON public.artist_releases USING btree (artist_id, release_id); + + +-- +-- TOC entry 3364 (class 1259 OID 16937) +-- Name: idx_release_aliases_alias_trgm; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_release_aliases_alias_trgm ON public.release_aliases USING gin (alias public.gin_trgm_ops); + + +-- +-- TOC entry 3338 (class 1259 OID 16854) +-- Name: idx_tracks_release_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_tracks_release_id ON public.tracks USING btree (release_id); + + +-- +-- TOC entry 3345 (class 1259 OID 16498) +-- Name: listens_listened_at_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX listens_listened_at_idx ON public.listens USING btree (listened_at); + + +-- +-- TOC entry 3348 (class 1259 OID 16499) +-- Name: listens_track_id_listened_at_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX listens_track_id_listened_at_idx ON public.listens USING btree (track_id, listened_at); + + +-- +-- TOC entry 3367 (class 1259 OID 16992) +-- Name: release_aliases_release_id_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX 
release_aliases_release_id_idx ON public.release_aliases USING btree (release_id) WHERE (is_primary = true); + + +-- +-- TOC entry 3372 (class 1259 OID 16993) +-- Name: track_aliases_track_id_idx; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX track_aliases_track_id_idx ON public.track_aliases USING btree (track_id) WHERE (is_primary = true); + + +-- +-- TOC entry 3384 (class 2620 OID 16964) +-- Name: artist_releases trg_delete_orphan_releases; Type: TRIGGER; Schema: public; Owner: postgres +-- + +CREATE TRIGGER trg_delete_orphan_releases AFTER DELETE ON public.artist_releases FOR EACH ROW EXECUTE FUNCTION public.delete_orphan_releases(); + + +-- +-- TOC entry 3380 (class 2606 OID 16957) +-- Name: api_keys api_keys_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.api_keys + ADD CONSTRAINT api_keys_user_id_fkey FOREIGN KEY (id) REFERENCES public.users(id) ON DELETE CASCADE; + + +-- +-- TOC entry 3373 (class 2606 OID 16409) +-- Name: artist_aliases artist_aliases_artist_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.artist_aliases + ADD CONSTRAINT artist_aliases_artist_id_fkey FOREIGN KEY (artist_id) REFERENCES public.artists(id) ON DELETE CASCADE; + + +-- +-- TOC entry 3378 (class 2606 OID 16844) +-- Name: artist_releases artist_releases_artist_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.artist_releases + ADD CONSTRAINT artist_releases_artist_id_fkey FOREIGN KEY (artist_id) REFERENCES public.artists(id) ON DELETE CASCADE; + + +-- +-- TOC entry 3379 (class 2606 OID 16849) +-- Name: artist_releases artist_releases_release_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.artist_releases + ADD CONSTRAINT artist_releases_release_id_fkey FOREIGN KEY (release_id) REFERENCES public.releases(id) ON DELETE CASCADE; + + +-- +-- TOC entry 3374 (class 2606 OID 16474) +-- Name: artist_tracks artist_tracks_artist_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.artist_tracks + ADD CONSTRAINT artist_tracks_artist_id_fkey FOREIGN KEY (artist_id) REFERENCES public.artists(id) ON DELETE CASCADE; + + +-- +-- TOC entry 3375 (class 2606 OID 16479) +-- Name: artist_tracks artist_tracks_track_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.artist_tracks + ADD CONSTRAINT artist_tracks_track_id_fkey FOREIGN KEY (track_id) REFERENCES public.tracks(id) ON DELETE CASCADE; + + +-- +-- TOC entry 3376 (class 2606 OID 16490) +-- Name: listens listens_track_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.listens + ADD CONSTRAINT listens_track_id_fkey FOREIGN KEY (track_id) REFERENCES public.tracks(id) ON DELETE CASCADE; + + +-- +-- TOC entry 3377 (class 2606 OID 16952) +-- Name: listens listens_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.listens + ADD CONSTRAINT listens_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); + + +-- +-- TOC entry 3381 (class 2606 OID 16923) +-- Name: release_aliases release_aliases_release_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.release_aliases + ADD CONSTRAINT release_aliases_release_id_fkey FOREIGN KEY (release_id) REFERENCES public.releases(id) ON DELETE CASCADE; + + +-- +-- TOC entry 3382 (class 2606 OID 16947) +-- Name: sessions 
sessions_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.sessions + ADD CONSTRAINT sessions_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); + + +-- +-- TOC entry 3383 (class 2606 OID 16974) +-- Name: track_aliases track_aliases_track_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.track_aliases + ADD CONSTRAINT track_aliases_track_id_fkey FOREIGN KEY (track_id) REFERENCES public.tracks(id) ON DELETE CASCADE; + + +-- Completed on 2025-06-11 14:30:58 UTC + +-- +-- PostgreSQL database dump complete +-- + +-- +goose StatementEnd + +-- +goose Down +-- +goose StatementBegin + + +-- +goose StatementEnd \ No newline at end of file diff --git a/db/migrations/old/000001_initial_schema.sql b/db/migrations/old/000001_initial_schema.sql new file mode 100644 index 0000000..3f71a86 --- /dev/null +++ b/db/migrations/old/000001_initial_schema.sql @@ -0,0 +1,106 @@ +-- +goose Up +-- +goose StatementBegin +SELECT 'up SQL query'; +-- +goose StatementEnd + +CREATE TABLE IF NOT EXISTS artists ( + id INT NOT NULL GENERATED ALWAYS AS IDENTITY, + PRIMARY KEY(id), + musicbrainz_id UUID UNIQUE, + name TEXT NOT NULL, + image UUID, + image_source TEXT +); + +CREATE TABLE IF NOT EXISTS artist_aliases ( + artist_id INT NOT NULL REFERENCES artists(id) ON DELETE CASCADE, + alias TEXT NOT NULL, + PRIMARY KEY (artist_id, alias), + source TEXT NOT NULL +); + +-- CREATE TABLE IF NOT EXISTS release_groups ( +-- id INT NOT NULL GENERATED ALWAYS AS IDENTITY, +-- PRIMARY KEY(id), +-- musicbrainz_id UUID UNIQUE, +-- title TEXT NOT NULL, +-- various_artists BOOLEAN NOT NULL DEFAULT FALSE, +-- image TEXT +-- ); + +CREATE TABLE IF NOT EXISTS releases ( + id INT NOT NULL GENERATED ALWAYS AS IDENTITY, + PRIMARY KEY(id), + musicbrainz_id UUID UNIQUE, + -- release_group_id INT REFERENCES release_groups(id) ON DELETE SET NULL, + image UUID, + image_source TEXT, + various_artists BOOLEAN NOT NULL DEFAULT FALSE, + title TEXT NOT NULL +); + +CREATE TABLE IF NOT EXISTS release_aliases ( + release_id INT NOT NULL REFERENCES releases(id) ON DELETE CASCADE, + alias TEXT NOT NULL, + PRIMARY KEY (release_id, alias), + source TEXT NOT NULL +); + +CREATE TABLE IF NOT EXISTS artist_releases ( + artist_id INT REFERENCES artists(id) ON DELETE CASCADE, + release_id INT REFERENCES releases(id) ON DELETE CASCADE, + PRIMARY KEY (artist_id, release_id) +); + +CREATE TABLE IF NOT EXISTS tracks ( + id INT NOT NULL GENERATED ALWAYS AS IDENTITY, + PRIMARY KEY(id), + musicbrainz_id UUID UNIQUE, + title TEXT NOT NULL, + duration INT NOT NULL DEFAULT 0, + release_id INT NOT NULL REFERENCES releases(id) ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS artist_tracks ( + artist_id INT REFERENCES artists(id) ON DELETE CASCADE, + track_id INT REFERENCES tracks(id) ON DELETE CASCADE, + PRIMARY KEY (artist_id, track_id) +); + +CREATE TABLE IF NOT EXISTS listens ( + track_id INT NOT NULL REFERENCES tracks(id) ON DELETE CASCADE, + listened_at TIMESTAMPTZ NOT NULL, + PRIMARY KEY(track_id, listened_at) +); + +-- Indexes +CREATE INDEX idx_artist_aliases_artist_id ON artist_aliases(artist_id); +CREATE INDEX idx_artist_releases ON artist_releases(artist_id, release_id); +CREATE INDEX idx_tracks_release_id ON tracks(release_id); +CREATE INDEX listens_listened_at_idx ON listens(listened_at); +CREATE INDEX listens_track_id_listened_at_idx ON listens(track_id, listened_at); + +-- Trigram search support +CREATE EXTENSION IF NOT EXISTS pg_trgm; +CREATE INDEX 
idx_tracks_title_trgm ON tracks USING gin (title gin_trgm_ops); + +-- +goose Down +-- +goose StatementBegin +SELECT 'down SQL query'; +-- +goose StatementEnd + +DROP INDEX idx_artist_aliases_artist_id; +DROP INDEX idx_artist_releases; +DROP INDEX idx_tracks_release_id; +DROP INDEX listens_listened_at_idx; +DROP INDEX listens_track_id_listened_at_idx; +DROP INDEX idx_tracks_title_trgm; + +DROP TABLE listens; +DROP TABLE artist_aliases; +DROP TABLE artist_releases; +DROP TABLE artist_tracks; +DROP TABLE tracks; +DROP TABLE releases; +DROP TABLE release_groups; +DROP TABLE artists; diff --git a/db/migrations/old/20250603_switch_to_release_ids.sql b/db/migrations/old/20250603_switch_to_release_ids.sql new file mode 100644 index 0000000..351815a --- /dev/null +++ b/db/migrations/old/20250603_switch_to_release_ids.sql @@ -0,0 +1,87 @@ +-- +goose Up +-- +goose StatementBegin + +-- Step 1: Create new releases table with surrogate ID +DROP TABLE releases; +CREATE TABLE releases ( + id INT NOT NULL GENERATED ALWAYS AS IDENTITY PRIMARY KEY, + musicbrainz_id UUID UNIQUE, + release_group_id INT REFERENCES release_groups(id) ON DELETE SET NULL, + title TEXT NOT NULL +); + +-- Step 2: Create artist_releases (replaces artist_release_groups) +CREATE TABLE artist_releases ( + artist_id INT REFERENCES artists(id) ON DELETE CASCADE, + release_id INT REFERENCES releases(id) ON DELETE CASCADE, + PRIMARY KEY (artist_id, release_id) +); + +-- Step 3: Populate releases with one release per release_group +INSERT INTO releases (musicbrainz_id, release_group_id, title) +SELECT musicbrainz_id, id AS release_group_id, title +FROM release_groups; + +-- Step 4: Add release_id to tracks temporarily +ALTER TABLE tracks ADD COLUMN release_id INT; + +-- Step 5: Fill release_id in tracks from the newly inserted releases +UPDATE tracks +SET release_id = releases.id +FROM releases +WHERE tracks.release_group_id = releases.release_group_id; + +-- Step 6: Set release_id to NOT NULL now that it's populated +ALTER TABLE tracks ALTER COLUMN release_id SET NOT NULL; + +-- Step 7: Drop old FK and column for release_group_id +ALTER TABLE tracks DROP CONSTRAINT tracks_release_group_id_fkey; +ALTER TABLE tracks DROP COLUMN release_group_id; + +-- Step 8: Drop old artist_release_groups and migrate to artist_releases +INSERT INTO artist_releases (artist_id, release_id) +SELECT arg.artist_id, r.id +FROM artist_release_groups arg +JOIN releases r ON arg.release_group_id = r.release_group_id; + +DROP TABLE artist_release_groups; + +-- Step 9: Add indexes for new relations +CREATE INDEX idx_tracks_release_id ON tracks(release_id); +CREATE INDEX idx_artist_releases ON artist_releases(artist_id, release_id); + +-- +goose StatementEnd + +-- +goose Down +-- +goose StatementBegin + +-- Rollback: Recreate artist_release_groups +CREATE TABLE artist_release_groups ( + artist_id INT REFERENCES artists(id) ON DELETE CASCADE, + release_group_id INT REFERENCES release_groups(id) ON DELETE CASCADE, + PRIMARY KEY (artist_id, release_group_id) +); + +-- Recreate release_group_id in tracks +ALTER TABLE tracks ADD COLUMN release_group_id INT; + +-- Restore release_group_id values +UPDATE tracks +SET release_group_id = r.release_group_id +FROM releases r +WHERE tracks.release_id = r.id; + +-- Restore artist_release_groups values +INSERT INTO artist_release_groups (artist_id, release_group_id) +SELECT ar.artist_id, r.release_group_id +FROM artist_releases ar +JOIN releases r ON ar.release_id = r.id; + +-- Drop new tables and columns +ALTER TABLE tracks DROP 
COLUMN release_id; +DROP INDEX IF EXISTS idx_tracks_release_id; +DROP INDEX IF EXISTS idx_artist_releases; +DROP TABLE artist_releases; +DROP TABLE releases; + +-- +goose StatementEnd diff --git a/db/migrations/old/20250606_users_and_source_tracking.sql b/db/migrations/old/20250606_users_and_source_tracking.sql new file mode 100644 index 0000000..e95827d --- /dev/null +++ b/db/migrations/old/20250606_users_and_source_tracking.sql @@ -0,0 +1,52 @@ +-- +goose Up +-- +goose StatementBegin + +CREATE TYPE role AS ENUM ('admin', 'user'); + +CREATE TABLE IF NOT EXISTS users ( + id INT NOT NULL GENERATED ALWAYS AS IDENTITY, + PRIMARY KEY(id), + username TEXT NOT NULL UNIQUE, + password TEXT NOT NULL, + role role NOT NULL DEFAULT 'user' +); + +CREATE TABLE IF NOT EXISTS api_keys ( + id INT NOT NULL GENERATED ALWAYS AS IDENTITY PRIMARY KEY, + key TEXT NOT NULL UNIQUE, + user_id INT NOT NULL REFERENCES users(id) ON DELETE CASCADE, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + label TEXT +); + +CREATE TABLE IF NOT EXISTS release_aliases ( + release_id INT NOT NULL REFERENCES releases(id) ON DELETE CASCADE, + alias TEXT NOT NULL, + PRIMARY KEY (release_id, alias), + source TEXT NOT NULL +); + +ALTER TABLE listens +ADD user_id INT NOT NULL REFERENCES users(id); +ALTER TABLE listens +ADD client TEXT; + +-- +goose StatementEnd + +-- +goose Down +-- +goose StatementBegin + +ALTER TABLE listens +DROP COLUMN client; +ALTER TABLE listens +DROP COLUMN user_id; + +DROP TABLE IF EXISTS release_aliases; + +DROP TABLE IF EXISTS api_keys; + +DROP TABLE IF EXISTS users; + +DROP TYPE IF EXISTS role; + +-- +goose StatementEnd diff --git a/db/migrations/old/20250607_populate_release_aliases.sql b/db/migrations/old/20250607_populate_release_aliases.sql new file mode 100644 index 0000000..0afad39 --- /dev/null +++ b/db/migrations/old/20250607_populate_release_aliases.sql @@ -0,0 +1,8 @@ +-- +goose Up +-- +goose StatementBegin + +INSERT INTO release_aliases (release_id, alias, source) +SELECT id, title, 'Canonical' +FROM releases; + +-- +goose StatementEnd \ No newline at end of file diff --git a/db/migrations/old/20250608_add_indexes_for_search.sql b/db/migrations/old/20250608_add_indexes_for_search.sql new file mode 100644 index 0000000..a71ca16 --- /dev/null +++ b/db/migrations/old/20250608_add_indexes_for_search.sql @@ -0,0 +1,7 @@ +-- +goose Up +-- +goose StatementBegin + +CREATE INDEX idx_artist_aliases_alias_trgm ON artist_aliases USING GIN (alias gin_trgm_ops); +CREATE INDEX idx_release_aliases_alias_trgm ON release_aliases USING GIN (alias gin_trgm_ops); + +-- +goose StatementEnd \ No newline at end of file diff --git a/db/migrations/old/20250609_default_user.sql b/db/migrations/old/20250609_default_user.sql new file mode 100644 index 0000000..0abd196 --- /dev/null +++ b/db/migrations/old/20250609_default_user.sql @@ -0,0 +1,7 @@ +-- +goose Up +-- +goose StatementBegin + +ALTER TABLE users DROP COLUMN password; +ALTER TABLE users ADD password BYTEA NOT NULL; + +-- +goose StatementEnd \ No newline at end of file diff --git a/db/migrations/old/20250610_sessions.sql b/db/migrations/old/20250610_sessions.sql new file mode 100644 index 0000000..00a244a --- /dev/null +++ b/db/migrations/old/20250610_sessions.sql @@ -0,0 +1,19 @@ +-- +goose Up +-- +goose StatementBegin + +CREATE TABLE sessions ( + id UUID PRIMARY KEY, + user_id INT NOT NULL REFERENCES users(id), + created_at TIMESTAMP NOT NULL DEFAULT NOW(), + expires_at TIMESTAMP NOT NULL, + persistent BOOLEAN NOT NULL DEFAULT false +); + +-- +goose StatementEnd + 
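The sessions table just defined holds one row per login: the UUID id presumably doubles as the session token handed to the client, expires_at bounds its lifetime, and persistent marks remember-me sessions. A minimal sketch of how such a row might be created and checked, with hypothetical values (the real session handling lives in the Go application code, not in the migrations):

```
-- Hypothetical: open a 30-day persistent session for user 1.
INSERT INTO sessions (id, user_id, expires_at, persistent)
VALUES (gen_random_uuid(), 1, now() + interval '30 days', true)
RETURNING id;

-- Hypothetical: resolve a presented session id, accepting only unexpired rows.
SELECT s.user_id
FROM sessions s
WHERE s.id = '00000000-0000-0000-0000-000000000000'::uuid
  AND s.expires_at > now();
```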
+-- +goose Down +-- +goose StatementBegin + +DROP TABLE IF EXISTS sessions; + +-- +goose StatementEnd \ No newline at end of file diff --git a/db/migrations/old/20250611_release_delete_trigger.sql b/db/migrations/old/20250611_release_delete_trigger.sql new file mode 100644 index 0000000..837b019 --- /dev/null +++ b/db/migrations/old/20250611_release_delete_trigger.sql @@ -0,0 +1,30 @@ +-- +goose Up +-- +goose StatementBegin + +CREATE OR REPLACE FUNCTION delete_orphan_releases() +RETURNS TRIGGER AS $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM artist_releases + WHERE release_id = OLD.release_id + ) THEN + DELETE FROM releases WHERE id = OLD.release_id; + END IF; + RETURN NULL; +END; +$$ LANGUAGE plpgsql; + +CREATE TRIGGER trg_delete_orphan_releases +AFTER DELETE ON artist_releases +FOR EACH ROW +EXECUTE FUNCTION delete_orphan_releases(); + +-- +goose StatementEnd + +-- +goose Down +-- +goose StatementBegin + +DROP TRIGGER IF EXISTS trg_delete_orphan_releases ON artist_releases; +DROP FUNCTION IF EXISTS delete_orphan_releases; + +-- +goose StatementEnd diff --git a/db/migrations/old/20250612_refactor_alias.sql b/db/migrations/old/20250612_refactor_alias.sql new file mode 100644 index 0000000..44483aa --- /dev/null +++ b/db/migrations/old/20250612_refactor_alias.sql @@ -0,0 +1,81 @@ +-- +goose Up +-- +goose StatementBegin + +-- Step 1: Add the column as nullable initially +ALTER TABLE artist_aliases +ADD COLUMN is_primary boolean; + +-- Step 2: Set it to true if alias matches artist name, false otherwise +UPDATE artist_aliases aa +SET is_primary = (aa.alias = a.name) +FROM artists a +WHERE aa.artist_id = a.id; + +-- Step 3: Make the column NOT NULL +ALTER TABLE artist_aliases +ALTER COLUMN is_primary SET NOT NULL; + +-- Step 1: Add the column as nullable initially +ALTER TABLE release_aliases +ADD COLUMN is_primary boolean; + +-- Step 2: Set is_primary to true if alias matches release title, false otherwise +UPDATE release_aliases ra +SET is_primary = (ra.alias = r.title) +FROM releases r +WHERE ra.release_id = r.id; + +-- Step 3: Make the column NOT NULL +ALTER TABLE release_aliases +ALTER COLUMN is_primary SET NOT NULL; + +-- Step 1: Create the table +CREATE TABLE track_aliases ( + track_id INTEGER NOT NULL REFERENCES tracks(id) ON DELETE CASCADE, + alias TEXT NOT NULL, + is_primary BOOLEAN NOT NULL, + source TEXT NOT NULL, + PRIMARY KEY (track_id, alias) +); + +-- Step 2: Insert canonical titles from the tracks table +INSERT INTO track_aliases (track_id, alias, is_primary, source) +SELECT + id, + title, + TRUE, + 'Canonical' +FROM tracks; + +ALTER TABLE artists DROP COLUMN IF EXISTS name; +ALTER TABLE tracks DROP COLUMN IF EXISTS title; +ALTER TABLE releases DROP COLUMN IF EXISTS title; + +CREATE VIEW IF NOT EXISTS artists_with_name AS +SELECT + a.*, + aa.alias AS name +FROM artists a +JOIN artist_aliases aa ON aa.artist_id = a.id +WHERE aa.is_primary = TRUE; + +CREATE VIEW IF NOT EXISTS releases_with_title AS +SELECT + r.*, + ra.alias AS title +FROM releases r +JOIN release_aliases ra ON ra.release_id = r.id +WHERE ra.is_primary = TRUE; + +CREATE VIEW IF NOT EXISTS tracks_with_title AS +SELECT + t.*, + ta.alias AS title +FROM tracks t +JOIN track_aliases ta ON ta.track_id = t.id +WHERE ta.is_primary = TRUE; + +CREATE INDEX ON release_aliases (release_id) WHERE is_primary = TRUE; +CREATE INDEX ON track_aliases (track_id) WHERE is_primary = TRUE; + +-- +goose StatementEnd \ No newline at end of file diff --git a/db/queries/alias.sql b/db/queries/alias.sql new file mode 100644 index 
0000000..a4a4978 --- /dev/null +++ b/db/queries/alias.sql @@ -0,0 +1,65 @@ +-- name: InsertArtistAlias :exec +INSERT INTO artist_aliases (artist_id, alias, source, is_primary) +VALUES ($1, $2, $3, $4) +ON CONFLICT DO NOTHING; + +-- name: GetAllArtistAliases :many +SELECT * FROM artist_aliases +WHERE artist_id = $1 ORDER BY is_primary DESC; + +-- name: GetArtistAlias :one +SELECT * FROM artist_aliases +WHERE alias = $1 LIMIT 1; + +-- name: SetArtistAliasPrimaryStatus :exec +UPDATE artist_aliases SET is_primary = $1 WHERE artist_id = $2 AND alias = $3; + +-- name: DeleteArtistAlias :exec +DELETE FROM artist_aliases +WHERE artist_id = $1 +AND alias = $2 +AND is_primary = false; + +-- name: InsertReleaseAlias :exec +INSERT INTO release_aliases (release_id, alias, source, is_primary) +VALUES ($1, $2, $3, $4) +ON CONFLICT DO NOTHING; + +-- name: GetAllReleaseAliases :many +SELECT * FROM release_aliases +WHERE release_id = $1 ORDER BY is_primary DESC; + +-- name: GetReleaseAlias :one +SELECT * FROM release_aliases +WHERE alias = $1 LIMIT 1; + +-- name: SetReleaseAliasPrimaryStatus :exec +UPDATE release_aliases SET is_primary = $1 WHERE release_id = $2 AND alias = $3; + +-- name: DeleteReleaseAlias :exec +DELETE FROM release_aliases +WHERE release_id = $1 +AND alias = $2 +AND is_primary = false; + +-- name: InsertTrackAlias :exec +INSERT INTO track_aliases (track_id, alias, source, is_primary) +VALUES ($1, $2, $3, $4) +ON CONFLICT DO NOTHING; + +-- name: GetAllTrackAliases :many +SELECT * FROM track_aliases +WHERE track_id = $1 ORDER BY is_primary DESC; + +-- name: GetTrackAlias :one +SELECT * FROM track_aliases +WHERE alias = $1 LIMIT 1; + +-- name: SetTrackAliasPrimaryStatus :exec +UPDATE track_aliases SET is_primary = $1 WHERE track_id = $2 AND alias = $3; + +-- name: DeleteTrackAlias :exec +DELETE FROM track_aliases +WHERE track_id = $1 +AND alias = $2 +AND is_primary = false; \ No newline at end of file diff --git a/db/queries/artist.sql b/db/queries/artist.sql new file mode 100644 index 0000000..89eef45 --- /dev/null +++ b/db/queries/artist.sql @@ -0,0 +1,112 @@ +-- name: InsertArtist :one +INSERT INTO artists (musicbrainz_id, image, image_source) +VALUES ($1, $2, $3) +RETURNING *; + +-- name: GetArtist :one +SELECT + a.*, + array_agg(aa.alias)::text[] AS aliases +FROM artists_with_name a +LEFT JOIN artist_aliases aa ON a.id = aa.artist_id +WHERE a.id = $1 +GROUP BY a.id, a.musicbrainz_id, a.image, a.image_source, a.name; + +-- name: GetTrackArtists :many +SELECT + a.* +FROM artists_with_name a +LEFT JOIN artist_tracks at ON a.id = at.artist_id +WHERE at.track_id = $1 +GROUP BY a.id, a.musicbrainz_id, a.image, a.image_source, a.name; + +-- name: GetArtistByImage :one +SELECT * FROM artists WHERE image = $1 LIMIT 1; + +-- name: GetReleaseArtists :many +SELECT + a.* +FROM artists_with_name a +LEFT JOIN artist_releases ar ON a.id = ar.artist_id +WHERE ar.release_id = $1 +GROUP BY a.id, a.musicbrainz_id, a.image, a.image_source, a.name; + +-- name: GetArtistByName :one +WITH artist_with_aliases AS ( + SELECT + a.*, + COALESCE(array_agg(aa.alias), '{}')::text[] AS aliases + FROM artists_with_name a + LEFT JOIN artist_aliases aa ON a.id = aa.artist_id + WHERE a.id IN ( + SELECT aa2.artist_id FROM artist_aliases aa2 WHERE aa2.alias = $1 + ) + GROUP BY a.id, a.musicbrainz_id, a.image, a.image_source, a.name +) +SELECT * FROM artist_with_aliases; + +-- name: GetArtistByMbzID :one +SELECT + a.*, + array_agg(aa.alias)::text[] AS aliases +FROM artists_with_name a +LEFT JOIN artist_aliases aa ON a.id = 
aa.artist_id +WHERE a.musicbrainz_id = $1 +GROUP BY a.id, a.musicbrainz_id, a.image, a.image_source, a.name; + +-- name: GetTopArtistsPaginated :many +SELECT + a.id, + a.name, + a.musicbrainz_id, + a.image, + COUNT(*) AS listen_count +FROM listens l +JOIN tracks t ON l.track_id = t.id +JOIN artist_tracks at ON at.track_id = t.id +JOIN artists_with_name a ON a.id = at.artist_id +WHERE l.listened_at BETWEEN $1 AND $2 +GROUP BY a.id, a.name, a.musicbrainz_id, a.image, a.image_source, a.name +ORDER BY listen_count DESC +LIMIT $3 OFFSET $4; + +-- name: CountTopArtists :one +SELECT COUNT(DISTINCT at.artist_id) AS total_count +FROM listens l +JOIN artist_tracks at ON l.track_id = at.track_id +WHERE l.listened_at BETWEEN $1 AND $2; + +-- name: UpdateArtistMbzID :exec +UPDATE artists SET musicbrainz_id = $2 +WHERE id = $1; + +-- name: UpdateArtistImage :exec +UPDATE artists SET image = $2, image_source = $3 +WHERE id = $1; + +-- name: DeleteConflictingArtistTracks :exec +DELETE FROM artist_tracks at +WHERE at.artist_id = $1 + AND track_id IN ( + SELECT at.track_id FROM artist_tracks at WHERE at.artist_id = $2 + ); + +-- name: UpdateArtistTracks :exec +UPDATE artist_tracks +SET artist_id = $2 +WHERE artist_id = $1; + +-- name: DeleteConflictingArtistReleases :exec +DELETE FROM artist_releases ar +WHERE ar.artist_id = $1 + AND release_id IN ( + SELECT ar.release_id FROM artist_releases ar WHERE ar.artist_id = $2 + ); + +-- name: UpdateArtistReleases :exec +UPDATE artist_releases +SET artist_id = $2 +WHERE artist_id = $1; + +-- name: DeleteArtist :exec +DELETE FROM artists WHERE id = $1; \ No newline at end of file diff --git a/db/queries/etc.sql b/db/queries/etc.sql new file mode 100644 index 0000000..44139b8 --- /dev/null +++ b/db/queries/etc.sql @@ -0,0 +1,9 @@ +-- name: CleanOrphanedEntries :exec +DO $$ +BEGIN + DELETE FROM tracks WHERE id NOT IN (SELECT l.track_id FROM listens l); + DELETE FROM releases WHERE id NOT IN (SELECT t.release_id FROM tracks t); +-- DELETE FROM releases WHERE release_group_id NOT IN (SELECT t.release_group_id FROM tracks t); +-- DELETE FROM releases WHERE release_group_id NOT IN (SELECT rg.id FROM release_groups rg); + DELETE FROM artists WHERE id NOT IN (SELECT at.artist_id FROM artist_tracks at); +END $$; diff --git a/db/queries/listen.sql b/db/queries/listen.sql new file mode 100644 index 0000000..9049c4e --- /dev/null +++ b/db/queries/listen.sql @@ -0,0 +1,222 @@ +-- name: InsertListen :exec +INSERT INTO listens (track_id, listened_at, user_id, client) +VALUES ($1, $2, $3, $4) +ON CONFLICT DO NOTHING; + +-- name: GetLastListensPaginated :many +SELECT + l.*, + t.title AS track_title, + t.release_id AS release_id, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at + JOIN artists_with_name a ON a.id = at.artist_id + WHERE at.track_id = t.id + ) AS artists +FROM listens l +JOIN tracks_with_title t ON l.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2 +ORDER BY l.listened_at DESC +LIMIT $3 OFFSET $4; + +-- name: GetLastListensFromArtistPaginated :many +SELECT + l.*, + t.title AS track_title, + t.release_id AS release_id, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at + JOIN artists_with_name a ON a.id = at.artist_id + WHERE at.track_id = t.id + ) AS artists +FROM listens l +JOIN tracks_with_title t ON l.track_id = t.id +JOIN artist_tracks at ON t.id = at.track_id +WHERE at.artist_id = $5 + AND l.listened_at BETWEEN $1 AND $2 +ORDER BY l.listened_at DESC +LIMIT $3 OFFSET $4; + 
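+-- Usage sketch (illustrative, not part of the schema): the paginated listen queries
+-- above and below share one parameter layout: $1/$2 bound the listened_at window,
+-- $3 is the page size (LIMIT), $4 the OFFSET, and $5, where present, restricts the
+-- results to a single artist, release, or track. Assuming the usual page-to-offset
+-- mapping of OFFSET = (page - 1) * LIMIT, page 3 of an artist's history at 50 rows
+-- per page over the last 30 days would bind roughly:
+--   $1 = now() - interval '30 days', $2 = now(), $3 = 50, $4 = 100, $5 = <artist id>
+-- With sqlc, each named query becomes a method on the generated Queries type (e.g.
+-- GetLastListensFromArtistPaginated(ctx, arg)), so these values are presumably
+-- passed through the generated params struct rather than as raw positional arguments.
+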
+-- name: GetLastListensFromReleasePaginated :many +SELECT + l.*, + t.title AS track_title, + t.release_id AS release_id, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at + JOIN artists_with_name a ON a.id = at.artist_id + WHERE at.track_id = t.id + ) AS artists +FROM listens l +JOIN tracks_with_title t ON l.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2 + AND t.release_id = $5 +ORDER BY l.listened_at DESC +LIMIT $3 OFFSET $4; + +-- name: GetLastListensFromTrackPaginated :many +SELECT + l.*, + t.title AS track_title, + t.release_id AS release_id, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at + JOIN artists_with_name a ON a.id = at.artist_id + WHERE at.track_id = t.id + ) AS artists +FROM listens l +JOIN tracks_with_title t ON l.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2 + AND t.id = $5 +ORDER BY l.listened_at DESC +LIMIT $3 OFFSET $4; + +-- name: CountListens :one +SELECT COUNT(*) AS total_count +FROM listens l +WHERE l.listened_at BETWEEN $1 AND $2; + +-- name: CountListensFromTrack :one +SELECT COUNT(*) AS total_count +FROM listens l +WHERE l.listened_at BETWEEN $1 AND $2 + AND l.track_id = $3; + +-- name: CountListensFromArtist :one +SELECT COUNT(*) AS total_count +FROM listens l +JOIN artist_tracks at ON l.track_id = at.track_id +WHERE l.listened_at BETWEEN $1 AND $2 + AND at.artist_id = $3; + +-- name: CountListensFromRelease :one +SELECT COUNT(*) AS total_count +FROM listens l +JOIN tracks t ON l.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2 + AND t.release_id = $3; + +-- name: CountTimeListened :one +SELECT COALESCE(SUM(t.duration), 0)::BIGINT AS seconds_listened +FROM listens l +JOIN tracks t ON l.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2; + +-- name: CountTimeListenedToArtist :one +SELECT COALESCE(SUM(t.duration), 0)::BIGINT AS seconds_listened +FROM listens l +JOIN tracks t ON l.track_id = t.id +JOIN artist_tracks at ON t.id = at.track_id +WHERE l.listened_at BETWEEN $1 AND $2 + AND at.artist_id = $3; + +-- name: CountTimeListenedToRelease :one +SELECT COALESCE(SUM(t.duration), 0)::BIGINT AS seconds_listened +FROM listens l +JOIN tracks t ON l.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2 + AND t.release_id = $3; + +-- name: CountTimeListenedToTrack :one +SELECT COALESCE(SUM(t.duration), 0)::BIGINT AS seconds_listened +FROM listens l +JOIN tracks t ON l.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2 + AND t.id = $3; + +-- name: ListenActivity :many +WITH buckets AS ( + SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start +), +bucketed_listens AS ( + SELECT + b.bucket_start, + COUNT(l.listened_at) AS listen_count + FROM buckets b + LEFT JOIN listens l + ON l.listened_at >= b.bucket_start + AND l.listened_at < b.bucket_start + $3::interval + GROUP BY b.bucket_start + ORDER BY b.bucket_start +) +SELECT * FROM bucketed_listens; + +-- name: ListenActivityForArtist :many +WITH buckets AS ( + SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start +), +filtered_listens AS ( + SELECT l.* + FROM listens l + JOIN artist_tracks t ON l.track_id = t.track_id + WHERE t.artist_id = $4 +), +bucketed_listens AS ( + SELECT + b.bucket_start, + COUNT(l.listened_at) AS listen_count + FROM buckets b + LEFT JOIN filtered_listens l + ON l.listened_at >= b.bucket_start + AND l.listened_at < b.bucket_start + $3::interval + GROUP BY b.bucket_start + ORDER BY b.bucket_start +) +SELECT * FROM 
bucketed_listens; + +-- name: ListenActivityForRelease :many +WITH buckets AS ( + SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start +), +filtered_listens AS ( + SELECT l.* + FROM listens l + JOIN tracks t ON l.track_id = t.id + WHERE t.release_id = $4 +), +bucketed_listens AS ( + SELECT + b.bucket_start, + COUNT(l.listened_at) AS listen_count + FROM buckets b + LEFT JOIN filtered_listens l + ON l.listened_at >= b.bucket_start + AND l.listened_at < b.bucket_start + $3::interval + GROUP BY b.bucket_start + ORDER BY b.bucket_start +) +SELECT * FROM bucketed_listens; + +-- name: ListenActivityForTrack :many +WITH buckets AS ( + SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start +), +filtered_listens AS ( + SELECT l.* + FROM listens l + JOIN tracks t ON l.track_id = t.id + WHERE t.id = $4 +), +bucketed_listens AS ( + SELECT + b.bucket_start, + COUNT(l.listened_at) AS listen_count + FROM buckets b + LEFT JOIN filtered_listens l + ON l.listened_at >= b.bucket_start + AND l.listened_at < b.bucket_start + $3::interval + GROUP BY b.bucket_start + ORDER BY b.bucket_start +) +SELECT * FROM bucketed_listens; + +-- name: UpdateTrackIdForListens :exec +UPDATE listens SET track_id = $2 +WHERE track_id = $1; + +-- name: DeleteListen :exec +DELETE FROM listens WHERE track_id = $1 AND listened_at = $2; \ No newline at end of file diff --git a/db/queries/release.sql b/db/queries/release.sql new file mode 100644 index 0000000..e90d95e --- /dev/null +++ b/db/queries/release.sql @@ -0,0 +1,118 @@ +-- name: InsertRelease :one +INSERT INTO releases (musicbrainz_id, various_artists, image, image_source) +VALUES ($1, $2, $3, $4) +RETURNING *; + +-- name: GetRelease :one +SELECT * FROM releases_with_title +WHERE id = $1 LIMIT 1; + +-- name: GetReleaseByMbzID :one +SELECT * FROM releases_with_title +WHERE musicbrainz_id = $1 LIMIT 1; + +-- name: GetReleaseByImageID :one +SELECT * FROM releases +WHERE image = $1 LIMIT 1; + +-- name: GetReleaseByArtistAndTitle :one +SELECT r.* +FROM releases_with_title r +JOIN artist_releases ar ON r.id = ar.release_id +WHERE r.title = $1 AND ar.artist_id = $2 +LIMIT 1; + +-- name: GetReleaseByArtistAndTitles :one +SELECT r.* +FROM releases_with_title r +JOIN artist_releases ar ON r.id = ar.release_id +WHERE r.title = ANY ($1::TEXT[]) AND ar.artist_id = $2 +LIMIT 1; + +-- name: GetTopReleasesFromArtist :many +SELECT + r.*, + COUNT(*) AS listen_count, + ( + SELECT json_agg(DISTINCT jsonb_build_object('id', a.id, 'name', a.name)) + FROM artists_with_name a + JOIN artist_releases ar ON ar.artist_id = a.id + WHERE ar.release_id = r.id + ) AS artists +FROM listens l +JOIN tracks t ON l.track_id = t.id +JOIN releases_with_title r ON t.release_id = r.id +JOIN artist_releases ar ON r.id = ar.release_id +WHERE ar.artist_id = $5 + AND l.listened_at BETWEEN $1 AND $2 +GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source +ORDER BY listen_count DESC +LIMIT $3 OFFSET $4; + +-- name: GetTopReleasesPaginated :many +SELECT + r.*, + COUNT(*) AS listen_count, + ( + SELECT json_agg(DISTINCT jsonb_build_object('id', a.id, 'name', a.name)) + FROM artists_with_name a + JOIN artist_releases ar ON ar.artist_id = a.id + WHERE ar.release_id = r.id + ) AS artists +FROM listens l +JOIN tracks t ON l.track_id = t.id +JOIN releases_with_title r ON t.release_id = r.id +WHERE l.listened_at BETWEEN $1 AND $2 +GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source +ORDER BY listen_count 
DESC +LIMIT $3 OFFSET $4; + +-- name: CountTopReleases :one +SELECT COUNT(DISTINCT r.id) AS total_count +FROM listens l +JOIN tracks t ON l.track_id = t.id +JOIN releases r ON t.release_id = r.id +WHERE l.listened_at BETWEEN $1 AND $2; + +-- name: CountReleasesFromArtist :one +SELECT COUNT(*) +FROM releases r +JOIN artist_releases ar ON r.id = ar.release_id +WHERE ar.artist_id = $1; + +-- name: AssociateArtistToRelease :exec +INSERT INTO artist_releases (artist_id, release_id) +VALUES ($1, $2) +ON CONFLICT DO NOTHING; + +-- name: GetReleasesWithoutImages :many +SELECT + r.*, + ( + SELECT json_agg(DISTINCT jsonb_build_object('id', a.id, 'name', a.name)) + FROM artists_with_name a + JOIN artist_releases ar ON a.id = ar.artist_id + WHERE ar.release_id = r.id + ) AS artists +FROM releases_with_title r +WHERE r.image IS NULL + AND r.id > $2 +ORDER BY r.id ASC +LIMIT $1; + +-- name: UpdateReleaseMbzID :exec +UPDATE releases SET musicbrainz_id = $2 +WHERE id = $1; + +-- name: UpdateReleaseImage :exec +UPDATE releases SET image = $2, image_source = $3 +WHERE id = $1; + +-- name: DeleteRelease :exec +DELETE FROM releases WHERE id = $1; + +-- name: DeleteReleasesFromArtist :exec +DELETE FROM releases r +USING artist_releases ar +WHERE ar.release_id = r.id + AND ar.artist_id = $1; \ No newline at end of file diff --git a/db/queries/search.sql b/db/queries/search.sql new file mode 100644 index 0000000..95c3cd8 --- /dev/null +++ b/db/queries/search.sql @@ -0,0 +1,161 @@ +-- name: SearchArtists :many +SELECT id, name, musicbrainz_id, image, score +FROM ( + SELECT + a.id, + a.name, + a.musicbrainz_id, + a.image, + similarity(aa.alias, $1) AS score, + ROW_NUMBER() OVER (PARTITION BY a.id ORDER BY similarity(aa.alias, $1) DESC) AS rn + FROM artist_aliases aa + JOIN artists_with_name a ON aa.artist_id = a.id + WHERE similarity(aa.alias, $1) > 0.28 +) ranked +WHERE rn = 1 +ORDER BY score DESC +LIMIT $2; + +-- name: SearchArtistsBySubstring :many +SELECT id, name, musicbrainz_id, image, score +FROM ( + SELECT + a.id, + a.name, + a.musicbrainz_id, + a.image, + 1.0 AS score, -- why + ROW_NUMBER() OVER (PARTITION BY a.id ORDER BY aa.alias) AS rn + FROM artist_aliases aa + JOIN artists_with_name a ON aa.artist_id = a.id + WHERE aa.alias ILIKE $1 || '%' +) ranked +WHERE rn = 1 +ORDER BY score DESC +LIMIT $2; + +-- name: SearchTracks :many +SELECT + ranked.id, + ranked.title, + ranked.musicbrainz_id, + ranked.release_id, + ranked.image, + ranked.score, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at + JOIN artists_with_name a ON a.id = at.artist_id + WHERE at.track_id = ranked.id + ) AS artists +FROM ( + SELECT + t.id, + t.title, + t.musicbrainz_id, + t.release_id, + r.image, + similarity(ta.alias, $1) AS score, + ROW_NUMBER() OVER (PARTITION BY t.id ORDER BY similarity(ta.alias, $1) DESC) AS rn + FROM track_aliases ta + JOIN tracks_with_title t ON ta.track_id = t.id + JOIN releases r ON t.release_id = r.id + WHERE similarity(ta.alias, $1) > 0.28 +) ranked +WHERE rn = 1 +ORDER BY score DESC, title +LIMIT $2; + +-- name: SearchTracksBySubstring :many +SELECT + ranked.id, + ranked.title, + ranked.musicbrainz_id, + ranked.release_id, + ranked.image, + ranked.score, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at + JOIN artists_with_name a ON a.id = at.artist_id + WHERE at.track_id = ranked.id + ) AS artists +FROM ( + SELECT + t.id, + t.title, + t.musicbrainz_id, + t.release_id, + r.image, + 1.0 AS score, + ROW_NUMBER() OVER 
(PARTITION BY t.id ORDER BY ta.alias) AS rn + FROM track_aliases ta + JOIN tracks_with_title t ON ta.track_id = t.id + JOIN releases r ON t.release_id = r.id + WHERE ta.alias ILIKE $1 || '%' +) ranked +WHERE rn = 1 +ORDER BY score DESC, title +LIMIT $2; + +-- name: SearchReleases :many +SELECT + ranked.id, + ranked.title, + ranked.musicbrainz_id, + ranked.image, + ranked.various_artists, + ranked.score, + ( + SELECT json_agg(DISTINCT jsonb_build_object('id', a.id, 'name', a.name)) + FROM artists_with_name a + JOIN artist_releases ar ON ar.artist_id = a.id + WHERE ar.release_id = ranked.id + ) AS artists +FROM ( + SELECT + r.id, + r.title, + r.musicbrainz_id, + r.image, + r.various_artists, + similarity(ra.alias, $1) AS score, + ROW_NUMBER() OVER (PARTITION BY r.id ORDER BY similarity(ra.alias, $1) DESC) AS rn + FROM release_aliases ra + JOIN releases_with_title r ON ra.release_id = r.id + WHERE similarity(ra.alias, $1) > 0.28 +) ranked +WHERE rn = 1 +ORDER BY score DESC, title +LIMIT $2; + +-- name: SearchReleasesBySubstring :many +SELECT + ranked.id, + ranked.title, + ranked.musicbrainz_id, + ranked.image, + ranked.various_artists, + ranked.score, + ( + SELECT json_agg(DISTINCT jsonb_build_object('id', a.id, 'name', a.name)) + FROM artists_with_name a + JOIN artist_releases ar ON ar.artist_id = a.id + WHERE ar.release_id = ranked.id + ) AS artists +FROM ( + SELECT + r.id, + r.title, + r.musicbrainz_id, + r.image, + r.various_artists, + 1.0 AS score, -- idk why + ROW_NUMBER() OVER (PARTITION BY r.id ORDER BY ra.alias) AS rn + FROM release_aliases ra + JOIN releases_with_title r ON ra.release_id = r.id + WHERE ra.alias ILIKE $1 || '%' +) ranked +WHERE rn = 1 +ORDER BY score DESC, title +LIMIT $2; diff --git a/db/queries/sessions.sql b/db/queries/sessions.sql new file mode 100644 index 0000000..c45601f --- /dev/null +++ b/db/queries/sessions.sql @@ -0,0 +1,19 @@ +-- name: InsertSession :one +INSERT INTO sessions (id, user_id, expires_at, persistent) +VALUES ($1, $2, $3, $4) +RETURNING *; + +-- name: GetSession :one +SELECT * FROM sessions WHERE id = $1 AND expires_at > NOW(); + +-- name: UpdateSessionExpiry :exec +UPDATE sessions SET expires_at = $2 WHERE id = $1; + +-- name: DeleteSession :exec +DELETE FROM sessions WHERE id = $1; + +-- name: GetUserBySession :one +SELECT * +FROM users u +JOIN sessions s ON u.id = s.user_id +WHERE s.id = $1; diff --git a/db/queries/track.sql b/db/queries/track.sql new file mode 100644 index 0000000..73fce83 --- /dev/null +++ b/db/queries/track.sql @@ -0,0 +1,139 @@ +-- name: InsertTrack :one +INSERT INTO tracks (musicbrainz_id, release_id, duration) +VALUES ($1, $2, $3) +RETURNING *; + +-- name: AssociateArtistToTrack :exec +INSERT INTO artist_tracks (artist_id, track_id) +VALUES ($1, $2) +ON CONFLICT DO NOTHING; + +-- name: GetTrack :one +SELECT + t.*, + r.image +FROM tracks_with_title t +JOIN releases r ON t.release_id = r.id +WHERE t.id = $1 LIMIT 1; + +-- name: GetTrackByMbzID :one +SELECT * FROM tracks_with_title +WHERE musicbrainz_id = $1 LIMIT 1; + +-- name: GetAllTracksFromArtist :many +SELECT t.* +FROM tracks_with_title t +JOIN artist_tracks at ON t.id = at.track_id +WHERE at.artist_id = $1; + +-- name: GetTrackByTitleAndArtists :one +SELECT t.* +FROM tracks_with_title t +JOIN artist_tracks at ON at.track_id = t.id +WHERE t.title = $1 + AND at.artist_id = ANY($2::int[]) +GROUP BY t.id, t.title, t.musicbrainz_id, t.duration, t.release_id +HAVING COUNT(DISTINCT at.artist_id) = cardinality($2::int[]); + +-- name: GetTopTracksPaginated :many +SELECT + 
t.id, + t.title, + t.musicbrainz_id, + t.release_id, + r.image, + COUNT(*) AS listen_count, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at + JOIN artists_with_name a ON a.id = at.artist_id + WHERE at.track_id = t.id + ) AS artists +FROM listens l +JOIN tracks_with_title t ON l.track_id = t.id +JOIN releases r ON t.release_id = r.id +WHERE l.listened_at BETWEEN $1 AND $2 +GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image +ORDER BY listen_count DESC +LIMIT $3 OFFSET $4; + +-- name: GetTopTracksByArtistPaginated :many +SELECT + t.id, + t.title, + t.musicbrainz_id, + t.release_id, + r.image, + COUNT(*) AS listen_count, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at2 + JOIN artists_with_name a ON a.id = at2.artist_id + WHERE at2.track_id = t.id + ) AS artists +FROM listens l +JOIN tracks_with_title t ON l.track_id = t.id +JOIN releases r ON t.release_id = r.id +JOIN artist_tracks at ON at.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2 + AND at.artist_id = $5 +GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image +ORDER BY listen_count DESC +LIMIT $3 OFFSET $4; + +-- name: GetTopTracksInReleasePaginated :many +SELECT + t.id, + t.title, + t.musicbrainz_id, + t.release_id, + r.image, + COUNT(*) AS listen_count, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at2 + JOIN artists_with_name a ON a.id = at2.artist_id + WHERE at2.track_id = t.id + ) AS artists +FROM listens l +JOIN tracks_with_title t ON l.track_id = t.id +JOIN releases r ON t.release_id = r.id +WHERE l.listened_at BETWEEN $1 AND $2 + AND t.release_id = $5 +GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image +ORDER BY listen_count DESC +LIMIT $3 OFFSET $4; + +-- name: CountTopTracks :one +SELECT COUNT(DISTINCT l.track_id) AS total_count +FROM listens l +WHERE l.listened_at BETWEEN $1 AND $2; + +-- name: CountTopTracksByArtist :one +SELECT COUNT(DISTINCT l.track_id) AS total_count +FROM listens l +JOIN artist_tracks at ON l.track_id = at.track_id +WHERE l.listened_at BETWEEN $1 AND $2 +AND at.artist_id = $3; + +-- name: CountTopTracksByRelease :one +SELECT COUNT(DISTINCT l.track_id) AS total_count +FROM listens l +JOIN tracks t ON l.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2 +AND t.release_id = $3; + +-- name: UpdateTrackMbzID :exec +UPDATE tracks SET musicbrainz_id = $2 +WHERE id = $1; + +-- name: UpdateTrackDuration :exec +UPDATE tracks SET duration = $2 +WHERE id = $1; + +-- name: UpdateReleaseForAll :exec +UPDATE tracks SET release_id = $2 +WHERE release_id = $1; + +-- name: DeleteTrack :exec +DELETE FROM tracks WHERE id = $1; \ No newline at end of file diff --git a/db/queries/users.sql b/db/queries/users.sql new file mode 100644 index 0000000..83cc87b --- /dev/null +++ b/db/queries/users.sql @@ -0,0 +1,45 @@ +-- name: InsertUser :one +INSERT INTO users (username, password, role) +VALUES ($1, $2, $3) +RETURNING *; + +-- name: DeleteUser :exec +DELETE FROM users WHERE id = $1; + +-- name: GetUserByUsername :one +SELECT * FROM users WHERE username = $1; + +-- name: CountUsers :one +SELECT COUNT(*) FROM users; + +-- name: InsertApiKey :one +INSERT INTO api_keys (user_id, key, label) +VALUES ($1, $2, $3) +RETURNING *; + +-- name: DeleteApiKey :exec +DELETE FROM api_keys WHERE id = $1; + +-- name: CountApiKeys :one +SELECT COUNT(*) FROM api_keys WHERE user_id = $1; + +-- name: GetUserByApiKey :one +SELECT u.* +FROM users u +JOIN api_keys ak ON u.id = 
ak.user_id +WHERE ak.key = $1; + +-- name: GetAllApiKeysByUserID :many +SELECT ak.* +FROM api_keys ak +JOIN users u ON ak.user_id = u.id +WHERE u.id = $1; + +-- name: UpdateUserUsername :exec +UPDATE users SET username = $2 WHERE id = $1; + +-- name: UpdateUserPassword :exec +UPDATE users SET password = $2 WHERE id = $1; + +-- name: UpdateApiKeyLabel :exec +UPDATE api_keys SET label = $3 WHERE id = $1 AND user_id = $2; diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 0000000..6240da8 --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1,21 @@ +# build output +dist/ +# generated types +.astro/ + +# dependencies +node_modules/ + +# logs +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* + + +# environment variables +.env +.env.production + +# macOS-specific files +.DS_Store diff --git a/docs/.vscode/extensions.json b/docs/.vscode/extensions.json new file mode 100644 index 0000000..22a1505 --- /dev/null +++ b/docs/.vscode/extensions.json @@ -0,0 +1,4 @@ +{ + "recommendations": ["astro-build.astro-vscode"], + "unwantedRecommendations": [] +} diff --git a/docs/.vscode/launch.json b/docs/.vscode/launch.json new file mode 100644 index 0000000..d642209 --- /dev/null +++ b/docs/.vscode/launch.json @@ -0,0 +1,11 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "command": "./node_modules/.bin/astro dev", + "name": "Development server", + "request": "launch", + "type": "node-terminal" + } + ] +} diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000..824799f --- /dev/null +++ b/docs/README.md @@ -0,0 +1,54 @@ +# Starlight Starter Kit: Basics + +[![Built with Starlight](https://astro.badg.es/v2/built-with-starlight/tiny.svg)](https://starlight.astro.build) + +``` +yarn create astro@latest -- --template starlight +``` + +[![Open in StackBlitz](https://developer.stackblitz.com/img/open_in_stackblitz.svg)](https://stackblitz.com/github/withastro/starlight/tree/main/examples/basics) +[![Open with CodeSandbox](https://assets.codesandbox.io/github/button-edit-lime.svg)](https://codesandbox.io/p/sandbox/github/withastro/starlight/tree/main/examples/basics) +[![Deploy to Netlify](https://www.netlify.com/img/deploy/button.svg)](https://app.netlify.com/start/deploy?repository=https://github.com/withastro/starlight&create_from_path=examples/basics) +[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fwithastro%2Fstarlight%2Ftree%2Fmain%2Fexamples%2Fbasics&project-name=my-starlight-docs&repository-name=my-starlight-docs) + +> 🧑‍🚀 **Seasoned astronaut?** Delete this file. Have fun! + +## 🚀 Project Structure + +Inside of your Astro + Starlight project, you'll see the following folders and files: + +``` +. +├── public/ +├── src/ +│ ├── assets/ +│ ├── content/ +│ │ ├── docs/ +│ └── content.config.ts +├── astro.config.mjs +├── package.json +└── tsconfig.json +``` + +Starlight looks for `.md` or `.mdx` files in the `src/content/docs/` directory. Each file is exposed as a route based on its file name. + +Images can be added to `src/assets/` and embedded in Markdown with a relative link. + +Static assets, like favicons, can be placed in the `public/` directory. 
+ +## 🧞 Commands + +All commands are run from the root of the project, from a terminal: + +| Command | Action | +| :------------------------ | :----------------------------------------------- | +| `yarn install` | Installs dependencies | +| `yarn dev` | Starts local dev server at `localhost:4321` | +| `yarn build` | Build your production site to `./dist/` | +| `yarn preview` | Preview your build locally, before deploying | +| `yarn astro ...` | Run CLI commands like `astro add`, `astro check` | +| `yarn astro -- --help` | Get help using the Astro CLI | + +## 👀 Want to learn more? + +Check out [Starlight’s docs](https://starlight.astro.build/), read [the Astro documentation](https://docs.astro.build), or jump into the [Astro Discord server](https://astro.build/chat). diff --git a/docs/astro.config.mjs b/docs/astro.config.mjs new file mode 100644 index 0000000..9d8e12d --- /dev/null +++ b/docs/astro.config.mjs @@ -0,0 +1,36 @@ +// @ts-check +import { defineConfig } from 'astro/config'; +import starlight from '@astrojs/starlight'; + +import tailwindcss from '@tailwindcss/vite'; + +// https://astro.build/config +export default defineConfig({ + integrations: [ + starlight({ + title: 'Koito Docs', + social: [{ icon: 'github', label: 'GitHub', href: 'https://github.com/gabehf/koito' }], + sidebar: [ + { + label: 'Guides', + items: [ + // Each item here is one entry in the navigation menu. + { label: 'Example Guide', slug: 'guides/example' }, + ], + }, + { + label: 'Reference', + autogenerate: { directory: 'reference' }, + }, + ], + customCss: [ + // Path to your Tailwind base styles: + './src/styles/global.css', + ], + }), + ], + + vite: { + plugins: [tailwindcss()], + }, +}); \ No newline at end of file diff --git a/docs/package.json b/docs/package.json new file mode 100644 index 0000000..d79c9a2 --- /dev/null +++ b/docs/package.json @@ -0,0 +1,20 @@ +{ + "name": "docs", + "type": "module", + "version": "0.0.1", + "scripts": { + "dev": "astro dev", + "start": "astro dev", + "build": "astro build", + "preview": "astro preview", + "astro": "astro" + }, + "dependencies": { + "@astrojs/starlight": "^0.34.3", + "@astrojs/starlight-tailwind": "^4.0.1", + "@tailwindcss/vite": "^4.1.10", + "astro": "^5.6.1", + "sharp": "^0.32.5", + "tailwindcss": "^4.1.10" + } +} diff --git a/docs/public/favicon.svg b/docs/public/favicon.svg new file mode 100644 index 0000000..cba5ac1 --- /dev/null +++ b/docs/public/favicon.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/src/assets/houston.webp b/docs/src/assets/houston.webp new file mode 100644 index 0000000..930c164 Binary files /dev/null and b/docs/src/assets/houston.webp differ diff --git a/docs/src/content.config.ts b/docs/src/content.config.ts new file mode 100644 index 0000000..d9ee8c9 --- /dev/null +++ b/docs/src/content.config.ts @@ -0,0 +1,7 @@ +import { defineCollection } from 'astro:content'; +import { docsLoader } from '@astrojs/starlight/loaders'; +import { docsSchema } from '@astrojs/starlight/schema'; + +export const collections = { + docs: defineCollection({ loader: docsLoader(), schema: docsSchema() }), +}; diff --git a/docs/src/content/docs/guides/example.md b/docs/src/content/docs/guides/example.md new file mode 100644 index 0000000..ebd0f3b --- /dev/null +++ b/docs/src/content/docs/guides/example.md @@ -0,0 +1,11 @@ +--- +title: Example Guide +description: A guide in my new Starlight docs site. +--- + +Guides lead a user through a specific task they want to accomplish, often with a sequence of steps. 
+Writing a good guide requires thinking about what your users are trying to do. + +## Further reading + +- Read [about how-to guides](https://diataxis.fr/how-to-guides/) in the Diátaxis framework diff --git a/docs/src/content/docs/index.mdx b/docs/src/content/docs/index.mdx new file mode 100644 index 0000000..9dc2071 --- /dev/null +++ b/docs/src/content/docs/index.mdx @@ -0,0 +1,36 @@ +--- +title: Home +description: Get started building your docs site with Starlight. +template: splash +hero: + tagline: Congrats on setting up a new Starlight project! + image: + file: ../../assets/houston.webp + actions: + - text: Example Guide + link: /guides/example/ + icon: right-arrow + - text: Read the Starlight docs + link: https://starlight.astro.build + icon: external + variant: minimal +--- + +import { Card, CardGrid } from '@astrojs/starlight/components'; + +## Next steps + + + + Edit `src/content/docs/index.mdx` to see this page change. + + + Add Markdown or MDX files to `src/content/docs` to create new pages. + + + Edit your `sidebar` and other config in `astro.config.mjs`. + + + Learn more in [the Starlight Docs](https://starlight.astro.build/). + + diff --git a/docs/src/content/docs/reference/example.md b/docs/src/content/docs/reference/example.md new file mode 100644 index 0000000..0224f09 --- /dev/null +++ b/docs/src/content/docs/reference/example.md @@ -0,0 +1,11 @@ +--- +title: Example Reference +description: A reference page in my new Starlight docs site. +--- + +Reference pages are ideal for outlining how things work in terse and clear terms. +Less concerned with telling a story or addressing a specific use case, they should give a comprehensive outline of what you're documenting. + +## Further reading + +- Read [about reference](https://diataxis.fr/reference/) in the Diátaxis framework diff --git a/docs/src/styles/global.css b/docs/src/styles/global.css new file mode 100644 index 0000000..c95745a --- /dev/null +++ b/docs/src/styles/global.css @@ -0,0 +1,36 @@ +@layer base, starlight, theme, components, utilities; + +@import '@astrojs/starlight-tailwind'; +@import 'tailwindcss/theme.css' layer(theme); +@import 'tailwindcss/utilities.css' layer(utilities); + +@theme { + /* Your preferred text font. Starlight uses a system font stack by default. */ + --font-sans: 'Jost', sans-serif; + /* Your preferred code font. Starlight uses system monospace fonts by default. */ + --font-mono: 'IBM Plex Mono'; + /* Your preferred accent color. Indigo is closest to Starlight’s defaults. */ + --color-accent-50: #FABEB4; + --color-accent-100: #FA9482; + --color-accent-200: #FF826D; + --color-accent-300: #FF7961; + --color-accent-400: #FF6E55; + --color-accent-500: #FF6247; + --color-accent-600: #FF573A; + --color-accent-700: #E6472B; + --color-accent-800: #D0391F; + --color-accent-900: #B82F17; + --color-accent-950: #8A220F; + /* Your preferred gray scale. Zinc is closest to Starlight’s defaults. 
*/ + --color-gray-50: #FFFDFD; + --color-gray-100: #FCF2F0; + --color-gray-200: #F1E8E6; + --color-gray-300: #E9DFDD; + --color-gray-400: #CBB7B3; + --color-gray-500: #4F4845; + --color-gray-600: #3D3735; + --color-gray-700: #2A2524; + --color-gray-800: #1E1B19; + --color-gray-900: #161312; + --color-gray-950: #0C0A09; +} \ No newline at end of file diff --git a/docs/tsconfig.json b/docs/tsconfig.json new file mode 100644 index 0000000..8bf91d3 --- /dev/null +++ b/docs/tsconfig.json @@ -0,0 +1,5 @@ +{ + "extends": "astro/tsconfigs/strict", + "include": [".astro/types.d.ts", "**/*"], + "exclude": ["dist"] +} diff --git a/docs/yarn.lock b/docs/yarn.lock new file mode 100644 index 0000000..f0cfcde --- /dev/null +++ b/docs/yarn.lock @@ -0,0 +1,4053 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +"@ampproject/remapping@^2.3.0": + version "2.3.0" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.3.0.tgz#ed441b6fa600072520ce18b43d2c8cc8caecc7f4" + integrity sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw== + dependencies: + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.24" + +"@astrojs/compiler@^2.12.2": + version "2.12.2" + resolved "https://registry.yarnpkg.com/@astrojs/compiler/-/compiler-2.12.2.tgz#5913b6ec7efffebdfb37fae9a50122802ae08c64" + integrity sha512-w2zfvhjNCkNMmMMOn5b0J8+OmUaBL1o40ipMvqcG6NRpdC+lKxmTi48DT8Xw0SzJ3AfmeFLB45zXZXtmbsjcgw== + +"@astrojs/internal-helpers@0.6.1": + version "0.6.1" + resolved "https://registry.yarnpkg.com/@astrojs/internal-helpers/-/internal-helpers-0.6.1.tgz#87d0f5dbe4bdc2b61c6409672b921bca193abad6" + integrity sha512-l5Pqf6uZu31aG+3Lv8nl/3s4DbUzdlxTWDof4pEpto6GUJNhhCbelVi9dEyurOVyqaelwmS9oSyOWOENSfgo9A== + +"@astrojs/markdown-remark@6.3.2", "@astrojs/markdown-remark@^6.3.1": + version "6.3.2" + resolved "https://registry.yarnpkg.com/@astrojs/markdown-remark/-/markdown-remark-6.3.2.tgz#b85bab36bc92bd9426e75f2efcbdfde26fad57d8" + integrity sha512-bO35JbWpVvyKRl7cmSJD822e8YA8ThR/YbUsciWNA7yTcqpIAL2hJDToWP5KcZBWxGT6IOdOkHSXARSNZc4l/Q== + dependencies: + "@astrojs/internal-helpers" "0.6.1" + "@astrojs/prism" "3.3.0" + github-slugger "^2.0.0" + hast-util-from-html "^2.0.3" + hast-util-to-text "^4.0.2" + import-meta-resolve "^4.1.0" + js-yaml "^4.1.0" + mdast-util-definitions "^6.0.0" + rehype-raw "^7.0.0" + rehype-stringify "^10.0.1" + remark-gfm "^4.0.1" + remark-parse "^11.0.0" + remark-rehype "^11.1.2" + remark-smartypants "^3.0.2" + shiki "^3.2.1" + smol-toml "^1.3.1" + unified "^11.0.5" + unist-util-remove-position "^5.0.0" + unist-util-visit "^5.0.0" + unist-util-visit-parents "^6.0.1" + vfile "^6.0.3" + +"@astrojs/mdx@^4.2.3": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@astrojs/mdx/-/mdx-4.3.0.tgz#1656383910e1e29c955a17a88866382c975e8224" + integrity sha512-OGX2KvPeBzjSSKhkCqrUoDMyzFcjKt5nTE5SFw3RdoLf0nrhyCXBQcCyclzWy1+P+XpOamn+p+hm1EhpCRyPxw== + dependencies: + "@astrojs/markdown-remark" "6.3.2" + "@mdx-js/mdx" "^3.1.0" + acorn "^8.14.1" + es-module-lexer "^1.6.0" + estree-util-visit "^2.0.0" + hast-util-to-html "^9.0.5" + kleur "^4.1.5" + rehype-raw "^7.0.0" + remark-gfm "^4.0.1" + remark-smartypants "^3.0.2" + source-map "^0.7.4" + unist-util-visit "^5.0.0" + vfile "^6.0.3" + +"@astrojs/prism@3.3.0": + version "3.3.0" + resolved "https://registry.yarnpkg.com/@astrojs/prism/-/prism-3.3.0.tgz#5888fcd5665d416450a4fe55b1b7b701b8d586d9" + integrity 
sha512-q8VwfU/fDZNoDOf+r7jUnMC2//H2l0TuQ6FkGJL8vD8nw/q5KiL3DS1KKBI3QhI9UQhpJ5dc7AtqfbXWuOgLCQ== + dependencies: + prismjs "^1.30.0" + +"@astrojs/sitemap@^3.3.0": + version "3.4.1" + resolved "https://registry.yarnpkg.com/@astrojs/sitemap/-/sitemap-3.4.1.tgz#681ab9ca5470021f3b628726b971c5b9ad0333ad" + integrity sha512-VjZvr1e4FH6NHyyHXOiQgLiw94LnCVY4v06wN/D0gZKchTMkg71GrAHJz81/huafcmavtLkIv26HnpfDq6/h/Q== + dependencies: + sitemap "^8.0.0" + stream-replace-string "^2.0.0" + zod "^3.24.2" + +"@astrojs/starlight-tailwind@^4.0.1": + version "4.0.1" + resolved "https://registry.yarnpkg.com/@astrojs/starlight-tailwind/-/starlight-tailwind-4.0.1.tgz#4d710bc3807f0ab48ff77fd9906c8ebc37ea7a56" + integrity sha512-AOOEWTGqJ7fG66U04xTmZQZ40oZnUYe4Qljpr+No88ozKywtsD1DiXOrGTeHCnZu0hRtMbRtBGB1fZsf0L62iw== + +"@astrojs/starlight@^0.34.3": + version "0.34.3" + resolved "https://registry.yarnpkg.com/@astrojs/starlight/-/starlight-0.34.3.tgz#7bc0d8461296e741820fa1ef10a50a914db696bd" + integrity sha512-MAuD3NF+E+QXJJuVKofoR6xcPTP4BJmYWeOBd03udVdubNGVnPnSWVZAi+ZtnTofES4+mJdp8BNGf+ubUxkiiA== + dependencies: + "@astrojs/markdown-remark" "^6.3.1" + "@astrojs/mdx" "^4.2.3" + "@astrojs/sitemap" "^3.3.0" + "@pagefind/default-ui" "^1.3.0" + "@types/hast" "^3.0.4" + "@types/js-yaml" "^4.0.9" + "@types/mdast" "^4.0.4" + astro-expressive-code "^0.41.1" + bcp-47 "^2.1.0" + hast-util-from-html "^2.0.1" + hast-util-select "^6.0.2" + hast-util-to-string "^3.0.0" + hastscript "^9.0.0" + i18next "^23.11.5" + js-yaml "^4.1.0" + klona "^2.0.6" + mdast-util-directive "^3.0.0" + mdast-util-to-markdown "^2.1.0" + mdast-util-to-string "^4.0.0" + pagefind "^1.3.0" + rehype "^13.0.1" + rehype-format "^5.0.0" + remark-directive "^3.0.0" + ultrahtml "^1.6.0" + unified "^11.0.5" + unist-util-visit "^5.0.0" + vfile "^6.0.2" + +"@astrojs/telemetry@3.3.0": + version "3.3.0" + resolved "https://registry.yarnpkg.com/@astrojs/telemetry/-/telemetry-3.3.0.tgz#397dc1f3ab123470571d80c9b4c1335195d30417" + integrity sha512-UFBgfeldP06qu6khs/yY+q1cDAaArM2/7AEIqQ9Cuvf7B1hNLq0xDrZkct+QoIGyjq56y8IaE2I3CTvG99mlhQ== + dependencies: + ci-info "^4.2.0" + debug "^4.4.0" + dlv "^1.1.3" + dset "^3.1.4" + is-docker "^3.0.0" + is-wsl "^3.1.0" + which-pm-runs "^1.1.0" + +"@babel/helper-string-parser@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz#54da796097ab19ce67ed9f88b47bb2ec49367687" + integrity sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA== + +"@babel/helper-validator-identifier@^7.27.1": + version "7.27.1" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz#a7054dcc145a967dd4dc8fee845a57c1316c9df8" + integrity sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow== + +"@babel/parser@^7.25.4": + version "7.27.5" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.27.5.tgz#ed22f871f110aa285a6fd934a0efed621d118826" + integrity sha512-OsQd175SxWkGlzbny8J3K8TnnDD0N3lrIUtB92xwyRpzaenGZhxDvxN/JgU00U3CDZNj9tPuDJ5H0WS4Nt3vKg== + dependencies: + "@babel/types" "^7.27.3" + +"@babel/runtime@^7.23.2": + version "7.27.6" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.27.6.tgz#ec4070a04d76bae8ddbb10770ba55714a417b7c6" + integrity sha512-vbavdySgbTTrmFE+EsiqUTzlOr5bzlnJtUv9PynGCAKvfQqjIXbvFdumPM/GxMDfyuGMJaJAU6TO4zc1Jf1i8Q== + +"@babel/types@^7.25.4", "@babel/types@^7.27.3": + version "7.27.6" + resolved 
"https://registry.yarnpkg.com/@babel/types/-/types-7.27.6.tgz#a434ca7add514d4e646c80f7375c0aa2befc5535" + integrity sha512-ETyHEk2VHHvl9b9jZP5IHPavHYk57EhanlRRuae9XCpb/j5bDCbPPMOBfCWhnl/7EDJz0jEMCi/RhccCE8r1+Q== + dependencies: + "@babel/helper-string-parser" "^7.27.1" + "@babel/helper-validator-identifier" "^7.27.1" + +"@capsizecss/unpack@^2.4.0": + version "2.4.0" + resolved "https://registry.yarnpkg.com/@capsizecss/unpack/-/unpack-2.4.0.tgz#db93ee886b8016c155ba7934c7adbe42a9734f13" + integrity sha512-GrSU71meACqcmIUxPYOJvGKF0yryjN/L1aCuE9DViCTJI7bfkjgYDPD1zbNDcINJwSSP6UaBZY9GAbYDO7re0Q== + dependencies: + blob-to-buffer "^1.2.8" + cross-fetch "^3.0.4" + fontkit "^2.0.2" + +"@ctrl/tinycolor@^4.0.4": + version "4.1.0" + resolved "https://registry.yarnpkg.com/@ctrl/tinycolor/-/tinycolor-4.1.0.tgz#91a8f8120ffc9da2feb2a38f7862b300d5e9691a" + integrity sha512-WyOx8cJQ+FQus4Mm4uPIZA64gbk3Wxh0so5Lcii0aJifqwoVOlfFtorjLE0Hen4OYyHZMXDWqMmaQemBhgxFRQ== + +"@emnapi/core@^1.4.3": + version "1.4.3" + resolved "https://registry.yarnpkg.com/@emnapi/core/-/core-1.4.3.tgz#9ac52d2d5aea958f67e52c40a065f51de59b77d6" + integrity sha512-4m62DuCE07lw01soJwPiBGC0nAww0Q+RY70VZ+n49yDIO13yyinhbWCeNnaob0lakDtWQzSdtNWzJeOJt2ma+g== + dependencies: + "@emnapi/wasi-threads" "1.0.2" + tslib "^2.4.0" + +"@emnapi/runtime@^1.2.0", "@emnapi/runtime@^1.4.3": + version "1.4.3" + resolved "https://registry.yarnpkg.com/@emnapi/runtime/-/runtime-1.4.3.tgz#c0564665c80dc81c448adac23f9dfbed6c838f7d" + integrity sha512-pBPWdu6MLKROBX05wSNKcNb++m5Er+KQ9QkB+WVM+pW2Kx9hoSrVTnu3BdkI5eBLZoKu/J6mW/B6i6bJB2ytXQ== + dependencies: + tslib "^2.4.0" + +"@emnapi/wasi-threads@1.0.2", "@emnapi/wasi-threads@^1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@emnapi/wasi-threads/-/wasi-threads-1.0.2.tgz#977f44f844eac7d6c138a415a123818c655f874c" + integrity sha512-5n3nTJblwRi8LlXkJ9eBzu+kZR8Yxcc7ubakyQTFzPMtIhFpUBRbsnc2Dv88IZDIbCDlBiWrknhB4Lsz7mg6BA== + dependencies: + tslib "^2.4.0" + +"@esbuild/aix-ppc64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.25.5.tgz#4e0f91776c2b340e75558f60552195f6fad09f18" + integrity sha512-9o3TMmpmftaCMepOdA5k/yDw8SfInyzWWTjYTFCX3kPSDJMROQTb8jg+h9Cnwnmm1vOzvxN7gIfB5V2ewpjtGA== + +"@esbuild/android-arm64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.25.5.tgz#bc766407f1718923f6b8079c8c61bf86ac3a6a4f" + integrity sha512-VGzGhj4lJO+TVGV1v8ntCZWJktV7SGCs3Pn1GRWI1SBFtRALoomm8k5E9Pmwg3HOAal2VDc2F9+PM/rEY6oIDg== + +"@esbuild/android-arm@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.25.5.tgz#4290d6d3407bae3883ad2cded1081a234473ce26" + integrity sha512-AdJKSPeEHgi7/ZhuIPtcQKr5RQdo6OO2IL87JkianiMYMPbCtot9fxPbrMiBADOWWm3T2si9stAiVsGbTQFkbA== + +"@esbuild/android-x64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.25.5.tgz#40c11d9cbca4f2406548c8a9895d321bc3b35eff" + integrity sha512-D2GyJT1kjvO//drbRT3Hib9XPwQeWd9vZoBJn+bu/lVsOZ13cqNdDeqIF/xQ5/VmWvMduP6AmXvylO/PIc2isw== + +"@esbuild/darwin-arm64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.25.5.tgz#49d8bf8b1df95f759ac81eb1d0736018006d7e34" + integrity sha512-GtaBgammVvdF7aPIgH2jxMDdivezgFu6iKpmT+48+F8Hhg5J/sfnDieg0aeG/jfSvkYQU2/pceFPDKlqZzwnfQ== + +"@esbuild/darwin-x64@0.25.5": + version "0.25.5" + resolved 
"https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.25.5.tgz#e27a5d92a14886ef1d492fd50fc61a2d4d87e418" + integrity sha512-1iT4FVL0dJ76/q1wd7XDsXrSW+oLoquptvh4CLR4kITDtqi2e/xwXwdCVH8hVHU43wgJdsq7Gxuzcs6Iq/7bxQ== + +"@esbuild/freebsd-arm64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.5.tgz#97cede59d638840ca104e605cdb9f1b118ba0b1c" + integrity sha512-nk4tGP3JThz4La38Uy/gzyXtpkPW8zSAmoUhK9xKKXdBCzKODMc2adkB2+8om9BDYugz+uGV7sLmpTYzvmz6Sw== + +"@esbuild/freebsd-x64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.25.5.tgz#71c77812042a1a8190c3d581e140d15b876b9c6f" + integrity sha512-PrikaNjiXdR2laW6OIjlbeuCPrPaAl0IwPIaRv+SMV8CiM8i2LqVUHFC1+8eORgWyY7yhQY+2U2fA55mBzReaw== + +"@esbuild/linux-arm64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.25.5.tgz#f7b7c8f97eff8ffd2e47f6c67eb5c9765f2181b8" + integrity sha512-Z9kfb1v6ZlGbWj8EJk9T6czVEjjq2ntSYLY2cw6pAZl4oKtfgQuS4HOq41M/BcoLPzrUbNd+R4BXFyH//nHxVg== + +"@esbuild/linux-arm@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.25.5.tgz#2a0be71b6cd8201fa559aea45598dffabc05d911" + integrity sha512-cPzojwW2okgh7ZlRpcBEtsX7WBuqbLrNXqLU89GxWbNt6uIg78ET82qifUy3W6OVww6ZWobWub5oqZOVtwolfw== + +"@esbuild/linux-ia32@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.25.5.tgz#763414463cd9ea6fa1f96555d2762f9f84c61783" + integrity sha512-sQ7l00M8bSv36GLV95BVAdhJ2QsIbCuCjh/uYrWiMQSUuV+LpXwIqhgJDcvMTj+VsQmqAHL2yYaasENvJ7CDKA== + +"@esbuild/linux-loong64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.25.5.tgz#428cf2213ff786a502a52c96cf29d1fcf1eb8506" + integrity sha512-0ur7ae16hDUC4OL5iEnDb0tZHDxYmuQyhKhsPBV8f99f6Z9KQM02g33f93rNH5A30agMS46u2HP6qTdEt6Q1kg== + +"@esbuild/linux-mips64el@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.25.5.tgz#5cbcc7fd841b4cd53358afd33527cd394e325d96" + integrity sha512-kB/66P1OsHO5zLz0i6X0RxlQ+3cu0mkxS3TKFvkb5lin6uwZ/ttOkP3Z8lfR9mJOBk14ZwZ9182SIIWFGNmqmg== + +"@esbuild/linux-ppc64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.25.5.tgz#0d954ab39ce4f5e50f00c4f8c4fd38f976c13ad9" + integrity sha512-UZCmJ7r9X2fe2D6jBmkLBMQetXPXIsZjQJCjgwpVDz+YMcS6oFR27alkgGv3Oqkv07bxdvw7fyB71/olceJhkQ== + +"@esbuild/linux-riscv64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.25.5.tgz#0e7dd30730505abd8088321e8497e94b547bfb1e" + integrity sha512-kTxwu4mLyeOlsVIFPfQo+fQJAV9mh24xL+y+Bm6ej067sYANjyEw1dNHmvoqxJUCMnkBdKpvOn0Ahql6+4VyeA== + +"@esbuild/linux-s390x@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.25.5.tgz#5669af81327a398a336d7e40e320b5bbd6e6e72d" + integrity sha512-K2dSKTKfmdh78uJ3NcWFiqyRrimfdinS5ErLSn3vluHNeHVnBAFWC8a4X5N+7FgVE1EjXS1QDZbpqZBjfrqMTQ== + +"@esbuild/linux-x64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.25.5.tgz#b2357dd153aa49038967ddc1ffd90c68a9d2a0d4" + integrity sha512-uhj8N2obKTE6pSZ+aMUbqq+1nXxNjZIIjCjGLfsWvVpy7gKCOL6rsY1MhRh9zLtUtAI7vpgLMK6DxjO8Qm9lJw== + +"@esbuild/netbsd-arm64@0.25.5": + version "0.25.5" + resolved 
"https://registry.yarnpkg.com/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.5.tgz#53b4dfb8fe1cee93777c9e366893bd3daa6ba63d" + integrity sha512-pwHtMP9viAy1oHPvgxtOv+OkduK5ugofNTVDilIzBLpoWAM16r7b/mxBvfpuQDpRQFMfuVr5aLcn4yveGvBZvw== + +"@esbuild/netbsd-x64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.25.5.tgz#a0206f6314ce7dc8713b7732703d0f58de1d1e79" + integrity sha512-WOb5fKrvVTRMfWFNCroYWWklbnXH0Q5rZppjq0vQIdlsQKuw6mdSihwSo4RV/YdQ5UCKKvBy7/0ZZYLBZKIbwQ== + +"@esbuild/openbsd-arm64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.5.tgz#2a796c87c44e8de78001d808c77d948a21ec22fd" + integrity sha512-7A208+uQKgTxHd0G0uqZO8UjK2R0DDb4fDmERtARjSHWxqMTye4Erz4zZafx7Di9Cv+lNHYuncAkiGFySoD+Mw== + +"@esbuild/openbsd-x64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.25.5.tgz#28d0cd8909b7fa3953af998f2b2ed34f576728f0" + integrity sha512-G4hE405ErTWraiZ8UiSoesH8DaCsMm0Cay4fsFWOOUcz8b8rC6uCvnagr+gnioEjWn0wC+o1/TAHt+It+MpIMg== + +"@esbuild/sunos-x64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.25.5.tgz#a28164f5b997e8247d407e36c90d3fd5ddbe0dc5" + integrity sha512-l+azKShMy7FxzY0Rj4RCt5VD/q8mG/e+mDivgspo+yL8zW7qEwctQ6YqKX34DTEleFAvCIUviCFX1SDZRSyMQA== + +"@esbuild/win32-arm64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.25.5.tgz#6eadbead38e8bd12f633a5190e45eff80e24007e" + integrity sha512-O2S7SNZzdcFG7eFKgvwUEZ2VG9D/sn/eIiz8XRZ1Q/DO5a3s76Xv0mdBzVM5j5R639lXQmPmSo0iRpHqUUrsxw== + +"@esbuild/win32-ia32@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.25.5.tgz#bab6288005482f9ed2adb9ded7e88eba9a62cc0d" + integrity sha512-onOJ02pqs9h1iMJ1PQphR+VZv8qBMQ77Klcsqv9CNW2w6yLqoURLcgERAIurY6QE63bbLuqgP9ATqajFLK5AMQ== + +"@esbuild/win32-x64@0.25.5": + version "0.25.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.25.5.tgz#7fc114af5f6563f19f73324b5d5ff36ece0803d1" + integrity sha512-TXv6YnJ8ZMVdX+SXWVBo/0p8LTcrUYngpWjvm91TMjjBQii7Oz11Lw5lbDV5Y0TzuhSJHwiH4hEtC1I42mMS0g== + +"@expressive-code/core@^0.41.2": + version "0.41.2" + resolved "https://registry.yarnpkg.com/@expressive-code/core/-/core-0.41.2.tgz#b8bca4c1fb7f552ad722c73ca81accbbd9fe259f" + integrity sha512-AJW5Tp9czbLqKMzwudL9Rv4js9afXBxkSGLmCNPq1iRgAYcx9NkTPJiSNCesjKRWoVC328AdSu6fqrD22zDgDg== + dependencies: + "@ctrl/tinycolor" "^4.0.4" + hast-util-select "^6.0.2" + hast-util-to-html "^9.0.1" + hast-util-to-text "^4.0.1" + hastscript "^9.0.0" + postcss "^8.4.38" + postcss-nested "^6.0.1" + unist-util-visit "^5.0.0" + unist-util-visit-parents "^6.0.1" + +"@expressive-code/plugin-frames@^0.41.2": + version "0.41.2" + resolved "https://registry.yarnpkg.com/@expressive-code/plugin-frames/-/plugin-frames-0.41.2.tgz#b664df6530da07f084384dd7d9d4ac13aa05f658" + integrity sha512-pfy0hkJI4nbaONjmksFDcuHmIuyPTFmi1JpABe4q2ajskiJtfBf+WDAL2pg595R9JNoPrrH5+aT9lbkx2noicw== + dependencies: + "@expressive-code/core" "^0.41.2" + +"@expressive-code/plugin-shiki@^0.41.2": + version "0.41.2" + resolved "https://registry.yarnpkg.com/@expressive-code/plugin-shiki/-/plugin-shiki-0.41.2.tgz#b2706be62212ddb97013dc0f6898ef468681e4aa" + integrity sha512-xD4zwqAkDccXqye+235BH5bN038jYiSMLfUrCOmMlzxPDGWdxJDk5z4uUB/aLfivEF2tXyO2zyaarL3Oqht0fQ== + dependencies: + "@expressive-code/core" "^0.41.2" + shiki "^3.2.2" + 
+"@expressive-code/plugin-text-markers@^0.41.2": + version "0.41.2" + resolved "https://registry.yarnpkg.com/@expressive-code/plugin-text-markers/-/plugin-text-markers-0.41.2.tgz#581ee78df8b9acc70e76526f3a0644efe9eea4b8" + integrity sha512-JFWBz2qYxxJOJkkWf96LpeolbnOqJY95TvwYc0hXIHf9oSWV0h0SY268w/5N3EtQaD9KktzDE+VIVwb9jdb3nw== + dependencies: + "@expressive-code/core" "^0.41.2" + +"@img/sharp-darwin-arm64@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.33.5.tgz#ef5b5a07862805f1e8145a377c8ba6e98813ca08" + integrity sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ== + optionalDependencies: + "@img/sharp-libvips-darwin-arm64" "1.0.4" + +"@img/sharp-darwin-x64@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.33.5.tgz#e03d3451cd9e664faa72948cc70a403ea4063d61" + integrity sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q== + optionalDependencies: + "@img/sharp-libvips-darwin-x64" "1.0.4" + +"@img/sharp-libvips-darwin-arm64@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.0.4.tgz#447c5026700c01a993c7804eb8af5f6e9868c07f" + integrity sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg== + +"@img/sharp-libvips-darwin-x64@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.0.4.tgz#e0456f8f7c623f9dbfbdc77383caa72281d86062" + integrity sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ== + +"@img/sharp-libvips-linux-arm64@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.0.4.tgz#979b1c66c9a91f7ff2893556ef267f90ebe51704" + integrity sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA== + +"@img/sharp-libvips-linux-arm@1.0.5": + version "1.0.5" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.0.5.tgz#99f922d4e15216ec205dcb6891b721bfd2884197" + integrity sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g== + +"@img/sharp-libvips-linux-s390x@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.0.4.tgz#f8a5eb1f374a082f72b3f45e2fb25b8118a8a5ce" + integrity sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA== + +"@img/sharp-libvips-linux-x64@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.0.4.tgz#d4c4619cdd157774906e15770ee119931c7ef5e0" + integrity sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw== + +"@img/sharp-libvips-linuxmusl-arm64@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.0.4.tgz#166778da0f48dd2bded1fa3033cee6b588f0d5d5" + integrity sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA== + +"@img/sharp-libvips-linuxmusl-x64@1.0.4": + version "1.0.4" + resolved 
"https://registry.yarnpkg.com/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.0.4.tgz#93794e4d7720b077fcad3e02982f2f1c246751ff" + integrity sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw== + +"@img/sharp-linux-arm64@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.33.5.tgz#edb0697e7a8279c9fc829a60fc35644c4839bb22" + integrity sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA== + optionalDependencies: + "@img/sharp-libvips-linux-arm64" "1.0.4" + +"@img/sharp-linux-arm@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-linux-arm/-/sharp-linux-arm-0.33.5.tgz#422c1a352e7b5832842577dc51602bcd5b6f5eff" + integrity sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ== + optionalDependencies: + "@img/sharp-libvips-linux-arm" "1.0.5" + +"@img/sharp-linux-s390x@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.33.5.tgz#f5c077926b48e97e4a04d004dfaf175972059667" + integrity sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q== + optionalDependencies: + "@img/sharp-libvips-linux-s390x" "1.0.4" + +"@img/sharp-linux-x64@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-linux-x64/-/sharp-linux-x64-0.33.5.tgz#d806e0afd71ae6775cc87f0da8f2d03a7c2209cb" + integrity sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA== + optionalDependencies: + "@img/sharp-libvips-linux-x64" "1.0.4" + +"@img/sharp-linuxmusl-arm64@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.33.5.tgz#252975b915894fb315af5deea174651e208d3d6b" + integrity sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g== + optionalDependencies: + "@img/sharp-libvips-linuxmusl-arm64" "1.0.4" + +"@img/sharp-linuxmusl-x64@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.33.5.tgz#3f4609ac5d8ef8ec7dadee80b560961a60fd4f48" + integrity sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw== + optionalDependencies: + "@img/sharp-libvips-linuxmusl-x64" "1.0.4" + +"@img/sharp-wasm32@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-wasm32/-/sharp-wasm32-0.33.5.tgz#6f44f3283069d935bb5ca5813153572f3e6f61a1" + integrity sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg== + dependencies: + "@emnapi/runtime" "^1.2.0" + +"@img/sharp-win32-ia32@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.33.5.tgz#1a0c839a40c5351e9885628c85f2e5dfd02b52a9" + integrity sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ== + +"@img/sharp-win32-x64@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-win32-x64/-/sharp-win32-x64-0.33.5.tgz#56f00962ff0c4e0eb93d34a047d29fa995e3e342" + integrity sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg== + +"@isaacs/fs-minipass@^4.0.0": + version "4.0.1" + resolved 
"https://registry.yarnpkg.com/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz#2d59ae3ab4b38fb4270bfa23d30f8e2e86c7fe32" + integrity sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w== + dependencies: + minipass "^7.0.4" + +"@jridgewell/gen-mapping@^0.3.5": + version "0.3.8" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz#4f0e06362e01362f823d348f1872b08f666d8142" + integrity sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA== + dependencies: + "@jridgewell/set-array" "^1.2.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.24" + +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6" + integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== + +"@jridgewell/set-array@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.2.1.tgz#558fb6472ed16a4c850b889530e6b36438c49280" + integrity sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A== + +"@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14", "@jridgewell/sourcemap-codec@^1.5.0": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz#3188bcb273a414b0d215fd22a58540b989b9409a" + integrity sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ== + +"@jridgewell/trace-mapping@^0.3.24": + version "0.3.25" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0" + integrity sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" + +"@mdx-js/mdx@^3.1.0": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@mdx-js/mdx/-/mdx-3.1.0.tgz#10235cab8ad7d356c262e8c21c68df5850a97dc3" + integrity sha512-/QxEhPAvGwbQmy1Px8F899L5Uc2KZ6JtXwlCgJmjSTBedwOZkByYcBG4GceIGPXRDsmfxhHazuS+hlOShRLeDw== + dependencies: + "@types/estree" "^1.0.0" + "@types/estree-jsx" "^1.0.0" + "@types/hast" "^3.0.0" + "@types/mdx" "^2.0.0" + collapse-white-space "^2.0.0" + devlop "^1.0.0" + estree-util-is-identifier-name "^3.0.0" + estree-util-scope "^1.0.0" + estree-walker "^3.0.0" + hast-util-to-jsx-runtime "^2.0.0" + markdown-extensions "^2.0.0" + recma-build-jsx "^1.0.0" + recma-jsx "^1.0.0" + recma-stringify "^1.0.0" + rehype-recma "^1.0.0" + remark-mdx "^3.0.0" + remark-parse "^11.0.0" + remark-rehype "^11.0.0" + source-map "^0.7.0" + unified "^11.0.0" + unist-util-position-from-estree "^2.0.0" + unist-util-stringify-position "^4.0.0" + unist-util-visit "^5.0.0" + vfile "^6.0.0" + +"@napi-rs/wasm-runtime@^0.2.10": + version "0.2.11" + resolved "https://registry.yarnpkg.com/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.11.tgz#192c1610e1625048089ab4e35bc0649ce478500e" + integrity sha512-9DPkXtvHydrcOsopiYpUgPHpmj0HWZKMUnL2dZqpvC42lsratuBG06V5ipyno0fUek5VlFsNQ+AcFATSrJXgMA== + dependencies: + "@emnapi/core" "^1.4.3" + "@emnapi/runtime" "^1.4.3" + "@tybys/wasm-util" "^0.9.0" + +"@oslojs/encoding@^1.1.0": + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/@oslojs/encoding/-/encoding-1.1.0.tgz#55f3d9a597430a01f2a5ef63c6b42f769f9ce34e" + integrity sha512-70wQhgYmndg4GCPxPPxPGevRKqTIJ2Nh4OkiMWmDAVYsTQ+Ta7Sq+rPevXyXGdzr30/qZBnyOalCszoMxlyldQ== + +"@pagefind/darwin-arm64@1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@pagefind/darwin-arm64/-/darwin-arm64-1.3.0.tgz#f1e63d031ba710c98b0b83db85df9251a255f543" + integrity sha512-365BEGl6ChOsauRjyVpBjXybflXAOvoMROw3TucAROHIcdBvXk9/2AmEvGFU0r75+vdQI4LJdJdpH4Y6Yqaj4A== + +"@pagefind/darwin-x64@1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@pagefind/darwin-x64/-/darwin-x64-1.3.0.tgz#10aa3c5988daa464c5c0db5c5aa4bf72e9bbfba1" + integrity sha512-zlGHA23uuXmS8z3XxEGmbHpWDxXfPZ47QS06tGUq0HDcZjXjXHeLG+cboOy828QIV5FXsm9MjfkP5e4ZNbOkow== + +"@pagefind/default-ui@^1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@pagefind/default-ui/-/default-ui-1.3.0.tgz#e3fb585d2fb08d463a8abc3c8f430420f0310109" + integrity sha512-CGKT9ccd3+oRK6STXGgfH+m0DbOKayX6QGlq38TfE1ZfUcPc5+ulTuzDbZUnMo+bubsEOIypm4Pl2iEyzZ1cNg== + +"@pagefind/linux-arm64@1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@pagefind/linux-arm64/-/linux-arm64-1.3.0.tgz#cceb0391901736427738ee1232ff326a985eda8a" + integrity sha512-8lsxNAiBRUk72JvetSBXs4WRpYrQrVJXjlRRnOL6UCdBN9Nlsz0t7hWstRk36+JqHpGWOKYiuHLzGYqYAqoOnQ== + +"@pagefind/linux-x64@1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@pagefind/linux-x64/-/linux-x64-1.3.0.tgz#06ec4c2907780a75d2fb65a22203c5a48abe7a82" + integrity sha512-hAvqdPJv7A20Ucb6FQGE6jhjqy+vZ6pf+s2tFMNtMBG+fzcdc91uTw7aP/1Vo5plD0dAOHwdxfkyw0ugal4kcQ== + +"@pagefind/windows-x64@1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@pagefind/windows-x64/-/windows-x64-1.3.0.tgz#ce3394e5143aaca4850a33473a07628971773655" + integrity sha512-BR1bIRWOMqkf8IoU576YDhij1Wd/Zf2kX/kCI0b2qzCKC8wcc2GQJaaRMCpzvCCrmliO4vtJ6RITp/AnoYUUmQ== + +"@rollup/pluginutils@^5.1.4": + version "5.1.4" + resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-5.1.4.tgz#bb94f1f9eaaac944da237767cdfee6c5b2262d4a" + integrity sha512-USm05zrsFxYLPdWWq+K3STlWiT/3ELn3RcV5hJMghpeAIhxfsUIg6mt12CBJBInWMV4VneoV7SfGv8xIwo2qNQ== + dependencies: + "@types/estree" "^1.0.0" + estree-walker "^2.0.2" + picomatch "^4.0.2" + +"@rollup/rollup-android-arm-eabi@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.43.0.tgz#9241b59af721beb7e3587a56c6c245d6c465753d" + integrity sha512-Krjy9awJl6rKbruhQDgivNbD1WuLb8xAclM4IR4cN5pHGAs2oIMMQJEiC3IC/9TZJ+QZkmZhlMO/6MBGxPidpw== + +"@rollup/rollup-android-arm64@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.43.0.tgz#f70ee53ba991fdd65c277b0716c559736d490a58" + integrity sha512-ss4YJwRt5I63454Rpj+mXCXicakdFmKnUNxr1dLK+5rv5FJgAxnN7s31a5VchRYxCFWdmnDWKd0wbAdTr0J5EA== + +"@rollup/rollup-darwin-arm64@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.43.0.tgz#9f59000e817cf5760d87515ce899f8b93fe8756a" + integrity sha512-eKoL8ykZ7zz8MjgBenEF2OoTNFAPFz1/lyJ5UmmFSz5jW+7XbH1+MAgCVHy72aG59rbuQLcJeiMrP8qP5d/N0A== + +"@rollup/rollup-darwin-x64@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.43.0.tgz#c92aebd02725ae1b88bdce40f08f7823e8055c78" + integrity 
sha512-SYwXJgaBYW33Wi/q4ubN+ldWC4DzQY62S4Ll2dgfr/dbPoF50dlQwEaEHSKrQdSjC6oIe1WgzosoaNoHCdNuMg== + +"@rollup/rollup-freebsd-arm64@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.43.0.tgz#b128dbe7b353922ddd729a4fc4e408ddcbf338b5" + integrity sha512-SV+U5sSo0yujrjzBF7/YidieK2iF6E7MdF6EbYxNz94lA+R0wKl3SiixGyG/9Klab6uNBIqsN7j4Y/Fya7wAjQ== + +"@rollup/rollup-freebsd-x64@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.43.0.tgz#88297a0ddfadddd61d7d9b73eb42b3f227301d30" + integrity sha512-J7uCsiV13L/VOeHJBo5SjasKiGxJ0g+nQTrBkAsmQBIdil3KhPnSE9GnRon4ejX1XDdsmK/l30IYLiAaQEO0Cg== + +"@rollup/rollup-linux-arm-gnueabihf@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.43.0.tgz#a59afc092523ebe43d3899f33da9cdd2ec01fb87" + integrity sha512-gTJ/JnnjCMc15uwB10TTATBEhK9meBIY+gXP4s0sHD1zHOaIh4Dmy1X9wup18IiY9tTNk5gJc4yx9ctj/fjrIw== + +"@rollup/rollup-linux-arm-musleabihf@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.43.0.tgz#3095c1327b794bd187d03e372e633717fb69b4c0" + integrity sha512-ZJ3gZynL1LDSIvRfz0qXtTNs56n5DI2Mq+WACWZ7yGHFUEirHBRt7fyIk0NsCKhmRhn7WAcjgSkSVVxKlPNFFw== + +"@rollup/rollup-linux-arm64-gnu@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.43.0.tgz#e43bb77df3a6de85312e991d1e3ad352d1abb00d" + integrity sha512-8FnkipasmOOSSlfucGYEu58U8cxEdhziKjPD2FIa0ONVMxvl/hmONtX/7y4vGjdUhjcTHlKlDhw3H9t98fPvyA== + +"@rollup/rollup-linux-arm64-musl@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.43.0.tgz#34873a437bcd87618f702dc66f0cbce170aebf9f" + integrity sha512-KPPyAdlcIZ6S9C3S2cndXDkV0Bb1OSMsX0Eelr2Bay4EsF9yi9u9uzc9RniK3mcUGCLhWY9oLr6er80P5DE6XA== + +"@rollup/rollup-linux-loongarch64-gnu@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.43.0.tgz#224ff524349e365baa56f1f512822548c2d76910" + integrity sha512-HPGDIH0/ZzAZjvtlXj6g+KDQ9ZMHfSP553za7o2Odegb/BEfwJcR0Sw0RLNpQ9nC6Gy8s+3mSS9xjZ0n3rhcYg== + +"@rollup/rollup-linux-powerpc64le-gnu@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.43.0.tgz#43c3c053b26ace18a1d3dab204596a466c1b0e34" + integrity sha512-gEmwbOws4U4GLAJDhhtSPWPXUzDfMRedT3hFMyRAvM9Mrnj+dJIFIeL7otsv2WF3D7GrV0GIewW0y28dOYWkmw== + +"@rollup/rollup-linux-riscv64-gnu@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.43.0.tgz#e7df825d71daefa7037605015455aa58be43cd7a" + integrity sha512-XXKvo2e+wFtXZF/9xoWohHg+MuRnvO29TI5Hqe9xwN5uN8NKUYy7tXUG3EZAlfchufNCTHNGjEx7uN78KsBo0g== + +"@rollup/rollup-linux-riscv64-musl@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.43.0.tgz#d76ad93a7f4c0b2855a024d8d859196acf38acf5" + integrity sha512-ruf3hPWhjw6uDFsOAzmbNIvlXFXlBQ4nk57Sec8E8rUxs/AI4HD6xmiiasOOx/3QxS2f5eQMKTAwk7KHwpzr/Q== + +"@rollup/rollup-linux-s390x-gnu@4.43.0": + version "4.43.0" + resolved 
"https://registry.yarnpkg.com/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.43.0.tgz#0852608843d05852af3f447bf43bb63d80d62b6a" + integrity sha512-QmNIAqDiEMEvFV15rsSnjoSmO0+eJLoKRD9EAa9rrYNwO/XRCtOGM3A5A0X+wmG+XRrw9Fxdsw+LnyYiZWWcVw== + +"@rollup/rollup-linux-x64-gnu@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.43.0.tgz#d16a57f86357a4e697142bee244afed59b24e6c5" + integrity sha512-jAHr/S0iiBtFyzjhOkAics/2SrXE092qyqEg96e90L3t9Op8OTzS6+IX0Fy5wCt2+KqeHAkti+eitV0wvblEoQ== + +"@rollup/rollup-linux-x64-musl@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.43.0.tgz#51cbc8b1eb46ebc0e284725418b6fbf48686e4e2" + integrity sha512-3yATWgdeXyuHtBhrLt98w+5fKurdqvs8B53LaoKD7P7H7FKOONLsBVMNl9ghPQZQuYcceV5CDyPfyfGpMWD9mQ== + +"@rollup/rollup-win32-arm64-msvc@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.43.0.tgz#d6d84aace2b211119bf0ab1c586e29d01e32aa01" + integrity sha512-wVzXp2qDSCOpcBCT5WRWLmpJRIzv23valvcTwMHEobkjippNf+C3ys/+wf07poPkeNix0paTNemB2XrHr2TnGw== + +"@rollup/rollup-win32-ia32-msvc@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.43.0.tgz#4af33168de2f65b97a8f36bd1d8d21cea34d3ccb" + integrity sha512-fYCTEyzf8d+7diCw8b+asvWDCLMjsCEA8alvtAutqJOJp/wL5hs1rWSqJ1vkjgW0L2NB4bsYJrpKkiIPRR9dvw== + +"@rollup/rollup-win32-x64-msvc@4.43.0": + version "4.43.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.43.0.tgz#42a88207659e404e8ffa655cae763cbad94906ab" + integrity sha512-SnGhLiE5rlK0ofq8kzuDkM0g7FN1s5VYY+YSMTibP7CqShxCQvqtNxTARS4xX4PFJfHjG0ZQYX9iGzI3FQh5Aw== + +"@shikijs/core@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@shikijs/core/-/core-3.6.0.tgz#11048ab2bd9af1bde34eb3c77af1a638a5ae2737" + integrity sha512-9By7Xb3olEX0o6UeJyPLI1PE1scC4d3wcVepvtv2xbuN9/IThYN4Wcwh24rcFeASzPam11MCq8yQpwwzCgSBRw== + dependencies: + "@shikijs/types" "3.6.0" + "@shikijs/vscode-textmate" "^10.0.2" + "@types/hast" "^3.0.4" + hast-util-to-html "^9.0.5" + +"@shikijs/engine-javascript@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@shikijs/engine-javascript/-/engine-javascript-3.6.0.tgz#8d3d6725e43240650e8aa110520b15e8b39df3d5" + integrity sha512-7YnLhZG/TU05IHMG14QaLvTW/9WiK8SEYafceccHUSXs2Qr5vJibUwsDfXDLmRi0zHdzsxrGKpSX6hnqe0k8nA== + dependencies: + "@shikijs/types" "3.6.0" + "@shikijs/vscode-textmate" "^10.0.2" + oniguruma-to-es "^4.3.3" + +"@shikijs/engine-oniguruma@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@shikijs/engine-oniguruma/-/engine-oniguruma-3.6.0.tgz#dce6074a258f1b349ff043eddba9779e76984769" + integrity sha512-nmOhIZ9yT3Grd+2plmW/d8+vZ2pcQmo/UnVwXMUXAKTXdi+LK0S08Ancrz5tQQPkxvjBalpMW2aKvwXfelauvA== + dependencies: + "@shikijs/types" "3.6.0" + "@shikijs/vscode-textmate" "^10.0.2" + +"@shikijs/langs@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@shikijs/langs/-/langs-3.6.0.tgz#e3faea60e12c9bcdbebeeac14975c71f927bc7cb" + integrity sha512-IdZkQJaLBu1LCYCwkr30hNuSDfllOT8RWYVZK1tD2J03DkiagYKRxj/pDSl8Didml3xxuyzUjgtioInwEQM/TA== + dependencies: + "@shikijs/types" "3.6.0" + +"@shikijs/themes@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@shikijs/themes/-/themes-3.6.0.tgz#6003780e033f9fec6b1336395a8994be17959e81" + integrity 
sha512-Fq2j4nWr1DF4drvmhqKq8x5vVQ27VncF8XZMBuHuQMZvUSS3NBgpqfwz/FoGe36+W6PvniZ1yDlg2d4kmYDU6w== + dependencies: + "@shikijs/types" "3.6.0" + +"@shikijs/types@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@shikijs/types/-/types-3.6.0.tgz#0a4944724320eaa6f151d26d8f982a7f4a0b53ff" + integrity sha512-cLWFiToxYu0aAzJqhXTQsFiJRTFDAGl93IrMSBNaGSzs7ixkLfdG6pH11HipuWFGW5vyx4X47W8HDQ7eSrmBUg== + dependencies: + "@shikijs/vscode-textmate" "^10.0.2" + "@types/hast" "^3.0.4" + +"@shikijs/vscode-textmate@^10.0.2": + version "10.0.2" + resolved "https://registry.yarnpkg.com/@shikijs/vscode-textmate/-/vscode-textmate-10.0.2.tgz#a90ab31d0cc1dfb54c66a69e515bf624fa7b2224" + integrity sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg== + +"@swc/helpers@^0.5.12": + version "0.5.17" + resolved "https://registry.yarnpkg.com/@swc/helpers/-/helpers-0.5.17.tgz#5a7be95ac0f0bf186e7e6e890e7a6f6cda6ce971" + integrity sha512-5IKx/Y13RsYd+sauPb2x+U/xZikHjolzfuDgTAl/Tdf3Q8rslRvC19NKDLgAJQ6wsqADk10ntlv08nPFw/gO/A== + dependencies: + tslib "^2.8.0" + +"@tailwindcss/node@4.1.10": + version "4.1.10" + resolved "https://registry.yarnpkg.com/@tailwindcss/node/-/node-4.1.10.tgz#7a53a224cdd79a926ed990bbf97c74de9dadf595" + integrity sha512-2ACf1znY5fpRBwRhMgj9ZXvb2XZW8qs+oTfotJ2C5xR0/WNL7UHZ7zXl6s+rUqedL1mNi+0O+WQr5awGowS3PQ== + dependencies: + "@ampproject/remapping" "^2.3.0" + enhanced-resolve "^5.18.1" + jiti "^2.4.2" + lightningcss "1.30.1" + magic-string "^0.30.17" + source-map-js "^1.2.1" + tailwindcss "4.1.10" + +"@tailwindcss/oxide-android-arm64@4.1.10": + version "4.1.10" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.1.10.tgz#ad0f3cbfa219e1ee5fc8ad7170885feda397c4e3" + integrity sha512-VGLazCoRQ7rtsCzThaI1UyDu/XRYVyH4/EWiaSX6tFglE+xZB5cvtC5Omt0OQ+FfiIVP98su16jDVHDEIuH4iQ== + +"@tailwindcss/oxide-darwin-arm64@4.1.10": + version "4.1.10" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.10.tgz#d8d744f93310b45ce16420a9addd1c4329848929" + integrity sha512-ZIFqvR1irX2yNjWJzKCqTCcHZbgkSkSkZKbRM3BPzhDL/18idA8uWCoopYA2CSDdSGFlDAxYdU2yBHwAwx8euQ== + +"@tailwindcss/oxide-darwin-x64@4.1.10": + version "4.1.10" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.1.10.tgz#476490d1f95592a09801a53b48466e5065d7553f" + integrity sha512-eCA4zbIhWUFDXoamNztmS0MjXHSEJYlvATzWnRiTqJkcUteSjO94PoRHJy1Xbwp9bptjeIxxBHh+zBWFhttbrQ== + +"@tailwindcss/oxide-freebsd-x64@4.1.10": + version "4.1.10" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.1.10.tgz#7b7ccb813592209216ed39187eb8510ce6b4fc9d" + integrity sha512-8/392Xu12R0cc93DpiJvNpJ4wYVSiciUlkiOHOSOQNH3adq9Gi/dtySK7dVQjXIOzlpSHjeCL89RUUI8/GTI6g== + +"@tailwindcss/oxide-linux-arm-gnueabihf@4.1.10": + version "4.1.10" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.1.10.tgz#9f223c7994da846b9f3c70ac0b5713371c9b3b32" + integrity sha512-t9rhmLT6EqeuPT+MXhWhlRYIMSfh5LZ6kBrC4FS6/+M1yXwfCtp24UumgCWOAJVyjQwG+lYva6wWZxrfvB+NhQ== + +"@tailwindcss/oxide-linux-arm64-gnu@4.1.10": + version "4.1.10" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.1.10.tgz#58412e6a359a83144b30b415f637a52c8207f311" + integrity sha512-3oWrlNlxLRxXejQ8zImzrVLuZ/9Z2SeKoLhtCu0hpo38hTO2iL86eFOu4sVR8cZc6n3z7eRXXqtHJECa6mFOvA== + +"@tailwindcss/oxide-linux-arm64-musl@4.1.10": + 
version "4.1.10" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.1.10.tgz#3ed868b801a27e8cd35a615855bc94fd2786a6e8" + integrity sha512-saScU0cmWvg/Ez4gUmQWr9pvY9Kssxt+Xenfx1LG7LmqjcrvBnw4r9VjkFcqmbBb7GCBwYNcZi9X3/oMda9sqQ== + +"@tailwindcss/oxide-linux-x64-gnu@4.1.10": + version "4.1.10" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.1.10.tgz#aca15cc4cf9dcd687eda0f5cd2bc1f4bfb485562" + integrity sha512-/G3ao/ybV9YEEgAXeEg28dyH6gs1QG8tvdN9c2MNZdUXYBaIY/Gx0N6RlJzfLy/7Nkdok4kaxKPHKJUlAaoTdA== + +"@tailwindcss/oxide-linux-x64-musl@4.1.10": + version "4.1.10" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.1.10.tgz#0c77d1e94e499a9f85c80013e6052dd98d3cfee4" + integrity sha512-LNr7X8fTiKGRtQGOerSayc2pWJp/9ptRYAa4G+U+cjw9kJZvkopav1AQc5HHD+U364f71tZv6XamaHKgrIoVzA== + +"@tailwindcss/oxide-wasm32-wasi@4.1.10": + version "4.1.10" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.1.10.tgz#6e749424db4f6e076371a66da7c4daf1fcd4f9df" + integrity sha512-d6ekQpopFQJAcIK2i7ZzWOYGZ+A6NzzvQ3ozBvWFdeyqfOZdYHU66g5yr+/HC4ipP1ZgWsqa80+ISNILk+ae/Q== + dependencies: + "@emnapi/core" "^1.4.3" + "@emnapi/runtime" "^1.4.3" + "@emnapi/wasi-threads" "^1.0.2" + "@napi-rs/wasm-runtime" "^0.2.10" + "@tybys/wasm-util" "^0.9.0" + tslib "^2.8.0" + +"@tailwindcss/oxide-win32-arm64-msvc@4.1.10": + version "4.1.10" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.10.tgz#e1663b5a95425f0f458f616399ed9f6707d4a786" + integrity sha512-i1Iwg9gRbwNVOCYmnigWCCgow8nDWSFmeTUU5nbNx3rqbe4p0kRbEqLwLJbYZKmSSp23g4N6rCDmm7OuPBXhDA== + +"@tailwindcss/oxide-win32-x64-msvc@4.1.10": + version "4.1.10" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.1.10.tgz#de3d4e8b38c31caf2522ad0c6f0efdeb5034fc95" + integrity sha512-sGiJTjcBSfGq2DVRtaSljq5ZgZS2SDHSIfhOylkBvHVjwOsodBhnb3HdmiKkVuUGKD0I7G63abMOVaskj1KpOA== + +"@tailwindcss/oxide@4.1.10": + version "4.1.10" + resolved "https://registry.yarnpkg.com/@tailwindcss/oxide/-/oxide-4.1.10.tgz#b8ad6ae678b54bb533c2074092aadebac0a6d8fe" + integrity sha512-v0C43s7Pjw+B9w21htrQwuFObSkio2aV/qPx/mhrRldbqxbWJK6KizM+q7BF1/1CmuLqZqX3CeYF7s7P9fbA8Q== + dependencies: + detect-libc "^2.0.4" + tar "^7.4.3" + optionalDependencies: + "@tailwindcss/oxide-android-arm64" "4.1.10" + "@tailwindcss/oxide-darwin-arm64" "4.1.10" + "@tailwindcss/oxide-darwin-x64" "4.1.10" + "@tailwindcss/oxide-freebsd-x64" "4.1.10" + "@tailwindcss/oxide-linux-arm-gnueabihf" "4.1.10" + "@tailwindcss/oxide-linux-arm64-gnu" "4.1.10" + "@tailwindcss/oxide-linux-arm64-musl" "4.1.10" + "@tailwindcss/oxide-linux-x64-gnu" "4.1.10" + "@tailwindcss/oxide-linux-x64-musl" "4.1.10" + "@tailwindcss/oxide-wasm32-wasi" "4.1.10" + "@tailwindcss/oxide-win32-arm64-msvc" "4.1.10" + "@tailwindcss/oxide-win32-x64-msvc" "4.1.10" + +"@tailwindcss/vite@^4.1.10": + version "4.1.10" + resolved "https://registry.yarnpkg.com/@tailwindcss/vite/-/vite-4.1.10.tgz#9ffa396a3f85d31f53eeaa4bac33eb0286bc955d" + integrity sha512-QWnD5HDY2IADv+vYR82lOhqOlS1jSCUUAmfem52cXAhRTKxpDh3ARX8TTXJTCCO7Rv7cD2Nlekabv02bwP3a2A== + dependencies: + "@tailwindcss/node" "4.1.10" + "@tailwindcss/oxide" "4.1.10" + tailwindcss "4.1.10" + +"@tybys/wasm-util@^0.9.0": + version "0.9.0" + resolved "https://registry.yarnpkg.com/@tybys/wasm-util/-/wasm-util-0.9.0.tgz#3e75eb00604c8d6db470bf18c37b7d984a0e3355" 
+ integrity sha512-6+7nlbMVX/PVDCwaIQ8nTOPveOcFLSt8GcXdx8hD0bt39uWxYT88uXzqTd4fTvqta7oeUJqudepapKNt2DYJFw== + dependencies: + tslib "^2.4.0" + +"@types/debug@^4.0.0": + version "4.1.12" + resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.12.tgz#a155f21690871953410df4b6b6f53187f0500917" + integrity sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ== + dependencies: + "@types/ms" "*" + +"@types/estree-jsx@^1.0.0": + version "1.0.5" + resolved "https://registry.yarnpkg.com/@types/estree-jsx/-/estree-jsx-1.0.5.tgz#858a88ea20f34fe65111f005a689fa1ebf70dc18" + integrity sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg== + dependencies: + "@types/estree" "*" + +"@types/estree@*", "@types/estree@^1.0.0": + version "1.0.8" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.8.tgz#958b91c991b1867ced318bedea0e215ee050726e" + integrity sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w== + +"@types/estree@1.0.7": + version "1.0.7" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.7.tgz#4158d3105276773d5b7695cd4834b1722e4f37a8" + integrity sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ== + +"@types/fontkit@^2.0.8": + version "2.0.8" + resolved "https://registry.yarnpkg.com/@types/fontkit/-/fontkit-2.0.8.tgz#59725be650e68acbbff6df9f3fccbd54d9ef7f4c" + integrity sha512-wN+8bYxIpJf+5oZdrdtaX04qUuWHcKxcDEgRS9Qm9ZClSHjzEn13SxUC+5eRM+4yXIeTYk8mTzLAWGF64847ew== + dependencies: + "@types/node" "*" + +"@types/hast@^3.0.0", "@types/hast@^3.0.4": + version "3.0.4" + resolved "https://registry.yarnpkg.com/@types/hast/-/hast-3.0.4.tgz#1d6b39993b82cea6ad783945b0508c25903e15aa" + integrity sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ== + dependencies: + "@types/unist" "*" + +"@types/js-yaml@^4.0.9": + version "4.0.9" + resolved "https://registry.yarnpkg.com/@types/js-yaml/-/js-yaml-4.0.9.tgz#cd82382c4f902fed9691a2ed79ec68c5898af4c2" + integrity sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg== + +"@types/mdast@^4.0.0", "@types/mdast@^4.0.4": + version "4.0.4" + resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-4.0.4.tgz#7ccf72edd2f1aa7dd3437e180c64373585804dd6" + integrity sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA== + dependencies: + "@types/unist" "*" + +"@types/mdx@^2.0.0": + version "2.0.13" + resolved "https://registry.yarnpkg.com/@types/mdx/-/mdx-2.0.13.tgz#68f6877043d377092890ff5b298152b0a21671bd" + integrity sha512-+OWZQfAYyio6YkJb3HLxDrvnx6SWWDbC0zVPfBRzUk0/nqoDyf6dNxQi3eArPe8rJ473nobTMQ/8Zk+LxJ+Yuw== + +"@types/ms@*": + version "2.1.0" + resolved "https://registry.yarnpkg.com/@types/ms/-/ms-2.1.0.tgz#052aa67a48eccc4309d7f0191b7e41434b90bb78" + integrity sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA== + +"@types/nlcst@^2.0.0": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@types/nlcst/-/nlcst-2.0.3.tgz#31cad346eaab48a9a8a58465d3d05e2530dda762" + integrity sha512-vSYNSDe6Ix3q+6Z7ri9lyWqgGhJTmzRjZRqyq15N0Z/1/UnVsno9G/N40NBijoYx2seFDIl0+B2mgAb9mezUCA== + dependencies: + "@types/unist" "*" + +"@types/node@*": + version "24.0.1" + resolved "https://registry.yarnpkg.com/@types/node/-/node-24.0.1.tgz#e9bfcb1c35547437c294403b7bec497772a88b0a" + integrity 
sha512-MX4Zioh39chHlDJbKmEgydJDS3tspMP/lnQC67G3SWsTnb9NeYVWOjkxpOSy4oMfPs4StcWHwBrvUb4ybfnuaw== + dependencies: + undici-types "~7.8.0" + +"@types/node@^17.0.5": + version "17.0.45" + resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.45.tgz#2c0fafd78705e7a18b7906b5201a522719dc5190" + integrity sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw== + +"@types/sax@^1.2.1": + version "1.2.7" + resolved "https://registry.yarnpkg.com/@types/sax/-/sax-1.2.7.tgz#ba5fe7df9aa9c89b6dff7688a19023dd2963091d" + integrity sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A== + dependencies: + "@types/node" "*" + +"@types/unist@*", "@types/unist@^3.0.0": + version "3.0.3" + resolved "https://registry.yarnpkg.com/@types/unist/-/unist-3.0.3.tgz#acaab0f919ce69cce629c2d4ed2eb4adc1b6c20c" + integrity sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q== + +"@types/unist@^2.0.0": + version "2.0.11" + resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.11.tgz#11af57b127e32487774841f7a4e54eab166d03c4" + integrity sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA== + +"@ungap/structured-clone@^1.0.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.3.0.tgz#d06bbb384ebcf6c505fde1c3d0ed4ddffe0aaff8" + integrity sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g== + +acorn-jsx@^5.0.0: + version "5.3.2" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== + +acorn@^8.0.0, acorn@^8.14.1: + version "8.15.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.15.0.tgz#a360898bc415edaac46c8241f6383975b930b816" + integrity sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg== + +ansi-align@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/ansi-align/-/ansi-align-3.0.1.tgz#0cdf12e111ace773a86e9a1fad1225c43cb19a59" + integrity sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w== + dependencies: + string-width "^4.1.0" + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-regex@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.1.0.tgz#95ec409c69619d6cb1b8b34f14b660ef28ebd654" + integrity sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA== + +ansi-styles@^6.2.1: + version "6.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" + integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== + +anymatch@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" + integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + +arg@^5.0.0: + version "5.0.2" + resolved 
"https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" + integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== + +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +aria-query@^5.3.2: + version "5.3.2" + resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-5.3.2.tgz#93f81a43480e33a338f19163a3d10a50c01dcd59" + integrity sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw== + +array-iterate@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/array-iterate/-/array-iterate-2.0.1.tgz#6efd43f8295b3fee06251d3d62ead4bd9805dd24" + integrity sha512-I1jXZMjAgCMmxT4qxXfPXa6SthSoE8h6gkSI9BGGNv8mP8G/v0blc+qFnZu6K42vTOiuME596QaLO0TP3Lk0xg== + +astring@^1.8.0: + version "1.9.0" + resolved "https://registry.yarnpkg.com/astring/-/astring-1.9.0.tgz#cc73e6062a7eb03e7d19c22d8b0b3451fd9bfeef" + integrity sha512-LElXdjswlqjWrPpJFg1Fx4wpkOCxj1TDHlSV4PlaRxHGWko024xICaa97ZkMfs6DRKlCguiAI+rbXv5GWwXIkg== + +astro-expressive-code@^0.41.1: + version "0.41.2" + resolved "https://registry.yarnpkg.com/astro-expressive-code/-/astro-expressive-code-0.41.2.tgz#5de3c27c15abfddb016ac2c632b0d6d03b08b651" + integrity sha512-HN0jWTnhr7mIV/2e6uu4PPRNNo/k4UEgTLZqbp3MrHU+caCARveG2yZxaZVBmxyiVdYqW5Pd3u3n2zjnshixbw== + dependencies: + rehype-expressive-code "^0.41.2" + +astro@^5.6.1: + version "5.9.2" + resolved "https://registry.yarnpkg.com/astro/-/astro-5.9.2.tgz#04978c535bb412430acbfd232079d95b030e530b" + integrity sha512-K/zZlQOWMpamfLDOls5jvG7lrsjH1gkk3ESRZyZDCkVBtKHMF4LbjwCicm/iBb3mX3V/PerqRYzLbOy3/4JLCQ== + dependencies: + "@astrojs/compiler" "^2.12.2" + "@astrojs/internal-helpers" "0.6.1" + "@astrojs/markdown-remark" "6.3.2" + "@astrojs/telemetry" "3.3.0" + "@capsizecss/unpack" "^2.4.0" + "@oslojs/encoding" "^1.1.0" + "@rollup/pluginutils" "^5.1.4" + acorn "^8.14.1" + aria-query "^5.3.2" + axobject-query "^4.1.0" + boxen "8.0.1" + ci-info "^4.2.0" + clsx "^2.1.1" + common-ancestor-path "^1.0.1" + cookie "^1.0.2" + cssesc "^3.0.0" + debug "^4.4.0" + deterministic-object-hash "^2.0.2" + devalue "^5.1.1" + diff "^5.2.0" + dlv "^1.1.3" + dset "^3.1.4" + es-module-lexer "^1.6.0" + esbuild "^0.25.0" + estree-walker "^3.0.3" + flattie "^1.1.1" + fontace "~0.3.0" + github-slugger "^2.0.0" + html-escaper "3.0.3" + http-cache-semantics "^4.1.1" + import-meta-resolve "^4.1.0" + js-yaml "^4.1.0" + kleur "^4.1.5" + magic-string "^0.30.17" + magicast "^0.3.5" + mrmime "^2.0.1" + neotraverse "^0.6.18" + p-limit "^6.2.0" + p-queue "^8.1.0" + package-manager-detector "^1.1.0" + picomatch "^4.0.2" + prompts "^2.4.2" + rehype "^13.0.2" + semver "^7.7.1" + shiki "^3.2.1" + tinyexec "^0.3.2" + tinyglobby "^0.2.12" + tsconfck "^3.1.5" + ultrahtml "^1.6.0" + unifont "~0.5.0" + unist-util-visit "^5.0.0" + unstorage "^1.15.0" + vfile "^6.0.3" + vite "^6.3.4" + vitefu "^1.0.6" + xxhash-wasm "^1.1.0" + yargs-parser "^21.1.1" + yocto-spinner "^0.2.1" + zod "^3.24.2" + zod-to-json-schema "^3.24.5" + zod-to-ts "^1.2.0" + optionalDependencies: + sharp "^0.33.3" + +axobject-query@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-4.1.0.tgz#28768c76d0e3cff21bc62a9e2d0b6ac30042a1ee" + integrity 
sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ== + +b4a@^1.6.4: + version "1.6.7" + resolved "https://registry.yarnpkg.com/b4a/-/b4a-1.6.7.tgz#a99587d4ebbfbd5a6e3b21bdb5d5fa385767abe4" + integrity sha512-OnAYlL5b7LEkALw87fUVafQw5rVR9RjwGd4KUwNQ6DrrNmaVaUCgLipfVlzrPQ4tWOR9P0IXGNOx50jYCCdSJg== + +bail@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/bail/-/bail-2.0.2.tgz#d26f5cd8fe5d6f832a31517b9f7c356040ba6d5d" + integrity sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw== + +bare-events@^2.2.0, bare-events@^2.5.4: + version "2.5.4" + resolved "https://registry.yarnpkg.com/bare-events/-/bare-events-2.5.4.tgz#16143d435e1ed9eafd1ab85f12b89b3357a41745" + integrity sha512-+gFfDkR8pj4/TrWCGUGWmJIkBwuxPS5F+a5yWjOHQt2hHvNZd5YLzadjmDUtFmMM4y429bnKLa8bYBMHcYdnQA== + +bare-fs@^4.0.1: + version "4.1.5" + resolved "https://registry.yarnpkg.com/bare-fs/-/bare-fs-4.1.5.tgz#1d06c076e68cc8bf97010d29af9e3ac3808cdcf7" + integrity sha512-1zccWBMypln0jEE05LzZt+V/8y8AQsQQqxtklqaIyg5nu6OAYFhZxPXinJTSG+kU5qyNmeLgcn9AW7eHiCHVLA== + dependencies: + bare-events "^2.5.4" + bare-path "^3.0.0" + bare-stream "^2.6.4" + +bare-os@^3.0.1: + version "3.6.1" + resolved "https://registry.yarnpkg.com/bare-os/-/bare-os-3.6.1.tgz#9921f6f59edbe81afa9f56910658422c0f4858d4" + integrity sha512-uaIjxokhFidJP+bmmvKSgiMzj2sV5GPHaZVAIktcxcpCyBFFWO+YlikVAdhmUo2vYFvFhOXIAlldqV29L8126g== + +bare-path@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/bare-path/-/bare-path-3.0.0.tgz#b59d18130ba52a6af9276db3e96a2e3d3ea52178" + integrity sha512-tyfW2cQcB5NN8Saijrhqn0Zh7AnFNsnczRcuWODH0eYAXBsJ5gVxAUuNr7tsHSC6IZ77cA0SitzT+s47kot8Mw== + dependencies: + bare-os "^3.0.1" + +bare-stream@^2.6.4: + version "2.6.5" + resolved "https://registry.yarnpkg.com/bare-stream/-/bare-stream-2.6.5.tgz#bba8e879674c4c27f7e27805df005c15d7a2ca07" + integrity sha512-jSmxKJNJmHySi6hC42zlZnq00rga4jjxcgNZjY9N5WlOe/iOoGRtdwGsHzQv2RlH2KOYMwGUXhf2zXd32BA9RA== + dependencies: + streamx "^2.21.0" + +base-64@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/base-64/-/base-64-1.0.0.tgz#09d0f2084e32a3fd08c2475b973788eee6ae8f4a" + integrity sha512-kwDPIFCGx0NZHog36dj+tHiwP4QMzsZ3AgMViUBKI0+V5n4U0ufTCUMhnQ04diaRI8EX/QcPfql7zlhZ7j4zgg== + +base64-js@^1.1.2, base64-js@^1.3.0, base64-js@^1.3.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" + integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== + +bcp-47-match@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/bcp-47-match/-/bcp-47-match-2.0.3.tgz#603226f6e5d3914a581408be33b28a53144b09d0" + integrity sha512-JtTezzbAibu8G0R9op9zb3vcWZd9JF6M0xOYGPn0fNCd7wOpRB1mU2mH9T8gaBGbAAyIIVgB2G7xG0GP98zMAQ== + +bcp-47@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/bcp-47/-/bcp-47-2.1.0.tgz#7e80734c3338fe8320894981dccf4968c3092df6" + integrity sha512-9IIS3UPrvIa1Ej+lVDdDwO7zLehjqsaByECw0bu2RRGP73jALm6FYbzI5gWbgHLvNdkvfXB5YrSbocZdOS0c0w== + dependencies: + is-alphabetical "^2.0.0" + is-alphanumerical "^2.0.0" + is-decimal "^2.0.0" + +bl@^4.0.3: + version "4.1.0" + resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" + integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== + dependencies: + buffer "^5.5.0" + inherits "^2.0.4" + 
readable-stream "^3.4.0" + +blob-to-buffer@^1.2.8: + version "1.2.9" + resolved "https://registry.yarnpkg.com/blob-to-buffer/-/blob-to-buffer-1.2.9.tgz#a17fd6c1c564011408f8971e451544245daaa84a" + integrity sha512-BF033y5fN6OCofD3vgHmNtwZWRcq9NLyyxyILx9hfMy1sXYy4ojFl765hJ2lP0YaN2fuxPaLO2Vzzoxy0FLFFA== + +boolbase@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" + integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== + +boxen@8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/boxen/-/boxen-8.0.1.tgz#7e9fcbb45e11a2d7e6daa8fdcebfc3242fc19fe3" + integrity sha512-F3PH5k5juxom4xktynS7MoFY+NUWH5LC4CnH11YB8NPew+HLpmBLCybSAEyb2F+4pRXhuhWqFesoQd6DAyc2hw== + dependencies: + ansi-align "^3.0.1" + camelcase "^8.0.0" + chalk "^5.3.0" + cli-boxes "^3.0.0" + string-width "^7.2.0" + type-fest "^4.21.0" + widest-line "^5.0.0" + wrap-ansi "^9.0.0" + +brotli@^1.3.2: + version "1.3.3" + resolved "https://registry.yarnpkg.com/brotli/-/brotli-1.3.3.tgz#7365d8cc00f12cf765d2b2c898716bcf4b604d48" + integrity sha512-oTKjJdShmDuGW94SyyaoQvAjf30dZaHnjJ8uAF+u2/vGJkJbJPJAT1gDiOJP5v1Zb6f9KEyW/1HpuaWIXtGHPg== + dependencies: + base64-js "^1.1.2" + +buffer@^5.5.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" + integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== + dependencies: + base64-js "^1.3.1" + ieee754 "^1.1.13" + +camelcase@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-8.0.0.tgz#c0d36d418753fb6ad9c5e0437579745c1c14a534" + integrity sha512-8WB3Jcas3swSvjIeA2yvCJ+Miyz5l1ZmB6HFb9R1317dt9LCQoswg/BGrmAmkWVEszSrrg4RwmO46qIm2OEnSA== + +ccount@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/ccount/-/ccount-2.0.1.tgz#17a3bf82302e0870d6da43a01311a8bc02a3ecf5" + integrity sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg== + +chalk@^5.3.0: + version "5.4.1" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.4.1.tgz#1b48bf0963ec158dce2aacf69c093ae2dd2092d8" + integrity sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w== + +character-entities-html4@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/character-entities-html4/-/character-entities-html4-2.1.0.tgz#1f1adb940c971a4b22ba39ddca6b618dc6e56b2b" + integrity sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA== + +character-entities-legacy@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz#76bc83a90738901d7bc223a9e93759fdd560125b" + integrity sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ== + +character-entities@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/character-entities/-/character-entities-2.0.2.tgz#2d09c2e72cd9523076ccb21157dff66ad43fcc22" + integrity sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ== + +character-reference-invalid@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz#85c66b041e43b47210faf401278abf808ac45cb9" + integrity 
sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw== + +chokidar@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-4.0.3.tgz#7be37a4c03c9aee1ecfe862a4a23b2c70c205d30" + integrity sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA== + dependencies: + readdirp "^4.0.1" + +chownr@^1.1.1: + version "1.1.4" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" + integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== + +chownr@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-3.0.0.tgz#9855e64ecd240a9cc4267ce8a4aa5d24a1da15e4" + integrity sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g== + +ci-info@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-4.2.0.tgz#cbd21386152ebfe1d56f280a3b5feccbd96764c7" + integrity sha512-cYY9mypksY8NRqgDB1XD1RiJL338v/551niynFTGkZOO2LHuB2OmOYxDIe/ttN9AHwrqdum1360G3ald0W9kCg== + +cli-boxes@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-3.0.0.tgz#71a10c716feeba005e4504f36329ef0b17cf3145" + integrity sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g== + +clone@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" + integrity sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w== + +clsx@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/clsx/-/clsx-2.1.1.tgz#eed397c9fd8bd882bfb18deab7102049a2f32999" + integrity sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA== + +collapse-white-space@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/collapse-white-space/-/collapse-white-space-2.1.0.tgz#640257174f9f42c740b40f3b55ee752924feefca" + integrity sha512-loKTxY1zCOuG4j9f6EPnuyyYkf58RnhhWTvRoZEokgB+WbdXehfjFviyOVYkqzEWz1Q5kRiZdBYS5SwxbQYwzw== + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@^1.0.0, color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +color-string@^1.9.0: + version "1.9.1" + resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.9.1.tgz#4467f9146f036f855b764dfb5bf8582bf342c7a4" + integrity sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg== + dependencies: + color-name "^1.0.0" + simple-swizzle "^0.2.2" + +color@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/color/-/color-4.2.3.tgz#d781ecb5e57224ee43ea9627560107c0e0c6463a" + integrity sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A== + dependencies: + color-convert "^2.0.1" + color-string "^1.9.0" + +comma-separated-tokens@^2.0.0: + version "2.0.3" + resolved 
"https://registry.yarnpkg.com/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz#4e89c9458acb61bc8fef19f4529973b2392839ee" + integrity sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg== + +common-ancestor-path@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/common-ancestor-path/-/common-ancestor-path-1.0.1.tgz#4f7d2d1394d91b7abdf51871c62f71eadb0182a7" + integrity sha512-L3sHRo1pXXEqX8VU28kfgUY+YGsk09hPqZiZmLacNib6XNTCM8ubYeT7ryXQw8asB1sKgcU5lkB7ONug08aB8w== + +cookie-es@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/cookie-es/-/cookie-es-1.2.2.tgz#18ceef9eb513cac1cb6c14bcbf8bdb2679b34821" + integrity sha512-+W7VmiVINB+ywl1HGXJXmrqkOhpKrIiVZV6tQuV54ZyQC7MMuBt81Vc336GMLoHBq5hV/F9eXgt5Mnx0Rha5Fg== + +cookie@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-1.0.2.tgz#27360701532116bd3f1f9416929d176afe1e4610" + integrity sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA== + +cross-fetch@^3.0.4: + version "3.2.0" + resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.2.0.tgz#34e9192f53bc757d6614304d9e5e6fb4edb782e3" + integrity sha512-Q+xVJLoGOeIMXZmbUK4HYk+69cQH6LudR0Vu/pRm2YlU/hDV9CiS0gKUMaWY5f2NeUH9C1nV3bsTlCo0FsTV1Q== + dependencies: + node-fetch "^2.7.0" + +crossws@^0.3.4: + version "0.3.5" + resolved "https://registry.yarnpkg.com/crossws/-/crossws-0.3.5.tgz#daad331d44148ea6500098bc858869f3a5ab81a6" + integrity sha512-ojKiDvcmByhwa8YYqbQI/hg7MEU0NC03+pSdEq4ZUnZR9xXpwk7E43SMNGkn+JxJGPFtNvQ48+vV2p+P1ml5PA== + dependencies: + uncrypto "^0.1.3" + +css-selector-parser@^3.0.0: + version "3.1.2" + resolved "https://registry.yarnpkg.com/css-selector-parser/-/css-selector-parser-3.1.2.tgz#181106cc2acc9fa74f0dbd6860c3305202284c81" + integrity sha512-WfUcL99xWDs7b3eZPoRszWVfbNo8ErCF15PTvVROjkShGlAfjIkG6hlfj/sl6/rfo5Q9x9ryJ3VqVnAZDA+gcw== + +css-tree@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-3.1.0.tgz#7aabc035f4e66b5c86f54570d55e05b1346eb0fd" + integrity sha512-0eW44TGN5SQXU1mWSkKwFstI/22X2bG1nYzZTYMAWjylYURhse752YgbE4Cx46AC+bAvI+/dYTPRk1LqSUnu6w== + dependencies: + mdn-data "2.12.2" + source-map-js "^1.0.1" + +cssesc@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" + integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== + +debug@^4.0.0, debug@^4.4.0: + version "4.4.1" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.4.1.tgz#e5a8bc6cbc4c6cd3e64308b0693a3d4fa550189b" + integrity sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ== + dependencies: + ms "^2.1.3" + +decode-named-character-reference@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/decode-named-character-reference/-/decode-named-character-reference-1.1.0.tgz#5d6ce68792808901210dac42a8e9853511e2b8bf" + integrity sha512-Wy+JTSbFThEOXQIR2L6mxJvEs+veIzpmqD7ynWxMXGpnk3smkHQOp6forLdHsKpAMW9iJpaBBIxz285t1n1C3w== + dependencies: + character-entities "^2.0.0" + +decompress-response@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-6.0.0.tgz#ca387612ddb7e104bd16d85aab00d5ecf09c66fc" + integrity sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ== + dependencies: + mimic-response "^3.1.0" + +deep-extend@^0.6.0: + version 
"0.6.0" + resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" + integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== + +defu@^6.1.4: + version "6.1.4" + resolved "https://registry.yarnpkg.com/defu/-/defu-6.1.4.tgz#4e0c9cf9ff68fe5f3d7f2765cc1a012dfdcb0479" + integrity sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg== + +dequal@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/dequal/-/dequal-2.0.3.tgz#2644214f1997d39ed0ee0ece72335490a7ac67be" + integrity sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA== + +destr@^2.0.3, destr@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/destr/-/destr-2.0.5.tgz#7d112ff1b925fb8d2079fac5bdb4a90973b51fdb" + integrity sha512-ugFTXCtDZunbzasqBxrK93Ik/DRYsO6S/fedkWEMKqt04xZ4csmnmwGDBAb07QWNaGMAmnTIemsYZCksjATwsA== + +detect-libc@^2.0.0, detect-libc@^2.0.2, detect-libc@^2.0.3, detect-libc@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.4.tgz#f04715b8ba815e53b4d8109655b6508a6865a7e8" + integrity sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA== + +deterministic-object-hash@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/deterministic-object-hash/-/deterministic-object-hash-2.0.2.tgz#b251ddc801443905f0e9fef08816a46bc9fe3807" + integrity sha512-KxektNH63SrbfUyDiwXqRb1rLwKt33AmMv+5Nhsw1kqZ13SJBRTgZHtGbE+hH3a1mVW1cz+4pqSWVPAtLVXTzQ== + dependencies: + base-64 "^1.0.0" + +devalue@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/devalue/-/devalue-5.1.1.tgz#a71887ac0f354652851752654e4bd435a53891ae" + integrity sha512-maua5KUiapvEwiEAe+XnlZ3Rh0GD+qI1J/nb9vrJc3muPXvcF/8gXYTWF76+5DAqHyDUtOIImEuo0YKE9mshVw== + +devlop@^1.0.0, devlop@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/devlop/-/devlop-1.1.0.tgz#4db7c2ca4dc6e0e834c30be70c94bbc976dc7018" + integrity sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA== + dependencies: + dequal "^2.0.0" + +dfa@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/dfa/-/dfa-1.2.0.tgz#96ac3204e2d29c49ea5b57af8d92c2ae12790657" + integrity sha512-ED3jP8saaweFTjeGX8HQPjeC1YYyZs98jGNZx6IiBvxW7JG5v492kamAQB3m2wop07CvU/RQmzcKr6bgcC5D/Q== + +diff@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/diff/-/diff-5.2.0.tgz#26ded047cd1179b78b9537d5ef725503ce1ae531" + integrity sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A== + +direction@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/direction/-/direction-2.0.1.tgz#71800dd3c4fa102406502905d3866e65bdebb985" + integrity sha512-9S6m9Sukh1cZNknO1CWAr2QAWsbKLafQiyM5gZ7VgXHeuaoUwffKN4q6NC4A/Mf9iiPlOXQEKW/Mv/mh9/3YFA== + +dlv@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/dlv/-/dlv-1.1.3.tgz#5c198a8a11453596e751494d49874bc7732f2e79" + integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA== + +dset@^3.1.4: + version "3.1.4" + resolved "https://registry.yarnpkg.com/dset/-/dset-3.1.4.tgz#f8eaf5f023f068a036d08cd07dc9ffb7d0065248" + integrity sha512-2QF/g9/zTaPDc3BjNcVTGoBbXBgYfMTTceLaYcFJ/W9kggFUkhxD/hMEeuLKbugyef9SqAx8cpgwlIP/jinUTA== + +emoji-regex@^10.3.0: + version "10.4.0" + resolved 
"https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-10.4.0.tgz#03553afea80b3975749cfcb36f776ca268e413d4" + integrity sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +end-of-stream@^1.1.0, end-of-stream@^1.4.1: + version "1.4.4" + resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" + integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== + dependencies: + once "^1.4.0" + +enhanced-resolve@^5.18.1: + version "5.18.1" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.18.1.tgz#728ab082f8b7b6836de51f1637aab5d3b9568faf" + integrity sha512-ZSW3ma5GkcQBIpwZTSRAI8N71Uuwgs93IezB7mf7R60tC8ZbJideoDNKjHn2O9KIlx6rkGTTEk1xUCK2E1Y2Yg== + dependencies: + graceful-fs "^4.2.4" + tapable "^2.2.0" + +entities@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/entities/-/entities-6.0.1.tgz#c28c34a43379ca7f61d074130b2f5f7020a30694" + integrity sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g== + +es-module-lexer@^1.6.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.7.0.tgz#9159601561880a85f2734560a9099b2c31e5372a" + integrity sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA== + +esast-util-from-estree@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/esast-util-from-estree/-/esast-util-from-estree-2.0.0.tgz#8d1cfb51ad534d2f159dc250e604f3478a79f1ad" + integrity sha512-4CyanoAudUSBAn5K13H4JhsMH6L9ZP7XbLVe/dKybkxMO7eDyLsT8UHl9TRNrU2Gr9nz+FovfSIjuXWJ81uVwQ== + dependencies: + "@types/estree-jsx" "^1.0.0" + devlop "^1.0.0" + estree-util-visit "^2.0.0" + unist-util-position-from-estree "^2.0.0" + +esast-util-from-js@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/esast-util-from-js/-/esast-util-from-js-2.0.1.tgz#5147bec34cc9da44accf52f87f239a40ac3e8225" + integrity sha512-8Ja+rNJ0Lt56Pcf3TAmpBZjmx8ZcK5Ts4cAzIOjsjevg9oSXJnl6SUQ2EevU8tv3h6ZLWmoKL5H4fgWvdvfETw== + dependencies: + "@types/estree-jsx" "^1.0.0" + acorn "^8.0.0" + esast-util-from-estree "^2.0.0" + vfile-message "^4.0.0" + +esbuild@^0.25.0: + version "0.25.5" + resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.25.5.tgz#71075054993fdfae76c66586f9b9c1f8d7edd430" + integrity sha512-P8OtKZRv/5J5hhz0cUAdu/cLuPIKXpQl1R9pZtvmHWQvrAUVd0UNIPT4IB4W3rNOqVO0rlqHmCIbSwxh/c9yUQ== + optionalDependencies: + "@esbuild/aix-ppc64" "0.25.5" + "@esbuild/android-arm" "0.25.5" + "@esbuild/android-arm64" "0.25.5" + "@esbuild/android-x64" "0.25.5" + "@esbuild/darwin-arm64" "0.25.5" + "@esbuild/darwin-x64" "0.25.5" + "@esbuild/freebsd-arm64" "0.25.5" + "@esbuild/freebsd-x64" "0.25.5" + "@esbuild/linux-arm" "0.25.5" + "@esbuild/linux-arm64" "0.25.5" + "@esbuild/linux-ia32" "0.25.5" + "@esbuild/linux-loong64" "0.25.5" + "@esbuild/linux-mips64el" "0.25.5" + "@esbuild/linux-ppc64" "0.25.5" + "@esbuild/linux-riscv64" "0.25.5" + "@esbuild/linux-s390x" "0.25.5" + "@esbuild/linux-x64" "0.25.5" + "@esbuild/netbsd-arm64" "0.25.5" + "@esbuild/netbsd-x64" "0.25.5" + "@esbuild/openbsd-arm64" "0.25.5" + "@esbuild/openbsd-x64" "0.25.5" + 
"@esbuild/sunos-x64" "0.25.5" + "@esbuild/win32-arm64" "0.25.5" + "@esbuild/win32-ia32" "0.25.5" + "@esbuild/win32-x64" "0.25.5" + +escape-string-regexp@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz#4683126b500b61762f2dbebace1806e8be31b1c8" + integrity sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw== + +estree-util-attach-comments@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/estree-util-attach-comments/-/estree-util-attach-comments-3.0.0.tgz#344bde6a64c8a31d15231e5ee9e297566a691c2d" + integrity sha512-cKUwm/HUcTDsYh/9FgnuFqpfquUbwIqwKM26BVCGDPVgvaCl/nDCCjUfiLlx6lsEZ3Z4RFxNbOQ60pkaEwFxGw== + dependencies: + "@types/estree" "^1.0.0" + +estree-util-build-jsx@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/estree-util-build-jsx/-/estree-util-build-jsx-3.0.1.tgz#b6d0bced1dcc4f06f25cf0ceda2b2dcaf98168f1" + integrity sha512-8U5eiL6BTrPxp/CHbs2yMgP8ftMhR5ww1eIKoWRMlqvltHF8fZn5LRDvTKuxD3DUn+shRbLGqXemcP51oFCsGQ== + dependencies: + "@types/estree-jsx" "^1.0.0" + devlop "^1.0.0" + estree-util-is-identifier-name "^3.0.0" + estree-walker "^3.0.0" + +estree-util-is-identifier-name@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz#0b5ef4c4ff13508b34dcd01ecfa945f61fce5dbd" + integrity sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg== + +estree-util-scope@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/estree-util-scope/-/estree-util-scope-1.0.0.tgz#9cbdfc77f5cb51e3d9ed4ad9c4adbff22d43e585" + integrity sha512-2CAASclonf+JFWBNJPndcOpA8EMJwa0Q8LUFJEKqXLW6+qBvbFZuF5gItbQOs/umBUkjviCSDCbBwU2cXbmrhQ== + dependencies: + "@types/estree" "^1.0.0" + devlop "^1.0.0" + +estree-util-to-js@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/estree-util-to-js/-/estree-util-to-js-2.0.0.tgz#10a6fb924814e6abb62becf0d2bc4dea51d04f17" + integrity sha512-WDF+xj5rRWmD5tj6bIqRi6CkLIXbbNQUcxQHzGysQzvHmdYG2G7p/Tf0J0gpxGgkeMZNTIjT/AoSvC9Xehcgdg== + dependencies: + "@types/estree-jsx" "^1.0.0" + astring "^1.8.0" + source-map "^0.7.0" + +estree-util-visit@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/estree-util-visit/-/estree-util-visit-2.0.0.tgz#13a9a9f40ff50ed0c022f831ddf4b58d05446feb" + integrity sha512-m5KgiH85xAhhW8Wta0vShLcUvOsh3LLPI2YVwcbio1l7E09NTLL1EyMZFM1OyWowoH0skScNbhOPl4kcBgzTww== + dependencies: + "@types/estree-jsx" "^1.0.0" + "@types/unist" "^3.0.0" + +estree-walker@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-2.0.2.tgz#52f010178c2a4c117a7757cfe942adb7d2da4cac" + integrity sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w== + +estree-walker@^3.0.0, estree-walker@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-3.0.3.tgz#67c3e549ec402a487b4fc193d1953a524752340d" + integrity sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g== + dependencies: + "@types/estree" "^1.0.0" + +eventemitter3@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-5.0.1.tgz#53f5ffd0a492ac800721bb42c66b841de96423c4" + integrity sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA== + +expand-template@^2.0.3: + version "2.0.3" + 
resolved "https://registry.yarnpkg.com/expand-template/-/expand-template-2.0.3.tgz#6e14b3fcee0f3a6340ecb57d2e8918692052a47c" + integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg== + +expressive-code@^0.41.2: + version "0.41.2" + resolved "https://registry.yarnpkg.com/expressive-code/-/expressive-code-0.41.2.tgz#f57b784fac2aa89cd85144823637afcc71feaaf0" + integrity sha512-aLZiZaqorRtNExtGpUjK9zFH9aTpWeoTXMyLo4b4IcuXfPqtLPPxhRm/QlPb8QqIcMMXnSiGRHSFpQfX0m7HJw== + dependencies: + "@expressive-code/core" "^0.41.2" + "@expressive-code/plugin-frames" "^0.41.2" + "@expressive-code/plugin-shiki" "^0.41.2" + "@expressive-code/plugin-text-markers" "^0.41.2" + +extend@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== + +fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-fifo@^1.2.0, fast-fifo@^1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/fast-fifo/-/fast-fifo-1.3.2.tgz#286e31de96eb96d38a97899815740ba2a4f3640c" + integrity sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ== + +fdir@^6.4.4: + version "6.4.6" + resolved "https://registry.yarnpkg.com/fdir/-/fdir-6.4.6.tgz#2b268c0232697063111bbf3f64810a2a741ba281" + integrity sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w== + +flattie@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/flattie/-/flattie-1.1.1.tgz#88182235723113667d36217fec55359275d6fe3d" + integrity sha512-9UbaD6XdAL97+k/n+N7JwX46K/M6Zc6KcFYskrYL8wbBV/Uyk0CTAMY0VT+qiK5PM7AIc9aTWYtq65U7T+aCNQ== + +fontace@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/fontace/-/fontace-0.3.0.tgz#ddc16ff039d07330a87c9492cf264f121ce3847b" + integrity sha512-czoqATrcnxgWb/nAkfyIrRp6Q8biYj7nGnL6zfhTcX+JKKpWHFBnb8uNMw/kZr7u++3Y3wYSYoZgHkCcsuBpBg== + dependencies: + "@types/fontkit" "^2.0.8" + fontkit "^2.0.4" + +fontkit@^2.0.2, fontkit@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/fontkit/-/fontkit-2.0.4.tgz#4765d664c68b49b5d6feb6bd1051ee49d8ec5ab0" + integrity sha512-syetQadaUEDNdxdugga9CpEYVaQIxOwk7GlwZWWZ19//qW4zE5bknOKeMBDYAASwnpaSHKJITRLMF9m1fp3s6g== + dependencies: + "@swc/helpers" "^0.5.12" + brotli "^1.3.2" + clone "^2.1.2" + dfa "^1.2.0" + fast-deep-equal "^3.1.3" + restructure "^3.0.0" + tiny-inflate "^1.0.3" + unicode-properties "^1.4.0" + unicode-trie "^2.0.0" + +fs-constants@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" + integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== + +fsevents@~2.3.2, fsevents@~2.3.3: + version "2.3.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" + integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== + +get-east-asian-width@^1.0.0: + version "1.3.0" + resolved 
"https://registry.yarnpkg.com/get-east-asian-width/-/get-east-asian-width-1.3.0.tgz#21b4071ee58ed04ee0db653371b55b4299875389" + integrity sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ== + +github-from-package@0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/github-from-package/-/github-from-package-0.0.0.tgz#97fb5d96bfde8973313f20e8288ef9a167fa64ce" + integrity sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw== + +github-slugger@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/github-slugger/-/github-slugger-2.0.0.tgz#52cf2f9279a21eb6c59dd385b410f0c0adda8f1a" + integrity sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw== + +graceful-fs@^4.2.4: + version "4.2.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== + +h3@^1.15.2: + version "1.15.3" + resolved "https://registry.yarnpkg.com/h3/-/h3-1.15.3.tgz#e242ec6a7692a45caed3e4a73710cede4fb8d863" + integrity sha512-z6GknHqyX0h9aQaTx22VZDf6QyZn+0Nh+Ym8O/u0SGSkyF5cuTJYKlc8MkzW3Nzf9LE1ivcpmYC3FUGpywhuUQ== + dependencies: + cookie-es "^1.2.2" + crossws "^0.3.4" + defu "^6.1.4" + destr "^2.0.5" + iron-webcrypto "^1.2.1" + node-mock-http "^1.0.0" + radix3 "^1.1.2" + ufo "^1.6.1" + uncrypto "^0.1.3" + +hast-util-embedded@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/hast-util-embedded/-/hast-util-embedded-3.0.0.tgz#be4477780fbbe079cdba22982e357a0de4ba853e" + integrity sha512-naH8sld4Pe2ep03qqULEtvYr7EjrLK2QHY8KJR6RJkTUjPGObe1vnx585uzem2hGra+s1q08DZZpfgDVYRbaXA== + dependencies: + "@types/hast" "^3.0.0" + hast-util-is-element "^3.0.0" + +hast-util-format@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/hast-util-format/-/hast-util-format-1.1.0.tgz#373e77382e07deb04f6676f1b4437e7d8549d985" + integrity sha512-yY1UDz6bC9rDvCWHpx12aIBGRG7krurX0p0Fm6pT547LwDIZZiNr8a+IHDogorAdreULSEzP82Nlv5SZkHZcjA== + dependencies: + "@types/hast" "^3.0.0" + hast-util-embedded "^3.0.0" + hast-util-minify-whitespace "^1.0.0" + hast-util-phrasing "^3.0.0" + hast-util-whitespace "^3.0.0" + html-whitespace-sensitive-tag-names "^3.0.0" + unist-util-visit-parents "^6.0.0" + +hast-util-from-html@^2.0.0, hast-util-from-html@^2.0.1, hast-util-from-html@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/hast-util-from-html/-/hast-util-from-html-2.0.3.tgz#485c74785358beb80c4ba6346299311ac4c49c82" + integrity sha512-CUSRHXyKjzHov8yKsQjGOElXy/3EKpyX56ELnkHH34vDVw1N1XSQ1ZcAvTyAPtGqLTuKP/uxM+aLkSPqF/EtMw== + dependencies: + "@types/hast" "^3.0.0" + devlop "^1.1.0" + hast-util-from-parse5 "^8.0.0" + parse5 "^7.0.0" + vfile "^6.0.0" + vfile-message "^4.0.0" + +hast-util-from-parse5@^8.0.0: + version "8.0.3" + resolved "https://registry.yarnpkg.com/hast-util-from-parse5/-/hast-util-from-parse5-8.0.3.tgz#830a35022fff28c3fea3697a98c2f4cc6b835a2e" + integrity sha512-3kxEVkEKt0zvcZ3hCRYI8rqrgwtlIOFMWkbclACvjlDw8Li9S2hk/d51OI0nr/gIpdMHNepwgOKqZ/sy0Clpyg== + dependencies: + "@types/hast" "^3.0.0" + "@types/unist" "^3.0.0" + devlop "^1.0.0" + hastscript "^9.0.0" + property-information "^7.0.0" + vfile "^6.0.0" + vfile-location "^5.0.0" + web-namespaces "^2.0.0" + +hast-util-has-property@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/hast-util-has-property/-/hast-util-has-property-3.0.0.tgz#4e595e3cddb8ce530ea92f6fc4111a818d8e7f93" + integrity sha512-MNilsvEKLFpV604hwfhVStK0usFY/QmM5zX16bo7EjnAEGofr5YyI37kzopBlZJkHD4t887i+q/C8/tr5Q94cA== + dependencies: + "@types/hast" "^3.0.0" + +hast-util-is-body-ok-link@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/hast-util-is-body-ok-link/-/hast-util-is-body-ok-link-3.0.1.tgz#ef63cb2f14f04ecf775139cd92bda5026380d8b4" + integrity sha512-0qpnzOBLztXHbHQenVB8uNuxTnm/QBFUOmdOSsEn7GnBtyY07+ENTWVFBAnXd/zEgd9/SUG3lRY7hSIBWRgGpQ== + dependencies: + "@types/hast" "^3.0.0" + +hast-util-is-element@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/hast-util-is-element/-/hast-util-is-element-3.0.0.tgz#6e31a6532c217e5b533848c7e52c9d9369ca0932" + integrity sha512-Val9mnv2IWpLbNPqc/pUem+a7Ipj2aHacCwgNfTiK0vJKl0LF+4Ba4+v1oPHFpf3bLYmreq0/l3Gud9S5OH42g== + dependencies: + "@types/hast" "^3.0.0" + +hast-util-minify-whitespace@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/hast-util-minify-whitespace/-/hast-util-minify-whitespace-1.0.1.tgz#7588fd1a53f48f1d30406b81959dffc3650daf55" + integrity sha512-L96fPOVpnclQE0xzdWb/D12VT5FabA7SnZOUMtL1DbXmYiHJMXZvFkIZfiMmTCNJHUeO2K9UYNXoVyfz+QHuOw== + dependencies: + "@types/hast" "^3.0.0" + hast-util-embedded "^3.0.0" + hast-util-is-element "^3.0.0" + hast-util-whitespace "^3.0.0" + unist-util-is "^6.0.0" + +hast-util-parse-selector@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz#352879fa86e25616036037dd8931fb5f34cb4a27" + integrity sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A== + dependencies: + "@types/hast" "^3.0.0" + +hast-util-phrasing@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/hast-util-phrasing/-/hast-util-phrasing-3.0.1.tgz#fa284c0cd4a82a0dd6020de8300a7b1ebffa1690" + integrity sha512-6h60VfI3uBQUxHqTyMymMZnEbNl1XmEGtOxxKYL7stY2o601COo62AWAYBQR9lZbYXYSBoxag8UpPRXK+9fqSQ== + dependencies: + "@types/hast" "^3.0.0" + hast-util-embedded "^3.0.0" + hast-util-has-property "^3.0.0" + hast-util-is-body-ok-link "^3.0.0" + hast-util-is-element "^3.0.0" + +hast-util-raw@^9.0.0: + version "9.1.0" + resolved "https://registry.yarnpkg.com/hast-util-raw/-/hast-util-raw-9.1.0.tgz#79b66b26f6f68fb50dfb4716b2cdca90d92adf2e" + integrity sha512-Y8/SBAHkZGoNkpzqqfCldijcuUKh7/su31kEBp67cFY09Wy0mTRgtsLYsiIxMJxlu0f6AA5SUTbDR8K0rxnbUw== + dependencies: + "@types/hast" "^3.0.0" + "@types/unist" "^3.0.0" + "@ungap/structured-clone" "^1.0.0" + hast-util-from-parse5 "^8.0.0" + hast-util-to-parse5 "^8.0.0" + html-void-elements "^3.0.0" + mdast-util-to-hast "^13.0.0" + parse5 "^7.0.0" + unist-util-position "^5.0.0" + unist-util-visit "^5.0.0" + vfile "^6.0.0" + web-namespaces "^2.0.0" + zwitch "^2.0.0" + +hast-util-select@^6.0.2: + version "6.0.4" + resolved "https://registry.yarnpkg.com/hast-util-select/-/hast-util-select-6.0.4.tgz#1d8f69657a57441d0ce0ade35887874d3e65a303" + integrity sha512-RqGS1ZgI0MwxLaKLDxjprynNzINEkRHY2i8ln4DDjgv9ZhcYVIHN9rlpiYsqtFwrgpYU361SyWDQcGNIBVu3lw== + dependencies: + "@types/hast" "^3.0.0" + "@types/unist" "^3.0.0" + bcp-47-match "^2.0.0" + comma-separated-tokens "^2.0.0" + css-selector-parser "^3.0.0" + devlop "^1.0.0" + direction "^2.0.0" + hast-util-has-property "^3.0.0" + hast-util-to-string "^3.0.0" + hast-util-whitespace "^3.0.0" + nth-check "^2.0.0" + property-information "^7.0.0" + space-separated-tokens 
"^2.0.0" + unist-util-visit "^5.0.0" + zwitch "^2.0.0" + +hast-util-to-estree@^3.0.0: + version "3.1.3" + resolved "https://registry.yarnpkg.com/hast-util-to-estree/-/hast-util-to-estree-3.1.3.tgz#e654c1c9374645135695cc0ab9f70b8fcaf733d7" + integrity sha512-48+B/rJWAp0jamNbAAf9M7Uf//UVqAoMmgXhBdxTDJLGKY+LRnZ99qcG+Qjl5HfMpYNzS5v4EAwVEF34LeAj7w== + dependencies: + "@types/estree" "^1.0.0" + "@types/estree-jsx" "^1.0.0" + "@types/hast" "^3.0.0" + comma-separated-tokens "^2.0.0" + devlop "^1.0.0" + estree-util-attach-comments "^3.0.0" + estree-util-is-identifier-name "^3.0.0" + hast-util-whitespace "^3.0.0" + mdast-util-mdx-expression "^2.0.0" + mdast-util-mdx-jsx "^3.0.0" + mdast-util-mdxjs-esm "^2.0.0" + property-information "^7.0.0" + space-separated-tokens "^2.0.0" + style-to-js "^1.0.0" + unist-util-position "^5.0.0" + zwitch "^2.0.0" + +hast-util-to-html@^9.0.0, hast-util-to-html@^9.0.1, hast-util-to-html@^9.0.5: + version "9.0.5" + resolved "https://registry.yarnpkg.com/hast-util-to-html/-/hast-util-to-html-9.0.5.tgz#ccc673a55bb8e85775b08ac28380f72d47167005" + integrity sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw== + dependencies: + "@types/hast" "^3.0.0" + "@types/unist" "^3.0.0" + ccount "^2.0.0" + comma-separated-tokens "^2.0.0" + hast-util-whitespace "^3.0.0" + html-void-elements "^3.0.0" + mdast-util-to-hast "^13.0.0" + property-information "^7.0.0" + space-separated-tokens "^2.0.0" + stringify-entities "^4.0.0" + zwitch "^2.0.4" + +hast-util-to-jsx-runtime@^2.0.0: + version "2.3.6" + resolved "https://registry.yarnpkg.com/hast-util-to-jsx-runtime/-/hast-util-to-jsx-runtime-2.3.6.tgz#ff31897aae59f62232e21594eac7ef6b63333e98" + integrity sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg== + dependencies: + "@types/estree" "^1.0.0" + "@types/hast" "^3.0.0" + "@types/unist" "^3.0.0" + comma-separated-tokens "^2.0.0" + devlop "^1.0.0" + estree-util-is-identifier-name "^3.0.0" + hast-util-whitespace "^3.0.0" + mdast-util-mdx-expression "^2.0.0" + mdast-util-mdx-jsx "^3.0.0" + mdast-util-mdxjs-esm "^2.0.0" + property-information "^7.0.0" + space-separated-tokens "^2.0.0" + style-to-js "^1.0.0" + unist-util-position "^5.0.0" + vfile-message "^4.0.0" + +hast-util-to-parse5@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/hast-util-to-parse5/-/hast-util-to-parse5-8.0.0.tgz#477cd42d278d4f036bc2ea58586130f6f39ee6ed" + integrity sha512-3KKrV5ZVI8if87DVSi1vDeByYrkGzg4mEfeu4alwgmmIeARiBLKCZS2uw5Gb6nU9x9Yufyj3iudm6i7nl52PFw== + dependencies: + "@types/hast" "^3.0.0" + comma-separated-tokens "^2.0.0" + devlop "^1.0.0" + property-information "^6.0.0" + space-separated-tokens "^2.0.0" + web-namespaces "^2.0.0" + zwitch "^2.0.0" + +hast-util-to-string@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/hast-util-to-string/-/hast-util-to-string-3.0.1.tgz#a4f15e682849326dd211c97129c94b0c3e76527c" + integrity sha512-XelQVTDWvqcl3axRfI0xSeoVKzyIFPwsAGSLIsKdJKQMXDYJS4WYrBNF/8J7RdhIcFI2BOHgAifggsvsxp/3+A== + dependencies: + "@types/hast" "^3.0.0" + +hast-util-to-text@^4.0.1, hast-util-to-text@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/hast-util-to-text/-/hast-util-to-text-4.0.2.tgz#57b676931e71bf9cb852453678495b3080bfae3e" + integrity sha512-KK6y/BN8lbaq654j7JgBydev7wuNMcID54lkRav1P0CaE1e47P72AWWPiGKXTJU271ooYzcvTAn/Zt0REnvc7A== + dependencies: + "@types/hast" "^3.0.0" + "@types/unist" "^3.0.0" + hast-util-is-element "^3.0.0" + unist-util-find-after "^5.0.0" 
+ +hast-util-whitespace@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz#7778ed9d3c92dd9e8c5c8f648a49c21fc51cb621" + integrity sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw== + dependencies: + "@types/hast" "^3.0.0" + +hastscript@^9.0.0: + version "9.0.1" + resolved "https://registry.yarnpkg.com/hastscript/-/hastscript-9.0.1.tgz#dbc84bef6051d40084342c229c451cd9dc567dff" + integrity sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w== + dependencies: + "@types/hast" "^3.0.0" + comma-separated-tokens "^2.0.0" + hast-util-parse-selector "^4.0.0" + property-information "^7.0.0" + space-separated-tokens "^2.0.0" + +html-escaper@3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-3.0.3.tgz#4d336674652beb1dcbc29ef6b6ba7f6be6fdfed6" + integrity sha512-RuMffC89BOWQoY0WKGpIhn5gX3iI54O6nRA0yC124NYVtzjmFWBIiFd8M0x+ZdX0P9R4lADg1mgP8C7PxGOWuQ== + +html-void-elements@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/html-void-elements/-/html-void-elements-3.0.0.tgz#fc9dbd84af9e747249034d4d62602def6517f1d7" + integrity sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg== + +html-whitespace-sensitive-tag-names@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/html-whitespace-sensitive-tag-names/-/html-whitespace-sensitive-tag-names-3.0.1.tgz#c35edd28205f3bf8c1fd03274608d60b923de5b2" + integrity sha512-q+310vW8zmymYHALr1da4HyXUQ0zgiIwIicEfotYPWGN0OJVEN/58IJ3A4GBYcEq3LGAZqKb+ugvP0GNB9CEAA== + +http-cache-semantics@^4.1.1: + version "4.2.0" + resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.2.0.tgz#205f4db64f8562b76a4ff9235aa5279839a09dd5" + integrity sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ== + +i18next@^23.11.5: + version "23.16.8" + resolved "https://registry.yarnpkg.com/i18next/-/i18next-23.16.8.tgz#3ae1373d344c2393f465556f394aba5a9233b93a" + integrity sha512-06r/TitrM88Mg5FdUXAKL96dJMzgqLE5dv3ryBAra4KCwD9mJ4ndOTS95ZuymIGoE+2hzfdaMak2X11/es7ZWg== + dependencies: + "@babel/runtime" "^7.23.2" + +ieee754@^1.1.13: + version "1.2.1" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" + integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== + +import-meta-resolve@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/import-meta-resolve/-/import-meta-resolve-4.1.0.tgz#f9db8bead9fafa61adb811db77a2bf22c5399706" + integrity sha512-I6fiaX09Xivtk+THaMfAwnA3MVA5Big1WHF1Dfx9hFuvNIWpXnorlkzhcQf6ehrqQiiZECRt1poOAkPmer3ruw== + +inherits@^2.0.3, inherits@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +ini@~1.3.0: + version "1.3.8" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" + integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== + +inline-style-parser@0.2.4: + version "0.2.4" + resolved "https://registry.yarnpkg.com/inline-style-parser/-/inline-style-parser-0.2.4.tgz#f4af5fe72e612839fcd453d989a586566d695f22" + integrity 
sha512-0aO8FkhNZlj/ZIbNi7Lxxr12obT7cL1moPfE4tg1LkX7LlLfC6DeX4l2ZEud1ukP9jNQyNnfzQVqwbwmAATY4Q== + +iron-webcrypto@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/iron-webcrypto/-/iron-webcrypto-1.2.1.tgz#aa60ff2aa10550630f4c0b11fd2442becdb35a6f" + integrity sha512-feOM6FaSr6rEABp/eDfVseKyTMDt+KGpeB35SkVn9Tyn0CqvVsY3EwI0v5i8nMHyJnzCIQf7nsy3p41TPkJZhg== + +is-alphabetical@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-alphabetical/-/is-alphabetical-2.0.1.tgz#01072053ea7c1036df3c7d19a6daaec7f19e789b" + integrity sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ== + +is-alphanumerical@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz#7c03fbe96e3e931113e57f964b0a368cc2dfd875" + integrity sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw== + dependencies: + is-alphabetical "^2.0.0" + is-decimal "^2.0.0" + +is-arrayish@^0.3.1: + version "0.3.2" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.3.2.tgz#4574a2ae56f7ab206896fb431eaeed066fdf8f03" + integrity sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ== + +is-decimal@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-decimal/-/is-decimal-2.0.1.tgz#9469d2dc190d0214fd87d78b78caecc0cc14eef7" + integrity sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A== + +is-docker@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-3.0.0.tgz#90093aa3106277d8a77a5910dbae71747e15a200" + integrity sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ== + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-hexadecimal@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz#86b5bf668fca307498d319dfc03289d781a90027" + integrity sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg== + +is-inside-container@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-inside-container/-/is-inside-container-1.0.0.tgz#e81fba699662eb31dbdaf26766a61d4814717ea4" + integrity sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA== + dependencies: + is-docker "^3.0.0" + +is-plain-obj@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-4.1.0.tgz#d65025edec3657ce032fd7db63c97883eaed71f0" + integrity sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg== + +is-wsl@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-3.1.0.tgz#e1c657e39c10090afcbedec61720f6b924c3cbd2" + integrity sha512-UcVfVfaK4Sc4m7X3dUSoHoozQGBEFeDC+zVo06t98xe8CzHSZZBekNXH+tu0NalHolcJ/QAGqS46Hef7QXBIMw== + dependencies: + is-inside-container "^1.0.0" + +jiti@^2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/jiti/-/jiti-2.4.2.tgz#d19b7732ebb6116b06e2038da74a55366faef560" + integrity sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A== + +js-yaml@^4.1.0: + 
version "4.1.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +kleur@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== + +kleur@^4.1.5: + version "4.1.5" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-4.1.5.tgz#95106101795f7050c6c650f350c683febddb1780" + integrity sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ== + +klona@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/klona/-/klona-2.0.6.tgz#85bffbf819c03b2f53270412420a4555ef882e22" + integrity sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA== + +lightningcss-darwin-arm64@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.1.tgz#3d47ce5e221b9567c703950edf2529ca4a3700ae" + integrity sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ== + +lightningcss-darwin-x64@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.1.tgz#e81105d3fd6330860c15fe860f64d39cff5fbd22" + integrity sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA== + +lightningcss-freebsd-x64@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.1.tgz#a0e732031083ff9d625c5db021d09eb085af8be4" + integrity sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig== + +lightningcss-linux-arm-gnueabihf@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.1.tgz#1f5ecca6095528ddb649f9304ba2560c72474908" + integrity sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q== + +lightningcss-linux-arm64-gnu@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.1.tgz#eee7799726103bffff1e88993df726f6911ec009" + integrity sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw== + +lightningcss-linux-arm64-musl@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.1.tgz#f2e4b53f42892feeef8f620cbb889f7c064a7dfe" + integrity sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ== + +lightningcss-linux-x64-gnu@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.1.tgz#2fc7096224bc000ebb97eea94aea248c5b0eb157" + integrity sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw== + +lightningcss-linux-x64-musl@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.1.tgz#66dca2b159fd819ea832c44895d07e5b31d75f26" + integrity 
sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ== + +lightningcss-win32-arm64-msvc@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.1.tgz#7d8110a19d7c2d22bfdf2f2bb8be68e7d1b69039" + integrity sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA== + +lightningcss-win32-x64-msvc@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.1.tgz#fd7dd008ea98494b85d24b4bea016793f2e0e352" + integrity sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg== + +lightningcss@1.30.1: + version "1.30.1" + resolved "https://registry.yarnpkg.com/lightningcss/-/lightningcss-1.30.1.tgz#78e979c2d595bfcb90d2a8c0eb632fe6c5bfed5d" + integrity sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg== + dependencies: + detect-libc "^2.0.3" + optionalDependencies: + lightningcss-darwin-arm64 "1.30.1" + lightningcss-darwin-x64 "1.30.1" + lightningcss-freebsd-x64 "1.30.1" + lightningcss-linux-arm-gnueabihf "1.30.1" + lightningcss-linux-arm64-gnu "1.30.1" + lightningcss-linux-arm64-musl "1.30.1" + lightningcss-linux-x64-gnu "1.30.1" + lightningcss-linux-x64-musl "1.30.1" + lightningcss-win32-arm64-msvc "1.30.1" + lightningcss-win32-x64-msvc "1.30.1" + +longest-streak@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/longest-streak/-/longest-streak-3.1.0.tgz#62fa67cd958742a1574af9f39866364102d90cd4" + integrity sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g== + +lru-cache@^10.4.3: + version "10.4.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.4.3.tgz#410fc8a17b70e598013df257c2446b7f3383f119" + integrity sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ== + +magic-string@^0.30.17: + version "0.30.17" + resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.30.17.tgz#450a449673d2460e5bbcfba9a61916a1714c7453" + integrity sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA== + dependencies: + "@jridgewell/sourcemap-codec" "^1.5.0" + +magicast@^0.3.5: + version "0.3.5" + resolved "https://registry.yarnpkg.com/magicast/-/magicast-0.3.5.tgz#8301c3c7d66704a0771eb1bad74274f0ec036739" + integrity sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ== + dependencies: + "@babel/parser" "^7.25.4" + "@babel/types" "^7.25.4" + source-map-js "^1.2.0" + +markdown-extensions@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/markdown-extensions/-/markdown-extensions-2.0.0.tgz#34bebc83e9938cae16e0e017e4a9814a8330d3c4" + integrity sha512-o5vL7aDWatOTX8LzaS1WMoaoxIiLRQJuIKKe2wAw6IeULDHaqbiqiggmx+pKvZDb1Sj+pE46Sn1T7lCqfFtg1Q== + +markdown-table@^3.0.0: + version "3.0.4" + resolved "https://registry.yarnpkg.com/markdown-table/-/markdown-table-3.0.4.tgz#fe44d6d410ff9d6f2ea1797a3f60aa4d2b631c2a" + integrity sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw== + +mdast-util-definitions@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/mdast-util-definitions/-/mdast-util-definitions-6.0.0.tgz#c1bb706e5e76bb93f9a09dd7af174002ae69ac24" + integrity 
sha512-scTllyX6pnYNZH/AIp/0ePz6s4cZtARxImwoPJ7kS42n+MnVsI4XbnG6d4ibehRIldYMWM2LD7ImQblVhUejVQ== + dependencies: + "@types/mdast" "^4.0.0" + "@types/unist" "^3.0.0" + unist-util-visit "^5.0.0" + +mdast-util-directive@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/mdast-util-directive/-/mdast-util-directive-3.1.0.tgz#f3656f4aab6ae3767d3c72cfab5e8055572ccba1" + integrity sha512-I3fNFt+DHmpWCYAT7quoM6lHf9wuqtI+oCOfvILnoicNIqjh5E3dEJWiXuYME2gNe8vl1iMQwyUHa7bgFmak6Q== + dependencies: + "@types/mdast" "^4.0.0" + "@types/unist" "^3.0.0" + ccount "^2.0.0" + devlop "^1.0.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" + parse-entities "^4.0.0" + stringify-entities "^4.0.0" + unist-util-visit-parents "^6.0.0" + +mdast-util-find-and-replace@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.2.tgz#70a3174c894e14df722abf43bc250cbae44b11df" + integrity sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg== + dependencies: + "@types/mdast" "^4.0.0" + escape-string-regexp "^5.0.0" + unist-util-is "^6.0.0" + unist-util-visit-parents "^6.0.0" + +mdast-util-from-markdown@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz#4850390ca7cf17413a9b9a0fbefcd1bc0eb4160a" + integrity sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA== + dependencies: + "@types/mdast" "^4.0.0" + "@types/unist" "^3.0.0" + decode-named-character-reference "^1.0.0" + devlop "^1.0.0" + mdast-util-to-string "^4.0.0" + micromark "^4.0.0" + micromark-util-decode-numeric-character-reference "^2.0.0" + micromark-util-decode-string "^2.0.0" + micromark-util-normalize-identifier "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + unist-util-stringify-position "^4.0.0" + +mdast-util-gfm-autolink-literal@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.1.tgz#abd557630337bd30a6d5a4bd8252e1c2dc0875d5" + integrity sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ== + dependencies: + "@types/mdast" "^4.0.0" + ccount "^2.0.0" + devlop "^1.0.0" + mdast-util-find-and-replace "^3.0.0" + micromark-util-character "^2.0.0" + +mdast-util-gfm-footnote@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.1.0.tgz#7778e9d9ca3df7238cc2bd3fa2b1bf6a65b19403" + integrity sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ== + dependencies: + "@types/mdast" "^4.0.0" + devlop "^1.1.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" + micromark-util-normalize-identifier "^2.0.0" + +mdast-util-gfm-strikethrough@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz#d44ef9e8ed283ac8c1165ab0d0dfd058c2764c16" + integrity sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg== + dependencies: + "@types/mdast" "^4.0.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" + +mdast-util-gfm-table@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz#7a435fb6223a72b0862b33afbd712b6dae878d38" + integrity 
sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg== + dependencies: + "@types/mdast" "^4.0.0" + devlop "^1.0.0" + markdown-table "^3.0.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" + +mdast-util-gfm-task-list-item@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz#e68095d2f8a4303ef24094ab642e1047b991a936" + integrity sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ== + dependencies: + "@types/mdast" "^4.0.0" + devlop "^1.0.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" + +mdast-util-gfm@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/mdast-util-gfm/-/mdast-util-gfm-3.1.0.tgz#2cdf63b92c2a331406b0fb0db4c077c1b0331751" + integrity sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ== + dependencies: + mdast-util-from-markdown "^2.0.0" + mdast-util-gfm-autolink-literal "^2.0.0" + mdast-util-gfm-footnote "^2.0.0" + mdast-util-gfm-strikethrough "^2.0.0" + mdast-util-gfm-table "^2.0.0" + mdast-util-gfm-task-list-item "^2.0.0" + mdast-util-to-markdown "^2.0.0" + +mdast-util-mdx-expression@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.1.tgz#43f0abac9adc756e2086f63822a38c8d3c3a5096" + integrity sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ== + dependencies: + "@types/estree-jsx" "^1.0.0" + "@types/hast" "^3.0.0" + "@types/mdast" "^4.0.0" + devlop "^1.0.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" + +mdast-util-mdx-jsx@^3.0.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.2.0.tgz#fd04c67a2a7499efb905a8a5c578dddc9fdada0d" + integrity sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q== + dependencies: + "@types/estree-jsx" "^1.0.0" + "@types/hast" "^3.0.0" + "@types/mdast" "^4.0.0" + "@types/unist" "^3.0.0" + ccount "^2.0.0" + devlop "^1.1.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" + parse-entities "^4.0.0" + stringify-entities "^4.0.0" + unist-util-stringify-position "^4.0.0" + vfile-message "^4.0.0" + +mdast-util-mdx@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/mdast-util-mdx/-/mdast-util-mdx-3.0.0.tgz#792f9cf0361b46bee1fdf1ef36beac424a099c41" + integrity sha512-JfbYLAW7XnYTTbUsmpu0kdBUVe+yKVJZBItEjwyYJiDJuZ9w4eeaqks4HQO+R7objWgS2ymV60GYpI14Ug554w== + dependencies: + mdast-util-from-markdown "^2.0.0" + mdast-util-mdx-expression "^2.0.0" + mdast-util-mdx-jsx "^3.0.0" + mdast-util-mdxjs-esm "^2.0.0" + mdast-util-to-markdown "^2.0.0" + +mdast-util-mdxjs-esm@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz#019cfbe757ad62dd557db35a695e7314bcc9fa97" + integrity sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg== + dependencies: + "@types/estree-jsx" "^1.0.0" + "@types/hast" "^3.0.0" + "@types/mdast" "^4.0.0" + devlop "^1.0.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" + +mdast-util-phrasing@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz#7cc0a8dec30eaf04b7b1a9661a92adb3382aa6e3" + integrity 
sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w== + dependencies: + "@types/mdast" "^4.0.0" + unist-util-is "^6.0.0" + +mdast-util-to-hast@^13.0.0: + version "13.2.0" + resolved "https://registry.yarnpkg.com/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz#5ca58e5b921cc0a3ded1bc02eed79a4fe4fe41f4" + integrity sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA== + dependencies: + "@types/hast" "^3.0.0" + "@types/mdast" "^4.0.0" + "@ungap/structured-clone" "^1.0.0" + devlop "^1.0.0" + micromark-util-sanitize-uri "^2.0.0" + trim-lines "^3.0.0" + unist-util-position "^5.0.0" + unist-util-visit "^5.0.0" + vfile "^6.0.0" + +mdast-util-to-markdown@^2.0.0, mdast-util-to-markdown@^2.1.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz#f910ffe60897f04bb4b7e7ee434486f76288361b" + integrity sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA== + dependencies: + "@types/mdast" "^4.0.0" + "@types/unist" "^3.0.0" + longest-streak "^3.0.0" + mdast-util-phrasing "^4.0.0" + mdast-util-to-string "^4.0.0" + micromark-util-classify-character "^2.0.0" + micromark-util-decode-string "^2.0.0" + unist-util-visit "^5.0.0" + zwitch "^2.0.0" + +mdast-util-to-string@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz#7a5121475556a04e7eddeb67b264aae79d312814" + integrity sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg== + dependencies: + "@types/mdast" "^4.0.0" + +mdn-data@2.12.2: + version "2.12.2" + resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.12.2.tgz#9ae6c41a9e65adf61318b32bff7b64fbfb13f8cf" + integrity sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA== + +micromark-core-commonmark@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz#c691630e485021a68cf28dbc2b2ca27ebf678cd4" + integrity sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg== + dependencies: + decode-named-character-reference "^1.0.0" + devlop "^1.0.0" + micromark-factory-destination "^2.0.0" + micromark-factory-label "^2.0.0" + micromark-factory-space "^2.0.0" + micromark-factory-title "^2.0.0" + micromark-factory-whitespace "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-chunked "^2.0.0" + micromark-util-classify-character "^2.0.0" + micromark-util-html-tag-name "^2.0.0" + micromark-util-normalize-identifier "^2.0.0" + micromark-util-resolve-all "^2.0.0" + micromark-util-subtokenize "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-extension-directive@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/micromark-extension-directive/-/micromark-extension-directive-3.0.2.tgz#2eb61985d1995a7c1ff7621676a4f32af29409e8" + integrity sha512-wjcXHgk+PPdmvR58Le9d7zQYWy+vKEU9Se44p2CrCDPiLr2FMyiT4Fyb5UFKFC66wGB3kPlgD7q3TnoqPS7SZA== + dependencies: + devlop "^1.0.0" + micromark-factory-space "^2.0.0" + micromark-factory-whitespace "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + parse-entities "^4.0.0" + +micromark-extension-gfm-autolink-literal@^2.0.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.1.0.tgz#6286aee9686c4462c1e3552a9d505feddceeb935" + integrity sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw== + dependencies: + micromark-util-character "^2.0.0" + micromark-util-sanitize-uri "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-extension-gfm-footnote@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.1.0.tgz#4dab56d4e398b9853f6fe4efac4fc9361f3e0750" + integrity sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw== + dependencies: + devlop "^1.0.0" + micromark-core-commonmark "^2.0.0" + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-normalize-identifier "^2.0.0" + micromark-util-sanitize-uri "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-extension-gfm-strikethrough@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz#86106df8b3a692b5f6a92280d3879be6be46d923" + integrity sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw== + dependencies: + devlop "^1.0.0" + micromark-util-chunked "^2.0.0" + micromark-util-classify-character "^2.0.0" + micromark-util-resolve-all "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-extension-gfm-table@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.1.tgz#fac70bcbf51fe65f5f44033118d39be8a9b5940b" + integrity sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg== + dependencies: + devlop "^1.0.0" + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-extension-gfm-tagfilter@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz#f26d8a7807b5985fba13cf61465b58ca5ff7dc57" + integrity sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg== + dependencies: + micromark-util-types "^2.0.0" + +micromark-extension-gfm-task-list-item@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz#bcc34d805639829990ec175c3eea12bb5b781f2c" + integrity sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw== + dependencies: + devlop "^1.0.0" + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-extension-gfm@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz#3e13376ab95dd7a5cfd0e29560dfe999657b3c5b" + integrity sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w== + dependencies: + micromark-extension-gfm-autolink-literal "^2.0.0" + micromark-extension-gfm-footnote "^2.0.0" + micromark-extension-gfm-strikethrough "^2.0.0" + micromark-extension-gfm-table "^2.0.0" + micromark-extension-gfm-tagfilter 
"^2.0.0" + micromark-extension-gfm-task-list-item "^2.0.0" + micromark-util-combine-extensions "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-extension-mdx-expression@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/micromark-extension-mdx-expression/-/micromark-extension-mdx-expression-3.0.1.tgz#43d058d999532fb3041195a3c3c05c46fa84543b" + integrity sha512-dD/ADLJ1AeMvSAKBwO22zG22N4ybhe7kFIZ3LsDI0GlsNr2A3KYxb0LdC1u5rj4Nw+CHKY0RVdnHX8vj8ejm4Q== + dependencies: + "@types/estree" "^1.0.0" + devlop "^1.0.0" + micromark-factory-mdx-expression "^2.0.0" + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-events-to-acorn "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-extension-mdx-jsx@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/micromark-extension-mdx-jsx/-/micromark-extension-mdx-jsx-3.0.2.tgz#ffc98bdb649798902fa9fc5689f67f9c1c902044" + integrity sha512-e5+q1DjMh62LZAJOnDraSSbDMvGJ8x3cbjygy2qFEi7HCeUT4BDKCvMozPozcD6WmOt6sVvYDNBKhFSz3kjOVQ== + dependencies: + "@types/estree" "^1.0.0" + devlop "^1.0.0" + estree-util-is-identifier-name "^3.0.0" + micromark-factory-mdx-expression "^2.0.0" + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-events-to-acorn "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + vfile-message "^4.0.0" + +micromark-extension-mdx-md@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-extension-mdx-md/-/micromark-extension-mdx-md-2.0.0.tgz#1d252881ea35d74698423ab44917e1f5b197b92d" + integrity sha512-EpAiszsB3blw4Rpba7xTOUptcFeBFi+6PY8VnJ2hhimH+vCQDirWgsMpz7w1XcZE7LVrSAUGb9VJpG9ghlYvYQ== + dependencies: + micromark-util-types "^2.0.0" + +micromark-extension-mdxjs-esm@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/micromark-extension-mdxjs-esm/-/micromark-extension-mdxjs-esm-3.0.0.tgz#de21b2b045fd2059bd00d36746081de38390d54a" + integrity sha512-DJFl4ZqkErRpq/dAPyeWp15tGrcrrJho1hKK5uBS70BCtfrIFg81sqcTVu3Ta+KD1Tk5vAtBNElWxtAa+m8K9A== + dependencies: + "@types/estree" "^1.0.0" + devlop "^1.0.0" + micromark-core-commonmark "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-events-to-acorn "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + unist-util-position-from-estree "^2.0.0" + vfile-message "^4.0.0" + +micromark-extension-mdxjs@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/micromark-extension-mdxjs/-/micromark-extension-mdxjs-3.0.0.tgz#b5a2e0ed449288f3f6f6c544358159557549de18" + integrity sha512-A873fJfhnJ2siZyUrJ31l34Uqwy4xIFmvPY1oj+Ean5PHcPBYzEsvqvWGaWcfEIr11O5Dlw3p2y0tZWpKHDejQ== + dependencies: + acorn "^8.0.0" + acorn-jsx "^5.0.0" + micromark-extension-mdx-expression "^3.0.0" + micromark-extension-mdx-jsx "^3.0.0" + micromark-extension-mdx-md "^2.0.0" + micromark-extension-mdxjs-esm "^3.0.0" + micromark-util-combine-extensions "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-factory-destination@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz#8fef8e0f7081f0474fbdd92deb50c990a0264639" + integrity sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA== + dependencies: + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-factory-label@^2.0.0: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz#5267efa97f1e5254efc7f20b459a38cb21058ba1" + integrity sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg== + dependencies: + devlop "^1.0.0" + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-factory-mdx-expression@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/micromark-factory-mdx-expression/-/micromark-factory-mdx-expression-2.0.3.tgz#bb09988610589c07d1c1e4425285895041b3dfa9" + integrity sha512-kQnEtA3vzucU2BkrIa8/VaSAsP+EJ3CKOvhMuJgOEGg9KDC6OAY6nSnNDVRiVNRqj7Y4SlSzcStaH/5jge8JdQ== + dependencies: + "@types/estree" "^1.0.0" + devlop "^1.0.0" + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-events-to-acorn "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + unist-util-position-from-estree "^2.0.0" + vfile-message "^4.0.0" + +micromark-factory-space@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz#36d0212e962b2b3121f8525fc7a3c7c029f334fc" + integrity sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg== + dependencies: + micromark-util-character "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-factory-title@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz#237e4aa5d58a95863f01032d9ee9b090f1de6e94" + integrity sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw== + dependencies: + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-factory-whitespace@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz#06b26b2983c4d27bfcc657b33e25134d4868b0b1" + integrity sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ== + dependencies: + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-util-character@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/micromark-util-character/-/micromark-util-character-2.1.1.tgz#2f987831a40d4c510ac261e89852c4e9703ccda6" + integrity sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q== + dependencies: + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-util-chunked@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz#47fbcd93471a3fccab86cff03847fc3552db1051" + integrity sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA== + dependencies: + micromark-util-symbol "^2.0.0" + +micromark-util-classify-character@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz#d399faf9c45ca14c8b4be98b1ea481bced87b629" + integrity sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q== + dependencies: + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + 
+micromark-util-combine-extensions@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz#2a0f490ab08bff5cc2fd5eec6dd0ca04f89b30a9" + integrity sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg== + dependencies: + micromark-util-chunked "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-util-decode-numeric-character-reference@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz#fcf15b660979388e6f118cdb6bf7d79d73d26fe5" + integrity sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw== + dependencies: + micromark-util-symbol "^2.0.0" + +micromark-util-decode-string@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz#6cb99582e5d271e84efca8e61a807994d7161eb2" + integrity sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ== + dependencies: + decode-named-character-reference "^1.0.0" + micromark-util-character "^2.0.0" + micromark-util-decode-numeric-character-reference "^2.0.0" + micromark-util-symbol "^2.0.0" + +micromark-util-encode@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz#0d51d1c095551cfaac368326963cf55f15f540b8" + integrity sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw== + +micromark-util-events-to-acorn@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/micromark-util-events-to-acorn/-/micromark-util-events-to-acorn-2.0.3.tgz#e7a8a6b55a47e5a06c720d5a1c4abae8c37c98f3" + integrity sha512-jmsiEIiZ1n7X1Rr5k8wVExBQCg5jy4UXVADItHmNk1zkwEVhBuIUKRu3fqv+hs4nxLISi2DQGlqIOGiFxgbfHg== + dependencies: + "@types/estree" "^1.0.0" + "@types/unist" "^3.0.0" + devlop "^1.0.0" + estree-util-visit "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + vfile-message "^4.0.0" + +micromark-util-html-tag-name@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz#e40403096481986b41c106627f98f72d4d10b825" + integrity sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA== + +micromark-util-normalize-identifier@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz#c30d77b2e832acf6526f8bf1aa47bc9c9438c16d" + integrity sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q== + dependencies: + micromark-util-symbol "^2.0.0" + +micromark-util-resolve-all@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz#e1a2d62cdd237230a2ae11839027b19381e31e8b" + integrity sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg== + dependencies: + micromark-util-types "^2.0.0" + +micromark-util-sanitize-uri@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz#ab89789b818a58752b73d6b55238621b7faa8fd7" + integrity 
sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ== + dependencies: + micromark-util-character "^2.0.0" + micromark-util-encode "^2.0.0" + micromark-util-symbol "^2.0.0" + +micromark-util-subtokenize@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz#d8ade5ba0f3197a1cf6a2999fbbfe6357a1a19ee" + integrity sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA== + dependencies: + devlop "^1.0.0" + micromark-util-chunked "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-util-symbol@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz#e5da494e8eb2b071a0d08fb34f6cefec6c0a19b8" + integrity sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q== + +micromark-util-types@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/micromark-util-types/-/micromark-util-types-2.0.2.tgz#f00225f5f5a0ebc3254f96c36b6605c4b393908e" + integrity sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA== + +micromark@^4.0.0: + version "4.0.2" + resolved "https://registry.yarnpkg.com/micromark/-/micromark-4.0.2.tgz#91395a3e1884a198e62116e33c9c568e39936fdb" + integrity sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA== + dependencies: + "@types/debug" "^4.0.0" + debug "^4.0.0" + decode-named-character-reference "^1.0.0" + devlop "^1.0.0" + micromark-core-commonmark "^2.0.0" + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-chunked "^2.0.0" + micromark-util-combine-extensions "^2.0.0" + micromark-util-decode-numeric-character-reference "^2.0.0" + micromark-util-encode "^2.0.0" + micromark-util-normalize-identifier "^2.0.0" + micromark-util-resolve-all "^2.0.0" + micromark-util-sanitize-uri "^2.0.0" + micromark-util-subtokenize "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +mimic-response@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9" + integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ== + +minimist@^1.2.0, minimist@^1.2.3: + version "1.2.8" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" + integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== + +minipass@^7.0.4, minipass@^7.1.2: + version "7.1.2" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707" + integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== + +minizlib@^3.0.1: + version "3.0.2" + resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-3.0.2.tgz#f33d638eb279f664439aa38dc5f91607468cb574" + integrity sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA== + dependencies: + minipass "^7.1.2" + +mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3: + version "0.5.3" + resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" + integrity 
sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== + +mkdirp@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-3.0.1.tgz#e44e4c5607fb279c168241713cc6e0fea9adcb50" + integrity sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg== + +mrmime@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/mrmime/-/mrmime-2.0.1.tgz#bc3e87f7987853a54c9850eeb1f1078cd44adddc" + integrity sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ== + +ms@^2.1.3: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +nanoid@^3.3.11: + version "3.3.11" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.11.tgz#4f4f112cefbe303202f2199838128936266d185b" + integrity sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w== + +napi-build-utils@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/napi-build-utils/-/napi-build-utils-2.0.0.tgz#13c22c0187fcfccce1461844136372a47ddc027e" + integrity sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA== + +neotraverse@^0.6.18: + version "0.6.18" + resolved "https://registry.yarnpkg.com/neotraverse/-/neotraverse-0.6.18.tgz#abcb33dda2e8e713cf6321b29405e822230cdb30" + integrity sha512-Z4SmBUweYa09+o6pG+eASabEpP6QkQ70yHj351pQoEXIs8uHbaU2DWVmzBANKgflPa47A50PtB2+NgRpQvr7vA== + +nlcst-to-string@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/nlcst-to-string/-/nlcst-to-string-4.0.0.tgz#05511e8461ebfb415952eb0b7e9a1a7d40471bd4" + integrity sha512-YKLBCcUYKAg0FNlOBT6aI91qFmSiFKiluk655WzPF+DDMA02qIyy8uiRqI8QXtcFpEvll12LpL5MXqEmAZ+dcA== + dependencies: + "@types/nlcst" "^2.0.0" + +node-abi@^3.3.0: + version "3.75.0" + resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-3.75.0.tgz#2f929a91a90a0d02b325c43731314802357ed764" + integrity sha512-OhYaY5sDsIka7H7AtijtI9jwGYLyl29eQn/W623DiN/MIv5sUqc4g7BIDThX+gb7di9f6xK02nkp8sdfFWZLTg== + dependencies: + semver "^7.3.5" + +node-addon-api@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-6.1.0.tgz#ac8470034e58e67d0c6f1204a18ae6995d9c0d76" + integrity sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA== + +node-fetch-native@^1.6.4, node-fetch-native@^1.6.6: + version "1.6.6" + resolved "https://registry.yarnpkg.com/node-fetch-native/-/node-fetch-native-1.6.6.tgz#ae1d0e537af35c2c0b0de81cbff37eedd410aa37" + integrity sha512-8Mc2HhqPdlIfedsuZoc3yioPuzp6b+L5jRCRY1QzuWZh2EGJVQrGppC6V6cF0bLdbW0+O2YpqCA25aF/1lvipQ== + +node-fetch@^2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" + integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== + dependencies: + whatwg-url "^5.0.0" + +node-mock-http@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/node-mock-http/-/node-mock-http-1.0.0.tgz#4b32cd509c7f46d844e68ea93fb8be405a18a42a" + integrity sha512-0uGYQ1WQL1M5kKvGRXWQ3uZCHtLTO8hln3oBjIusM75WoesZ909uQJs/Hb946i2SS+Gsrhkaa6iAO17jRIv6DQ== + +normalize-path@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +nth-check@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" + integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== + dependencies: + boolbase "^1.0.0" + +ofetch@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/ofetch/-/ofetch-1.4.1.tgz#b6bf6b0d75ba616cef6519dd8b6385a8bae480ec" + integrity sha512-QZj2DfGplQAr2oj9KzceK9Hwz6Whxazmn85yYeVuS3u9XTMOGMRx0kO95MQ+vLsj/S/NwBDMMLU5hpxvI6Tklw== + dependencies: + destr "^2.0.3" + node-fetch-native "^1.6.4" + ufo "^1.5.4" + +ohash@^2.0.0: + version "2.0.11" + resolved "https://registry.yarnpkg.com/ohash/-/ohash-2.0.11.tgz#60b11e8cff62ca9dee88d13747a5baa145f5900b" + integrity sha512-RdR9FQrFwNBNXAr4GixM8YaRZRJ5PUWbKYbE5eOsrwAjJW0q2REGcf79oYPsLyskQCZG1PLN+S/K1V00joZAoQ== + +once@^1.3.1, once@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +oniguruma-parser@^0.12.1: + version "0.12.1" + resolved "https://registry.yarnpkg.com/oniguruma-parser/-/oniguruma-parser-0.12.1.tgz#82ba2208d7a2b69ee344b7efe0ae930c627dcc4a" + integrity sha512-8Unqkvk1RYc6yq2WBYRj4hdnsAxVze8i7iPfQr8e4uSP3tRv0rpZcbGUDvxfQQcdwHt/e9PrMvGCsa8OqG9X3w== + +oniguruma-to-es@^4.3.3: + version "4.3.3" + resolved "https://registry.yarnpkg.com/oniguruma-to-es/-/oniguruma-to-es-4.3.3.tgz#50db2c1e28ec365e102c1863dfd3d1d1ad18613e" + integrity sha512-rPiZhzC3wXwE59YQMRDodUwwT9FZ9nNBwQQfsd1wfdtlKEyCdRV0avrTcSZ5xlIvGRVPd/cx6ZN45ECmS39xvg== + dependencies: + oniguruma-parser "^0.12.1" + regex "^6.0.1" + regex-recursion "^6.0.2" + +p-limit@^6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-6.2.0.tgz#c254d22ba6aeef441a3564c5e6c2f2da59268a0f" + integrity sha512-kuUqqHNUqoIWp/c467RI4X6mmyuojY5jGutNU0wVTmEOOfcuwLqyMVoAi9MKi2Ak+5i9+nhmrK4ufZE8069kHA== + dependencies: + yocto-queue "^1.1.1" + +p-queue@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/p-queue/-/p-queue-8.1.0.tgz#d71929249868b10b16f885d8a82beeaf35d32279" + integrity sha512-mxLDbbGIBEXTJL0zEx8JIylaj3xQ7Z/7eEVjcF9fJX4DBiH9oqe+oahYnlKKxm0Ci9TlWTyhSHgygxMxjIB2jw== + dependencies: + eventemitter3 "^5.0.1" + p-timeout "^6.1.2" + +p-timeout@^6.1.2: + version "6.1.4" + resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-6.1.4.tgz#418e1f4dd833fa96a2e3f532547dd2abdb08dbc2" + integrity sha512-MyIV3ZA/PmyBN/ud8vV9XzwTrNtR4jFrObymZYnZqMmW0zA8Z17vnT0rBgFE/TlohB+YCHqXMgZzb3Csp49vqg== + +package-manager-detector@^1.1.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/package-manager-detector/-/package-manager-detector-1.3.0.tgz#b42d641c448826e03c2b354272456a771ce453c0" + integrity sha512-ZsEbbZORsyHuO00lY1kV3/t72yp6Ysay6Pd17ZAlNGuGwmWDLCJxFpRs0IzfXfj1o4icJOkUEioexFHzyPurSQ== + +pagefind@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/pagefind/-/pagefind-1.3.0.tgz#467560447dcc7bbe590f1b888cc8bc733bb377fa" + integrity sha512-8KPLGT5g9s+olKMRTU9LFekLizkVIu9tes90O1/aigJ0T5LmyPqTzGJrETnSw3meSYg58YH7JTzhTTW/3z6VAw== + optionalDependencies: + "@pagefind/darwin-arm64" "1.3.0" + 
"@pagefind/darwin-x64" "1.3.0" + "@pagefind/linux-arm64" "1.3.0" + "@pagefind/linux-x64" "1.3.0" + "@pagefind/windows-x64" "1.3.0" + +pako@^0.2.5: + version "0.2.9" + resolved "https://registry.yarnpkg.com/pako/-/pako-0.2.9.tgz#f3f7522f4ef782348da8161bad9ecfd51bf83a75" + integrity sha512-NUcwaKxUxWrZLpDG+z/xZaCgQITkA/Dv4V/T6bw7VON6l1Xz/VnrBqrYjZQ12TamKHzITTfOEIYUj48y2KXImA== + +parse-entities@^4.0.0: + version "4.0.2" + resolved "https://registry.yarnpkg.com/parse-entities/-/parse-entities-4.0.2.tgz#61d46f5ed28e4ee62e9ddc43d6b010188443f159" + integrity sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw== + dependencies: + "@types/unist" "^2.0.0" + character-entities-legacy "^3.0.0" + character-reference-invalid "^2.0.0" + decode-named-character-reference "^1.0.0" + is-alphanumerical "^2.0.0" + is-decimal "^2.0.0" + is-hexadecimal "^2.0.0" + +parse-latin@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/parse-latin/-/parse-latin-7.0.0.tgz#8dfacac26fa603f76417f36233fc45602a323e1d" + integrity sha512-mhHgobPPua5kZ98EF4HWiH167JWBfl4pvAIXXdbaVohtK7a6YBOy56kvhCqduqyo/f3yrHFWmqmiMg/BkBkYYQ== + dependencies: + "@types/nlcst" "^2.0.0" + "@types/unist" "^3.0.0" + nlcst-to-string "^4.0.0" + unist-util-modify-children "^4.0.0" + unist-util-visit-children "^3.0.0" + vfile "^6.0.0" + +parse5@^7.0.0: + version "7.3.0" + resolved "https://registry.yarnpkg.com/parse5/-/parse5-7.3.0.tgz#d7e224fa72399c7a175099f45fc2ad024b05ec05" + integrity sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw== + dependencies: + entities "^6.0.0" + +picocolors@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.1.1.tgz#3d321af3eab939b083c8f929a1d12cda81c26b6b" + integrity sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA== + +picomatch@^2.0.4: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +picomatch@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-4.0.2.tgz#77c742931e8f3b8820946c76cd0c1f13730d1dab" + integrity sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg== + +postcss-nested@^6.0.1: + version "6.2.0" + resolved "https://registry.yarnpkg.com/postcss-nested/-/postcss-nested-6.2.0.tgz#4c2d22ab5f20b9cb61e2c5c5915950784d068131" + integrity sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ== + dependencies: + postcss-selector-parser "^6.1.1" + +postcss-selector-parser@^6.1.1: + version "6.1.2" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz#27ecb41fb0e3b6ba7a1ec84fff347f734c7929de" + integrity sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg== + dependencies: + cssesc "^3.0.0" + util-deprecate "^1.0.2" + +postcss@^8.4.38, postcss@^8.5.3: + version "8.5.5" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.5.5.tgz#04de7797f6911fb1c96550e96616d08681537ef3" + integrity sha512-d/jtm+rdNT8tpXuHY5MMtcbJFBkhXE6593XVR9UoGCH8jSFGci7jGvMGH5RYd5PBJW+00NZQt6gf7CbagJCrhg== + dependencies: + nanoid "^3.3.11" + picocolors "^1.1.1" + source-map-js "^1.2.1" + +prebuild-install@^7.1.1: + version "7.1.3" + resolved 
"https://registry.yarnpkg.com/prebuild-install/-/prebuild-install-7.1.3.tgz#d630abad2b147443f20a212917beae68b8092eec" + integrity sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug== + dependencies: + detect-libc "^2.0.0" + expand-template "^2.0.3" + github-from-package "0.0.0" + minimist "^1.2.3" + mkdirp-classic "^0.5.3" + napi-build-utils "^2.0.0" + node-abi "^3.3.0" + pump "^3.0.0" + rc "^1.2.7" + simple-get "^4.0.0" + tar-fs "^2.0.0" + tunnel-agent "^0.6.0" + +prismjs@^1.30.0: + version "1.30.0" + resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.30.0.tgz#d9709969d9d4e16403f6f348c63553b19f0975a9" + integrity sha512-DEvV2ZF2r2/63V+tK8hQvrR2ZGn10srHbXviTlcv7Kpzw8jWiNTqbVgjO3IY8RxrrOUF8VPMQQFysYYYv0YZxw== + +prompts@^2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + +property-information@^6.0.0: + version "6.5.0" + resolved "https://registry.yarnpkg.com/property-information/-/property-information-6.5.0.tgz#6212fbb52ba757e92ef4fb9d657563b933b7ffec" + integrity sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig== + +property-information@^7.0.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/property-information/-/property-information-7.1.0.tgz#b622e8646e02b580205415586b40804d3e8bfd5d" + integrity sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ== + +pump@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.2.tgz#836f3edd6bc2ee599256c924ffe0d88573ddcbf8" + integrity sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw== + dependencies: + end-of-stream "^1.1.0" + once "^1.3.1" + +radix3@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/radix3/-/radix3-1.1.2.tgz#fd27d2af3896c6bf4bcdfab6427c69c2afc69ec0" + integrity sha512-b484I/7b8rDEdSDKckSSBA8knMpcdsXudlE/LNL639wFoHKwLbEkQFZHWEYwDC0wa0FKUcCY+GAF73Z7wxNVFA== + +rc@^1.2.7: + version "1.2.8" + resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" + integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== + dependencies: + deep-extend "^0.6.0" + ini "~1.3.0" + minimist "^1.2.0" + strip-json-comments "~2.0.1" + +readable-stream@^3.1.1, readable-stream@^3.4.0: + version "3.6.2" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" + integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + +readdirp@^4.0.1: + version "4.1.2" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-4.1.2.tgz#eb85801435fbf2a7ee58f19e0921b068fc69948d" + integrity sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg== + +recma-build-jsx@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/recma-build-jsx/-/recma-build-jsx-1.0.0.tgz#c02f29e047e103d2fab2054954e1761b8ea253c4" + integrity sha512-8GtdyqaBcDfva+GUKDr3nev3VpKAhup1+RvkMvUxURHpW7QyIvk9F5wz7Vzo06CEMSilw6uArgRqhpiUcWp8ew== + dependencies: + "@types/estree" "^1.0.0" + 
estree-util-build-jsx "^3.0.0" + vfile "^6.0.0" + +recma-jsx@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/recma-jsx/-/recma-jsx-1.0.0.tgz#f7bef02e571a49d6ba3efdfda8e2efab48dbe3aa" + integrity sha512-5vwkv65qWwYxg+Atz95acp8DMu1JDSqdGkA2Of1j6rCreyFUE/gp15fC8MnGEuG1W68UKjM6x6+YTWIh7hZM/Q== + dependencies: + acorn-jsx "^5.0.0" + estree-util-to-js "^2.0.0" + recma-parse "^1.0.0" + recma-stringify "^1.0.0" + unified "^11.0.0" + +recma-parse@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/recma-parse/-/recma-parse-1.0.0.tgz#c351e161bb0ab47d86b92a98a9d891f9b6814b52" + integrity sha512-OYLsIGBB5Y5wjnSnQW6t3Xg7q3fQ7FWbw/vcXtORTnyaSFscOtABg+7Pnz6YZ6c27fG1/aN8CjfwoUEUIdwqWQ== + dependencies: + "@types/estree" "^1.0.0" + esast-util-from-js "^2.0.0" + unified "^11.0.0" + vfile "^6.0.0" + +recma-stringify@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/recma-stringify/-/recma-stringify-1.0.0.tgz#54632030631e0c7546136ff9ef8fde8e7b44f130" + integrity sha512-cjwII1MdIIVloKvC9ErQ+OgAtwHBmcZ0Bg4ciz78FtbT8In39aAYbaA7zvxQ61xVMSPE8WxhLwLbhif4Js2C+g== + dependencies: + "@types/estree" "^1.0.0" + estree-util-to-js "^2.0.0" + unified "^11.0.0" + vfile "^6.0.0" + +regex-recursion@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/regex-recursion/-/regex-recursion-6.0.2.tgz#a0b1977a74c87f073377b938dbedfab2ea582b33" + integrity sha512-0YCaSCq2VRIebiaUviZNs0cBz1kg5kVS2UKUfNIx8YVs1cN3AV7NTctO5FOKBA+UT2BPJIWZauYHPqJODG50cg== + dependencies: + regex-utilities "^2.3.0" + +regex-utilities@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/regex-utilities/-/regex-utilities-2.3.0.tgz#87163512a15dce2908cf079c8960d5158ff43280" + integrity sha512-8VhliFJAWRaUiVvREIiW2NXXTmHs4vMNnSzuJVhscgmGav3g9VDxLrQndI3dZZVVdp0ZO/5v0xmX516/7M9cng== + +regex@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/regex/-/regex-6.0.1.tgz#282fa4435d0c700b09c0eb0982b602e05ab6a34f" + integrity sha512-uorlqlzAKjKQZ5P+kTJr3eeJGSVroLKoHmquUj4zHWuR+hEyNqlXsSKlYYF5F4NI6nl7tWCs0apKJ0lmfsXAPA== + dependencies: + regex-utilities "^2.3.0" + +rehype-expressive-code@^0.41.2: + version "0.41.2" + resolved "https://registry.yarnpkg.com/rehype-expressive-code/-/rehype-expressive-code-0.41.2.tgz#9ebdee54c451df6358b71e503c575c7fa45ffd13" + integrity sha512-vHYfWO9WxAw6kHHctddOt+P4266BtyT1mrOIuxJD+1ELuvuJAa5uBIhYt0OVMyOhlvf57hzWOXJkHnMhpaHyxw== + dependencies: + expressive-code "^0.41.2" + +rehype-format@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/rehype-format/-/rehype-format-5.0.1.tgz#e255e59bed0c062156aaf51c16fad5a521a1f5c8" + integrity sha512-zvmVru9uB0josBVpr946OR8ui7nJEdzZobwLOOqHb/OOD88W0Vk2SqLwoVOj0fM6IPCCO6TaV9CvQvJMWwukFQ== + dependencies: + "@types/hast" "^3.0.0" + hast-util-format "^1.0.0" + +rehype-parse@^9.0.0: + version "9.0.1" + resolved "https://registry.yarnpkg.com/rehype-parse/-/rehype-parse-9.0.1.tgz#9993bda129acc64c417a9d3654a7be38b2a94c20" + integrity sha512-ksCzCD0Fgfh7trPDxr2rSylbwq9iYDkSn8TCDmEJ49ljEUBxDVCzCHv7QNzZOfODanX4+bWQ4WZqLCRWYLfhag== + dependencies: + "@types/hast" "^3.0.0" + hast-util-from-html "^2.0.0" + unified "^11.0.0" + +rehype-raw@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/rehype-raw/-/rehype-raw-7.0.0.tgz#59d7348fd5dbef3807bbaa1d443efd2dd85ecee4" + integrity sha512-/aE8hCfKlQeA8LmyeyQvQF3eBiLRGNlfBJEvWH7ivp9sBqs7TNqBL5X3v157rM4IFETqDnIOO+z5M/biZbo9Ww== + dependencies: + "@types/hast" "^3.0.0" + hast-util-raw "^9.0.0" + vfile "^6.0.0" + +rehype-recma@^1.0.0: + version "1.0.0" + 
resolved "https://registry.yarnpkg.com/rehype-recma/-/rehype-recma-1.0.0.tgz#d68ef6344d05916bd96e25400c6261775411aa76" + integrity sha512-lqA4rGUf1JmacCNWWZx0Wv1dHqMwxzsDWYMTowuplHF3xH0N/MmrZ/G3BDZnzAkRmxDadujCjaKM2hqYdCBOGw== + dependencies: + "@types/estree" "^1.0.0" + "@types/hast" "^3.0.0" + hast-util-to-estree "^3.0.0" + +rehype-stringify@^10.0.0, rehype-stringify@^10.0.1: + version "10.0.1" + resolved "https://registry.yarnpkg.com/rehype-stringify/-/rehype-stringify-10.0.1.tgz#2ec1ebc56c6aba07905d3b4470bdf0f684f30b75" + integrity sha512-k9ecfXHmIPuFVI61B9DeLPN0qFHfawM6RsuX48hoqlaKSF61RskNjSm1lI8PhBEM0MRdLxVVm4WmTqJQccH9mA== + dependencies: + "@types/hast" "^3.0.0" + hast-util-to-html "^9.0.0" + unified "^11.0.0" + +rehype@^13.0.1, rehype@^13.0.2: + version "13.0.2" + resolved "https://registry.yarnpkg.com/rehype/-/rehype-13.0.2.tgz#ab0b3ac26573d7b265a0099feffad450e4cf1952" + integrity sha512-j31mdaRFrwFRUIlxGeuPXXKWQxet52RBQRvCmzl5eCefn/KGbomK5GMHNMsOJf55fgo3qw5tST5neDuarDYR2A== + dependencies: + "@types/hast" "^3.0.0" + rehype-parse "^9.0.0" + rehype-stringify "^10.0.0" + unified "^11.0.0" + +remark-directive@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/remark-directive/-/remark-directive-3.0.1.tgz#689ba332f156cfe1118e849164cc81f157a3ef0a" + integrity sha512-gwglrEQEZcZYgVyG1tQuA+h58EZfq5CSULw7J90AFuCTyib1thgHPoqQ+h9iFvU6R+vnZ5oNFQR5QKgGpk741A== + dependencies: + "@types/mdast" "^4.0.0" + mdast-util-directive "^3.0.0" + micromark-extension-directive "^3.0.0" + unified "^11.0.0" + +remark-gfm@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/remark-gfm/-/remark-gfm-4.0.1.tgz#33227b2a74397670d357bf05c098eaf8513f0d6b" + integrity sha512-1quofZ2RQ9EWdeN34S79+KExV1764+wCUGop5CPL1WGdD0ocPpu91lzPGbwWMECpEpd42kJGQwzRfyov9j4yNg== + dependencies: + "@types/mdast" "^4.0.0" + mdast-util-gfm "^3.0.0" + micromark-extension-gfm "^3.0.0" + remark-parse "^11.0.0" + remark-stringify "^11.0.0" + unified "^11.0.0" + +remark-mdx@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/remark-mdx/-/remark-mdx-3.1.0.tgz#f979be729ecb35318fa48e2135c1169607a78343" + integrity sha512-Ngl/H3YXyBV9RcRNdlYsZujAmhsxwzxpDzpDEhFBVAGthS4GDgnctpDjgFl/ULx5UEDzqtW1cyBSNKqYYrqLBA== + dependencies: + mdast-util-mdx "^3.0.0" + micromark-extension-mdxjs "^3.0.0" + +remark-parse@^11.0.0: + version "11.0.0" + resolved "https://registry.yarnpkg.com/remark-parse/-/remark-parse-11.0.0.tgz#aa60743fcb37ebf6b069204eb4da304e40db45a1" + integrity sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA== + dependencies: + "@types/mdast" "^4.0.0" + mdast-util-from-markdown "^2.0.0" + micromark-util-types "^2.0.0" + unified "^11.0.0" + +remark-rehype@^11.0.0, remark-rehype@^11.1.2: + version "11.1.2" + resolved "https://registry.yarnpkg.com/remark-rehype/-/remark-rehype-11.1.2.tgz#2addaadda80ca9bd9aa0da763e74d16327683b37" + integrity sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw== + dependencies: + "@types/hast" "^3.0.0" + "@types/mdast" "^4.0.0" + mdast-util-to-hast "^13.0.0" + unified "^11.0.0" + vfile "^6.0.0" + +remark-smartypants@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/remark-smartypants/-/remark-smartypants-3.0.2.tgz#cbaf2b39624c78fcbd6efa224678c1d2e9bc1dfb" + integrity sha512-ILTWeOriIluwEvPjv67v7Blgrcx+LZOkAUVtKI3putuhlZm84FnqDORNXPPm+HY3NdZOMhyDwZ1E+eZB/Df5dA== + dependencies: + retext "^9.0.0" + retext-smartypants "^6.0.0" + unified "^11.0.4" + unist-util-visit 
"^5.0.0" + +remark-stringify@^11.0.0: + version "11.0.0" + resolved "https://registry.yarnpkg.com/remark-stringify/-/remark-stringify-11.0.0.tgz#4c5b01dd711c269df1aaae11743eb7e2e7636fd3" + integrity sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw== + dependencies: + "@types/mdast" "^4.0.0" + mdast-util-to-markdown "^2.0.0" + unified "^11.0.0" + +restructure@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/restructure/-/restructure-3.0.2.tgz#e6b2fad214f78edee21797fa8160fef50eb9b49a" + integrity sha512-gSfoiOEA0VPE6Tukkrr7I0RBdE0s7H1eFCDBk05l1KIQT1UIKNc5JZy6jdyW6eYH3aR3g5b3PuL77rq0hvwtAw== + +retext-latin@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/retext-latin/-/retext-latin-4.0.0.tgz#d02498aa1fd39f1bf00e2ff59b1384c05d0c7ce3" + integrity sha512-hv9woG7Fy0M9IlRQloq/N6atV82NxLGveq+3H2WOi79dtIYWN8OaxogDm77f8YnVXJL2VD3bbqowu5E3EMhBYA== + dependencies: + "@types/nlcst" "^2.0.0" + parse-latin "^7.0.0" + unified "^11.0.0" + +retext-smartypants@^6.0.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/retext-smartypants/-/retext-smartypants-6.2.0.tgz#4e852c2974cf2cfa253eeec427c97efc43b5d158" + integrity sha512-kk0jOU7+zGv//kfjXEBjdIryL1Acl4i9XNkHxtM7Tm5lFiCog576fjNC9hjoR7LTKQ0DsPWy09JummSsH1uqfQ== + dependencies: + "@types/nlcst" "^2.0.0" + nlcst-to-string "^4.0.0" + unist-util-visit "^5.0.0" + +retext-stringify@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/retext-stringify/-/retext-stringify-4.0.0.tgz#501d5440bd4d121e351c7c509f8507de9611e159" + integrity sha512-rtfN/0o8kL1e+78+uxPTqu1Klt0yPzKuQ2BfWwwfgIUSayyzxpM1PJzkKt4V8803uB9qSy32MvI7Xep9khTpiA== + dependencies: + "@types/nlcst" "^2.0.0" + nlcst-to-string "^4.0.0" + unified "^11.0.0" + +retext@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/retext/-/retext-9.0.0.tgz#ab5cd72836894167b0ca6ae70fdcfaa166267f7a" + integrity sha512-sbMDcpHCNjvlheSgMfEcVrZko3cDzdbe1x/e7G66dFp0Ff7Mldvi2uv6JkJQzdRcvLYE8CA8Oe8siQx8ZOgTcA== + dependencies: + "@types/nlcst" "^2.0.0" + retext-latin "^4.0.0" + retext-stringify "^4.0.0" + unified "^11.0.0" + +rollup@^4.34.9: + version "4.43.0" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-4.43.0.tgz#275c09119eb7eaf0c3dea040523b81ef43c57b8c" + integrity sha512-wdN2Kd3Twh8MAEOEJZsuxuLKCsBEo4PVNLK6tQWAn10VhsVewQLzcucMgLolRlhFybGxfclbPeEYBaP6RvUFGg== + dependencies: + "@types/estree" "1.0.7" + optionalDependencies: + "@rollup/rollup-android-arm-eabi" "4.43.0" + "@rollup/rollup-android-arm64" "4.43.0" + "@rollup/rollup-darwin-arm64" "4.43.0" + "@rollup/rollup-darwin-x64" "4.43.0" + "@rollup/rollup-freebsd-arm64" "4.43.0" + "@rollup/rollup-freebsd-x64" "4.43.0" + "@rollup/rollup-linux-arm-gnueabihf" "4.43.0" + "@rollup/rollup-linux-arm-musleabihf" "4.43.0" + "@rollup/rollup-linux-arm64-gnu" "4.43.0" + "@rollup/rollup-linux-arm64-musl" "4.43.0" + "@rollup/rollup-linux-loongarch64-gnu" "4.43.0" + "@rollup/rollup-linux-powerpc64le-gnu" "4.43.0" + "@rollup/rollup-linux-riscv64-gnu" "4.43.0" + "@rollup/rollup-linux-riscv64-musl" "4.43.0" + "@rollup/rollup-linux-s390x-gnu" "4.43.0" + "@rollup/rollup-linux-x64-gnu" "4.43.0" + "@rollup/rollup-linux-x64-musl" "4.43.0" + "@rollup/rollup-win32-arm64-msvc" "4.43.0" + "@rollup/rollup-win32-ia32-msvc" "4.43.0" + "@rollup/rollup-win32-x64-msvc" "4.43.0" + fsevents "~2.3.2" + +safe-buffer@^5.0.1, safe-buffer@~5.2.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + 
integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +sax@^1.2.4: + version "1.4.1" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.4.1.tgz#44cc8988377f126304d3b3fc1010c733b929ef0f" + integrity sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg== + +semver@^7.3.5, semver@^7.5.4, semver@^7.6.3, semver@^7.7.1: + version "7.7.2" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.7.2.tgz#67d99fdcd35cec21e6f8b87a7fd515a33f982b58" + integrity sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA== + +sharp@^0.32.5: + version "0.32.6" + resolved "https://registry.yarnpkg.com/sharp/-/sharp-0.32.6.tgz#6ad30c0b7cd910df65d5f355f774aa4fce45732a" + integrity sha512-KyLTWwgcR9Oe4d9HwCwNM2l7+J0dUQwn/yf7S0EnTtb0eVS4RxO0eUSvxPtzT4F3SY+C4K6fqdv/DO27sJ/v/w== + dependencies: + color "^4.2.3" + detect-libc "^2.0.2" + node-addon-api "^6.1.0" + prebuild-install "^7.1.1" + semver "^7.5.4" + simple-get "^4.0.1" + tar-fs "^3.0.4" + tunnel-agent "^0.6.0" + +sharp@^0.33.3: + version "0.33.5" + resolved "https://registry.yarnpkg.com/sharp/-/sharp-0.33.5.tgz#13e0e4130cc309d6a9497596715240b2ec0c594e" + integrity sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw== + dependencies: + color "^4.2.3" + detect-libc "^2.0.3" + semver "^7.6.3" + optionalDependencies: + "@img/sharp-darwin-arm64" "0.33.5" + "@img/sharp-darwin-x64" "0.33.5" + "@img/sharp-libvips-darwin-arm64" "1.0.4" + "@img/sharp-libvips-darwin-x64" "1.0.4" + "@img/sharp-libvips-linux-arm" "1.0.5" + "@img/sharp-libvips-linux-arm64" "1.0.4" + "@img/sharp-libvips-linux-s390x" "1.0.4" + "@img/sharp-libvips-linux-x64" "1.0.4" + "@img/sharp-libvips-linuxmusl-arm64" "1.0.4" + "@img/sharp-libvips-linuxmusl-x64" "1.0.4" + "@img/sharp-linux-arm" "0.33.5" + "@img/sharp-linux-arm64" "0.33.5" + "@img/sharp-linux-s390x" "0.33.5" + "@img/sharp-linux-x64" "0.33.5" + "@img/sharp-linuxmusl-arm64" "0.33.5" + "@img/sharp-linuxmusl-x64" "0.33.5" + "@img/sharp-wasm32" "0.33.5" + "@img/sharp-win32-ia32" "0.33.5" + "@img/sharp-win32-x64" "0.33.5" + +shiki@^3.2.1, shiki@^3.2.2: + version "3.6.0" + resolved "https://registry.yarnpkg.com/shiki/-/shiki-3.6.0.tgz#217309a83345ca3e77da87a93fa1720bda1b8790" + integrity sha512-tKn/Y0MGBTffQoklaATXmTqDU02zx8NYBGQ+F6gy87/YjKbizcLd+Cybh/0ZtOBX9r1NEnAy/GTRDKtOsc1L9w== + dependencies: + "@shikijs/core" "3.6.0" + "@shikijs/engine-javascript" "3.6.0" + "@shikijs/engine-oniguruma" "3.6.0" + "@shikijs/langs" "3.6.0" + "@shikijs/themes" "3.6.0" + "@shikijs/types" "3.6.0" + "@shikijs/vscode-textmate" "^10.0.2" + "@types/hast" "^3.0.4" + +simple-concat@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/simple-concat/-/simple-concat-1.0.1.tgz#f46976082ba35c2263f1c8ab5edfe26c41c9552f" + integrity sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q== + +simple-get@^4.0.0, simple-get@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/simple-get/-/simple-get-4.0.1.tgz#4a39db549287c979d352112fa03fd99fd6bc3543" + integrity sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA== + dependencies: + decompress-response "^6.0.0" + once "^1.3.1" + simple-concat "^1.0.0" + +simple-swizzle@^0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/simple-swizzle/-/simple-swizzle-0.2.2.tgz#a4da6b635ffcccca33f70d17cb92592de95e557a" + integrity 
sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg== + dependencies: + is-arrayish "^0.3.1" + +sisteransi@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== + +sitemap@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/sitemap/-/sitemap-8.0.0.tgz#eb6ea48f95787cd680b83683c555d6f6b5a903fd" + integrity sha512-+AbdxhM9kJsHtruUF39bwS/B0Fytw6Fr1o4ZAIAEqA6cke2xcoO2GleBw9Zw7nRzILVEgz7zBM5GiTJjie1G9A== + dependencies: + "@types/node" "^17.0.5" + "@types/sax" "^1.2.1" + arg "^5.0.0" + sax "^1.2.4" + +smol-toml@^1.3.1: + version "1.3.4" + resolved "https://registry.yarnpkg.com/smol-toml/-/smol-toml-1.3.4.tgz#4ec76e0e709f586bc50ba30eb79024173c2b2221" + integrity sha512-UOPtVuYkzYGee0Bd2Szz8d2G3RfMfJ2t3qVdZUAozZyAk+a0Sxa+QKix0YCwjL/A1RR0ar44nCxaoN9FxdJGwA== + +source-map-js@^1.0.1, source-map-js@^1.2.0, source-map-js@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.1.tgz#1ce5650fddd87abc099eda37dcff024c2667ae46" + integrity sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA== + +source-map@^0.7.0, source-map@^0.7.4: + version "0.7.4" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656" + integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA== + +space-separated-tokens@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz#1ecd9d2350a3844572c3f4a312bceb018348859f" + integrity sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q== + +stream-replace-string@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/stream-replace-string/-/stream-replace-string-2.0.0.tgz#e49fd584bd1c633613e010bc73b9db49cb5024ad" + integrity sha512-TlnjJ1C0QrmxRNrON00JvaFFlNh5TTG00APw23j74ET7gkQpTASi6/L2fuiav8pzK715HXtUeClpBTw2NPSn6w== + +streamx@^2.15.0, streamx@^2.21.0: + version "2.22.1" + resolved "https://registry.yarnpkg.com/streamx/-/streamx-2.22.1.tgz#c97cbb0ce18da4f4db5a971dc9ab68ff5dc7f5a5" + integrity sha512-znKXEBxfatz2GBNK02kRnCXjV+AA4kjZIUxeWSr3UGirZMJfTE9uiwKHobnbgxWyL/JWro8tTq+vOqAK1/qbSA== + dependencies: + fast-fifo "^1.3.2" + text-decoder "^1.1.0" + optionalDependencies: + bare-events "^2.2.0" + +string-width@^4.1.0: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string-width@^7.0.0, string-width@^7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-7.2.0.tgz#b5bb8e2165ce275d4d43476dd2700ad9091db6dc" + integrity sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ== + dependencies: + emoji-regex "^10.3.0" + get-east-asian-width "^1.0.0" + strip-ansi "^7.1.0" + +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity 
sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +stringify-entities@^4.0.0: + version "4.0.4" + resolved "https://registry.yarnpkg.com/stringify-entities/-/stringify-entities-4.0.4.tgz#b3b79ef5f277cc4ac73caeb0236c5ba939b3a4f3" + integrity sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg== + dependencies: + character-entities-html4 "^2.0.0" + character-entities-legacy "^3.0.0" + +strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^7.1.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" + integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== + dependencies: + ansi-regex "^6.0.1" + +strip-json-comments@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" + integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ== + +style-to-js@^1.0.0: + version "1.1.16" + resolved "https://registry.yarnpkg.com/style-to-js/-/style-to-js-1.1.16.tgz#e6bd6cd29e250bcf8fa5e6591d07ced7575dbe7a" + integrity sha512-/Q6ld50hKYPH3d/r6nr117TZkHR0w0kGGIVfpG9N6D8NymRPM9RqCUv4pRpJ62E5DqOYx2AFpbZMyCPnjQCnOw== + dependencies: + style-to-object "1.0.8" + +style-to-object@1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/style-to-object/-/style-to-object-1.0.8.tgz#67a29bca47eaa587db18118d68f9d95955e81292" + integrity sha512-xT47I/Eo0rwJmaXC4oilDGDWLohVhR6o/xAQcPQN8q6QBuZVL8qMYL85kLmST5cPjAorwvqIA4qXTRQoYHaL6g== + dependencies: + inline-style-parser "0.2.4" + +tailwindcss@4.1.10, tailwindcss@^4.1.10: + version "4.1.10" + resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-4.1.10.tgz#515741b0a79316d1971d182f7fbc435b68679373" + integrity sha512-P3nr6WkvKV/ONsTzj6Gb57sWPMX29EPNPopo7+FcpkQaNsrNpZ1pv8QmrYI2RqEKD7mlGqLnGovlcYnBK0IqUA== + +tapable@^2.2.0: + version "2.2.2" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.2.tgz#ab4984340d30cb9989a490032f086dbb8b56d872" + integrity sha512-Re10+NauLTMCudc7T5WLFLAwDhQ0JWdrMK+9B2M8zR5hRExKmsRDCBA7/aV/pNJFltmBFO5BAMlQFi/vq3nKOg== + +tar-fs@^2.0.0: + version "2.1.3" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.3.tgz#fb3b8843a26b6f13a08e606f7922875eb1fbbf92" + integrity sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg== + dependencies: + chownr "^1.1.1" + mkdirp-classic "^0.5.2" + pump "^3.0.0" + tar-stream "^2.1.4" + +tar-fs@^3.0.4: + version "3.0.9" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-3.0.9.tgz#d570793c6370d7078926c41fa422891566a0b617" + integrity sha512-XF4w9Xp+ZQgifKakjZYmFdkLoSWd34VGKcsTCwlNWM7QG3ZbaxnTsaBwnjFZqHRf/rROxaR8rXnbtwdvaDI+lA== + dependencies: + pump "^3.0.0" + tar-stream "^3.1.5" + optionalDependencies: + bare-fs "^4.0.1" + bare-path "^3.0.0" + +tar-stream@^2.1.4: + version "2.2.0" + resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" + integrity 
sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== + dependencies: + bl "^4.0.3" + end-of-stream "^1.4.1" + fs-constants "^1.0.0" + inherits "^2.0.3" + readable-stream "^3.1.1" + +tar-stream@^3.1.5: + version "3.1.7" + resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-3.1.7.tgz#24b3fb5eabada19fe7338ed6d26e5f7c482e792b" + integrity sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ== + dependencies: + b4a "^1.6.4" + fast-fifo "^1.2.0" + streamx "^2.15.0" + +tar@^7.4.3: + version "7.4.3" + resolved "https://registry.yarnpkg.com/tar/-/tar-7.4.3.tgz#88bbe9286a3fcd900e94592cda7a22b192e80571" + integrity sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw== + dependencies: + "@isaacs/fs-minipass" "^4.0.0" + chownr "^3.0.0" + minipass "^7.1.2" + minizlib "^3.0.1" + mkdirp "^3.0.1" + yallist "^5.0.0" + +text-decoder@^1.1.0: + version "1.2.3" + resolved "https://registry.yarnpkg.com/text-decoder/-/text-decoder-1.2.3.tgz#b19da364d981b2326d5f43099c310cc80d770c65" + integrity sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA== + dependencies: + b4a "^1.6.4" + +tiny-inflate@^1.0.0, tiny-inflate@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/tiny-inflate/-/tiny-inflate-1.0.3.tgz#122715494913a1805166aaf7c93467933eea26c4" + integrity sha512-pkY1fj1cKHb2seWDy0B16HeWyczlJA9/WW3u3c4z/NiWDsO3DOU5D7nhTLE9CF0yXv/QZFY7sEJmj24dK+Rrqw== + +tinyexec@^0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/tinyexec/-/tinyexec-0.3.2.tgz#941794e657a85e496577995c6eef66f53f42b3d2" + integrity sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA== + +tinyglobby@^0.2.12, tinyglobby@^0.2.13: + version "0.2.14" + resolved "https://registry.yarnpkg.com/tinyglobby/-/tinyglobby-0.2.14.tgz#5280b0cf3f972b050e74ae88406c0a6a58f4079d" + integrity sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ== + dependencies: + fdir "^6.4.4" + picomatch "^4.0.2" + +tr46@~0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== + +trim-lines@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/trim-lines/-/trim-lines-3.0.1.tgz#d802e332a07df861c48802c04321017b1bd87338" + integrity sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg== + +trough@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/trough/-/trough-2.2.0.tgz#94a60bd6bd375c152c1df911a4b11d5b0256f50f" + integrity sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw== + +tsconfck@^3.1.5: + version "3.1.6" + resolved "https://registry.yarnpkg.com/tsconfck/-/tsconfck-3.1.6.tgz#da1f0b10d82237ac23422374b3fce1edb23c3ead" + integrity sha512-ks6Vjr/jEw0P1gmOVwutM3B7fWxoWBL2KRDb1JfqGVawBmO5UsvmWOQFGHBPl5yxYz4eERr19E6L7NMv+Fej4w== + +tslib@^2.4.0, tslib@^2.8.0: + version "2.8.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.8.1.tgz#612efe4ed235d567e8aba5f2a5fab70280ade83f" + integrity sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w== + +tunnel-agent@^0.6.0: + version "0.6.0" + resolved 
"https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" + integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w== + dependencies: + safe-buffer "^5.0.1" + +type-fest@^4.21.0: + version "4.41.0" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-4.41.0.tgz#6ae1c8e5731273c2bf1f58ad39cbae2c91a46c58" + integrity sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA== + +ufo@^1.5.4, ufo@^1.6.1: + version "1.6.1" + resolved "https://registry.yarnpkg.com/ufo/-/ufo-1.6.1.tgz#ac2db1d54614d1b22c1d603e3aef44a85d8f146b" + integrity sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA== + +ultrahtml@^1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/ultrahtml/-/ultrahtml-1.6.0.tgz#0d1aad7bbfeae512438d30e799c11622127a1ac8" + integrity sha512-R9fBn90VTJrqqLDwyMph+HGne8eqY1iPfYhPzZrvKpIfwkWZbcYlfpsb8B9dTvBfpy1/hqAD7Wi8EKfP9e8zdw== + +uncrypto@^0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/uncrypto/-/uncrypto-0.1.3.tgz#e1288d609226f2d02d8d69ee861fa20d8348ef2b" + integrity sha512-Ql87qFHB3s/De2ClA9e0gsnS6zXG27SkTiSJwjCc9MebbfapQfuPzumMIUMi38ezPZVNFcHI9sUIepeQfw8J8Q== + +undici-types@~7.8.0: + version "7.8.0" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-7.8.0.tgz#de00b85b710c54122e44fbfd911f8d70174cd294" + integrity sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw== + +unicode-properties@^1.4.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/unicode-properties/-/unicode-properties-1.4.1.tgz#96a9cffb7e619a0dc7368c28da27e05fc8f9be5f" + integrity sha512-CLjCCLQ6UuMxWnbIylkisbRj31qxHPAurvena/0iwSVbQ2G1VY5/HjV0IRabOEbDHlzZlRdCrD4NhB0JtU40Pg== + dependencies: + base64-js "^1.3.0" + unicode-trie "^2.0.0" + +unicode-trie@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-trie/-/unicode-trie-2.0.0.tgz#8fd8845696e2e14a8b67d78fa9e0dd2cad62fec8" + integrity sha512-x7bc76x0bm4prf1VLg79uhAzKw8DVboClSN5VxJuQ+LKDOVEW9CdH+VY7SP+vX7xCYQqzzgQpFqz15zeLvAtZQ== + dependencies: + pako "^0.2.5" + tiny-inflate "^1.0.0" + +unified@^11.0.0, unified@^11.0.4, unified@^11.0.5: + version "11.0.5" + resolved "https://registry.yarnpkg.com/unified/-/unified-11.0.5.tgz#f66677610a5c0a9ee90cab2b8d4d66037026d9e1" + integrity sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA== + dependencies: + "@types/unist" "^3.0.0" + bail "^2.0.0" + devlop "^1.0.0" + extend "^3.0.0" + is-plain-obj "^4.0.0" + trough "^2.0.0" + vfile "^6.0.0" + +unifont@~0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/unifont/-/unifont-0.5.0.tgz#1c995b64cf6748468a5fa8431183f6bf10f23204" + integrity sha512-4DueXMP5Hy4n607sh+vJ+rajoLu778aU3GzqeTCqsD/EaUcvqZT9wPC8kgK6Vjh22ZskrxyRCR71FwNOaYn6jA== + dependencies: + css-tree "^3.0.0" + ohash "^2.0.0" + +unist-util-find-after@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/unist-util-find-after/-/unist-util-find-after-5.0.0.tgz#3fccc1b086b56f34c8b798e1ff90b5c54468e896" + integrity sha512-amQa0Ep2m6hE2g72AugUItjbuM8X8cGQnFoHk0pGfrFeT9GZhzN5SW8nRsiGKK7Aif4CrACPENkA6P/Lw6fHGQ== + dependencies: + "@types/unist" "^3.0.0" + unist-util-is "^6.0.0" + +unist-util-is@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/unist-util-is/-/unist-util-is-6.0.0.tgz#b775956486aff107a9ded971d996c173374be424" + integrity 
sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw== + dependencies: + "@types/unist" "^3.0.0" + +unist-util-modify-children@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/unist-util-modify-children/-/unist-util-modify-children-4.0.0.tgz#981d6308e887b005d1f491811d3cbcc254b315e9" + integrity sha512-+tdN5fGNddvsQdIzUF3Xx82CU9sMM+fA0dLgR9vOmT0oPT2jH+P1nd5lSqfCfXAw+93NhcXNY2qqvTUtE4cQkw== + dependencies: + "@types/unist" "^3.0.0" + array-iterate "^2.0.0" + +unist-util-position-from-estree@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unist-util-position-from-estree/-/unist-util-position-from-estree-2.0.0.tgz#d94da4df596529d1faa3de506202f0c9a23f2200" + integrity sha512-KaFVRjoqLyF6YXCbVLNad/eS4+OfPQQn2yOd7zF/h5T/CSL2v8NpN6a5TPvtbXthAGw5nG+PuTtq+DdIZr+cRQ== + dependencies: + "@types/unist" "^3.0.0" + +unist-util-position@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/unist-util-position/-/unist-util-position-5.0.0.tgz#678f20ab5ca1207a97d7ea8a388373c9cf896be4" + integrity sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA== + dependencies: + "@types/unist" "^3.0.0" + +unist-util-remove-position@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/unist-util-remove-position/-/unist-util-remove-position-5.0.0.tgz#fea68a25658409c9460408bc6b4991b965b52163" + integrity sha512-Hp5Kh3wLxv0PHj9m2yZhhLt58KzPtEYKQQ4yxfYFEO7EvHwzyDYnduhHnY1mDxoqr7VUwVuHXk9RXKIiYS1N8Q== + dependencies: + "@types/unist" "^3.0.0" + unist-util-visit "^5.0.0" + +unist-util-stringify-position@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz#449c6e21a880e0855bf5aabadeb3a740314abac2" + integrity sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ== + dependencies: + "@types/unist" "^3.0.0" + +unist-util-visit-children@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/unist-util-visit-children/-/unist-util-visit-children-3.0.0.tgz#4bced199b71d7f3c397543ea6cc39e7a7f37dc7e" + integrity sha512-RgmdTfSBOg04sdPcpTSD1jzoNBjt9a80/ZCzp5cI9n1qPzLZWF9YdvWGN2zmTumP1HWhXKdUWexjy/Wy/lJ7tA== + dependencies: + "@types/unist" "^3.0.0" + +unist-util-visit-parents@^6.0.0, unist-util-visit-parents@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz#4d5f85755c3b8f0dc69e21eca5d6d82d22162815" + integrity sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw== + dependencies: + "@types/unist" "^3.0.0" + unist-util-is "^6.0.0" + +unist-util-visit@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/unist-util-visit/-/unist-util-visit-5.0.0.tgz#a7de1f31f72ffd3519ea71814cccf5fd6a9217d6" + integrity sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg== + dependencies: + "@types/unist" "^3.0.0" + unist-util-is "^6.0.0" + unist-util-visit-parents "^6.0.0" + +unstorage@^1.15.0: + version "1.16.0" + resolved "https://registry.yarnpkg.com/unstorage/-/unstorage-1.16.0.tgz#686e23d459532e0eccc32e15eb3b415d8f309431" + integrity sha512-WQ37/H5A7LcRPWfYOrDa1Ys02xAbpPJq6q5GkO88FBXVSQzHd7+BjEwfRqyaSWCv9MbsJy058GWjjPjcJ16GGA== + dependencies: + anymatch "^3.1.3" + chokidar "^4.0.3" + destr "^2.0.5" + h3 "^1.15.2" + lru-cache "^10.4.3" + node-fetch-native "^1.6.6" + ofetch "^1.4.1" + ufo "^1.6.1" + 
+util-deprecate@^1.0.1, util-deprecate@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== + +vfile-location@^5.0.0: + version "5.0.3" + resolved "https://registry.yarnpkg.com/vfile-location/-/vfile-location-5.0.3.tgz#cb9eacd20f2b6426d19451e0eafa3d0a846225c3" + integrity sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg== + dependencies: + "@types/unist" "^3.0.0" + vfile "^6.0.0" + +vfile-message@^4.0.0: + version "4.0.2" + resolved "https://registry.yarnpkg.com/vfile-message/-/vfile-message-4.0.2.tgz#c883c9f677c72c166362fd635f21fc165a7d1181" + integrity sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw== + dependencies: + "@types/unist" "^3.0.0" + unist-util-stringify-position "^4.0.0" + +vfile@^6.0.0, vfile@^6.0.2, vfile@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/vfile/-/vfile-6.0.3.tgz#3652ab1c496531852bf55a6bac57af981ebc38ab" + integrity sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q== + dependencies: + "@types/unist" "^3.0.0" + vfile-message "^4.0.0" + +vite@^6.3.4: + version "6.3.5" + resolved "https://registry.yarnpkg.com/vite/-/vite-6.3.5.tgz#fec73879013c9c0128c8d284504c6d19410d12a3" + integrity sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ== + dependencies: + esbuild "^0.25.0" + fdir "^6.4.4" + picomatch "^4.0.2" + postcss "^8.5.3" + rollup "^4.34.9" + tinyglobby "^0.2.13" + optionalDependencies: + fsevents "~2.3.3" + +vitefu@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/vitefu/-/vitefu-1.0.6.tgz#3d2534621ea95081e6fbf4c0d8db9a292357a41b" + integrity sha512-+Rex1GlappUyNN6UfwbVZne/9cYC4+R2XDk9xkNXBKMw6HQagdX9PgZ8V2v1WUSK1wfBLp7qbI1+XSNIlB1xmA== + +web-namespaces@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/web-namespaces/-/web-namespaces-2.0.1.tgz#1010ff7c650eccb2592cebeeaf9a1b253fd40692" + integrity sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ== + +webidl-conversions@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== + +whatwg-url@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== + dependencies: + tr46 "~0.0.3" + webidl-conversions "^3.0.0" + +which-pm-runs@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/which-pm-runs/-/which-pm-runs-1.1.0.tgz#35ccf7b1a0fce87bd8b92a478c9d045785d3bf35" + integrity sha512-n1brCuqClxfFfq/Rb0ICg9giSZqCS+pLtccdag6C2HyufBrh3fBOiy9nb6ggRMvWOVH5GrdJskj5iGTZNxd7SA== + +widest-line@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-5.0.0.tgz#b74826a1e480783345f0cd9061b49753c9da70d0" + integrity sha512-c9bZp7b5YtRj2wOe6dlj32MK+Bx/M/d+9VB2SHM1OtsUHR0aV0tdP6DWh/iMt0kWi1t5g1Iudu6hQRNd1A4PVA== + dependencies: + string-width "^7.0.0" + +wrap-ansi@^9.0.0: + version "9.0.0" + resolved 
"https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-9.0.0.tgz#1a3dc8b70d85eeb8398ddfb1e4a02cd186e58b3e" + integrity sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q== + dependencies: + ansi-styles "^6.2.1" + string-width "^7.0.0" + strip-ansi "^7.1.0" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +xxhash-wasm@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/xxhash-wasm/-/xxhash-wasm-1.1.0.tgz#ffe7f0b98220a4afac171e3fb9b6d1f8771f015e" + integrity sha512-147y/6YNh+tlp6nd/2pWq38i9h6mz/EuQ6njIrmW8D1BS5nCqs0P6DG+m6zTGnNz5I+uhZ0SHxBs9BsPrwcKDA== + +yallist@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-5.0.0.tgz#00e2de443639ed0d78fd87de0d27469fbcffb533" + integrity sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw== + +yargs-parser@^21.1.1: + version "21.1.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== + +yocto-queue@^1.1.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-1.2.1.tgz#36d7c4739f775b3cbc28e6136e21aa057adec418" + integrity sha512-AyeEbWOu/TAXdxlV9wmGcR0+yh2j3vYPGOECcIj2S7MkrLyC7ne+oye2BKTItt0ii2PHk4cDy+95+LshzbXnGg== + +yocto-spinner@^0.2.1: + version "0.2.3" + resolved "https://registry.yarnpkg.com/yocto-spinner/-/yocto-spinner-0.2.3.tgz#e803d2f267c7f0c3188645878522066764263a13" + integrity sha512-sqBChb33loEnkoXte1bLg45bEBsOP9N1kzQh5JZNKj/0rik4zAPTNSAVPj3uQAdc6slYJ0Ksc403G2XgxsJQFQ== + dependencies: + yoctocolors "^2.1.1" + +yoctocolors@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/yoctocolors/-/yoctocolors-2.1.1.tgz#e0167474e9fbb9e8b3ecca738deaa61dd12e56fc" + integrity sha512-GQHQqAopRhwU8Kt1DDM8NjibDXHC8eoh1erhGAJPEyveY9qqVeXvVikNKrDz69sHowPMorbPUrH/mx8c50eiBQ== + +zod-to-json-schema@^3.24.5: + version "3.24.5" + resolved "https://registry.yarnpkg.com/zod-to-json-schema/-/zod-to-json-schema-3.24.5.tgz#d1095440b147fb7c2093812a53c54df8d5df50a3" + integrity sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g== + +zod-to-ts@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/zod-to-ts/-/zod-to-ts-1.2.0.tgz#873a2fd8242d7b649237be97e0c64d7954ae0c51" + integrity sha512-x30XE43V+InwGpvTySRNz9kB7qFU8DlyEy7BsSTCHPH1R0QasMmHWZDCzYm6bVXtj/9NNJAZF3jW8rzFvH5OFA== + +zod@^3.24.2: + version "3.25.62" + resolved "https://registry.yarnpkg.com/zod/-/zod-3.25.62.tgz#e959ff299b9e7a4f92d800bf476243ca13a7f6c5" + integrity sha512-YCxsr4DmhPcrKPC9R1oBHQNlQzlJEyPAId//qTau/vBee9uO8K6prmRq4eMkOyxvBfH4wDPIPdLx9HVMWIY3xA== + +zwitch@^2.0.0, zwitch@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/zwitch/-/zwitch-2.0.4.tgz#c827d4b0acb76fc3e685a4c6ec2902d51070e9d7" + integrity sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A== diff --git a/engine/engine.go b/engine/engine.go new file mode 100644 index 0000000..a7133b9 --- /dev/null +++ b/engine/engine.go @@ -0,0 +1,225 @@ +package engine + +import ( + "context" + "fmt" + "io" + "net/http" + "os" + "os/signal" + "path" + "strings" + "sync/atomic" + "syscall" + 
"time" + + "github.com/gabehf/koito/engine/middleware" + "github.com/gabehf/koito/internal/catalog" + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/db/psql" + "github.com/gabehf/koito/internal/images" + "github.com/gabehf/koito/internal/importer" + "github.com/gabehf/koito/internal/logger" + mbz "github.com/gabehf/koito/internal/mbz" + "github.com/gabehf/koito/internal/models" + "github.com/gabehf/koito/internal/utils" + "github.com/go-chi/chi/v5" + chimiddleware "github.com/go-chi/chi/v5/middleware" + "github.com/rs/zerolog" +) + +const Version = "dev" + +func Run( + getenv func(string) string, + w io.Writer, +) error { + err := cfg.Load(getenv) + if err != nil { + return fmt.Errorf("failed to load configuration: %v", err) + } + l := logger.Get() + if cfg.StructuredLogging() { + *l = l.Output(w) + } else { + *l = l.Output(zerolog.ConsoleWriter{ + Out: w, + TimeFormat: time.RFC3339, + // FormatLevel: func(i interface{}) string { + // return strings.ToUpper(fmt.Sprintf("[%s]", i)) + // }, + FormatMessage: func(i interface{}) string { + return fmt.Sprintf("\u001b[30;1m>\u001b[0m %s |", i) + }, + }) + } + + ctx := logger.NewContext(l) + + l.Info().Msgf("Koito %s", Version) + + _, err = os.Stat(cfg.ConfigDir()) + if err != nil { + l.Info().Msgf("Creating config dir: %s", cfg.ConfigDir()) + err = os.MkdirAll(cfg.ConfigDir(), 0744) + if err != nil { + l.Error().Err(err).Msg("Failed to create config directory") + return err + } + } + l.Info().Msgf("Using config dir: %s", cfg.ConfigDir()) + _, err = os.Stat(path.Join(cfg.ConfigDir(), "import")) + if err != nil { + l.Debug().Msgf("Creating import dir: %s", path.Join(cfg.ConfigDir(), "import")) + err = os.Mkdir(path.Join(cfg.ConfigDir(), "import"), 0744) + if err != nil { + l.Error().Err(err).Msg("Failed to create import directory") + return err + } + } + + var store *psql.Psql + store, err = psql.New() + for err != nil { + l.Error().Err(err).Msg("Failed to connect to database; retrying in 5 seconds") + time.Sleep(5 * time.Second) + store, err = psql.New() + } + defer store.Close(ctx) + + var mbzC mbz.MusicBrainzCaller + if !cfg.MusicBrainzDisabled() { + mbzC = mbz.NewMusicBrainzClient() + } else { + mbzC = &mbz.MbzErrorCaller{} + } + + images.Initialize(images.ImageSourceOpts{ + UserAgent: "Koito v0.0.1 (contact@koito.app)", + EnableCAA: !cfg.CoverArtArchiveDisabled(), + EnableDeezer: !cfg.DeezerDisabled(), + }) + + userCount, _ := store.CountUsers(ctx) + if userCount < 1 { + l.Debug().Msg("Creating default user...") + user, err := store.SaveUser(ctx, db.SaveUserOpts{ + Username: cfg.DefaultUsername(), + Password: cfg.DefaultPassword(), + Role: models.UserRoleAdmin, + }) + if err != nil { + l.Fatal().AnErr("error", err).Msg("Failed to save default user in database") + } + apikey, err := utils.GenerateRandomString(48) + if err != nil { + l.Fatal().AnErr("error", err).Msg("Failed to generate default api key") + } + label := "Default" + _, err = store.SaveApiKey(ctx, db.SaveApiKeyOpts{ + Key: apikey, + UserID: user.ID, + Label: label, + }) + if err != nil { + l.Fatal().AnErr("error", err).Msg("Failed to save default api key in database") + } + l.Info().Msgf("Default user has been created. Login: %s : %s", cfg.DefaultUsername(), cfg.DefaultPassword()) + } + + if cfg.AllowAllHosts() { + l.Warn().Msg("Your configuration allows requests from all hosts. 
This is a potential security risk!") + } else if len(cfg.AllowedHosts()) == 0 || cfg.AllowedHosts()[0] == "" { + l.Warn().Msgf("You are currently not allowing any hosts! Did you forget to set the %s variable?", cfg.ALLOWED_HOSTS_ENV) + } else { + l.Debug().Msgf("Allowing hosts: %v", cfg.AllowedHosts()) + } + + var ready atomic.Bool + + mux := chi.NewRouter() + // bind general middleware to mux + mux.Use(middleware.WithRequestID) + mux.Use(middleware.Logger(l)) + mux.Use(chimiddleware.Recoverer) + mux.Use(chimiddleware.RealIP) + // call router binds on mux + bindRoutes(mux, &ready, store, mbzC) + + httpServer := &http.Server{ + Addr: cfg.ListenAddr(), + Handler: mux, + } + + go func() { + ready.Store(true) // signal readiness + l.Info().Msg("listening on " + cfg.ListenAddr()) + if err := httpServer.ListenAndServe(); err != nil && err != http.ErrServerClosed { + l.Fatal().AnErr("error", err).Msg("Error when running ListenAndServe") + } + }() + + // Import + if !cfg.SkipImport() { + go func() { + RunImporter(l, store) + }() + } + + l.Info().Msg("Pruning orphaned images...") + go catalog.PruneOrphanedImages(logger.NewContext(l), store) + // Wait for interrupt signal to gracefully shutdown the server with a timeout of 10 seconds. + // Use a buffered channel to avoid missing signals as recommended for signal.Notify + quit := make(chan os.Signal, 1) + signal.Notify(quit, os.Interrupt, syscall.SIGINT, syscall.SIGTERM) + <-quit + l.Info().Msg("Received server shutdown notice") + ctx, cancel := context.WithTimeout(ctx, 10*time.Second) + defer cancel() + l.Info().Msg("waiting for all processes to finish...") + mbzC.Shutdown() + if err := httpServer.Shutdown(ctx); err != nil { + return err + } + l.Info().Msg("shutdown successful") + return nil +} + +func RunImporter(l *zerolog.Logger, store db.DB) { + l.Debug().Msg("Checking for import files...") + files, err := os.ReadDir(path.Join(cfg.ConfigDir(), "import")) + if err != nil { + l.Err(err).Msg("Failed to read files from import dir") + } + if len(files) > 0 { + l.Info().Msg("Files found in import directory. 
Attempting to import...") + } else { + return + } + defer func() { + if r := recover(); r != nil { + l.Error().Interface("recover", r).Msg("Panic when importing files") + } + }() + for _, file := range files { + if file.IsDir() { + continue + } + if strings.Contains(file.Name(), "Streaming_History_Audio") { + l.Info().Msgf("Import file %s detecting as being Spotify export", file.Name()) + err := importer.ImportSpotifyFile(logger.NewContext(l), store, file.Name()) + if err != nil { + l.Err(err).Msgf("Failed to import file: %s", file.Name()) + } + } else if strings.Contains(file.Name(), "maloja") { + l.Info().Msgf("Import file %s detecting as being Maloja export", file.Name()) + err := importer.ImportMalojaFile(logger.NewContext(l), store, file.Name()) + if err != nil { + l.Err(err).Msgf("Failed to import file: %s", file.Name()) + } + } else { + l.Warn().Msgf("File %s not recognized as a valid import file; make sure it is valid and named correctly", file.Name()) + } + } +} diff --git a/engine/engine_test.go b/engine/engine_test.go new file mode 100644 index 0000000..c18061c --- /dev/null +++ b/engine/engine_test.go @@ -0,0 +1,144 @@ +package engine_test + +import ( + "context" + "fmt" + "log" + "net" + "net/http" + "os" + "strconv" + "testing" + "time" + + "github.com/gabehf/koito/engine" + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/db/psql" + "github.com/gabehf/koito/internal/utils" + "github.com/ory/dockertest/v3" +) + +var store *psql.Psql + +func getTestGetenv(resource *dockertest.Resource) func(string) string { + dir, err := utils.GenerateRandomString(8) + if err != nil { + panic(err) + } + + listener, err := net.Listen("tcp", ":0") + if err != nil { + panic(fmt.Errorf("failed to get an open port: %w", err)) + } + defer listener.Close() + + port := strconv.Itoa(listener.Addr().(*net.TCPAddr).Port) + + return func(env string) string { + switch env { + case cfg.ENABLE_STRUCTURED_LOGGING_ENV: + return "true" + case cfg.LOG_LEVEL_ENV: + return "debug" + case cfg.DATABASE_URL_ENV: + return fmt.Sprintf("postgres://postgres:secret@localhost:%s", resource.GetPort("5432/tcp")) + case cfg.DEFAULT_PASSWORD_ENV: + return "testuser123" + case cfg.DEFAULT_USERNAME_ENV: + return "test" + case cfg.CONFIG_DIR_ENV: + return dir + case cfg.LISTEN_PORT_ENV: + return port + case cfg.ALLOWED_HOSTS_ENV: + return "*" + case cfg.DISABLE_DEEZER_ENV, cfg.DISABLE_COVER_ART_ARCHIVE_ENV, cfg.DISABLE_MUSICBRAINZ_ENV, cfg.SKIP_IMPORT_ENV: + return "true" + default: + return "" + } + } +} + +func TestMain(m *testing.M) { + // uses a sensible default on windows (tcp/http) and linux/osx (socket) + pool, err := dockertest.NewPool("") + if err != nil { + log.Fatalf("Could not construct pool: %s", err) + } + + // uses pool to try to connect to Docker + err = pool.Client.Ping() + if err != nil { + log.Fatalf("Could not connect to Docker: %s", err) + } + + // pulls an image, creates a container based on it and runs it + resource, err := pool.Run("postgres", "latest", []string{"POSTGRES_PASSWORD=secret"}) + if err != nil { + log.Fatalf("Could not start resource: %s", err) + } + + getenv := getTestGetenv(resource) + err = cfg.Load(getenv) + if err != nil { + log.Fatalf("Could not load cfg: %s", err) + } + + // exponential backoff-retry, because the application in the container might not be ready to accept connections yet + if err := pool.Retry(func() error { + var err error + store, err = psql.New() + if err != nil { + log.Println("Failed to connect to test database, retrying...") + return err + 
} + return store.Ping(context.Background()) + }); err != nil { + log.Fatalf("Could not connect to database: %s", err) + } + + go engine.Run(getenv, os.Stdout) + + // Wait until the web server is reachable + for i := 0; i < 20; i++ { + url := fmt.Sprintf("http://%s/apis/web/v1/health", cfg.ListenAddr()) + client := &http.Client{ + Timeout: 2 * time.Second, // Set your desired timeout + } + resp, err := client.Get(url) + if err != nil { + if i >= 19 { + log.Fatalf("Web server is not reachable: %s", err) + } + log.Printf("Failed to connect to web server at %s, retrying... (%d/20)", url, i+1) + time.Sleep(1 * time.Second) + continue + } + defer resp.Body.Close() + if resp.StatusCode == http.StatusOK { + err = nil + break + } + log.Printf("Unexpected status code at %s, retrying... (%d/20)", url, i+1) + time.Sleep(1 * time.Second) + } + + code := m.Run() + + // You can't defer this because os.Exit doesn't care for defer + if err := pool.Purge(resource); err != nil { + log.Fatalf("Could not purge resource: %s", err) + } + + err = os.RemoveAll(cfg.ConfigDir()) + if err != nil { + log.Fatalf("Could not remove temporary config dir: %v", err) + } + + os.Exit(code) +} + +func host() string { + return fmt.Sprintf("http://%s", cfg.ListenAddr()) +} diff --git a/engine/handlers/alias.go b/engine/handlers/alias.go new file mode 100644 index 0000000..7637571 --- /dev/null +++ b/engine/handlers/alias.go @@ -0,0 +1,270 @@ +package handlers + +import ( + "net/http" + "strconv" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/models" + "github.com/gabehf/koito/internal/utils" +) + +// GetAliasesHandler retrieves all aliases for a given artist or album ID. +func GetAliasesHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + + // Parse query parameters + artistIDStr := r.URL.Query().Get("artist_id") + albumIDStr := r.URL.Query().Get("album_id") + trackIDStr := r.URL.Query().Get("track_id") + + if artistIDStr == "" && albumIDStr == "" && trackIDStr == "" { + utils.WriteError(w, "artist_id, album_id, or track_id must be provided", http.StatusBadRequest) + return + } + + var aliases []models.Alias + + if artistIDStr != "" { + artistID, err := strconv.Atoi(artistIDStr) + if err != nil { + utils.WriteError(w, "invalid artist_id", http.StatusBadRequest) + return + } + aliases, err = store.GetAllArtistAliases(ctx, int32(artistID)) + if err != nil { + l.Err(err).Msg("Failed to get artist aliases") + utils.WriteError(w, "failed to retrieve aliases", http.StatusInternalServerError) + return + } + } else if albumIDStr != "" { + albumID, err := strconv.Atoi(albumIDStr) + if err != nil { + utils.WriteError(w, "invalid album_id", http.StatusBadRequest) + return + } + aliases, err = store.GetAllAlbumAliases(ctx, int32(albumID)) + if err != nil { + l.Err(err).Msg("Failed to get artist aliases") + utils.WriteError(w, "failed to retrieve aliases", http.StatusInternalServerError) + return + } + } else if trackIDStr != "" { + trackID, err := strconv.Atoi(trackIDStr) + if err != nil { + utils.WriteError(w, "invalid track_id", http.StatusBadRequest) + return + } + aliases, err = store.GetAllTrackAliases(ctx, int32(trackID)) + if err != nil { + l.Err(err).Msg("Failed to get artist aliases") + utils.WriteError(w, "failed to retrieve aliases", http.StatusInternalServerError) + return + } + } + + utils.WriteJSON(w, http.StatusOK, aliases) + } +} + +// DeleteAliasHandler 
deletes an alias for a given artist, album, or track ID. +func DeleteAliasHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + + // Parse query parameters + artistIDStr := r.URL.Query().Get("artist_id") + albumIDStr := r.URL.Query().Get("album_id") + trackIDStr := r.URL.Query().Get("track_id") + alias := r.URL.Query().Get("alias") + + if alias == "" || (artistIDStr == "" && albumIDStr == "" && trackIDStr == "") { + utils.WriteError(w, "alias and artist_id, album_id, or track_id must be provided", http.StatusBadRequest) + return + } + if utils.MoreThanOneString(artistIDStr, albumIDStr, trackIDStr) { + utils.WriteError(w, "only one of artist_id, album_id, or track_id can be provided at a time", http.StatusBadRequest) + return + } + + if artistIDStr != "" { + artistID, err := strconv.Atoi(artistIDStr) + if err != nil { + utils.WriteError(w, "invalid artist_id", http.StatusBadRequest) + return + } + err = store.DeleteArtistAlias(ctx, int32(artistID), alias) + if err != nil { + l.Err(err).Msg("Failed to delete alias") + utils.WriteError(w, "failed to delete alias", http.StatusInternalServerError) + return + } + } else if albumIDStr != "" { + albumID, err := strconv.Atoi(albumIDStr) + if err != nil { + utils.WriteError(w, "invalid album_id", http.StatusBadRequest) + return + } + err = store.DeleteAlbumAlias(ctx, int32(albumID), alias) + if err != nil { + l.Err(err).Msg("Failed to delete alias") + utils.WriteError(w, "failed to delete alias", http.StatusInternalServerError) + return + } + } else if trackIDStr != "" { + trackID, err := strconv.Atoi(trackIDStr) + if err != nil { + utils.WriteError(w, "invalid track_id", http.StatusBadRequest) + return + } + err = store.DeleteTrackAlias(ctx, int32(trackID), alias) + if err != nil { + l.Err(err).Msg("Failed to delete alias") + utils.WriteError(w, "failed to delete alias", http.StatusInternalServerError) + return + } + } + + w.WriteHeader(http.StatusNoContent) + } +} + +// CreateAliasHandler creates new aliases for a given artist, album, or track.
+func CreateAliasHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + + err := r.ParseForm() + if err != nil { + utils.WriteError(w, "invalid request body", http.StatusBadRequest) + return + } + + artistIDStr := r.URL.Query().Get("artist_id") + albumIDStr := r.URL.Query().Get("album_id") + trackIDStr := r.URL.Query().Get("track_id") + + if artistIDStr == "" && albumIDStr == "" && trackIDStr == "" { + utils.WriteError(w, "artist_id, album_id, or track_id must be provided", http.StatusBadRequest) + return + } + if utils.MoreThanOneString(artistIDStr, albumIDStr, trackIDStr) { + utils.WriteError(w, "only one of artist_id, album_id, or track_id can be provided at a time", http.StatusBadRequest) + return + } + + alias := r.FormValue("alias") + if alias == "" { + utils.WriteError(w, "alias must be provided", http.StatusBadRequest) + return + } + + if artistIDStr != "" { + artistID, err := strconv.Atoi(artistIDStr) + if err != nil { + utils.WriteError(w, "invalid artist_id", http.StatusBadRequest) + return + } + err = store.SaveArtistAliases(ctx, int32(artistID), []string{alias}, "Manual") + if err != nil { + l.Err(err).Msg("Failed to save alias") + utils.WriteError(w, "failed to save alias", http.StatusInternalServerError) + return + } + } else if albumIDStr != "" { + albumID, err := strconv.Atoi(albumIDStr) + if err != nil { + utils.WriteError(w, "invalid album_id", http.StatusBadRequest) + return + } + err = store.SaveAlbumAliases(ctx, int32(albumID), []string{alias}, "Manual") + if err != nil { + l.Err(err).Msg("Failed to save alias") + utils.WriteError(w, "failed to save alias", http.StatusInternalServerError) + return + } + } else if trackIDStr != "" { + trackID, err := strconv.Atoi(trackIDStr) + if err != nil { + utils.WriteError(w, "invalid track_id", http.StatusBadRequest) + return + } + err = store.SaveTrackAliases(ctx, int32(trackID), []string{alias}, "Manual") + if err != nil { + l.Err(err).Msg("Failed to save alias") + utils.WriteError(w, "failed to save alias", http.StatusInternalServerError) + return + } + } + + w.WriteHeader(http.StatusCreated) + } +} + +// sets the primary alias for albums, artists, and tracks +func SetPrimaryAliasHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + + // Parse query parameters + artistIDStr := r.URL.Query().Get("artist_id") + albumIDStr := r.URL.Query().Get("album_id") + trackIDStr := r.URL.Query().Get("track_id") + alias := r.URL.Query().Get("alias") + + if alias == "" || (artistIDStr == "" && albumIDStr == "" && trackIDStr == "") { + utils.WriteError(w, "alias and artist_id, album_id, or track_id must be provided", http.StatusBadRequest) + return + } + if utils.MoreThanOneString(artistIDStr, albumIDStr, trackIDStr) { + utils.WriteError(w, "only one of artist_id, album_id, or track_id can be provided", http.StatusBadRequest) + return + } + + if artistIDStr != "" { + artistID, err := strconv.Atoi(artistIDStr) + if err != nil { + utils.WriteError(w, "invalid artist_id", http.StatusBadRequest) + return + } + err = store.SetPrimaryArtistAlias(ctx, int32(artistID), alias) + if err != nil { + l.Err(err).Msg("Failed to set primary alias") + utils.WriteError(w, "failed to set primary alias", http.StatusInternalServerError) + return + } + } else if albumIDStr != "" { + albumID, err := strconv.Atoi(albumIDStr) + if err != nil { + utils.WriteError(w, "invalid 
album_id", http.StatusBadRequest) + return + } + err = store.SetPrimaryAlbumAlias(ctx, int32(albumID), alias) + if err != nil { + l.Err(err).Msg("Failed to set primary alias") + utils.WriteError(w, "failed to set primary alias", http.StatusInternalServerError) + return + } + } else if trackIDStr != "" { + trackID, err := strconv.Atoi(trackIDStr) + if err != nil { + utils.WriteError(w, "invalid track_id", http.StatusBadRequest) + return + } + err = store.SetPrimaryTrackAlias(ctx, int32(trackID), alias) + if err != nil { + l.Err(err).Msg("Failed to set primary alias") + utils.WriteError(w, "failed to set primary alias", http.StatusInternalServerError) + return + } + } + + w.WriteHeader(http.StatusNoContent) + } +} diff --git a/engine/handlers/apikeys.go b/engine/handlers/apikeys.go new file mode 100644 index 0000000..9c61346 --- /dev/null +++ b/engine/handlers/apikeys.go @@ -0,0 +1,153 @@ +package handlers + +import ( + "net/http" + "strconv" + + "github.com/gabehf/koito/engine/middleware" + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" +) + +func GenerateApiKeyHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + + user := middleware.GetUserFromContext(ctx) + if user == nil { + utils.WriteError(w, "unauthorized", http.StatusUnauthorized) + return + } + r.ParseForm() + label := r.FormValue("label") + if label == "" { + utils.WriteError(w, "label is required", http.StatusBadRequest) + return + } + + apiKey, err := utils.GenerateRandomString(48) + if err != nil { + l.Err(err).Msg("Failed to generate API key") + utils.WriteError(w, "failed to generate api key", http.StatusInternalServerError) + return + } + opts := db.SaveApiKeyOpts{ + UserID: user.ID, + Key: apiKey, + Label: label, + } + l.Debug().Any("opts", opts).Send() + key, err := store.SaveApiKey(ctx, opts) + if err != nil { + l.Err(err).Msg("Failed to save API key") + utils.WriteError(w, "failed to save api key", http.StatusInternalServerError) + return + } + utils.WriteJSON(w, 201, key) + } +} + +func DeleteApiKeyHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + + user := middleware.GetUserFromContext(ctx) + if user == nil { + utils.WriteError(w, "unauthorized", http.StatusUnauthorized) + return + } + + idStr := r.URL.Query().Get("id") + if idStr == "" { + utils.WriteError(w, "id is required", http.StatusBadRequest) + return + } + apiKey, err := strconv.Atoi(idStr) + if err != nil { + utils.WriteError(w, "id is invalid", http.StatusBadRequest) + return + } + + err = store.DeleteApiKey(ctx, int32(apiKey)) + if err != nil { + l.Err(err).Msg("Failed to delete API key") + utils.WriteError(w, "failed to delete api key", http.StatusInternalServerError) + return + } + + w.WriteHeader(http.StatusNoContent) + } +} + +func GetApiKeysHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + + l.Debug().Msgf("Retrieving user from middleware...") + + user := middleware.GetUserFromContext(ctx) + if user == nil { + l.Debug().Msgf("Could not retrieve user from middleware") + utils.WriteError(w, "unauthorized", http.StatusUnauthorized) + return + } + + l.Debug().Msgf("Retrieved user '%s' from middleware", user.Username) + + apiKeys, err := store.GetApiKeysByUserID(ctx, user.ID) + if err != 
nil { + l.Err(err).Msg("Failed to retrieve API keys") + utils.WriteError(w, "failed to retrieve api keys", http.StatusInternalServerError) + return + } + + utils.WriteJSON(w, http.StatusOK, apiKeys) + } +} + +func UpdateApiKeyLabelHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + + user := middleware.GetUserFromContext(ctx) + if user == nil { + utils.WriteError(w, "unauthorized", http.StatusUnauthorized) + return + } + + idStr := r.URL.Query().Get("id") + if idStr == "" { + utils.WriteError(w, "id is required", http.StatusBadRequest) + return + } + apiKeyID, err := strconv.Atoi(idStr) + if err != nil { + utils.WriteError(w, "id is invalid", http.StatusBadRequest) + return + } + + label := r.FormValue("label") + if label == "" { + utils.WriteError(w, "label is required", http.StatusBadRequest) + return + } + + err = store.UpdateApiKeyLabel(ctx, db.UpdateApiKeyLabelOpts{ + UserID: user.ID, + ID: int32(apiKeyID), + Label: label, + }) + if err != nil { + l.Err(err).Msg("Failed to update API key label") + utils.WriteError(w, "failed to update api key label", http.StatusInternalServerError) + return + } + + w.WriteHeader(http.StatusOK) + } +} diff --git a/engine/handlers/auth.go b/engine/handlers/auth.go new file mode 100644 index 0000000..e4cc5cc --- /dev/null +++ b/engine/handlers/auth.go @@ -0,0 +1,149 @@ +package handlers + +import ( + "net/http" + "strings" + "time" + + "github.com/gabehf/koito/engine/middleware" + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" + "github.com/google/uuid" + "golang.org/x/crypto/bcrypt" +) + +func LoginHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + l := logger.FromContext(r.Context()) + ctx := r.Context() + l.Debug().Msg("Recieved login request") + + r.ParseForm() + username := r.FormValue("username") + password := r.FormValue("password") + if username == "" || password == "" { + utils.WriteError(w, "username and password are required", http.StatusBadRequest) + return + } + + user, err := store.GetUserByUsername(ctx, username) + if err != nil { + l.Err(err).Msg("Error searching for user in database") + utils.WriteError(w, "internal server error", http.StatusInternalServerError) + return + } else if user == nil { + utils.WriteError(w, "username or password is incorrect", http.StatusBadRequest) + return + } + + err = bcrypt.CompareHashAndPassword(user.Password, []byte(password)) + if err != nil { + utils.WriteError(w, "username or password is incorrect", http.StatusBadRequest) + return + } + + keepSignedIn := false + expiresAt := time.Now().Add(1 * 24 * time.Hour) + if strings.ToLower(r.FormValue("remember_me")) == "true" { + keepSignedIn = true + expiresAt = time.Now().Add(30 * 24 * time.Hour) + } + + session, err := store.SaveSession(ctx, user.ID, expiresAt, keepSignedIn) + if err != nil { + l.Err(err).Msg("Failed to create session") + utils.WriteError(w, "failed to create session", http.StatusInternalServerError) + return + } + + cookie := &http.Cookie{ + Name: "koito_session", + Value: session.ID.String(), + Path: "/", + HttpOnly: true, + Secure: false, + } + + if keepSignedIn { + cookie.Expires = expiresAt + } + + http.SetCookie(w, cookie) + w.WriteHeader(http.StatusNoContent) + } +} + +func LogoutHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + l := logger.FromContext(r.Context()) + 
cookie, err := r.Cookie("koito_session") + if err == nil { + sid, err := uuid.Parse(cookie.Value) + if err != nil { + utils.WriteError(w, "session cookie is invalid", http.StatusUnauthorized) + return + } + err = store.DeleteSession(r.Context(), sid) + if err != nil { + l.Err(err).Msg("Failed to delete session") + utils.WriteError(w, "internal server error", http.StatusInternalServerError) + return + } + } + + // Clear the cookie + http.SetCookie(w, &http.Cookie{ + Name: "koito_session", + Value: "", + Path: "/", + HttpOnly: true, + MaxAge: -1, // expire immediately + }) + + w.WriteHeader(http.StatusNoContent) + } +} + +func MeHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + u := middleware.GetUserFromContext(ctx) + if u == nil { + l.Debug().Msg("Invalid user retrieved from context") + } + utils.WriteJSON(w, 200, u) + } +} + +func UpdateUserHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + + u := middleware.GetUserFromContext(ctx) + if u == nil { + utils.WriteError(w, "unauthorized", http.StatusUnauthorized) + return + } + + r.ParseForm() + username := r.FormValue("username") + password := r.FormValue("password") + + l.Debug().Msgf("Recieved update request for user with id %d", u.ID) + + err := store.UpdateUser(ctx, db.UpdateUserOpts{ + ID: u.ID, + Username: username, + Password: password, + }) + if err != nil { + l.Err(err).Msg("Failed to update user") + utils.WriteError(w, err.Error(), http.StatusBadRequest) + return + } + w.WriteHeader(http.StatusNoContent) + } +} diff --git a/engine/handlers/delete.go b/engine/handlers/delete.go new file mode 100644 index 0000000..1b99325 --- /dev/null +++ b/engine/handlers/delete.go @@ -0,0 +1,137 @@ +package handlers + +import ( + "net/http" + "strconv" + "time" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" +) + +// DeleteTrackHandler deletes a track by its ID. +func DeleteTrackHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + + trackIDStr := r.URL.Query().Get("id") + if trackIDStr == "" { + utils.WriteError(w, "track_id must be provided", http.StatusBadRequest) + return + } + + trackID, err := strconv.Atoi(trackIDStr) + if err != nil { + utils.WriteError(w, "invalid id", http.StatusBadRequest) + return + } + + err = store.DeleteTrack(ctx, int32(trackID)) + if err != nil { + l.Err(err).Msg("Failed to delete track") + utils.WriteError(w, "failed to delete track", http.StatusInternalServerError) + return + } + + w.WriteHeader(http.StatusNoContent) + } +} + +// DeleteTrackHandler deletes a track by its ID. 
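LoginHandler accepts a form-encoded username and password (the integration tests use cfg.DefaultUsername and cfg.DefaultPassword), plus an optional remember_me flag that stretches the session from one day to thirty. On success it replies 204 and sets the koito_session cookie that authenticates later web-API calls. A minimal client sketch, assuming a running instance; the base address and credentials are placeholders:

package main

import (
	"fmt"
	"log"
	"net/http"
	"net/url"
	"strings"
)

func main() {
	base := "http://localhost:4110" // placeholder address

	// Form fields expected by LoginHandler; remember_me is optional.
	form := url.Values{
		"username":    {"admin"},  // placeholder credentials
		"password":    {"secret"}, // placeholder credentials
		"remember_me": {"true"},
	}
	resp, err := http.Post(base+"/apis/web/v1/login",
		"application/x-www-form-urlencoded", strings.NewReader(form.Encode()))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// On success the server responds 204 and sets koito_session; reuse that
	// cookie on subsequent requests.
	for _, c := range resp.Cookies() {
		if c.Name == "koito_session" {
			fmt.Println("session id:", c.Value)
		}
	}
}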
+func DeleteListenHandler(store db.DB) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		ctx := r.Context()
+		l := logger.FromContext(ctx)
+
+		trackIDStr := r.URL.Query().Get("track_id")
+		if trackIDStr == "" {
+			utils.WriteError(w, "track_id must be provided", http.StatusBadRequest)
+			return
+		}
+		trackID, err := strconv.Atoi(trackIDStr)
+		if err != nil {
+			utils.WriteError(w, "invalid track_id", http.StatusBadRequest)
+			return
+		}
+
+		unixStr := r.URL.Query().Get("unix")
+		if unixStr == "" {
+			utils.WriteError(w, "unix timestamp must be provided", http.StatusBadRequest)
+			return
+		}
+		unix, err := strconv.ParseInt(unixStr, 10, 64)
+		if err != nil {
+			utils.WriteError(w, "invalid unix timestamp", http.StatusBadRequest)
+			return
+		}
+
+		err = store.DeleteListen(ctx, int32(trackID), time.Unix(unix, 0))
+		if err != nil {
+			l.Err(err).Msg("Failed to delete listen")
+			utils.WriteError(w, "failed to delete listen", http.StatusInternalServerError)
+			return
+		}
+
+		w.WriteHeader(http.StatusNoContent)
+	}
+}
+
+// DeleteArtistHandler deletes an artist by its ID.
+func DeleteArtistHandler(store db.DB) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		ctx := r.Context()
+		l := logger.FromContext(ctx)
+
+		artistIDStr := r.URL.Query().Get("id")
+		if artistIDStr == "" {
+			utils.WriteError(w, "id must be provided", http.StatusBadRequest)
+			return
+		}
+
+		artistID, err := strconv.Atoi(artistIDStr)
+		if err != nil {
+			utils.WriteError(w, "invalid id", http.StatusBadRequest)
+			return
+		}
+
+		err = store.DeleteArtist(ctx, int32(artistID))
+		if err != nil {
+			l.Err(err).Msg("Failed to delete artist")
+			utils.WriteError(w, "failed to delete artist", http.StatusInternalServerError)
+			return
+		}
+
+		w.WriteHeader(http.StatusNoContent)
+	}
+}
+
+// DeleteAlbumHandler deletes an album by its ID.
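DeleteListenHandler identifies a listen by its track_id together with the exact unix timestamp of the play, while the other delete handlers take a single id; all reply 204 on success. A hedged client sketch follows; the /apis/web/v1/listen path and the session value are placeholders, since the router wiring is not part of this excerpt:

package main

import (
	"fmt"
	"log"
	"net/http"
	"time"
)

func main() {
	// Placeholder values: the real route for DeleteListenHandler is not shown
	// here, and the session value would come from a prior login.
	base := "http://localhost:4110"
	session := "session-id-from-login"

	// A listen is addressed by track_id plus the unix timestamp it was recorded at.
	unix := time.Now().Add(-2 * time.Hour).Unix()
	endpoint := fmt.Sprintf("%s/apis/web/v1/listen?track_id=%d&unix=%d", base, 2, unix)

	req, err := http.NewRequest(http.MethodDelete, endpoint, nil)
	if err != nil {
		log.Fatal(err)
	}
	req.AddCookie(&http.Cookie{Name: "koito_session", Value: session})

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.StatusCode) // 204 on success
}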
+func DeleteAlbumHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + + albumIDStr := r.URL.Query().Get("id") + if albumIDStr == "" { + utils.WriteError(w, "id must be provided", http.StatusBadRequest) + return + } + + albumID, err := strconv.Atoi(albumIDStr) + if err != nil { + utils.WriteError(w, "invalid id", http.StatusBadRequest) + return + } + + err = store.DeleteAlbum(ctx, int32(albumID)) + if err != nil { + l.Err(err).Msg("Failed to delete album") + utils.WriteError(w, "failed to delete album", http.StatusInternalServerError) + return + } + + w.WriteHeader(http.StatusNoContent) + } +} diff --git a/engine/handlers/get_album.go b/engine/handlers/get_album.go new file mode 100644 index 0000000..9d773c1 --- /dev/null +++ b/engine/handlers/get_album.go @@ -0,0 +1,28 @@ +package handlers + +import ( + "net/http" + "strconv" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/utils" +) + +func GetAlbumHandler(store db.DB) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + + idStr := r.URL.Query().Get("id") + id, err := strconv.Atoi(idStr) + if err != nil { + utils.WriteError(w, "id is invalid", 400) + return + } + + album, err := store.GetAlbum(r.Context(), db.GetAlbumOpts{ID: int32(id)}) + if err != nil { + utils.WriteError(w, "album with specified id could not be found", http.StatusNotFound) + return + } + utils.WriteJSON(w, http.StatusOK, album) + } +} diff --git a/engine/handlers/get_artist.go b/engine/handlers/get_artist.go new file mode 100644 index 0000000..bac18d8 --- /dev/null +++ b/engine/handlers/get_artist.go @@ -0,0 +1,28 @@ +package handlers + +import ( + "net/http" + "strconv" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/utils" +) + +func GetArtistHandler(store db.DB) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + + idStr := r.URL.Query().Get("id") + id, err := strconv.Atoi(idStr) + if err != nil { + utils.WriteError(w, "id is invalid", 400) + return + } + + artist, err := store.GetArtist(r.Context(), db.GetArtistOpts{ID: int32(id)}) + if err != nil { + utils.WriteError(w, "artist with specified id could not be found", http.StatusNotFound) + return + } + utils.WriteJSON(w, http.StatusOK, artist) + } +} diff --git a/engine/handlers/get_listen_activity.go b/engine/handlers/get_listen_activity.go new file mode 100644 index 0000000..6d3f91c --- /dev/null +++ b/engine/handlers/get_listen_activity.go @@ -0,0 +1,65 @@ +package handlers + +import ( + "net/http" + "strconv" + "strings" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" +) + +func GetListenActivityHandler(store db.DB) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + l := logger.FromContext(r.Context()) + + rangeStr := r.URL.Query().Get("range") + _range, _ := strconv.Atoi(rangeStr) + + monthStr := r.URL.Query().Get("month") + month, _ := strconv.Atoi(monthStr) + yearStr := r.URL.Query().Get("year") + year, _ := strconv.Atoi(yearStr) + + artistIdStr := r.URL.Query().Get("artist_id") + artistId, _ := strconv.Atoi(artistIdStr) + albumIdStr := r.URL.Query().Get("album_id") + albumId, _ := strconv.Atoi(albumIdStr) + trackIdStr := r.URL.Query().Get("track_id") + trackId, _ := strconv.Atoi(trackIdStr) + + var step db.StepInterval 
+ switch strings.ToLower(r.URL.Query().Get("step")) { + case "day": + step = db.StepDay + case "week": + step = db.StepWeek + case "month": + step = db.StepMonth + case "year": + step = db.StepYear + default: + l.Debug().Msgf("Using default value '%s' for step", db.StepDefault) + step = db.StepDay + } + + opts := db.ListenActivityOpts{ + Step: step, + Range: _range, + Month: month, + Year: year, + AlbumID: int32(albumId), + ArtistID: int32(artistId), + TrackID: int32(trackId), + } + + activity, err := store.GetListenActivity(r.Context(), opts) + if err != nil { + l.Err(err).Send() + utils.WriteError(w, err.Error(), 500) + return + } + utils.WriteJSON(w, http.StatusOK, activity) + } +} diff --git a/engine/handlers/get_listens.go b/engine/handlers/get_listens.go new file mode 100644 index 0000000..405acaa --- /dev/null +++ b/engine/handlers/get_listens.go @@ -0,0 +1,23 @@ +package handlers + +import ( + "net/http" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" +) + +func GetListensHandler(store db.DB) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + l := logger.FromContext(r.Context()) + opts := OptsFromRequest(r) + listens, err := store.GetListensPaginated(r.Context(), opts) + if err != nil { + l.Err(err).Send() + utils.WriteError(w, "failed to get listens: "+err.Error(), 400) + return + } + utils.WriteJSON(w, http.StatusOK, listens) + } +} diff --git a/engine/handlers/get_top_albums.go b/engine/handlers/get_top_albums.go new file mode 100644 index 0000000..bdda30c --- /dev/null +++ b/engine/handlers/get_top_albums.go @@ -0,0 +1,23 @@ +package handlers + +import ( + "net/http" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" +) + +func GetTopAlbumsHandler(store db.DB) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + l := logger.FromContext(r.Context()) + opts := OptsFromRequest(r) + albums, err := store.GetTopAlbumsPaginated(r.Context(), opts) + if err != nil { + l.Err(err).Msg("Failed to get top albums") + utils.WriteError(w, "failed to get albums", 400) + return + } + utils.WriteJSON(w, http.StatusOK, albums) + } +} diff --git a/engine/handlers/get_top_artists.go b/engine/handlers/get_top_artists.go new file mode 100644 index 0000000..9700d23 --- /dev/null +++ b/engine/handlers/get_top_artists.go @@ -0,0 +1,23 @@ +package handlers + +import ( + "net/http" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" +) + +func GetTopArtistsHandler(store db.DB) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + l := logger.FromContext(r.Context()) + opts := OptsFromRequest(r) + artists, err := store.GetTopArtistsPaginated(r.Context(), opts) + if err != nil { + l.Err(err).Msg("Failed to get top artists") + utils.WriteError(w, "failed to get artists", 400) + return + } + utils.WriteJSON(w, http.StatusOK, artists) + } +} diff --git a/engine/handlers/get_top_tracks.go b/engine/handlers/get_top_tracks.go new file mode 100644 index 0000000..5937f2b --- /dev/null +++ b/engine/handlers/get_top_tracks.go @@ -0,0 +1,23 @@ +package handlers + +import ( + "net/http" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" +) + +func 
GetTopTracksHandler(store db.DB) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + l := logger.FromContext(r.Context()) + opts := OptsFromRequest(r) + tracks, err := store.GetTopTracksPaginated(r.Context(), opts) + if err != nil { + l.Err(err).Msg("Failed to get top tracks") + utils.WriteError(w, "failed to get tracks", 400) + return + } + utils.WriteJSON(w, http.StatusOK, tracks) + } +} diff --git a/engine/handlers/get_track.go b/engine/handlers/get_track.go new file mode 100644 index 0000000..d97c8be --- /dev/null +++ b/engine/handlers/get_track.go @@ -0,0 +1,31 @@ +package handlers + +import ( + "net/http" + "strconv" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" +) + +func GetTrackHandler(store db.DB) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + l := logger.FromContext(r.Context()) + + idStr := r.URL.Query().Get("id") + id, err := strconv.Atoi(idStr) + if err != nil { + utils.WriteError(w, "id is invalid", 400) + return + } + + track, err := store.GetTrack(r.Context(), db.GetTrackOpts{ID: int32(id)}) + if err != nil { + l.Err(err).Msg("Failed to get top albums") + utils.WriteError(w, "track with specified id could not be found", http.StatusNotFound) + return + } + utils.WriteJSON(w, http.StatusOK, track) + } +} diff --git a/engine/handlers/handlers.go b/engine/handlers/handlers.go new file mode 100644 index 0000000..946b6b0 --- /dev/null +++ b/engine/handlers/handlers.go @@ -0,0 +1,77 @@ +// package handlers implements route handlers +package handlers + +import ( + "net/http" + "strconv" + "strings" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" +) + +const defaultLimitSize = 100 +const maximumLimit = 500 + +func OptsFromRequest(r *http.Request) db.GetItemsOpts { + l := logger.FromContext(r.Context()) + + limitStr := r.URL.Query().Get("limit") + limit, err := strconv.Atoi(limitStr) + if err != nil { + l.Debug().Msgf("query parameter 'limit' not specified, using default %d", defaultLimitSize) + limit = defaultLimitSize + } + if limit > maximumLimit { + l.Debug().Msgf("limit must not be greater than %d, using default %d", maximumLimit, defaultLimitSize) + limit = defaultLimitSize + } + pageStr := r.URL.Query().Get("page") + page, _ := strconv.Atoi(pageStr) + if page < 1 { + page = 1 + } + + weekStr := r.URL.Query().Get("week") + week, _ := strconv.Atoi(weekStr) + monthStr := r.URL.Query().Get("month") + month, _ := strconv.Atoi(monthStr) + yearStr := r.URL.Query().Get("year") + year, _ := strconv.Atoi(yearStr) + + artistIdStr := r.URL.Query().Get("artist_id") + artistId, _ := strconv.Atoi(artistIdStr) + albumIdStr := r.URL.Query().Get("album_id") + albumId, _ := strconv.Atoi(albumIdStr) + trackIdStr := r.URL.Query().Get("track_id") + trackId, _ := strconv.Atoi(trackIdStr) + + var period db.Period + switch strings.ToLower(r.URL.Query().Get("period")) { + case "day": + period = db.PeriodDay + case "week": + period = db.PeriodWeek + case "month": + period = db.PeriodMonth + case "year": + period = db.PeriodYear + case "all_time": + period = db.PeriodAllTime + default: + l.Debug().Msgf("Using default value '%s' for period", db.PeriodDay) + period = db.PeriodDay + } + + return db.GetItemsOpts{ + Limit: limit, + Period: period, + Page: page, + Week: week, + Month: month, + Year: year, + ArtistID: artistId, + AlbumID: albumId, + TrackID: trackId, + } +} diff 
--git a/engine/handlers/health.go b/engine/handlers/health.go new file mode 100644 index 0000000..29d7203 --- /dev/null +++ b/engine/handlers/health.go @@ -0,0 +1,10 @@ +package handlers + +import "net/http" + +func HealthHandler() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + w.Write([]byte(`{"status":"ready"}`)) + } +} diff --git a/engine/handlers/image_handler.go b/engine/handlers/image_handler.go new file mode 100644 index 0000000..0ce5b81 --- /dev/null +++ b/engine/handlers/image_handler.go @@ -0,0 +1,208 @@ +package handlers + +import ( + "bytes" + "net/http" + "os" + "path" + "path/filepath" + "sync" + + "github.com/gabehf/koito/internal/catalog" + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" + "github.com/go-chi/chi/v5" + "github.com/google/uuid" +) + +func ImageHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + l := logger.FromContext(r.Context()) + size := chi.URLParam(r, "size") + filename := chi.URLParam(r, "filename") + + imageSize, err := catalog.ParseImageSize(size) + if err != nil { + w.WriteHeader(http.StatusNotFound) + return + } + + imgid, err := uuid.Parse(filename) + if err != nil { + serveDefaultImage(w, r, imageSize) + return + } + + desiredImgPath := filepath.Join(cfg.ConfigDir(), catalog.ImageCacheDir, size, filepath.Clean(filename)) + + if _, err := os.Stat(desiredImgPath); os.IsNotExist(err) { + l.Debug().Msg("Image not found in desired size") + // file doesn't exist in desired size + + fullSizePath := filepath.Join(cfg.ConfigDir(), catalog.ImageCacheDir, string(catalog.ImageSizeFull), filepath.Clean(filename)) + largeSizePath := filepath.Join(cfg.ConfigDir(), catalog.ImageCacheDir, string(catalog.ImageSizeLarge), filepath.Clean(filename)) + + // check if file exists at either full or large size + // note: have to check both in case a user switched caching full size on and off + // which would result in cache misses from source changing + var sourcePath string + if _, err = os.Stat(fullSizePath); os.IsNotExist(err) { + if _, err = os.Stat(largeSizePath); os.IsNotExist(err) { + l.Warn().Msgf("Could not find requested image %s. 
If this image is tied to an album or artist, it should be replaced", imgid.String()) + serveDefaultImage(w, r, imageSize) + return + } else if err != nil { + // non-not found error for full file + l.Err(err).Msg("Failed to access source image file") + w.WriteHeader(http.StatusInternalServerError) + return + } + sourcePath = largeSizePath + } else if err != nil { + // non-not found error for full file + l.Err(err).Msg("Failed to access source image file") + w.WriteHeader(http.StatusInternalServerError) + return + } else { + sourcePath = fullSizePath + } + + // source size file was found + + // create and cache image at desired size + + imageBuf, err := os.ReadFile(sourcePath) + if err != nil { + l.Err(err).Msg("Failed to read source image file") + w.WriteHeader(http.StatusInternalServerError) + return + } + + err = catalog.CompressAndSaveImage(r.Context(), imgid.String(), imageSize, bytes.NewReader(imageBuf)) + if err != nil { + l.Err(err).Msg("Failed to save compressed image to cache") + } + } else if err != nil { + // non-not found error for desired file + l.Err(err).Msg("Failed to access desired image file") + w.WriteHeader(http.StatusInternalServerError) + return + } + + // Serve image + http.ServeFile(w, r, desiredImgPath) + } +} + +func serveDefaultImage(w http.ResponseWriter, r *http.Request, size catalog.ImageSize) { + var lock sync.Mutex + l := logger.FromContext(r.Context()) + defaultImagePath := filepath.Join(cfg.ConfigDir(), catalog.ImageCacheDir, string(size), "default_img") + if _, err := os.Stat(defaultImagePath); os.IsNotExist(err) { + l.Debug().Msg("Default image does not exist in cache at desired size") + defaultImagePath := filepath.Join(catalog.SourceImageDir(), "default_img") + if _, err = os.Stat(defaultImagePath); os.IsNotExist(err) { + l.Debug().Msg("Default image does not exist in cache, attempting to move...") + err = os.MkdirAll(filepath.Dir(defaultImagePath), 0755) + if err != nil { + l.Err(err).Msg("Error when attempting to create image_cache/full dir") + w.WriteHeader(http.StatusInternalServerError) + return + } + lock.Lock() + utils.CopyFile(path.Join("assets", "default_img"), defaultImagePath) + lock.Unlock() + } else if err != nil { + // non-not found error + l.Error().Err(err).Msg("Error when attempting to read default image in cache") + w.WriteHeader(http.StatusInternalServerError) + return + } + // default_img does (or now does) exist in cache at full size + file, err := os.Open(path.Join(catalog.SourceImageDir(), "default_img")) + if err != nil { + l.Err(err).Msg("Error when reading default image from source dir") + w.WriteHeader(http.StatusInternalServerError) + return + } + err = catalog.CompressAndSaveImage(r.Context(), "default_img", size, file) + if err != nil { + l.Err(err).Msg("Error when caching default img at desired size") + w.WriteHeader(http.StatusInternalServerError) + return + } + } else if err != nil { + // non-not found error + l.Error().Err(err).Msg("Error when attempting to read default image in cache") + w.WriteHeader(http.StatusInternalServerError) + return + } + + // serve default_img at desired size + http.ServeFile(w, r, path.Join(cfg.ConfigDir(), catalog.ImageCacheDir, string(size), "default_img")) +} + +// func SearchMissingAlbumImagesHandler(store db.DB) http.HandlerFunc { +// return func(w http.ResponseWriter, r *http.Request) { +// ctx := r.Context() +// l := logger.FromContext(ctx) +// l.Info().Msg("Beginning search for albums with missing images") +// go func() { +// defer func() { +// if r := recover(); r != nil { +// 
l.Error().Interface("recover", r).Msg("Panic when searching for missing album images") +// } +// }() +// ctx := logger.NewContext(l) +// from := int32(0) +// count := 0 +// for { +// albums, err := store.AlbumsWithoutImages(ctx, from) +// if errors.Is(err, pgx.ErrNoRows) { +// break +// } else if err != nil { +// l.Err(err).Msg("Failed to search for missing images") +// return +// } +// l.Debug().Msgf("Queried %d albums on page %d", len(albums), from) +// if len(albums) < 1 { +// break +// } +// for _, a := range albums { +// l.Debug().Msgf("Searching images for album %s", a.Title) +// img, err := imagesrc.GetAlbumImages(ctx, imagesrc.AlbumImageOpts{ +// Artists: utils.FlattenSimpleArtistNames(a.Artists), +// Album: a.Title, +// ReleaseMbzID: a.MbzID, +// }) +// if err == nil && img != "" { +// l.Debug().Msg("Image found! Downloading...") +// imgid, err := catalog.DownloadAndCacheImage(ctx, img) +// if err != nil { +// l.Err(err).Msgf("Failed to download image for %s", a.Title) +// continue +// } +// err = store.UpdateAlbum(ctx, db.UpdateAlbumOpts{ +// ID: a.ID, +// Image: imgid, +// }) +// if err != nil { +// l.Err(err).Msgf("Failed to update image for %s", a.Title) +// continue +// } +// l.Info().Msgf("Found new album image for %s", a.Title) +// count++ +// } +// if err != nil { +// l.Err(err).Msgf("Failed to get album images for %s", a.Title) +// } +// } +// from = albums[len(albums)-1].ID +// } +// l.Info().Msgf("Completed search, finding %d new images", count) +// }() +// w.WriteHeader(http.StatusOK) +// } +// } diff --git a/engine/handlers/lbz_submit_listen.go b/engine/handlers/lbz_submit_listen.go new file mode 100644 index 0000000..3d184a8 --- /dev/null +++ b/engine/handlers/lbz_submit_listen.go @@ -0,0 +1,278 @@ +package handlers + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/gabehf/koito/engine/middleware" + "github.com/gabehf/koito/internal/catalog" + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + mbz "github.com/gabehf/koito/internal/mbz" + "github.com/gabehf/koito/internal/utils" + "github.com/google/uuid" + "github.com/rs/zerolog" + "golang.org/x/sync/singleflight" +) + +type LbzListenType string + +const ( + ListenTypeSingle LbzListenType = "single" + ListenTypePlayingNow LbzListenType = "playing_now" + ListenTypeImport LbzListenType = "import" +) + +type LbzSubmitListenRequest struct { + ListenType LbzListenType `json:"listen_type,omitempty"` + Payload []LbzSubmitListenPayload `json:"payload,omitempty"` +} + +type LbzSubmitListenPayload struct { + ListenedAt int64 `json:"listened_at,omitempty"` + TrackMeta LbzTrackMeta `json:"track_metadata"` +} + +type LbzTrackMeta struct { + ArtistName string `json:"artist_name"` // required + TrackName string `json:"track_name"` // required + ReleaseName string `json:"release_name,omitempty"` + AdditionalInfo LbzAdditionalInfo `json:"additional_info,omitempty"` +} + +type LbzAdditionalInfo struct { + MediaPlayer string `json:"media_player,omitempty"` + SubmissionClient string `json:"submission_client,omitempty"` + SubmissionClientVersion string `json:"submission_client_version,omitempty"` + ReleaseMBID string `json:"release_mbid,omitempty"` + ReleaseGroupMBID string `json:"release_group_mbid,omitempty"` + ArtistMBIDs []string `json:"artist_mbids,omitempty"` + ArtistNames []string `json:"artist_names,omitempty"` + RecordingMBID string `json:"recording_mbid,omitempty"` + DurationMs int32 `json:"duration_ms,omitempty"` 
+ Duration int32 `json:"duration,omitempty"` + Tags []string `json:"tags,omitempty"` + AlbumArtist string `json:"albumartist,omitempty"` +} + +const ( + maxListensPerRequest = 1000 +) + +var sfGroup singleflight.Group + +func LbzSubmitListenHandler(store db.DB, mbzc mbz.MusicBrainzCaller) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + l := logger.FromContext(r.Context()) + + var req LbzSubmitListenRequest + requestBytes, err := io.ReadAll(r.Body) + if err != nil { + utils.WriteError(w, "failed to read request body", http.StatusBadRequest) + return + } + if err := json.NewDecoder(bytes.NewBuffer(requestBytes)).Decode(&req); err != nil { + l.Debug().Err(err).Msg("Failed to decode request") + utils.WriteError(w, "failed to decode request", http.StatusBadRequest) + return + } + + u := middleware.GetUserFromContext(r.Context()) + if u == nil { + utils.WriteError(w, "unauthorized", http.StatusUnauthorized) + return + } + + l.Debug().Any("request_body", req).Msg("Recieved request") + + if len(req.Payload) < 1 { + l.Error().Msg("Payload is nil") + utils.WriteError(w, "payload is nil", http.StatusBadRequest) + return + } + + if len(req.Payload) > maxListensPerRequest { + l.Error().Msg("Payload exceeds max listens per request") + utils.WriteError(w, "payload exceeds max listens per request", http.StatusBadRequest) + return + } + + if len(req.Payload) != 1 && req.ListenType != "import" { + l.Error().Msg("Payload must only contain one listen for non-import requests") + utils.WriteError(w, "payload must only contain one listen for non-import requests", http.StatusBadRequest) + return + } + + for _, payload := range req.Payload { + + if payload.TrackMeta.ArtistName == "" || payload.TrackMeta.TrackName == "" { + l.Error().Msg("Artist name or track name are missing, unable to process listen") + utils.WriteError(w, "Artist name or track name are missing", http.StatusBadRequest) + return + } + + if req.ListenType != ListenTypePlayingNow && req.ListenType != ListenTypeSingle && req.ListenType != ListenTypeImport { + l.Debug().Msg("No listen type provided, assuming 'single'") + req.ListenType = "single" + } + + artistMbzIDs, err := utils.ParseUUIDSlice(payload.TrackMeta.AdditionalInfo.ArtistMBIDs) + if err != nil { + l.Debug().Err(err).Msg("Failed to parse one or more uuids") + } + rgMbzID, err := uuid.Parse(payload.TrackMeta.AdditionalInfo.ReleaseGroupMBID) + if err != nil { + rgMbzID = uuid.Nil + } + releaseMbzID, err := uuid.Parse(payload.TrackMeta.AdditionalInfo.ReleaseMBID) + if err != nil { + releaseMbzID = uuid.Nil + } + recordingMbzID, err := uuid.Parse(payload.TrackMeta.AdditionalInfo.RecordingMBID) + if err != nil { + recordingMbzID = uuid.Nil + } + + var client string + if payload.TrackMeta.AdditionalInfo.MediaPlayer != "" { + client = payload.TrackMeta.AdditionalInfo.MediaPlayer + } else if payload.TrackMeta.AdditionalInfo.SubmissionClient != "" { + client = payload.TrackMeta.AdditionalInfo.SubmissionClient + } + + var duration int32 + if payload.TrackMeta.AdditionalInfo.Duration != 0 { + duration = payload.TrackMeta.AdditionalInfo.Duration + } else if payload.TrackMeta.AdditionalInfo.DurationMs != 0 { + duration = payload.TrackMeta.AdditionalInfo.DurationMs / 1000 + } + + var listenedAt = time.Now() + if payload.ListenedAt != 0 { + listenedAt = time.Unix(payload.ListenedAt, 0) + } + + opts := catalog.SubmitListenOpts{ + MbzCaller: mbzc, + ArtistNames: payload.TrackMeta.AdditionalInfo.ArtistNames, + Artist: payload.TrackMeta.ArtistName, + 
ArtistMbzIDs: artistMbzIDs, + TrackTitle: payload.TrackMeta.TrackName, + RecordingMbzID: recordingMbzID, + ReleaseTitle: payload.TrackMeta.ReleaseName, + ReleaseMbzID: releaseMbzID, + ReleaseGroupMbzID: rgMbzID, + Duration: duration, + Time: listenedAt, + UserID: u.ID, + Client: client, + } + + if req.ListenType == ListenTypePlayingNow { + opts.SkipSaveListen = true + } + _, err, shared := sfGroup.Do(buildCaolescingKey(payload), func() (interface{}, error) { + return 0, catalog.SubmitListen(r.Context(), store, opts) + }) + if shared { + l.Info().Msg("Duplicate requests detected; results were coalesced") + } + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Header().Set("Content-Type", "application/json") + w.Write([]byte("{\"status\": \"internal server error\"}")) + } + } + + w.WriteHeader(http.StatusOK) + w.Header().Set("Content-Type", "application/json") + w.Write([]byte("{\"status\": \"ok\"}")) + + if cfg.LbzRelayEnabled() { + go doLbzRelay(requestBytes, l) + } + + } +} + +func doLbzRelay(requestBytes []byte, l *zerolog.Logger) { + defer func() { + if r := recover(); r != nil { + l.Error().Interface("recover", r).Msg("Panic in doLbzRelay") + } + }() + const ( + maxRetryDuration = 10 * time.Second + initialBackoff = 1 * time.Second + maxBackoff = 4 * time.Second + ) + req, err := http.NewRequest("POST", cfg.LbzRelayUrl()+"/submit-listens", bytes.NewBuffer(requestBytes)) + if err != nil { + l.Error().Msg("Failed to build ListenBrainz relay request") + l.Error().Err(err).Send() + return + } + req.Header.Add("Authorization", "Token "+cfg.LbzRelayToken()) + req.Header.Add("Content-Type", "application/json") + + client := &http.Client{ + Timeout: 5 * time.Second, + } + + var resp *http.Response + var body []byte + start := time.Now() + backoff := initialBackoff + + for { + resp, err = client.Do(req) + if err != nil { + l.Err(err).Msg("Failed to send ListenBrainz relay request") + return + } + defer resp.Body.Close() + + if resp.StatusCode >= 200 && resp.StatusCode < 300 { + l.Info().Msg("Successfully relayed ListenBrainz submission") + return + } + + body, _ = io.ReadAll(resp.Body) + + if resp.StatusCode >= 500 && time.Since(start)+backoff <= maxRetryDuration { + l.Warn(). + Int("status", resp.StatusCode). + Str("response", string(body)). + Msg("Retryable server error from ListenBrainz relay, retrying...") + time.Sleep(backoff) + backoff *= 2 + if backoff > maxBackoff { + backoff = maxBackoff + } + continue + } + + // 4xx status or timeout exceeded + l.Warn(). + Int("status", resp.StatusCode). + Str("response", string(body)). 
+ Msg("Non-2XX response from ListenBrainz relay") + return + } +} + +func buildCaolescingKey(p LbzSubmitListenPayload) string { + // the key not including the listen_type introduces the very rare possibility of a playing_now + // request taking precedence over a single, meaning that a listen will not be logged when it + // should, however that would require a playing_now request to fire a few seconds before a 'single' + // of the same track, which should never happen outside of misbehaving clients + // + // this could be fixed by restructuring the database inserts for idempotency, which would + // eliminate the need to coalesce responses, however i'm not gonna do that right now + return fmt.Sprintf("%s:%s:%s", p.TrackMeta.ArtistName, p.TrackMeta.TrackName, p.TrackMeta.ReleaseName) +} diff --git a/engine/handlers/lbz_validate.go b/engine/handlers/lbz_validate.go new file mode 100644 index 0000000..583ff5f --- /dev/null +++ b/engine/handlers/lbz_validate.go @@ -0,0 +1,41 @@ +package handlers + +import ( + "net/http" + + "github.com/gabehf/koito/engine/middleware" + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" +) + +type LbzValidateResponse struct { + Code int `json:"code"` + Error string `json:"error,omitempty"` + Message string `json:"message,omitempty"` + Valid bool `json:"valid,omitempty"` + UserName string `json:"user_name,omitempty"` +} + +func LbzValidateTokenHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + l.Debug().Msg("Validating user token...") + + u := middleware.GetUserFromContext(ctx) + var response LbzValidateResponse + if u == nil { + response.Code = http.StatusUnauthorized + response.Error = "Incorrect Authorization" + w.WriteHeader(http.StatusUnauthorized) + utils.WriteJSON(w, http.StatusOK, response) + } else { + response.Code = 200 + response.Message = "Token valid." 
+ response.Valid = true + response.UserName = u.Username + utils.WriteJSON(w, http.StatusOK, response) + } + } +} diff --git a/engine/handlers/merge.go b/engine/handlers/merge.go new file mode 100644 index 0000000..f11e251 --- /dev/null +++ b/engine/handlers/merge.go @@ -0,0 +1,97 @@ +package handlers + +import ( + "net/http" + "strconv" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" +) + +func MergeTracksHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + l := logger.FromContext(r.Context()) + + fromidStr := r.URL.Query().Get("from_id") + fromId, err := strconv.Atoi(fromidStr) + if err != nil { + l.Err(err).Send() + utils.WriteError(w, "from_id is invalid", 400) + return + } + toidStr := r.URL.Query().Get("to_id") + toId, err := strconv.Atoi(toidStr) + if err != nil { + l.Err(err).Send() + utils.WriteError(w, "to_id is invalid", 400) + return + } + + err = store.MergeTracks(r.Context(), int32(fromId), int32(toId)) + if err != nil { + l.Err(err).Send() + utils.WriteError(w, "Failed to merge tracks: "+err.Error(), http.StatusInternalServerError) + return + } + w.WriteHeader(http.StatusNoContent) + } +} + +func MergeReleaseGroupsHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + l := logger.FromContext(r.Context()) + + fromidStr := r.URL.Query().Get("from_id") + fromId, err := strconv.Atoi(fromidStr) + if err != nil { + l.Err(err).Send() + utils.WriteError(w, "from_id is invalid", 400) + return + } + toidStr := r.URL.Query().Get("to_id") + toId, err := strconv.Atoi(toidStr) + if err != nil { + l.Err(err).Send() + utils.WriteError(w, "to_id is invalid", 400) + return + } + + err = store.MergeAlbums(r.Context(), int32(fromId), int32(toId)) + if err != nil { + l.Err(err).Send() + utils.WriteError(w, "Failed to merge albums: "+err.Error(), http.StatusInternalServerError) + return + } + w.WriteHeader(http.StatusNoContent) + } +} + +func MergeArtistsHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + l := logger.FromContext(r.Context()) + + fromidStr := r.URL.Query().Get("from_id") + fromId, err := strconv.Atoi(fromidStr) + if err != nil { + l.Err(err).Send() + utils.WriteError(w, "from_id is invalid", 400) + return + } + toidStr := r.URL.Query().Get("to_id") + toId, err := strconv.Atoi(toidStr) + if err != nil { + l.Err(err).Send() + utils.WriteError(w, "to_id is invalid", 400) + return + } + + err = store.MergeArtists(r.Context(), int32(fromId), int32(toId)) + if err != nil { + l.Err(err).Send() + utils.WriteError(w, "Failed to merge artists: "+err.Error(), http.StatusInternalServerError) + return + } + w.WriteHeader(http.StatusNoContent) + } +} diff --git a/engine/handlers/replace_image.go b/engine/handlers/replace_image.go new file mode 100644 index 0000000..8a33e2f --- /dev/null +++ b/engine/handlers/replace_image.go @@ -0,0 +1,178 @@ +package handlers + +import ( + "io" + "net/http" + "strconv" + "strings" + + "github.com/gabehf/koito/internal/catalog" + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" + "github.com/google/uuid" +) + +type ReplaceImageResponse struct { + Success bool `json:"success"` + Image string `json:"image"` + Message string `json:"message,omitempty"` +} + +func ReplaceImageHandler(store db.DB) http.HandlerFunc { + return func(w 
http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + + artistIdStr := r.FormValue("artist_id") + artistId, _ := strconv.Atoi(artistIdStr) + albumIdStr := r.FormValue("album_id") + albumId, _ := strconv.Atoi(albumIdStr) + + if artistId != 0 && albumId != 0 { + utils.WriteError(w, "Only one of artist_id and album_id can be set", http.StatusBadRequest) + return + } else if artistId == 0 && albumId == 0 { + utils.WriteError(w, "One of artist_id and album_id must be set", http.StatusBadRequest) + return + } + + var oldImage *uuid.UUID + if artistId != 0 { + a, err := store.GetArtist(ctx, db.GetArtistOpts{ + ID: int32(artistId), + }) + if err != nil { + utils.WriteError(w, "Artist with specified id could not be found", http.StatusBadRequest) + return + } + oldImage = a.Image + } else if albumId != 0 { + a, err := store.GetAlbum(ctx, db.GetAlbumOpts{ + ID: int32(albumId), + }) + if err != nil { + utils.WriteError(w, "Album with specified id could not be found", http.StatusBadRequest) + return + } + oldImage = a.Image + } + + l.Debug().Msgf("Getting image from request...") + + var id uuid.UUID + var err error + + fileUrl := r.FormValue("image_url") + if fileUrl != "" { + l.Debug().Msg("Image identified as remote file") + err = catalog.ValidateImageURL(fileUrl) + if err != nil { + utils.WriteError(w, "url is invalid or not an image file", http.StatusBadRequest) + return + } + id = uuid.New() + var dlSize catalog.ImageSize + if cfg.FullImageCacheEnabled() { + dlSize = catalog.ImageSizeFull + } else { + dlSize = catalog.ImageSizeLarge + } + l.Debug().Msg("Downloading album image from source...") + err = catalog.DownloadAndCacheImage(ctx, id, fileUrl, dlSize) + if err != nil { + l.Err(err).Msg("Failed to cache image") + } + } else { + file, _, err := r.FormFile("image") + if err != nil { + utils.WriteError(w, "Invalid file", http.StatusBadRequest) + return + } + defer file.Close() + + buf := make([]byte, 512) + if _, err := file.Read(buf); err != nil { + utils.WriteError(w, "Could not read file", http.StatusInternalServerError) + return + } + + contentType := http.DetectContentType(buf) + if !strings.HasPrefix(contentType, "image/") { + utils.WriteError(w, "Only image uploads are allowed", http.StatusBadRequest) + return + } + + if _, err := file.Seek(0, io.SeekStart); err != nil { + utils.WriteError(w, "Could not seek file", http.StatusInternalServerError) + return + } + + l.Debug().Msgf("Saving image to cache...") + + id = uuid.New() + + var dlSize catalog.ImageSize + if cfg.FullImageCacheEnabled() { + dlSize = catalog.ImageSizeFull + } else { + dlSize = catalog.ImageSizeLarge + } + + err = catalog.CompressAndSaveImage(ctx, id.String(), dlSize, file) + if err != nil { + utils.WriteError(w, "Could not save file", http.StatusInternalServerError) + return + } + } + + l.Debug().Msgf("Updating database...") + + var imgsrc string + if fileUrl != "" { + imgsrc = fileUrl + } else { + imgsrc = catalog.ImageSourceUserUpload + } + + if artistId != 0 { + err := store.UpdateArtist(ctx, db.UpdateArtistOpts{ + ID: int32(artistId), + Image: id, + ImageSrc: imgsrc, + }) + if err != nil { + l.Err(err).Msg("Artist image could not be updated") + utils.WriteError(w, "Artist image could not be updated", http.StatusInternalServerError) + return + } + } else if albumId != 0 { + err := store.UpdateAlbum(ctx, db.UpdateAlbumOpts{ + ID: int32(albumId), + Image: id, + ImageSrc: imgsrc, + }) + if err != nil { + l.Err(err).Msg("Album image could not be updated") + utils.WriteError(w, 
"Album image could not be updated", http.StatusInternalServerError) + return + } + } + + if oldImage != nil { + l.Debug().Msg("Cleaning up old image file...") + err = catalog.DeleteImage(*oldImage) + if err != nil { + l.Err(err).Msg("Failed to delete old image file") + utils.WriteError(w, "Could not delete old image file", http.StatusInternalServerError) + return + } + } + + utils.WriteJSON(w, http.StatusOK, ReplaceImageResponse{ + Success: true, + Image: id.String(), + }) + } +} diff --git a/engine/handlers/replace_image_test.go b/engine/handlers/replace_image_test.go new file mode 100644 index 0000000..4a0e3ad --- /dev/null +++ b/engine/handlers/replace_image_test.go @@ -0,0 +1 @@ +package handlers_test diff --git a/engine/handlers/search.go b/engine/handlers/search.go new file mode 100644 index 0000000..51d5a3c --- /dev/null +++ b/engine/handlers/search.go @@ -0,0 +1,47 @@ +package handlers + +import ( + "net/http" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/models" + "github.com/gabehf/koito/internal/utils" +) + +type SearchResults struct { + Artists []*models.Artist `json:"artists"` + Albums []*models.Album `json:"albums"` + Tracks []*models.Track `json:"tracks"` +} + +func SearchHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + q := r.URL.Query().Get("q") + artists, err := store.SearchArtists(ctx, q) + if err != nil { + l.Err(err).Msg("Failed to search for artists") + utils.WriteError(w, "failed to search in database", http.StatusInternalServerError) + return + } + albums, err := store.SearchAlbums(ctx, q) + if err != nil { + l.Err(err).Msg("Failed to search for albums") + utils.WriteError(w, "failed to search in database", http.StatusInternalServerError) + return + } + tracks, err := store.SearchTracks(ctx, q) + if err != nil { + l.Err(err).Msg("Failed to search for tracks") + utils.WriteError(w, "failed to search in database", http.StatusInternalServerError) + return + } + utils.WriteJSON(w, http.StatusOK, SearchResults{ + Artists: artists, + Albums: albums, + Tracks: tracks, + }) + } +} diff --git a/engine/handlers/stats.go b/engine/handlers/stats.go new file mode 100644 index 0000000..107426a --- /dev/null +++ b/engine/handlers/stats.go @@ -0,0 +1,77 @@ +package handlers + +import ( + "net/http" + "strings" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" +) + +type StatsResponse struct { + ListenCount int64 `json:"listen_count"` + TrackCount int64 `json:"track_count"` + AlbumCount int64 `json:"album_count"` + ArtistCount int64 `json:"artist_count"` + HoursListened int64 `json:"hours_listened"` +} + +func StatsHandler(store db.DB) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + l := logger.FromContext(r.Context()) + var period db.Period + switch strings.ToLower(r.URL.Query().Get("period")) { + case "day": + period = db.PeriodDay + case "week": + period = db.PeriodWeek + case "month": + period = db.PeriodMonth + case "year": + period = db.PeriodYear + case "all_time": + period = db.PeriodAllTime + default: + l.Debug().Msgf("Using default value '%s' for period", db.PeriodDay) + period = db.PeriodDay + } + listens, err := store.CountListens(r.Context(), period) + if err != nil { + l.Err(err).Send() + utils.WriteError(w, "failed to get listens: "+err.Error(), http.StatusInternalServerError) + return + 
} + tracks, err := store.CountTracks(r.Context(), period) + if err != nil { + l.Err(err).Send() + utils.WriteError(w, "failed to get listens: "+err.Error(), http.StatusInternalServerError) + return + } + albums, err := store.CountAlbums(r.Context(), period) + if err != nil { + l.Err(err).Send() + utils.WriteError(w, "failed to get listens: "+err.Error(), http.StatusInternalServerError) + return + } + artists, err := store.CountArtists(r.Context(), period) + if err != nil { + l.Err(err).Send() + utils.WriteError(w, "failed to get listens: "+err.Error(), http.StatusInternalServerError) + return + } + timeListenedS, err := store.CountTimeListened(r.Context(), period) + if err != nil { + l.Err(err).Send() + utils.WriteError(w, "failed to get listens: "+err.Error(), http.StatusInternalServerError) + return + } + utils.WriteJSON(w, http.StatusOK, StatsResponse{ + ListenCount: listens, + TrackCount: tracks, + AlbumCount: albums, + ArtistCount: artists, + HoursListened: timeListenedS / 60 / 60, + }) + } +} diff --git a/engine/import_test.go b/engine/import_test.go new file mode 100644 index 0000000..09c6f39 --- /dev/null +++ b/engine/import_test.go @@ -0,0 +1,64 @@ +package engine_test + +import ( + "context" + "os" + "path/filepath" + "testing" + + "github.com/gabehf/koito/engine" + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestImportMaloja(t *testing.T) { + + src := "../static/maloja_import_test.json" + destDir := filepath.Join(cfg.ConfigDir(), "import") + dest := filepath.Join(destDir, "maloja_import_test.json") + + // not going to make the dest dir because engine should make it already + + input, err := os.ReadFile(src) + require.NoError(t, err) + + require.NoError(t, os.WriteFile(dest, input, os.ModePerm)) + + engine.RunImporter(logger.Get(), store) + + // maloja test import is 38 Magnify Tokyo streams + a, err := store.GetArtist(context.Background(), db.GetArtistOpts{Name: "Magnify Tokyo"}) + require.NoError(t, err) + t.Log(a) + assert.Equal(t, "Magnify Tokyo", a.Name) + assert.EqualValues(t, 38, a.ListenCount) +} + +func TestImportSpotify(t *testing.T) { + + src := "../static/Streaming_History_Audio_spotify_import_test.json" + destDir := filepath.Join(cfg.ConfigDir(), "import") + dest := filepath.Join(destDir, "Streaming_History_Audio_spotify_import_test.json") + + // not going to make the dest dir because engine should make it already + + input, err := os.ReadFile(src) + require.NoError(t, err) + + require.NoError(t, os.WriteFile(dest, input, os.ModePerm)) + + engine.RunImporter(logger.Get(), store) + + a, err := store.GetArtist(context.Background(), db.GetArtistOpts{Name: "The Story So Far"}) + require.NoError(t, err) + track, err := store.GetTrack(context.Background(), db.GetTrackOpts{Title: "Clairvoyant", ArtistIDs: []int32{a.ID}}) + require.NoError(t, err) + t.Log(track) + assert.Equal(t, "Clairvoyant", track.Title) + // spotify includes duration data, but we only import when reason_end = trackdone + // this is the only track with valid duration data + assert.EqualValues(t, 181, track.Duration) +} diff --git a/engine/long_test.go b/engine/long_test.go new file mode 100644 index 0000000..fad9f16 --- /dev/null +++ b/engine/long_test.go @@ -0,0 +1,734 @@ +package engine_test + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "mime/multipart" + "net/http" + "net/url" + "os" + "path" + "strings" + "sync" 
+ "testing" + "time" + + "github.com/gabehf/koito/engine/handlers" + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/models" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +var session string +var apikey string +var loginOnce sync.Once +var apikeyOnce sync.Once + +func login(t *testing.T) { + loginOnce.Do(func() { + formdata := url.Values{} + formdata.Set("username", cfg.DefaultUsername()) + formdata.Set("password", cfg.DefaultPassword()) + encoded := formdata.Encode() + resp, err := http.DefaultClient.Post(host()+"/apis/web/v1/login", "application/x-www-form-urlencoded", strings.NewReader(encoded)) + respBytes, _ := io.ReadAll(resp.Body) + t.Logf("Login request response: %s - %s", resp.Status, respBytes) + require.NoError(t, err) + require.Len(t, resp.Cookies(), 1) + session = resp.Cookies()[0].Value + require.NotEmpty(t, session) + }) +} + +func makeAuthRequest(t *testing.T, session, method, endpoint string, body io.Reader) (*http.Response, error) { + req, err := http.NewRequest(method, host()+endpoint, body) + require.NoError(t, err) + req.AddCookie(&http.Cookie{ + Name: "koito_session", + Value: session, + }) + t.Logf("Making request to %s with session: %s", endpoint, session) + return http.DefaultClient.Do(req) +} + +// Expects a valid session +func getApiKey(t *testing.T, session string) { + apikeyOnce.Do(func() { + resp, err := makeAuthRequest(t, session, "GET", "/apis/web/v1/user/apikeys", nil) + require.NoError(t, err) + var keys []models.ApiKey + err = json.NewDecoder(resp.Body).Decode(&keys) + require.NoError(t, err) + require.GreaterOrEqual(t, len(keys), 1) + apikey = keys[0].Key + }) +} + +func truncateTestData(t *testing.T) { + err := store.Exec(context.Background(), + `TRUNCATE + artists, + artist_aliases, + tracks, + artist_tracks, + releases, + artist_releases, + release_aliases, + listens + RESTART IDENTITY CASCADE`) + require.NoError(t, err) +} + +func doSubmitListens(t *testing.T) { + login(t) + getApiKey(t, session) + truncateTestData(t) + bodies := []string{fmt.Sprintf(`{ + "listen_type": "single", + "payload": [ + { + "listened_at": %d, + "track_metadata": { + "additional_info": { + "artist_mbids": [ + "efc787f0-046f-4a60-beff-77b398c8cdf4" + ], + "artist_names": [ + "さユり" + ], + "duration_ms": 275960, + "recording_mbid": "21524d55-b1f8-45d1-b172-976cba447199", + "release_group_mbid": "3281e0d9-fa44-4337-a8ce-6f264beeae16", + "release_mbid": "eb790e90-0065-4852-b47d-bbeede4aa9fc", + "submission_client": "navidrome", + "submission_client_version": "0.56.1 (fa2cf362)" + }, + "artist_name": "さユり", + "release_name": "酸欠少女", + "track_name": "花の塔" + } + } + ] + }`, time.Now().Add(-2*time.Hour).Unix()), // yesterday + fmt.Sprintf(`{ + "listen_type": "single", + "payload": [ + { + "listened_at": %d, + "track_metadata": { + "additional_info": { + "artist_mbids": [ + "80b3cb83-b7a3-4f79-ad42-8325cefb3626" + ], + "artist_names": [ + "キタニタツヤ" + ], + "duration_ms": 197270, + "recording_mbid": "4e909c21-e7a8-404d-b75a-0c8c2926efb0", + "release_group_mbid": "89069d92-e495-462c-b189-3431551868ed", + "release_mbid": "e16a49d6-77f3-4d73-b93c-cac855ce6ad5", + "submission_client": "navidrome", + "submission_client_version": "0.56.1 (fa2cf362)" + }, + "artist_name": "キタニタツヤ", + "release_name": "Where Our Blue Is", + "track_name": "Where Our Blue Is" + } + } + ] + }`, time.Now().Unix()), + fmt.Sprintf(`{ + "listen_type": "single", + "payload": [ + { + 
"listened_at": %d, + "track_metadata": { + "additional_info": { + "artist_mbids": [ + "1262ab85-308b-46e7-b0b5-91fef8e46b62" + ], + "artist_names": [ + "ネクライトーキー" + ], + "duration_ms": 241560, + "recording_mbid": "8eec4f3f-a059-4217-aad1-fbf82e33e756", + "release_group_mbid": "14f1aff0-dd19-4b42-82dd-720386b6d4c1", + "release_mbid": "7762d7af-7b6c-454f-977e-1b261743e265", + "submission_client": "navidrome", + "submission_client_version": "0.56.1 (fa2cf362)" + }, + "artist_name": "ネクライトーキー", + "release_name": "ONE!", + "track_name": "こんがらがった!" + } + } + ] + }`, time.Now().Add(-1*time.Hour).Unix())} + for _, body := range bodies { + req, err := http.NewRequest("POST", host()+"/apis/listenbrainz/1/submit-listens", strings.NewReader(body)) + require.NoError(t, err) + req.Header.Add("Authorization", fmt.Sprintf("Token %s", apikey)) + resp, err := http.DefaultClient.Do(req) + require.NoError(t, err) + respBytes, err := io.ReadAll(resp.Body) + require.NoError(t, err) + assert.Equal(t, `{"status": "ok"}`, string(respBytes)) + } +} + +func TestGetters(t *testing.T) { + t.Run("Submit Listens", doSubmitListens) + // Artist was saved + resp, err := http.DefaultClient.Get(host() + "/apis/web/v1/artist?id=1") + assert.NoError(t, err) + var artist models.Artist + err = json.NewDecoder(resp.Body).Decode(&artist) + require.NoError(t, err) + assert.Equal(t, "さユり", artist.Name) + + // Album was saved + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/album?id=1") + assert.NoError(t, err) + var album models.Album + err = json.NewDecoder(resp.Body).Decode(&album) + require.NoError(t, err) + assert.Equal(t, "酸欠少女", album.Title) + + // Track was saved + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/track?id=1") + assert.NoError(t, err) + var track models.Track + err = json.NewDecoder(resp.Body).Decode(&track) + require.NoError(t, err) + assert.Equal(t, "花の塔", track.Title) + + // Listen was saved + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/listens") + assert.NoError(t, err) + var listens db.PaginatedResponse[models.Listen] + err = json.NewDecoder(resp.Body).Decode(&listens) + require.NoError(t, err) + require.Len(t, listens.Items, 3) + assert.EqualValues(t, 2, listens.Items[0].Track.ID) + assert.Equal(t, "Where Our Blue Is", listens.Items[0].Track.Title) + + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/top-artists") + assert.NoError(t, err) + var artists db.PaginatedResponse[models.Artist] + err = json.NewDecoder(resp.Body).Decode(&artists) + require.NoError(t, err) + require.Len(t, artists.Items, 3) + + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/top-albums") + assert.NoError(t, err) + var albums db.PaginatedResponse[models.Album] + err = json.NewDecoder(resp.Body).Decode(&albums) + require.NoError(t, err) + require.Len(t, albums.Items, 3) + + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/top-tracks") + assert.NoError(t, err) + var tracks db.PaginatedResponse[models.Track] + err = json.NewDecoder(resp.Body).Decode(&tracks) + require.NoError(t, err) + require.Len(t, tracks.Items, 3) + + truncateTestData(t) +} + +func TestMerge(t *testing.T) { + + t.Run("Submit Listens", doSubmitListens) + + resp, err := makeAuthRequest(t, session, "POST", "/apis/web/v1/merge/tracks?from_id=1&to_id=2", nil) + require.NoError(t, err) + require.Equal(t, 204, resp.StatusCode) + + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/track?id=2") + require.NoError(t, err) + var track models.Track + err = json.NewDecoder(resp.Body).Decode(&track) + 
require.NoError(t, err) + assert.EqualValues(t, 2, track.ListenCount) + + truncateTestData(t) + + t.Run("Submit Listens", doSubmitListens) + + resp, err = makeAuthRequest(t, session, "POST", "/apis/web/v1/merge/artists?from_id=1&to_id=2", nil) + require.NoError(t, err) + require.Equal(t, 204, resp.StatusCode) + + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/artist?id=2") + require.NoError(t, err) + var artist models.Artist + err = json.NewDecoder(resp.Body).Decode(&artist) + require.NoError(t, err) + assert.EqualValues(t, 2, artist.ListenCount) + + truncateTestData(t) + + t.Run("Submit Listens", doSubmitListens) + + resp, err = makeAuthRequest(t, session, "POST", "/apis/web/v1/merge/albums?from_id=1&to_id=2", nil) + require.NoError(t, err) + require.Equal(t, 204, resp.StatusCode) + + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/album?id=2") + require.NoError(t, err) + var album models.Album + err = json.NewDecoder(resp.Body).Decode(&album) + require.NoError(t, err) + assert.EqualValues(t, 2, album.ListenCount) + + truncateTestData(t) +} + +func TestValidateToken(t *testing.T) { + login(t) + getApiKey(t, session) + + req, err := http.NewRequest("GET", host()+"/apis/listenbrainz/1/validate-token", nil) + require.NoError(t, err) + req.Header.Add("Authorization", fmt.Sprintf("Token %s", apikey)) + resp, err := http.DefaultClient.Do(req) + require.NoError(t, err) + var actual handlers.LbzValidateResponse + require.NoError(t, json.NewDecoder(resp.Body).Decode(&actual)) + t.Log(actual) + var expected handlers.LbzValidateResponse + expected.Code = 200 + expected.Message = "Token valid." + expected.Valid = true + expected.UserName = "test" + assert.True(t, assert.ObjectsAreEqual(expected, actual)) + + req, err = http.NewRequest("GET", host()+"/apis/listenbrainz/1/validate-token", nil) + require.NoError(t, err) + req.Header.Add("Authorization", "Token thisisasuperinvalidtoken") + resp, err = http.DefaultClient.Do(req) + require.NoError(t, err) + assert.Equal(t, 401, resp.StatusCode) + + req, err = http.NewRequest("GET", host()+"/apis/listenbrainz/1/validate-token", nil) + require.NoError(t, err) + resp, err = http.DefaultClient.Do(req) + require.NoError(t, err) + assert.Equal(t, 401, resp.StatusCode) +} + +func TestDelete(t *testing.T) { + + t.Run("Submit Listens", doSubmitListens) + + resp, err := makeAuthRequest(t, session, "DELETE", "/apis/web/v1/artist?id=1", nil) + require.NoError(t, err) + require.Equal(t, 204, resp.StatusCode) + + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/artist?id=1") + require.NoError(t, err) + require.Equal(t, 404, resp.StatusCode) + + resp, err = makeAuthRequest(t, session, "DELETE", "/apis/web/v1/album?id=1", nil) + require.NoError(t, err) + require.Equal(t, 204, resp.StatusCode) + + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/album?id=1") + require.NoError(t, err) + require.Equal(t, 404, resp.StatusCode) + + resp, err = makeAuthRequest(t, session, "DELETE", "/apis/web/v1/track?id=1", nil) + require.NoError(t, err) + require.Equal(t, 204, resp.StatusCode) + + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/track?id=1") + require.NoError(t, err) + require.Equal(t, 404, resp.StatusCode) + + truncateTestData(t) +} + +func TestAliasesAndSearch(t *testing.T) { + + t.Run("Submit Listens", doSubmitListens) + + resp, err := makeAuthRequest(t, session, "POST", "/apis/web/v1/aliases?artist_id=1&alias=Sayuri", nil) + require.NoError(t, err) + require.Equal(t, 201, resp.StatusCode) + + resp, err = 
http.DefaultClient.Get(host() + "/apis/web/v1/aliases?artist_id=1") + require.NoError(t, err) + require.Equal(t, 200, resp.StatusCode) + var actual []models.Alias + require.NoError(t, json.NewDecoder(resp.Body).Decode(&actual)) + require.Len(t, actual, 2) + assert.Equal(t, actual[0].Alias, "さユり") + assert.Equal(t, actual[0].Source, "Canonical") + assert.Equal(t, actual[1].Alias, "Sayuri") + assert.Equal(t, actual[1].Source, "Manual") + + resp, err = makeAuthRequest(t, session, "POST", "/apis/web/v1/aliases?album_id=1&alias=Sanketsu+Girl", nil) + require.NoError(t, err) + require.Equal(t, 201, resp.StatusCode) + + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/aliases?album_id=1") + require.NoError(t, err) + require.Equal(t, 200, resp.StatusCode) + actual = nil + require.NoError(t, json.NewDecoder(resp.Body).Decode(&actual)) + require.Len(t, actual, 2) + assert.Equal(t, actual[0].Alias, "酸欠少女") + assert.Equal(t, actual[0].Source, "Canonical") + assert.Equal(t, actual[1].Alias, "Sanketsu Girl") + assert.Equal(t, actual[1].Source, "Manual") + + resp, err = makeAuthRequest(t, session, "POST", "/apis/web/v1/aliases?track_id=1&alias=Tower+of+Flower", nil) + require.NoError(t, err) + require.Equal(t, 201, resp.StatusCode) + + resp, err = makeAuthRequest(t, session, "POST", "/apis/web/v1/aliases/primary?track_id=1&alias=Tower+of+Flower", nil) + require.NoError(t, err) + require.Equal(t, 204, resp.StatusCode) + + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/track?id=1") + require.NoError(t, err) + require.Equal(t, 200, resp.StatusCode) + var track models.Track + require.NoError(t, json.NewDecoder(resp.Body).Decode(&track)) + require.Len(t, actual, 2) + assert.Equal(t, track.Title, "Tower of Flower") + + resp, err = makeAuthRequest(t, session, "POST", "/apis/web/v1/aliases/primary?artist_id=1&alias=Sayuri", nil) + require.NoError(t, err) + require.Equal(t, 204, resp.StatusCode) + + // make sure searching works with aliases + + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/search?q=Sanketsu") + require.NoError(t, err) + require.Equal(t, 200, resp.StatusCode) + var results handlers.SearchResults + require.NoError(t, json.NewDecoder(resp.Body).Decode(&results)) + require.Len(t, results.Albums, 1) + assert.Equal(t, results.Albums[0].Title, "酸欠少女") + + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/search?q=Sayuri") + require.NoError(t, err) + require.Equal(t, 200, resp.StatusCode) + results = handlers.SearchResults{} + require.NoError(t, json.NewDecoder(resp.Body).Decode(&results)) + require.Len(t, results.Artists, 1) + assert.Equal(t, results.Artists[0].Name, "Sayuri") // reflects the new primary alias + + truncateTestData(t) +} + +func TestStats(t *testing.T) { + // zeroes + resp, err := http.DefaultClient.Get(host() + "/apis/web/v1/stats") + t.Log(resp) + require.NoError(t, err) + + t.Run("Submit Listens", doSubmitListens) + + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/stats") + t.Log(resp) + require.NoError(t, err) + var actual handlers.StatsResponse + require.NoError(t, json.NewDecoder(resp.Body).Decode(&actual)) + assert.EqualValues(t, 3, actual.ListenCount) + assert.EqualValues(t, 3, actual.TrackCount) + assert.EqualValues(t, 3, actual.AlbumCount) + assert.EqualValues(t, 3, actual.ArtistCount) + assert.EqualValues(t, 0, actual.HoursListened) +} + +func TestListenActivity(t *testing.T) { + + // this test fails when run a bit after midnight + // i'll figure out a better test later + + // t.Run("Submit Listens", doSubmitListens) + + // 
resp, err := http.DefaultClient.Get(host() + "/apis/web/v1/listen-activity?range=3") + // t.Log(resp) + // require.NoError(t, err) + // var actual []db.ListenActivityItem + // require.NoError(t, json.NewDecoder(resp.Body).Decode(&actual)) + // t.Log(actual) + // require.Len(t, actual, 3) + // assert.EqualValues(t, 3, actual[2].Listens) +} + +func TestAuth(t *testing.T) { + // logs in a new session + formdata := url.Values{} + formdata.Set("username", cfg.DefaultUsername()) + formdata.Set("password", cfg.DefaultPassword()) + encoded := formdata.Encode() + resp, err := http.DefaultClient.Post(host()+"/apis/web/v1/login", "application/x-www-form-urlencoded", strings.NewReader(encoded)) + respBytes, _ := io.ReadAll(resp.Body) + t.Logf("Login request response: %s - %s", resp.Status, respBytes) + require.NoError(t, err) + require.Len(t, resp.Cookies(), 1) + s := resp.Cookies()[0].Value + require.NotEmpty(t, s) + + // test update user + req, err := http.NewRequest("PATCH", host()+"/apis/web/v1/user?username=new&password=supersecret", nil) + require.NoError(t, err) + req.AddCookie(&http.Cookie{ + Name: "koito_session", + Value: s, + }) + resp, err = http.DefaultClient.Do(req) + require.NoError(t, err) + require.Equal(t, 204, resp.StatusCode) + + // test /me with updated info + req, err = http.NewRequest("GET", host()+"/apis/web/v1/user/me", nil) + require.NoError(t, err) + req.AddCookie(&http.Cookie{ + Name: "koito_session", + Value: s, + }) + resp, err = http.DefaultClient.Do(req) + require.NoError(t, err) + require.Equal(t, 200, resp.StatusCode) + var me models.User + require.NoError(t, json.NewDecoder(resp.Body).Decode(&me)) + require.Equal(t, "new", me.Username) + + // login with old password fails + formdata = url.Values{} + formdata.Set("username", cfg.DefaultUsername()) + formdata.Set("password", cfg.DefaultPassword()) + encoded = formdata.Encode() + resp, err = http.DefaultClient.Post(host()+"/apis/web/v1/login", "application/x-www-form-urlencoded", strings.NewReader(encoded)) + require.NoError(t, err) + require.Equal(t, 400, resp.StatusCode) + + // reset update so other tests dont fail + req, err = http.NewRequest("PATCH", host()+fmt.Sprintf("/apis/web/v1/user?username=%s&password=%s", cfg.DefaultUsername(), cfg.DefaultPassword()), nil) + require.NoError(t, err) + req.AddCookie(&http.Cookie{ + Name: "koito_session", + Value: s, + }) + resp, err = http.DefaultClient.Do(req) + require.NoError(t, err) + require.Equal(t, 204, resp.StatusCode) + + // creates api key + req, err = http.NewRequest("POST", host()+"/apis/web/v1/user/apikeys?label=testing", nil) + require.NoError(t, err) + req.AddCookie(&http.Cookie{ + Name: "koito_session", + Value: s, + }) + resp, err = http.DefaultClient.Do(req) + require.NoError(t, err) + require.Equal(t, 201, resp.StatusCode) + var response struct { + Key string `json:"key"` + } + require.NoError(t, json.NewDecoder(resp.Body).Decode(&response)) + require.NotEmpty(t, response.Key) + + // validates api key + req, err = http.NewRequest("GET", host()+"/apis/listenbrainz/1/validate-token", nil) + require.NoError(t, err) + req.Header.Add("Authorization", fmt.Sprintf("Token %s", response.Key)) + resp, err = http.DefaultClient.Do(req) + require.NoError(t, err) + var actual handlers.LbzValidateResponse + require.NoError(t, json.NewDecoder(resp.Body).Decode(&actual)) + var expected handlers.LbzValidateResponse + expected.Code = 200 + expected.Message = "Token valid." 
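+ // The expected payload mirrors the ListenBrainz validate-token response shape;
+ // "test" is assumed here to be the suite's default username.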
+ expected.Valid = true + expected.UserName = "test" + assert.True(t, assert.ObjectsAreEqual(expected, actual)) + + // changes api key label + login(t) // i dont care about using the new session anymore + resp, err = makeAuthRequest(t, s, "PATCH", "/apis/web/v1/user/apikeys?id=2&label=well+tested", nil) + require.NoError(t, err) + assert.Equal(t, 200, resp.StatusCode) + resp, err = makeAuthRequest(t, s, "GET", "/apis/web/v1/user/apikeys", nil) + require.NoError(t, err) + var keys []models.ApiKey + err = json.NewDecoder(resp.Body).Decode(&keys) + require.NoError(t, err) + require.GreaterOrEqual(t, len(keys), 2) + require.NotNil(t, keys[1].Label) + assert.Equal(t, "well tested", keys[1].Label) + + // logs out + req, err = http.NewRequest("POST", host()+"/apis/web/v1/logout", nil) + require.NoError(t, err) + req.AddCookie(&http.Cookie{ + Name: "koito_session", + Value: s, + }) + resp, err = http.DefaultClient.Do(req) + require.NoError(t, err) + require.Equal(t, 204, resp.StatusCode) + + // attempts to create an api key - unauthorized + formdata = url.Values{} + formdata.Set("label", "testing") + encoded = formdata.Encode() + req, err = http.NewRequest("POST", host()+"/apis/web/v1/user/apikeys", strings.NewReader(encoded)) + require.NoError(t, err) + req.AddCookie(&http.Cookie{ + Name: "koito_session", + Value: s, + }) + resp, err = http.DefaultClient.Do(req) + require.NoError(t, err) + require.Equal(t, 401, resp.StatusCode) +} + +func TestDeleteListen(t *testing.T) { + login(t) + getApiKey(t, session) + + truncateTestData(t) + + body := `{ + "listen_type": "single", + "payload": [ + { + "listened_at": 1749475719, + "track_metadata": { + "additional_info": { + "artist_mbids": [ + "80b3cb83-b7a3-4f79-ad42-8325cefb3626" + ], + "artist_names": [ + "キタニタツヤ" + ], + "duration_ms": 197270, + "recording_mbid": "4e909c21-e7a8-404d-b75a-0c8c2926efb0", + "release_group_mbid": "89069d92-e495-462c-b189-3431551868ed", + "release_mbid": "e16a49d6-77f3-4d73-b93c-cac855ce6ad5", + "submission_client": "navidrome", + "submission_client_version": "0.56.1 (fa2cf362)" + }, + "artist_name": "キタニタツヤ", + "release_name": "Where Our Blue Is", + "track_name": "Where Our Blue Is" + } + } + ] + }` + + req, err := http.NewRequest("POST", host()+"/apis/listenbrainz/1/submit-listens", strings.NewReader(body)) + require.NoError(t, err) + req.Header.Add("Authorization", fmt.Sprintf("Token %s", apikey)) + resp, err := http.DefaultClient.Do(req) + require.NoError(t, err) + respBytes, err := io.ReadAll(resp.Body) + require.NoError(t, err) + assert.Equal(t, `{"status": "ok"}`, string(respBytes)) + + resp, err = makeAuthRequest(t, session, "DELETE", "/apis/web/v1/listen?track_id=1&unix=1749475719", nil) + require.NoError(t, err) + require.Equal(t, 204, resp.StatusCode) + + // deletes are idempotent + resp, err = makeAuthRequest(t, session, "DELETE", "/apis/web/v1/listen?track_id=1&unix=1749475719", nil) + require.NoError(t, err) + require.Equal(t, 204, resp.StatusCode) + + // listen is deleted + resp, err = http.DefaultClient.Get(host() + "/apis/web/v1/track?id=1") + require.NoError(t, err) + var track models.Track + err = json.NewDecoder(resp.Body).Decode(&track) + require.NoError(t, err) + assert.EqualValues(t, 0, track.ListenCount) +} + +func TestArtistReplaceImage(t *testing.T) { + + t.Run("Submit Listens", doSubmitListens) + + buf := &bytes.Buffer{} + mpw := multipart.NewWriter(buf) + mpw.WriteField("artist_id", "1") + w, err := mpw.CreateFormFile("image", path.Join("..", "static", "yuu.jpg")) + require.NoError(t, err) + f, err 
:= os.Open(path.Join("..", "static", "yuu.jpg")) + require.NoError(t, err) + defer f.Close() + _, err = io.Copy(w, f) + require.NoError(t, err) + require.NoError(t, mpw.Close()) + + req, err := http.NewRequest("POST", host()+"/apis/web/v1/replace-image", buf) + require.NoError(t, err) + req.AddCookie(&http.Cookie{ + Name: "koito_session", + Value: session, + }) + req.Header.Add("Content-Type", mpw.FormDataContentType()) + resp, err := http.DefaultClient.Do(req) + require.NoError(t, err) + require.Equal(t, 200, resp.StatusCode) + response := new(handlers.ReplaceImageResponse) + require.NoError(t, json.NewDecoder(resp.Body).Decode(response)) + require.NotEmpty(t, response.Image) + newid, err := uuid.Parse(response.Image) + require.NoError(t, err) + + a, err := store.GetArtist(context.Background(), db.GetArtistOpts{ID: 1}) + require.NoError(t, err) + assert.NotNil(t, a.Image) + assert.Equal(t, newid, *a.Image) +} + +func TestAlbumReplaceImage(t *testing.T) { + + t.Run("Submit Listens", doSubmitListens) + + buf := &bytes.Buffer{} + mpw := multipart.NewWriter(buf) + mpw.WriteField("album_id", "1") + w, err := mpw.CreateFormFile("image", path.Join("..", "static", "yuu.jpg")) + require.NoError(t, err) + f, err := os.Open(path.Join("..", "static", "yuu.jpg")) + require.NoError(t, err) + defer f.Close() + _, err = io.Copy(w, f) + require.NoError(t, err) + require.NoError(t, mpw.Close()) + + req, err := http.NewRequest("POST", host()+"/apis/web/v1/replace-image", buf) + require.NoError(t, err) + req.AddCookie(&http.Cookie{ + Name: "koito_session", + Value: session, + }) + req.Header.Add("Content-Type", mpw.FormDataContentType()) + resp, err := http.DefaultClient.Do(req) + require.NoError(t, err) + require.Equal(t, 200, resp.StatusCode) + response := new(handlers.ReplaceImageResponse) + require.NoError(t, json.NewDecoder(resp.Body).Decode(response)) + require.NotEmpty(t, response.Image) + newid, err := uuid.Parse(response.Image) + require.NoError(t, err) + + a, err := store.GetAlbum(context.Background(), db.GetAlbumOpts{ID: 1}) + require.NoError(t, err) + assert.NotNil(t, a.Image) + assert.Equal(t, newid, *a.Image) +} diff --git a/engine/middleware/hosts.go b/engine/middleware/hosts.go new file mode 100644 index 0000000..ba8c0fa --- /dev/null +++ b/engine/middleware/hosts.go @@ -0,0 +1,24 @@ +package middleware + +import ( + "net/http" + "slices" + + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/logger" +) + +func AllowedHosts(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + l := logger.Get() + if cfg.AllowAllHosts() { + next.ServeHTTP(w, r) + return + } else if slices.Contains(cfg.AllowedHosts(), r.Host) { + next.ServeHTTP(w, r) + return + } + l.Warn().Msgf("Request denied from host %s. 
If you want to allow requests like this, add the host to your %s variable", r.Host, cfg.ALLOWED_HOSTS_ENV) + w.WriteHeader(http.StatusForbidden) + }) +} diff --git a/engine/middleware/middleware.go b/engine/middleware/middleware.go new file mode 100644 index 0000000..b1b5f48 --- /dev/null +++ b/engine/middleware/middleware.go @@ -0,0 +1,103 @@ +package middleware + +import ( + "context" + "crypto/rand" + "math/big" + "net/http" + "runtime/debug" + "strings" + "time" + + "github.com/go-chi/chi/v5/middleware" + "github.com/rs/zerolog" + + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" +) + +type RequestIDHook struct{} + +func (h RequestIDHook) Run(e *zerolog.Event, level zerolog.Level, msg string) { + if ctx := e.GetCtx(); ctx != nil { + if reqID, ok := ctx.Value("requestID").(string); ok { + e.Str("request_id", reqID) + } + } +} + +const requestIDKey MiddlwareContextKey = "requestID" + +const base62Chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + +func GenerateRequestID() string { + const length = 8 // ~0.23% chance of collision in 1M requests + id := make([]byte, length) + for i := 0; i < length; i++ { + n, _ := rand.Int(rand.Reader, big.NewInt(int64(len(base62Chars)))) + id[i] = base62Chars[n.Int64()] + } + return string(id) +} + +func WithRequestID(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + reqID := GenerateRequestID() + ctx := context.WithValue(r.Context(), requestIDKey, reqID) + + w.Header().Set("X-Request-ID", reqID) + next.ServeHTTP(w, r.WithContext(ctx)) + }) +} + +// GetRequestID extracts the request ID from context +func GetRequestID(ctx context.Context) string { + if val, ok := ctx.Value(requestIDKey).(string); ok { + return val + } + return "" +} + +// Logger logs requests and injects a request-scoped logger with a request ID into the context. +func Logger(baseLogger *zerolog.Logger) func(next http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + fn := func(w http.ResponseWriter, r *http.Request) { + reqID := GetRequestID(r.Context()) + l := baseLogger.With().Str("request_id", reqID).Logger() + + // Inject logger with request_id into the context + r = logger.Inject(r, &l) + + ww := middleware.NewWrapResponseWriter(w, r.ProtoMajor) + t1 := time.Now() + defer func() { + t2 := time.Now() + if rec := recover(); rec != nil { + l.Error(). + Str("type", "error"). + Timestamp(). + Interface("recover_info", rec). + Bytes("debug_stack", debug.Stack()). + Msg("log system error") + utils.WriteError(ww, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError) + return + } + pathS := strings.Split(r.URL.Path, "/") + if len(pathS) > 1 && pathS[1] == "apis" { + l.Info(). + Str("type", "access"). + Timestamp(). + Msgf("Received %s %s - Responded with %d in %.2fms", r.Method, r.URL.Path, ww.Status(), float64(t2.Sub(t1).Nanoseconds())/1_000_000.0) + } else { + l.Debug(). + Str("type", "access"). + Timestamp(). 
+ Msgf("Received %s %s - Responded with %d in %.2fms", r.Method, r.URL.Path, ww.Status(), float64(t2.Sub(t1).Nanoseconds())/1_000_000.0) + } + + }() + next.ServeHTTP(ww, r) + } + return http.HandlerFunc(fn) + } +} diff --git a/engine/middleware/validate.go b/engine/middleware/validate.go new file mode 100644 index 0000000..42b7068 --- /dev/null +++ b/engine/middleware/validate.go @@ -0,0 +1,106 @@ +package middleware + +import ( + "context" + "net/http" + "strings" + "time" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/models" + "github.com/gabehf/koito/internal/utils" + "github.com/google/uuid" +) + +type MiddlwareContextKey string + +const ( + UserContextKey MiddlwareContextKey = "user" + apikeyContextKey MiddlwareContextKey = "apikeyID" +) + +func ValidateSession(store db.DB) func(next http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + l := logger.FromContext(r.Context()) + cookie, err := r.Cookie("koito_session") + var sid uuid.UUID + if err == nil { + sid, err = uuid.Parse(cookie.Value) + if err != nil { + utils.WriteError(w, "session cookie is invalid", http.StatusUnauthorized) + return + } + } + + l.Debug().Msg("Retrieved login cookie from request") + + u, err := store.GetUserBySession(r.Context(), sid) + if err != nil { + l.Err(err).Msg("Failed to get user from session") + utils.WriteError(w, "internal server error", http.StatusInternalServerError) + return + } + if u == nil { + utils.WriteError(w, "unauthorized", http.StatusUnauthorized) + return + } + + ctx := context.WithValue(r.Context(), UserContextKey, u) + r = r.WithContext(ctx) + + l.Debug().Msgf("Refreshing session for user '%s'", u.Username) + + store.RefreshSession(r.Context(), sid, time.Now().Add(30*24*time.Hour)) + + l.Debug().Msgf("Refreshed session for user '%s'", u.Username) + + next.ServeHTTP(w, r) + }) + } +} + +func ValidateApiKey(store db.DB) func(next http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + l := logger.FromContext(ctx) + + authh := r.Header.Get("Authorization") + s := strings.Split(authh, "Token ") + if len(s) < 2 { + l.Debug().Msg("Authorization header must be formatted 'Token {token}'") + utils.WriteError(w, "unauthorized", http.StatusUnauthorized) + return + } + + key := s[1] + + u, err := store.GetUserByApiKey(ctx, key) + if err != nil { + l.Err(err).Msg("Failed to get user from database using api key") + utils.WriteError(w, "internal server error", http.StatusInternalServerError) + return + } + if u == nil { + l.Debug().Msg("Api key does not exist") + utils.WriteError(w, "unauthorized", http.StatusUnauthorized) + return + } + + ctx = context.WithValue(r.Context(), UserContextKey, u) + r = r.WithContext(ctx) + + next.ServeHTTP(w, r) + }) + } +} + +func GetUserFromContext(ctx context.Context) *models.User { + user, ok := ctx.Value(UserContextKey).(*models.User) + if !ok { + return nil + } + return user +} diff --git a/engine/routes.go b/engine/routes.go new file mode 100644 index 0000000..732fadd --- /dev/null +++ b/engine/routes.go @@ -0,0 +1,140 @@ +package engine + +import ( + "net/http" + "os" + "path/filepath" + "strings" + "sync/atomic" + "time" + + "github.com/gabehf/koito/engine/handlers" + "github.com/gabehf/koito/engine/middleware" + "github.com/gabehf/koito/internal/cfg" + 
"github.com/gabehf/koito/internal/db" + mbz "github.com/gabehf/koito/internal/mbz" + "github.com/go-chi/chi/v5" + chimiddleware "github.com/go-chi/chi/v5/middleware" + "github.com/go-chi/cors" + "github.com/go-chi/httprate" +) + +func bindRoutes( + r *chi.Mux, + ready *atomic.Bool, + db db.DB, + mbz mbz.MusicBrainzCaller, +) { + r.With(chimiddleware.RequestSize(5<<20)). + With(middleware.AllowedHosts). + Get("/images/{size}/{filename}", handlers.ImageHandler(db)) + + r.Route("/apis/web/v1", func(r chi.Router) { + r.Use(middleware.AllowedHosts) + r.Get("/artist", handlers.GetArtistHandler(db)) + r.Get("/album", handlers.GetAlbumHandler(db)) + r.Get("/track", handlers.GetTrackHandler(db)) + r.Get("/top-tracks", handlers.GetTopTracksHandler(db)) + r.Get("/top-albums", handlers.GetTopAlbumsHandler(db)) + r.Get("/top-artists", handlers.GetTopArtistsHandler(db)) + r.Get("/listens", handlers.GetListensHandler(db)) + r.Get("/listen-activity", handlers.GetListenActivityHandler(db)) + r.Get("/stats", handlers.StatsHandler(db)) + r.Get("/search", handlers.SearchHandler(db)) + r.Get("/aliases", handlers.GetAliasesHandler(db)) + r.Post("/logout", handlers.LogoutHandler(db)) + if !cfg.RateLimitDisabled() { + r.With(httprate.Limit( + 10, + time.Minute, + httprate.WithLimitHandler(func(w http.ResponseWriter, r *http.Request) { + http.Error(w, `{"error":"too many requests"}`, http.StatusTooManyRequests) + }), + )).Post("/login", handlers.LoginHandler(db)) + } else { + r.Post("/login", handlers.LoginHandler(db)) + } + + r.Get("/health", func(w http.ResponseWriter, r *http.Request) { + if !ready.Load() { + http.Error(w, "not ready", http.StatusServiceUnavailable) + return + } + w.WriteHeader(http.StatusOK) + }) + + r.Group(func(r chi.Router) { + r.Use(middleware.ValidateSession(db)) + r.Post("/replace-image", handlers.ReplaceImageHandler(db)) + r.Post("/merge/tracks", handlers.MergeTracksHandler(db)) + r.Post("/merge/albums", handlers.MergeReleaseGroupsHandler(db)) + r.Post("/merge/artists", handlers.MergeArtistsHandler(db)) + r.Delete("/artist", handlers.DeleteArtistHandler(db)) + r.Delete("/album", handlers.DeleteAlbumHandler(db)) + r.Delete("/track", handlers.DeleteTrackHandler(db)) + r.Delete("/listen", handlers.DeleteListenHandler(db)) + r.Post("/aliases", handlers.CreateAliasHandler(db)) + r.Delete("/aliases", handlers.DeleteAliasHandler(db)) + r.Post("/aliases/primary", handlers.SetPrimaryAliasHandler(db)) + r.Get("/user/apikeys", handlers.GetApiKeysHandler(db)) + r.Post("/user/apikeys", handlers.GenerateApiKeyHandler(db)) + r.Patch("/user/apikeys", handlers.UpdateApiKeyLabelHandler(db)) + r.Delete("/user/apikeys", handlers.DeleteApiKeyHandler(db)) + r.Get("/user/me", handlers.MeHandler(db)) + r.Patch("/user", handlers.UpdateUserHandler(db)) + }) + }) + + r.Route("/apis/listenbrainz/1", func(r chi.Router) { + r.Use(cors.Handler(cors.Options{ + AllowedOrigins: []string{"*"}, + AllowedHeaders: []string{"Content-Type", "Authorization"}, + })) + + r.With(middleware.ValidateApiKey(db)).Post("/submit-listens", handlers.LbzSubmitListenHandler(db, mbz)) + r.With(middleware.ValidateApiKey(db)).Get("/validate-token", handlers.LbzValidateTokenHandler(db)) + }) + + // serve react client + workDir, _ := os.Getwd() + filesDir := http.Dir(filepath.Join(workDir, "client/build/client")) + fileServer(r, "/", filesDir) + + // serve client public files + filesDir = http.Dir(filepath.Join(workDir, "client/public")) + publicServer(r, "/public", filesDir) +} + +// FileServer conveniently sets up a http.FileServer handler to 
serve +// static files from a http.FileSystem. +func fileServer(r chi.Router, path string, root http.FileSystem) { + if strings.ContainsAny(path, "{}*") { + panic("FileServer does not permit any URL parameters.") + } + + // Serve static files + fs := http.FileServer(root) + r.Get(path+"*", func(w http.ResponseWriter, r *http.Request) { + // Check if file exists + filePath := filepath.Join("client/build/client", strings.TrimPrefix(r.URL.Path, path)) + if _, err := os.Stat(filePath); os.IsNotExist(err) { + // File doesn't exist, serve index.html + http.ServeFile(w, r, filepath.Join("client/build/client", "index.html")) + return + } + + // Serve file normally + fs.ServeHTTP(w, r) + }) +} + +func publicServer(r chi.Router, path string, root http.FileSystem) { + if strings.ContainsAny(path, "{}*") { + panic("FileServer does not permit any URL parameters.") + } + fs := http.FileServer(root) + r.Get(path+"*", func(w http.ResponseWriter, r *http.Request) { + r.URL.Path = strings.TrimPrefix(r.URL.Path, path) + fs.ServeHTTP(w, r) + }) +} diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..874f117 --- /dev/null +++ b/go.mod @@ -0,0 +1,66 @@ +module github.com/gabehf/koito + +go 1.23.7 + +require ( + github.com/go-chi/chi/v5 v5.2.1 + github.com/go-chi/cors v1.2.1 + github.com/google/uuid v1.6.0 + github.com/h2non/bimg v1.1.9 + github.com/jackc/pgx/v5 v5.7.4 + github.com/ory/dockertest/v3 v3.12.0 + github.com/pressly/goose/v3 v3.24.3 + github.com/rs/zerolog v1.34.0 + github.com/stretchr/testify v1.10.0 + golang.org/x/sync v0.14.0 + golang.org/x/time v0.11.0 +) + +require ( + github.com/gosimple/unidecode v1.0.1 + golang.org/x/crypto v0.38.0 +) + +require ( + dario.cat/mergo v1.0.2 // indirect + github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect + github.com/containerd/continuity v0.4.5 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/docker/cli v28.1.1+incompatible // indirect + github.com/docker/docker v28.1.1+incompatible // indirect + github.com/docker/go-connections v0.5.0 // indirect + github.com/docker/go-units v0.5.0 // indirect + github.com/go-chi/httprate v0.15.0 // indirect + github.com/go-viper/mapstructure/v2 v2.2.1 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect + github.com/jackc/puddle/v2 v2.2.2 // indirect + github.com/klauspost/cpuid/v2 v2.2.10 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mfridman/interpolate v0.0.2 // indirect + github.com/moby/docker-image-spec v1.3.1 // indirect + github.com/moby/sys/user v0.4.0 // indirect + github.com/moby/term v0.5.2 // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.1.1 // indirect + github.com/opencontainers/runc v1.3.0 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/sethvargo/go-retry v0.3.0 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect + github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect + 
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect + github.com/xeipuuv/gojsonschema v1.2.0 // indirect + github.com/zeebo/xxh3 v1.0.2 // indirect + go.uber.org/multierr v1.11.0 // indirect + golang.org/x/sys v0.33.0 // indirect + golang.org/x/text v0.25.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..6ab8ff6 --- /dev/null +++ b/go.sum @@ -0,0 +1,189 @@ +dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= +dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= +filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= +filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= +github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c h1:udKWzYgxTojEKWjV8V+WSxDXJ4NFATAsZjh8iIbsQIg= +github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw= +github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/containerd/continuity v0.4.5 h1:ZRoN1sXq9u7V6QoHMcVWGhOwDFqZ4B9i5H6un1Wh0x4= +github.com/containerd/continuity v0.4.5/go.mod h1:/lNJvtJKUQStBzpVQ1+rasXO1LAWtUQssk28EZvJ3nE= +github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY= +github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/docker/cli v28.1.1+incompatible h1:eyUemzeI45DY7eDPuwUcmDyDj1pM98oD5MdSpiItp8k= +github.com/docker/cli v28.1.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/docker v28.1.1+incompatible h1:49M11BFLsVO1gxY9UX9p/zwkE/rswggs8AdFmXQw51I= +github.com/docker/docker v28.1.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= +github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/go-chi/chi/v5 v5.2.1 h1:KOIHODQj58PmL80G2Eak4WdvUzjSJSm0vG72crDCqb8= +github.com/go-chi/chi/v5 v5.2.1/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops= +github.com/go-chi/cors v1.2.1 
h1:xEC8UT3Rlp2QuWNEr4Fs/c2EAGVKBwy/1vHx3bppil4= +github.com/go-chi/cors v1.2.1/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58= +github.com/go-chi/httprate v0.15.0 h1:j54xcWV9KGmPf/X4H32/aTH+wBlrvxL7P+SdnRqxh5g= +github.com/go-chi/httprate v0.15.0/go.mod h1:rzGHhVrsBn3IMLYDOZQsSU4fJNWcjui4fWKJcCId1R4= +github.com/go-sql-driver/mysql v1.9.2 h1:4cNKDYQ1I84SXslGddlsrMhc8k4LeDVj6Ad6WRjiHuU= +github.com/go-sql-driver/mysql v1.9.2/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU= +github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss= +github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gosimple/unidecode v1.0.1 h1:hZzFTMMqSswvf0LBJZCZgThIZrpDHFXux9KeGmn6T/o= +github.com/gosimple/unidecode v1.0.1/go.mod h1:CP0Cr1Y1kogOtx0bJblKzsVWrqYaqfNOnHzpgWw4Awc= +github.com/h2non/bimg v1.1.9 h1:WH20Nxko9l/HFm4kZCA3Phbgu2cbHvYzxwxn9YROEGg= +github.com/h2non/bimg v1.1.9/go.mod h1:R3+UiYwkK4rQl6KVFTOFJHitgLbZXBZNFh2cv3AEbp8= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.7.4 h1:9wKznZrhWa2QiHL+NjTSPP6yjl3451BX3imWDnokYlg= +github.com/jackc/pgx/v5 v5.7.4/go.mod h1:ncY89UGWxg82EykZUwSpUKEfccBGGYq1xjrOpsbsfGQ= +github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= +github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE= +github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable 
v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mfridman/interpolate v0.0.2 h1:pnuTK7MQIxxFz1Gr+rjSIx9u7qVjf5VOoM/u6BbAxPY= +github.com/mfridman/interpolate v0.0.2/go.mod h1:p+7uk6oE07mpE/Ik1b8EckO0O4ZXiGAfshKBWLUM9Xg= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs= +github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs= +github.com/moby/term v0.5.2 h1:6qk3FJAFDs6i/q3W/pQ97SX192qKfZgGjCQqfCJkgzQ= +github.com/moby/term v0.5.2/go.mod h1:d3djjFCrjnB+fl8NJux+EJzu0msscUP+f8it8hPkFLc= +github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4= +github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= +github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= +github.com/opencontainers/runc v1.3.0 h1:cvP7xbEvD0QQAs0nZKLzkVog2OPZhI/V2w3WmTmUSXI= +github.com/opencontainers/runc v1.3.0/go.mod h1:9wbWt42gV+KRxKRVVugNP6D5+PQciRbenB4fLVsqGPs= +github.com/ory/dockertest/v3 v3.12.0 h1:3oV9d0sDzlSQfHtIaB5k6ghUCVMVLpAY8hwrqoCyRCw= +github.com/ory/dockertest/v3 v3.12.0/go.mod h1:aKNDTva3cp8dwOWwb9cWuX84aH5akkxXRvO7KCwWVjE= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pressly/goose/v3 v3.24.3 h1:DSWWNwwggVUsYZ0X2VitiAa9sKuqtBfe+Jr9zFGwWlM= +github.com/pressly/goose/v3 v3.24.3/go.mod h1:v9zYL4xdViLHCUUJh/mhjnm6JrK7Eul8AS93IxiZM4E= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0= +github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY= +github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ= +github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE= +github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod 
h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0= +github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= +go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= +go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8= +golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw= +golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6 h1:y5zboxd6LQAqYIhHnB48p0ByQ/GnQx2BE33L8BOHQkI= +golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6/go.mod h1:U6Lno4MTRCDY+Ba7aCcauB9T60gsv5s4ralQzP72ZoQ= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.14.0 h1:woo0S4Yywslg6hp4eUFjTVOyKt0RookbpAHG4c1HmhQ= +golang.org/x/sync v0.14.0/go.mod 
h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= +golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4= +golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA= +golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0= +golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= +gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= +modernc.org/libc v1.65.0 h1:e183gLDnAp9VJh6gWKdTy0CThL9Pt7MfcR/0bgb7Y1Y= +modernc.org/libc v1.65.0/go.mod h1:7m9VzGq7APssBTydds2zBcxGREwvIGpuUBaKTXdm2Qs= +modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU= +modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg= +modernc.org/memory v1.10.0 h1:fzumd51yQ1DxcOxSO+S6X7+QTuVU+n8/Aj7swYjFfC4= +modernc.org/memory v1.10.0/go.mod 
h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw= +modernc.org/sqlite v1.37.0 h1:s1TMe7T3Q3ovQiK2Ouz4Jwh7dw4ZDqbebSDTlSJdfjI= +modernc.org/sqlite v1.37.0/go.mod h1:5YiWv+YviqGMuGw4V+PNplcyaJ5v+vQd7TQOgkACoJM= diff --git a/internal/catalog/associate_album.go b/internal/catalog/associate_album.go new file mode 100644 index 0000000..af39152 --- /dev/null +++ b/internal/catalog/associate_album.go @@ -0,0 +1,243 @@ +package catalog + +import ( + "context" + "errors" + "slices" + + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/images" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/mbz" + "github.com/gabehf/koito/internal/models" + "github.com/gabehf/koito/internal/utils" + "github.com/google/uuid" + "github.com/jackc/pgx/v5" +) + +type AssociateAlbumOpts struct { + Artists []*models.Artist + ReleaseMbzID uuid.UUID + ReleaseGroupMbzID uuid.UUID + ReleaseName string + TrackName string // required + Mbzc mbz.MusicBrainzCaller +} + +func AssociateAlbum(ctx context.Context, d db.DB, opts AssociateAlbumOpts) (*models.Album, error) { + l := logger.FromContext(ctx) + if opts.TrackName == "" { + return nil, errors.New("required parameter TrackName missing") + } + releaseTitle := opts.ReleaseName + if releaseTitle == "" { + releaseTitle = opts.TrackName + } + if opts.ReleaseMbzID != uuid.Nil { + l.Debug().Msgf("Associating album '%s' by MusicBrainz release ID", releaseTitle) + return matchAlbumByMbzReleaseID(ctx, d, opts) + } else { + l.Debug().Msgf("Associating album '%s' by title and artist", releaseTitle) + return matchAlbumByTitle(ctx, d, opts) + } +} + +func matchAlbumByMbzReleaseID(ctx context.Context, d db.DB, opts AssociateAlbumOpts) (*models.Album, error) { + l := logger.FromContext(ctx) + a, err := d.GetAlbum(ctx, db.GetAlbumOpts{MusicBrainzID: opts.ReleaseMbzID}) + if err == nil { + l.Debug().Msgf("Found release '%s' by MusicBrainz Release ID", a.Title) + return &models.Album{ + ID: a.ID, + MbzID: &opts.ReleaseMbzID, + Title: a.Title, + VariousArtists: a.VariousArtists, + Image: a.Image, + }, nil + } else if !errors.Is(err, pgx.ErrNoRows) { + return nil, err + } else { + l.Debug().Msgf("Album '%s' could not be found by MusicBrainz Release ID", opts.ReleaseName) + rg, err := createOrUpdateAlbumWithMbzReleaseID(ctx, d, opts) + if err != nil { + return matchAlbumByTitle(ctx, d, opts) + } + return rg, nil + } +} + +func createOrUpdateAlbumWithMbzReleaseID(ctx context.Context, d db.DB, opts AssociateAlbumOpts) (*models.Album, error) { + l := logger.FromContext(ctx) + release, err := opts.Mbzc.GetRelease(ctx, opts.ReleaseMbzID) + if err != nil { + l.Warn().Msg("MusicBrainz unreachable, falling back to release title matching") + return matchAlbumByTitle(ctx, d, opts) + } + var album *models.Album + titles := []string{release.Title, opts.ReleaseName} + utils.Unique(&titles) + l.Debug().Msgf("Searching for albums '%v' from artist id %d in DB", titles, opts.Artists[0].ID) + album, err = d.GetAlbum(ctx, db.GetAlbumOpts{ + ArtistID: opts.Artists[0].ID, + Titles: titles, + }) + if err == nil { + l.Debug().Msgf("Found album %s, updating with MusicBrainz Release ID...", album.Title) + err := d.UpdateAlbum(ctx, db.UpdateAlbumOpts{ + ID: album.ID, + MusicBrainzID: opts.ReleaseMbzID, + }) + if err != nil { + l.Err(err).Msg("Failed to update album with MusicBrainz Release ID") + return nil, err + } + l.Debug().Msgf("Updated album '%s' with MusicBrainz Release ID", album.Title) + if opts.ReleaseGroupMbzID != 
uuid.Nil { + aliases, err := opts.Mbzc.GetReleaseTitles(ctx, opts.ReleaseGroupMbzID) + if err == nil { + l.Debug().Msgf("Associating aliases '%s' with Release '%s'", aliases, album.Title) + err = d.SaveAlbumAliases(ctx, album.ID, aliases, "MusicBrainz") + if err != nil { + l.Err(err).Msg("Failed to save aliases") + } + } else { + l.Info().AnErr("err", err).Msg("Failed to get release group from MusicBrainz") + } + } + } else if !errors.Is(err, pgx.ErrNoRows) { + l.Err(err).Msg("Error while searching for album by MusicBrainz Release ID") + return nil, err + } else { + l.Debug().Msgf("Album %s could not be found. Creating...", release.Title) + var variousArtists bool + for _, artistCredit := range release.ArtistCredit { + if artistCredit.Name == "Various Artists" { + l.Debug().Msgf("MusicBrainz release group '%s' detected as being a Various Artists compilation release", release.Title) + variousArtists = true + } + } + l.Debug().Msg("Searching for album images...") + var imgid uuid.UUID + imgUrl, err := images.GetAlbumImage(ctx, images.AlbumImageOpts{ + Artists: utils.UniqueIgnoringCase(slices.Concat(utils.FlattenMbzArtistCreditNames(release.ArtistCredit), utils.FlattenArtistNames(opts.Artists))), + Album: release.Title, + ReleaseMbzID: &opts.ReleaseMbzID, + }) + if err == nil && imgUrl != "" { + var size ImageSize + if cfg.FullImageCacheEnabled() { + size = ImageSizeFull + } else { + size = ImageSizeLarge + } + imgid = uuid.New() + l.Debug().Msg("Downloading album image from source...") + err = DownloadAndCacheImage(ctx, imgid, imgUrl, size) + if err != nil { + l.Err(err).Msg("Failed to cache image") + } + } + if err != nil { + l.Debug().Msgf("Failed to get album images for %s: %s", release.Title, err.Error()) + } + album, err = d.SaveAlbum(ctx, db.SaveAlbumOpts{ + Title: release.Title, + MusicBrainzID: opts.ReleaseMbzID, + ArtistIDs: utils.FlattenArtistIDs(opts.Artists), + VariousArtists: variousArtists, + Image: imgid, + ImageSrc: imgUrl, + }) + if err != nil { + return nil, err + } + if opts.ReleaseGroupMbzID != uuid.Nil { + aliases, err := opts.Mbzc.GetReleaseTitles(ctx, opts.ReleaseGroupMbzID) + if err == nil { + l.Debug().Msgf("Associating aliases '%s' with Release '%s'", aliases, album.Title) + err = d.SaveAlbumAliases(ctx, album.ID, aliases, "MusicBrainz") + if err != nil { + l.Err(err).Msg("Failed to save aliases") + } + } else { + l.Info().AnErr("err", err).Msg("Failed to get release group from MusicBrainz") + } + } + l.Info().Msgf("Created album '%s' with MusicBrainz Release ID", album.Title) + } + return &models.Album{ + ID: album.ID, + MbzID: &opts.ReleaseMbzID, + Title: album.Title, + VariousArtists: album.VariousArtists, + }, nil +} + +func matchAlbumByTitle(ctx context.Context, d db.DB, opts AssociateAlbumOpts) (*models.Album, error) { + l := logger.FromContext(ctx) + var releaseName string + if opts.ReleaseName != "" { + releaseName = opts.ReleaseName + } else { + releaseName = opts.TrackName + } + a, err := d.GetAlbum(ctx, db.GetAlbumOpts{ + Title: releaseName, + ArtistID: opts.Artists[0].ID, + }) + if err == nil { + l.Debug().Msgf("Found album '%s' by artist and title", a.Title) + if a.MbzID == nil && opts.ReleaseMbzID != uuid.Nil { + l.Debug().Msgf("Updating album with id %d with MusicBrainz ID %s", a.ID, opts.ReleaseMbzID) + err = d.UpdateAlbum(ctx, db.UpdateAlbumOpts{ + ID: a.ID, + MusicBrainzID: opts.ReleaseMbzID, + }) + if err != nil { + l.Err(err).Msg("Failed to associate existing release with MusicBrainz ID") + } + } + } else if !errors.Is(err, pgx.ErrNoRows) { + 
return nil, err + } else { + var imgid uuid.UUID + imgUrl, err := images.GetAlbumImage(ctx, images.AlbumImageOpts{ + Artists: utils.FlattenArtistNames(opts.Artists), + Album: opts.ReleaseName, + ReleaseMbzID: &opts.ReleaseMbzID, + }) + if err == nil && imgUrl != "" { + var size ImageSize + if cfg.FullImageCacheEnabled() { + size = ImageSizeFull + } else { + size = ImageSizeLarge + } + imgid = uuid.New() + l.Debug().Msg("Downloading album image from source...") + err = DownloadAndCacheImage(ctx, imgid, imgUrl, size) + if err != nil { + l.Err(err).Msg("Failed to cache image") + } + } + if err != nil { + l.Debug().Msgf("Failed to get album images for %s: %s", opts.ReleaseName, err.Error()) + } + a, err = d.SaveAlbum(ctx, db.SaveAlbumOpts{ + Title: releaseName, + ArtistIDs: utils.FlattenArtistIDs(opts.Artists), + Image: imgid, + MusicBrainzID: opts.ReleaseMbzID, + ImageSrc: imgUrl, + }) + if err != nil { + return nil, err + } + l.Info().Msgf("Created album '%s' with artist and title", a.Title) + } + return &models.Album{ + ID: a.ID, + Title: a.Title, + }, nil +} diff --git a/internal/catalog/associate_artists.go b/internal/catalog/associate_artists.go new file mode 100644 index 0000000..0014b3e --- /dev/null +++ b/internal/catalog/associate_artists.go @@ -0,0 +1,231 @@ +package catalog + +import ( + "context" + "errors" + "slices" + "strings" + + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/images" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/mbz" + "github.com/gabehf/koito/internal/models" + "github.com/google/uuid" + "github.com/jackc/pgx/v5" +) + +type AssociateArtistsOpts struct { + ArtistMbzIDs []uuid.UUID + ArtistNames []string + ArtistName string + TrackTitle string + Mbzc mbz.MusicBrainzCaller +} + +func AssociateArtists(ctx context.Context, d db.DB, opts AssociateArtistsOpts) ([]*models.Artist, error) { + l := logger.FromContext(ctx) + + var result []*models.Artist + + if len(opts.ArtistMbzIDs) > 0 { + l.Debug().Msg("Associating artists by MusicBrainz ID(s)") + mbzMatches, err := matchArtistsByMBID(ctx, d, opts) + if err != nil { + return nil, err + } + result = append(result, mbzMatches...) + } + + if len(opts.ArtistNames) > len(result) { + l.Debug().Msg("Associating artists by list of artist names") + nameMatches, err := matchArtistsByNames(ctx, opts.ArtistNames, result, d) + if err != nil { + return nil, err + } + result = append(result, nameMatches...) + } + + if len(result) < 1 { + allArtists := slices.Concat(opts.ArtistNames, ParseArtists(opts.ArtistName, opts.TrackTitle)) + l.Debug().Msgf("Associating artists by artist name(s) %v and track title '%s'", allArtists, opts.TrackTitle) + fallbackMatches, err := matchArtistsByNames(ctx, allArtists, nil, d) + if err != nil { + return nil, err + } + result = append(result, fallbackMatches...) 
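+		// The fallback above runs only when the MusicBrainz-ID and name-list passes
+		// matched nothing: it rebuilds the candidate list from opts.ArtistNames plus
+		// ParseArtists(opts.ArtistName, opts.TrackTitle), which splits feat./"·"-delimited
+		// credits out of the artist and track-title strings before the name lookup.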
+ } + + return result, nil +} + +func matchArtistsByMBID(ctx context.Context, d db.DB, opts AssociateArtistsOpts) ([]*models.Artist, error) { + l := logger.FromContext(ctx) + var result []*models.Artist + + for _, id := range opts.ArtistMbzIDs { + if id == uuid.Nil { + l.Warn().Msg("Provided artist has uuid.Nil MusicBrainzID") + return matchArtistsByNames(ctx, opts.ArtistNames, result, d) + } + a, err := d.GetArtist(ctx, db.GetArtistOpts{ + MusicBrainzID: id, + }) + if err == nil { + l.Debug().Msgf("Artist '%s' found by MusicBrainz ID", a.Name) + result = append(result, a) + continue + } + + if !errors.Is(err, pgx.ErrNoRows) { + return nil, err + } + + if len(opts.ArtistNames) < 1 { + opts.ArtistNames = slices.Concat(opts.ArtistNames, ParseArtists(opts.ArtistName, opts.TrackTitle)) + } + a, err = resolveAliasOrCreateArtist(ctx, id, opts.ArtistNames, d, opts.Mbzc) + if err != nil { + l.Warn().Msg("MusicBrainz unreachable, falling back to artist name matching") + return matchArtistsByNames(ctx, opts.ArtistNames, result, d) + // return nil, err + } + result = append(result, a) + } + return result, nil +} +func resolveAliasOrCreateArtist(ctx context.Context, mbzID uuid.UUID, names []string, d db.DB, mbz mbz.MusicBrainzCaller) (*models.Artist, error) { + l := logger.FromContext(ctx) + + aliases, err := mbz.GetArtistPrimaryAliases(ctx, mbzID) + if err != nil { + return nil, err + } + l.Debug().Msgf("Got aliases %v from MusicBrainz", aliases) + + for _, alias := range aliases { + a, err := d.GetArtist(ctx, db.GetArtistOpts{ + Name: alias, + }) + if err == nil && (a.MbzID == nil || *a.MbzID == uuid.Nil) { + a.MbzID = &mbzID + l.Debug().Msgf("Alias '%s' found in DB. Associating with MusicBrainz ID...", alias) + if updateErr := d.UpdateArtist(ctx, db.UpdateArtistOpts{ID: a.ID, MusicBrainzID: mbzID}); updateErr != nil { + return nil, updateErr + } + if saveAliasErr := d.SaveArtistAliases(ctx, a.ID, aliases, "MusicBrainz"); saveAliasErr != nil { + return nil, saveAliasErr + } + return a, nil + } + } + + canonical := aliases[0] + for _, alias := range aliases { + for _, name := range names { + if strings.EqualFold(alias, name) { + l.Debug().Msgf("Canonical name for artist is '%s'", alias) + canonical = alias + break + } + } + } + + var imgid uuid.UUID + imgUrl, err := images.GetArtistImage(ctx, images.ArtistImageOpts{ + Aliases: aliases, + }) + if err == nil && imgUrl != "" { + var size ImageSize + if cfg.FullImageCacheEnabled() { + size = ImageSizeFull + } else { + size = ImageSizeLarge + } + imgid = uuid.New() + l.Debug().Msg("Downloading artist image from source...") + err = DownloadAndCacheImage(ctx, imgid, imgUrl, size) + if err != nil { + l.Err(err).Msg("Failed to cache image") + } + } else if err != nil { + l.Warn().Msgf("Failed to get artist image from ImageSrc: %s", err.Error()) + } + + u, err := d.SaveArtist(ctx, db.SaveArtistOpts{ + MusicBrainzID: mbzID, + Name: canonical, + Aliases: aliases, + Image: imgid, + ImageSrc: imgUrl, + }) + if err != nil { + return nil, err + } + l.Info().Msgf("Created artist '%s' with MusicBrainz Artist ID", canonical) + return u, nil +} + +func matchArtistsByNames(ctx context.Context, names []string, existing []*models.Artist, d db.DB) ([]*models.Artist, error) { + l := logger.FromContext(ctx) + var result []*models.Artist + + for _, name := range names { + if artistExists(name, existing) || artistExists(name, result) { + l.Debug().Msgf("Artist '%s' already found, skipping...", name) + continue + } + a, err := d.GetArtist(ctx, db.GetArtistOpts{ + Name: name, + 
}) + if err == nil { + l.Debug().Msgf("Artist '%s' found in DB", name) + result = append(result, a) + continue + } + if errors.Is(err, pgx.ErrNoRows) { + var imgid uuid.UUID + imgUrl, err := images.GetArtistImage(ctx, images.ArtistImageOpts{ + Aliases: []string{name}, + }) + if err == nil && imgUrl != "" { + var size ImageSize + if cfg.FullImageCacheEnabled() { + size = ImageSizeFull + } else { + size = ImageSizeLarge + } + imgid = uuid.New() + l.Debug().Msg("Downloading artist image from source...") + err = DownloadAndCacheImage(ctx, imgid, imgUrl, size) + if err != nil { + l.Err(err).Msg("Failed to cache image") + } + } else if err != nil { + l.Debug().Msgf("Failed to get artist images for %s: %s", name, err.Error()) + } + a, err = d.SaveArtist(ctx, db.SaveArtistOpts{Name: name, Image: imgid, ImageSrc: imgUrl}) + if err != nil { + return nil, err + } + l.Info().Msgf("Created artist '%s' with artist name", name) + result = append(result, a) + } else { + return nil, err + } + } + return result, nil +} + +func artistExists(name string, artists []*models.Artist) bool { + for _, a := range artists { + allAliases := append(a.Aliases, a.Name) + for _, alias := range allAliases { + if strings.EqualFold(name, alias) { + return true + } + } + } + return false +} diff --git a/internal/catalog/associate_track.go b/internal/catalog/associate_track.go new file mode 100644 index 0000000..5304c0b --- /dev/null +++ b/internal/catalog/associate_track.go @@ -0,0 +1,119 @@ +package catalog + +import ( + "context" + "errors" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/mbz" + "github.com/gabehf/koito/internal/models" + "github.com/google/uuid" + "github.com/jackc/pgx/v5" +) + +type AssociateTrackOpts struct { + ArtistIDs []int32 + AlbumID int32 + TrackMbzID uuid.UUID + TrackName string + Duration int32 + Mbzc mbz.MusicBrainzCaller +} + +func AssociateTrack(ctx context.Context, d db.DB, opts AssociateTrackOpts) (*models.Track, error) { + l := logger.FromContext(ctx) + if opts.TrackName == "" { + return nil, errors.New("missing required parameter 'opts.TrackName'") + } + if len(opts.ArtistIDs) < 1 { + return nil, errors.New("at least one artist id must be specified") + } + if opts.AlbumID == 0 { + return nil, errors.New("release group id must be specified") + } + // first, try to match track Mbz ID + if opts.TrackMbzID != uuid.Nil { + l.Debug().Msgf("Associating track '%s' by MusicBrainz recording ID", opts.TrackName) + return matchTrackByMbzID(ctx, d, opts) + } else { + l.Debug().Msgf("Associating track '%s' by title and artist", opts.TrackName) + return matchTrackByTitleAndArtist(ctx, d, opts) + } +} + +// If no match is found, will call matchTrackByTitleAndArtist and associate the Mbz ID with the result +func matchTrackByMbzID(ctx context.Context, d db.DB, opts AssociateTrackOpts) (*models.Track, error) { + l := logger.FromContext(ctx) + track, err := d.GetTrack(ctx, db.GetTrackOpts{ + MusicBrainzID: opts.TrackMbzID, + }) + if err == nil { + l.Debug().Msgf("Found track '%s' by MusicBrainz ID", track.Title) + return track, nil + } else if !errors.Is(err, pgx.ErrNoRows) { + return nil, err + } else { + l.Debug().Msgf("Track '%s' could not be found by MusicBrainz ID", opts.TrackName) + track, err := matchTrackByTitleAndArtist(ctx, d, opts) + if err != nil { + return nil, err + } + l.Debug().Msgf("Updating track '%s' with MusicBrainz ID %s", opts.TrackName, opts.TrackMbzID) + err = d.UpdateTrack(ctx, db.UpdateTrackOpts{ + ID: track.ID, 
+ MusicBrainzID: opts.TrackMbzID, + }) + if err != nil { + return nil, err + } + track.MbzID = &opts.TrackMbzID + return track, nil + } +} + +func matchTrackByTitleAndArtist(ctx context.Context, d db.DB, opts AssociateTrackOpts) (*models.Track, error) { + l := logger.FromContext(ctx) + // try provided track title + track, err := d.GetTrack(ctx, db.GetTrackOpts{ + Title: opts.TrackName, + ArtistIDs: opts.ArtistIDs, + }) + if err == nil { + l.Debug().Msgf("Track '%s' found by title and artist match", track.Title) + return track, nil + } else if !errors.Is(err, pgx.ErrNoRows) { + return nil, err + } else { + if opts.TrackMbzID != uuid.Nil { + mbzTrack, err := opts.Mbzc.GetTrack(ctx, opts.TrackMbzID) + if err == nil { + track, err := d.GetTrack(ctx, db.GetTrackOpts{ + Title: mbzTrack.Title, + ArtistIDs: opts.ArtistIDs, + }) + if err == nil { + l.Debug().Msgf("Track '%s' found by MusicBrainz title and artist match", opts.TrackName) + return track, nil + } + } + } + l.Debug().Msgf("Track '%s' could not be found by title and artist match", opts.TrackName) + t, err := d.SaveTrack(ctx, db.SaveTrackOpts{ + RecordingMbzID: opts.TrackMbzID, + AlbumID: opts.AlbumID, + Title: opts.TrackName, + ArtistIDs: opts.ArtistIDs, + Duration: opts.Duration, + }) + if err != nil { + return nil, err + } + if opts.TrackMbzID == uuid.Nil { + l.Info().Msgf("Created track '%s' with title and artist", opts.TrackName) + } else { + l.Info().Msgf("Created track '%s' with MusicBrainz Recording ID", opts.TrackName) + } + return t, nil + } +} diff --git a/internal/catalog/catalog.go b/internal/catalog/catalog.go new file mode 100644 index 0000000..495104d --- /dev/null +++ b/internal/catalog/catalog.go @@ -0,0 +1,228 @@ +// Package catalog manages the internal metadata of the catalog of music the user has submitted listens for. +// This includes artists, releases (album, single, ep, etc), and tracks, as well as ingesting +// listens submitted both via the API(s) and other methods. 
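+//
+// A minimal usage sketch, mirroring what the package tests pass in (here `store`
+// is any db.DB implementation and `mbzc` any mbz.MusicBrainzCaller; Artist and
+// TrackTitle are the only hard-required fields, the rest are illustrative):
+//
+//	err := catalog.SubmitListen(ctx, store, catalog.SubmitListenOpts{
+//		MbzCaller:  mbzc,
+//		Artist:     "Daft Punk feat. Julian Casablancas",
+//		TrackTitle: "Instant Crush",
+//		Time:       time.Now(),
+//		UserID:     1,
+//	})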
+package catalog + +import ( + "context" + "errors" + "regexp" + "strings" + "time" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/mbz" + "github.com/gabehf/koito/internal/models" + "github.com/google/uuid" +) + +type GetListensOpts struct { + ArtistID int32 + ReleaseGroupID int32 + TrackID int32 + Limit int +} + +type SaveListenOpts struct { + TrackID int32 + Time time.Time +} + +type SubmitListenOpts struct { + // When true, skips registering the listen and only associates or creates the + // artist, release, release group, and track in DB + SkipSaveListen bool + + MbzCaller mbz.MusicBrainzCaller + ArtistNames []string + Artist string + ArtistMbzIDs []uuid.UUID + TrackTitle string + RecordingMbzID uuid.UUID + Duration int32 // in seconds + ReleaseTitle string + ReleaseMbzID uuid.UUID + ReleaseGroupMbzID uuid.UUID + Time time.Time + UserID int32 + Client string +} + +const ( + ImageSourceUserUpload = "User Upload" +) + +func SubmitListen(ctx context.Context, store db.DB, opts SubmitListenOpts) error { + l := logger.FromContext(ctx) + + if opts.Artist == "" || opts.TrackTitle == "" { + return errors.New("track name and artist are required") + } + + artists, err := AssociateArtists( + ctx, + store, + AssociateArtistsOpts{ + ArtistMbzIDs: opts.ArtistMbzIDs, + ArtistNames: opts.ArtistNames, + ArtistName: opts.Artist, + Mbzc: opts.MbzCaller, + TrackTitle: opts.TrackTitle, + }) + if err != nil { + l.Error().Err(err).Msg("Failed to associate artists to listen") + return err + } else if len(artists) < 1 { + l.Debug().Msg("Failed to associate any artists to release") + } + + artistIDs := make([]int32, len(artists)) + + for i, artist := range artists { + artistIDs[i] = artist.ID + l.Debug().Any("artist", artist).Msg("Matched listen to artist") + } + rg, err := AssociateAlbum(ctx, store, AssociateAlbumOpts{ + ReleaseMbzID: opts.ReleaseMbzID, + ReleaseGroupMbzID: opts.ReleaseGroupMbzID, + ReleaseName: opts.ReleaseTitle, + TrackName: opts.TrackTitle, + Mbzc: opts.MbzCaller, + Artists: artists, + }) + if err != nil { + l.Error().Err(err).Msg("Failed to associate release group to listen") + return err + } + + // ensure artists are associated with release group + store.AddArtistsToAlbum(ctx, db.AddArtistsToAlbumOpts{ + ArtistIDs: artistIDs, + AlbumID: rg.ID, + }) + + track, err := AssociateTrack(ctx, store, AssociateTrackOpts{ + ArtistIDs: artistIDs, + AlbumID: rg.ID, + TrackMbzID: opts.RecordingMbzID, + TrackName: opts.TrackTitle, + Duration: opts.Duration, + Mbzc: opts.MbzCaller, + }) + if err != nil { + l.Error().Err(err).Msg("Failed to associate track to listen") + return err + } + + if track.Duration == 0 && opts.Duration != 0 { + err := store.UpdateTrack(ctx, db.UpdateTrackOpts{ + ID: track.ID, + Duration: opts.Duration, + }) + if err != nil { + l.Err(err).Msgf("Failed to update duration for track %s", track.Title) + } + } + + if opts.SkipSaveListen { + return nil + } + + l.Info().Msgf("Received listen: '%s' by %s, from release '%s'", track.Title, buildArtistStr(artists), rg.Title) + + return store.SaveListen(ctx, db.SaveListenOpts{ + TrackID: track.ID, + Time: opts.Time, + UserID: opts.UserID, + Client: opts.Client, + }) +} + +func buildArtistStr(artists []*models.Artist) string { + artistNames := make([]string, len(artists)) + for i, artist := range artists { + artistNames[i] = artist.Name + } + return strings.Join(artistNames, " & ") +} + +var ( + // Bracketed feat patterns + bracketFeatPatterns = []*regexp.Regexp{ + 
regexp.MustCompile(`(?i)\(feat\. ([^)]*)\)`), + regexp.MustCompile(`(?i)\[feat\. ([^\]]*)\]`), + } + // Inline feat (not in brackets) + inlineFeatPattern = regexp.MustCompile(`(?i)feat\. ([^()\[\]]+)$`) + + // Delimiters only used inside feat. sections + featSplitDelimiters = regexp.MustCompile(`(?i)\s*(?:,|&|and|·)\s*`) + + // Delimiter for separating artists in main string (rare but real usage) + mainArtistDotSplitter = regexp.MustCompile(`\s+·\s+`) +) + +// ParseArtists extracts all contributing artist names from the artist and title strings +func ParseArtists(artist string, title string) []string { + seen := make(map[string]struct{}) + var out []string + + add := func(name string) { + name = strings.TrimSpace(name) + if name == "" { + return + } + if _, exists := seen[name]; !exists { + seen[name] = struct{}{} + out = append(out, name) + } + } + + foundFeat := false + + // Extract bracketed features from artist + for _, re := range bracketFeatPatterns { + if matches := re.FindStringSubmatch(artist); matches != nil { + foundFeat = true + artist = strings.Replace(artist, matches[0], "", 1) + for _, name := range featSplitDelimiters.Split(matches[1], -1) { + add(name) + } + } + } + // Extract inline feat. from artist + if matches := inlineFeatPattern.FindStringSubmatch(artist); matches != nil { + foundFeat = true + artist = strings.Replace(artist, matches[0], "", 1) + for _, name := range featSplitDelimiters.Split(matches[1], -1) { + add(name) + } + } + + // Add base artist(s) + if foundFeat { + add(strings.TrimSpace(artist)) + } else { + // Only split on " · " in base artist string + for _, name := range mainArtistDotSplitter.Split(artist, -1) { + add(name) + } + } + + // Extract features from title + for _, re := range bracketFeatPatterns { + if matches := re.FindStringSubmatch(title); matches != nil { + for _, name := range featSplitDelimiters.Split(matches[1], -1) { + add(name) + } + } + } + if matches := inlineFeatPattern.FindStringSubmatch(title); matches != nil { + for _, name := range featSplitDelimiters.Split(matches[1], -1) { + add(name) + } + } + + return out +} diff --git a/internal/catalog/catalog_test.go b/internal/catalog/catalog_test.go new file mode 100644 index 0000000..6148466 --- /dev/null +++ b/internal/catalog/catalog_test.go @@ -0,0 +1,366 @@ +package catalog_test + +import ( + "context" + "fmt" + "log" + "os" + "testing" + "time" + + "github.com/gabehf/koito/internal/catalog" + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/db/psql" + "github.com/gabehf/koito/internal/mbz" + "github.com/gabehf/koito/internal/utils" + _ "github.com/gabehf/koito/testing_init" + "github.com/google/uuid" + "github.com/ory/dockertest/v3" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +var ( + mbzArtistData = map[uuid.UUID]*mbz.MusicBrainzArtist{ + uuid.MustParse("00000000-0000-0000-0000-000000000001"): { + Name: "ATARASHII GAKKO!", + SortName: "Atarashii Gakko", + Aliases: []mbz.MusicBrainzArtistAlias{ + { + Name: "新しい学校のリーダーズ", + Type: "Artist name", + Primary: true, + }, + }, + }, + } + mbzReleaseGroupData = map[uuid.UUID]*mbz.MusicBrainzReleaseGroup{ + uuid.MustParse("00000000-0000-0000-0000-000000000011"): { + Title: "AG! 
Calling", + Type: "Album", + ArtistCredit: []mbz.MusicBrainzArtistCredit{ + { + Artist: mbz.MusicBrainzArtist{ + Name: "ATARASHII GAKKO!", + Aliases: []mbz.MusicBrainzArtistAlias{ + { + Name: "新しい学校のリーダーズ", + Type: "Artist name", + Primary: true, + }, + }, + }, + Name: "ATARASHII GAKKO!", + }, + }, + Releases: []mbz.MusicBrainzRelease{ + { + Title: "AG! Calling", + ID: "00000000-0000-0000-0000-000000000101", + ArtistCredit: []mbz.MusicBrainzArtistCredit{ + { + Artist: mbz.MusicBrainzArtist{ + Name: "ATARASHII GAKKO!", + Aliases: []mbz.MusicBrainzArtistAlias{ + { + Name: "ATARASHII GAKKO!", + Type: "Artist name", + Primary: true, + }, + }, + }, + Name: "ATARASHII GAKKO!", + }, + }, + Status: "Official", + }, + { + Title: "AG! Calling - Alt Title", + ID: "00000000-0000-0000-0000-000000000102", + ArtistCredit: []mbz.MusicBrainzArtistCredit{ + { + Artist: mbz.MusicBrainzArtist{ + Name: "ATARASHII GAKKO!", + Aliases: []mbz.MusicBrainzArtistAlias{ + { + Name: "ATARASHII GAKKO!", + Type: "Artist name", + Primary: true, + }, + }, + }, + Name: "ATARASHII GAKKO!", + }, + }, + Status: "Official", + }, + }, + }, + } + mbzReleaseData = map[uuid.UUID]*mbz.MusicBrainzRelease{ + uuid.MustParse("00000000-0000-0000-0000-000000000101"): { + Title: "AG! Calling", + ID: "00000000-0000-0000-0000-000000000101", + ArtistCredit: []mbz.MusicBrainzArtistCredit{ + { + Artist: mbz.MusicBrainzArtist{ + Name: "ATARASHII GAKKO!", + Aliases: []mbz.MusicBrainzArtistAlias{ + { + Name: "新しい学校のリーダーズ", + Type: "Artist name", + Primary: true, + }, + }, + }, + Name: "ATARASHII GAKKO!", + }, + }, + Status: "Official", + }, + uuid.MustParse("00000000-0000-0000-0000-000000000202"): { + Title: "EVANGELION FINALLY", + ID: "00000000-0000-0000-0000-000000000202", + ArtistCredit: []mbz.MusicBrainzArtistCredit{ + { + Artist: mbz.MusicBrainzArtist{ + Name: "Various Artists", + }, + Name: "Various Artists", + }, + }, + Status: "Official", + }, + } + mbzTrackData = map[uuid.UUID]*mbz.MusicBrainzTrack{ + uuid.MustParse("00000000-0000-0000-0000-000000001001"): { + Title: "Tokyo Calling", + }, + } +) + +var store *psql.Psql + +func getTestGetenv(resource *dockertest.Resource) func(string) string { + dir, err := utils.GenerateRandomString(8) + if err != nil { + panic(err) + } + return func(env string) string { + switch env { + case cfg.ENABLE_STRUCTURED_LOGGING_ENV: + return "true" + case cfg.LOG_LEVEL_ENV: + return "debug" + case cfg.DATABASE_URL_ENV: + return fmt.Sprintf("postgres://postgres:secret@localhost:%s", resource.GetPort("5432/tcp")) + case cfg.CONFIG_DIR_ENV: + return dir + case cfg.DISABLE_DEEZER_ENV, cfg.DISABLE_COVER_ART_ARCHIVE_ENV, cfg.DISABLE_MUSICBRAINZ_ENV, cfg.ENABLE_FULL_IMAGE_CACHE_ENV: + return "true" + default: + return "" + } + } +} + +func truncateTestData(t *testing.T) { + err := store.Exec(context.Background(), + `TRUNCATE + artists, + artist_aliases, + tracks, + artist_tracks, + releases, + artist_releases, + release_aliases, + listens + RESTART IDENTITY CASCADE`) + require.NoError(t, err) +} + +func setupTestDataWithMbzIDs(t *testing.T) { + truncateTestData(t) + + err := store.Exec(context.Background(), + `INSERT INTO artists (musicbrainz_id) + VALUES ('00000000-0000-0000-0000-000000000001')`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) + VALUES (1, 'ATARASHII GAKKO!', 'Testing', true)`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO releases (musicbrainz_id) + VALUES 
('00000000-0000-0000-0000-000000000101')`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO release_aliases (release_id, alias, source, is_primary) + VALUES (1, 'AG! Calling', 'Testing', true)`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO artist_releases (artist_id, release_id) + VALUES (1, 1)`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO tracks (release_id, musicbrainz_id) + VALUES (1, '00000000-0000-0000-0000-000000001001')`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO track_aliases (track_id, alias, source, is_primary) + VALUES (1, 'Tokyo Calling', 'Testing', true)`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO artist_tracks (artist_id, track_id) + VALUES (1, 1)`) + require.NoError(t, err) +} + +func setupTestDataSansMbzIDs(t *testing.T) { + truncateTestData(t) + + err := store.Exec(context.Background(), + `INSERT INTO artists (musicbrainz_id) + VALUES (NULL)`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) + VALUES (1, 'ATARASHII GAKKO!', 'Testing', true)`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO releases (musicbrainz_id) + VALUES (NULL)`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO release_aliases (release_id, alias, source, is_primary) + VALUES (1, 'AG! Calling', 'Testing', true)`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO artist_releases (artist_id, release_id) + VALUES (1, 1)`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO tracks (release_id, musicbrainz_id) + VALUES (1, NULL)`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO track_aliases (track_id, alias, source, is_primary) + VALUES (1, 'Tokyo Calling', 'Testing', true)`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO artist_tracks (artist_id, track_id) + VALUES (1, 1)`) + require.NoError(t, err) +} + +func TestMain(m *testing.M) { + pool, err := dockertest.NewPool("") + if err != nil { + log.Fatalf("Could not construct pool: %s", err) + } + + if err := pool.Client.Ping(); err != nil { + log.Fatalf("Could not connect to Docker: %s", err) + } + + resource, err := pool.Run("postgres", "latest", []string{"POSTGRES_PASSWORD=secret"}) + if err != nil { + log.Fatalf("Could not start resource: %s", err) + } + + err = cfg.Load(getTestGetenv(resource)) + if err != nil { + log.Fatalf("Could not load cfg: %s", err) + } + + if err := pool.Retry(func() error { + var err error + store, err = psql.New() + if err != nil { + log.Println("Failed to connect to test database, retrying...") + return err + } + return store.Ping(context.Background()) + }); err != nil { + log.Fatalf("Could not connect to database: %s", err) + } + + // insert a user into the db with id 1 to use for tests + err = store.Exec(context.Background(), `INSERT INTO users (username, password) VALUES ('test', DECODE('abc123', 'hex'))`) + if err != nil { + log.Fatalf("Failed to insert test user: %v", err) + } + + code := m.Run() + + // You can't defer this because os.Exit doesn't care for defer + if err := pool.Purge(resource); err != nil { + log.Fatalf("Could not purge resource: %s", err) + } + + err = os.RemoveAll(cfg.ConfigDir()) + if err != nil { + log.Fatalf("Could not remove temporary 
config dir: %v", err) + } + + os.Exit(code) +} + +// From: https://brandur.org/fragments/go-equal-time +// EqualTime compares two times in a way that's safer and with better fail +// output than a call to `require.Equal` would produce. +// +// It takes care to: +// +// - Strip off monotonic portions of timestamps so they aren't considered for +// purposes of comparison. +// +// - Truncate nanoseconds in a functionally equivalent way to how pgx would do +// it so that times that have round-tripped from Postgres can still be +// compared. Postgres only stores times to the microsecond level. +// +// - Use formatted, human-friendly time outputs so that in case of a failure, +// the discrepancy is easier to pick out. +func EqualTime(t testing.TB, t1, t2 time.Time) { + // Note that leaving off the nanosecond portion will have the effect of + // truncating it rather than rounding to the nearest microsecond, which + // functionally matches pgx's behavior while persisting. + const rfc3339Micro = "2006-01-02T15:04:05.999999Z07:00" + + require.Equal(t, + t1.Format(rfc3339Micro), + t2.Format(rfc3339Micro), + ) +} + +func TestArtistStringParse(t *testing.T) { + type input struct { + Name string + Title string + } + cases := map[input][]string{ + // only one artist + {"NELKE", ""}: {"NELKE"}, + {"The Brook & The Bluff", ""}: {"The Brook & The Bluff"}, + {"half·alive", ""}: {"half·alive"}, + // Earth, Wind, & Fire + {"Earth, Wind & Fire", "The Very Best of Earth, Wind & Fire"}: {"Earth, Wind & Fire"}, + // only artists in artist string + {"Carly Rae Jepsen feat. Rufus Wainwright", ""}: {"Carly Rae Jepsen", "Rufus Wainwright"}, + {"Mimi (feat. HATSUNE MIKU & KAFU)", ""}: {"Mimi", "HATSUNE MIKU", "KAFU"}, + {"Magnify Tokyo · Kanade Ishihara", ""}: {"Magnify Tokyo", "Kanade Ishihara"}, + {"Daft Punk [feat. Paul Williams]", ""}: {"Daft Punk", "Paul Williams"}, + // primary artist in artist string, features in title + {"Tyler, The Creator", "CA (feat. Alice Smith, Leon Ware & Clem Creevy)"}: {"Tyler, The Creator", "Alice Smith", "Leon Ware", "Clem Creevy"}, + {"ONE OK ROCK", "C.U.R.I.O.S.I.T.Y. (feat. Paledusk and CHICO CARLITO)"}: {"ONE OK ROCK", "Paledusk", "CHICO CARLITO"}, + {"Rat Tally", "In My Car feat. Madeline Kenney"}: {"Rat Tally", "Madeline Kenney"}, + // artists in both + {"Daft Punk feat. Julian Casablancas", "Instant Crush (feat. Julian Casablancas)"}: {"Daft Punk", "Julian Casablancas"}, + {"Paramore (feat. Joy Williams)", "Hate to See Your Heart Break feat. 
Joy Williams"}: {"Paramore", "Joy Williams"}, + } + + for in, out := range cases { + artists := catalog.ParseArtists(in.Name, in.Title) + assert.ElementsMatch(t, out, artists) + } +} diff --git a/internal/catalog/images.go b/internal/catalog/images.go new file mode 100644 index 0000000..d93ac46 --- /dev/null +++ b/internal/catalog/images.go @@ -0,0 +1,266 @@ +package catalog + +import ( + "bytes" + "context" + "fmt" + "io" + "net/http" + "os" + "path" + "path/filepath" + "strings" + + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/google/uuid" + "github.com/h2non/bimg" +) + +type ImageSize string + +const ( + ImageSizeSmall ImageSize = "small" + ImageSizeMedium ImageSize = "medium" + ImageSizeLarge ImageSize = "large" + // imageSizeXL ImageSize = "xl" + ImageSizeFull ImageSize = "full" + + ImageCacheDir = "image_cache" +) + +func ParseImageSize(size string) (ImageSize, error) { + switch strings.ToLower(size) { + case "small": + return ImageSizeSmall, nil + case "medium": + return ImageSizeMedium, nil + case "large": + return ImageSizeLarge, nil + // case "xl": + // return imageSizeXL, nil + case "full": + return ImageSizeFull, nil + default: + return "", fmt.Errorf("unknown image size: %s", size) + } +} +func GetImageSize(size ImageSize) int { + var px int + switch size { + case "small": + px = 48 + case "medium": + px = 256 + case "large": + px = 500 + case "xl": + px = 1000 + } + return px +} + +func SourceImageDir() string { + if cfg.FullImageCacheEnabled() { + return path.Join(cfg.ConfigDir(), ImageCacheDir, "full") + } else { + return path.Join(cfg.ConfigDir(), ImageCacheDir, "large") + } +} + +// ValidateImageURL checks if the URL points to a valid image by performing a HEAD request. +func ValidateImageURL(url string) error { + resp, err := http.Head(url) + if err != nil { + return fmt.Errorf("failed to perform HEAD request: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("HEAD request failed, status code: %d", resp.StatusCode) + } + + contentType := resp.Header.Get("Content-Type") + if !strings.HasPrefix(contentType, "image/") { + return fmt.Errorf("URL does not point to an image, content type: %s", contentType) + } + + return nil +} + +// DownloadAndCacheImage downloads an image from the given URL, then calls CompressAndSaveImage. +func DownloadAndCacheImage(ctx context.Context, id uuid.UUID, url string, size ImageSize) error { + l := logger.FromContext(ctx) + err := ValidateImageURL(url) + if err != nil { + return err + } + l.Debug().Msgf("Downloading image for ID %s", id) + resp, err := http.Get(url) + if err != nil { + return fmt.Errorf("failed to download image: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("failed to download image, status code: %d", resp.StatusCode) + } + + return CompressAndSaveImage(ctx, id.String(), size, resp.Body) +} + +// Compresses an image to the specified size, then saves it to the correct cache folder. 
+func CompressAndSaveImage(ctx context.Context, filename string, size ImageSize, body io.Reader) error { + l := logger.FromContext(ctx) + + if size == ImageSizeFull { + return saveImage(filename, size, body) + } + + l.Debug().Msg("Creating resized image") + compressed, err := compressImage(size, body) + if err != nil { + return err + } + + return saveImage(filename, size, compressed) +} + +// SaveImage saves an image to the image_cache/{size} folder +func saveImage(filename string, size ImageSize, data io.Reader) error { + configDir := cfg.ConfigDir() + cacheDir := filepath.Join(configDir, ImageCacheDir) + + // Ensure the cache directory exists + err := os.MkdirAll(filepath.Join(cacheDir, string(size)), os.ModePerm) + if err != nil { + return fmt.Errorf("failed to create full image cache directory: %w", err) + } + + // Create a file in the cache directory + imagePath := filepath.Join(cacheDir, string(size), filename) + file, err := os.Create(imagePath) + if err != nil { + return fmt.Errorf("failed to create image file: %w", err) + } + defer file.Close() + + // Save the image to the file + _, err = io.Copy(file, data) + if err != nil { + return fmt.Errorf("failed to save image: %w", err) + } + + return nil +} + +func compressImage(size ImageSize, data io.Reader) (io.Reader, error) { + imgBytes, err := io.ReadAll(data) + if err != nil { + return nil, err + } + px := GetImageSize(size) + // Resize with bimg + imgBytes, err = bimg.NewImage(imgBytes).Process(bimg.Options{ + Width: px, + Height: px, + Crop: true, + Quality: 85, + StripMetadata: true, + Type: bimg.WEBP, + }) + if err != nil { + return nil, err + } + if len(imgBytes) == 0 { + return nil, fmt.Errorf("compression failed") + } + return bytes.NewReader(imgBytes), nil +} + +func DeleteImage(filename uuid.UUID) error { + configDir := cfg.ConfigDir() + cacheDir := filepath.Join(configDir, ImageCacheDir) + + // err := os.Remove(path.Join(cacheDir, "xl", filename.String())) + // if err != nil && !os.IsNotExist(err) { + // return err + // } + err := os.Remove(path.Join(cacheDir, "full", filename.String())) + if err != nil && !os.IsNotExist(err) { + return err + } + err = os.Remove(path.Join(cacheDir, "large", filename.String())) + if err != nil && !os.IsNotExist(err) { + return err + } + err = os.Remove(path.Join(cacheDir, "medium", filename.String())) + if err != nil && !os.IsNotExist(err) { + return err + } + err = os.Remove(path.Join(cacheDir, "small", filename.String())) + if err != nil && !os.IsNotExist(err) { + return err + } + return nil +} + +// Finds any images in all image_cache folders and deletes them if they are not associated with +// an album or artist. 
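+// It walks the large, medium, small, and full subdirectories of the image cache,
+// parses each filename as a UUID, and removes files for which
+// store.ImageHasAssociation reports no association. A minimal call-site sketch
+// (where and how often to run it is up to the caller):
+//
+//	if err := PruneOrphanedImages(ctx, store); err != nil {
+//		logger.FromContext(ctx).Err(err).Msg("failed to prune orphaned images")
+//	}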
+func PruneOrphanedImages(ctx context.Context, store db.DB) error { + l := logger.FromContext(ctx) + + configDir := cfg.ConfigDir() + cacheDir := filepath.Join(configDir, ImageCacheDir) + + count := 0 + // go through every folder to find orphaned images + // store already processed images to speed up pruining + memo := make(map[string]bool) + for _, dir := range []string{"large", "medium", "small", "full"} { + c, err := pruneDirImgs(ctx, store, path.Join(cacheDir, dir), memo) + if err != nil { + return err + } + count += c + } + l.Info().Msgf("Purged %d images", count) + return nil +} + +// returns the number of pruned images +func pruneDirImgs(ctx context.Context, store db.DB, path string, memo map[string]bool) (int, error) { + l := logger.FromContext(ctx) + count := 0 + files, err := os.ReadDir(path) + if err != nil { + l.Info().Msgf("Failed to read from directory %s; skipping for prune", path) + files = []os.DirEntry{} + } + for _, file := range files { + fn := file.Name() + imageid, err := uuid.Parse(fn) + if err != nil { + l.Debug().Msgf("Filename does not appear to be UUID: %s", fn) + continue + } + exists, err := store.ImageHasAssociation(ctx, imageid) + if err != nil { + return 0, err + } else if exists { + continue + } + // image does not have association + l.Debug().Msgf("Deleting image: %s", imageid) + err = DeleteImage(imageid) + if err != nil { + l.Err(err).Msg("Error purging orphaned images") + } + if memo != nil { + memo[fn] = true + } + count++ + } + return count, nil +} diff --git a/internal/catalog/images_test.go b/internal/catalog/images_test.go new file mode 100644 index 0000000..e0077e9 --- /dev/null +++ b/internal/catalog/images_test.go @@ -0,0 +1,74 @@ +package catalog_test + +import ( + "context" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "testing" + + "github.com/gabehf/koito/internal/catalog" + "github.com/gabehf/koito/internal/cfg" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestImageLifecycle(t *testing.T) { + + // serve yuu.jpg as test image + imageBytes, err := os.ReadFile(filepath.Join("static", "yuu.jpg")) + require.NoError(t, err) + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "image/jpeg") + w.WriteHeader(http.StatusOK) + w.Write(imageBytes) + })) + defer server.Close() + + imgID := uuid.New() + + err = catalog.DownloadAndCacheImage(context.Background(), imgID, server.URL, catalog.ImageSizeFull) + require.NoError(t, err) + err = catalog.DownloadAndCacheImage(context.Background(), imgID, server.URL, catalog.ImageSizeMedium) + require.NoError(t, err) + + // ensure download is correct + + imagePath := filepath.Join(cfg.ConfigDir(), catalog.ImageCacheDir, "full", imgID.String()) + _, err = os.Stat(imagePath) + assert.NoError(t, err) + imagePath = filepath.Join(cfg.ConfigDir(), catalog.ImageCacheDir, "medium", imgID.String()) + _, err = os.Stat(imagePath) + assert.NoError(t, err) + + assert.NoError(t, catalog.DeleteImage(imgID)) + + // ensure delete works + + imagePath = filepath.Join(cfg.ConfigDir(), catalog.ImageCacheDir, "full", imgID.String()) + _, err = os.Stat(imagePath) + assert.Error(t, err) + imagePath = filepath.Join(cfg.ConfigDir(), catalog.ImageCacheDir, "medium", imgID.String()) + _, err = os.Stat(imagePath) + assert.Error(t, err) + + // re-download for prune + + err = catalog.DownloadAndCacheImage(context.Background(), imgID, server.URL, catalog.ImageSizeFull) + 
require.NoError(t, err) + err = catalog.DownloadAndCacheImage(context.Background(), imgID, server.URL, catalog.ImageSizeMedium) + require.NoError(t, err) + + assert.NoError(t, catalog.PruneOrphanedImages(context.Background(), store)) + + // ensure prune works + + imagePath = filepath.Join(cfg.ConfigDir(), catalog.ImageCacheDir, "full", imgID.String()) + _, err = os.Stat(imagePath) + assert.Error(t, err) + imagePath = filepath.Join(cfg.ConfigDir(), catalog.ImageCacheDir, "medium", imgID.String()) + _, err = os.Stat(imagePath) + assert.Error(t, err) +} diff --git a/internal/catalog/submit_listen_test.go b/internal/catalog/submit_listen_test.go new file mode 100644 index 0000000..5fcea61 --- /dev/null +++ b/internal/catalog/submit_listen_test.go @@ -0,0 +1,858 @@ +package catalog_test + +import ( + "context" + "testing" + "time" + + "github.com/gabehf/koito/internal/catalog" + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/mbz" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// this file is very long + +func TestSubmitListen_CreateAllMbzIDs(t *testing.T) { + truncateTestData(t) + + // artist gets created with musicbrainz id + // release group gets created with mbz id + // track gets created with mbz id + // test listen time is opts time + + ctx := context.Background() + mbzc := &mbz.MbzMockCaller{ + Artists: mbzArtistData, + ReleaseGroups: mbzReleaseGroupData, + Releases: mbzReleaseData, + Tracks: mbzTrackData, + } + artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001") + releaseGroupMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000011") + releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101") + trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001") + opts := catalog.SubmitListenOpts{ + MbzCaller: mbzc, + ArtistNames: []string{"ATARASHII GAKKO!"}, + Artist: "ATARASHII GAKKO!", + ArtistMbzIDs: []uuid.UUID{ + artistMbzID, + }, + TrackTitle: "Tokyo Calling", + RecordingMbzID: trackMbzID, + ReleaseTitle: "AG! Calling", + ReleaseMbzID: releaseMbzID, + ReleaseGroupMbzID: releaseGroupMbzID, + Time: time.Now(), + UserID: 1, + } + + err := catalog.SubmitListen(ctx, store, opts) + require.NoError(t, err) + + // Verify that the listen was saved + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.True(t, exists, "expected listen row to exist") + + // Verify that listen time is correct + p, err := store.GetListensPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 1}) + require.NoError(t, err) + require.Len(t, p.Items, 1) + l := p.Items[0] + EqualTime(t, opts.Time, l.Time) +} + +func TestSubmitListen_CreateAllMbzIDsNoReleaseGroupID(t *testing.T) { + truncateTestData(t) + + // release group gets created with release id + + ctx := context.Background() + mbzc := &mbz.MbzMockCaller{ + Artists: mbzArtistData, + ReleaseGroups: mbzReleaseGroupData, + Releases: mbzReleaseData, + Tracks: mbzTrackData, + } + artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001") + releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101") + trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001") + opts := catalog.SubmitListenOpts{ + MbzCaller: mbzc, + ArtistNames: []string{"ATARASHII GAKKO!"}, + Artist: "ATARASHII GAKKO!", + ArtistMbzIDs: []uuid.UUID{ + artistMbzID, + }, + TrackTitle: "Tokyo Calling", + RecordingMbzID: trackMbzID, + ReleaseTitle: "AG! 
Calling", + ReleaseMbzID: releaseMbzID, + Time: time.Now(), + UserID: 1, + } + + err := catalog.SubmitListen(ctx, store, opts) + require.NoError(t, err) + + // Verify that the listen was saved + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.True(t, exists, "expected listen row to exist") + exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM releases_with_title + WHERE title = $1 + )`, "AG! Calling") + require.NoError(t, err) + assert.True(t, exists, "expected release to be created") +} + +func TestSubmitListen_CreateAllNoMbzIDs(t *testing.T) { + truncateTestData(t) + + // artist gets created with artist names + // release group gets created with artist and title + // track gets created with title and artist + + ctx := context.Background() + mbzc := &mbz.MbzMockCaller{} + opts := catalog.SubmitListenOpts{ + MbzCaller: mbzc, + ArtistNames: []string{"ATARASHII GAKKO!"}, + Artist: "ATARASHII GAKKO!", + TrackTitle: "Tokyo Calling", + ReleaseTitle: "AG! Calling", + Time: time.Now(), + UserID: 1, + } + + err := catalog.SubmitListen(ctx, store, opts) + require.NoError(t, err) + + // Verify that the listen was saved + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.True(t, exists, "expected listen row to exist") +} + +func TestSubmitListen_CreateAllNoMbzIDsNoArtistNamesNoReleaseTitle(t *testing.T) { + truncateTestData(t) + + // artists get created with artist and track title + // release group gets created with artist and track title + + ctx := context.Background() + mbzc := &mbz.MbzMockCaller{} + opts := catalog.SubmitListenOpts{ + MbzCaller: mbzc, + ArtistMbzIDs: []uuid.UUID{ + uuid.MustParse("00000000-0000-0000-0000-000000000000"), + }, + Artist: "Rat Tally", + TrackTitle: "In My Car feat. 
Madeline Kenney", + Time: time.Now(), + UserID: 1, + } + + err := catalog.SubmitListen(ctx, store, opts) + require.NoError(t, err) + + // Verify that the listen was saved + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.True(t, exists, "expected listen row to exist") + exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM releases_with_title + WHERE title = $1 + )`, opts.TrackTitle) + require.NoError(t, err) + assert.True(t, exists, "expected created release to have track title as title") + exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM artists_with_name + WHERE name = $1 + )`, "Rat Tally") + require.NoError(t, err) + assert.True(t, exists, "expected primary artist to be created") + exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM artists_with_name + WHERE name = $1 + )`, "Madeline Kenney") + require.NoError(t, err) + assert.True(t, exists, "expected featured artist to be created") +} + +func TestSubmitListen_MatchAllMbzIDs(t *testing.T) { + setupTestDataWithMbzIDs(t) + + // artist gets matched with musicbrainz id + // release gets matched with mbz id + // track gets matched with mbz id + + ctx := context.Background() + mbzc := &mbz.MbzMockCaller{ + Artists: mbzArtistData, + Releases: mbzReleaseData, + Tracks: mbzTrackData, + } + artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001") + releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101") + trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001") + opts := catalog.SubmitListenOpts{ + MbzCaller: mbzc, + ArtistNames: []string{"ATARASHII GAKKO!"}, + Artist: "ATARASHII GAKKO!", + ArtistMbzIDs: []uuid.UUID{ + artistMbzID, + }, + TrackTitle: "Tokyo Calling", + RecordingMbzID: trackMbzID, + ReleaseTitle: "AG! Calling", + ReleaseMbzID: releaseMbzID, + Time: time.Now(), + UserID: 1, + } + + err := catalog.SubmitListen(ctx, store, opts) + require.NoError(t, err) + + // Verify that the listen was saved + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.True(t, exists, "expected listen row to exist") + + // verify that track, release group, and artist are existing ones and not duplicates + count, err := store.Count(ctx, ` + SELECT COUNT(*) FROM tracks_with_title WHERE title = $1 + `, "Tokyo Calling") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate track created") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM releases_with_title WHERE title = $1 + `, "AG! Calling") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate release group created") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM artists_with_name WHERE name = $1 + `, "ATARASHII GAKKO!") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate artist created") +} + +func TestSubmitListen_MatchTrackFromMbzTitle(t *testing.T) { + setupTestDataSansMbzIDs(t) + + ctx := context.Background() + mbzc := &mbz.MbzMockCaller{ + Tracks: mbzTrackData, + } + trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001") + opts := catalog.SubmitListenOpts{ + MbzCaller: mbzc, + ArtistNames: []string{"ATARASHII GAKKO!"}, + Artist: "ATARASHII GAKKO!", + TrackTitle: "Tokyo Calling - Alt Title", + RecordingMbzID: trackMbzID, + ReleaseTitle: "AG! 
Calling", + Time: time.Now(), + UserID: 1, + } + + err := catalog.SubmitListen(ctx, store, opts) + require.NoError(t, err) + + // Verify that the listen was saved + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.True(t, exists, "expected listen row to exist") + + // verify that track, release group, and artist are existing ones and not duplicates + count, err := store.Count(ctx, ` + SELECT COUNT(*) FROM tracks_with_title WHERE title = $1 + `, "Tokyo Calling") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate track created") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM releases_with_title WHERE title = $1 + `, "AG! Calling") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate release group created") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM artists_with_name WHERE name = $1 + `, "ATARASHII GAKKO!") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate artist created") +} + +func TestSubmitListen_VariousArtistsRelease(t *testing.T) { + + ctx := context.Background() + mbzc := &mbz.MbzMockCaller{ + Releases: mbzReleaseData, + } + releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000202") + opts := catalog.SubmitListenOpts{ + MbzCaller: mbzc, + ArtistNames: []string{"ARIANNE"}, + Artist: "ARIANNE", + TrackTitle: "KOMM, SUSSER TOD (M-10 Director's Edit version)", + ReleaseTitle: "Evangelion Finally", + ReleaseMbzID: releaseMbzID, + Time: time.Now(), + UserID: 1, + } + + err := catalog.SubmitListen(ctx, store, opts) + require.NoError(t, err) + + // Verify that the listen was saved + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.True(t, exists, "expected listen row to exist") + + // verify that track, release group, and artist are existing ones and not duplicates + count, err := store.Count(ctx, ` + SELECT COUNT(*) FROM releases WHERE various_artists = $1 + `, true) + require.NoError(t, err) + assert.EqualValues(t, 1, count) +} + +func TestSubmitListen_MatchOneArtistMbzIDOneArtistName(t *testing.T) { + setupTestDataWithMbzIDs(t) + + // artist gets matched with musicbrainz id + // release gets matched with mbz id + // track gets matched with mbz id + + ctx := context.Background() + mbzc := &mbz.MbzMockCaller{ + Artists: mbzArtistData, + Releases: mbzReleaseData, + Tracks: mbzTrackData, + } + // i really do want to use real tracks for tests but i dont wanna set up all the data for one test + artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001") + releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101") + trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001") + opts := catalog.SubmitListenOpts{ + MbzCaller: mbzc, + ArtistNames: []string{"ATARASHII GAKKO!", "Fake Artist"}, + Artist: "ATARASHII GAKKO! feat. Fake Artist", + ArtistMbzIDs: []uuid.UUID{ + artistMbzID, + }, + TrackTitle: "Tokyo Calling", + RecordingMbzID: trackMbzID, + ReleaseTitle: "AG! 
Calling", + ReleaseMbzID: releaseMbzID, + Time: time.Now(), + UserID: 1, + } + + err := catalog.SubmitListen(ctx, store, opts) + require.NoError(t, err) + + // Verify that the listen was saved + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.True(t, exists, "expected listen row to exist") + + // verify that track, release group, and artist are existing ones and not duplicates + count, err := store.Count(ctx, ` + SELECT COUNT(*) FROM tracks_with_title WHERE title = $1 + `, "Tokyo Calling") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate track created") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM releases_with_title WHERE title = $1 + `, "AG! Calling") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate release group created") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM artists_with_name WHERE name = $1 + `, "ATARASHII GAKKO!") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate artist created") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM artists_with_name WHERE name = $1 + `, "Fake Artist") + require.NoError(t, err) + assert.Equal(t, 1, count, "expected featured artist to be created") +} + +func TestSubmitListen_MatchAllMbzIDsNoReleaseGroupIDNoTrackID(t *testing.T) { + setupTestDataWithMbzIDs(t) + + // release group gets matched with release id + // track gets matched with title and artist + + ctx := context.Background() + mbzc := &mbz.MbzMockCaller{ + Artists: mbzArtistData, + ReleaseGroups: mbzReleaseGroupData, + Releases: mbzReleaseData, + Tracks: mbzTrackData, + } + artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001") + releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101") + opts := catalog.SubmitListenOpts{ + MbzCaller: mbzc, + ArtistNames: []string{"ATARASHII GAKKO!"}, + Artist: "ATARASHII GAKKO!", + ArtistMbzIDs: []uuid.UUID{ + artistMbzID, + }, + TrackTitle: "Tokyo Calling", + ReleaseTitle: "AG! Calling", + ReleaseMbzID: releaseMbzID, + Time: time.Now(), + UserID: 1, + } + + err := catalog.SubmitListen(ctx, store, opts) + require.NoError(t, err) + + // Verify that the listen was saved + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.True(t, exists, "expected listen row to exist") + + // verify that track, release group, and artist are existing ones and not duplicates + count, err := store.Count(ctx, ` + SELECT COUNT(*) FROM releases_with_title WHERE title = $1 + `, "AG! Calling") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate release created") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM tracks_with_title WHERE title = $1 + `, "Tokyo Calling") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate track created") +} + +func TestSubmitListen_MatchNoMbzIDs(t *testing.T) { + setupTestDataSansMbzIDs(t) + + ctx := context.Background() + mbzc := &mbz.MbzMockCaller{} + opts := catalog.SubmitListenOpts{ + MbzCaller: mbzc, + ArtistNames: []string{"ATARASHII GAKKO!"}, + Artist: "ATARASHII GAKKO!", + TrackTitle: "Tokyo Calling", + ReleaseTitle: "AG! 
Calling", + Time: time.Now(), + UserID: 1, + } + + err := catalog.SubmitListen(ctx, store, opts) + require.NoError(t, err) + + // Verify that the listen was saved + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.True(t, exists, "expected listen row to exist") + + // verify that track, release group, and artist are existing ones and not duplicates + count, err := store.Count(ctx, ` + SELECT COUNT(*) FROM artists_with_name WHERE name = $1 AND musicbrainz_id IS NULL + `, "ATARASHII GAKKO!") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate artist created or has been associated with fake musicbrainz id") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM releases_with_title WHERE title = $1 AND musicbrainz_id IS NULL + `, "AG! Calling") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate release created or has been associated with fake musicbrainz id") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM tracks_with_title WHERE title = $1 AND musicbrainz_id IS NULL + `, "Tokyo Calling") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate track created or has been associated with fake musicbrainz id") +} + +func TestSubmitListen_UpdateTrackDuration(t *testing.T) { + setupTestDataSansMbzIDs(t) + + ctx := context.Background() + mbzc := &mbz.MbzMockCaller{} + opts := catalog.SubmitListenOpts{ + MbzCaller: mbzc, + ArtistNames: []string{"ATARASHII GAKKO!"}, + Artist: "ATARASHII GAKKO!", + TrackTitle: "Tokyo Calling", + ReleaseTitle: "AG! Calling", + Time: time.Now(), + Duration: 191, + UserID: 1, + } + + err := catalog.SubmitListen(ctx, store, opts) + require.NoError(t, err) + + // Verify that the listen was saved + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.True(t, exists, "expected listen row to exist") + + count, err := store.Count(ctx, ` + SELECT COUNT(*) FROM tracks_with_title WHERE title = $1 AND duration = 191 + `, "Tokyo Calling") + require.NoError(t, err) + assert.Equal(t, 1, count, "expected duration to be updated") +} + +func TestSubmitListen_MatchFromTrackTitleNoMbzIDs(t *testing.T) { + setupTestDataSansMbzIDs(t) + + ctx := context.Background() + mbzc := &mbz.MbzMockCaller{ + Artists: mbzArtistData, + } + opts := catalog.SubmitListenOpts{ + MbzCaller: mbzc, + ArtistMbzIDs: []uuid.UUID{ + uuid.MustParse("00000000-0000-0000-0000-000000000001"), + }, + Artist: "ATARASHII GAKKO!", + TrackTitle: "Tokyo Calling", + ReleaseTitle: "AG! Calling", + Time: time.Now(), + UserID: 1, + } + + err := catalog.SubmitListen(ctx, store, opts) + require.NoError(t, err) + + // Verify that the listen was saved + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT * FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.True(t, exists, "expected listen row to exist") + + // verify that track, release group, and artist are existing ones and not duplicates + count, err := store.Count(ctx, ` + SELECT COUNT(*) FROM artists_with_name WHERE name = $1 + `, "ATARASHII GAKKO!") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate artist created") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM releases_with_title WHERE title = $1 + `, "AG! 
Calling") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate release created") +} + +func TestSubmitListen_AssociateAllMbzIDs(t *testing.T) { + setupTestDataSansMbzIDs(t) + + // existing artist gets associated with mbz id (also updates aliases) + // exisiting release gets associated with mbz id + // existing track gets associated with mbz id (with new artist association) + + ctx := context.Background() + mbzc := &mbz.MbzMockCaller{ + Artists: mbzArtistData, + Releases: mbzReleaseData, + Tracks: mbzTrackData, + } + artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001") + releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101") + trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001") + opts := catalog.SubmitListenOpts{ + MbzCaller: mbzc, + ArtistNames: []string{"ATARASHII GAKKO!"}, + Artist: "ATARASHII GAKKO!", + ArtistMbzIDs: []uuid.UUID{ + artistMbzID, + }, + TrackTitle: "Tokyo Calling", + RecordingMbzID: trackMbzID, + ReleaseTitle: "AG! Calling", + ReleaseMbzID: releaseMbzID, + Time: time.Now(), + UserID: 1, + } + + err := catalog.SubmitListen(ctx, store, opts) + require.NoError(t, err) + + // Verify that the listen was saved + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.True(t, exists, "expected listen row to exist") + + // verify that track, release group, and artist are existing ones and not duplicates + count, err := store.Count(ctx, ` + SELECT COUNT(*) FROM tracks_with_title WHERE title = $1 + `, "Tokyo Calling") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate track created") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM releases_with_title WHERE title = $1 + `, "AG! 
Calling") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate release created") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM artists_with_name WHERE name = $1 + `, "ATARASHII GAKKO!") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate artist created") + + // Verify that the mbz ids were saved + exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM tracks + WHERE musicbrainz_id = $1 + )`, trackMbzID) + require.NoError(t, err) + assert.True(t, exists, "expected track row with mbz id to exist") + exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM artists + WHERE musicbrainz_id = $1 + )`, artistMbzID) + require.NoError(t, err) + assert.True(t, exists, "expected artist row with mbz id to exist") + exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM releases + WHERE musicbrainz_id = $1 + )`, releaseMbzID) + require.NoError(t, err) + assert.True(t, exists, "expected release row with mbz id to exist") +} + +func TestSubmitListen_AssociateAllMbzIDsWithMbzUnreachable(t *testing.T) { + setupTestDataSansMbzIDs(t) + + // existing artist gets associated with mbz id (also updates aliases) + // exisiting release gets associated with mbz id + // existing track gets associated with mbz id (with new artist association) + + ctx := context.Background() + mbzc := &mbz.MbzErrorCaller{} + artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001") + releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101") + trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001") + opts := catalog.SubmitListenOpts{ + MbzCaller: mbzc, + ArtistNames: []string{"ATARASHII GAKKO!"}, + Artist: "ATARASHII GAKKO!", + ArtistMbzIDs: []uuid.UUID{ + artistMbzID, + }, + TrackTitle: "Tokyo Calling", + RecordingMbzID: trackMbzID, + ReleaseTitle: "AG! Calling", + ReleaseMbzID: releaseMbzID, + Time: time.Now(), + UserID: 1, + } + + err := catalog.SubmitListen(ctx, store, opts) + require.NoError(t, err) + + // Verify that the listen was saved + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.True(t, exists, "expected listen row to exist") + + // verify that track, release group, and artist are existing ones and not duplicates + count, err := store.Count(ctx, ` + SELECT COUNT(*) FROM tracks_with_title WHERE title = $1 + `, "Tokyo Calling") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate track created") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM releases_with_title WHERE title = $1 + `, "AG! 
Calling") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate release created") + count, err = store.Count(ctx, ` + SELECT COUNT(*) FROM artists_with_name WHERE name = $1 + `, "ATARASHII GAKKO!") + require.NoError(t, err) + assert.Equal(t, 1, count, "duplicate artist created") + + // Verify that the mbz ids were saved + exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM tracks + WHERE musicbrainz_id = $1 + )`, trackMbzID) + require.NoError(t, err) + assert.True(t, exists, "expected track row with mbz id to exist") + exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM artists + WHERE musicbrainz_id = $1 + )`, artistMbzID) + require.NoError(t, err) + // as artist names and mbz ids can be ids with unknown order + assert.False(t, exists, "artists cannot be associated with mbz ids when mbz is unreachable") + exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM releases + WHERE musicbrainz_id = $1 + )`, releaseMbzID) + require.NoError(t, err) + assert.True(t, exists, "expected release row with mbz id to exist") +} + +func TestSubmitListen_AssociateReleaseAliases(t *testing.T) { + setupTestDataSansMbzIDs(t) + + // existing artist gets associated with mbz id (also updates aliases) + // exisiting release group gets associated with mbz id + // existing track gets associated with mbz id (with new artist association) + + ctx := context.Background() + mbzc := &mbz.MbzMockCaller{ + Artists: mbzArtistData, + Releases: mbzReleaseData, + Tracks: mbzTrackData, + ReleaseGroups: mbzReleaseGroupData, + } + artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001") + releaseGroupMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000011") + releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101") + trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001") + opts := catalog.SubmitListenOpts{ + MbzCaller: mbzc, + ArtistNames: []string{"ATARASHII GAKKO!"}, + Artist: "ATARASHII GAKKO!", + ArtistMbzIDs: []uuid.UUID{ + artistMbzID, + }, + TrackTitle: "Tokyo Calling", + RecordingMbzID: trackMbzID, + ReleaseTitle: "AG! Calling", + ReleaseMbzID: releaseMbzID, + ReleaseGroupMbzID: releaseGroupMbzID, + Time: time.Now(), + UserID: 1, + } + + err := catalog.SubmitListen(ctx, store, opts) + require.NoError(t, err) + + // Verify that the listen was saved + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.True(t, exists, "expected listen row to exist") + + // verify that track, release group, and artist are existing ones and not duplicates + count, err := store.Count(ctx, ` + SELECT COUNT(*) FROM release_aliases WHERE alias = $1 + `, "AG! 
Calling - Alt Title")
+	require.NoError(t, err)
+	assert.Equal(t, 1, count, "expected release alias to exist")
+}
+
+func TestSubmitListen_MusicBrainzUnreachable(t *testing.T) {
+	truncateTestData(t)
+
+	// verify that submitting a listen does not fail when MusicBrainz is unreachable
+
+	ctx := context.Background()
+	mbzc := &mbz.MbzErrorCaller{}
+	artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001")
+	releaseGroupMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000011")
+	releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101")
+	trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001")
+	opts := catalog.SubmitListenOpts{
+		MbzCaller:   mbzc,
+		ArtistNames: []string{"ATARASHII GAKKO!"},
+		Artist:      "ATARASHII GAKKO!",
+		ArtistMbzIDs: []uuid.UUID{
+			artistMbzID,
+		},
+		TrackTitle:        "Tokyo Calling",
+		RecordingMbzID:    trackMbzID,
+		ReleaseTitle:      "AG! Calling",
+		ReleaseMbzID:      releaseMbzID,
+		ReleaseGroupMbzID: releaseGroupMbzID,
+		Time:              time.Now(),
+		UserID:            1,
+	}
+
+	err := catalog.SubmitListen(ctx, store, opts)
+	require.NoError(t, err)
+
+	// Verify that the listen was saved
+	exists, err := store.RowExists(ctx, `
+	SELECT EXISTS (
+		SELECT 1 FROM listens
+		WHERE track_id = $1
+	)`, 1)
+	require.NoError(t, err)
+	assert.True(t, exists, "expected listen row to exist")
+}
diff --git a/internal/cfg/cfg.go b/internal/cfg/cfg.go
new file mode 100644
index 0000000..1d7bd4b
--- /dev/null
+++ b/internal/cfg/cfg.go
@@ -0,0 +1,280 @@
+package cfg
+
+import (
+	"errors"
+	"fmt"
+	"strconv"
+	"strings"
+	"sync"
+)
+
+const (
+	defaultBaseUrl        = "http://127.0.0.1"
+	defaultListenPort     = 4110
+	defaultMusicBrainzUrl = "https://musicbrainz.org"
+)
+
+const (
+	BASE_URL_ENV                  = "KOITO_BASE_URL"
+	DATABASE_URL_ENV              = "KOITO_DATABASE_URL"
+	BIND_ADDR_ENV                 = "KOITO_BIND_ADDR"
+	LISTEN_PORT_ENV               = "KOITO_LISTEN_PORT"
+	ENABLE_STRUCTURED_LOGGING_ENV = "KOITO_ENABLE_STRUCTURED_LOGGING"
+	ENABLE_FULL_IMAGE_CACHE_ENV   = "KOITO_ENABLE_FULL_IMAGE_CACHE"
+	LOG_LEVEL_ENV                 = "KOITO_LOG_LEVEL"
+	MUSICBRAINZ_URL_ENV           = "KOITO_MUSICBRAINZ_URL"
+	ENABLE_LBZ_RELAY_ENV          = "KOITO_ENABLE_LBZ_RELAY"
+	LBZ_RELAY_URL_ENV             = "KOITO_LBZ_RELAY_URL"
+	LBZ_RELAY_TOKEN_ENV           = "KOITO_LBZ_RELAY_TOKEN"
+	LASTFM_API_KEY_ENV            = "KOITO_LASTFM_API_KEY"
+	CONFIG_DIR_ENV                = "KOITO_CONFIG_DIR"
+	DEFAULT_USERNAME_ENV          = "KOITO_DEFAULT_USERNAME"
+	DEFAULT_PASSWORD_ENV          = "KOITO_DEFAULT_PASSWORD"
+	DISABLE_DEEZER_ENV            = "KOITO_DISABLE_DEEZER"
+	DISABLE_COVER_ART_ARCHIVE_ENV = "KOITO_DISABLE_COVER_ART_ARCHIVE"
+	DISABLE_MUSICBRAINZ_ENV       = "KOITO_DISABLE_MUSICBRAINZ"
+	SKIP_IMPORT_ENV               = "KOITO_SKIP_IMPORT"
+	ALLOWED_HOSTS_ENV             = "KOITO_ALLOWED_HOSTS"
+	DISABLE_RATE_LIMIT_ENV        = "KOITO_DISABLE_RATE_LIMIT"
+)
+
+type config struct {
+	bindAddr             string
+	listenPort           int
+	configDir            string
+	baseUrl              string
+	databaseUrl          string
+	musicBrainzUrl       string
+	logLevel             int
+	structuredLogging    bool
+	enableFullImageCache bool
+	lbzRelayEnabled      bool
+	lbzRelayUrl          string
+	lbzRelayToken        string
+	defaultPw            string
+	defaultUsername      string
+	disableDeezer        bool
+	disableCAA           bool
+	disableMusicBrainz   bool
+	skipImport           bool
+	allowedHosts         []string
+	allowAllHosts        bool
+	disableRateLimit     bool
+}
+
+var (
+	globalConfig *config
+	once         sync.Once
+	lock         sync.RWMutex
+)
+
+// Load initializes the global configuration using the provided getenv function.
+func Load(getenv func(string) string) error {
+	var err error
+	once.Do(func() {
+		globalConfig, err = loadConfig(getenv)
+	})
+	return err
+}
+
+// loadConfig loads the configuration from environment variables.
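+//
+// A minimal sketch of how loadConfig is reached through Load from another
+// package (the connection string below is a placeholder, not a value from this
+// repository): only KOITO_DATABASE_URL is required, and everything else falls
+// back to a default (base URL http://127.0.0.1, port 4110, the public
+// MusicBrainz URL, "admin"/"changeme" credentials, and /etc/koito as the
+// config directory).
+//
+//	err := cfg.Load(func(key string) string {
+//		if key == cfg.DATABASE_URL_ENV {
+//			return "postgres://user:password@localhost:5432/koito?sslmode=disable"
+//		}
+//		return ""
+//	})
+//	if err != nil {
+//		// handle the missing or invalid configuration
+//	}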
+func loadConfig(getenv func(string) string) (*config, error) { + cfg := new(config) + cfg.baseUrl = getenv(BASE_URL_ENV) + if cfg.baseUrl == "" { + cfg.baseUrl = defaultBaseUrl + } + cfg.databaseUrl = getenv(DATABASE_URL_ENV) + if cfg.databaseUrl == "" { + return nil, errors.New("required parameter " + DATABASE_URL_ENV + " not provided") + } + cfg.bindAddr = getenv(BIND_ADDR_ENV) + var err error + cfg.listenPort, err = strconv.Atoi(getenv(LISTEN_PORT_ENV)) + if err != nil { + cfg.listenPort = defaultListenPort + } + cfg.musicBrainzUrl = getenv(MUSICBRAINZ_URL_ENV) + if cfg.musicBrainzUrl == "" { + cfg.musicBrainzUrl = defaultMusicBrainzUrl + } + if parseBool(getenv(ENABLE_LBZ_RELAY_ENV)) { + cfg.lbzRelayEnabled = true + cfg.lbzRelayToken = getenv(LBZ_RELAY_TOKEN_ENV) + cfg.lbzRelayUrl = getenv(LBZ_RELAY_URL_ENV) + } + + cfg.disableRateLimit = parseBool(getenv(DISABLE_RATE_LIMIT_ENV)) + + cfg.structuredLogging = parseBool(getenv(ENABLE_STRUCTURED_LOGGING_ENV)) + + cfg.enableFullImageCache = parseBool(getenv(ENABLE_FULL_IMAGE_CACHE_ENV)) + cfg.disableDeezer = parseBool(getenv(DISABLE_DEEZER_ENV)) + cfg.disableCAA = parseBool(getenv(DISABLE_COVER_ART_ARCHIVE_ENV)) + cfg.disableMusicBrainz = parseBool(getenv(DISABLE_MUSICBRAINZ_ENV)) + cfg.skipImport = parseBool(getenv(SKIP_IMPORT_ENV)) + + if getenv(DEFAULT_USERNAME_ENV) == "" { + cfg.defaultUsername = "admin" + } else { + cfg.defaultUsername = getenv(DEFAULT_USERNAME_ENV) + } + if getenv(DEFAULT_PASSWORD_ENV) == "" { + cfg.defaultPw = "changeme" + } else { + cfg.defaultPw = getenv(DEFAULT_PASSWORD_ENV) + } + + cfg.configDir = getenv(CONFIG_DIR_ENV) + if cfg.configDir == "" { + cfg.configDir = "/etc/koito" + } + + rawHosts := getenv(ALLOWED_HOSTS_ENV) + cfg.allowedHosts = strings.Split(rawHosts, ",") + cfg.allowAllHosts = cfg.allowedHosts[0] == "*" + + switch strings.ToLower(getenv(LOG_LEVEL_ENV)) { + case "debug": + cfg.logLevel = 0 + case "warn": + cfg.logLevel = 2 + case "error": + cfg.logLevel = 3 + case "fatal": + cfg.logLevel = 4 + default: + cfg.logLevel = 0 + } + return cfg, nil +} + +func parseBool(s string) bool { + if strings.ToLower(s) == "true" { + return true + } else { + return false + } +} + +// Global accessors for configuration values + +func ListenAddr() string { + lock.RLock() + defer lock.RUnlock() + return fmt.Sprintf("%s:%d", globalConfig.bindAddr, globalConfig.listenPort) +} + +func ConfigDir() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.configDir +} + +func BaseUrl() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.baseUrl +} + +func DatabaseUrl() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.databaseUrl +} + +func MusicBrainzUrl() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.musicBrainzUrl +} + +func LogLevel() int { + lock.RLock() + defer lock.RUnlock() + return globalConfig.logLevel +} + +func StructuredLogging() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.structuredLogging +} + +func LbzRelayEnabled() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.lbzRelayEnabled +} + +func LbzRelayUrl() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.lbzRelayUrl +} + +func LbzRelayToken() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.lbzRelayToken +} + +func DefaultPassword() string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.defaultPw +} + +func DefaultUsername() string { + lock.RLock() + defer lock.RUnlock() + return 
globalConfig.defaultUsername +} + +func FullImageCacheEnabled() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.enableFullImageCache +} + +func DeezerDisabled() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.disableDeezer +} + +func CoverArtArchiveDisabled() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.disableCAA +} + +func MusicBrainzDisabled() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.disableMusicBrainz +} + +func SkipImport() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.skipImport +} + +func AllowedHosts() []string { + lock.RLock() + defer lock.RUnlock() + return globalConfig.allowedHosts +} + +func AllowAllHosts() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.allowAllHosts +} + +func RateLimitDisabled() bool { + lock.RLock() + defer lock.RUnlock() + return globalConfig.disableRateLimit +} diff --git a/internal/db/db.go b/internal/db/db.go new file mode 100644 index 0000000..637a51f --- /dev/null +++ b/internal/db/db.go @@ -0,0 +1,82 @@ +// package db defines the database interface +package db + +import ( + "context" + "time" + + "github.com/gabehf/koito/internal/models" + "github.com/google/uuid" +) + +type DB interface { + // Get + GetArtist(ctx context.Context, opts GetArtistOpts) (*models.Artist, error) + GetAlbum(ctx context.Context, opts GetAlbumOpts) (*models.Album, error) + GetTrack(ctx context.Context, opts GetTrackOpts) (*models.Track, error) + GetTopTracksPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[*models.Track], error) + GetTopArtistsPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[*models.Artist], error) + GetTopAlbumsPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[*models.Album], error) + GetListensPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[*models.Listen], error) + GetListenActivity(ctx context.Context, opts ListenActivityOpts) ([]ListenActivityItem, error) + GetAllArtistAliases(ctx context.Context, id int32) ([]models.Alias, error) + GetAllAlbumAliases(ctx context.Context, id int32) ([]models.Alias, error) + GetAllTrackAliases(ctx context.Context, id int32) ([]models.Alias, error) + GetApiKeysByUserID(ctx context.Context, id int32) ([]models.ApiKey, error) + GetUserBySession(ctx context.Context, sessionId uuid.UUID) (*models.User, error) + GetUserByUsername(ctx context.Context, username string) (*models.User, error) + GetUserByApiKey(ctx context.Context, key string) (*models.User, error) + // Save + SaveArtist(ctx context.Context, opts SaveArtistOpts) (*models.Artist, error) + SaveArtistAliases(ctx context.Context, id int32, aliases []string, source string) error + SaveAlbum(ctx context.Context, opts SaveAlbumOpts) (*models.Album, error) + SaveAlbumAliases(ctx context.Context, id int32, aliases []string, source string) error + SaveTrack(ctx context.Context, opts SaveTrackOpts) (*models.Track, error) + SaveTrackAliases(ctx context.Context, id int32, aliases []string, source string) error + SaveListen(ctx context.Context, opts SaveListenOpts) error + SaveUser(ctx context.Context, opts SaveUserOpts) (*models.User, error) + SaveApiKey(ctx context.Context, opts SaveApiKeyOpts) (*models.ApiKey, error) + SaveSession(ctx context.Context, userId int32, expiresAt time.Time, persistent bool) (*models.Session, error) + // Update + UpdateArtist(ctx context.Context, opts UpdateArtistOpts) error + UpdateTrack(ctx context.Context, opts UpdateTrackOpts) error + 
UpdateAlbum(ctx context.Context, opts UpdateAlbumOpts) error + AddArtistsToAlbum(ctx context.Context, opts AddArtistsToAlbumOpts) error + UpdateUser(ctx context.Context, opts UpdateUserOpts) error + UpdateApiKeyLabel(ctx context.Context, opts UpdateApiKeyLabelOpts) error + RefreshSession(ctx context.Context, sessionId uuid.UUID, expiresAt time.Time) error + SetPrimaryArtistAlias(ctx context.Context, id int32, alias string) error + SetPrimaryAlbumAlias(ctx context.Context, id int32, alias string) error + SetPrimaryTrackAlias(ctx context.Context, id int32, alias string) error + // Delete + DeleteArtist(ctx context.Context, id int32) error + DeleteAlbum(ctx context.Context, id int32) error + DeleteTrack(ctx context.Context, id int32) error + DeleteListen(ctx context.Context, trackId int32, listenedAt time.Time) error + DeleteArtistAlias(ctx context.Context, id int32, alias string) error + DeleteAlbumAlias(ctx context.Context, id int32, alias string) error + DeleteTrackAlias(ctx context.Context, id int32, alias string) error + DeleteSession(ctx context.Context, sessionId uuid.UUID) error + DeleteApiKey(ctx context.Context, id int32) error + // Count + CountListens(ctx context.Context, period Period) (int64, error) + CountTracks(ctx context.Context, period Period) (int64, error) + CountAlbums(ctx context.Context, period Period) (int64, error) + CountArtists(ctx context.Context, period Period) (int64, error) + CountTimeListened(ctx context.Context, period Period) (int64, error) + CountUsers(ctx context.Context) (int64, error) + // Search + SearchArtists(ctx context.Context, q string) ([]*models.Artist, error) + SearchAlbums(ctx context.Context, q string) ([]*models.Album, error) + SearchTracks(ctx context.Context, q string) ([]*models.Track, error) + // Merge + MergeTracks(ctx context.Context, fromId, toId int32) error + MergeAlbums(ctx context.Context, fromId, toId int32) error + MergeArtists(ctx context.Context, fromId, toId int32) error + // Etc + ImageHasAssociation(ctx context.Context, image uuid.UUID) (bool, error) + GetImageSource(ctx context.Context, image uuid.UUID) (string, error) + AlbumsWithoutImages(ctx context.Context, from int32) ([]*models.Album, error) + Ping(ctx context.Context) error + Close(ctx context.Context) +} diff --git a/internal/db/opts.go b/internal/db/opts.go new file mode 100644 index 0000000..481ccc3 --- /dev/null +++ b/internal/db/opts.go @@ -0,0 +1,140 @@ +package db + +import ( + "time" + + "github.com/gabehf/koito/internal/models" + "github.com/google/uuid" +) + +type GetAlbumOpts struct { + ID int32 + MusicBrainzID uuid.UUID + ArtistID int32 + Title string + Titles []string + Image uuid.UUID +} + +type GetArtistOpts struct { + ID int32 + MusicBrainzID uuid.UUID + Name string + Image uuid.UUID +} + +type GetTrackOpts struct { + ID int32 + MusicBrainzID uuid.UUID + Title string + ArtistIDs []int32 +} + +type SaveTrackOpts struct { + Title string + AlbumID int32 + ArtistIDs []int32 + RecordingMbzID uuid.UUID + Duration int32 +} + +type SaveAlbumOpts struct { + Title string + MusicBrainzID uuid.UUID + Type string + ArtistIDs []int32 + VariousArtists bool + Image uuid.UUID + ImageSrc string + Aliases []string +} + +type SaveArtistOpts struct { + Name string + MusicBrainzID uuid.UUID + Aliases []string + Image uuid.UUID + ImageSrc string +} + +type UpdateApiKeyLabelOpts struct { + UserID int32 + ID int32 + Label string +} + +type SaveUserOpts struct { + Username string + Password string + Role models.UserRole +} + +type SaveApiKeyOpts struct { + Key string + UserID 
int32
+	Label  string
+}
+
+type SaveListenOpts struct {
+	TrackID int32
+	Time    time.Time
+	UserID  int32
+	Client  string
+}
+
+type UpdateTrackOpts struct {
+	ID            int32
+	MusicBrainzID uuid.UUID
+	Duration      int32
+}
+
+type UpdateArtistOpts struct {
+	ID            int32
+	MusicBrainzID uuid.UUID
+	Image         uuid.UUID
+	ImageSrc      string
+}
+
+type UpdateAlbumOpts struct {
+	ID            int32
+	MusicBrainzID uuid.UUID
+	Image         uuid.UUID
+	ImageSrc      string
+}
+
+type UpdateUserOpts struct {
+	ID       int32
+	Username string
+	Password string
+}
+
+type AddArtistsToAlbumOpts struct {
+	AlbumID   int32
+	ArtistIDs []int32
+}
+
+type GetItemsOpts struct {
+	Limit  int
+	Period Period
+	Page   int
+	Week   int // 1-52
+	Month  int // 1-12
+	Year   int
+
+	// Used only for getting top tracks
+	ArtistID int
+	AlbumID  int
+
+	// Used for getting listens
+	TrackID int
+}
+
+type ListenActivityOpts struct {
+	Step     StepInterval
+	Range    int
+	Month    int
+	Year     int
+	AlbumID  int32
+	ArtistID int32
+	TrackID  int32
+}
diff --git a/internal/db/period.go b/internal/db/period.go
new file mode 100644
index 0000000..5711d05
--- /dev/null
+++ b/internal/db/period.go
@@ -0,0 +1,108 @@
+package db
+
+import (
+	"time"
+)
+
+// should this be in db package ???
+
+type Period string
+
+const (
+	PeriodDay     Period = "day"
+	PeriodWeek    Period = "week"
+	PeriodMonth   Period = "month"
+	PeriodYear    Period = "year"
+	PeriodAllTime Period = "all_time"
+	PeriodDefault Period = "day"
+)
+
+func StartTimeFromPeriod(p Period) time.Time {
+	now := time.Now()
+	switch p {
+	case "day":
+		return now.AddDate(0, 0, -1)
+	case "week":
+		return now.AddDate(0, 0, -7)
+	case "month":
+		return now.AddDate(0, -1, 0)
+	case "year":
+		return now.AddDate(-1, 0, 0)
+	case "all_time":
+		return time.Time{}
+	default:
+		// default 1 day
+		return now.AddDate(0, 0, -1)
+	}
+}
+
+type StepInterval string
+
+const (
+	StepDay     StepInterval = "day"
+	StepWeek    StepInterval = "week"
+	StepMonth   StepInterval = "month"
+	StepYear    StepInterval = "year"
+	StepDefault StepInterval = "day"
+
+	DefaultRange int = 12
+)
+
+// start is the time of 00:00 at the beginning of opts.Range opts.Steps ago,
+// end is the end time of the current opts.Step.
+// E.g. if step is StepWeek and range is 4, start will be the time 00:00 on Sunday on the 4th week ago,
+// and end will be 23:59:59 on Saturday at the end of the current week.
+// If opts.Year (or opts.Year + opts.Month) is provided, start and end will simply be the start and end times of that year/month.
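+//
+// A small usage sketch (the June 2025 dates below are illustrative assumptions,
+// not values taken from this repository): with Step = StepMonth and Range = 12,
+// a call made on 2025-06-15 returns start = 2024-07-01 00:00:00 and
+// end = 2025-06-30 23:59:59.999999999 in local time, i.e. twelve monthly
+// buckets including the current month.
+//
+//	start, end := ListenActivityOptsToTimes(ListenActivityOpts{
+//		Step:  StepMonth,
+//		Range: 12,
+//	})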
+func ListenActivityOptsToTimes(opts ListenActivityOpts) (start, end time.Time) { + now := time.Now() + + // If Year (and optionally Month) are specified, use calendar boundaries + if opts.Year != 0 { + if opts.Month != 0 { + // Specific month of a specific year + start = time.Date(opts.Year, time.Month(opts.Month), 1, 0, 0, 0, 0, now.Location()) + end = start.AddDate(0, 1, 0).Add(-time.Nanosecond) + } else { + // Whole year + start = time.Date(opts.Year, 1, 1, 0, 0, 0, 0, now.Location()) + end = start.AddDate(1, 0, 0).Add(-time.Nanosecond) + } + return start, end + } + + // X days ago + today = range + opts.Range = opts.Range - 1 + + // Determine step and align accordingly + switch opts.Step { + case StepDay: + today := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, now.Location()) + start = today.AddDate(0, 0, -opts.Range) + end = today.AddDate(0, 0, 1).Add(-time.Nanosecond) + + case StepWeek: + // Align to most recent Sunday + weekday := int(now.Weekday()) // Sunday = 0 + startOfThisWeek := time.Date(now.Year(), now.Month(), now.Day()-weekday, 0, 0, 0, 0, now.Location()) + start = startOfThisWeek.AddDate(0, 0, -7*opts.Range) + end = startOfThisWeek.AddDate(0, 0, 7).Add(-time.Nanosecond) + + case StepMonth: + firstOfThisMonth := time.Date(now.Year(), now.Month(), 1, 0, 0, 0, 0, now.Location()) + start = firstOfThisMonth.AddDate(0, -opts.Range, 0) + end = firstOfThisMonth.AddDate(0, 1, 0).Add(-time.Nanosecond) + + case StepYear: + firstOfThisYear := time.Date(now.Year(), 1, 1, 0, 0, 0, 0, now.Location()) + start = firstOfThisYear.AddDate(-opts.Range, 0, 0) + end = firstOfThisYear.AddDate(1, 0, 0).Add(-time.Nanosecond) + + default: + // Default to daily + today := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, now.Location()) + start = today.AddDate(0, 0, -opts.Range) + end = today.AddDate(0, 0, 1).Add(-time.Nanosecond) + } + + return start, end +} diff --git a/internal/db/period_test.go b/internal/db/period_test.go new file mode 100644 index 0000000..8705ce7 --- /dev/null +++ b/internal/db/period_test.go @@ -0,0 +1,28 @@ +package db_test + +import ( + "testing" + "time" +) + +func TestListenActivityOptsToTimes(t *testing.T) { + + // default range + // opts := db.ListenActivityOpts{} + // t1, t2 := db.ListenActivityOptsToTimes(opts) + // t.Logf("%s to %s", t1, t2) + // assert.WithinDuration(t, bod(time.Now().Add(-11*24*time.Hour)), t1, 5*time.Second) + // assert.WithinDuration(t, eod(time.Now()), t2, 5*time.Second) +} + +func eod(t time.Time) time.Time { + year, month, day := t.Date() + loc := t.Location() + return time.Date(year, month, day, 23, 59, 59, 0, loc) +} + +func bod(t time.Time) time.Time { + year, month, day := t.Date() + loc := t.Location() + return time.Date(year, month, day, 0, 0, 0, 0, loc) +} diff --git a/internal/db/psql/album.go b/internal/db/psql/album.go new file mode 100644 index 0000000..0444b45 --- /dev/null +++ b/internal/db/psql/album.go @@ -0,0 +1,312 @@ +package psql + +import ( + "context" + "errors" + "strings" + "time" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/models" + "github.com/gabehf/koito/internal/repository" + "github.com/gabehf/koito/internal/utils" + "github.com/google/uuid" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgtype" +) + +func (d *Psql) GetAlbum(ctx context.Context, opts db.GetAlbumOpts) (*models.Album, error) { + l := logger.FromContext(ctx) + + var row repository.ReleasesWithTitle + var err error + + if opts.ID != 0 { + 
l.Debug().Msgf("Fetching album from DB with id %d", opts.ID) + row, err = d.q.GetRelease(ctx, opts.ID) + } else if opts.MusicBrainzID != uuid.Nil { + l.Debug().Msgf("Fetching album from DB with MusicBrainz Release ID %s", opts.MusicBrainzID) + row, err = d.q.GetReleaseByMbzID(ctx, &opts.MusicBrainzID) + } else if opts.ArtistID != 0 && opts.Title != "" { + l.Debug().Msgf("Fetching album from DB with artist_id %d and title %s", opts.ArtistID, opts.Title) + row, err = d.q.GetReleaseByArtistAndTitle(ctx, repository.GetReleaseByArtistAndTitleParams{ + ArtistID: opts.ArtistID, + Title: opts.Title, + }) + } else if opts.ArtistID != 0 && len(opts.Titles) > 0 { + l.Debug().Msgf("Fetching release group from DB with artist_id %d and titles %v", opts.ArtistID, opts.Titles) + row, err = d.q.GetReleaseByArtistAndTitles(ctx, repository.GetReleaseByArtistAndTitlesParams{ + ArtistID: opts.ArtistID, + Column1: opts.Titles, + }) + } else { + return nil, errors.New("insufficient information to get album") + } + + if err != nil { + return nil, err + } + + count, err := d.q.CountListensFromRelease(ctx, repository.CountListensFromReleaseParams{ + ListenedAt: time.Unix(0, 0), + ListenedAt_2: time.Now(), + ReleaseID: row.ID, + }) + if err != nil { + return nil, err + } + + return &models.Album{ + ID: row.ID, + MbzID: row.MusicBrainzID, + Title: row.Title, + Image: row.Image, + VariousArtists: row.VariousArtists, + ListenCount: count, + }, nil +} + +func (d *Psql) SaveAlbum(ctx context.Context, opts db.SaveAlbumOpts) (*models.Album, error) { + l := logger.FromContext(ctx) + var insertMbzID *uuid.UUID + var insertImage *uuid.UUID + if opts.MusicBrainzID != uuid.Nil { + insertMbzID = &opts.MusicBrainzID + } + if opts.Image != uuid.Nil { + insertImage = &opts.Image + } + if len(opts.ArtistIDs) < 1 { + return nil, errors.New("required parameter 'ArtistIDs' missing") + } + for _, aid := range opts.ArtistIDs { + if aid == 0 { + return nil, errors.New("none of 'ArtistIDs' may be 0") + } + } + tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return nil, err + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + l.Debug().Msgf("Inserting release '%s' into DB", opts.Title) + r, err := qtx.InsertRelease(ctx, repository.InsertReleaseParams{ + MusicBrainzID: insertMbzID, + VariousArtists: opts.VariousArtists, + Image: insertImage, + ImageSource: pgtype.Text{String: opts.ImageSrc, Valid: opts.ImageSrc != ""}, + }) + if err != nil { + return nil, err + } + for _, artistId := range opts.ArtistIDs { + l.Debug().Msgf("Associating release '%s' to artist with ID %d", opts.Title, artistId) + err = qtx.AssociateArtistToRelease(ctx, repository.AssociateArtistToReleaseParams{ + ArtistID: artistId, + ReleaseID: r.ID, + }) + if err != nil { + return nil, err + } + } + l.Debug().Msgf("Saving canonical alias %s for release %d", opts.Title, r.ID) + err = qtx.InsertReleaseAlias(ctx, repository.InsertReleaseAliasParams{ + ReleaseID: r.ID, + Alias: opts.Title, + Source: "Canonical", + IsPrimary: true, + }) + if err != nil { + l.Err(err).Msgf("Failed to save canonical alias for album %d", r.ID) + } + + err = tx.Commit(ctx) + if err != nil { + return nil, err + } + + return &models.Album{ + ID: r.ID, + MbzID: r.MusicBrainzID, + Title: opts.Title, + Image: r.Image, + VariousArtists: r.VariousArtists, + }, nil +} + +func (d *Psql) AddArtistsToAlbum(ctx context.Context, opts db.AddArtistsToAlbumOpts) error { + l := logger.FromContext(ctx) + tx, err := d.conn.BeginTx(ctx, 
pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return err + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + for _, id := range opts.ArtistIDs { + err := qtx.AssociateArtistToRelease(ctx, repository.AssociateArtistToReleaseParams{ + ReleaseID: opts.AlbumID, + ArtistID: id, + }) + if err != nil { + l.Error().Err(err).Msgf("Failed to associate release %d with artist %d", opts.AlbumID, id) + } + } + return tx.Commit(ctx) +} + +func (d *Psql) UpdateAlbum(ctx context.Context, opts db.UpdateAlbumOpts) error { + l := logger.FromContext(ctx) + if opts.ID == 0 { + return errors.New("missing album id") + } + tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return err + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + if opts.MusicBrainzID != uuid.Nil { + l.Debug().Msgf("Updating release with ID %d with MusicBrainz ID %s", opts.ID, opts.MusicBrainzID) + err := qtx.UpdateReleaseMbzID(ctx, repository.UpdateReleaseMbzIDParams{ + ID: opts.ID, + MusicBrainzID: &opts.MusicBrainzID, + }) + if err != nil { + return err + } + } + if opts.Image != uuid.Nil { + l.Debug().Msgf("Updating release with ID %d with image %s", opts.ID, opts.Image) + err := qtx.UpdateReleaseImage(ctx, repository.UpdateReleaseImageParams{ + ID: opts.ID, + Image: &opts.Image, + ImageSource: pgtype.Text{String: opts.ImageSrc, Valid: opts.ImageSrc != ""}, + }) + if err != nil { + return err + } + } + return tx.Commit(ctx) +} + +func (d *Psql) SaveAlbumAliases(ctx context.Context, id int32, aliases []string, source string) error { + l := logger.FromContext(ctx) + if id == 0 { + return errors.New("album id not specified") + } + tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return err + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + existing, err := qtx.GetAllReleaseAliases(ctx, id) + if err != nil { + return err + } + for _, v := range existing { + aliases = append(aliases, v.Alias) + } + utils.Unique(&aliases) + for _, alias := range aliases { + if strings.TrimSpace(alias) == "" { + return errors.New("aliases cannot be blank") + } + err = qtx.InsertReleaseAlias(ctx, repository.InsertReleaseAliasParams{ + Alias: strings.TrimSpace(alias), + ReleaseID: id, + Source: source, + IsPrimary: false, + }) + if err != nil { + return err + } + } + return tx.Commit(ctx) +} + +func (d *Psql) DeleteAlbum(ctx context.Context, id int32) error { + return d.q.DeleteRelease(ctx, id) +} +func (d *Psql) DeleteAlbumAlias(ctx context.Context, id int32, alias string) error { + return d.q.DeleteReleaseAlias(ctx, repository.DeleteReleaseAliasParams{ + ReleaseID: id, + Alias: alias, + }) +} + +func (d *Psql) GetAllAlbumAliases(ctx context.Context, id int32) ([]models.Alias, error) { + rows, err := d.q.GetAllReleaseAliases(ctx, id) + if err != nil { + return nil, err + } + aliases := make([]models.Alias, len(rows)) + for i, row := range rows { + aliases[i] = models.Alias{ + ID: id, + Alias: row.Alias, + Source: row.Source, + Primary: row.IsPrimary, + } + } + return aliases, nil +} + +func (d *Psql) SetPrimaryAlbumAlias(ctx context.Context, id int32, alias string) error { + l := logger.FromContext(ctx) + if id == 0 { + return errors.New("artist id not specified") + } + tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return err + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + // get all aliases + aliases, err := 
qtx.GetAllReleaseAliases(ctx, id) + if err != nil { + return err + } + primary := "" + exists := false + for _, v := range aliases { + if v.Alias == alias { + exists = true + } + if v.IsPrimary { + primary = v.Alias + } + } + if primary == alias { + // no-op rename + return nil + } + if !exists { + return errors.New("alias does not exist") + } + err = qtx.SetReleaseAliasPrimaryStatus(ctx, repository.SetReleaseAliasPrimaryStatusParams{ + ReleaseID: id, + Alias: alias, + IsPrimary: true, + }) + if err != nil { + return err + } + err = qtx.SetReleaseAliasPrimaryStatus(ctx, repository.SetReleaseAliasPrimaryStatusParams{ + ReleaseID: id, + Alias: primary, + IsPrimary: false, + }) + if err != nil { + return err + } + return tx.Commit(ctx) +} diff --git a/internal/db/psql/album_test.go b/internal/db/psql/album_test.go new file mode 100644 index 0000000..4b30203 --- /dev/null +++ b/internal/db/psql/album_test.go @@ -0,0 +1,319 @@ +package psql_test + +import ( + "context" + "testing" + + "github.com/gabehf/koito/internal/catalog" + "github.com/gabehf/koito/internal/db" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func truncateTestData(t *testing.T) { + err := store.Exec(context.Background(), + `TRUNCATE + artists, + artist_aliases, + tracks, + artist_tracks, + releases, + artist_releases, + release_aliases, + listens + RESTART IDENTITY CASCADE`) + require.NoError(t, err) +} + +func testDataForRelease(t *testing.T) { + truncateTestData(t) + err := store.Exec(context.Background(), + `INSERT INTO artists (musicbrainz_id) + VALUES ('00000000-0000-0000-0000-000000000001')`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) + VALUES (1, 'ATARASHII GAKKO!', 'MusicBrainz', true)`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO artists (musicbrainz_id) + VALUES ('00000000-0000-0000-0000-000000000002')`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) + VALUES (2, 'Masayuki Suzuki', 'MusicBrainz', true)`) + require.NoError(t, err) +} + +func TestGetAlbum(t *testing.T) { + testDataForRelease(t) + ctx := context.Background() + + // Insert test data + rg, err := store.SaveAlbum(ctx, db.SaveAlbumOpts{ + Title: "Test Release Group", + ArtistIDs: []int32{1}, + }) + require.NoError(t, err) + + // Test GetAlbum by ID + result, err := store.GetAlbum(ctx, db.GetAlbumOpts{ID: rg.ID}) + require.NoError(t, err) + assert.Equal(t, rg.ID, result.ID) + assert.Equal(t, "Test Release Group", result.Title) + + // Test GetAlbum with insufficient information + _, err = store.GetAlbum(ctx, db.GetAlbumOpts{}) + assert.Error(t, err) + + truncateTestData(t) +} + +func TestSaveAlbum(t *testing.T) { + testDataForRelease(t) + ctx := context.Background() + + // Save release group with artist IDs + artistIDs := []int32{1, 2} + rg, err := store.SaveAlbum(ctx, db.SaveAlbumOpts{ + Title: "New Release Group", + ArtistIDs: artistIDs, + }) + require.NoError(t, err) + + // Verify release group was saved + assert.Equal(t, "New Release Group", rg.Title) + + // Verify release was created for release group + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM releases_with_title + WHERE title = $1 AND id = $2 + )`, "New Release Group", rg.ID) + require.NoError(t, err) + assert.True(t, exists, "expected release to exist") + + // Verify artist associations 
were created for release group + for _, aid := range artistIDs { + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM artist_releases + WHERE artist_id = $1 AND release_id = $2 + )`, aid, rg.ID) + require.NoError(t, err) + assert.True(t, exists, "expected artist association to exist") + } + + truncateTestData(t) +} + +func TestUpdateAlbum(t *testing.T) { + testDataForRelease(t) + ctx := context.Background() + + rg, err := store.SaveAlbum(ctx, db.SaveAlbumOpts{ + Title: "Old Title", + ArtistIDs: []int32{1}, + }) + require.NoError(t, err) + + newMbzID := uuid.New() + imgid := uuid.New() + err = store.UpdateAlbum(ctx, db.UpdateAlbumOpts{ + ID: rg.ID, + MusicBrainzID: newMbzID, + Image: imgid, + ImageSrc: catalog.ImageSourceUserUpload, + }) + require.NoError(t, err) + + result, err := store.GetAlbum(ctx, db.GetAlbumOpts{ID: rg.ID}) + require.NoError(t, err) + assert.Equal(t, newMbzID, *result.MbzID) + assert.Equal(t, imgid, *result.Image) + + truncateTestData(t) +} +func TestAddArtistsToAlbum(t *testing.T) { + testDataForRelease(t) + ctx := context.Background() + + // Insert test album + rg, err := store.SaveAlbum(ctx, db.SaveAlbumOpts{ + Title: "Test Album", + ArtistIDs: []int32{1}, + }) + require.NoError(t, err) + + // Add additional artists to the album + err = store.AddArtistsToAlbum(ctx, db.AddArtistsToAlbumOpts{ + AlbumID: rg.ID, + ArtistIDs: []int32{2}, + }) + require.NoError(t, err) + + // Verify artist associations were created + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM artist_releases + WHERE artist_id = $1 AND release_id = $2 + )`, 2, rg.ID) + require.NoError(t, err) + assert.True(t, exists, "expected artist association to exist") + + truncateTestData(t) +} +func TestSaveAlbumAliases(t *testing.T) { + testDataForRelease(t) + ctx := context.Background() + + // Insert test album + rg, err := store.SaveAlbum(ctx, db.SaveAlbumOpts{ + Title: "Test Album", + ArtistIDs: []int32{1}, + }) + require.NoError(t, err) + + // Save aliases for the album + aliases := []string{"Alias 1", "Alias 2"} + err = store.SaveAlbumAliases(ctx, rg.ID, aliases, "TestSource") + require.NoError(t, err) + + // Verify aliases were saved + for _, alias := range aliases { + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM release_aliases + WHERE release_id = $1 AND alias = $2 + )`, rg.ID, alias) + require.NoError(t, err) + assert.True(t, exists, "expected alias to exist") + } + + err = store.SetPrimaryAlbumAlias(ctx, 1, "Alias 1") + require.NoError(t, err) + album, err := store.GetAlbum(ctx, db.GetAlbumOpts{ID: rg.ID}) + require.NoError(t, err) + assert.Equal(t, "Alias 1", album.Title) + + err = store.SetPrimaryAlbumAlias(ctx, 1, "Fake Alias") + require.Error(t, err) + + store.SetPrimaryAlbumAlias(ctx, 1, "Album One") + + truncateTestData(t) +} +func TestDeleteAlbum(t *testing.T) { + testDataForRelease(t) + ctx := context.Background() + + testDataForTopItems(t) + + // Delete the album + err := store.DeleteAlbum(ctx, 1) + require.NoError(t, err) + + // Verify album was deleted + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM releases + WHERE id = $1 + )`, 1) + require.NoError(t, err) + assert.False(t, exists, "expected album to be deleted") + + // Verify album's track was deleted + exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM tracks + WHERE id = $1 + )`, 1) + require.NoError(t, err) + assert.False(t, exists, "expected album's tracks to be deleted") + + // Verify album's listens was deleted + 
exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.False(t, exists, "expected album's listens to be deleted") + + truncateTestData(t) +} +func TestDeleteAlbumAlias(t *testing.T) { + testDataForRelease(t) + ctx := context.Background() + + // Insert test album + rg, err := store.SaveAlbum(ctx, db.SaveAlbumOpts{ + Title: "Test Album", + ArtistIDs: []int32{1}, + }) + require.NoError(t, err) + + // Save aliases for the album + aliases := []string{"Alias 1", "Alias 2"} + err = store.SaveAlbumAliases(ctx, rg.ID, aliases, "TestSource") + require.NoError(t, err) + + // Delete one alias + err = store.DeleteAlbumAlias(ctx, rg.ID, "Alias 1") + require.NoError(t, err) + + // Verify alias was deleted + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM release_aliases + WHERE release_id = $1 AND alias = $2 + )`, rg.ID, "Alias 1") + require.NoError(t, err) + assert.False(t, exists, "expected alias to be deleted") + + // Verify other alias still exists + exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM release_aliases + WHERE release_id = $1 AND alias = $2 + )`, rg.ID, "Alias 2") + require.NoError(t, err) + assert.True(t, exists, "expected alias to still exist") + + truncateTestData(t) +} +func TestGetAllAlbumAliases(t *testing.T) { + testDataForRelease(t) + ctx := context.Background() + + // Insert test album + rg, err := store.SaveAlbum(ctx, db.SaveAlbumOpts{ + Title: "Test Album", + ArtistIDs: []int32{1}, + }) + require.NoError(t, err) + + // Save aliases for the album + aliases := []string{"Alias 1", "Alias 2"} + err = store.SaveAlbumAliases(ctx, rg.ID, aliases, "TestSource") + require.NoError(t, err) + + // Retrieve all aliases + result, err := store.GetAllAlbumAliases(ctx, rg.ID) + require.NoError(t, err) + assert.Len(t, result, len(aliases)+1) // new + canonical + + for _, alias := range aliases { + found := false + for _, res := range result { + if res.Alias == alias { + found = true + break + } + } + assert.True(t, found, "expected alias to be retrieved") + } + + truncateTestData(t) +} diff --git a/internal/db/psql/artist.go b/internal/db/psql/artist.go new file mode 100644 index 0000000..0368fc6 --- /dev/null +++ b/internal/db/psql/artist.go @@ -0,0 +1,309 @@ +package psql + +import ( + "context" + "errors" + "strings" + "time" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/models" + "github.com/gabehf/koito/internal/repository" + "github.com/gabehf/koito/internal/utils" + "github.com/google/uuid" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgtype" +) + +func (d *Psql) GetArtist(ctx context.Context, opts db.GetArtistOpts) (*models.Artist, error) { + l := logger.FromContext(ctx) + if opts.ID != 0 { + l.Debug().Msgf("Fetching artist from DB with id %d", opts.ID) + row, err := d.q.GetArtist(ctx, opts.ID) + if err != nil { + return nil, err + } + count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{ + ListenedAt: time.Unix(0, 0), + ListenedAt_2: time.Now(), + ArtistID: row.ID, + }) + if err != nil { + return nil, err + } + return &models.Artist{ + ID: row.ID, + MbzID: row.MusicBrainzID, + Name: row.Name, + Aliases: row.Aliases, + Image: row.Image, + ListenCount: count, + }, nil + } else if opts.MusicBrainzID != uuid.Nil { + l.Debug().Msgf("Fetching artist from DB with MusicBrainz ID %s", opts.MusicBrainzID) + row, err := d.q.GetArtistByMbzID(ctx, 
&opts.MusicBrainzID) + if err != nil { + return nil, err + } + count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{ + ListenedAt: time.Unix(0, 0), + ListenedAt_2: time.Now(), + ArtistID: row.ID, + }) + if err != nil { + return nil, err + } + return &models.Artist{ + ID: row.ID, + MbzID: row.MusicBrainzID, + Name: row.Name, + Aliases: row.Aliases, + Image: row.Image, + ListenCount: count, + }, nil + } else if opts.Name != "" { + l.Debug().Msgf("Fetching artist from DB with name '%s'", opts.Name) + row, err := d.q.GetArtistByName(ctx, opts.Name) + if err != nil { + return nil, err + } + count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{ + ListenedAt: time.Unix(0, 0), + ListenedAt_2: time.Now(), + ArtistID: row.ID, + }) + if err != nil { + return nil, err + } + return &models.Artist{ + ID: row.ID, + MbzID: row.MusicBrainzID, + Name: row.Name, + Aliases: row.Aliases, + Image: row.Image, + ListenCount: count, + }, nil + } else { + return nil, errors.New("insufficient information to get artist") + } +} + +// Inserts all unique aliases into the DB with specified source +func (d *Psql) SaveArtistAliases(ctx context.Context, id int32, aliases []string, source string) error { + l := logger.FromContext(ctx) + if id == 0 { + return errors.New("artist id not specified") + } + tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return err + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + existing, err := qtx.GetAllArtistAliases(ctx, id) + if err != nil { + return err + } + for _, v := range existing { + aliases = append(aliases, v.Alias) + } + utils.Unique(&aliases) + for _, alias := range aliases { + if strings.TrimSpace(alias) == "" { + return errors.New("aliases cannot be blank") + } + err = qtx.InsertArtistAlias(ctx, repository.InsertArtistAliasParams{ + Alias: strings.TrimSpace(alias), + ArtistID: id, + Source: source, + IsPrimary: false, + }) + if err != nil { + return err + } + } + return tx.Commit(ctx) +} + +func (d *Psql) DeleteArtist(ctx context.Context, id int32) error { + return d.q.DeleteArtist(ctx, id) +} + +// Equivalent to Psql.SaveArtist, then Psql.SaveMbzAliases +func (d *Psql) SaveArtist(ctx context.Context, opts db.SaveArtistOpts) (*models.Artist, error) { + l := logger.FromContext(ctx) + var insertMbzID *uuid.UUID + var insertImage *uuid.UUID + if opts.MusicBrainzID != uuid.Nil { + insertMbzID = &opts.MusicBrainzID + } + if opts.Image != uuid.Nil { + insertImage = &opts.Image + } + tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return nil, err + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + opts.Name = strings.TrimSpace(opts.Name) + if opts.Name == "" { + return nil, errors.New("name must not be blank") + } + l.Debug().Msgf("Inserting artist '%s' into DB", opts.Name) + a, err := qtx.InsertArtist(ctx, repository.InsertArtistParams{ + MusicBrainzID: insertMbzID, + Image: insertImage, + ImageSource: pgtype.Text{String: opts.ImageSrc, Valid: opts.ImageSrc != ""}, + }) + if err != nil { + return nil, err + } + l.Debug().Msgf("Inserting canonical alias '%s' into DB for artist with id %d", opts.Name, a.ID) + err = qtx.InsertArtistAlias(ctx, repository.InsertArtistAliasParams{ + ArtistID: a.ID, + Alias: opts.Name, + Source: "Canonical", + IsPrimary: true, + }) + if err != nil { + l.Error().Err(err).Msgf("Error inserting canonical alias for artist '%s'", opts.Name) + return nil, 
err + } + err = tx.Commit(ctx) + if err != nil { + l.Err(err).Msg("Failed to commit insert artist transaction") + return nil, err + } + artist := &models.Artist{ + ID: a.ID, + Name: opts.Name, + Image: a.Image, + MbzID: a.MusicBrainzID, + Aliases: []string{opts.Name}, + } + if len(opts.Aliases) > 0 { + l.Debug().Msgf("Inserting aliases '%v' into DB for artist '%s'", opts.Aliases, opts.Name) + err = d.SaveArtistAliases(ctx, a.ID, opts.Aliases, "MusicBrainz") + if err != nil { + return nil, err + } + artist.Aliases = opts.Aliases + } + return artist, nil +} + +func (d *Psql) UpdateArtist(ctx context.Context, opts db.UpdateArtistOpts) error { + l := logger.FromContext(ctx) + if opts.ID == 0 { + return errors.New("artist id not specified") + } + tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return err + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + if opts.MusicBrainzID != uuid.Nil { + l.Debug().Msgf("Updating artist with id %d with MusicBrainz ID %s", opts.ID, opts.MusicBrainzID) + err := qtx.UpdateArtistMbzID(ctx, repository.UpdateArtistMbzIDParams{ + ID: opts.ID, + MusicBrainzID: &opts.MusicBrainzID, + }) + if err != nil { + return err + } + } + if opts.Image != uuid.Nil { + l.Debug().Msgf("Updating artist with id %d with image %s", opts.ID, opts.Image) + err = qtx.UpdateArtistImage(ctx, repository.UpdateArtistImageParams{ + ID: opts.ID, + Image: &opts.Image, + ImageSource: pgtype.Text{String: opts.ImageSrc, Valid: opts.ImageSrc != ""}, + }) + if err != nil { + return err + } + } + return tx.Commit(ctx) +} + +func (d *Psql) DeleteArtistAlias(ctx context.Context, id int32, alias string) error { + return d.q.DeleteArtistAlias(ctx, repository.DeleteArtistAliasParams{ + ArtistID: id, + Alias: alias, + }) +} +func (d *Psql) GetAllArtistAliases(ctx context.Context, id int32) ([]models.Alias, error) { + rows, err := d.q.GetAllArtistAliases(ctx, id) + if err != nil { + return nil, err + } + aliases := make([]models.Alias, len(rows)) + for i, row := range rows { + aliases[i] = models.Alias{ + ID: id, + Alias: row.Alias, + Source: row.Source, + Primary: row.IsPrimary, + } + } + return aliases, nil +} + +func (d *Psql) SetPrimaryArtistAlias(ctx context.Context, id int32, alias string) error { + l := logger.FromContext(ctx) + if id == 0 { + return errors.New("artist id not specified") + } + tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return err + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + // get all aliases + aliases, err := qtx.GetAllArtistAliases(ctx, id) + if err != nil { + return err + } + primary := "" + exists := false + for _, v := range aliases { + if v.Alias == alias { + exists = true + } + if v.IsPrimary { + primary = v.Alias + } + } + if primary == alias { + // no-op rename + return nil + } + if !exists { + return errors.New("alias does not exist") + } + err = qtx.SetArtistAliasPrimaryStatus(ctx, repository.SetArtistAliasPrimaryStatusParams{ + ArtistID: id, + Alias: alias, + IsPrimary: true, + }) + if err != nil { + return err + } + err = qtx.SetArtistAliasPrimaryStatus(ctx, repository.SetArtistAliasPrimaryStatusParams{ + ArtistID: id, + Alias: primary, + IsPrimary: false, + }) + if err != nil { + return err + } + return tx.Commit(ctx) +} diff --git a/internal/db/psql/artist_test.go b/internal/db/psql/artist_test.go new file mode 100644 index 0000000..6ed5c4f --- /dev/null +++ b/internal/db/psql/artist_test.go @@ -0,0 +1,247 @@ 
+package psql_test + +import ( + "context" + "slices" + "testing" + + "github.com/gabehf/koito/internal/catalog" + "github.com/gabehf/koito/internal/db" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGetArtist(t *testing.T) { + ctx := context.Background() + mbzId := uuid.MustParse("00000000-0000-0000-0000-000000000001") + // Insert test data + artist, err := store.SaveArtist(ctx, db.SaveArtistOpts{ + Name: "Test Artist", + MusicBrainzID: mbzId, + }) + require.NoError(t, err) + + // Test GetArtist by ID + result, err := store.GetArtist(ctx, db.GetArtistOpts{ID: artist.ID}) + require.NoError(t, err) + assert.Equal(t, artist.ID, result.ID) + assert.Equal(t, "Test Artist", result.Name) + + // Test GetArtist by Name + result, err = store.GetArtist(ctx, db.GetArtistOpts{Name: artist.Name}) + require.NoError(t, err) + assert.Equal(t, artist.ID, result.ID) + + // Test GetArtist by MusicBrainzID + result, err = store.GetArtist(ctx, db.GetArtistOpts{MusicBrainzID: mbzId}) + require.NoError(t, err) + assert.Equal(t, artist.ID, result.ID) + + // Test GetArtist with insufficient information + _, err = store.GetArtist(ctx, db.GetArtistOpts{}) + assert.Error(t, err) + + truncateTestData(t) +} + +func TestSaveAliases(t *testing.T) { + ctx := context.Background() + + // Insert test artist + artist, err := store.SaveArtist(ctx, db.SaveArtistOpts{ + Name: "Alias Artist", + }) + require.NoError(t, err) + + // Save aliases + aliases := []string{"Alias1", "Alias2"} + err = store.SaveArtistAliases(ctx, artist.ID, aliases, "MusicBrainz") + require.NoError(t, err) + + // Verify aliases were saved + for _, alias := range aliases { + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM artist_aliases + WHERE artist_id = $1 AND alias = $2 + )`, artist.ID, alias) + require.NoError(t, err) + assert.True(t, exists, "expected alias to exist") + } + + err = store.SetPrimaryArtistAlias(ctx, 1, "Alias1") + require.NoError(t, err) + artist, err = store.GetArtist(ctx, db.GetArtistOpts{ID: artist.ID}) + require.NoError(t, err) + assert.Equal(t, "Alias1", artist.Name) + + err = store.SetPrimaryArtistAlias(ctx, 1, "Fake Alias") + require.Error(t, err) + + truncateTestData(t) +} + +func TestSaveArtist(t *testing.T) { + ctx := context.Background() + + // Save artist with aliases + aliases := []string{"Alias1", "Alias2"} + artist, err := store.SaveArtist(ctx, db.SaveArtistOpts{ + Name: "New Artist", + Aliases: aliases, + }) + require.NoError(t, err) + + // Verify artist was saved + assert.Equal(t, "New Artist", artist.Name) + + // Verify aliases were saved + for _, alias := range slices.Concat(aliases, []string{"New Artist"}) { + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM artist_aliases + WHERE artist_id = $1 AND alias = $2 + )`, artist.ID, alias) + require.NoError(t, err) + assert.True(t, exists, "expected alias '%s' to exist", alias) + } + + truncateTestData(t) +} + +func TestUpdateArtist(t *testing.T) { + ctx := context.Background() + + // Insert test artist + artist, err := store.SaveArtist(ctx, db.SaveArtistOpts{ + Name: "Old Name", + }) + require.NoError(t, err) + + imgid := uuid.New() + err = store.UpdateArtist(ctx, db.UpdateArtistOpts{ + ID: artist.ID, + Image: imgid, + ImageSrc: catalog.ImageSourceUserUpload, + }) + require.NoError(t, err) + + result, err := store.GetArtist(ctx, db.GetArtistOpts{ID: artist.ID}) + require.NoError(t, err) + assert.Equal(t, imgid, *result.Image) + + truncateTestData(t) +} 
+func TestGetAllArtistAliases(t *testing.T) { + ctx := context.Background() + + // Insert test artist + artist, err := store.SaveArtist(ctx, db.SaveArtistOpts{ + Name: "Alias Artist", + Aliases: []string{"Alias1", "Alias2"}, + }) + require.NoError(t, err) + + // Retrieve all aliases + result, err := store.GetAllArtistAliases(ctx, artist.ID) + require.NoError(t, err) + assert.Len(t, result, 3) // Includes canonical alias + + // Verify aliases were retrieved + expectedAliases := []string{"Alias Artist", "Alias1", "Alias2"} + for _, alias := range expectedAliases { + found := false + for _, res := range result { + if res.Alias == alias { + found = true + break + } + } + assert.True(t, found, "expected alias '%s' to be retrieved", alias) + } + + truncateTestData(t) +} +func TestDeleteArtistAlias(t *testing.T) { + ctx := context.Background() + + // Insert test artist + artist, err := store.SaveArtist(ctx, db.SaveArtistOpts{ + Name: "Alias Artist", + Aliases: []string{"Alias1", "Alias2"}, + }) + require.NoError(t, err) + + // Delete one alias + err = store.DeleteArtistAlias(ctx, artist.ID, "Alias1") + require.NoError(t, err) + + // Verify alias was deleted + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM artist_aliases + WHERE artist_id = $1 AND alias = $2 + )`, artist.ID, "Alias1") + require.NoError(t, err) + assert.False(t, exists, "expected alias to be deleted") + + // Verify other alias still exists + exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM artist_aliases + WHERE artist_id = $1 AND alias = $2 + )`, artist.ID, "Alias2") + require.NoError(t, err) + assert.True(t, exists, "expected alias to still exist") + + truncateTestData(t) +} +func TestDeleteArtist(t *testing.T) { + ctx := context.Background() + + // set up a lot of test data, 4 artists, 4 albums, 4 tracks, 10 listens + testDataForTopItems(t) + + // Delete the artist + err := store.DeleteArtist(ctx, 1) + require.NoError(t, err) + + // Verify artist was deleted + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM artists + WHERE id = $1 + )`, 1) + require.NoError(t, err) + assert.False(t, exists, "expected artist to be deleted") + + // Verify artist's release was deleted + exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM releases + WHERE id = $1 + )`, 1) + require.NoError(t, err) + assert.False(t, exists, "expected artist's release to be deleted") + + // Verify artist's track was deleted + exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM tracks + WHERE id = $1 + )`, 1) + require.NoError(t, err) + assert.False(t, exists, "expected artist's tracks to be deleted") + + // Verify artist's listens was deleted + exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.False(t, exists, "expected artist's listens to be deleted") + + truncateTestData(t) +} diff --git a/internal/db/psql/counts.go b/internal/db/psql/counts.go new file mode 100644 index 0000000..5523c92 --- /dev/null +++ b/internal/db/psql/counts.go @@ -0,0 +1,70 @@ +package psql + +import ( + "context" + "time" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/repository" +) + +func (p *Psql) CountListens(ctx context.Context, period db.Period) (int64, error) { + t2 := time.Now() + t1 := db.StartTimeFromPeriod(period) + count, err := p.q.CountListens(ctx, repository.CountListensParams{ + ListenedAt: t1, + ListenedAt_2: t2, + }) + if err != nil { + return 
0, err + } + return count, nil +} +func (p *Psql) CountTracks(ctx context.Context, period db.Period) (int64, error) { + t2 := time.Now() + t1 := db.StartTimeFromPeriod(period) + count, err := p.q.CountTopTracks(ctx, repository.CountTopTracksParams{ + ListenedAt: t1, + ListenedAt_2: t2, + }) + if err != nil { + return 0, err + } + return count, nil +} +func (p *Psql) CountAlbums(ctx context.Context, period db.Period) (int64, error) { + t2 := time.Now() + t1 := db.StartTimeFromPeriod(period) + count, err := p.q.CountTopReleases(ctx, repository.CountTopReleasesParams{ + ListenedAt: t1, + ListenedAt_2: t2, + }) + if err != nil { + return 0, err + } + return count, nil +} +func (p *Psql) CountArtists(ctx context.Context, period db.Period) (int64, error) { + t2 := time.Now() + t1 := db.StartTimeFromPeriod(period) + count, err := p.q.CountTopArtists(ctx, repository.CountTopArtistsParams{ + ListenedAt: t1, + ListenedAt_2: t2, + }) + if err != nil { + return 0, err + } + return count, nil +} +func (p *Psql) CountTimeListened(ctx context.Context, period db.Period) (int64, error) { + t2 := time.Now() + t1 := db.StartTimeFromPeriod(period) + count, err := p.q.CountTimeListened(ctx, repository.CountTimeListenedParams{ + ListenedAt: t1, + ListenedAt_2: t2, + }) + if err != nil { + return 0, err + } + return count, nil +} diff --git a/internal/db/psql/counts_test.go b/internal/db/psql/counts_test.go new file mode 100644 index 0000000..b6ddd18 --- /dev/null +++ b/internal/db/psql/counts_test.go @@ -0,0 +1,76 @@ +package psql_test + +import ( + "context" + "testing" + + "github.com/gabehf/koito/internal/db" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestCountListens(t *testing.T) { + ctx := context.Background() + testDataForTopItems(t) + + // Test CountListens + period := db.PeriodWeek + count, err := store.CountListens(ctx, period) + require.NoError(t, err) + assert.Equal(t, int64(1), count, "expected listens count to match inserted data") + + truncateTestData(t) +} + +func TestCountTracks(t *testing.T) { + ctx := context.Background() + testDataForTopItems(t) + + // Test CountTracks + period := db.PeriodMonth + count, err := store.CountTracks(ctx, period) + require.NoError(t, err) + assert.Equal(t, int64(2), count, "expected tracks count to match inserted data") + + truncateTestData(t) +} + +func TestCountAlbums(t *testing.T) { + ctx := context.Background() + testDataForTopItems(t) + + // Test CountAlbums + period := db.PeriodYear + count, err := store.CountAlbums(ctx, period) + require.NoError(t, err) + assert.Equal(t, int64(3), count, "expected albums count to match inserted data") + + truncateTestData(t) +} + +func TestCountArtists(t *testing.T) { + ctx := context.Background() + testDataForTopItems(t) + + // Test CountArtists + period := db.PeriodAllTime + count, err := store.CountArtists(ctx, period) + require.NoError(t, err) + assert.Equal(t, int64(4), count, "expected artists count to match inserted data") + + truncateTestData(t) +} + +func TestCountTimeListened(t *testing.T) { + ctx := context.Background() + testDataForTopItems(t) + + // Test CountTimeListened + period := db.PeriodMonth + count, err := store.CountTimeListened(ctx, period) + require.NoError(t, err) + // 3 listens in past month, each 100 seconds + assert.Equal(t, int64(300), count, "expected total time listened to match inserted data") + + truncateTestData(t) +} diff --git a/internal/db/psql/images.go b/internal/db/psql/images.go new file mode 100644 index 0000000..a2b7710 --- /dev/null 
+++ b/internal/db/psql/images.go @@ -0,0 +1,74 @@ +package psql + +import ( + "context" + "encoding/json" + "errors" + + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/models" + "github.com/gabehf/koito/internal/repository" + "github.com/google/uuid" + "github.com/jackc/pgx/v5" +) + +func (d *Psql) ImageHasAssociation(ctx context.Context, image uuid.UUID) (bool, error) { + _, err := d.q.GetReleaseByImageID(ctx, &image) + if err == nil { + return true, err + } else if !errors.Is(err, pgx.ErrNoRows) { + return false, err + } + _, err = d.q.GetArtistByImage(ctx, &image) + if err == nil { + return true, err + } else if !errors.Is(err, pgx.ErrNoRows) { + return false, err + } + return false, nil +} + +func (d *Psql) GetImageSource(ctx context.Context, image uuid.UUID) (string, error) { + r, err := d.q.GetReleaseByImageID(ctx, &image) + if err == nil { + return r.ImageSource.String, err + } else if !errors.Is(err, pgx.ErrNoRows) { + return "", err + } + rr, err := d.q.GetArtistByImage(ctx, &image) + if err == nil { + return rr.ImageSource.String, err + } else if !errors.Is(err, pgx.ErrNoRows) { + return "", err + } + return "", nil +} + +func (d *Psql) AlbumsWithoutImages(ctx context.Context, from int32) ([]*models.Album, error) { + l := logger.FromContext(ctx) + rows, err := d.q.GetReleasesWithoutImages(ctx, repository.GetReleasesWithoutImagesParams{ + Limit: 20, + ID: from, + }) + if err != nil { + return nil, err + } + albums := make([]*models.Album, len(rows)) + for i, row := range rows { + artists := make([]models.SimpleArtist, 0) + err = json.Unmarshal(row.Artists, &artists) + if err != nil { + l.Err(err).Msgf("Error unmarshalling artists for release group with id %d", row.ID) + artists = nil + } + albums[i] = &models.Album{ + ID: row.ID, + Image: row.Image, + Title: row.Title, + MbzID: row.MusicBrainzID, + VariousArtists: row.VariousArtists, + Artists: artists, + } + } + return albums, nil +} diff --git a/internal/db/psql/images_test.go b/internal/db/psql/images_test.go new file mode 100644 index 0000000..c63d2b3 --- /dev/null +++ b/internal/db/psql/images_test.go @@ -0,0 +1,106 @@ +package psql_test + +import ( + "context" + "testing" + + "github.com/gabehf/koito/internal/catalog" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func setupTestDataForImages(t *testing.T) { + truncateTestData(t) + + // Insert artists + err := store.Exec(context.Background(), + `INSERT INTO artists (musicbrainz_id, image, image_source) + VALUES ('00000000-0000-0000-0000-000000000001', '11111111-1111-1111-1111-111111111111', 'User Upload'), + ('00000000-0000-0000-0000-000000000002', NULL, NULL)`) + require.NoError(t, err) + + // Insert artist aliases + err = store.Exec(context.Background(), + `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) + VALUES (1, 'Artist One', 'Testing', true), + (2, 'Artist Two', 'Testing', true)`) + require.NoError(t, err) + + // Insert albums + err = store.Exec(context.Background(), + `INSERT INTO releases (musicbrainz_id, image, image_source) + VALUES ('22222222-2222-2222-2222-222222222222', '33333333-3333-3333-3333-333333333333', 'Automatic'), + ('44444444-4444-4444-4444-444444444444', NULL, NULL)`) + require.NoError(t, err) + + // Insert release aliases + err = store.Exec(context.Background(), + `INSERT INTO release_aliases (release_id, alias, source, is_primary) + VALUES (1, 'Album One', 'Testing', true), + (2, 'Album Two', 'Testing', true)`) + require.NoError(t, err) 
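+
+	// Schema note: artists and releases keep their display names in the
+	// artist_aliases / release_aliases tables, and the row flagged
+	// is_primary = true is the canonical name that read queries report as
+	// Name/Title (which is why AlbumsWithoutImages can return "Album Two"
+	// below from nothing but its primary alias).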
+ + // Associate albums with artists + err = store.Exec(context.Background(), + `INSERT INTO artist_releases (artist_id, release_id) + VALUES (1, 1), (2, 2)`) + require.NoError(t, err) +} + +func TestImageHasAssociation(t *testing.T) { + ctx := context.Background() + setupTestDataForImages(t) + + // Test image with association + imageID := uuid.MustParse("11111111-1111-1111-1111-111111111111") + hasAssociation, err := store.ImageHasAssociation(ctx, imageID) + require.NoError(t, err) + assert.True(t, hasAssociation, "expected image to have an association") + + // Test image without association + imageID = uuid.MustParse("55555555-5555-5555-5555-555555555555") + hasAssociation, err = store.ImageHasAssociation(ctx, imageID) + require.NoError(t, err) + assert.False(t, hasAssociation, "expected image to have no association") + + truncateTestData(t) +} + +func TestGetImageSource(t *testing.T) { + ctx := context.Background() + setupTestDataForImages(t) + + // Test image source for an album + imageID := uuid.MustParse("33333333-3333-3333-3333-333333333333") + source, err := store.GetImageSource(ctx, imageID) + require.NoError(t, err) + assert.Equal(t, "Automatic", source, "expected image source to match") + + // Test image source for an artist + imageID = uuid.MustParse("11111111-1111-1111-1111-111111111111") + source, err = store.GetImageSource(ctx, imageID) + require.NoError(t, err) + assert.Equal(t, catalog.ImageSourceUserUpload, source, "expected image source to match") + + // Test image source for a non-existent image + imageID = uuid.MustParse("55555555-5555-5555-5555-555555555555") + source, err = store.GetImageSource(ctx, imageID) + require.NoError(t, err) + assert.Equal(t, "", source, "expected no image source for non-existent image") + + truncateTestData(t) +} + +func TestAlbumsWithoutImages(t *testing.T) { + ctx := context.Background() + setupTestDataForImages(t) + + // Test albums without images + albums, err := store.AlbumsWithoutImages(ctx, 0) + require.NoError(t, err) + require.Len(t, albums, 1, "expected one album without an image") + assert.Equal(t, "Album Two", albums[0].Title, "expected album title to match") + + truncateTestData(t) +} diff --git a/internal/db/psql/listen.go b/internal/db/psql/listen.go new file mode 100644 index 0000000..0864643 --- /dev/null +++ b/internal/db/psql/listen.go @@ -0,0 +1,218 @@ +package psql + +import ( + "context" + "encoding/json" + "errors" + "time" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/models" + "github.com/gabehf/koito/internal/repository" + "github.com/gabehf/koito/internal/utils" +) + +func (d *Psql) GetListensPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[*models.Listen], error) { + l := logger.FromContext(ctx) + offset := (opts.Page - 1) * opts.Limit + t1, t2, err := utils.DateRange(opts.Week, opts.Month, opts.Year) + if err != nil { + return nil, err + } + if opts.Month == 0 && opts.Year == 0 { + // use period, not date range + t2 = time.Now() + t1 = db.StartTimeFromPeriod(opts.Period) + } + if opts.Limit == 0 { + opts.Limit = DefaultItemsPerPage + } + var listens []*models.Listen + var count int64 + if opts.TrackID > 0 { + l.Debug().Msgf("Fetching %d listens with period %s on page %d from range %v to %v", + opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + rows, err := d.q.GetLastListensFromTrackPaginated(ctx, repository.GetLastListensFromTrackPaginatedParams{ + ListenedAt: t1, + 
ListenedAt_2: t2, + Limit: int32(opts.Limit), + Offset: int32(offset), + ID: int32(opts.TrackID), + }) + if err != nil { + return nil, err + } + listens = make([]*models.Listen, len(rows)) + for i, row := range rows { + t := &models.Listen{ + Track: models.Track{ + Title: row.TrackTitle, + ID: row.TrackID, + }, + Time: row.ListenedAt, + } + err = json.Unmarshal(row.Artists, &t.Track.Artists) + if err != nil { + return nil, err + } + listens[i] = t + } + count, err = d.q.CountListensFromTrack(ctx, repository.CountListensFromTrackParams{ + ListenedAt: t1, + ListenedAt_2: t2, + TrackID: int32(opts.TrackID), + }) + if err != nil { + return nil, err + } + } else if opts.AlbumID > 0 { + l.Debug().Msgf("Fetching %d listens with period %s on page %d from range %v to %v", + opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + rows, err := d.q.GetLastListensFromReleasePaginated(ctx, repository.GetLastListensFromReleasePaginatedParams{ + ListenedAt: t1, + ListenedAt_2: t2, + Limit: int32(opts.Limit), + Offset: int32(offset), + ReleaseID: int32(opts.AlbumID), + }) + if err != nil { + return nil, err + } + listens = make([]*models.Listen, len(rows)) + for i, row := range rows { + t := &models.Listen{ + Track: models.Track{ + Title: row.TrackTitle, + ID: row.TrackID, + }, + Time: row.ListenedAt, + } + err = json.Unmarshal(row.Artists, &t.Track.Artists) + if err != nil { + return nil, err + } + listens[i] = t + } + count, err = d.q.CountListensFromRelease(ctx, repository.CountListensFromReleaseParams{ + ListenedAt: t1, + ListenedAt_2: t2, + ReleaseID: int32(opts.AlbumID), + }) + if err != nil { + return nil, err + } + } else if opts.ArtistID > 0 { + l.Debug().Msgf("Fetching %d listens with period %s on page %d from range %v to %v", + opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + rows, err := d.q.GetLastListensFromArtistPaginated(ctx, repository.GetLastListensFromArtistPaginatedParams{ + ListenedAt: t1, + ListenedAt_2: t2, + Limit: int32(opts.Limit), + Offset: int32(offset), + ArtistID: int32(opts.ArtistID), + }) + if err != nil { + return nil, err + } + listens = make([]*models.Listen, len(rows)) + for i, row := range rows { + t := &models.Listen{ + Track: models.Track{ + Title: row.TrackTitle, + ID: row.TrackID, + }, + Time: row.ListenedAt, + } + err = json.Unmarshal(row.Artists, &t.Track.Artists) + if err != nil { + return nil, err + } + listens[i] = t + } + count, err = d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{ + ListenedAt: t1, + ListenedAt_2: t2, + ArtistID: int32(opts.ArtistID), + }) + if err != nil { + return nil, err + } + } else { + l.Debug().Msgf("Fetching %d listens with period %s on page %d from range %v to %v", + opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + rows, err := d.q.GetLastListensPaginated(ctx, repository.GetLastListensPaginatedParams{ + ListenedAt: t1, + ListenedAt_2: t2, + Limit: int32(opts.Limit), + Offset: int32(offset), + }) + if err != nil { + return nil, err + } + listens = make([]*models.Listen, len(rows)) + for i, row := range rows { + t := &models.Listen{ + Track: models.Track{ + Title: row.TrackTitle, + ID: row.TrackID, + }, + Time: row.ListenedAt, + } + err = json.Unmarshal(row.Artists, &t.Track.Artists) + if err != nil { + return nil, err + } + listens[i] = t + } + count, err = d.q.CountListens(ctx, repository.CountListensParams{ + ListenedAt: t1, + ListenedAt_2: t2, + }) + if err != nil { + return nil, err + } 
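+
+		// All four branches above return the same shape of result: Items holds at
+		// most opts.Limit listens for the resolved time window (most recent first),
+		// TotalCount is the unpaginated count for that window, and HasNextPage is
+		// derived below from offset+len(listens) against TotalCount.
+		//
+		// A minimal usage sketch (assuming a *Psql value named store):
+		//
+		//	resp, err := store.GetListensPaginated(ctx, db.GetItemsOpts{
+		//		Period: db.PeriodWeek,
+		//		Limit:  20,
+		//		Page:   1,
+		//	})
+		//	// resp.Items -> []*models.Listen, newest first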
+ l.Debug().Msgf("Database responded with %d tracks out of a total %d", len(rows), count) + } + + return &db.PaginatedResponse[*models.Listen]{ + Items: listens, + TotalCount: count, + ItemsPerPage: int32(opts.Limit), + HasNextPage: int64(offset+len(listens)) < count, + CurrentPage: int32(opts.Page), + }, nil +} + +func (d *Psql) SaveListen(ctx context.Context, opts db.SaveListenOpts) error { + l := logger.FromContext(ctx) + if opts.TrackID == 0 { + return errors.New("required parameter TrackID missing") + } + if opts.Time.IsZero() { + opts.Time = time.Now() + } + var client *string + if opts.Client != "" { + client = &opts.Client + } + l.Debug().Msgf("Inserting listen for track with id %d at time %v into DB", opts.TrackID, opts.Time) + return d.q.InsertListen(ctx, repository.InsertListenParams{ + TrackID: opts.TrackID, + ListenedAt: opts.Time, + UserID: opts.UserID, + Client: client, + }) +} + +func (d *Psql) DeleteListen(ctx context.Context, trackId int32, listenedAt time.Time) error { + l := logger.FromContext(ctx) + if trackId == 0 { + return errors.New("required parameter 'trackId' missing") + } + l.Debug().Msgf("Deleting listen from track %d at time %s from DB", trackId, listenedAt) + return d.q.DeleteListen(ctx, repository.DeleteListenParams{ + TrackID: trackId, + ListenedAt: listenedAt, + }) +} diff --git a/internal/db/psql/listen_activity.go b/internal/db/psql/listen_activity.go new file mode 100644 index 0000000..5f57f92 --- /dev/null +++ b/internal/db/psql/listen_activity.go @@ -0,0 +1,109 @@ +package psql + +import ( + "context" + "errors" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/repository" +) + +func (d *Psql) GetListenActivity(ctx context.Context, opts db.ListenActivityOpts) ([]db.ListenActivityItem, error) { + l := logger.FromContext(ctx) + if opts.Month != 0 && opts.Year == 0 { + return nil, errors.New("year must be specified with month") + } + // Default to range = 12 if not set + if opts.Range == 0 { + opts.Range = db.DefaultRange + } + t1, t2 := db.ListenActivityOptsToTimes(opts) + var listenActivity []db.ListenActivityItem + if opts.AlbumID > 0 { + l.Debug().Msgf("Fetching listen activity for %d %s(s) from %v to %v for release group %d", + opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05"), t2.Format("Jan 02, 2006 15:04:05"), opts.AlbumID) + rows, err := d.q.ListenActivityForRelease(ctx, repository.ListenActivityForReleaseParams{ + Column1: t1, + Column2: t2, + Column3: stepToInterval(opts.Step), + ReleaseID: opts.AlbumID, + }) + if err != nil { + return nil, err + } + listenActivity = make([]db.ListenActivityItem, len(rows)) + for i, row := range rows { + t := db.ListenActivityItem{ + Start: row.BucketStart, + Listens: row.ListenCount, + } + listenActivity[i] = t + } + l.Debug().Msgf("Database responded with %d steps", len(rows)) + } else if opts.ArtistID > 0 { + l.Debug().Msgf("Fetching listen activity for %d %s(s) from %v to %v for artist %d", + opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05"), t2.Format("Jan 02, 2006 15:04:05"), opts.ArtistID) + rows, err := d.q.ListenActivityForArtist(ctx, repository.ListenActivityForArtistParams{ + Column1: t1, + Column2: t2, + Column3: stepToInterval(opts.Step), + ArtistID: opts.ArtistID, + }) + if err != nil { + return nil, err + } + listenActivity = make([]db.ListenActivityItem, len(rows)) + for i, row := range rows { + t := db.ListenActivityItem{ + Start: row.BucketStart, + Listens: row.ListenCount, + } + listenActivity[i] = t + } 
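+
+		// As in the release branch above, each row is one time bucket of width
+		// opts.Step (see stepToInterval) between t1 and t2; buckets with no
+		// listens are still returned with a zero count, so the resulting series
+		// covers the whole requested window without gaps.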
+ l.Debug().Msgf("Database responded with %d steps", len(rows)) + } else if opts.TrackID > 0 { + l.Debug().Msgf("Fetching listen activity for %d %s(s) from %v to %v for track %d", + opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05"), t2.Format("Jan 02, 2006 15:04:05"), opts.TrackID) + rows, err := d.q.ListenActivityForTrack(ctx, repository.ListenActivityForTrackParams{ + Column1: t1, + Column2: t2, + Column3: stepToInterval(opts.Step), + ID: opts.TrackID, + }) + if err != nil { + return nil, err + } + listenActivity = make([]db.ListenActivityItem, len(rows)) + for i, row := range rows { + t := db.ListenActivityItem{ + Start: row.BucketStart, + Listens: row.ListenCount, + } + listenActivity[i] = t + } + l.Debug().Msgf("Database responded with %d steps", len(rows)) + } else { + l.Debug().Msgf("Fetching listen activity for %d %s(s) from %v to %v", + opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05"), t2.Format("Jan 02, 2006 15:04:05")) + rows, err := d.q.ListenActivity(ctx, repository.ListenActivityParams{ + Column1: t1, + Column2: t2, + Column3: stepToInterval(opts.Step), + }) + if err != nil { + return nil, err + } + listenActivity = make([]db.ListenActivityItem, len(rows)) + for i, row := range rows { + t := db.ListenActivityItem{ + Start: row.BucketStart, + Listens: row.ListenCount, + } + listenActivity[i] = t + } + l.Debug().Msgf("Database responded with %d steps", len(rows)) + } + + return listenActivity, nil +} diff --git a/internal/db/psql/listen_activity_test.go b/internal/db/psql/listen_activity_test.go new file mode 100644 index 0000000..ccfdfe5 --- /dev/null +++ b/internal/db/psql/listen_activity_test.go @@ -0,0 +1,211 @@ +package psql_test + +import ( + "context" + "testing" + + "github.com/gabehf/koito/internal/db" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func flattenListenCounts(items []db.ListenActivityItem) []int64 { + ret := make([]int64, len(items)) + for i, v := range items { + ret[i] = v.Listens + } + return ret +} + +func TestListenActivity(t *testing.T) { + truncateTestData(t) + + err := store.Exec(context.Background(), + `INSERT INTO artists (musicbrainz_id) + VALUES ('00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002')`) + require.NoError(t, err) + + // Move artist names into artist_aliases + err = store.Exec(context.Background(), + `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) + VALUES (1, 'Artist One', 'Testing', true), + (2, 'Artist Two', 'Testing', true)`) + require.NoError(t, err) + + // Insert release groups + err = store.Exec(context.Background(), + `INSERT INTO releases (musicbrainz_id) + VALUES ('00000000-0000-0000-0000-000000000011'), + ('00000000-0000-0000-0000-000000000022')`) + require.NoError(t, err) + + // Move release titles into release_aliases + err = store.Exec(context.Background(), + `INSERT INTO release_aliases (release_id, alias, source, is_primary) + VALUES (1, 'Release One', 'Testing', true), + (2, 'Release Two', 'Testing', true)`) + require.NoError(t, err) + + // Insert tracks + err = store.Exec(context.Background(), + `INSERT INTO tracks (musicbrainz_id, release_id) + VALUES ('11111111-1111-1111-1111-111111111111', 1), + ('22222222-2222-2222-2222-222222222222', 2)`) + require.NoError(t, err) + + // Move track titles into track_aliases + err = store.Exec(context.Background(), + `INSERT INTO track_aliases (track_id, alias, source, is_primary) + VALUES (1, 'Track One', 'Testing', true), + (2, 'Track Two', 'Testing', true)`) + 
require.NoError(t, err) + + // Associate tracks with artists + err = store.Exec(context.Background(), + `INSERT INTO artist_tracks (artist_id, track_id) + VALUES (1, 1), (2, 2)`) + require.NoError(t, err) + + // Insert listens + err = store.Exec(context.Background(), + `INSERT INTO listens (user_id, track_id, listened_at) + VALUES (1, 1, NOW() - INTERVAL '1 day'), + (1, 1, NOW() - INTERVAL '2 days'), + (1, 1, NOW() - INTERVAL '1 week 1 day'), + (1, 1, NOW() - INTERVAL '1 month 1 day'), + (1, 1, NOW() - INTERVAL '1 year 1 day'), + (1, 2, NOW() - INTERVAL '1 day'), + (1, 2, NOW() - INTERVAL '2 days'), + (1, 2, NOW() - INTERVAL '1 week 1 day'), + (1, 2, NOW() - INTERVAL '1 month 1 day'), + (1, 2, NOW() - INTERVAL '1 year 1 day')`) + require.NoError(t, err) + + ctx := context.Background() + + // Test for opts.Step = db.StepDay + activity, err := store.GetListenActivity(ctx, db.ListenActivityOpts{Step: db.StepDay}) + require.NoError(t, err) + require.Len(t, activity, db.DefaultRange) + assert.Equal(t, []int64{0, 0, 0, 2, 0, 0, 0, 0, 0, 2, 2, 0}, flattenListenCounts(activity)) + + // Truncate listens table and insert specific dates for testing opts.Step = db.StepMonth + err = store.Exec(context.Background(), `TRUNCATE TABLE listens`) + require.NoError(t, err) + + err = store.Exec(context.Background(), + `INSERT INTO listens (user_id, track_id, listened_at) + VALUES (1, 1, NOW() - INTERVAL '1 month'), + (1, 1, NOW() - INTERVAL '2 months'), + (1, 1, NOW() - INTERVAL '3 months'), + (1, 2, NOW() - INTERVAL '1 month'), + (1, 2, NOW() - INTERVAL '2 months')`) + require.NoError(t, err) + + activity, err = store.GetListenActivity(ctx, db.ListenActivityOpts{Step: db.StepMonth, Range: 8}) + require.NoError(t, err) + require.Len(t, activity, 8) + assert.Equal(t, []int64{0, 0, 0, 0, 1, 2, 2, 0}, flattenListenCounts(activity)) + + // Truncate listens table and insert specific dates for testing opts.Step = db.StepYear + err = store.Exec(context.Background(), `TRUNCATE TABLE listens RESTART IDENTITY`) + require.NoError(t, err) + + err = store.Exec(context.Background(), + `INSERT INTO listens (user_id, track_id, listened_at) + VALUES (1, 1, NOW() - INTERVAL '1 year'), + (1, 1, NOW() - INTERVAL '2 years'), + (1, 2, NOW() - INTERVAL '1 year'), + (1, 2, NOW() - INTERVAL '3 years')`) + require.NoError(t, err) + + activity, err = store.GetListenActivity(ctx, db.ListenActivityOpts{Step: db.StepYear}) + require.NoError(t, err) + require.Len(t, activity, db.DefaultRange) + assert.Equal(t, []int64{0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 0}, flattenListenCounts(activity)) + // Truncate and insert data for a specific month/year + err = store.Exec(context.Background(), `TRUNCATE TABLE listens RESTART IDENTITY`) + require.NoError(t, err) + + err = store.Exec(context.Background(), ` + INSERT INTO listens (user_id, track_id, listened_at) + VALUES (1, 1, DATE '2024-03-10'), + (1, 2, DATE '2024-03-20')`) + require.NoError(t, err) + + activity, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ + Step: db.StepDay, + Month: 3, + Year: 2024, + }) + require.NoError(t, err) + require.Len(t, activity, 31) // number of days in march + assert.EqualValues(t, 1, activity[8].Listens) + assert.EqualValues(t, 1, activity[18].Listens) + + // Truncate and insert listens associated with two different albums + err = store.Exec(context.Background(), `TRUNCATE TABLE listens RESTART IDENTITY`) + require.NoError(t, err) + + err = store.Exec(context.Background(), ` + INSERT INTO listens (user_id, track_id, listened_at) + VALUES (1, 1, NOW() - 
INTERVAL '1 day'), (1, 1, NOW() - INTERVAL '2 days'), + (1, 2, NOW() - INTERVAL '1 day')`) + require.NoError(t, err) + + activity, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ + Step: db.StepDay, + AlbumID: 1, // Track 1 only + }) + require.NoError(t, err) + require.Len(t, activity, db.DefaultRange) + assert.Equal(t, []int64{0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0}, flattenListenCounts(activity)) + + activity, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ + Step: db.StepDay, + TrackID: 1, // Track 1 only + }) + require.NoError(t, err) + require.Len(t, activity, db.DefaultRange) + assert.Equal(t, []int64{0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0}, flattenListenCounts(activity)) + + activity, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ + Step: db.StepDay, + ArtistID: 2, // Should only include listens to Track 2 + }) + require.NoError(t, err) + require.Len(t, activity, db.DefaultRange) + assert.Equal(t, []int64{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0}, flattenListenCounts(activity)) + + // month without year is disallowed + _, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ + Step: db.StepDay, + Month: 5, + }) + require.Error(t, err) + + // invalid options + _, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ + Year: -10, + }) + require.Error(t, err) + _, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ + Year: 2025, + Month: -10, + }) + require.Error(t, err) + _, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ + Range: -1, + }) + require.Error(t, err) + _, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ + AlbumID: -1, + }) + require.Error(t, err) + _, err = store.GetListenActivity(ctx, db.ListenActivityOpts{ + ArtistID: -1, + }) + require.Error(t, err) + +} diff --git a/internal/db/psql/listen_test.go b/internal/db/psql/listen_test.go new file mode 100644 index 0000000..b0fbd96 --- /dev/null +++ b/internal/db/psql/listen_test.go @@ -0,0 +1,219 @@ +package psql_test + +import ( + "context" + "testing" + "time" + + "github.com/gabehf/koito/internal/db" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func testDataForListens(t *testing.T) { + truncateTestData(t) + // Insert artists + err := store.Exec(context.Background(), + `INSERT INTO artists (musicbrainz_id) + VALUES ('00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002')`) + require.NoError(t, err) + + // Insert artist aliases + err = store.Exec(context.Background(), + `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) + VALUES (1, 'Artist One', 'Testing', true), + (2, 'Artist Two', 'Testing', true)`) + require.NoError(t, err) + + // Insert release groups + err = store.Exec(context.Background(), + `INSERT INTO releases (musicbrainz_id) + VALUES ('00000000-0000-0000-0000-000000000011'), + ('00000000-0000-0000-0000-000000000022')`) + require.NoError(t, err) + + // Insert release aliases + err = store.Exec(context.Background(), + `INSERT INTO release_aliases (release_id, alias, source, is_primary) + VALUES (1, 'Release One', 'Testing', true), + (2, 'Release Two', 'Testing', true)`) + require.NoError(t, err) + + // Insert tracks + err = store.Exec(context.Background(), + `INSERT INTO tracks (musicbrainz_id, release_id) + VALUES ('11111111-1111-1111-1111-111111111111', 1), + ('22222222-2222-2222-2222-222222222222', 2)`) + require.NoError(t, err) + + // Insert track aliases + err = store.Exec(context.Background(), + `INSERT INTO track_aliases (track_id, alias, source, is_primary) + VALUES (1, 
'Track One', 'Testing', true), + (2, 'Track Two', 'Testing', true)`) + require.NoError(t, err) + + // Insert artist track associations + err = store.Exec(context.Background(), + `INSERT INTO artist_tracks (track_id, artist_id) + VALUES (1, 1), + (2, 2)`) + require.NoError(t, err) +} + +func TestGetListens(t *testing.T) { + testDataForTopItems(t) + ctx := context.Background() + + // Test valid + resp, err := store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime}) + require.NoError(t, err) + require.Len(t, resp.Items, 10) + assert.Equal(t, int64(10), resp.TotalCount) + require.Len(t, resp.Items[0].Track.Artists, 1) + require.Len(t, resp.Items[1].Track.Artists, 1) + // ensure tracks are in the right order (time, desc) + assert.Equal(t, "Artist Four", resp.Items[0].Track.Artists[0].Name) + assert.Equal(t, "Artist Three", resp.Items[1].Track.Artists[0].Name) + + // Test pagination + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Period: db.PeriodAllTime}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + require.Len(t, resp.Items[0].Track.Artists, 1) + assert.Equal(t, true, resp.HasNextPage) + assert.EqualValues(t, 2, resp.CurrentPage) + assert.EqualValues(t, 1, resp.ItemsPerPage) + assert.EqualValues(t, 10, resp.TotalCount) + assert.Equal(t, "Artist Three", resp.Items[0].Track.Artists[0].Name) + + // Test page out of range + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Limit: 10, Page: 10, Period: db.PeriodAllTime}) + require.NoError(t, err) + assert.Empty(t, resp.Items) + assert.False(t, resp.HasNextPage) + + // Test invalid inputs + _, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Limit: -1, Page: 0}) + assert.Error(t, err) + + _, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: -1}) + assert.Error(t, err) + + // Test specify period + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodDay}) + require.NoError(t, err) + require.Len(t, resp.Items, 0) // empty + assert.Equal(t, int64(0), resp.TotalCount) + // should default to PeriodDay + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{}) + require.NoError(t, err) + require.Len(t, resp.Items, 0) // empty + assert.Equal(t, int64(0), resp.TotalCount) + + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodWeek}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + assert.Equal(t, int64(1), resp.TotalCount) + + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodMonth}) + require.NoError(t, err) + require.Len(t, resp.Items, 3) + assert.Equal(t, int64(3), resp.TotalCount) + + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodYear}) + require.NoError(t, err) + require.Len(t, resp.Items, 6) + assert.Equal(t, int64(6), resp.TotalCount) + + // Test filter by artists, releases, and tracks + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, ArtistID: 1}) + require.NoError(t, err) + require.Len(t, resp.Items, 4) + assert.Equal(t, int64(4), resp.TotalCount) + + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, AlbumID: 2}) + require.NoError(t, err) + require.Len(t, resp.Items, 3) + assert.Equal(t, int64(3), resp.TotalCount) + + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, TrackID: 3}) + require.NoError(t, err) + require.Len(t, resp.Items, 2) + assert.Equal(t, int64(2), resp.TotalCount) + // when both artistID and albumID are specified, 
artist id is ignored + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, AlbumID: 2, ArtistID: 1}) + require.NoError(t, err) + require.Len(t, resp.Items, 3) + assert.Equal(t, int64(3), resp.TotalCount) + + // Test specify dates + + testDataAbsoluteListenTimes(t) + + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Year: 2023}) + require.NoError(t, err) + require.Len(t, resp.Items, 4) + assert.Equal(t, int64(4), resp.TotalCount) + + resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Month: 6, Year: 2024}) + require.NoError(t, err) + require.Len(t, resp.Items, 3) + assert.Equal(t, int64(3), resp.TotalCount) + + // invalid, year required with month + _, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Month: 10}) + require.Error(t, err) + +} + +func TestSaveListen(t *testing.T) { + testDataForListens(t) + ctx := context.Background() + + // Test SaveListen with valid inputs + err := store.SaveListen(ctx, db.SaveListenOpts{ + TrackID: 1, + Time: time.Now(), + UserID: 1, + }) + require.NoError(t, err) + + // Verify the listen was saved + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.True(t, exists, "expected listen to exist") + + // Test SaveListen with missing TrackID + err = store.SaveListen(ctx, db.SaveListenOpts{ + TrackID: 0, + Time: time.Now(), + }) + assert.Error(t, err) +} + +func TestDeleteListen(t *testing.T) { + testDataForListens(t) + ctx := context.Background() + + err := store.Exec(ctx, ` + INSERT INTO listens (user_id, track_id, listened_at) + VALUES (1, 1, to_timestamp(1749464138.0))`) + require.NoError(t, err) + + err = store.DeleteListen(ctx, 1, time.Unix(1749464138, 0)) + require.NoError(t, err) + + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM listens + WHERE track_id = $1 + )`, 1) + require.NoError(t, err) + assert.False(t, exists, "expected listen to be deleted") +} diff --git a/internal/db/psql/merge.go b/internal/db/psql/merge.go new file mode 100644 index 0000000..91bce1a --- /dev/null +++ b/internal/db/psql/merge.go @@ -0,0 +1,109 @@ +package psql + +import ( + "context" + + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/repository" + "github.com/jackc/pgx/v5" +) + +func (d *Psql) MergeTracks(ctx context.Context, fromId, toId int32) error { + l := logger.FromContext(ctx) + l.Info().Msgf("Merging track %d into track %d", fromId, toId) + tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return err + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + err = qtx.UpdateTrackIdForListens(ctx, repository.UpdateTrackIdForListensParams{ + TrackID: fromId, + TrackID_2: toId, + }) + if err != nil { + return err + } + err = qtx.CleanOrphanedEntries(ctx) + if err != nil { + l.Err(err).Msg("Failed to clean orphaned entries") + return err + } + return tx.Commit(ctx) +} + +func (d *Psql) MergeAlbums(ctx context.Context, fromId, toId int32) error { + l := logger.FromContext(ctx) + l.Info().Msgf("Merging album %d into album %d", fromId, toId) + tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return err + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + err = qtx.UpdateReleaseForAll(ctx, repository.UpdateReleaseForAllParams{ + ReleaseID: fromId, + ReleaseID_2: toId, + }) + if err != nil { + return err + } + err = qtx.CleanOrphanedEntries(ctx) 
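+	// The orphan sweep runs inside the same transaction, so a failure here (or
+	// in the release update above) is undone by the deferred Rollback and the
+	// merge is never committed.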
+ if err != nil { + l.Err(err).Msg("Failed to clean orphaned entries") + return err + } + return tx.Commit(ctx) +} + +func (d *Psql) MergeArtists(ctx context.Context, fromId, toId int32) error { + l := logger.FromContext(ctx) + l.Info().Msgf("Merging artist %d into artist %d", fromId, toId) + tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return err + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + err = qtx.DeleteConflictingArtistTracks(ctx, repository.DeleteConflictingArtistTracksParams{ + ArtistID: fromId, + ArtistID_2: toId, + }) + if err != nil { + l.Err(err).Msg("Failed to delete conflicting artist tracks") + return err + } + err = qtx.DeleteConflictingArtistReleases(ctx, repository.DeleteConflictingArtistReleasesParams{ + ArtistID: fromId, + ArtistID_2: toId, + }) + if err != nil { + l.Err(err).Msg("Failed to delete conflicting artist releases") + return err + } + err = qtx.UpdateArtistTracks(ctx, repository.UpdateArtistTracksParams{ + ArtistID: fromId, + ArtistID_2: toId, + }) + if err != nil { + l.Err(err).Msg("Failed to update artist tracks") + return err + } + err = qtx.UpdateArtistReleases(ctx, repository.UpdateArtistReleasesParams{ + ArtistID: fromId, + ArtistID_2: toId, + }) + if err != nil { + l.Err(err).Msg("Failed to update artist releases") + return err + } + err = qtx.CleanOrphanedEntries(ctx) + if err != nil { + l.Err(err).Msg("Failed to clean orphaned entries") + return err + } + return tx.Commit(ctx) +} diff --git a/internal/db/psql/merge_test.go b/internal/db/psql/merge_test.go new file mode 100644 index 0000000..ceb612e --- /dev/null +++ b/internal/db/psql/merge_test.go @@ -0,0 +1,124 @@ +package psql_test + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func setupTestDataForMerge(t *testing.T) { + truncateTestData(t) + // Insert artists + err := store.Exec(context.Background(), + `INSERT INTO artists (musicbrainz_id) + VALUES ('00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002')`) + require.NoError(t, err) + + err = store.Exec(context.Background(), + `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) + VALUES (1, 'Artist One', 'Testing', true), + (2, 'Artist Two', 'Testing', true)`) + require.NoError(t, err) + + // Insert albums + err = store.Exec(context.Background(), + `INSERT INTO releases (musicbrainz_id) + VALUES ('11111111-1111-1111-1111-111111111111'), + ('22222222-2222-2222-2222-222222222222')`) + require.NoError(t, err) + + err = store.Exec(context.Background(), + `INSERT INTO release_aliases (release_id, alias, source, is_primary) + VALUES (1, 'Album One', 'Testing', true), + (2, 'Album Two', 'Testing', true)`) + require.NoError(t, err) + + // Insert tracks + err = store.Exec(context.Background(), + `INSERT INTO tracks (musicbrainz_id, release_id) + VALUES ('33333333-3333-3333-3333-333333333333', 1), + ('44444444-4444-4444-4444-444444444444', 2)`) + require.NoError(t, err) + + err = store.Exec(context.Background(), + `INSERT INTO track_aliases (track_id, alias, source, is_primary) + VALUES (1, 'Track One', 'Testing', true), + (2, 'Track Two', 'Testing', true)`) + require.NoError(t, err) + + // Associate artists with albums and tracks + err = store.Exec(context.Background(), + `INSERT INTO artist_releases (artist_id, release_id) + VALUES (1, 1), (2, 2)`) + require.NoError(t, err) + + err = store.Exec(context.Background(), + `INSERT INTO artist_tracks (artist_id, 
track_id) + VALUES (1, 1), (2, 2)`) + require.NoError(t, err) + + // Insert listens + err = store.Exec(context.Background(), + `INSERT INTO listens (user_id, track_id, listened_at) + VALUES (1, 1, NOW() - INTERVAL '1 day'), + (1, 2, NOW() - INTERVAL '2 days')`) + require.NoError(t, err) +} + +func TestMergeTracks(t *testing.T) { + ctx := context.Background() + setupTestDataForMerge(t) + + // Merge Track 1 into Track 2 + err := store.MergeTracks(ctx, 1, 2) + require.NoError(t, err) + + // Verify listens are updated + var count int + count, err = store.Count(ctx, `SELECT COUNT(*) FROM listens WHERE track_id = 2`) + require.NoError(t, err) + assert.Equal(t, 2, count, "expected all listens to be merged into Track 2") + + truncateTestData(t) +} + +func TestMergeAlbums(t *testing.T) { + ctx := context.Background() + setupTestDataForMerge(t) + + // Merge Album 1 into Album 2 + err := store.MergeAlbums(ctx, 1, 2) + require.NoError(t, err) + + // Verify tracks are updated + var count int + count, err = store.Count(ctx, `SELECT COUNT(*) FROM tracks WHERE release_id = 2`) + require.NoError(t, err) + assert.Equal(t, 2, count, "expected all tracks to be merged into Album 2") + + truncateTestData(t) +} + +func TestMergeArtists(t *testing.T) { + ctx := context.Background() + setupTestDataForMerge(t) + + // Merge Artist 1 into Artist 2 + err := store.MergeArtists(ctx, 1, 2) + require.NoError(t, err) + + // Verify artist associations are updated + var count int + count, err = store.Count(ctx, `SELECT COUNT(*) FROM artist_tracks WHERE artist_id = 2`) + require.NoError(t, err) + assert.Equal(t, 2, count, "expected all tracks to be associated with Artist 2") + + count, err = store.Count(ctx, `SELECT COUNT(*) FROM artist_releases WHERE artist_id = 2`) + require.NoError(t, err) + assert.Equal(t, 2, count, "expected all releases to be associated with Artist 2") + + truncateTestData(t) +} diff --git a/internal/db/psql/psql.go b/internal/db/psql/psql.go new file mode 100644 index 0000000..2e52d94 --- /dev/null +++ b/internal/db/psql/psql.go @@ -0,0 +1,119 @@ +// package psql implements the db.DB interface using psx and a sql generated repository +package psql + +import ( + "context" + "database/sql" + "fmt" + "path/filepath" + "runtime" + "time" + + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/repository" + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgtype" + "github.com/jackc/pgx/v5/pgxpool" + _ "github.com/jackc/pgx/v5/stdlib" + "github.com/pressly/goose/v3" +) + +const ( + DefaultItemsPerPage = 20 +) + +type Psql struct { + q *repository.Queries + conn *pgxpool.Pool +} + +func New() (*Psql, error) { + ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second) + defer cancel() + + config, err := pgxpool.ParseConfig(cfg.DatabaseUrl()) + if err != nil { + return nil, fmt.Errorf("failed to parse pgx config: %w", err) + } + + config.ConnConfig.ConnectTimeout = 15 * time.Second + + pool, err := pgxpool.NewWithConfig(ctx, config) + if err != nil { + return nil, fmt.Errorf("failed to create pgx pool: %w", err) + } + + if err := pool.Ping(ctx); err != nil { + pool.Close() + return nil, fmt.Errorf("database not reachable: %w", err) + } + + sqlDB, err := sql.Open("pgx", cfg.DatabaseUrl()) + if err != nil { + return nil, fmt.Errorf("failed to open db for migrations: %w", err) + } + + _, filename, _, ok := runtime.Caller(0) + if !ok { + return nil, fmt.Errorf("unable to get caller info") + } + migrationsPath := 
filepath.Join(filepath.Dir(filename), "..", "..", "..", "db", "migrations") + + if err := goose.Up(sqlDB, migrationsPath); err != nil { + return nil, fmt.Errorf("goose failed: %w", err) + } + _ = sqlDB.Close() + + return &Psql{ + q: repository.New(pool), + conn: pool, + }, nil +} + +// Not part of the DB interface this package implements. Only used for testing. +func (d *Psql) Exec(ctx context.Context, query string, args ...any) error { + _, err := d.conn.Exec(ctx, query, args...) + return err +} + +// Not part of the DB interface this package implements. Only used for testing. +func (d *Psql) RowExists(ctx context.Context, query string, args ...any) (bool, error) { + var exists bool + err := d.conn.QueryRow(ctx, query, args...).Scan(&exists) + return exists, err +} + +func (p *Psql) Count(ctx context.Context, query string, args ...any) (count int, err error) { + err = p.conn.QueryRow(ctx, query, args...).Scan(&count) + return +} + +// Exposes p.conn.QueryRow. Only used for testing. Not part of the DB interface this package implements. +func (p *Psql) QueryRow(ctx context.Context, query string, args ...any) pgx.Row { + return p.conn.QueryRow(ctx, query, args...) +} + +func (d *Psql) Close(ctx context.Context) { + d.conn.Close() +} + +func (d *Psql) Ping(ctx context.Context) error { + return d.conn.Ping(ctx) +} + +func stepToInterval(p db.StepInterval) pgtype.Interval { + var interval pgtype.Interval + switch p { + case db.StepDay: + interval.Days = 1 + case db.StepWeek: + interval.Days = 7 + case db.StepMonth: + interval.Months = 1 + case db.StepYear: + interval.Months = 12 + } + interval.Valid = true + return interval +} diff --git a/internal/db/psql/psql_test.go b/internal/db/psql/psql_test.go new file mode 100644 index 0000000..7da1ce4 --- /dev/null +++ b/internal/db/psql/psql_test.go @@ -0,0 +1,186 @@ +package psql_test + +import ( + "context" + "fmt" + "log" + "testing" + + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/db/psql" + _ "github.com/gabehf/koito/testing_init" + "github.com/ory/dockertest/v3" + "github.com/stretchr/testify/require" +) + +var store *psql.Psql + +func getTestGetenv(resource *dockertest.Resource) func(string) string { + return func(env string) string { + switch env { + case cfg.DATABASE_URL_ENV: + return fmt.Sprintf("postgres://postgres:secret@localhost:%s", resource.GetPort("5432/tcp")) + default: + return "" + } + } +} + +func TestMain(m *testing.M) { + // uses a sensible default on windows (tcp/http) and linux/osx (socket) + pool, err := dockertest.NewPool("") + if err != nil { + log.Fatalf("Could not construct pool: %s", err) + } + + // uses pool to try to connect to Docker + err = pool.Client.Ping() + if err != nil { + log.Fatalf("Could not connect to Docker: %s", err) + } + + // pulls an image, creates a container based on it and runs it + resource, err := pool.Run("postgres", "latest", []string{"POSTGRES_PASSWORD=secret"}) + if err != nil { + log.Fatalf("Could not start resource: %s", err) + } + + err = cfg.Load(getTestGetenv(resource)) + if err != nil { + log.Fatalf("Could not load cfg: %s", err) + } + + // exponential backoff-retry, because the application in the container might not be ready to accept connections yet + if err := pool.Retry(func() error { + var err error + store, err = psql.New() + if err != nil { + log.Println("Failed to connect to test database, retrying...") + return err + } + return store.Ping(context.Background()) + }); err != nil { + log.Fatalf("Could not connect to database: %s", err) + } + + // as of 
go1.15 testing.M returns the exit code of m.Run(), so it is safe to use defer here + defer func() { + if err := pool.Purge(resource); err != nil { + log.Fatalf("Could not purge resource: %s", err) + } + }() + + // insert a user into the db with id 1 to use for tests + err = store.Exec(context.Background(), `INSERT INTO users (username, password) VALUES ('test', DECODE('abc123', 'hex'))`) + if err != nil { + log.Fatalf("Failed to insert test user: %v", err) + } + + m.Run() +} + +func testDataForTopItems(t *testing.T) { + truncateTestData(t) + + // artist 1 has most listens older than 1 year + // artist 2 has most listens older than 1 month + // artist 3 has most listens older than 1 week + // artist 4 has least listens + + err := store.Exec(context.Background(), + `INSERT INTO artists (musicbrainz_id) + VALUES ('00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004')`) + require.NoError(t, err) + + err = store.Exec(context.Background(), + `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) + VALUES (1, 'Artist One', 'Testing', true), + (2, 'Artist Two', 'Testing', true), + (3, 'Artist Three', 'Testing', true), + (4, 'Artist Four', 'Testing', true)`) + require.NoError(t, err) + + // Insert release groups + err = store.Exec(context.Background(), + `INSERT INTO releases (musicbrainz_id) + VALUES ('00000000-0000-0000-0000-000000000011'), + ('00000000-0000-0000-0000-000000000022'), + ('00000000-0000-0000-0000-000000000033'), + ('00000000-0000-0000-0000-000000000044')`) + require.NoError(t, err) + + err = store.Exec(context.Background(), + `INSERT INTO release_aliases (release_id, alias, source, is_primary) + VALUES (1, 'Release One', 'Testing', true), + (2, 'Release Two', 'Testing', true), + (3, 'Release Three', 'Testing', true), + (4, 'Release Four', 'Testing', true)`) + require.NoError(t, err) + + // Insert release groups + err = store.Exec(context.Background(), + `INSERT INTO artist_releases (release_id, artist_id) + VALUES (1, 1), (2, 2), (3, 3), (4, 4)`) + require.NoError(t, err) + + // Insert tracks + err = store.Exec(context.Background(), + `INSERT INTO tracks (musicbrainz_id, release_id, duration) + VALUES ('11111111-1111-1111-1111-111111111111', 1, 100), + ('22222222-2222-2222-2222-222222222222', 2, 100), + ('33333333-3333-3333-3333-333333333333', 3, 100), + ('44444444-4444-4444-4444-444444444444', 4, 100)`) + require.NoError(t, err) + + err = store.Exec(context.Background(), + `INSERT INTO track_aliases (track_id, alias, source, is_primary) + VALUES (1, 'Track One', 'Testing', true), + (2, 'Track Two', 'Testing', true), + (3, 'Track Three', 'Testing', true), + (4, 'Track Four', 'Testing', true)`) + require.NoError(t, err) + + // Associate tracks with artists + err = store.Exec(context.Background(), + `INSERT INTO artist_tracks (artist_id, track_id) + VALUES (1, 1), (2, 2), (3, 3), (4, 4)`) + require.NoError(t, err) + + // Insert listens + err = store.Exec(context.Background(), + `INSERT INTO listens (user_id, track_id, listened_at) + VALUES (1, 1, NOW() - INTERVAL '2 years 1 day'), + (1, 1, NOW() - INTERVAL '2 years 2 days'), + (1, 1, NOW() - INTERVAL '2 years 3 days'), + (1, 1, NOW() - INTERVAL '2 years 4 days'), + (1, 2, NOW() - INTERVAL '2 months 1 day'), + (1, 2, NOW() - INTERVAL '2 months 2 days'), + (1, 2, NOW() - INTERVAL '2 months 3 days'), + (1, 3, NOW() - INTERVAL '2 weeks'), + (1, 3, NOW() - INTERVAL '2 weeks 1 day'), + (1, 4, NOW() - INTERVAL '2 
days')`) + require.NoError(t, err) +} + +func testDataAbsoluteListenTimes(t *testing.T) { + err := store.Exec(context.Background(), + `TRUNCATE listens`) + require.NoError(t, err) + + err = store.Exec(context.Background(), + `INSERT INTO listens (user_id, track_id, listened_at) + VALUES (1, 1, '2023-06-22 19:11:25-07'), + (1, 1, '2023-06-22 19:12:25-07'), + (1, 1, '2023-06-22 19:13:25-07'), + (1, 1, '2023-06-22 19:14:25-07'), + (1, 2, '2024-06-22 19:15:25-07'), + (1, 2, '2024-06-22 19:16:25-07'), + (1, 2, '2024-06-22 19:17:25-07'), + (1, 3, '2024-10-02 19:18:25-07'), + (1, 3, '2024-10-02 19:19:25-07'), + (1, 4, '2025-05-16 19:20:25-07')`) + require.NoError(t, err) +} diff --git a/internal/db/psql/search.go b/internal/db/psql/search.go new file mode 100644 index 0000000..69250fb --- /dev/null +++ b/internal/db/psql/search.go @@ -0,0 +1,151 @@ +package psql + +import ( + "context" + "encoding/json" + + "github.com/gabehf/koito/internal/models" + "github.com/gabehf/koito/internal/repository" + "github.com/jackc/pgx/v5/pgtype" +) + +const searchItemLimit = 5 +const substringSearchLength = 6 + +func (d *Psql) SearchArtists(ctx context.Context, q string) ([]*models.Artist, error) { + if len(q) < substringSearchLength { + rows, err := d.q.SearchArtistsBySubstring(ctx, repository.SearchArtistsBySubstringParams{ + Column1: pgtype.Text{String: q, Valid: true}, + Limit: searchItemLimit, + }) + if err != nil { + return nil, err + } + ret := make([]*models.Artist, len(rows)) + for i, row := range rows { + ret[i] = &models.Artist{ + ID: row.ID, + MbzID: row.MusicBrainzID, + Name: row.Name, + Image: row.Image, + } + } + return ret, nil + } else { + rows, err := d.q.SearchArtists(ctx, repository.SearchArtistsParams{ + Similarity: q, + Limit: searchItemLimit, + }) + if err != nil { + return nil, err + } + ret := make([]*models.Artist, len(rows)) + for i, row := range rows { + ret[i] = &models.Artist{ + ID: row.ID, + MbzID: row.MusicBrainzID, + Name: row.Name, + Image: row.Image, + } + } + return ret, nil + } +} + +func (d *Psql) SearchAlbums(ctx context.Context, q string) ([]*models.Album, error) { + if len(q) < substringSearchLength { + rows, err := d.q.SearchReleasesBySubstring(ctx, repository.SearchReleasesBySubstringParams{ + Column1: pgtype.Text{String: q, Valid: true}, + Limit: searchItemLimit, + }) + if err != nil { + return nil, err + } + ret := make([]*models.Album, len(rows)) + for i, row := range rows { + ret[i] = &models.Album{ + ID: row.ID, + MbzID: row.MusicBrainzID, + Title: row.Title, + VariousArtists: row.VariousArtists, + Image: row.Image, + } + err = json.Unmarshal(row.Artists, &ret[i].Artists) + if err != nil { + return nil, err + } + } + return ret, nil + } else { + rows, err := d.q.SearchReleases(ctx, repository.SearchReleasesParams{ + Similarity: q, + Limit: searchItemLimit, + }) + if err != nil { + return nil, err + } + ret := make([]*models.Album, len(rows)) + for i, row := range rows { + ret[i] = &models.Album{ + ID: row.ID, + MbzID: row.MusicBrainzID, + Title: row.Title, + VariousArtists: row.VariousArtists, + Image: row.Image, + } + err = json.Unmarshal(row.Artists, &ret[i].Artists) + if err != nil { + return nil, err + } + } + return ret, nil + } +} + +func (d *Psql) SearchTracks(ctx context.Context, q string) ([]*models.Track, error) { + if len(q) < substringSearchLength { + rows, err := d.q.SearchTracksBySubstring(ctx, repository.SearchTracksBySubstringParams{ + Column1: pgtype.Text{String: q, Valid: true}, + Limit: searchItemLimit, + }) + if err != nil { + return nil, err + 
} + ret := make([]*models.Track, len(rows)) + for i, row := range rows { + ret[i] = &models.Track{ + ID: row.ID, + MbzID: row.MusicBrainzID, + Title: row.Title, + Image: row.Image, + } + err = json.Unmarshal(row.Artists, &ret[i].Artists) + if err != nil { + return nil, err + } + } + return ret, nil + } else { + rows, err := d.q.SearchTracks(ctx, repository.SearchTracksParams{ + Similarity: q, + Limit: searchItemLimit, + }) + if err != nil { + return nil, err + } + ret := make([]*models.Track, len(rows)) + for i, row := range rows { + ret[i] = &models.Track{ + ID: row.ID, + MbzID: row.MusicBrainzID, + Title: row.Title, + Image: row.Image, + } + err = json.Unmarshal(row.Artists, &ret[i].Artists) + if err != nil { + return nil, err + } + } + return ret, nil + } +} diff --git a/internal/db/psql/search_test.go b/internal/db/psql/search_test.go new file mode 100644 index 0000000..b607d76 --- /dev/null +++ b/internal/db/psql/search_test.go @@ -0,0 +1,116 @@ +package psql_test + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func setupTestDataForSearch(t *testing.T) { + truncateTestData(t) + + // Insert artists + err := store.Exec(context.Background(), + `INSERT INTO artists (musicbrainz_id) + VALUES ('00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002')`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) + VALUES (1, 'Artist One With A Long Name', 'Testing', true), + (2, 'Artist Two', 'Testing', true)`) + require.NoError(t, err) + + // Insert albums + err = store.Exec(context.Background(), + `INSERT INTO releases (musicbrainz_id, various_artists) + VALUES ('11111111-1111-1111-1111-111111111111', false), + ('22222222-2222-2222-2222-222222222222', true)`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO release_aliases (release_id, alias, source, is_primary) + VALUES (1, 'Album One With A Long Name', 'Testing', true), + (2, 'Album Two', 'Testing', true)`) + require.NoError(t, err) + + // Insert tracks + err = store.Exec(context.Background(), + `INSERT INTO tracks (musicbrainz_id, release_id) + VALUES ('33333333-3333-3333-3333-333333333333', 1), + ('44444444-4444-4444-4444-444444444444', 2)`) + require.NoError(t, err) + err = store.Exec(context.Background(), + `INSERT INTO track_aliases (track_id, alias, source, is_primary) + VALUES (1, 'Track One With A Long Name', 'Testing', true), + (2, 'Track Two', 'Testing', true)`) + require.NoError(t, err) + + // Associate artists with albums and tracks + err = store.Exec(context.Background(), + `INSERT INTO artist_releases (artist_id, release_id) + VALUES (1, 1), (2, 2)`) + require.NoError(t, err) + + err = store.Exec(context.Background(), + `INSERT INTO artist_tracks (artist_id, track_id) + VALUES (1, 1), (2, 2)`) + require.NoError(t, err) +} + +func TestSearchArtists(t *testing.T) { + ctx := context.Background() + setupTestDataForSearch(t) + + // Search for "Artist One With A Long Name" + results, err := store.SearchArtists(ctx, "Artist One With A Long Name") + require.NoError(t, err) + require.Len(t, results, 1) + assert.Equal(t, "Artist One With A Long Name", results[0].Name) + + // Search for substring "Artist" + results, err = store.SearchArtists(ctx, "Arti") + require.NoError(t, err) + require.Len(t, results, 2) + + truncateTestData(t) +} + +func TestSearchAlbums(t *testing.T) { + ctx := context.Background() + 
setupTestDataForSearch(t) + + // Search for "Album One With A Long Name" + results, err := store.SearchAlbums(ctx, "Album One With A Long Name") + require.NoError(t, err) + require.Len(t, results, 1) + assert.Equal(t, "Album One With A Long Name", results[0].Title) + + // Search for substring "Album" + results, err = store.SearchAlbums(ctx, "Albu") + require.NoError(t, err) + require.Len(t, results, 2) + assert.NotNil(t, results[0].Artists) + + truncateTestData(t) +} + +func TestSearchTracks(t *testing.T) { + ctx := context.Background() + setupTestDataForSearch(t) + + // Search for "Track One With A Long Name" + results, err := store.SearchTracks(ctx, "Track One With A Long Name") + require.NoError(t, err) + require.Len(t, results, 1) + assert.Equal(t, "Track One With A Long Name", results[0].Title) + + // Search for substring "Track" + results, err = store.SearchTracks(ctx, "Trac") + require.NoError(t, err) + require.Len(t, results, 2) + assert.NotNil(t, results[0].Artists) + + truncateTestData(t) +} diff --git a/internal/db/psql/sessions.go b/internal/db/psql/sessions.go new file mode 100644 index 0000000..d279121 --- /dev/null +++ b/internal/db/psql/sessions.go @@ -0,0 +1,59 @@ +package psql + +import ( + "context" + "errors" + "time" + + "github.com/gabehf/koito/internal/models" + "github.com/gabehf/koito/internal/repository" + "github.com/google/uuid" + "github.com/jackc/pgx/v5" +) + +func (d *Psql) SaveSession(ctx context.Context, userID int32, expiresAt time.Time, persistent bool) (*models.Session, error) { + session, err := d.q.InsertSession(ctx, repository.InsertSessionParams{ + ID: uuid.New(), + UserID: userID, + ExpiresAt: expiresAt, + Persistent: persistent, + }) + if err != nil { + return nil, err + } + return &models.Session{ + ID: session.ID, + UserID: session.UserID, + CreatedAt: session.CreatedAt, + ExpiresAt: session.ExpiresAt, + Persistent: session.Persistent, + }, nil +} + +func (d *Psql) RefreshSession(ctx context.Context, sessionId uuid.UUID, expiresAt time.Time) error { + return d.q.UpdateSessionExpiry(ctx, repository.UpdateSessionExpiryParams{ + ID: sessionId, + ExpiresAt: expiresAt, + }) +} + +func (d *Psql) DeleteSession(ctx context.Context, sessionId uuid.UUID) error { + return d.q.DeleteSession(ctx, sessionId) +} + +// Returns nil, nil when no database entries are found +func (d *Psql) GetUserBySession(ctx context.Context, sessionId uuid.UUID) (*models.User, error) { + row, err := d.q.GetUserBySession(ctx, sessionId) + if errors.Is(err, pgx.ErrNoRows) { + return nil, nil + } else if err != nil { + return nil, err + } + + return &models.User{ + ID: row.ID, + Username: row.Username, + Password: row.Password, + Role: models.UserRole(row.Role), + }, nil +} diff --git a/internal/db/psql/sessions_test.go b/internal/db/psql/sessions_test.go new file mode 100644 index 0000000..5044446 --- /dev/null +++ b/internal/db/psql/sessions_test.go @@ -0,0 +1,101 @@ +package psql_test + +import ( + "context" + "testing" + "time" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func truncateTestDataForSessions(t *testing.T) { + err := store.Exec(context.Background(), + `TRUNCATE + sessions + RESTART IDENTITY CASCADE`, + ) + require.NoError(t, err) +} +func TestSaveSession(t *testing.T) { + ctx := context.Background() + + // Save a session for the user + expiresAt := time.Now().Add(24 * time.Hour).UTC() + session, err := store.SaveSession(ctx, 1, expiresAt, true) + require.NoError(t, err) + require.NotNil(t, session) + 
assert.Equal(t, int32(1), session.UserID) + assert.Equal(t, true, session.Persistent) + assert.WithinDuration(t, expiresAt, session.ExpiresAt, time.Second) + + truncateTestDataForSessions(t) +} + +func TestRefreshSession(t *testing.T) { + ctx := context.Background() + + // Save a session first + expiresAt := time.Now().Add(-1 * time.Minute) + session, err := store.SaveSession(ctx, 1, expiresAt, true) + require.NoError(t, err) + + // Refresh the session expiry + newExpiresAt := time.Now().Add(48 * time.Hour) + err = store.RefreshSession(ctx, session.ID, newExpiresAt) + require.NoError(t, err) + + // Can only retrieve a session with an expiresAt > time.Now() + _, err = store.GetUserBySession(ctx, session.ID) + require.NoError(t, err) + + truncateTestDataForSessions(t) +} + +func TestDeleteSession(t *testing.T) { + ctx := context.Background() + + // Save a session first + expiresAt := time.Now().Add(24 * time.Hour) + session, err := store.SaveSession(ctx, 1, expiresAt, true) + require.NoError(t, err) + + // Delete the session + err = store.DeleteSession(ctx, session.ID) + require.NoError(t, err) + + // Verify the session was deleted + var count int + count, err = store.Count(ctx, `SELECT COUNT(*) FROM sessions WHERE id = $1`, session.ID) + require.NoError(t, err) + assert.Equal(t, 0, count) + + truncateTestDataForSessions(t) +} + +func TestGetUserBySession(t *testing.T) { + ctx := context.Background() + + // Save a session first + expiresAt := time.Now().Add(24 * time.Hour) + session, err := store.SaveSession(ctx, 1, expiresAt, true) + require.NoError(t, err) + + // Get the user by session + user, err := store.GetUserBySession(ctx, session.ID) + require.NoError(t, err) + require.NotNil(t, user) + assert.Equal(t, int32(1), user.ID) + assert.Equal(t, "test", user.Username) + assert.Equal(t, []uint8([]byte{0xab, 0xc1, 0x23}), user.Password) + assert.Equal(t, "user", string(user.Role)) + + // Test for a non-existent session + nonExistentSessionID := uuid.New() + user, err = store.GetUserBySession(ctx, nonExistentSessionID) + require.NoError(t, err) + assert.Nil(t, user) + + truncateTestDataForSessions(t) +} diff --git a/internal/db/psql/top_albums.go b/internal/db/psql/top_albums.go new file mode 100644 index 0000000..b44334d --- /dev/null +++ b/internal/db/psql/top_albums.go @@ -0,0 +1,119 @@ +package psql + +import ( + "context" + "encoding/json" + "time" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/models" + "github.com/gabehf/koito/internal/repository" + "github.com/gabehf/koito/internal/utils" +) + +func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[*models.Album], error) { + l := logger.FromContext(ctx) + offset := (opts.Page - 1) * opts.Limit + t1, t2, err := utils.DateRange(opts.Week, opts.Month, opts.Year) + if err != nil { + return nil, err + } + if opts.Month == 0 && opts.Year == 0 { + // use period, not date range + t2 = time.Now() + t1 = db.StartTimeFromPeriod(opts.Period) + } + if opts.Limit == 0 { + opts.Limit = DefaultItemsPerPage + } + + var rgs []*models.Album + var count int64 + + if opts.ArtistID != 0 { + l.Debug().Msgf("Fetching top %d albums from artist id %d with period %s on page %d from range %v to %v", + opts.Limit, opts.ArtistID, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + + rows, err := d.q.GetTopReleasesFromArtist(ctx, repository.GetTopReleasesFromArtistParams{ + ArtistID: int32(opts.ArtistID), + Limit: 
int32(opts.Limit), + Offset: int32(offset), + ListenedAt: t1, + ListenedAt_2: t2, + }) + if err != nil { + return nil, err + } + rgs = make([]*models.Album, len(rows)) + l.Debug().Msgf("Database responded with %d items", len(rows)) + for i, v := range rows { + artists := make([]models.SimpleArtist, 0) + err = json.Unmarshal(v.Artists, &artists) + if err != nil { + l.Err(err).Msgf("Error unmarshalling artists for release group with id %d", v.ID) + artists = nil + } + rgs[i] = &models.Album{ + ID: v.ID, + MbzID: v.MusicBrainzID, + Title: v.Title, + Image: v.Image, + Artists: artists, + VariousArtists: v.VariousArtists, + ListenCount: v.ListenCount, + } + } + count, err = d.q.CountReleasesFromArtist(ctx, int32(opts.ArtistID)) + if err != nil { + return nil, err + } + } else { + l.Debug().Msgf("Fetching top %d albums with period %s on page %d from range %v to %v", + opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + rows, err := d.q.GetTopReleasesPaginated(ctx, repository.GetTopReleasesPaginatedParams{ + ListenedAt: t1, + ListenedAt_2: t2, + Limit: int32(opts.Limit), + Offset: int32(offset), + }) + if err != nil { + return nil, err + } + rgs = make([]*models.Album, len(rows)) + l.Debug().Msgf("Database responded with %d items", len(rows)) + for i, row := range rows { + artists := make([]models.SimpleArtist, 0) + err = json.Unmarshal(row.Artists, &artists) + if err != nil { + l.Err(err).Msgf("Error unmarshalling artists for release group with id %d", row.ID) + artists = nil + } + t := &models.Album{ + Title: row.Title, + MbzID: row.MusicBrainzID, + ID: row.ID, + Image: row.Image, + Artists: artists, + VariousArtists: row.VariousArtists, + ListenCount: row.ListenCount, + } + rgs[i] = t + } + count, err = d.q.CountTopReleases(ctx, repository.CountTopReleasesParams{ + ListenedAt: t1, + ListenedAt_2: t2, + }) + if err != nil { + return nil, err + } + l.Debug().Msgf("Database responded with %d albums out of a total %d", len(rows), count) + } + return &db.PaginatedResponse[*models.Album]{ + Items: rgs, + TotalCount: count, + ItemsPerPage: int32(opts.Limit), + HasNextPage: int64(offset+len(rgs)) < count, + CurrentPage: int32(opts.Page), + }, nil +} diff --git a/internal/db/psql/top_albums_test.go b/internal/db/psql/top_albums_test.go new file mode 100644 index 0000000..d698be6 --- /dev/null +++ b/internal/db/psql/top_albums_test.go @@ -0,0 +1,103 @@ +package psql_test + +import ( + "context" + "testing" + + "github.com/gabehf/koito/internal/db" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGetTopAlbumsPaginated(t *testing.T) { + testDataForTopItems(t) + ctx := context.Background() + + // Test valid + resp, err := store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime}) + require.NoError(t, err) + require.Len(t, resp.Items, 4) + assert.Equal(t, int64(4), resp.TotalCount) + assert.Equal(t, "Release One", resp.Items[0].Title) + assert.Equal(t, "Release Two", resp.Items[1].Title) + assert.Equal(t, "Release Three", resp.Items[2].Title) + assert.Equal(t, "Release Four", resp.Items[3].Title) + + // Test pagination + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Period: db.PeriodAllTime}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + assert.Equal(t, "Release Two", resp.Items[0].Title) + + // Test page out of range + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 10, Period: db.PeriodAllTime}) + require.NoError(t, err) + 
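The pagination contract in GetTopAlbumsPaginated is worth spelling out: the offset is `(Page-1)*Limit`, a zero Limit falls back to DefaultItemsPerPage, and HasNextPage stays true while `offset+len(items)` is below TotalCount. A small sketch of walking every page follows; the 50-item page size is arbitrary, and it assumes the `db.DB` interface exposes GetTopAlbumsPaginated the way the `*Psql` method above does.

```go
// Sketch: iterate all pages of the all-time top-albums chart, using the
// GetItemsOpts and PaginatedResponse fields shown above.
package example

import (
	"context"
	"fmt"

	"github.com/gabehf/koito/internal/db"
)

func printAllTopAlbums(ctx context.Context, store db.DB) error {
	for page := 1; ; page++ {
		resp, err := store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{
			Period: db.PeriodAllTime,
			Limit:  50, // arbitrary; a zero Limit falls back to DefaultItemsPerPage
			Page:   page,
		})
		if err != nil {
			return err
		}
		for _, album := range resp.Items {
			fmt.Printf("%s (%d listens)\n", album.Title, album.ListenCount)
		}
		if !resp.HasNextPage { // offset + len(items) has reached TotalCount
			return nil
		}
	}
}
```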
require.Empty(t, resp.Items) + assert.False(t, resp.HasNextPage) + + // Test invalid inputs + _, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Limit: -1, Page: 0}) + assert.Error(t, err) + + _, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: -1}) + assert.Error(t, err) + + // Test specify period + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodDay}) + require.NoError(t, err) + require.Len(t, resp.Items, 0) // empty + assert.Equal(t, int64(0), resp.TotalCount) + // should default to PeriodDay + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{}) + require.NoError(t, err) + require.Len(t, resp.Items, 0) // empty + assert.Equal(t, int64(0), resp.TotalCount) + + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodWeek}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + assert.Equal(t, int64(1), resp.TotalCount) + assert.Equal(t, "Release Four", resp.Items[0].Title) + + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodMonth}) + require.NoError(t, err) + require.Len(t, resp.Items, 2) + assert.Equal(t, int64(2), resp.TotalCount) + assert.Equal(t, "Release Three", resp.Items[0].Title) + assert.Equal(t, "Release Four", resp.Items[1].Title) + + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodYear}) + require.NoError(t, err) + require.Len(t, resp.Items, 3) + assert.Equal(t, int64(3), resp.TotalCount) + assert.Equal(t, "Release Two", resp.Items[0].Title) + assert.Equal(t, "Release Three", resp.Items[1].Title) + assert.Equal(t, "Release Four", resp.Items[2].Title) + + // test specific artist + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodYear, ArtistID: 2}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + assert.Equal(t, int64(1), resp.TotalCount) + assert.Equal(t, "Release Two", resp.Items[0].Title) + + // Test specify dates + + testDataAbsoluteListenTimes(t) + + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Year: 2023}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + assert.Equal(t, int64(1), resp.TotalCount) + assert.Equal(t, "Release One", resp.Items[0].Title) + + resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Month: 6, Year: 2024}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + assert.Equal(t, int64(1), resp.TotalCount) + assert.Equal(t, "Release Two", resp.Items[0].Title) + + // invalid, year required with month + _, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Month: 10}) + require.Error(t, err) +} diff --git a/internal/db/psql/top_artists.go b/internal/db/psql/top_artists.go new file mode 100644 index 0000000..980f89d --- /dev/null +++ b/internal/db/psql/top_artists.go @@ -0,0 +1,67 @@ +package psql + +import ( + "context" + "time" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/models" + "github.com/gabehf/koito/internal/repository" + "github.com/gabehf/koito/internal/utils" +) + +func (d *Psql) GetTopArtistsPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[*models.Artist], error) { + l := logger.FromContext(ctx) + offset := (opts.Page - 1) * opts.Limit + t1, t2, err := utils.DateRange(opts.Week, opts.Month, opts.Year) + if err != nil { + return nil, err + } + if opts.Month == 0 && opts.Year == 0 { + // use period, not date range + t2 = time.Now() + t1 = db.StartTimeFromPeriod(opts.Period) + } + if opts.Limit == 0 { + 
opts.Limit = DefaultItemsPerPage + } + l.Debug().Msgf("Fetching top %d artists with period %s on page %d from range %v to %v", + opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + rows, err := d.q.GetTopArtistsPaginated(ctx, repository.GetTopArtistsPaginatedParams{ + ListenedAt: t1, + ListenedAt_2: t2, + Limit: int32(opts.Limit), + Offset: int32(offset), + }) + if err != nil { + return nil, err + } + rgs := make([]*models.Artist, len(rows)) + for i, row := range rows { + t := &models.Artist{ + Name: row.Name, + MbzID: row.MusicBrainzID, + ID: row.ID, + Image: row.Image, + ListenCount: row.ListenCount, + } + rgs[i] = t + } + count, err := d.q.CountTopArtists(ctx, repository.CountTopArtistsParams{ + ListenedAt: t1, + ListenedAt_2: t2, + }) + if err != nil { + return nil, err + } + l.Debug().Msgf("Database responded with %d artists out of a total %d", len(rows), count) + + return &db.PaginatedResponse[*models.Artist]{ + Items: rgs, + TotalCount: count, + ItemsPerPage: int32(opts.Limit), + HasNextPage: int64(offset+len(rgs)) < count, + CurrentPage: int32(opts.Page), + }, nil +} diff --git a/internal/db/psql/top_artists_test.go b/internal/db/psql/top_artists_test.go new file mode 100644 index 0000000..2f261a0 --- /dev/null +++ b/internal/db/psql/top_artists_test.go @@ -0,0 +1,96 @@ +package psql_test + +import ( + "context" + "testing" + + "github.com/gabehf/koito/internal/db" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGetTopArtistsPaginated(t *testing.T) { + testDataForTopItems(t) + ctx := context.Background() + + // Test valid + resp, err := store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime}) + require.NoError(t, err) + require.Len(t, resp.Items, 4) + assert.Equal(t, int64(4), resp.TotalCount) + assert.Equal(t, "Artist One", resp.Items[0].Name) + assert.Equal(t, "Artist Two", resp.Items[1].Name) + assert.Equal(t, "Artist Three", resp.Items[2].Name) + assert.Equal(t, "Artist Four", resp.Items[3].Name) + + // Test pagination + resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Period: db.PeriodAllTime}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + assert.Equal(t, "Artist Two", resp.Items[0].Name) + + // Test page out of range + resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 10, Period: db.PeriodAllTime}) + require.NoError(t, err) + assert.Empty(t, resp.Items) + assert.False(t, resp.HasNextPage) + + // Test invalid inputs + _, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Limit: -1, Page: 0}) + assert.Error(t, err) + + _, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: -1}) + assert.Error(t, err) + + // Test specify period + resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodDay}) + require.NoError(t, err) + require.Len(t, resp.Items, 0) // empty + assert.Equal(t, int64(0), resp.TotalCount) + // should default to PeriodDay + resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{}) + require.NoError(t, err) + require.Len(t, resp.Items, 0) // empty + assert.Equal(t, int64(0), resp.TotalCount) + + resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodWeek}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + assert.Equal(t, int64(1), resp.TotalCount) + assert.Equal(t, "Artist Four", resp.Items[0].Name) + + resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodMonth}) + 
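GetTopArtistsPaginated resolves its time window the same way as the album and track queries: when Month and Year are both zero, the rolling Period is used (db.StartTimeFromPeriod up to time.Now()); otherwise utils.DateRange turns Week/Month/Year into an absolute calendar range, and a Month without a Year is rejected. Roughly, the two call shapes look like this, again assuming `db.DB` exposes the method as `*Psql` does.

```go
// Sketch: the two ways a caller can scope a top-artists query, per the
// Period / date-range branch above.
package example

import (
	"context"

	"github.com/gabehf/koito/internal/db"
)

func topArtistWindows(ctx context.Context, store db.DB) error {
	// 1) Rolling window ending at time.Now(), driven by Period.
	if _, err := store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodMonth}); err != nil {
		return err
	}
	// 2) Absolute calendar range, e.g. June 2024. A Month without a Year is
	//    rejected by utils.DateRange, so {Month: 6} alone returns an error.
	if _, err := store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Year: 2024, Month: 6}); err != nil {
		return err
	}
	return nil
}
```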
require.NoError(t, err) + require.Len(t, resp.Items, 2) + assert.Equal(t, int64(2), resp.TotalCount) + assert.Equal(t, "Artist Three", resp.Items[0].Name) + assert.Equal(t, "Artist Four", resp.Items[1].Name) + + resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodYear}) + require.NoError(t, err) + require.Len(t, resp.Items, 3) + assert.Equal(t, int64(3), resp.TotalCount) + assert.Equal(t, "Artist Two", resp.Items[0].Name) + assert.Equal(t, "Artist Three", resp.Items[1].Name) + assert.Equal(t, "Artist Four", resp.Items[2].Name) + + // Test specify dates + + testDataAbsoluteListenTimes(t) + + resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Year: 2023}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + assert.Equal(t, int64(1), resp.TotalCount) + assert.Equal(t, "Artist One", resp.Items[0].Name) + + resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Month: 6, Year: 2024}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + assert.Equal(t, int64(1), resp.TotalCount) + assert.Equal(t, "Artist Two", resp.Items[0].Name) + + // invalid, year required with month + _, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Month: 10}) + require.Error(t, err) +} diff --git a/internal/db/psql/top_tracks.go b/internal/db/psql/top_tracks.go new file mode 100644 index 0000000..765b3a6 --- /dev/null +++ b/internal/db/psql/top_tracks.go @@ -0,0 +1,160 @@ +package psql + +import ( + "context" + "encoding/json" + "time" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/models" + "github.com/gabehf/koito/internal/repository" + "github.com/gabehf/koito/internal/utils" +) + +func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[*models.Track], error) { + l := logger.FromContext(ctx) + offset := (opts.Page - 1) * opts.Limit + t1, t2, err := utils.DateRange(opts.Week, opts.Month, opts.Year) + if err != nil { + return nil, err + } + if opts.Month == 0 && opts.Year == 0 { + // use period, not date range + t2 = time.Now() + t1 = db.StartTimeFromPeriod(opts.Period) + } + if opts.Limit == 0 { + opts.Limit = DefaultItemsPerPage + } + var tracks []*models.Track + var count int64 + if opts.AlbumID > 0 { + l.Debug().Msgf("Fetching top %d tracks with period %s on page %d from range %v to %v", + opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + rows, err := d.q.GetTopTracksInReleasePaginated(ctx, repository.GetTopTracksInReleasePaginatedParams{ + ListenedAt: t1, + ListenedAt_2: t2, + Limit: int32(opts.Limit), + Offset: int32(offset), + ReleaseID: int32(opts.AlbumID), + }) + if err != nil { + return nil, err + } + tracks = make([]*models.Track, len(rows)) + for i, row := range rows { + artists := make([]models.SimpleArtist, 0) + err = json.Unmarshal(row.Artists, &artists) + if err != nil { + l.Err(err).Msgf("Error unmarshalling artists for track with id %d", row.ID) + artists = nil + } + t := &models.Track{ + Title: row.Title, + MbzID: row.MusicBrainzID, + ID: row.ID, + ListenCount: row.ListenCount, + Image: row.Image, + AlbumID: row.ReleaseID, + Artists: artists, + } + tracks[i] = t + } + count, err = d.q.CountTopTracksByRelease(ctx, repository.CountTopTracksByReleaseParams{ + ListenedAt: t1, + ListenedAt_2: t2, + ReleaseID: int32(opts.AlbumID), + }) + if err != nil { + return nil, err + } + } else if opts.ArtistID > 0 { + l.Debug().Msgf("Fetching top %d tracks with period %s on page %d from 
range %v to %v", + opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + rows, err := d.q.GetTopTracksByArtistPaginated(ctx, repository.GetTopTracksByArtistPaginatedParams{ + ListenedAt: t1, + ListenedAt_2: t2, + Limit: int32(opts.Limit), + Offset: int32(offset), + ArtistID: int32(opts.ArtistID), + }) + if err != nil { + return nil, err + } + tracks = make([]*models.Track, len(rows)) + for i, row := range rows { + artists := make([]models.SimpleArtist, 0) + err = json.Unmarshal(row.Artists, &artists) + if err != nil { + l.Err(err).Msgf("Error unmarshalling artists for track with id %d", row.ID) + artists = nil + } + t := &models.Track{ + Title: row.Title, + MbzID: row.MusicBrainzID, + ID: row.ID, + Image: row.Image, + ListenCount: row.ListenCount, + AlbumID: row.ReleaseID, + Artists: artists, + } + tracks[i] = t + } + count, err = d.q.CountTopTracksByArtist(ctx, repository.CountTopTracksByArtistParams{ + ListenedAt: t1, + ListenedAt_2: t2, + ArtistID: int32(opts.ArtistID), + }) + if err != nil { + return nil, err + } + } else { + l.Debug().Msgf("Fetching top %d tracks with period %s on page %d from range %v to %v", + opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006")) + rows, err := d.q.GetTopTracksPaginated(ctx, repository.GetTopTracksPaginatedParams{ + ListenedAt: t1, + ListenedAt_2: t2, + Limit: int32(opts.Limit), + Offset: int32(offset), + }) + if err != nil { + return nil, err + } + tracks = make([]*models.Track, len(rows)) + for i, row := range rows { + artists := make([]models.SimpleArtist, 0) + err = json.Unmarshal(row.Artists, &artists) + if err != nil { + l.Err(err).Msgf("Error unmarshalling artists for track with id %d", row.ID) + artists = nil + } + t := &models.Track{ + Title: row.Title, + MbzID: row.MusicBrainzID, + ID: row.ID, + Image: row.Image, + ListenCount: row.ListenCount, + AlbumID: row.ReleaseID, + Artists: artists, + } + tracks[i] = t + } + count, err = d.q.CountTopTracks(ctx, repository.CountTopTracksParams{ + ListenedAt: t1, + ListenedAt_2: t2, + }) + if err != nil { + return nil, err + } + l.Debug().Msgf("Database responded with %d tracks out of a total %d", len(rows), count) + } + + return &db.PaginatedResponse[*models.Track]{ + Items: tracks, + TotalCount: count, + ItemsPerPage: int32(opts.Limit), + HasNextPage: int64(offset+len(tracks)) < count, + CurrentPage: int32(opts.Page), + }, nil +} diff --git a/internal/db/psql/top_tracks_test.go b/internal/db/psql/top_tracks_test.go new file mode 100644 index 0000000..89e63f1 --- /dev/null +++ b/internal/db/psql/top_tracks_test.go @@ -0,0 +1,118 @@ +package psql_test + +import ( + "context" + "testing" + + "github.com/gabehf/koito/internal/db" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGetTopTracksPaginated(t *testing.T) { + testDataForTopItems(t) + ctx := context.Background() + + // Test valid + resp, err := store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime}) + require.NoError(t, err) + require.Len(t, resp.Items, 4) + assert.Equal(t, int64(4), resp.TotalCount) + assert.Equal(t, "Track One", resp.Items[0].Title) + assert.Equal(t, "Track Two", resp.Items[1].Title) + assert.Equal(t, "Track Three", resp.Items[2].Title) + assert.Equal(t, "Track Four", resp.Items[3].Title) + // ensure artists are included + require.Len(t, resp.Items[0].Artists, 1) + assert.Equal(t, "Artist One", resp.Items[0].Artists[0].Name) + + // Test pagination + resp, err = 
store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Period: db.PeriodAllTime}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + assert.Equal(t, "Track Two", resp.Items[0].Title) + + // Test page out of range + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 10, Period: db.PeriodAllTime}) + require.NoError(t, err) + assert.Empty(t, resp.Items) + assert.False(t, resp.HasNextPage) + + // Test invalid inputs + _, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Limit: -1, Page: 0}) + assert.Error(t, err) + + _, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: -1}) + assert.Error(t, err) + + // Test specify period + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodDay}) + require.NoError(t, err) + require.Len(t, resp.Items, 0) // empty + assert.Equal(t, int64(0), resp.TotalCount) + // should default to PeriodDay + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{}) + require.NoError(t, err) + require.Len(t, resp.Items, 0) // empty + assert.Equal(t, int64(0), resp.TotalCount) + + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodWeek}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + assert.Equal(t, int64(1), resp.TotalCount) + assert.Equal(t, "Track Four", resp.Items[0].Title) + + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodMonth}) + require.NoError(t, err) + require.Len(t, resp.Items, 2) + assert.Equal(t, int64(2), resp.TotalCount) + assert.Equal(t, "Track Three", resp.Items[0].Title) + assert.Equal(t, "Track Four", resp.Items[1].Title) + + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodYear}) + require.NoError(t, err) + require.Len(t, resp.Items, 3) + assert.Equal(t, int64(3), resp.TotalCount) + assert.Equal(t, "Track Two", resp.Items[0].Title) + assert.Equal(t, "Track Three", resp.Items[1].Title) + assert.Equal(t, "Track Four", resp.Items[2].Title) + + // Test filter by artists and releases + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, ArtistID: 1}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + assert.Equal(t, int64(1), resp.TotalCount) + assert.Equal(t, "Track One", resp.Items[0].Title) + + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, AlbumID: 2}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + assert.Equal(t, int64(1), resp.TotalCount) + assert.Equal(t, "Track Two", resp.Items[0].Title) + // when both artistID and albumID are specified, artist id is ignored + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, AlbumID: 2, ArtistID: 1}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + assert.Equal(t, int64(1), resp.TotalCount) + assert.Equal(t, "Track Two", resp.Items[0].Title) + + // Test specify dates + + testDataAbsoluteListenTimes(t) + + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Year: 2023}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + assert.Equal(t, int64(1), resp.TotalCount) + assert.Equal(t, "Track One", resp.Items[0].Title) + + resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Month: 6, Year: 2024}) + require.NoError(t, err) + require.Len(t, resp.Items, 1) + assert.Equal(t, int64(1), resp.TotalCount) + assert.Equal(t, "Track Two", resp.Items[0].Title) + + // invalid, year required with month + _, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Month: 
10}) + require.Error(t, err) +} diff --git a/internal/db/psql/track.go b/internal/db/psql/track.go new file mode 100644 index 0000000..0c3c2a4 --- /dev/null +++ b/internal/db/psql/track.go @@ -0,0 +1,298 @@ +package psql + +import ( + "context" + "errors" + "strings" + "time" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/models" + "github.com/gabehf/koito/internal/repository" + "github.com/gabehf/koito/internal/utils" + "github.com/google/uuid" + "github.com/jackc/pgx/v5" +) + +func (d *Psql) GetTrack(ctx context.Context, opts db.GetTrackOpts) (*models.Track, error) { + l := logger.FromContext(ctx) + var track models.Track + + if opts.ID != 0 { + l.Debug().Msgf("Fetching track from DB with id %d", opts.ID) + t, err := d.q.GetTrack(ctx, opts.ID) + if err != nil { + return nil, err + } + track = models.Track{ + ID: t.ID, + MbzID: t.MusicBrainzID, + Title: t.Title, + AlbumID: t.ReleaseID, + Image: t.Image, + Duration: t.Duration, + } + } else if opts.MusicBrainzID != uuid.Nil { + l.Debug().Msgf("Fetching track from DB with MusicBrainz ID %s", opts.MusicBrainzID) + t, err := d.q.GetTrackByMbzID(ctx, &opts.MusicBrainzID) + if err != nil { + return nil, err + } + track = models.Track{ + ID: t.ID, + MbzID: t.MusicBrainzID, + Title: t.Title, + AlbumID: t.ReleaseID, + Duration: t.Duration, + } + } else if len(opts.ArtistIDs) > 0 { + l.Debug().Msgf("Fetching track from DB with title '%s' and artist id(s) '%v'", opts.Title, opts.ArtistIDs) + t, err := d.q.GetTrackByTitleAndArtists(ctx, repository.GetTrackByTitleAndArtistsParams{ + Title: opts.Title, + Column2: opts.ArtistIDs, + }) + if err != nil { + return nil, err + } + track = models.Track{ + ID: t.ID, + MbzID: t.MusicBrainzID, + Title: t.Title, + AlbumID: t.ReleaseID, + Duration: t.Duration, + } + } else { + return nil, errors.New("insufficient information to get track") + } + + count, err := d.q.CountListensFromTrack(ctx, repository.CountListensFromTrackParams{ + ListenedAt: time.Unix(0, 0), + ListenedAt_2: time.Now(), + TrackID: track.ID, + }) + if err != nil { + l.Err(err).Msgf("Failed to get listen count for track with id %d", track.ID) + } + + track.ListenCount = count + + return &track, nil +} + +func (d *Psql) SaveTrack(ctx context.Context, opts db.SaveTrackOpts) (*models.Track, error) { + // create track in DB + l := logger.FromContext(ctx) + var insertMbzID *uuid.UUID + if opts.RecordingMbzID != uuid.Nil { + insertMbzID = &opts.RecordingMbzID + } + if len(opts.ArtistIDs) < 1 { + return nil, errors.New("required parameter 'ArtistIDs' missing") + } + for _, aid := range opts.ArtistIDs { + if aid == 0 { + return nil, errors.New("none of 'ArtistIDs' may be 0") + } + } + if opts.AlbumID == 0 { + return nil, errors.New("required parameter 'AlbumID' missing") + } + tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return nil, err + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + l.Debug().Msgf("Inserting new track '%s' into DB", opts.Title) + trackRow, err := qtx.InsertTrack(ctx, repository.InsertTrackParams{ + MusicBrainzID: insertMbzID, + ReleaseID: opts.AlbumID, + }) + if err != nil { + return nil, err + } + // insert associated artists + for _, aid := range opts.ArtistIDs { + err = qtx.AssociateArtistToTrack(ctx, repository.AssociateArtistToTrackParams{ + ArtistID: aid, + TrackID: trackRow.ID, + }) + if err != nil { + return nil, err + } + } + // insert primary alias + err = 
qtx.InsertTrackAlias(ctx, repository.InsertTrackAliasParams{ + TrackID: trackRow.ID, + Alias: opts.Title, + Source: "Canonical", + IsPrimary: true, + }) + if err != nil { + return nil, err + } + err = tx.Commit(ctx) + if err != nil { + return nil, err + } + return &models.Track{ + ID: trackRow.ID, + MbzID: insertMbzID, + Title: opts.Title, + }, nil +} + +func (d *Psql) UpdateTrack(ctx context.Context, opts db.UpdateTrackOpts) error { + l := logger.FromContext(ctx) + if opts.ID == 0 { + return errors.New("track id not specified") + } + tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return err + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + if opts.MusicBrainzID != uuid.Nil { + l.Debug().Msgf("Updating MusicBrainz ID for track %d", opts.ID) + err := qtx.UpdateTrackMbzID(ctx, repository.UpdateTrackMbzIDParams{ + ID: opts.ID, + MusicBrainzID: &opts.MusicBrainzID, + }) + if err != nil { + return err + } + } + if opts.Duration != 0 { + l.Debug().Msgf("Updating duration for track %d", opts.ID) + err := qtx.UpdateTrackDuration(ctx, repository.UpdateTrackDurationParams{ + ID: opts.ID, + Duration: opts.Duration, + }) + if err != nil { + return err + } + } + return tx.Commit(ctx) +} + +func (d *Psql) SaveTrackAliases(ctx context.Context, id int32, aliases []string, source string) error { + l := logger.FromContext(ctx) + if id == 0 { + return errors.New("track id not specified") + } + tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return err + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + existing, err := qtx.GetAllTrackAliases(ctx, id) + if err != nil { + return err + } + for _, v := range existing { + aliases = append(aliases, v.Alias) + } + utils.Unique(&aliases) + for _, alias := range aliases { + if strings.TrimSpace(alias) == "" { + return errors.New("aliases cannot be blank") + } + err = qtx.InsertTrackAlias(ctx, repository.InsertTrackAliasParams{ + Alias: strings.TrimSpace(alias), + TrackID: id, + Source: source, + IsPrimary: false, + }) + if err != nil { + return err + } + } + return tx.Commit(ctx) +} + +func (d *Psql) DeleteTrack(ctx context.Context, id int32) error { + return d.q.DeleteTrack(ctx, id) +} + +func (d *Psql) DeleteTrackAlias(ctx context.Context, id int32, alias string) error { + return d.q.DeleteTrackAlias(ctx, repository.DeleteTrackAliasParams{ + TrackID: id, + Alias: alias, + }) +} + +func (d *Psql) GetAllTrackAliases(ctx context.Context, id int32) ([]models.Alias, error) { + rows, err := d.q.GetAllTrackAliases(ctx, id) + if err != nil { + return nil, err + } + aliases := make([]models.Alias, len(rows)) + for i, row := range rows { + aliases[i] = models.Alias{ + ID: id, + Alias: row.Alias, + Source: row.Source, + Primary: row.IsPrimary, + } + } + return aliases, nil +} + +func (d *Psql) SetPrimaryTrackAlias(ctx context.Context, id int32, alias string) error { + l := logger.FromContext(ctx) + if id == 0 { + return errors.New("artist id not specified") + } + tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return err + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + // get all aliases + aliases, err := qtx.GetAllTrackAliases(ctx, id) + if err != nil { + return err + } + primary := "" + exists := false + for _, v := range aliases { + if v.Alias == alias { + exists = true + } + if v.IsPrimary { + primary = v.Alias + } + } + if primary == alias { + // no-op 
rename + return nil + } + if !exists { + return errors.New("alias does not exist") + } + err = qtx.SetTrackAliasPrimaryStatus(ctx, repository.SetTrackAliasPrimaryStatusParams{ + TrackID: id, + Alias: alias, + IsPrimary: true, + }) + if err != nil { + return err + } + err = qtx.SetTrackAliasPrimaryStatus(ctx, repository.SetTrackAliasPrimaryStatusParams{ + TrackID: id, + Alias: primary, + IsPrimary: false, + }) + if err != nil { + return err + } + return tx.Commit(ctx) +} diff --git a/internal/db/psql/track_test.go b/internal/db/psql/track_test.go new file mode 100644 index 0000000..73bf4e0 --- /dev/null +++ b/internal/db/psql/track_test.go @@ -0,0 +1,213 @@ +package psql_test + +import ( + "context" + "testing" + + "github.com/gabehf/koito/internal/db" + "github.com/google/uuid" + "github.com/jackc/pgx/v5" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func testDataForTracks(t *testing.T) { + truncateTestData(t) + + // Insert artists + err := store.Exec(context.Background(), + `INSERT INTO artists (musicbrainz_id) + VALUES ('00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002')`) + require.NoError(t, err) + + // Insert artist aliases + err = store.Exec(context.Background(), + `INSERT INTO artist_aliases (artist_id, alias, source, is_primary) + VALUES (1, 'Artist One', 'Testing', true), + (2, 'Artist Two', 'Testing', true)`) + require.NoError(t, err) + + // Insert release groups + err = store.Exec(context.Background(), + `INSERT INTO releases (musicbrainz_id) + VALUES ('00000000-0000-0000-0000-000000000011'), + ('00000000-0000-0000-0000-000000000022')`) + require.NoError(t, err) + + // Insert release aliases + err = store.Exec(context.Background(), + `INSERT INTO release_aliases (release_id, alias, source, is_primary) + VALUES (1, 'Release Group One', 'Testing', true), + (2, 'Release Group Two', 'Testing', true)`) + require.NoError(t, err) + + // Insert tracks + err = store.Exec(context.Background(), + `INSERT INTO tracks (musicbrainz_id, release_id) + VALUES ('11111111-1111-1111-1111-111111111111', 1), + ('22222222-2222-2222-2222-222222222222', 2)`) + require.NoError(t, err) + + // Insert track aliases + err = store.Exec(context.Background(), + `INSERT INTO track_aliases (track_id, alias, source, is_primary) + VALUES (1, 'Track One', 'Testing', true), + (2, 'Track Two', 'Testing', true)`) + require.NoError(t, err) + + // Associate tracks with artists + err = store.Exec(context.Background(), + `INSERT INTO artist_tracks (artist_id, track_id) + VALUES (1, 1), (2, 2)`) + require.NoError(t, err) +} + +func TestGetTrack(t *testing.T) { + testDataForTracks(t) + ctx := context.Background() + + // Test GetTrack by ID + track, err := store.GetTrack(ctx, db.GetTrackOpts{ID: 1}) + require.NoError(t, err) + assert.Equal(t, int32(1), track.ID) + assert.Equal(t, "Track One", track.Title) + assert.Equal(t, uuid.MustParse("11111111-1111-1111-1111-111111111111"), *track.MbzID) + + // Test GetTrack by MusicBrainzID + track, err = store.GetTrack(ctx, db.GetTrackOpts{MusicBrainzID: uuid.MustParse("22222222-2222-2222-2222-222222222222")}) + require.NoError(t, err) + assert.Equal(t, int32(2), track.ID) + assert.Equal(t, "Track Two", track.Title) + + // Test GetTrack by Title and ArtistIDs + track, err = store.GetTrack(ctx, db.GetTrackOpts{ + Title: "Track One", + ArtistIDs: []int32{1}, + }) + require.NoError(t, err) + assert.Equal(t, int32(1), track.ID) + assert.Equal(t, "Track One", track.Title) + + // Test GetTrack with insufficient information 
+ _, err = store.GetTrack(ctx, db.GetTrackOpts{}) + assert.Error(t, err) +} +func TestSaveTrack(t *testing.T) { + testDataForTracks(t) + ctx := context.Background() + + // Test SaveTrack with valid inputs + track, err := store.SaveTrack(ctx, db.SaveTrackOpts{ + Title: "New Track", + ArtistIDs: []int32{1}, + RecordingMbzID: uuid.MustParse("33333333-3333-3333-3333-333333333333"), + AlbumID: 1, + }) + require.NoError(t, err) + assert.Equal(t, "New Track", track.Title) + assert.Equal(t, uuid.MustParse("33333333-3333-3333-3333-333333333333"), *track.MbzID) + + // Verify artist associations exist + exists, err := store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM artist_tracks + WHERE artist_id = $1 AND track_id = $2 + )`, 1, track.ID) + require.NoError(t, err) + assert.True(t, exists, "expected artist association to exist") + + // Verify alias exists + exists, err = store.RowExists(ctx, ` + SELECT EXISTS ( + SELECT 1 FROM track_aliases + WHERE track_id = $1 AND is_primary = true + )`, track.ID) + require.NoError(t, err) + assert.True(t, exists, "expected primary alias to exist") + + // Test SaveTrack with missing ArtistIDs + _, err = store.SaveTrack(ctx, db.SaveTrackOpts{ + Title: "Invalid Track", + ArtistIDs: []int32{}, + RecordingMbzID: uuid.MustParse("44444444-4444-4444-4444-444444444444"), + }) + assert.Error(t, err) + + // Test SaveTrack with invalid ArtistIDs + _, err = store.SaveTrack(ctx, db.SaveTrackOpts{ + Title: "Invalid Track", + ArtistIDs: []int32{0}, + RecordingMbzID: uuid.MustParse("55555555-5555-5555-5555-555555555555"), + }) + assert.Error(t, err) +} + +func TestUpdateTrack(t *testing.T) { + testDataForTracks(t) + ctx := context.Background() + + newMbzID := uuid.MustParse("66666666-6666-6666-6666-666666666666") + newDuration := 100 + err := store.UpdateTrack(ctx, db.UpdateTrackOpts{ + ID: 1, + MusicBrainzID: newMbzID, + Duration: int32(newDuration), + }) + require.NoError(t, err) + + // Verify the update + track, err := store.GetTrack(ctx, db.GetTrackOpts{ID: 1}) + require.NoError(t, err) + require.Equal(t, newMbzID, *track.MbzID) + require.EqualValues(t, newDuration, track.Duration) + + // Test UpdateTrack with missing ID + err = store.UpdateTrack(ctx, db.UpdateTrackOpts{ + ID: 0, + MusicBrainzID: newMbzID, + Duration: int32(newDuration), + }) + assert.Error(t, err) + + // Test UpdateTrack with nil MusicBrainz ID + err = store.UpdateTrack(ctx, db.UpdateTrackOpts{ + ID: 1, + MusicBrainzID: uuid.Nil, + Duration: int32(newDuration), + }) + assert.NoError(t, err) // No update should occur +} + +func TestTrackAliases(t *testing.T) { + testDataForTracks(t) + ctx := context.Background() + + err := store.SaveTrackAliases(ctx, 1, []string{"Alias One", "Alias Two"}, "Testing") + require.NoError(t, err) + aliases, err := store.GetAllTrackAliases(ctx, 1) + require.NoError(t, err) + assert.Len(t, aliases, 3) + + err = store.SetPrimaryTrackAlias(ctx, 1, "Alias One") + require.NoError(t, err) + track, err := store.GetTrack(ctx, db.GetTrackOpts{ID: 1}) + require.NoError(t, err) + assert.Equal(t, "Alias One", track.Title) + + err = store.SetPrimaryTrackAlias(ctx, 1, "Fake Alias") + require.Error(t, err) + + store.SetPrimaryTrackAlias(ctx, 1, "Track One") +} + +func TestDeleteTrack(t *testing.T) { + testDataForTracks(t) + ctx := context.Background() + + err := store.DeleteTrack(ctx, 2) + require.NoError(t, err) + + _, err = store.Count(ctx, `SELECT * FROM tracks WHERE id = 2`) + require.ErrorIs(t, err, pgx.ErrNoRows) // no rows error +} diff --git a/internal/db/psql/user.go 
b/internal/db/psql/user.go new file mode 100644 index 0000000..cfc8dc7 --- /dev/null +++ b/internal/db/psql/user.go @@ -0,0 +1,219 @@ +package psql + +import ( + "context" + "errors" + "regexp" + "strings" + "unicode/utf8" + + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/models" + "github.com/gabehf/koito/internal/repository" + "github.com/jackc/pgx/v5" + "golang.org/x/crypto/bcrypt" +) + +// Returns nil, nil when no database entries are found +func (d *Psql) GetUserByUsername(ctx context.Context, username string) (*models.User, error) { + row, err := d.q.GetUserByUsername(ctx, strings.ToLower(username)) + if errors.Is(err, pgx.ErrNoRows) { + return nil, nil + } else if err != nil { + return nil, err + } + return &models.User{ + ID: row.ID, + Username: row.Username, + Password: row.Password, + Role: models.UserRole(row.Role), + }, nil +} + +// Returns nil, nil when no database entries are found +func (d *Psql) GetUserByApiKey(ctx context.Context, key string) (*models.User, error) { + row, err := d.q.GetUserByApiKey(ctx, key) + if errors.Is(err, pgx.ErrNoRows) { + return nil, nil + } else if err != nil { + return nil, err + } + return &models.User{ + ID: row.ID, + Username: row.Username, + Password: row.Password, + Role: models.UserRole(row.Role), + }, nil +} + +func (d *Psql) SaveUser(ctx context.Context, opts db.SaveUserOpts) (*models.User, error) { + l := logger.FromContext(ctx) + err := ValidateUsername(opts.Username) + if err != nil { + l.Debug().AnErr("validator_notice", err).Msgf("Username failed validation: %s", opts.Username) + return nil, err + } + pw, err := ValidateAndNormalizePassword(opts.Password) + if err != nil { + l.Debug().AnErr("validator_notice", err).Msgf("Password failed validation") + return nil, err + } + if opts.Role == "" { + opts.Role = models.UserRoleUser + } + hashPw, err := bcrypt.GenerateFromPassword([]byte(pw), bcrypt.DefaultCost) + if err != nil { + l.Err(err).Msg("Failed to generate hashed password") + return nil, err + } + u, err := d.q.InsertUser(ctx, repository.InsertUserParams{ + Username: strings.ToLower(opts.Username), + Password: hashPw, + Role: repository.Role(opts.Role), + }) + if err != nil { + return nil, err + } + return &models.User{ + ID: u.ID, + Username: u.Username, + Role: models.UserRole(u.Role), + }, nil +} +func (d *Psql) SaveApiKey(ctx context.Context, opts db.SaveApiKeyOpts) (*models.ApiKey, error) { + row, err := d.q.InsertApiKey(ctx, repository.InsertApiKeyParams{ + Key: opts.Key, + Label: opts.Label, + UserID: opts.UserID, + }) + if err != nil { + return nil, err + } + return &models.ApiKey{ + ID: row.ID, + UserID: row.UserID, + Key: row.Key, + Label: row.Label, + CreatedAt: row.CreatedAt.Time, + }, nil +} + +func (d *Psql) UpdateUser(ctx context.Context, opts db.UpdateUserOpts) error { + l := logger.FromContext(ctx) + if opts.ID == 0 { + return errors.New("user id is required") + } + tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + l.Err(err).Msg("Failed to begin transaction") + return err + } + defer tx.Rollback(ctx) + qtx := d.q.WithTx(tx) + if opts.Username != "" { + err := ValidateUsername(opts.Username) + if err != nil { + l.Debug().AnErr("validator_notice", err).Msgf("Username failed validation: %s", opts.Username) + return err + } + err = qtx.UpdateUserUsername(ctx, repository.UpdateUserUsernameParams{ + ID: opts.ID, + Username: opts.Username, + }) + if err != nil { + return err + } + } + if opts.Password != "" { + pw, err := 
ValidateAndNormalizePassword(opts.Password) + if err != nil { + l.Debug().AnErr("validator_notice", err).Msgf("Password failed validation") + return err + } + hashPw, err := bcrypt.GenerateFromPassword([]byte(pw), bcrypt.DefaultCost) + if err != nil { + l.Err(err).Msg("Failed to generate hashed password") + return err + } + err = qtx.UpdateUserPassword(ctx, repository.UpdateUserPasswordParams{ + ID: opts.ID, + Password: hashPw, + }) + if err != nil { + return err + } + } + return tx.Commit(ctx) +} + +func (d *Psql) GetApiKeysByUserID(ctx context.Context, id int32) ([]models.ApiKey, error) { + rows, err := d.q.GetAllApiKeysByUserID(ctx, id) + if err != nil { + return nil, err + } + keys := make([]models.ApiKey, len(rows)) + for i, row := range rows { + keys[i] = models.ApiKey{ + ID: row.ID, + Key: row.Key, + Label: row.Label, + UserID: row.UserID, + } + } + return keys, nil +} + +func (d *Psql) UpdateApiKeyLabel(ctx context.Context, opts db.UpdateApiKeyLabelOpts) error { + return d.q.UpdateApiKeyLabel(ctx, repository.UpdateApiKeyLabelParams{ + ID: opts.ID, + Label: opts.Label, + UserID: opts.UserID, + }) +} + +func (d *Psql) DeleteApiKey(ctx context.Context, id int32) error { + return d.q.DeleteApiKey(ctx, id) +} + +func (d *Psql) CountUsers(ctx context.Context) (int64, error) { + return d.q.CountUsers(ctx) +} + +const ( + maxUsernameLength = 32 + minUsernameLength = 1 + maxPasswordLength = 128 + minPasswordLength = 8 +) + +var usernameRegex = regexp.MustCompile(`^[a-zA-Z0-9_.-]+$`) + +func ValidateUsername(username string) error { + length := utf8.RuneCountInString(username) + if length < minUsernameLength || length > maxUsernameLength { + return errors.New("username must be between 1 and 32 characters") + } + if !usernameRegex.MatchString(username) { + return errors.New("username can only contain [a-zA-Z0-9_.-]") + } + return nil +} + +func ValidateAndNormalizePassword(password string) (string, error) { + length := utf8.RuneCountInString(password) + if length < minPasswordLength { + return "", errors.New("password must be at least 8 characters long") + } + if length > maxPasswordLength { + var truncated []rune + for i, r := range password { + if i >= maxPasswordLength { + break + } + truncated = append(truncated, r) + } + password = string(truncated) + } + return password, nil +} diff --git a/internal/db/psql/user_test.go b/internal/db/psql/user_test.go new file mode 100644 index 0000000..42733b8 --- /dev/null +++ b/internal/db/psql/user_test.go @@ -0,0 +1,199 @@ +package psql_test + +import ( + "context" + "testing" + + "github.com/gabehf/koito/internal/db" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "golang.org/x/crypto/bcrypt" +) + +func setupTestDataForUsers(t *testing.T) { + truncateTestDataForUsers(t) + // Insert additional test users + err := store.Exec(context.Background(), + `INSERT INTO users (username, password, role) + VALUES ('test_user', $1, 'user'), + ('admin_user', $1, 'admin')`, []byte("hashed_password")) + require.NoError(t, err) +} + +func truncateTestDataForUsers(t *testing.T) { + err := store.Exec(context.Background(), + `DELETE FROM users WHERE id NOT IN (1)`, + ) + require.NoError(t, err) + err = store.Exec(context.Background(), + `ALTER SEQUENCE users_id_seq RESTART WITH 2`, + ) + require.NoError(t, err) + err = store.Exec(context.Background(), + `TRUNCATE api_keys RESTART IDENTITY CASCADE`, + ) + require.NoError(t, err) +} + +func TestGetUserByUsername(t *testing.T) { + ctx := context.Background() + setupTestDataForUsers(t) + + 
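The validation rules in user.go are: usernames must be 1 to 32 runes from [a-zA-Z0-9_.-] (and SaveUser stores them lower-cased), and passwords must be at least 8 runes, with anything past 128 silently truncated rather than rejected. One detail worth double-checking is that the truncation loop ranges over byte offsets, so multi-byte passwords may be cut slightly below 128 runes. A small test sketch exercising those boundaries, relying only on the exported psql.ValidateUsername and psql.ValidateAndNormalizePassword above:

```go
// Sketch: boundary checks for the exported validators; could sit alongside
// user_test.go in package psql_test.
package psql_test

import (
	"strings"
	"testing"

	"github.com/gabehf/koito/internal/db/psql"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestValidatorsSketch(t *testing.T) {
	assert.NoError(t, psql.ValidateUsername("listener_01"))
	assert.Error(t, psql.ValidateUsername(""))          // below 1 rune
	assert.Error(t, psql.ValidateUsername("bad name!")) // space and '!' are outside [a-zA-Z0-9_.-]

	_, err := psql.ValidateAndNormalizePassword("short")
	assert.Error(t, err) // under 8 runes

	pw, err := psql.ValidateAndNormalizePassword(strings.Repeat("a", 200))
	require.NoError(t, err)
	assert.Len(t, pw, 128) // over-long input is truncated, not rejected
}
```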
// Test fetching an existing user + user, err := store.GetUserByUsername(ctx, "test_user") + require.NoError(t, err) + require.NotNil(t, user) + assert.Equal(t, "test_user", user.Username) + assert.Equal(t, "user", string(user.Role)) + + // Test fetching a non-existent user + user, err = store.GetUserByUsername(ctx, "nonexistent_user") + require.NoError(t, err) + assert.Nil(t, user) +} + +func TestGetUserByApiKey(t *testing.T) { + ctx := context.Background() + setupTestDataForUsers(t) + + // Insert an API key for the test user + err := store.Exec(ctx, `INSERT INTO api_keys (key, label, user_id) VALUES ('test_key', 'Test Key', 2)`) + require.NoError(t, err) + + // Test fetching a user by API key + user, err := store.GetUserByApiKey(ctx, "test_key") + require.NoError(t, err) + require.NotNil(t, user) + assert.Equal(t, int32(2), user.ID) + assert.Equal(t, "test_user", user.Username) + + // Test fetching a user with a non-existent API key + user, err = store.GetUserByApiKey(ctx, "nonexistent_key") + require.NoError(t, err) + assert.Nil(t, user) +} + +func TestSaveUser(t *testing.T) { + ctx := context.Background() + setupTestDataForUsers(t) + + // Save a new user + opts := db.SaveUserOpts{ + Username: "new_user", + Password: "secure_password", + Role: "user", + } + user, err := store.SaveUser(ctx, opts) + require.NoError(t, err) + require.NotNil(t, user) + assert.Equal(t, "new_user", user.Username) + assert.Equal(t, "user", string(user.Role)) + + // Verify the password was hashed + var hashedPassword []byte + err = store.QueryRow(ctx, `SELECT password FROM users WHERE username = $1`, "new_user").Scan(&hashedPassword) + require.NoError(t, err) + assert.NoError(t, bcrypt.CompareHashAndPassword(hashedPassword, []byte(opts.Password))) + + // Test validation failures + _, err = store.SaveUser(ctx, db.SaveUserOpts{ + Username: "Q!@JH(F_H@#!*HF#*)&@", + Password: "testpassword12345", + }) + assert.Error(t, err) + _, err = store.SaveUser(ctx, db.SaveUserOpts{ + Username: "test_user", + Password: "<3", + }) + assert.Error(t, err) +} + +func TestSaveApiKey(t *testing.T) { + ctx := context.Background() + setupTestDataForUsers(t) + + // Save an API key for the test user + label := "New API Key" + opts := db.SaveApiKeyOpts{ + Key: "new_api_key", + Label: label, + UserID: 2, + } + _, err := store.SaveApiKey(ctx, opts) + require.NoError(t, err) + + // Verify the API key was saved + count, err := store.Count(ctx, `SELECT COUNT(*) FROM api_keys WHERE key = $1 AND user_id = $2`, opts.Key, opts.UserID) + require.NoError(t, err) + assert.Equal(t, 1, count) +} + +func TestGetApiKeysByUserID(t *testing.T) { + ctx := context.Background() + setupTestDataForUsers(t) + + // Insert API keys for the test user + err := store.Exec(ctx, `INSERT INTO api_keys (key, label, user_id) VALUES + ('key1', 'Key 1', 2), + ('key2', 'Key 2', 2)`) + require.NoError(t, err) + + // Fetch API keys for the test user + keys, err := store.GetApiKeysByUserID(ctx, 2) + require.NoError(t, err) + require.Len(t, keys, 2) + assert.Equal(t, "key1", keys[0].Key) + assert.Equal(t, "key2", keys[1].Key) +} + +func TestUpdateApiKeyLabel(t *testing.T) { + ctx := context.Background() + setupTestDataForUsers(t) + + // Insert an API key for the test user + err := store.Exec(ctx, `INSERT INTO api_keys (key, label, user_id) VALUES ('key_to_update', 'Old Label', 2)`) + require.NoError(t, err) + + // Update the API key label + opts := db.UpdateApiKeyLabelOpts{ + ID: 1, + Label: "Updated Label", + UserID: 2, + } + err = store.UpdateApiKeyLabel(ctx, opts) + 
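GetUserByApiKey mirrors the session lookup: a nil, nil return means the key simply does not exist, so callers have to treat a nil user as unauthorized rather than relying on an error. The sketch below shows how a handler might resolve the request user from a key; the Authorization header scheme and the assumption that `db.DB` exposes GetUserByApiKey are illustrative, not taken from this commit.

```go
// Sketch: resolving a user from an API key. The "Token <key>" scheme is assumed.
package auth

import (
	"context"
	"errors"
	"net/http"
	"strings"

	"github.com/gabehf/koito/internal/db"
	"github.com/gabehf/koito/internal/models"
)

func userFromAPIKey(ctx context.Context, store db.DB, r *http.Request) (*models.User, error) {
	header := r.Header.Get("Authorization")
	key := strings.TrimPrefix(header, "Token ") // assumed scheme
	if key == "" || key == header {
		return nil, errors.New("missing or malformed api key")
	}
	user, err := store.GetUserByApiKey(ctx, key)
	if err != nil {
		return nil, err
	}
	if user == nil { // nil, nil: key not found
		return nil, errors.New("invalid api key")
	}
	return user, nil
}
```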
require.NoError(t, err) + + // Verify the label was updated + var label string + err = store.QueryRow(ctx, `SELECT label FROM api_keys WHERE id = $1`, opts.ID).Scan(&label) + require.NoError(t, err) + assert.Equal(t, "Updated Label", label) +} + +func TestDeleteApiKey(t *testing.T) { + ctx := context.Background() + setupTestDataForUsers(t) + + // Insert an API key for the test user + err := store.Exec(ctx, `INSERT INTO api_keys (key, label, user_id) VALUES ('key_to_delete', 'Label', 2)`) + require.NoError(t, err) + + // Delete the API key + err = store.DeleteApiKey(ctx, 1) // Assuming the ID is auto-generated and starts from 1 + require.NoError(t, err) + + // Verify the API key was deleted + count, err := store.Count(ctx, `SELECT COUNT(*) FROM api_keys WHERE id = $1`, 1) + require.NoError(t, err) + assert.Equal(t, 0, count) +} + +func TestCountUsers(t *testing.T) { + ctx := context.Background() + setupTestDataForUsers(t) + + // Count the number of users + count, err := store.Count(ctx, `SELECT COUNT(*) FROM users`) + require.NoError(t, err) + assert.GreaterOrEqual(t, count, 3) // Special user + test users +} diff --git a/internal/db/types.go b/internal/db/types.go new file mode 100644 index 0000000..e5ecb26 --- /dev/null +++ b/internal/db/types.go @@ -0,0 +1,26 @@ +package db + +import ( + "time" +) + +type InformationSource string + +const ( + InformationSourceInferred InformationSource = "Inferred" + InformationSourceMusicBrainz InformationSource = "MusicBrainz" + InformationSourceUserProvided InformationSource = "User" +) + +type ListenActivityItem struct { + Start time.Time `json:"start_time"` + Listens int64 `json:"listens"` +} + +type PaginatedResponse[T any] struct { + Items []T `json:"items"` + TotalCount int64 `json:"total_record_count"` + ItemsPerPage int32 `json:"items_per_page"` + HasNextPage bool `json:"has_next_page"` + CurrentPage int32 `json:"current_page"` +} diff --git a/internal/images/deezer.go b/internal/images/deezer.go new file mode 100644 index 0000000..dfcd394 --- /dev/null +++ b/internal/images/deezer.go @@ -0,0 +1,189 @@ +package images + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/utils" + "github.com/gabehf/koito/queue" +) + +type DeezerClient struct { + url string + userAgent string + requestQueue *queue.RequestQueue +} + +type DeezerAlbumResponse struct { + Data []DeezerAlbum `json:"data"` +} +type DeezerAlbum struct { + Title string `json:"title"` + CoverXL string `json:"cover_xl"` + CoverSm string `json:"cover_small"` + CoverMd string `json:"cover_medium"` + CoverBig string `json:"cover_big"` +} +type DeezerArtistResponse struct { + Data []DeezerArtist `json:"data"` +} +type DeezerArtist struct { + Name string `json:"name"` + PictureXL string `json:"picture_xl"` + PictureSm string `json:"picture_small"` + PictureMd string `json:"picture_medium"` + PictureBig string `json:"picture_big"` +} + +const ( + deezerBaseUrl = "https://api.deezer.com" + albumImageEndpoint = "/search/album?q=%s" + artistImageEndpoint = "/search/artist?q=%s" +) + +func NewDeezerClient(useragent string) *DeezerClient { + ret := new(DeezerClient) + ret.url = deezerBaseUrl + ret.userAgent = useragent + ret.requestQueue = queue.NewRequestQueue(1, 1) + return ret +} + +func (c *DeezerClient) Shutdown() { + c.requestQueue.Shutdown() +} + +func (c *DeezerClient) queue(ctx context.Context, req *http.Request) ([]byte, error) { + l := 
logger.FromContext(ctx) + req.Header.Set("User-Agent", c.userAgent) + req.Header.Set("Accept", "application/json") + + resultChan := c.requestQueue.Enqueue(func(client *http.Client, done chan<- queue.RequestResult) { + resp, err := client.Do(req) + if err != nil { + l.Debug().Err(err).Str("url", req.RequestURI).Msg("Failed to contact ImageSrc") + done <- queue.RequestResult{Err: err} + return + } + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + done <- queue.RequestResult{Body: body, Err: err} + }) + + result := <-resultChan + return result.Body, result.Err +} + +func (c *DeezerClient) getEntity(ctx context.Context, endpoint string, result any) error { + l := logger.FromContext(ctx) + url := deezerBaseUrl + endpoint + l.Debug().Msgf("Sending request to ImageSrc: GET %s", url) + req, err := http.NewRequest("GET", url, nil) + if err != nil { + return err + } + l.Debug().Msg("Adding ImageSrc request to queue") + body, err := c.queue(ctx, req) + if err != nil { + l.Debug().Err(err) + return err + } + + err = json.Unmarshal(body, result) + if err != nil { + l.Debug().Err(err) + return err + } + + return nil +} + +func (c *DeezerClient) GetArtistImages(ctx context.Context, aliases []string) (string, error) { + l := logger.FromContext(ctx) + resp := new(DeezerArtistResponse) + + aliasesUniq := utils.UniqueIgnoringCase(aliases) + aliasesAscii := utils.RemoveNonAscii(aliasesUniq) + + // Deezer very often uses romanized names for foreign artists, so check those first + for _, a := range aliasesAscii { + err := c.getEntity(ctx, fmt.Sprintf(artistImageEndpoint, url.QueryEscape(fmt.Sprintf("artist:\"%s\"", a))), resp) + if err != nil { + return "", err + } + if len(resp.Data) < 1 { + return "", errors.New("artist image not found") + } + for _, v := range resp.Data { + if strings.EqualFold(v.Name, a) { + img := v.PictureXL + l.Debug().Msgf("Found artist images for %s: %v", a, img) + return img, nil + } + } + } + + // if no romanized name exists or couldn't be found, check the rest + for _, a := range utils.RemoveInBoth(aliasesUniq, aliasesAscii) { + err := c.getEntity(ctx, fmt.Sprintf(artistImageEndpoint, url.QueryEscape(fmt.Sprintf("artist:\"%s\"", a))), resp) + if err != nil { + return "", err + } + if len(resp.Data) < 1 { + return "", errors.New("artist image not found") + } + for _, v := range resp.Data { + if strings.EqualFold(v.Name, a) { + img := v.PictureXL + l.Debug().Msgf("Found artist images for %s: %v", a, img) + return img, nil + } + } + } + return "", errors.New("artist image not found") +} + +func (c *DeezerClient) GetAlbumImages(ctx context.Context, artists []string, album string) (string, error) { + l := logger.FromContext(ctx) + resp := new(DeezerAlbumResponse) + l.Debug().Msgf("Finding album image for %s from artist(s) %v", album, artists) + // try to find artist + album match for all artists + for _, alias := range artists { + err := c.getEntity(ctx, fmt.Sprintf(albumImageEndpoint, url.QueryEscape(fmt.Sprintf("artist:\"%s\"album:\"%s\"", alias, album))), resp) + if err != nil { + return "", err + } + if len(resp.Data) > 0 { + for _, v := range resp.Data { + if strings.EqualFold(v.Title, album) { + img := v.CoverXL + l.Debug().Msgf("Found album images for %s: %v", album, img) + return img, nil + } + } + } + } + + // if none are found, try to find an album just by album title + err := c.getEntity(ctx, fmt.Sprintf(albumImageEndpoint, url.QueryEscape(fmt.Sprintf("album:\"%s\"", album))), resp) + if err != nil { + return "", err + } + for _, v := range resp.Data { + if 
strings.EqualFold(v.Title, album) { + img := v.CoverXL + l.Debug().Msgf("Found album images for %s: %v", album, img) + return img, nil + } + } + + return "", errors.New("album image not found") +} diff --git a/internal/images/imagesrc.go b/internal/images/imagesrc.go new file mode 100644 index 0000000..912e9c3 --- /dev/null +++ b/internal/images/imagesrc.go @@ -0,0 +1,103 @@ +// package imagesrc defines interfaces for album and artist image providers +package images + +import ( + "context" + "fmt" + "net/http" + "sync" + + "github.com/gabehf/koito/internal/logger" + "github.com/google/uuid" +) + +type ImageSource struct { + deezerEnabled bool + deezerC *DeezerClient + caaEnabled bool +} +type ImageSourceOpts struct { + UserAgent string + EnableCAA bool + EnableDeezer bool +} + +var once sync.Once +var imgsrc ImageSource + +type ArtistImageOpts struct { + Aliases []string +} + +type AlbumImageOpts struct { + Artists []string + Album string + ReleaseMbzID *uuid.UUID + ReleaseGroupMbzID *uuid.UUID +} + +const caaBaseUrl = "https://coverartarchive.org" + +// all functions are no-op if no providers are enabled +func Initialize(opts ImageSourceOpts) { + once.Do(func() { + if opts.EnableCAA { + imgsrc.caaEnabled = true + } + if opts.EnableDeezer { + imgsrc.deezerEnabled = true + imgsrc.deezerC = NewDeezerClient(opts.UserAgent) + } + }) +} + +func GetArtistImage(ctx context.Context, opts ArtistImageOpts) (string, error) { + l := logger.FromContext(ctx) + if imgsrc.deezerC != nil { + img, err := imgsrc.deezerC.GetArtistImages(ctx, opts.Aliases) + if err != nil { + return "", err + } + return img, nil + } + l.Warn().Msg("No image providers are enabled") + return "", nil +} +func GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) { + l := logger.FromContext(ctx) + if imgsrc.caaEnabled { + l.Debug().Msg("Attempting to find album image from CoverArtArchive") + if opts.ReleaseMbzID != nil && *opts.ReleaseMbzID != uuid.Nil { + url := fmt.Sprintf(caaBaseUrl+"/release/%s/front", opts.ReleaseMbzID.String()) + resp, err := http.DefaultClient.Head(url) + if err != nil { + return "", err + } + if resp.StatusCode == 200 { + return url, nil + } + l.Debug().Str("url", url).Str("status", resp.Status).Msg("Could not find album cover from CoverArtArchive with MusicBrainz release ID") + } + if opts.ReleaseGroupMbzID != nil && *opts.ReleaseGroupMbzID != uuid.Nil { + url := fmt.Sprintf(caaBaseUrl+"/release-group/%s/front", opts.ReleaseGroupMbzID.String()) + resp, err := http.DefaultClient.Head(url) + if err != nil { + return "", err + } + if resp.StatusCode == 200 { + return url, nil + } + l.Debug().Str("url", url).Str("status", resp.Status).Msg("Could not find album cover from CoverArtArchive with MusicBrainz release group ID") + } + } + if imgsrc.deezerEnabled { + l.Debug().Msg("Attempting to find album image from Deezer") + img, err := imgsrc.deezerC.GetAlbumImages(ctx, opts.Artists, opts.Album) + if err != nil { + return "", err + } + return img, nil + } + l.Warn().Msg("No image providers are enabled") + return "", nil +} diff --git a/internal/images/mock.go b/internal/images/mock.go new file mode 100644 index 0000000..9b19abd --- /dev/null +++ b/internal/images/mock.go @@ -0,0 +1,28 @@ +package images + +import ( + "context" + "errors" +) + +type MockFinder struct{} + +func (m *MockFinder) GetArtistImage(ctx context.Context, opts ArtistImageOpts) (string, error) { + return "", nil +} + +func (m *MockFinder) GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) { + return "", 
+func (m *MockFinder) Shutdown() {} + +type ErrorFinder struct{} + +func (m *ErrorFinder) GetArtistImage(ctx context.Context, opts ArtistImageOpts) (string, error) { + return "", errors.New("mock error") +} + +func (m *ErrorFinder) GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) { + return "", errors.New("mock error") +} +func (m *ErrorFinder) Shutdown() {} diff --git a/internal/importer/importer.go b/internal/importer/importer.go new file mode 100644 index 0000000..ead7eca --- /dev/null +++ b/internal/importer/importer.go @@ -0,0 +1 @@ +package importer diff --git a/internal/importer/maloja.go b/internal/importer/maloja.go new file mode 100644 index 0000000..392f254 --- /dev/null +++ b/internal/importer/maloja.go @@ -0,0 +1,90 @@ +package importer + +import ( + "context" + "encoding/json" + "os" + "path" + "strings" + "time" + + "github.com/gabehf/koito/internal/catalog" + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/mbz" + "github.com/gabehf/koito/internal/utils" +) + +type MalojaExport struct { + Scrobbles []MalojaExportItem `json:"scrobbles"` +} +type MalojaExportItem struct { + Time int64 `json:"time"` + Track MalojaTrack `json:"track"` +} +type MalojaTrack struct { + Artists []string `json:"artists"` + Title string `json:"title"` + Album struct { + Title string `json:"albumtitle"` + } `json:"album"` +} + +func ImportMalojaFile(ctx context.Context, store db.DB, filename string) error { + l := logger.FromContext(ctx) + l.Info().Msgf("Beginning maloja import on file: %s", filename) + file, err := os.Open(path.Join(cfg.ConfigDir(), "import", filename)) + if err != nil { + l.Err(err).Msgf("Failed to read import file: %s", filename) + return err + } + export := new(MalojaExport) + err = json.NewDecoder(file).Decode(&export) + if err != nil { + return err + } + for _, item := range export.Scrobbles { + martists := make([]string, 0) + // Maloja has a tendency to have the artist order ['feature', 'main \u2022 feature'], so + // here we try to turn that artist array into ['main', 'feature'] + item.Track.Artists = utils.MoveFirstMatchToFront(item.Track.Artists, " \u2022 ") + for _, an := range item.Track.Artists { + ans := strings.Split(an, " \u2022 ") + martists = append(martists, ans...) + } + artists := utils.UniqueIgnoringCase(martists) + if len(item.Track.Artists) < 1 || item.Track.Title == "" { + l.Debug().Msg("Skipping invalid maloja import item") + continue + } + ts := time.Unix(item.Time, 0) + opts := catalog.SubmitListenOpts{ + MbzCaller: &mbz.MusicBrainzClient{}, + Artist: item.Track.Artists[0], + ArtistNames: artists, + TrackTitle: item.Track.Title, + ReleaseTitle: item.Track.Album.Title, + Time: ts, + UserID: 1, + } + err = catalog.SubmitListen(ctx, store, opts) + if err != nil { + l.Err(err).Msg("Failed to import maloja playback item") + return err + } + } + _, err = os.Stat(path.Join(cfg.ConfigDir(), "import_complete")) + if err != nil { + err = os.Mkdir(path.Join(cfg.ConfigDir(), "import_complete"), 0744) + if err != nil { + l.Err(err).Msg("Failed to create import_complete dir! Import files must be removed from the import directory manually, or else the importer will run on every app start") + } + } + err = os.Rename(path.Join(cfg.ConfigDir(), "import", filename), path.Join(cfg.ConfigDir(), "import_complete", filename)) + if err != nil { + l.Err(err).Msg("Failed to move file to import_complete dir! 
Import files must be removed from the import directory manually, or else the importer will run on every app start") + } + l.Info().Msgf("Finished importing %s; imported %d items", filename, len(export.Scrobbles)) + return nil +} diff --git a/internal/importer/spotify.go b/internal/importer/spotify.go new file mode 100644 index 0000000..d79feed --- /dev/null +++ b/internal/importer/spotify.go @@ -0,0 +1,76 @@ +package importer + +import ( + "context" + "encoding/json" + "os" + "path" + "time" + + "github.com/gabehf/koito/internal/catalog" + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/db" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/internal/mbz" +) + +type SpotifyExportItem struct { + Timestamp time.Time `json:"ts"` + TrackName string `json:"master_metadata_track_name"` + ArtistName string `json:"master_metadata_album_artist_name"` + AlbumName string `json:"master_metadata_album_album_name"` + ReasonEnd string `json:"reason_end"` + MsPlayed int32 `json:"ms_played"` +} + +func ImportSpotifyFile(ctx context.Context, store db.DB, filename string) error { + l := logger.FromContext(ctx) + l.Info().Msgf("Beginning spotify import on file: %s", filename) + file, err := os.Open(path.Join(cfg.ConfigDir(), "import", filename)) + if err != nil { + l.Err(err).Msgf("Failed to read import file: %s", filename) + return err + } + export := make([]SpotifyExportItem, 0) + err = json.NewDecoder(file).Decode(&export) + if err != nil { + return err + } + for _, item := range export { + if item.ReasonEnd != "trackdone" { + continue + } + dur := item.MsPlayed + if item.TrackName == "" || item.ArtistName == "" { + l.Debug().Msg("Skipping non-track item") + continue + } + opts := catalog.SubmitListenOpts{ + MbzCaller: &mbz.MusicBrainzClient{}, + Artist: item.ArtistName, + TrackTitle: item.TrackName, + ReleaseTitle: item.AlbumName, + Duration: dur / 1000, + Time: item.Timestamp, + UserID: 1, + } + err = catalog.SubmitListen(ctx, store, opts) + if err != nil { + l.Err(err).Msg("Failed to import spotify playback item") + return err + } + } + _, err = os.Stat(path.Join(cfg.ConfigDir(), "import_complete")) + if err != nil { + err = os.Mkdir(path.Join(cfg.ConfigDir(), "import_complete"), 0744) + if err != nil { + l.Err(err).Msg("Failed to create import_complete dir! Import files must be removed from the import directory manually, or else the importer will run on every app start") + } + } + err = os.Rename(path.Join(cfg.ConfigDir(), "import", filename), path.Join(cfg.ConfigDir(), "import_complete", filename)) + if err != nil { + l.Err(err).Msg("Failed to move file to import_complete dir! Import files must be removed from the import directory manually, or else the importer will run on every app start") + } + l.Info().Msgf("Finished importing %s; imported %d items", filename, len(export)) + return nil +} diff --git a/internal/logger/logger.go b/internal/logger/logger.go new file mode 100644 index 0000000..11d9c6c --- /dev/null +++ b/internal/logger/logger.go @@ -0,0 +1,57 @@ +package logger + +import ( + "context" + "net/http" + "os" + "sync" + + "github.com/gabehf/koito/internal/cfg" + "github.com/rs/zerolog" +) + +var once sync.Once +var logger zerolog.Logger + +// Define a key type to avoid context key collisions +type contextKey string + +const loggerKey contextKey = "logger" + +func Get() *zerolog.Logger { + once.Do(func() { + zerolog.TimeFieldFormat = zerolog.TimeFormatUnixMs + + logLevel := cfg.LogLevel() + + logger = zerolog.New(os.Stdout). 
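// Illustrative sketch: how the two importers above might be driven for files dropped into
// <config dir>/import. The directory-scanning caller and the filenames are assumptions; the
// actual wiring elsewhere in the codebase may differ.
package sketches

import (
	"context"

	"github.com/gabehf/koito/internal/db"
	"github.com/gabehf/koito/internal/importer"
)

// runImports imports one Maloja export and one Spotify extended-history file by name.
// Both importers resolve the path relative to the configured import directory and move
// finished files into import_complete so they are not re-imported on the next start.
func runImports(ctx context.Context, store db.DB) error {
	if err := importer.ImportMalojaFile(ctx, store, "maloja_export.json"); err != nil { // hypothetical filename
		return err
	}
	return importer.ImportSpotifyFile(ctx, store, "Streaming_History_Audio.json") // hypothetical filename
}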
+ Level(zerolog.Level(logLevel)). + With(). + Timestamp(). + // Caller(). + Logger() + }) + return &logger +} + +// injects the logger into context +func Inject(r *http.Request, l *zerolog.Logger) *http.Request { + ctx := context.WithValue(r.Context(), loggerKey, l) + r = r.WithContext(ctx) + return r +} + +func NewContext(l *zerolog.Logger) context.Context { + ctx := context.WithValue(context.Background(), loggerKey, l) + return ctx +} + +// retrieves the logger from context +func FromContext(ctx context.Context) *zerolog.Logger { + logger, ok := ctx.Value(loggerKey).(*zerolog.Logger) + if !ok || logger == nil { + defaultLogger := zerolog.New(os.Stdout) + return &defaultLogger + } + return logger +} diff --git a/internal/mbz/artist.go b/internal/mbz/artist.go new file mode 100644 index 0000000..e35f625 --- /dev/null +++ b/internal/mbz/artist.go @@ -0,0 +1,57 @@ +package mbz + +import ( + "context" + "errors" + "slices" + + "github.com/gabehf/koito/internal/logger" + "github.com/google/uuid" +) + +type MusicBrainzArtist struct { + Name string `json:"name"` + SortName string `json:"sort_name"` + Gender string `json:"gender"` + Area MusicBrainzArea `json:"area"` + Aliases []MusicBrainzArtistAlias `json:"aliases"` +} +type MusicBrainzArtistAlias struct { + Name string `json:"name"` + Type string `json:"type"` + Primary bool `json:"primary"` +} + +const artistAliasFmtStr = "%s/ws/2/artist/%s?inc=aliases" + +func (c *MusicBrainzClient) getArtist(ctx context.Context, id uuid.UUID) (*MusicBrainzArtist, error) { + mbzArtist := new(MusicBrainzArtist) + err := c.getEntity(ctx, artistAliasFmtStr, id, mbzArtist) + if err != nil { + return nil, err + } + return mbzArtist, nil +} + +// Returns the artist name at index 0, and all primary aliases after. +func (c *MusicBrainzClient) GetArtistPrimaryAliases(ctx context.Context, id uuid.UUID) ([]string, error) { + l := logger.FromContext(ctx) + artist, err := c.getArtist(ctx, id) + if err != nil { + return nil, err + } + if artist == nil { + return nil, errors.New("artist could not be found by musicbrainz") + } + used := make(map[string]bool) + ret := make([]string, 1) + ret[0] = artist.Name + used[artist.Name] = true + for _, alias := range artist.Aliases { + if alias.Primary && !slices.Contains(ret, alias.Name) { + l.Debug().Msgf("Found primary alias '%s' for artist '%s'", alias.Name, artist.Name) + ret = append(ret, alias.Name) + } + } + return ret, nil +} diff --git a/internal/mbz/mbz.go b/internal/mbz/mbz.go new file mode 100644 index 0000000..873fd13 --- /dev/null +++ b/internal/mbz/mbz.go @@ -0,0 +1,92 @@ +// package mbz provides functions for interacting with the musicbrainz api +package mbz + +import ( + "context" + "encoding/json" + "fmt" + "io" + "net/http" + + "github.com/gabehf/koito/internal/cfg" + "github.com/gabehf/koito/internal/logger" + "github.com/gabehf/koito/queue" + "github.com/google/uuid" +) + +type MusicBrainzArea struct { + Name string `json:"name"` + Iso3166_1Codes []string `json:"iso-3166-1-codes"` +} + +type MusicBrainzClient struct { + url string + userAgent string + requestQueue *queue.RequestQueue +} + +type MusicBrainzCaller interface { + GetArtistPrimaryAliases(ctx context.Context, id uuid.UUID) ([]string, error) + GetReleaseTitles(ctx context.Context, RGID uuid.UUID) ([]string, error) + GetTrack(ctx context.Context, id uuid.UUID) (*MusicBrainzTrack, error) + GetReleaseGroup(ctx context.Context, id uuid.UUID) (*MusicBrainzReleaseGroup, error) + GetRelease(ctx context.Context, id uuid.UUID) (*MusicBrainzRelease, error) + 
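// Illustrative sketch: the request-scoped logging flow defined in internal/logger above.
// A middleware injects the logger and handlers retrieve it from the context; the handler
// and log fields below are assumptions for illustration only.
package sketches

import (
	"net/http"

	"github.com/gabehf/koito/internal/logger"
)

func loggingMiddleware(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		l := logger.Get()
		// Inject returns a request whose context carries the logger.
		next.ServeHTTP(w, logger.Inject(r, l))
	})
}

func exampleHandler(w http.ResponseWriter, r *http.Request) {
	// FromContext falls back to a bare stdout logger if nothing was injected.
	l := logger.FromContext(r.Context())
	l.Debug().Str("path", r.URL.Path).Msg("handling request")
	w.WriteHeader(http.StatusNoContent)
}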
Shutdown() +} + +func NewMusicBrainzClient() *MusicBrainzClient { + ret := new(MusicBrainzClient) + ret.url = cfg.MusicBrainzUrl() + ret.userAgent = "Koito/0.0.1 (contact@koito.app)" + ret.requestQueue = queue.NewRequestQueue(1, 1) + return ret +} + +func (c *MusicBrainzClient) Shutdown() { + c.requestQueue.Shutdown() +} + +func (c *MusicBrainzClient) getEntity(ctx context.Context, fmtStr string, id uuid.UUID, result any) error { + l := logger.FromContext(ctx) + url := fmt.Sprintf(fmtStr, c.url, id.String()) + req, err := http.NewRequest("GET", url, nil) + if err != nil { + return err + } + l.Debug().Msg("Adding MusicBrainz request to queue") + body, err := c.queue(ctx, req) + if err != nil { + l.Debug().Err(err) + return err + } + + err = json.Unmarshal(body, result) + if err != nil { + l.Debug().Err(err) + return err + } + + return nil +} + +func (c *MusicBrainzClient) queue(ctx context.Context, req *http.Request) ([]byte, error) { + l := logger.FromContext(ctx) + req.Header.Set("User-Agent", c.userAgent) + req.Header.Set("Accept", "application/json") + + resultChan := c.requestQueue.Enqueue(func(client *http.Client, done chan<- queue.RequestResult) { + resp, err := client.Do(req) + if err != nil { + l.Debug().Err(err).Str("url", req.RequestURI).Msg("Failed to contact MusicBrainz") + done <- queue.RequestResult{Err: err} + return + } + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + done <- queue.RequestResult{Body: body, Err: err} + }) + + result := <-resultChan + return result.Body, result.Err +} diff --git a/internal/mbz/mock.go b/internal/mbz/mock.go new file mode 100644 index 0000000..b70e237 --- /dev/null +++ b/internal/mbz/mock.go @@ -0,0 +1,93 @@ +package mbz + +import ( + "context" + "fmt" + "slices" + + "github.com/google/uuid" +) + +// implements a mock caller + +type MbzMockCaller struct { + Artists map[uuid.UUID]*MusicBrainzArtist + ReleaseGroups map[uuid.UUID]*MusicBrainzReleaseGroup + Releases map[uuid.UUID]*MusicBrainzRelease + Tracks map[uuid.UUID]*MusicBrainzTrack +} + +func (m *MbzMockCaller) GetReleaseGroup(ctx context.Context, id uuid.UUID) (*MusicBrainzReleaseGroup, error) { + releaseGroup, exists := m.ReleaseGroups[id] + if !exists { + return nil, fmt.Errorf("release group with ID %s not found", id) + } + return releaseGroup, nil +} + +func (m *MbzMockCaller) GetRelease(ctx context.Context, id uuid.UUID) (*MusicBrainzRelease, error) { + release, exists := m.Releases[id] + if !exists { + return nil, fmt.Errorf("release group with ID %s not found", id) + } + return release, nil +} + +func (m *MbzMockCaller) GetReleaseTitles(ctx context.Context, RGID uuid.UUID) ([]string, error) { + rg, exists := m.ReleaseGroups[RGID] + if !exists { + return nil, fmt.Errorf("release with ID %s not found", RGID) + } + + var titles []string + for _, release := range rg.Releases { + if !slices.Contains(titles, release.Title) { + titles = append(titles, release.Title) + } + } + return titles, nil +} + +func (m *MbzMockCaller) GetTrack(ctx context.Context, id uuid.UUID) (*MusicBrainzTrack, error) { + track, exists := m.Tracks[id] + if !exists { + return nil, fmt.Errorf("track with ID %s not found", id) + } + return track, nil +} + +func (m *MbzMockCaller) GetArtistPrimaryAliases(ctx context.Context, id uuid.UUID) ([]string, error) { + name := m.Artists[id].Name + ss := make([]string, len(m.Artists[id].Aliases)+1) + ss[0] = name + for i, alias := range m.Artists[id].Aliases { + ss[i+1] = alias.Name + } + return ss, nil +} + +func (m *MbzMockCaller) Shutdown() {} + +type 
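// Illustrative sketch: calling the MusicBrainz client above. Requests pass through the
// client's internal request queue, so callers simply make blocking calls; the artist MBID
// below is a placeholder.
package sketches

import (
	"context"
	"fmt"

	"github.com/gabehf/koito/internal/mbz"
	"github.com/google/uuid"
)

func exampleAliases(ctx context.Context) error {
	client := mbz.NewMusicBrainzClient()
	defer client.Shutdown() // releases the request queue

	id := uuid.MustParse("00000000-0000-0000-0000-000000000002") // hypothetical artist MBID
	aliases, err := client.GetArtistPrimaryAliases(ctx, id)
	if err != nil {
		return err
	}
	// Index 0 is the canonical name; any primary aliases follow.
	fmt.Println(aliases)
	return nil
}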
MbzErrorCaller struct{} + +func (m *MbzErrorCaller) GetReleaseGroup(ctx context.Context, id uuid.UUID) (*MusicBrainzReleaseGroup, error) { + return nil, fmt.Errorf("error: GetReleaseGroup not implemented") +} + +func (m *MbzErrorCaller) GetRelease(ctx context.Context, id uuid.UUID) (*MusicBrainzRelease, error) { + return nil, fmt.Errorf("error: GetRelease not implemented") +} + +func (m *MbzErrorCaller) GetReleaseTitles(ctx context.Context, RGID uuid.UUID) ([]string, error) { + return nil, fmt.Errorf("error: GetReleaseTitles not implemented") +} + +func (m *MbzErrorCaller) GetTrack(ctx context.Context, id uuid.UUID) (*MusicBrainzTrack, error) { + return nil, fmt.Errorf("error: GetTrack not implemented") +} + +func (m *MbzErrorCaller) GetArtistPrimaryAliases(ctx context.Context, id uuid.UUID) ([]string, error) { + return nil, fmt.Errorf("error: GetArtistPrimaryAliases not implemented") +} + +func (m *MbzErrorCaller) Shutdown() {} diff --git a/internal/mbz/release.go b/internal/mbz/release.go new file mode 100644 index 0000000..594e576 --- /dev/null +++ b/internal/mbz/release.go @@ -0,0 +1,92 @@ +package mbz + +import ( + "context" + "slices" + + "github.com/google/uuid" +) + +type MusicBrainzReleaseGroup struct { + Title string `json:"title"` + Type string `json:"primary_type"` + ArtistCredit []MusicBrainzArtistCredit `json:"artist-credit"` + Releases []MusicBrainzRelease `json:"releases"` +} +type MusicBrainzRelease struct { + Title string `json:"title"` + ID string `json:"id"` + ArtistCredit []MusicBrainzArtistCredit `json:"artist-credit"` + Status string `json:"status"` + TextRepresentation TextRepresentation `json:"text-representation"` +} +type MusicBrainzArtistCredit struct { + Artist MusicBrainzArtist `json:"artist"` + Name string `json:"name"` +} +type TextRepresentation struct { + Language string `json:"language"` + Script string `json:"script"` +} + +const releaseGroupFmtStr = "%s/ws/2/release-group/%s?inc=releases+artists" +const releaseFmtStr = "%s/ws/2/release/%s?inc=artists" + +func (c *MusicBrainzClient) GetReleaseGroup(ctx context.Context, id uuid.UUID) (*MusicBrainzReleaseGroup, error) { + mbzRG := new(MusicBrainzReleaseGroup) + err := c.getEntity(ctx, releaseGroupFmtStr, id, mbzRG) + if err != nil { + return nil, err + } + return mbzRG, nil +} + +func (c *MusicBrainzClient) GetRelease(ctx context.Context, id uuid.UUID) (*MusicBrainzRelease, error) { + mbzRelease := new(MusicBrainzRelease) + err := c.getEntity(ctx, releaseFmtStr, id, mbzRelease) + if err != nil { + return nil, err + } + return mbzRelease, nil +} + +func (c *MusicBrainzClient) GetReleaseTitles(ctx context.Context, RGID uuid.UUID) ([]string, error) { + releaseGroup, err := c.GetReleaseGroup(ctx, RGID) + if err != nil { + return nil, err + } + + var titles []string + for _, release := range releaseGroup.Releases { + if !slices.Contains(titles, release.Title) { + titles = append(titles, release.Title) + } + } + + return titles, nil +} + +func ReleaseGroupToTitles(rg *MusicBrainzReleaseGroup) []string { + var titles []string + for _, release := range rg.Releases { + if !slices.Contains(titles, release.Title) { + titles = append(titles, release.Title) + } + } + return titles +} + +// Searches for Pseudo-Releases of release groups with Latin script, and returns them as an array +func (c *MusicBrainzClient) GetLatinTitles(ctx context.Context, id uuid.UUID) ([]string, error) { + rg, err := c.GetReleaseGroup(ctx, id) + if err != nil { + return nil, err + } + titles := make([]string, 0) + for _, r := range rg.Releases 
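// Illustrative sketch: using the mock and error callers above in a test. The artist data is
// made up; anything that accepts the MusicBrainzCaller interface can be handed either
// implementation to exercise success and failure paths.
package sketches

import (
	"context"
	"testing"

	"github.com/gabehf/koito/internal/mbz"
	"github.com/google/uuid"
)

func TestAliasesFromMock(t *testing.T) {
	id := uuid.New()
	mock := &mbz.MbzMockCaller{
		Artists: map[uuid.UUID]*mbz.MusicBrainzArtist{
			id: {
				Name:    "Artist Example",
				Aliases: []mbz.MusicBrainzArtistAlias{{Name: "AE", Primary: true}},
			},
		},
	}
	var caller mbz.MusicBrainzCaller = mock // compiles only if the mock satisfies the interface
	aliases, err := caller.GetArtistPrimaryAliases(context.Background(), id)
	if err != nil || len(aliases) != 2 || aliases[0] != "Artist Example" {
		t.Fatalf("unexpected aliases: %v (err: %v)", aliases, err)
	}
}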
{ + if r.Status == "Pseudo-Release" && r.TextRepresentation.Script == "Latn" { // not a typo + titles = append(titles, r.Title) + } + } + return titles, nil +} diff --git a/internal/mbz/track.go b/internal/mbz/track.go new file mode 100644 index 0000000..6998a9f --- /dev/null +++ b/internal/mbz/track.go @@ -0,0 +1,23 @@ +package mbz + +import ( + "context" + + "github.com/google/uuid" +) + +type MusicBrainzTrack struct { + Title string `json:"title"` +} + +const recordingFmtStr = "%s/ws/2/recording/%s" + +// Returns the artist name at index 0, and all primary aliases after. +func (c *MusicBrainzClient) GetTrack(ctx context.Context, id uuid.UUID) (*MusicBrainzTrack, error) { + track := new(MusicBrainzTrack) + err := c.getEntity(ctx, recordingFmtStr, id, track) + if err != nil { + return nil, err + } + return track, nil +} diff --git a/internal/models/album.go b/internal/models/album.go new file mode 100644 index 0000000..90b8cdd --- /dev/null +++ b/internal/models/album.go @@ -0,0 +1,20 @@ +package models + +import "github.com/google/uuid" + +type Album struct { + ID int32 `json:"id"` + MbzID *uuid.UUID `json:"musicbrainz_id"` + Title string `json:"title"` + Image *uuid.UUID `json:"image"` + Artists []SimpleArtist `json:"artists"` + VariousArtists bool `json:"is_various_artists"` + ListenCount int64 `json:"listen_count"` +} + +// type SimpleAlbum struct { +// ID int32 `json:"id"` +// Title string `json:"title"` +// VariousArtists bool `json:"is_various_artists"` +// Image uuid.UUID `json:"image"` +// } diff --git a/internal/models/alias.go b/internal/models/alias.go new file mode 100644 index 0000000..a263af6 --- /dev/null +++ b/internal/models/alias.go @@ -0,0 +1,8 @@ +package models + +type Alias struct { + ID int32 `json:"id"` + Alias string `json:"alias"` + Source string `json:"source"` + Primary bool `json:"is_primary"` +} diff --git a/internal/models/artist.go b/internal/models/artist.go new file mode 100644 index 0000000..b240370 --- /dev/null +++ b/internal/models/artist.go @@ -0,0 +1,17 @@ +package models + +import "github.com/google/uuid" + +type Artist struct { + ID int32 `json:"id"` + MbzID *uuid.UUID `json:"musicbrainz_id"` + Name string `json:"name"` + Aliases []string `json:"aliases"` + Image *uuid.UUID `json:"image"` + ListenCount int64 `json:"listen_count"` +} + +type SimpleArtist struct { + ID int32 `json:"id"` + Name string `json:"name"` +} diff --git a/internal/models/listen.go b/internal/models/listen.go new file mode 100644 index 0000000..24c6db4 --- /dev/null +++ b/internal/models/listen.go @@ -0,0 +1,11 @@ +package models + +import ( + "time" +) + +// a Listen is the same thing as a 'scrobble' but i despise the word scrobble so i will not use it +type Listen struct { + Time time.Time `json:"time"` + Track Track `json:"track"` +} diff --git a/internal/models/track.go b/internal/models/track.go new file mode 100644 index 0000000..386a2fc --- /dev/null +++ b/internal/models/track.go @@ -0,0 +1,14 @@ +package models + +import "github.com/google/uuid" + +type Track struct { + ID int32 `json:"id"` + Title string `json:"title"` + Artists []SimpleArtist `json:"artists"` + MbzID *uuid.UUID `json:"musicbrainz_id"` + ListenCount int64 `json:"listen_count"` + Duration int32 `json:"duration"` + Image *uuid.UUID `json:"image"` + AlbumID int32 `json:"album_id"` +} diff --git a/internal/models/user.go b/internal/models/user.go new file mode 100644 index 0000000..43e1198 --- /dev/null +++ b/internal/models/user.go @@ -0,0 +1,37 @@ +package models + +import ( + "time" + + 
"github.com/google/uuid" +) + +type UserRole string + +const ( + UserRoleUser UserRole = "user" + UserRoleAdmin UserRole = "admin" +) + +type User struct { + ID int32 `json:"id"` + Username string `json:"username"` + Role UserRole `json:"role"` // 'admin' | 'user' + Password []byte `json:"-"` +} + +type ApiKey struct { + ID int32 `json:"id"` + Key string `json:"key"` + Label string `json:"label"` + UserID int32 `json:"user_id"` + CreatedAt time.Time `json:"created_at"` +} + +type Session struct { + ID uuid.UUID + UserID int32 + CreatedAt time.Time + ExpiresAt time.Time + Persistent bool +} diff --git a/internal/repository/alias.sql.go b/internal/repository/alias.sql.go new file mode 100644 index 0000000..c40ce22 --- /dev/null +++ b/internal/repository/alias.sql.go @@ -0,0 +1,316 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.29.0 +// source: alias.sql + +package repository + +import ( + "context" +) + +const deleteArtistAlias = `-- name: DeleteArtistAlias :exec +DELETE FROM artist_aliases +WHERE artist_id = $1 +AND alias = $2 +AND is_primary = false +` + +type DeleteArtistAliasParams struct { + ArtistID int32 + Alias string +} + +func (q *Queries) DeleteArtistAlias(ctx context.Context, arg DeleteArtistAliasParams) error { + _, err := q.db.Exec(ctx, deleteArtistAlias, arg.ArtistID, arg.Alias) + return err +} + +const deleteReleaseAlias = `-- name: DeleteReleaseAlias :exec +DELETE FROM release_aliases +WHERE release_id = $1 +AND alias = $2 +AND is_primary = false +` + +type DeleteReleaseAliasParams struct { + ReleaseID int32 + Alias string +} + +func (q *Queries) DeleteReleaseAlias(ctx context.Context, arg DeleteReleaseAliasParams) error { + _, err := q.db.Exec(ctx, deleteReleaseAlias, arg.ReleaseID, arg.Alias) + return err +} + +const deleteTrackAlias = `-- name: DeleteTrackAlias :exec +DELETE FROM track_aliases +WHERE track_id = $1 +AND alias = $2 +AND is_primary = false +` + +type DeleteTrackAliasParams struct { + TrackID int32 + Alias string +} + +func (q *Queries) DeleteTrackAlias(ctx context.Context, arg DeleteTrackAliasParams) error { + _, err := q.db.Exec(ctx, deleteTrackAlias, arg.TrackID, arg.Alias) + return err +} + +const getAllArtistAliases = `-- name: GetAllArtistAliases :many +SELECT artist_id, alias, source, is_primary FROM artist_aliases +WHERE artist_id = $1 ORDER BY is_primary DESC +` + +func (q *Queries) GetAllArtistAliases(ctx context.Context, artistID int32) ([]ArtistAlias, error) { + rows, err := q.db.Query(ctx, getAllArtistAliases, artistID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ArtistAlias + for rows.Next() { + var i ArtistAlias + if err := rows.Scan( + &i.ArtistID, + &i.Alias, + &i.Source, + &i.IsPrimary, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getAllReleaseAliases = `-- name: GetAllReleaseAliases :many +SELECT release_id, alias, source, is_primary FROM release_aliases +WHERE release_id = $1 ORDER BY is_primary DESC +` + +func (q *Queries) GetAllReleaseAliases(ctx context.Context, releaseID int32) ([]ReleaseAlias, error) { + rows, err := q.db.Query(ctx, getAllReleaseAliases, releaseID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ReleaseAlias + for rows.Next() { + var i ReleaseAlias + if err := rows.Scan( + &i.ReleaseID, + &i.Alias, + &i.Source, + &i.IsPrimary, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err 
!= nil { + return nil, err + } + return items, nil +} + +const getAllTrackAliases = `-- name: GetAllTrackAliases :many +SELECT track_id, alias, is_primary, source FROM track_aliases +WHERE track_id = $1 ORDER BY is_primary DESC +` + +func (q *Queries) GetAllTrackAliases(ctx context.Context, trackID int32) ([]TrackAlias, error) { + rows, err := q.db.Query(ctx, getAllTrackAliases, trackID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []TrackAlias + for rows.Next() { + var i TrackAlias + if err := rows.Scan( + &i.TrackID, + &i.Alias, + &i.IsPrimary, + &i.Source, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getArtistAlias = `-- name: GetArtistAlias :one +SELECT artist_id, alias, source, is_primary FROM artist_aliases +WHERE alias = $1 LIMIT 1 +` + +func (q *Queries) GetArtistAlias(ctx context.Context, alias string) (ArtistAlias, error) { + row := q.db.QueryRow(ctx, getArtistAlias, alias) + var i ArtistAlias + err := row.Scan( + &i.ArtistID, + &i.Alias, + &i.Source, + &i.IsPrimary, + ) + return i, err +} + +const getReleaseAlias = `-- name: GetReleaseAlias :one +SELECT release_id, alias, source, is_primary FROM release_aliases +WHERE alias = $1 LIMIT 1 +` + +func (q *Queries) GetReleaseAlias(ctx context.Context, alias string) (ReleaseAlias, error) { + row := q.db.QueryRow(ctx, getReleaseAlias, alias) + var i ReleaseAlias + err := row.Scan( + &i.ReleaseID, + &i.Alias, + &i.Source, + &i.IsPrimary, + ) + return i, err +} + +const getTrackAlias = `-- name: GetTrackAlias :one +SELECT track_id, alias, is_primary, source FROM track_aliases +WHERE alias = $1 LIMIT 1 +` + +func (q *Queries) GetTrackAlias(ctx context.Context, alias string) (TrackAlias, error) { + row := q.db.QueryRow(ctx, getTrackAlias, alias) + var i TrackAlias + err := row.Scan( + &i.TrackID, + &i.Alias, + &i.IsPrimary, + &i.Source, + ) + return i, err +} + +const insertArtistAlias = `-- name: InsertArtistAlias :exec +INSERT INTO artist_aliases (artist_id, alias, source, is_primary) +VALUES ($1, $2, $3, $4) +ON CONFLICT DO NOTHING +` + +type InsertArtistAliasParams struct { + ArtistID int32 + Alias string + Source string + IsPrimary bool +} + +func (q *Queries) InsertArtistAlias(ctx context.Context, arg InsertArtistAliasParams) error { + _, err := q.db.Exec(ctx, insertArtistAlias, + arg.ArtistID, + arg.Alias, + arg.Source, + arg.IsPrimary, + ) + return err +} + +const insertReleaseAlias = `-- name: InsertReleaseAlias :exec +INSERT INTO release_aliases (release_id, alias, source, is_primary) +VALUES ($1, $2, $3, $4) +ON CONFLICT DO NOTHING +` + +type InsertReleaseAliasParams struct { + ReleaseID int32 + Alias string + Source string + IsPrimary bool +} + +func (q *Queries) InsertReleaseAlias(ctx context.Context, arg InsertReleaseAliasParams) error { + _, err := q.db.Exec(ctx, insertReleaseAlias, + arg.ReleaseID, + arg.Alias, + arg.Source, + arg.IsPrimary, + ) + return err +} + +const insertTrackAlias = `-- name: InsertTrackAlias :exec +INSERT INTO track_aliases (track_id, alias, source, is_primary) +VALUES ($1, $2, $3, $4) +ON CONFLICT DO NOTHING +` + +type InsertTrackAliasParams struct { + TrackID int32 + Alias string + Source string + IsPrimary bool +} + +func (q *Queries) InsertTrackAlias(ctx context.Context, arg InsertTrackAliasParams) error { + _, err := q.db.Exec(ctx, insertTrackAlias, + arg.TrackID, + arg.Alias, + arg.Source, + arg.IsPrimary, + ) + return err +} + +const 
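// Illustrative sketch: the generated Queries type is bound to anything satisfying DBTX,
// typically a pgxpool.Pool. The connection string, alias values, and the assumption that an
// artist row with id 1 already exists are placeholders, not values from this repository.
package sketches

import (
	"context"

	"github.com/gabehf/koito/internal/repository"
	"github.com/jackc/pgx/v5/pgxpool"
)

func exampleArtistAlias(ctx context.Context) error {
	pool, err := pgxpool.New(ctx, "postgres://user:pass@localhost:5432/koito") // placeholder DSN
	if err != nil {
		return err
	}
	defer pool.Close()

	q := repository.New(pool)
	if err := q.InsertArtistAlias(ctx, repository.InsertArtistAliasParams{
		ArtistID:  1,                // assumes this artist already exists
		Alias:     "Artist Example", // placeholder alias
		Source:    "example",        // placeholder source label
		IsPrimary: true,
	}); err != nil {
		return err
	}
	// Lookups resolve an alias back to its artist id; ON CONFLICT DO NOTHING makes the
	// insert above safe to repeat.
	_, err = q.GetArtistAlias(ctx, "Artist Example")
	return err
}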
setArtistAliasPrimaryStatus = `-- name: SetArtistAliasPrimaryStatus :exec +UPDATE artist_aliases SET is_primary = $1 WHERE artist_id = $2 AND alias = $3 +` + +type SetArtistAliasPrimaryStatusParams struct { + IsPrimary bool + ArtistID int32 + Alias string +} + +func (q *Queries) SetArtistAliasPrimaryStatus(ctx context.Context, arg SetArtistAliasPrimaryStatusParams) error { + _, err := q.db.Exec(ctx, setArtistAliasPrimaryStatus, arg.IsPrimary, arg.ArtistID, arg.Alias) + return err +} + +const setReleaseAliasPrimaryStatus = `-- name: SetReleaseAliasPrimaryStatus :exec +UPDATE release_aliases SET is_primary = $1 WHERE release_id = $2 AND alias = $3 +` + +type SetReleaseAliasPrimaryStatusParams struct { + IsPrimary bool + ReleaseID int32 + Alias string +} + +func (q *Queries) SetReleaseAliasPrimaryStatus(ctx context.Context, arg SetReleaseAliasPrimaryStatusParams) error { + _, err := q.db.Exec(ctx, setReleaseAliasPrimaryStatus, arg.IsPrimary, arg.ReleaseID, arg.Alias) + return err +} + +const setTrackAliasPrimaryStatus = `-- name: SetTrackAliasPrimaryStatus :exec +UPDATE track_aliases SET is_primary = $1 WHERE track_id = $2 AND alias = $3 +` + +type SetTrackAliasPrimaryStatusParams struct { + IsPrimary bool + TrackID int32 + Alias string +} + +func (q *Queries) SetTrackAliasPrimaryStatus(ctx context.Context, arg SetTrackAliasPrimaryStatusParams) error { + _, err := q.db.Exec(ctx, setTrackAliasPrimaryStatus, arg.IsPrimary, arg.TrackID, arg.Alias) + return err +} diff --git a/internal/repository/artist.sql.go b/internal/repository/artist.sql.go new file mode 100644 index 0000000..3d01e1a --- /dev/null +++ b/internal/repository/artist.sql.go @@ -0,0 +1,418 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.29.0 +// source: artist.sql + +package repository + +import ( + "context" + "time" + + "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const countTopArtists = `-- name: CountTopArtists :one +SELECT COUNT(DISTINCT at.artist_id) AS total_count +FROM listens l +JOIN artist_tracks at ON l.track_id = at.track_id +WHERE l.listened_at BETWEEN $1 AND $2 +` + +type CountTopArtistsParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time +} + +func (q *Queries) CountTopArtists(ctx context.Context, arg CountTopArtistsParams) (int64, error) { + row := q.db.QueryRow(ctx, countTopArtists, arg.ListenedAt, arg.ListenedAt_2) + var total_count int64 + err := row.Scan(&total_count) + return total_count, err +} + +const deleteArtist = `-- name: DeleteArtist :exec +DELETE FROM artists WHERE id = $1 +` + +func (q *Queries) DeleteArtist(ctx context.Context, id int32) error { + _, err := q.db.Exec(ctx, deleteArtist, id) + return err +} + +const deleteConflictingArtistReleases = `-- name: DeleteConflictingArtistReleases :exec +DELETE FROM artist_releases ar +WHERE ar.artist_id = $1 + AND release_id IN ( + SELECT ar.release_id FROM artist_releases ar WHERE ar.artist_id = $2 + ) +` + +type DeleteConflictingArtistReleasesParams struct { + ArtistID int32 + ArtistID_2 int32 +} + +func (q *Queries) DeleteConflictingArtistReleases(ctx context.Context, arg DeleteConflictingArtistReleasesParams) error { + _, err := q.db.Exec(ctx, deleteConflictingArtistReleases, arg.ArtistID, arg.ArtistID_2) + return err +} + +const deleteConflictingArtistTracks = `-- name: DeleteConflictingArtistTracks :exec +DELETE FROM artist_tracks at +WHERE at.artist_id = $1 + AND track_id IN ( + SELECT at.track_id FROM artist_tracks at WHERE at.artist_id = $2 + ) +` + +type DeleteConflictingArtistTracksParams 
struct { + ArtistID int32 + ArtistID_2 int32 +} + +func (q *Queries) DeleteConflictingArtistTracks(ctx context.Context, arg DeleteConflictingArtistTracksParams) error { + _, err := q.db.Exec(ctx, deleteConflictingArtistTracks, arg.ArtistID, arg.ArtistID_2) + return err +} + +const getArtist = `-- name: GetArtist :one +SELECT + a.id, a.musicbrainz_id, a.image, a.image_source, a.name, + array_agg(aa.alias)::text[] AS aliases +FROM artists_with_name a +LEFT JOIN artist_aliases aa ON a.id = aa.artist_id +WHERE a.id = $1 +GROUP BY a.id, a.musicbrainz_id, a.image, a.image_source, a.name +` + +type GetArtistRow struct { + ID int32 + MusicBrainzID *uuid.UUID + Image *uuid.UUID + ImageSource pgtype.Text + Name string + Aliases []string +} + +func (q *Queries) GetArtist(ctx context.Context, id int32) (GetArtistRow, error) { + row := q.db.QueryRow(ctx, getArtist, id) + var i GetArtistRow + err := row.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.ImageSource, + &i.Name, + &i.Aliases, + ) + return i, err +} + +const getArtistByImage = `-- name: GetArtistByImage :one +SELECT id, musicbrainz_id, image, image_source FROM artists WHERE image = $1 LIMIT 1 +` + +func (q *Queries) GetArtistByImage(ctx context.Context, image *uuid.UUID) (Artist, error) { + row := q.db.QueryRow(ctx, getArtistByImage, image) + var i Artist + err := row.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.ImageSource, + ) + return i, err +} + +const getArtistByMbzID = `-- name: GetArtistByMbzID :one +SELECT + a.id, a.musicbrainz_id, a.image, a.image_source, a.name, + array_agg(aa.alias)::text[] AS aliases +FROM artists_with_name a +LEFT JOIN artist_aliases aa ON a.id = aa.artist_id +WHERE a.musicbrainz_id = $1 +GROUP BY a.id, a.musicbrainz_id, a.image, a.image_source, a.name +` + +type GetArtistByMbzIDRow struct { + ID int32 + MusicBrainzID *uuid.UUID + Image *uuid.UUID + ImageSource pgtype.Text + Name string + Aliases []string +} + +func (q *Queries) GetArtistByMbzID(ctx context.Context, musicbrainzID *uuid.UUID) (GetArtistByMbzIDRow, error) { + row := q.db.QueryRow(ctx, getArtistByMbzID, musicbrainzID) + var i GetArtistByMbzIDRow + err := row.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.ImageSource, + &i.Name, + &i.Aliases, + ) + return i, err +} + +const getArtistByName = `-- name: GetArtistByName :one +WITH artist_with_aliases AS ( + SELECT + a.id, a.musicbrainz_id, a.image, a.image_source, a.name, + COALESCE(array_agg(aa.alias), '{}')::text[] AS aliases + FROM artists_with_name a + LEFT JOIN artist_aliases aa ON a.id = aa.artist_id + WHERE a.id IN ( + SELECT aa2.artist_id FROM artist_aliases aa2 WHERE aa2.alias = $1 + ) + GROUP BY a.id, a.musicbrainz_id, a.image, a.image_source, a.name +) +SELECT id, musicbrainz_id, image, image_source, name, aliases FROM artist_with_aliases +` + +type GetArtistByNameRow struct { + ID int32 + MusicBrainzID *uuid.UUID + Image *uuid.UUID + ImageSource pgtype.Text + Name string + Aliases []string +} + +func (q *Queries) GetArtistByName(ctx context.Context, alias string) (GetArtistByNameRow, error) { + row := q.db.QueryRow(ctx, getArtistByName, alias) + var i GetArtistByNameRow + err := row.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.ImageSource, + &i.Name, + &i.Aliases, + ) + return i, err +} + +const getReleaseArtists = `-- name: GetReleaseArtists :many +SELECT + a.id, a.musicbrainz_id, a.image, a.image_source, a.name +FROM artists_with_name a +LEFT JOIN artist_releases ar ON a.id = ar.artist_id +WHERE ar.release_id = $1 +GROUP BY a.id, a.musicbrainz_id, a.image, 
a.image_source, a.name +` + +func (q *Queries) GetReleaseArtists(ctx context.Context, releaseID int32) ([]ArtistsWithName, error) { + rows, err := q.db.Query(ctx, getReleaseArtists, releaseID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ArtistsWithName + for rows.Next() { + var i ArtistsWithName + if err := rows.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.ImageSource, + &i.Name, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getTopArtistsPaginated = `-- name: GetTopArtistsPaginated :many +SELECT + a.id, + a.name, + a.musicbrainz_id, + a.image, + COUNT(*) AS listen_count +FROM listens l +JOIN tracks t ON l.track_id = t.id +JOIN artist_tracks at ON at.track_id = t.id +JOIN artists_with_name a ON a.id = at.artist_id +WHERE l.listened_at BETWEEN $1 AND $2 +GROUP BY a.id, a.name, a.musicbrainz_id, a.image, a.image_source, a.name +ORDER BY listen_count DESC +LIMIT $3 OFFSET $4 +` + +type GetTopArtistsPaginatedParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + Limit int32 + Offset int32 +} + +type GetTopArtistsPaginatedRow struct { + ID int32 + Name string + MusicBrainzID *uuid.UUID + Image *uuid.UUID + ListenCount int64 +} + +func (q *Queries) GetTopArtistsPaginated(ctx context.Context, arg GetTopArtistsPaginatedParams) ([]GetTopArtistsPaginatedRow, error) { + rows, err := q.db.Query(ctx, getTopArtistsPaginated, + arg.ListenedAt, + arg.ListenedAt_2, + arg.Limit, + arg.Offset, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetTopArtistsPaginatedRow + for rows.Next() { + var i GetTopArtistsPaginatedRow + if err := rows.Scan( + &i.ID, + &i.Name, + &i.MusicBrainzID, + &i.Image, + &i.ListenCount, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getTrackArtists = `-- name: GetTrackArtists :many +SELECT + a.id, a.musicbrainz_id, a.image, a.image_source, a.name +FROM artists_with_name a +LEFT JOIN artist_tracks at ON a.id = at.artist_id +WHERE at.track_id = $1 +GROUP BY a.id, a.musicbrainz_id, a.image, a.image_source, a.name +` + +func (q *Queries) GetTrackArtists(ctx context.Context, trackID int32) ([]ArtistsWithName, error) { + rows, err := q.db.Query(ctx, getTrackArtists, trackID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ArtistsWithName + for rows.Next() { + var i ArtistsWithName + if err := rows.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.ImageSource, + &i.Name, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const insertArtist = `-- name: InsertArtist :one +INSERT INTO artists (musicbrainz_id, image, image_source) +VALUES ($1, $2, $3) +RETURNING id, musicbrainz_id, image, image_source +` + +type InsertArtistParams struct { + MusicBrainzID *uuid.UUID + Image *uuid.UUID + ImageSource pgtype.Text +} + +func (q *Queries) InsertArtist(ctx context.Context, arg InsertArtistParams) (Artist, error) { + row := q.db.QueryRow(ctx, insertArtist, arg.MusicBrainzID, arg.Image, arg.ImageSource) + var i Artist + err := row.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.ImageSource, + ) + return i, err +} + +const updateArtistImage = `-- name: UpdateArtistImage :exec +UPDATE artists SET image = $2, image_source = $3 +WHERE id = $1 +` + +type 
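// Illustrative sketch: paging through the top-artists chart for a time window using the
// generated query above. The window and page size are arbitrary example values.
package sketches

import (
	"context"
	"time"

	"github.com/gabehf/koito/internal/repository"
)

func exampleTopArtists(ctx context.Context, q *repository.Queries) ([]repository.GetTopArtistsPaginatedRow, error) {
	now := time.Now()
	return q.GetTopArtistsPaginated(ctx, repository.GetTopArtistsPaginatedParams{
		ListenedAt:   now.AddDate(0, 0, -7), // window start (last 7 days)
		ListenedAt_2: now,                   // window end
		Limit:        20,                    // page size
		Offset:       0,                     // first page
	})
}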
UpdateArtistImageParams struct { + ID int32 + Image *uuid.UUID + ImageSource pgtype.Text +} + +func (q *Queries) UpdateArtistImage(ctx context.Context, arg UpdateArtistImageParams) error { + _, err := q.db.Exec(ctx, updateArtistImage, arg.ID, arg.Image, arg.ImageSource) + return err +} + +const updateArtistMbzID = `-- name: UpdateArtistMbzID :exec +UPDATE artists SET musicbrainz_id = $2 +WHERE id = $1 +` + +type UpdateArtistMbzIDParams struct { + ID int32 + MusicBrainzID *uuid.UUID +} + +func (q *Queries) UpdateArtistMbzID(ctx context.Context, arg UpdateArtistMbzIDParams) error { + _, err := q.db.Exec(ctx, updateArtistMbzID, arg.ID, arg.MusicBrainzID) + return err +} + +const updateArtistReleases = `-- name: UpdateArtistReleases :exec +UPDATE artist_releases +SET artist_id = $2 +WHERE artist_id = $1 +` + +type UpdateArtistReleasesParams struct { + ArtistID int32 + ArtistID_2 int32 +} + +func (q *Queries) UpdateArtistReleases(ctx context.Context, arg UpdateArtistReleasesParams) error { + _, err := q.db.Exec(ctx, updateArtistReleases, arg.ArtistID, arg.ArtistID_2) + return err +} + +const updateArtistTracks = `-- name: UpdateArtistTracks :exec +UPDATE artist_tracks +SET artist_id = $2 +WHERE artist_id = $1 +` + +type UpdateArtistTracksParams struct { + ArtistID int32 + ArtistID_2 int32 +} + +func (q *Queries) UpdateArtistTracks(ctx context.Context, arg UpdateArtistTracksParams) error { + _, err := q.db.Exec(ctx, updateArtistTracks, arg.ArtistID, arg.ArtistID_2) + return err +} diff --git a/internal/repository/db.go b/internal/repository/db.go new file mode 100644 index 0000000..89e33c0 --- /dev/null +++ b/internal/repository/db.go @@ -0,0 +1,32 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.29.0 + +package repository + +import ( + "context" + + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgconn" +) + +type DBTX interface { + Exec(context.Context, string, ...interface{}) (pgconn.CommandTag, error) + Query(context.Context, string, ...interface{}) (pgx.Rows, error) + QueryRow(context.Context, string, ...interface{}) pgx.Row +} + +func New(db DBTX) *Queries { + return &Queries{db: db} +} + +type Queries struct { + db DBTX +} + +func (q *Queries) WithTx(tx pgx.Tx) *Queries { + return &Queries{ + db: tx, + } +} diff --git a/internal/repository/etc.sql.go b/internal/repository/etc.sql.go new file mode 100644 index 0000000..7664959 --- /dev/null +++ b/internal/repository/etc.sql.go @@ -0,0 +1,26 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.29.0 +// source: etc.sql + +package repository + +import ( + "context" +) + +const cleanOrphanedEntries = `-- name: CleanOrphanedEntries :exec +DO $$ +BEGIN + DELETE FROM tracks WHERE id NOT IN (SELECT l.track_id FROM listens l); + DELETE FROM releases WHERE id NOT IN (SELECT t.release_id FROM tracks t); + DELETE FROM artists WHERE id NOT IN (SELECT at.artist_id FROM artist_tracks at); +END $$ +` + +// DELETE FROM releases WHERE release_group_id NOT IN (SELECT t.release_group_id FROM tracks t); +// DELETE FROM releases WHERE release_group_id NOT IN (SELECT rg.id FROM release_groups rg); +func (q *Queries) CleanOrphanedEntries(ctx context.Context) error { + _, err := q.db.Exec(ctx, cleanOrphanedEntries) + return err +} diff --git a/internal/repository/listen.sql.go b/internal/repository/listen.sql.go new file mode 100644 index 0000000..d3567c3 --- /dev/null +++ b/internal/repository/listen.sql.go @@ -0,0 +1,742 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.29.0 +// source: listen.sql + +package repository + +import ( + "context" + "time" + + "github.com/jackc/pgx/v5/pgtype" +) + +const countListens = `-- name: CountListens :one +SELECT COUNT(*) AS total_count +FROM listens l +WHERE l.listened_at BETWEEN $1 AND $2 +` + +type CountListensParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time +} + +func (q *Queries) CountListens(ctx context.Context, arg CountListensParams) (int64, error) { + row := q.db.QueryRow(ctx, countListens, arg.ListenedAt, arg.ListenedAt_2) + var total_count int64 + err := row.Scan(&total_count) + return total_count, err +} + +const countListensFromArtist = `-- name: CountListensFromArtist :one +SELECT COUNT(*) AS total_count +FROM listens l +JOIN artist_tracks at ON l.track_id = at.track_id +WHERE l.listened_at BETWEEN $1 AND $2 + AND at.artist_id = $3 +` + +type CountListensFromArtistParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + ArtistID int32 +} + +func (q *Queries) CountListensFromArtist(ctx context.Context, arg CountListensFromArtistParams) (int64, error) { + row := q.db.QueryRow(ctx, countListensFromArtist, arg.ListenedAt, arg.ListenedAt_2, arg.ArtistID) + var total_count int64 + err := row.Scan(&total_count) + return total_count, err +} + +const countListensFromRelease = `-- name: CountListensFromRelease :one +SELECT COUNT(*) AS total_count +FROM listens l +JOIN tracks t ON l.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2 + AND t.release_id = $3 +` + +type CountListensFromReleaseParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + ReleaseID int32 +} + +func (q *Queries) CountListensFromRelease(ctx context.Context, arg CountListensFromReleaseParams) (int64, error) { + row := q.db.QueryRow(ctx, countListensFromRelease, arg.ListenedAt, arg.ListenedAt_2, arg.ReleaseID) + var total_count int64 + err := row.Scan(&total_count) + return total_count, err +} + +const countListensFromTrack = `-- name: CountListensFromTrack :one +SELECT COUNT(*) AS total_count +FROM listens l +WHERE l.listened_at BETWEEN $1 AND $2 + AND l.track_id = $3 +` + +type CountListensFromTrackParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + TrackID int32 +} + +func (q *Queries) CountListensFromTrack(ctx context.Context, arg CountListensFromTrackParams) (int64, error) { + row := q.db.QueryRow(ctx, countListensFromTrack, arg.ListenedAt, arg.ListenedAt_2, arg.TrackID) + var total_count int64 + err := row.Scan(&total_count) + return total_count, err +} + +const countTimeListened = `-- name: CountTimeListened :one +SELECT COALESCE(SUM(t.duration), 0)::BIGINT AS seconds_listened +FROM listens l +JOIN tracks t ON l.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2 +` + +type CountTimeListenedParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time +} + +func (q *Queries) CountTimeListened(ctx context.Context, arg CountTimeListenedParams) (int64, error) { + row := q.db.QueryRow(ctx, countTimeListened, arg.ListenedAt, arg.ListenedAt_2) + var seconds_listened int64 + err := row.Scan(&seconds_listened) + return seconds_listened, err +} + +const countTimeListenedToArtist = `-- name: CountTimeListenedToArtist :one +SELECT COALESCE(SUM(t.duration), 0)::BIGINT AS seconds_listened +FROM listens l +JOIN tracks t ON l.track_id = t.id +JOIN artist_tracks at ON t.id = at.track_id +WHERE l.listened_at BETWEEN $1 AND $2 + AND at.artist_id = $3 +` + +type CountTimeListenedToArtistParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + ArtistID int32 +} + +func 
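// Illustrative sketch: combining the count queries above to produce simple listening stats
// for a time window; the one-month window below is an arbitrary example.
package sketches

import (
	"context"
	"time"

	"github.com/gabehf/koito/internal/repository"
)

func exampleStats(ctx context.Context, q *repository.Queries) error {
	end := time.Now()
	start := end.AddDate(0, -1, 0) // last month

	listens, err := q.CountListens(ctx, repository.CountListensParams{ListenedAt: start, ListenedAt_2: end})
	if err != nil {
		return err
	}
	seconds, err := q.CountTimeListened(ctx, repository.CountTimeListenedParams{ListenedAt: start, ListenedAt_2: end})
	if err != nil {
		return err
	}
	_ = listens           // total plays in the window
	_ = seconds / 60 / 60 // rough hours listened, from summed track durations
	return nil
}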
(q *Queries) CountTimeListenedToArtist(ctx context.Context, arg CountTimeListenedToArtistParams) (int64, error) { + row := q.db.QueryRow(ctx, countTimeListenedToArtist, arg.ListenedAt, arg.ListenedAt_2, arg.ArtistID) + var seconds_listened int64 + err := row.Scan(&seconds_listened) + return seconds_listened, err +} + +const countTimeListenedToRelease = `-- name: CountTimeListenedToRelease :one +SELECT COALESCE(SUM(t.duration), 0)::BIGINT AS seconds_listened +FROM listens l +JOIN tracks t ON l.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2 + AND t.release_id = $3 +` + +type CountTimeListenedToReleaseParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + ReleaseID int32 +} + +func (q *Queries) CountTimeListenedToRelease(ctx context.Context, arg CountTimeListenedToReleaseParams) (int64, error) { + row := q.db.QueryRow(ctx, countTimeListenedToRelease, arg.ListenedAt, arg.ListenedAt_2, arg.ReleaseID) + var seconds_listened int64 + err := row.Scan(&seconds_listened) + return seconds_listened, err +} + +const countTimeListenedToTrack = `-- name: CountTimeListenedToTrack :one +SELECT COALESCE(SUM(t.duration), 0)::BIGINT AS seconds_listened +FROM listens l +JOIN tracks t ON l.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2 + AND t.id = $3 +` + +type CountTimeListenedToTrackParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + ID int32 +} + +func (q *Queries) CountTimeListenedToTrack(ctx context.Context, arg CountTimeListenedToTrackParams) (int64, error) { + row := q.db.QueryRow(ctx, countTimeListenedToTrack, arg.ListenedAt, arg.ListenedAt_2, arg.ID) + var seconds_listened int64 + err := row.Scan(&seconds_listened) + return seconds_listened, err +} + +const deleteListen = `-- name: DeleteListen :exec +DELETE FROM listens WHERE track_id = $1 AND listened_at = $2 +` + +type DeleteListenParams struct { + TrackID int32 + ListenedAt time.Time +} + +func (q *Queries) DeleteListen(ctx context.Context, arg DeleteListenParams) error { + _, err := q.db.Exec(ctx, deleteListen, arg.TrackID, arg.ListenedAt) + return err +} + +const getLastListensFromArtistPaginated = `-- name: GetLastListensFromArtistPaginated :many +SELECT + l.track_id, l.listened_at, l.client, l.user_id, + t.title AS track_title, + t.release_id AS release_id, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at + JOIN artists_with_name a ON a.id = at.artist_id + WHERE at.track_id = t.id + ) AS artists +FROM listens l +JOIN tracks_with_title t ON l.track_id = t.id +JOIN artist_tracks at ON t.id = at.track_id +WHERE at.artist_id = $5 + AND l.listened_at BETWEEN $1 AND $2 +ORDER BY l.listened_at DESC +LIMIT $3 OFFSET $4 +` + +type GetLastListensFromArtistPaginatedParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + Limit int32 + Offset int32 + ArtistID int32 +} + +type GetLastListensFromArtistPaginatedRow struct { + TrackID int32 + ListenedAt time.Time + Client *string + UserID int32 + TrackTitle string + ReleaseID int32 + Artists []byte +} + +func (q *Queries) GetLastListensFromArtistPaginated(ctx context.Context, arg GetLastListensFromArtistPaginatedParams) ([]GetLastListensFromArtistPaginatedRow, error) { + rows, err := q.db.Query(ctx, getLastListensFromArtistPaginated, + arg.ListenedAt, + arg.ListenedAt_2, + arg.Limit, + arg.Offset, + arg.ArtistID, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetLastListensFromArtistPaginatedRow + for rows.Next() { + var i GetLastListensFromArtistPaginatedRow + if err := rows.Scan( + 
&i.TrackID, + &i.ListenedAt, + &i.Client, + &i.UserID, + &i.TrackTitle, + &i.ReleaseID, + &i.Artists, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getLastListensFromReleasePaginated = `-- name: GetLastListensFromReleasePaginated :many +SELECT + l.track_id, l.listened_at, l.client, l.user_id, + t.title AS track_title, + t.release_id AS release_id, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at + JOIN artists_with_name a ON a.id = at.artist_id + WHERE at.track_id = t.id + ) AS artists +FROM listens l +JOIN tracks_with_title t ON l.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2 + AND t.release_id = $5 +ORDER BY l.listened_at DESC +LIMIT $3 OFFSET $4 +` + +type GetLastListensFromReleasePaginatedParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + Limit int32 + Offset int32 + ReleaseID int32 +} + +type GetLastListensFromReleasePaginatedRow struct { + TrackID int32 + ListenedAt time.Time + Client *string + UserID int32 + TrackTitle string + ReleaseID int32 + Artists []byte +} + +func (q *Queries) GetLastListensFromReleasePaginated(ctx context.Context, arg GetLastListensFromReleasePaginatedParams) ([]GetLastListensFromReleasePaginatedRow, error) { + rows, err := q.db.Query(ctx, getLastListensFromReleasePaginated, + arg.ListenedAt, + arg.ListenedAt_2, + arg.Limit, + arg.Offset, + arg.ReleaseID, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetLastListensFromReleasePaginatedRow + for rows.Next() { + var i GetLastListensFromReleasePaginatedRow + if err := rows.Scan( + &i.TrackID, + &i.ListenedAt, + &i.Client, + &i.UserID, + &i.TrackTitle, + &i.ReleaseID, + &i.Artists, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getLastListensFromTrackPaginated = `-- name: GetLastListensFromTrackPaginated :many +SELECT + l.track_id, l.listened_at, l.client, l.user_id, + t.title AS track_title, + t.release_id AS release_id, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at + JOIN artists_with_name a ON a.id = at.artist_id + WHERE at.track_id = t.id + ) AS artists +FROM listens l +JOIN tracks_with_title t ON l.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2 + AND t.id = $5 +ORDER BY l.listened_at DESC +LIMIT $3 OFFSET $4 +` + +type GetLastListensFromTrackPaginatedParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + Limit int32 + Offset int32 + ID int32 +} + +type GetLastListensFromTrackPaginatedRow struct { + TrackID int32 + ListenedAt time.Time + Client *string + UserID int32 + TrackTitle string + ReleaseID int32 + Artists []byte +} + +func (q *Queries) GetLastListensFromTrackPaginated(ctx context.Context, arg GetLastListensFromTrackPaginatedParams) ([]GetLastListensFromTrackPaginatedRow, error) { + rows, err := q.db.Query(ctx, getLastListensFromTrackPaginated, + arg.ListenedAt, + arg.ListenedAt_2, + arg.Limit, + arg.Offset, + arg.ID, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetLastListensFromTrackPaginatedRow + for rows.Next() { + var i GetLastListensFromTrackPaginatedRow + if err := rows.Scan( + &i.TrackID, + &i.ListenedAt, + &i.Client, + &i.UserID, + &i.TrackTitle, + &i.ReleaseID, + &i.Artists, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); 
err != nil { + return nil, err + } + return items, nil +} + +const getLastListensPaginated = `-- name: GetLastListensPaginated :many +SELECT + l.track_id, l.listened_at, l.client, l.user_id, + t.title AS track_title, + t.release_id AS release_id, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at + JOIN artists_with_name a ON a.id = at.artist_id + WHERE at.track_id = t.id + ) AS artists +FROM listens l +JOIN tracks_with_title t ON l.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2 +ORDER BY l.listened_at DESC +LIMIT $3 OFFSET $4 +` + +type GetLastListensPaginatedParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + Limit int32 + Offset int32 +} + +type GetLastListensPaginatedRow struct { + TrackID int32 + ListenedAt time.Time + Client *string + UserID int32 + TrackTitle string + ReleaseID int32 + Artists []byte +} + +func (q *Queries) GetLastListensPaginated(ctx context.Context, arg GetLastListensPaginatedParams) ([]GetLastListensPaginatedRow, error) { + rows, err := q.db.Query(ctx, getLastListensPaginated, + arg.ListenedAt, + arg.ListenedAt_2, + arg.Limit, + arg.Offset, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetLastListensPaginatedRow + for rows.Next() { + var i GetLastListensPaginatedRow + if err := rows.Scan( + &i.TrackID, + &i.ListenedAt, + &i.Client, + &i.UserID, + &i.TrackTitle, + &i.ReleaseID, + &i.Artists, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const insertListen = `-- name: InsertListen :exec +INSERT INTO listens (track_id, listened_at, user_id, client) +VALUES ($1, $2, $3, $4) +ON CONFLICT DO NOTHING +` + +type InsertListenParams struct { + TrackID int32 + ListenedAt time.Time + UserID int32 + Client *string +} + +func (q *Queries) InsertListen(ctx context.Context, arg InsertListenParams) error { + _, err := q.db.Exec(ctx, insertListen, + arg.TrackID, + arg.ListenedAt, + arg.UserID, + arg.Client, + ) + return err +} + +const listenActivity = `-- name: ListenActivity :many +WITH buckets AS ( + SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start +), +bucketed_listens AS ( + SELECT + b.bucket_start, + COUNT(l.listened_at) AS listen_count + FROM buckets b + LEFT JOIN listens l + ON l.listened_at >= b.bucket_start + AND l.listened_at < b.bucket_start + $3::interval + GROUP BY b.bucket_start + ORDER BY b.bucket_start +) +SELECT bucket_start, listen_count FROM bucketed_listens +` + +type ListenActivityParams struct { + Column1 time.Time + Column2 time.Time + Column3 pgtype.Interval +} + +type ListenActivityRow struct { + BucketStart time.Time + ListenCount int64 +} + +func (q *Queries) ListenActivity(ctx context.Context, arg ListenActivityParams) ([]ListenActivityRow, error) { + rows, err := q.db.Query(ctx, listenActivity, arg.Column1, arg.Column2, arg.Column3) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ListenActivityRow + for rows.Next() { + var i ListenActivityRow + if err := rows.Scan(&i.BucketStart, &i.ListenCount); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listenActivityForArtist = `-- name: ListenActivityForArtist :many +WITH buckets AS ( + SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start +), +filtered_listens AS ( + SELECT l.track_id, l.listened_at, 
l.client, l.user_id + FROM listens l + JOIN artist_tracks t ON l.track_id = t.track_id + WHERE t.artist_id = $4 +), +bucketed_listens AS ( + SELECT + b.bucket_start, + COUNT(l.listened_at) AS listen_count + FROM buckets b + LEFT JOIN filtered_listens l + ON l.listened_at >= b.bucket_start + AND l.listened_at < b.bucket_start + $3::interval + GROUP BY b.bucket_start + ORDER BY b.bucket_start +) +SELECT bucket_start, listen_count FROM bucketed_listens +` + +type ListenActivityForArtistParams struct { + Column1 time.Time + Column2 time.Time + Column3 pgtype.Interval + ArtistID int32 +} + +type ListenActivityForArtistRow struct { + BucketStart time.Time + ListenCount int64 +} + +func (q *Queries) ListenActivityForArtist(ctx context.Context, arg ListenActivityForArtistParams) ([]ListenActivityForArtistRow, error) { + rows, err := q.db.Query(ctx, listenActivityForArtist, + arg.Column1, + arg.Column2, + arg.Column3, + arg.ArtistID, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ListenActivityForArtistRow + for rows.Next() { + var i ListenActivityForArtistRow + if err := rows.Scan(&i.BucketStart, &i.ListenCount); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listenActivityForRelease = `-- name: ListenActivityForRelease :many +WITH buckets AS ( + SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start +), +filtered_listens AS ( + SELECT l.track_id, l.listened_at, l.client, l.user_id + FROM listens l + JOIN tracks t ON l.track_id = t.id + WHERE t.release_id = $4 +), +bucketed_listens AS ( + SELECT + b.bucket_start, + COUNT(l.listened_at) AS listen_count + FROM buckets b + LEFT JOIN filtered_listens l + ON l.listened_at >= b.bucket_start + AND l.listened_at < b.bucket_start + $3::interval + GROUP BY b.bucket_start + ORDER BY b.bucket_start +) +SELECT bucket_start, listen_count FROM bucketed_listens +` + +type ListenActivityForReleaseParams struct { + Column1 time.Time + Column2 time.Time + Column3 pgtype.Interval + ReleaseID int32 +} + +type ListenActivityForReleaseRow struct { + BucketStart time.Time + ListenCount int64 +} + +func (q *Queries) ListenActivityForRelease(ctx context.Context, arg ListenActivityForReleaseParams) ([]ListenActivityForReleaseRow, error) { + rows, err := q.db.Query(ctx, listenActivityForRelease, + arg.Column1, + arg.Column2, + arg.Column3, + arg.ReleaseID, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ListenActivityForReleaseRow + for rows.Next() { + var i ListenActivityForReleaseRow + if err := rows.Scan(&i.BucketStart, &i.ListenCount); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listenActivityForTrack = `-- name: ListenActivityForTrack :many +WITH buckets AS ( + SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start +), +filtered_listens AS ( + SELECT l.track_id, l.listened_at, l.client, l.user_id + FROM listens l + JOIN tracks t ON l.track_id = t.id + WHERE t.id = $4 +), +bucketed_listens AS ( + SELECT + b.bucket_start, + COUNT(l.listened_at) AS listen_count + FROM buckets b + LEFT JOIN filtered_listens l + ON l.listened_at >= b.bucket_start + AND l.listened_at < b.bucket_start + $3::interval + GROUP BY b.bucket_start + ORDER BY b.bucket_start +) +SELECT bucket_start, listen_count FROM bucketed_listens +` + +type 
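// Illustrative sketch: the activity queries above bucket listens with generate_series, so the
// caller supplies a window plus a pgtype.Interval step. A one-day step over the last week is
// shown; the values are arbitrary examples.
package sketches

import (
	"context"
	"time"

	"github.com/gabehf/koito/internal/repository"
	"github.com/jackc/pgx/v5/pgtype"
)

func exampleDailyActivity(ctx context.Context, q *repository.Queries) ([]repository.ListenActivityRow, error) {
	end := time.Now()
	return q.ListenActivity(ctx, repository.ListenActivityParams{
		Column1: end.AddDate(0, 0, -7),                  // window start
		Column2: end,                                    // window end
		Column3: pgtype.Interval{Days: 1, Valid: true},  // one bucket per day
	})
}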
ListenActivityForTrackParams struct { + Column1 time.Time + Column2 time.Time + Column3 pgtype.Interval + ID int32 +} + +type ListenActivityForTrackRow struct { + BucketStart time.Time + ListenCount int64 +} + +func (q *Queries) ListenActivityForTrack(ctx context.Context, arg ListenActivityForTrackParams) ([]ListenActivityForTrackRow, error) { + rows, err := q.db.Query(ctx, listenActivityForTrack, + arg.Column1, + arg.Column2, + arg.Column3, + arg.ID, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ListenActivityForTrackRow + for rows.Next() { + var i ListenActivityForTrackRow + if err := rows.Scan(&i.BucketStart, &i.ListenCount); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateTrackIdForListens = `-- name: UpdateTrackIdForListens :exec +UPDATE listens SET track_id = $2 +WHERE track_id = $1 +` + +type UpdateTrackIdForListensParams struct { + TrackID int32 + TrackID_2 int32 +} + +func (q *Queries) UpdateTrackIdForListens(ctx context.Context, arg UpdateTrackIdForListensParams) error { + _, err := q.db.Exec(ctx, updateTrackIdForListens, arg.TrackID, arg.TrackID_2) + return err +} diff --git a/internal/repository/models.go b/internal/repository/models.go new file mode 100644 index 0000000..d1dc41f --- /dev/null +++ b/internal/repository/models.go @@ -0,0 +1,164 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.29.0 + +package repository + +import ( + "database/sql/driver" + "fmt" + "time" + + "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +type Role string + +const ( + RoleAdmin Role = "admin" + RoleUser Role = "user" +) + +func (e *Role) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = Role(s) + case string: + *e = Role(s) + default: + return fmt.Errorf("unsupported scan type for Role: %T", src) + } + return nil +} + +type NullRole struct { + Role Role + Valid bool // Valid is true if Role is not NULL +} + +// Scan implements the Scanner interface. +func (ns *NullRole) Scan(value interface{}) error { + if value == nil { + ns.Role, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.Role.Scan(value) +} + +// Value implements the driver Valuer interface. 
+func (ns NullRole) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.Role), nil +} + +type ApiKey struct { + ID int32 + Key string + UserID int32 + CreatedAt pgtype.Timestamp + Label string +} + +type Artist struct { + ID int32 + MusicBrainzID *uuid.UUID + Image *uuid.UUID + ImageSource pgtype.Text +} + +type ArtistAlias struct { + ArtistID int32 + Alias string + Source string + IsPrimary bool +} + +type ArtistRelease struct { + ArtistID int32 + ReleaseID int32 +} + +type ArtistTrack struct { + ArtistID int32 + TrackID int32 +} + +type ArtistsWithName struct { + ID int32 + MusicBrainzID *uuid.UUID + Image *uuid.UUID + ImageSource pgtype.Text + Name string +} + +type Listen struct { + TrackID int32 + ListenedAt time.Time + Client *string + UserID int32 +} + +type Release struct { + ID int32 + MusicBrainzID *uuid.UUID + Image *uuid.UUID + VariousArtists bool + ImageSource pgtype.Text +} + +type ReleaseAlias struct { + ReleaseID int32 + Alias string + Source string + IsPrimary bool +} + +type ReleasesWithTitle struct { + ID int32 + MusicBrainzID *uuid.UUID + Image *uuid.UUID + VariousArtists bool + ImageSource pgtype.Text + Title string +} + +type Session struct { + ID uuid.UUID + UserID int32 + CreatedAt time.Time + ExpiresAt time.Time + Persistent bool +} + +type Track struct { + ID int32 + MusicBrainzID *uuid.UUID + Duration int32 + ReleaseID int32 +} + +type TrackAlias struct { + TrackID int32 + Alias string + IsPrimary bool + Source string +} + +type TracksWithTitle struct { + ID int32 + MusicBrainzID *uuid.UUID + Duration int32 + ReleaseID int32 + Title string +} + +type User struct { + ID int32 + Username string + Role Role + Password []byte +} diff --git a/internal/repository/release.sql.go b/internal/repository/release.sql.go new file mode 100644 index 0000000..06a936e --- /dev/null +++ b/internal/repository/release.sql.go @@ -0,0 +1,462 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.29.0 +// source: release.sql + +package repository + +import ( + "context" + "time" + + "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const associateArtistToRelease = `-- name: AssociateArtistToRelease :exec +INSERT INTO artist_releases (artist_id, release_id) +VALUES ($1, $2) +ON CONFLICT DO NOTHING +` + +type AssociateArtistToReleaseParams struct { + ArtistID int32 + ReleaseID int32 +} + +func (q *Queries) AssociateArtistToRelease(ctx context.Context, arg AssociateArtistToReleaseParams) error { + _, err := q.db.Exec(ctx, associateArtistToRelease, arg.ArtistID, arg.ReleaseID) + return err +} + +const countReleasesFromArtist = `-- name: CountReleasesFromArtist :one +SELECT COUNT(*) +FROM releases r +JOIN artist_releases ar ON r.id = ar.release_id +WHERE ar.artist_id = $1 +` + +func (q *Queries) CountReleasesFromArtist(ctx context.Context, artistID int32) (int64, error) { + row := q.db.QueryRow(ctx, countReleasesFromArtist, artistID) + var count int64 + err := row.Scan(&count) + return count, err +} + +const countTopReleases = `-- name: CountTopReleases :one +SELECT COUNT(DISTINCT r.id) AS total_count +FROM listens l +JOIN tracks t ON l.track_id = t.id +JOIN releases r ON t.release_id = r.id +WHERE l.listened_at BETWEEN $1 AND $2 +` + +type CountTopReleasesParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time +} + +func (q *Queries) CountTopReleases(ctx context.Context, arg CountTopReleasesParams) (int64, error) { + row := q.db.QueryRow(ctx, countTopReleases, arg.ListenedAt, arg.ListenedAt_2) + var total_count int64 + err := row.Scan(&total_count) + return total_count, err +} + +const deleteRelease = `-- name: DeleteRelease :exec +DELETE FROM releases WHERE id = $1 +` + +func (q *Queries) DeleteRelease(ctx context.Context, id int32) error { + _, err := q.db.Exec(ctx, deleteRelease, id) + return err +} + +const deleteReleasesFromArtist = `-- name: DeleteReleasesFromArtist :exec +DELETE FROM releases r +USING artist_releases ar +WHERE ar.release_id = r.id + AND ar.artist_id = $1 +` + +func (q *Queries) DeleteReleasesFromArtist(ctx context.Context, artistID int32) error { + _, err := q.db.Exec(ctx, deleteReleasesFromArtist, artistID) + return err +} + +const getRelease = `-- name: GetRelease :one +SELECT id, musicbrainz_id, image, various_artists, image_source, title FROM releases_with_title +WHERE id = $1 LIMIT 1 +` + +func (q *Queries) GetRelease(ctx context.Context, id int32) (ReleasesWithTitle, error) { + row := q.db.QueryRow(ctx, getRelease, id) + var i ReleasesWithTitle + err := row.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.VariousArtists, + &i.ImageSource, + &i.Title, + ) + return i, err +} + +const getReleaseByArtistAndTitle = `-- name: GetReleaseByArtistAndTitle :one +SELECT r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title +FROM releases_with_title r +JOIN artist_releases ar ON r.id = ar.release_id +WHERE r.title = $1 AND ar.artist_id = $2 +LIMIT 1 +` + +type GetReleaseByArtistAndTitleParams struct { + Title string + ArtistID int32 +} + +func (q *Queries) GetReleaseByArtistAndTitle(ctx context.Context, arg GetReleaseByArtistAndTitleParams) (ReleasesWithTitle, error) { + row := q.db.QueryRow(ctx, getReleaseByArtistAndTitle, arg.Title, arg.ArtistID) + var i ReleasesWithTitle + err := row.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.VariousArtists, + &i.ImageSource, + &i.Title, + ) + return i, err +} + +const getReleaseByArtistAndTitles = `-- name: GetReleaseByArtistAndTitles :one +SELECT 
r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title +FROM releases_with_title r +JOIN artist_releases ar ON r.id = ar.release_id +WHERE r.title = ANY ($1::TEXT[]) AND ar.artist_id = $2 +LIMIT 1 +` + +type GetReleaseByArtistAndTitlesParams struct { + Column1 []string + ArtistID int32 +} + +func (q *Queries) GetReleaseByArtistAndTitles(ctx context.Context, arg GetReleaseByArtistAndTitlesParams) (ReleasesWithTitle, error) { + row := q.db.QueryRow(ctx, getReleaseByArtistAndTitles, arg.Column1, arg.ArtistID) + var i ReleasesWithTitle + err := row.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.VariousArtists, + &i.ImageSource, + &i.Title, + ) + return i, err +} + +const getReleaseByImageID = `-- name: GetReleaseByImageID :one +SELECT id, musicbrainz_id, image, various_artists, image_source FROM releases +WHERE image = $1 LIMIT 1 +` + +func (q *Queries) GetReleaseByImageID(ctx context.Context, image *uuid.UUID) (Release, error) { + row := q.db.QueryRow(ctx, getReleaseByImageID, image) + var i Release + err := row.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.VariousArtists, + &i.ImageSource, + ) + return i, err +} + +const getReleaseByMbzID = `-- name: GetReleaseByMbzID :one +SELECT id, musicbrainz_id, image, various_artists, image_source, title FROM releases_with_title +WHERE musicbrainz_id = $1 LIMIT 1 +` + +func (q *Queries) GetReleaseByMbzID(ctx context.Context, musicbrainzID *uuid.UUID) (ReleasesWithTitle, error) { + row := q.db.QueryRow(ctx, getReleaseByMbzID, musicbrainzID) + var i ReleasesWithTitle + err := row.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.VariousArtists, + &i.ImageSource, + &i.Title, + ) + return i, err +} + +const getReleasesWithoutImages = `-- name: GetReleasesWithoutImages :many +SELECT + r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title, + ( + SELECT json_agg(DISTINCT jsonb_build_object('id', a.id, 'name', a.name)) + FROM artists_with_name a + JOIN artist_releases ar ON a.id = ar.artist_id + WHERE ar.release_id = r.id + ) AS artists +FROM releases_with_title r +WHERE r.image IS NULL + AND r.id > $2 +ORDER BY r.id ASC +LIMIT $1 +` + +type GetReleasesWithoutImagesParams struct { + Limit int32 + ID int32 +} + +type GetReleasesWithoutImagesRow struct { + ID int32 + MusicBrainzID *uuid.UUID + Image *uuid.UUID + VariousArtists bool + ImageSource pgtype.Text + Title string + Artists []byte +} + +func (q *Queries) GetReleasesWithoutImages(ctx context.Context, arg GetReleasesWithoutImagesParams) ([]GetReleasesWithoutImagesRow, error) { + rows, err := q.db.Query(ctx, getReleasesWithoutImages, arg.Limit, arg.ID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetReleasesWithoutImagesRow + for rows.Next() { + var i GetReleasesWithoutImagesRow + if err := rows.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.VariousArtists, + &i.ImageSource, + &i.Title, + &i.Artists, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getTopReleasesFromArtist = `-- name: GetTopReleasesFromArtist :many +SELECT + r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title, + COUNT(*) AS listen_count, + ( + SELECT json_agg(DISTINCT jsonb_build_object('id', a.id, 'name', a.name)) + FROM artists_with_name a + JOIN artist_releases ar ON ar.artist_id = a.id + WHERE ar.release_id = r.id + ) AS artists +FROM listens l +JOIN tracks t ON l.track_id = t.id +JOIN releases_with_title r ON 
t.release_id = r.id +JOIN artist_releases ar ON r.id = ar.release_id +WHERE ar.artist_id = $5 + AND l.listened_at BETWEEN $1 AND $2 +GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source +ORDER BY listen_count DESC +LIMIT $3 OFFSET $4 +` + +type GetTopReleasesFromArtistParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + Limit int32 + Offset int32 + ArtistID int32 +} + +type GetTopReleasesFromArtistRow struct { + ID int32 + MusicBrainzID *uuid.UUID + Image *uuid.UUID + VariousArtists bool + ImageSource pgtype.Text + Title string + ListenCount int64 + Artists []byte +} + +func (q *Queries) GetTopReleasesFromArtist(ctx context.Context, arg GetTopReleasesFromArtistParams) ([]GetTopReleasesFromArtistRow, error) { + rows, err := q.db.Query(ctx, getTopReleasesFromArtist, + arg.ListenedAt, + arg.ListenedAt_2, + arg.Limit, + arg.Offset, + arg.ArtistID, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetTopReleasesFromArtistRow + for rows.Next() { + var i GetTopReleasesFromArtistRow + if err := rows.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.VariousArtists, + &i.ImageSource, + &i.Title, + &i.ListenCount, + &i.Artists, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getTopReleasesPaginated = `-- name: GetTopReleasesPaginated :many +SELECT + r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title, + COUNT(*) AS listen_count, + ( + SELECT json_agg(DISTINCT jsonb_build_object('id', a.id, 'name', a.name)) + FROM artists_with_name a + JOIN artist_releases ar ON ar.artist_id = a.id + WHERE ar.release_id = r.id + ) AS artists +FROM listens l +JOIN tracks t ON l.track_id = t.id +JOIN releases_with_title r ON t.release_id = r.id +WHERE l.listened_at BETWEEN $1 AND $2 +GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source +ORDER BY listen_count DESC +LIMIT $3 OFFSET $4 +` + +type GetTopReleasesPaginatedParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + Limit int32 + Offset int32 +} + +type GetTopReleasesPaginatedRow struct { + ID int32 + MusicBrainzID *uuid.UUID + Image *uuid.UUID + VariousArtists bool + ImageSource pgtype.Text + Title string + ListenCount int64 + Artists []byte +} + +func (q *Queries) GetTopReleasesPaginated(ctx context.Context, arg GetTopReleasesPaginatedParams) ([]GetTopReleasesPaginatedRow, error) { + rows, err := q.db.Query(ctx, getTopReleasesPaginated, + arg.ListenedAt, + arg.ListenedAt_2, + arg.Limit, + arg.Offset, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetTopReleasesPaginatedRow + for rows.Next() { + var i GetTopReleasesPaginatedRow + if err := rows.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.VariousArtists, + &i.ImageSource, + &i.Title, + &i.ListenCount, + &i.Artists, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const insertRelease = `-- name: InsertRelease :one +INSERT INTO releases (musicbrainz_id, various_artists, image, image_source) +VALUES ($1, $2, $3, $4) +RETURNING id, musicbrainz_id, image, various_artists, image_source +` + +type InsertReleaseParams struct { + MusicBrainzID *uuid.UUID + VariousArtists bool + Image *uuid.UUID + ImageSource pgtype.Text +} + +func (q *Queries) InsertRelease(ctx context.Context, arg InsertReleaseParams) (Release, error) { + row 
:= q.db.QueryRow(ctx, insertRelease, + arg.MusicBrainzID, + arg.VariousArtists, + arg.Image, + arg.ImageSource, + ) + var i Release + err := row.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Image, + &i.VariousArtists, + &i.ImageSource, + ) + return i, err +} + +const updateReleaseImage = `-- name: UpdateReleaseImage :exec +UPDATE releases SET image = $2, image_source = $3 +WHERE id = $1 +` + +type UpdateReleaseImageParams struct { + ID int32 + Image *uuid.UUID + ImageSource pgtype.Text +} + +func (q *Queries) UpdateReleaseImage(ctx context.Context, arg UpdateReleaseImageParams) error { + _, err := q.db.Exec(ctx, updateReleaseImage, arg.ID, arg.Image, arg.ImageSource) + return err +} + +const updateReleaseMbzID = `-- name: UpdateReleaseMbzID :exec +UPDATE releases SET musicbrainz_id = $2 +WHERE id = $1 +` + +type UpdateReleaseMbzIDParams struct { + ID int32 + MusicBrainzID *uuid.UUID +} + +func (q *Queries) UpdateReleaseMbzID(ctx context.Context, arg UpdateReleaseMbzIDParams) error { + _, err := q.db.Exec(ctx, updateReleaseMbzID, arg.ID, arg.MusicBrainzID) + return err +} diff --git a/internal/repository/search.sql.go b/internal/repository/search.sql.go new file mode 100644 index 0000000..eed25a8 --- /dev/null +++ b/internal/repository/search.sql.go @@ -0,0 +1,431 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.29.0 +// source: search.sql + +package repository + +import ( + "context" + + "github.com/google/uuid" + "github.com/jackc/pgx/v5/pgtype" +) + +const searchArtists = `-- name: SearchArtists :many +SELECT id, name, musicbrainz_id, image, score +FROM ( + SELECT + a.id, + a.name, + a.musicbrainz_id, + a.image, + similarity(aa.alias, $1) AS score, + ROW_NUMBER() OVER (PARTITION BY a.id ORDER BY similarity(aa.alias, $1) DESC) AS rn + FROM artist_aliases aa + JOIN artists_with_name a ON aa.artist_id = a.id + WHERE similarity(aa.alias, $1) > 0.28 +) ranked +WHERE rn = 1 +ORDER BY score DESC +LIMIT $2 +` + +type SearchArtistsParams struct { + Similarity string + Limit int32 +} + +type SearchArtistsRow struct { + ID int32 + Name string + MusicBrainzID *uuid.UUID + Image *uuid.UUID + Score float32 +} + +func (q *Queries) SearchArtists(ctx context.Context, arg SearchArtistsParams) ([]SearchArtistsRow, error) { + rows, err := q.db.Query(ctx, searchArtists, arg.Similarity, arg.Limit) + if err != nil { + return nil, err + } + defer rows.Close() + var items []SearchArtistsRow + for rows.Next() { + var i SearchArtistsRow + if err := rows.Scan( + &i.ID, + &i.Name, + &i.MusicBrainzID, + &i.Image, + &i.Score, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const searchArtistsBySubstring = `-- name: SearchArtistsBySubstring :many +SELECT id, name, musicbrainz_id, image, score +FROM ( + SELECT + a.id, + a.name, + a.musicbrainz_id, + a.image, + 1.0 AS score, -- why + ROW_NUMBER() OVER (PARTITION BY a.id ORDER BY aa.alias) AS rn + FROM artist_aliases aa + JOIN artists_with_name a ON aa.artist_id = a.id + WHERE aa.alias ILIKE $1 || '%' +) ranked +WHERE rn = 1 +ORDER BY score DESC +LIMIT $2 +` + +type SearchArtistsBySubstringParams struct { + Column1 pgtype.Text + Limit int32 +} + +type SearchArtistsBySubstringRow struct { + ID int32 + Name string + MusicBrainzID *uuid.UUID + Image *uuid.UUID + Score float64 +} + +func (q *Queries) SearchArtistsBySubstring(ctx context.Context, arg SearchArtistsBySubstringParams) ([]SearchArtistsBySubstringRow, error) { + rows, err := q.db.Query(ctx, 
searchArtistsBySubstring, arg.Column1, arg.Limit) + if err != nil { + return nil, err + } + defer rows.Close() + var items []SearchArtistsBySubstringRow + for rows.Next() { + var i SearchArtistsBySubstringRow + if err := rows.Scan( + &i.ID, + &i.Name, + &i.MusicBrainzID, + &i.Image, + &i.Score, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const searchReleases = `-- name: SearchReleases :many +SELECT + ranked.id, + ranked.title, + ranked.musicbrainz_id, + ranked.image, + ranked.various_artists, + ranked.score, + ( + SELECT json_agg(DISTINCT jsonb_build_object('id', a.id, 'name', a.name)) + FROM artists_with_name a + JOIN artist_releases ar ON ar.artist_id = a.id + WHERE ar.release_id = ranked.id + ) AS artists +FROM ( + SELECT + r.id, + r.title, + r.musicbrainz_id, + r.image, + r.various_artists, + similarity(ra.alias, $1) AS score, + ROW_NUMBER() OVER (PARTITION BY r.id ORDER BY similarity(ra.alias, $1) DESC) AS rn + FROM release_aliases ra + JOIN releases_with_title r ON ra.release_id = r.id + WHERE similarity(ra.alias, $1) > 0.28 +) ranked +WHERE rn = 1 +ORDER BY score DESC, title +LIMIT $2 +` + +type SearchReleasesParams struct { + Similarity string + Limit int32 +} + +type SearchReleasesRow struct { + ID int32 + Title string + MusicBrainzID *uuid.UUID + Image *uuid.UUID + VariousArtists bool + Score float32 + Artists []byte +} + +func (q *Queries) SearchReleases(ctx context.Context, arg SearchReleasesParams) ([]SearchReleasesRow, error) { + rows, err := q.db.Query(ctx, searchReleases, arg.Similarity, arg.Limit) + if err != nil { + return nil, err + } + defer rows.Close() + var items []SearchReleasesRow + for rows.Next() { + var i SearchReleasesRow + if err := rows.Scan( + &i.ID, + &i.Title, + &i.MusicBrainzID, + &i.Image, + &i.VariousArtists, + &i.Score, + &i.Artists, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const searchReleasesBySubstring = `-- name: SearchReleasesBySubstring :many +SELECT + ranked.id, + ranked.title, + ranked.musicbrainz_id, + ranked.image, + ranked.various_artists, + ranked.score, + ( + SELECT json_agg(DISTINCT jsonb_build_object('id', a.id, 'name', a.name)) + FROM artists_with_name a + JOIN artist_releases ar ON ar.artist_id = a.id + WHERE ar.release_id = ranked.id + ) AS artists +FROM ( + SELECT + r.id, + r.title, + r.musicbrainz_id, + r.image, + r.various_artists, + 1.0 AS score, -- idk why + ROW_NUMBER() OVER (PARTITION BY r.id ORDER BY ra.alias) AS rn + FROM release_aliases ra + JOIN releases_with_title r ON ra.release_id = r.id + WHERE ra.alias ILIKE $1 || '%' +) ranked +WHERE rn = 1 +ORDER BY score DESC, title +LIMIT $2 +` + +type SearchReleasesBySubstringParams struct { + Column1 pgtype.Text + Limit int32 +} + +type SearchReleasesBySubstringRow struct { + ID int32 + Title string + MusicBrainzID *uuid.UUID + Image *uuid.UUID + VariousArtists bool + Score float64 + Artists []byte +} + +func (q *Queries) SearchReleasesBySubstring(ctx context.Context, arg SearchReleasesBySubstringParams) ([]SearchReleasesBySubstringRow, error) { + rows, err := q.db.Query(ctx, searchReleasesBySubstring, arg.Column1, arg.Limit) + if err != nil { + return nil, err + } + defer rows.Close() + var items []SearchReleasesBySubstringRow + for rows.Next() { + var i SearchReleasesBySubstringRow + if err := rows.Scan( + &i.ID, + &i.Title, + &i.MusicBrainzID, + 
&i.Image, + &i.VariousArtists, + &i.Score, + &i.Artists, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const searchTracks = `-- name: SearchTracks :many +SELECT + ranked.id, + ranked.title, + ranked.musicbrainz_id, + ranked.release_id, + ranked.image, + ranked.score, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at + JOIN artists_with_name a ON a.id = at.artist_id + WHERE at.track_id = ranked.id + ) AS artists +FROM ( + SELECT + t.id, + t.title, + t.musicbrainz_id, + t.release_id, + r.image, + similarity(ta.alias, $1) AS score, + ROW_NUMBER() OVER (PARTITION BY t.id ORDER BY similarity(ta.alias, $1) DESC) AS rn + FROM track_aliases ta + JOIN tracks_with_title t ON ta.track_id = t.id + JOIN releases r ON t.release_id = r.id + WHERE similarity(ta.alias, $1) > 0.28 +) ranked +WHERE rn = 1 +ORDER BY score DESC, title +LIMIT $2 +` + +type SearchTracksParams struct { + Similarity string + Limit int32 +} + +type SearchTracksRow struct { + ID int32 + Title string + MusicBrainzID *uuid.UUID + ReleaseID int32 + Image *uuid.UUID + Score float32 + Artists []byte +} + +func (q *Queries) SearchTracks(ctx context.Context, arg SearchTracksParams) ([]SearchTracksRow, error) { + rows, err := q.db.Query(ctx, searchTracks, arg.Similarity, arg.Limit) + if err != nil { + return nil, err + } + defer rows.Close() + var items []SearchTracksRow + for rows.Next() { + var i SearchTracksRow + if err := rows.Scan( + &i.ID, + &i.Title, + &i.MusicBrainzID, + &i.ReleaseID, + &i.Image, + &i.Score, + &i.Artists, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const searchTracksBySubstring = `-- name: SearchTracksBySubstring :many +SELECT + ranked.id, + ranked.title, + ranked.musicbrainz_id, + ranked.release_id, + ranked.image, + ranked.score, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at + JOIN artists_with_name a ON a.id = at.artist_id + WHERE at.track_id = ranked.id + ) AS artists +FROM ( + SELECT + t.id, + t.title, + t.musicbrainz_id, + t.release_id, + r.image, + 1.0 AS score, + ROW_NUMBER() OVER (PARTITION BY t.id ORDER BY ta.alias) AS rn + FROM track_aliases ta + JOIN tracks_with_title t ON ta.track_id = t.id + JOIN releases r ON t.release_id = r.id + WHERE ta.alias ILIKE $1 || '%' +) ranked +WHERE rn = 1 +ORDER BY score DESC, title +LIMIT $2 +` + +type SearchTracksBySubstringParams struct { + Column1 pgtype.Text + Limit int32 +} + +type SearchTracksBySubstringRow struct { + ID int32 + Title string + MusicBrainzID *uuid.UUID + ReleaseID int32 + Image *uuid.UUID + Score float64 + Artists []byte +} + +func (q *Queries) SearchTracksBySubstring(ctx context.Context, arg SearchTracksBySubstringParams) ([]SearchTracksBySubstringRow, error) { + rows, err := q.db.Query(ctx, searchTracksBySubstring, arg.Column1, arg.Limit) + if err != nil { + return nil, err + } + defer rows.Close() + var items []SearchTracksBySubstringRow + for rows.Next() { + var i SearchTracksBySubstringRow + if err := rows.Scan( + &i.ID, + &i.Title, + &i.MusicBrainzID, + &i.ReleaseID, + &i.Image, + &i.Score, + &i.Artists, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/internal/repository/sessions.sql.go 
b/internal/repository/sessions.sql.go new file mode 100644 index 0000000..2985620 --- /dev/null +++ b/internal/repository/sessions.sql.go @@ -0,0 +1,120 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.29.0 +// source: sessions.sql + +package repository + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const deleteSession = `-- name: DeleteSession :exec +DELETE FROM sessions WHERE id = $1 +` + +func (q *Queries) DeleteSession(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, deleteSession, id) + return err +} + +const getSession = `-- name: GetSession :one +SELECT id, user_id, created_at, expires_at, persistent FROM sessions WHERE id = $1 AND expires_at > NOW() +` + +func (q *Queries) GetSession(ctx context.Context, id uuid.UUID) (Session, error) { + row := q.db.QueryRow(ctx, getSession, id) + var i Session + err := row.Scan( + &i.ID, + &i.UserID, + &i.CreatedAt, + &i.ExpiresAt, + &i.Persistent, + ) + return i, err +} + +const getUserBySession = `-- name: GetUserBySession :one +SELECT u.id, username, role, password, s.id, user_id, created_at, expires_at, persistent +FROM users u +JOIN sessions s ON u.id = s.user_id +WHERE s.id = $1 +` + +type GetUserBySessionRow struct { + ID int32 + Username string + Role Role + Password []byte + ID_2 uuid.UUID + UserID int32 + CreatedAt time.Time + ExpiresAt time.Time + Persistent bool +} + +func (q *Queries) GetUserBySession(ctx context.Context, id uuid.UUID) (GetUserBySessionRow, error) { + row := q.db.QueryRow(ctx, getUserBySession, id) + var i GetUserBySessionRow + err := row.Scan( + &i.ID, + &i.Username, + &i.Role, + &i.Password, + &i.ID_2, + &i.UserID, + &i.CreatedAt, + &i.ExpiresAt, + &i.Persistent, + ) + return i, err +} + +const insertSession = `-- name: InsertSession :one +INSERT INTO sessions (id, user_id, expires_at, persistent) +VALUES ($1, $2, $3, $4) +RETURNING id, user_id, created_at, expires_at, persistent +` + +type InsertSessionParams struct { + ID uuid.UUID + UserID int32 + ExpiresAt time.Time + Persistent bool +} + +func (q *Queries) InsertSession(ctx context.Context, arg InsertSessionParams) (Session, error) { + row := q.db.QueryRow(ctx, insertSession, + arg.ID, + arg.UserID, + arg.ExpiresAt, + arg.Persistent, + ) + var i Session + err := row.Scan( + &i.ID, + &i.UserID, + &i.CreatedAt, + &i.ExpiresAt, + &i.Persistent, + ) + return i, err +} + +const updateSessionExpiry = `-- name: UpdateSessionExpiry :exec +UPDATE sessions SET expires_at = $2 WHERE id = $1 +` + +type UpdateSessionExpiryParams struct { + ID uuid.UUID + ExpiresAt time.Time +} + +func (q *Queries) UpdateSessionExpiry(ctx context.Context, arg UpdateSessionExpiryParams) error { + _, err := q.db.Exec(ctx, updateSessionExpiry, arg.ID, arg.ExpiresAt) + return err +} diff --git a/internal/repository/track.sql.go b/internal/repository/track.sql.go new file mode 100644 index 0000000..a31316b --- /dev/null +++ b/internal/repository/track.sql.go @@ -0,0 +1,504 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.29.0 +// source: track.sql + +package repository + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const associateArtistToTrack = `-- name: AssociateArtistToTrack :exec +INSERT INTO artist_tracks (artist_id, track_id) +VALUES ($1, $2) +ON CONFLICT DO NOTHING +` + +type AssociateArtistToTrackParams struct { + ArtistID int32 + TrackID int32 +} + +func (q *Queries) AssociateArtistToTrack(ctx context.Context, arg AssociateArtistToTrackParams) error { + _, err := q.db.Exec(ctx, associateArtistToTrack, arg.ArtistID, arg.TrackID) + return err +} + +const countTopTracks = `-- name: CountTopTracks :one +SELECT COUNT(DISTINCT l.track_id) AS total_count +FROM listens l +WHERE l.listened_at BETWEEN $1 AND $2 +` + +type CountTopTracksParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time +} + +func (q *Queries) CountTopTracks(ctx context.Context, arg CountTopTracksParams) (int64, error) { + row := q.db.QueryRow(ctx, countTopTracks, arg.ListenedAt, arg.ListenedAt_2) + var total_count int64 + err := row.Scan(&total_count) + return total_count, err +} + +const countTopTracksByArtist = `-- name: CountTopTracksByArtist :one +SELECT COUNT(DISTINCT l.track_id) AS total_count +FROM listens l +JOIN artist_tracks at ON l.track_id = at.track_id +WHERE l.listened_at BETWEEN $1 AND $2 +AND at.artist_id = $3 +` + +type CountTopTracksByArtistParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + ArtistID int32 +} + +func (q *Queries) CountTopTracksByArtist(ctx context.Context, arg CountTopTracksByArtistParams) (int64, error) { + row := q.db.QueryRow(ctx, countTopTracksByArtist, arg.ListenedAt, arg.ListenedAt_2, arg.ArtistID) + var total_count int64 + err := row.Scan(&total_count) + return total_count, err +} + +const countTopTracksByRelease = `-- name: CountTopTracksByRelease :one +SELECT COUNT(DISTINCT l.track_id) AS total_count +FROM listens l +JOIN tracks t ON l.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2 +AND t.release_id = $3 +` + +type CountTopTracksByReleaseParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + ReleaseID int32 +} + +func (q *Queries) CountTopTracksByRelease(ctx context.Context, arg CountTopTracksByReleaseParams) (int64, error) { + row := q.db.QueryRow(ctx, countTopTracksByRelease, arg.ListenedAt, arg.ListenedAt_2, arg.ReleaseID) + var total_count int64 + err := row.Scan(&total_count) + return total_count, err +} + +const deleteTrack = `-- name: DeleteTrack :exec +DELETE FROM tracks WHERE id = $1 +` + +func (q *Queries) DeleteTrack(ctx context.Context, id int32) error { + _, err := q.db.Exec(ctx, deleteTrack, id) + return err +} + +const getAllTracksFromArtist = `-- name: GetAllTracksFromArtist :many +SELECT t.id, t.musicbrainz_id, t.duration, t.release_id, t.title +FROM tracks_with_title t +JOIN artist_tracks at ON t.id = at.track_id +WHERE at.artist_id = $1 +` + +func (q *Queries) GetAllTracksFromArtist(ctx context.Context, artistID int32) ([]TracksWithTitle, error) { + rows, err := q.db.Query(ctx, getAllTracksFromArtist, artistID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []TracksWithTitle + for rows.Next() { + var i TracksWithTitle + if err := rows.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Duration, + &i.ReleaseID, + &i.Title, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getTopTracksByArtistPaginated = `-- name: GetTopTracksByArtistPaginated :many +SELECT + t.id, 
+ t.title, + t.musicbrainz_id, + t.release_id, + r.image, + COUNT(*) AS listen_count, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at2 + JOIN artists_with_name a ON a.id = at2.artist_id + WHERE at2.track_id = t.id + ) AS artists +FROM listens l +JOIN tracks_with_title t ON l.track_id = t.id +JOIN releases r ON t.release_id = r.id +JOIN artist_tracks at ON at.track_id = t.id +WHERE l.listened_at BETWEEN $1 AND $2 + AND at.artist_id = $5 +GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image +ORDER BY listen_count DESC +LIMIT $3 OFFSET $4 +` + +type GetTopTracksByArtistPaginatedParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + Limit int32 + Offset int32 + ArtistID int32 +} + +type GetTopTracksByArtistPaginatedRow struct { + ID int32 + Title string + MusicBrainzID *uuid.UUID + ReleaseID int32 + Image *uuid.UUID + ListenCount int64 + Artists []byte +} + +func (q *Queries) GetTopTracksByArtistPaginated(ctx context.Context, arg GetTopTracksByArtistPaginatedParams) ([]GetTopTracksByArtistPaginatedRow, error) { + rows, err := q.db.Query(ctx, getTopTracksByArtistPaginated, + arg.ListenedAt, + arg.ListenedAt_2, + arg.Limit, + arg.Offset, + arg.ArtistID, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetTopTracksByArtistPaginatedRow + for rows.Next() { + var i GetTopTracksByArtistPaginatedRow + if err := rows.Scan( + &i.ID, + &i.Title, + &i.MusicBrainzID, + &i.ReleaseID, + &i.Image, + &i.ListenCount, + &i.Artists, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getTopTracksInReleasePaginated = `-- name: GetTopTracksInReleasePaginated :many +SELECT + t.id, + t.title, + t.musicbrainz_id, + t.release_id, + r.image, + COUNT(*) AS listen_count, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at2 + JOIN artists_with_name a ON a.id = at2.artist_id + WHERE at2.track_id = t.id + ) AS artists +FROM listens l +JOIN tracks_with_title t ON l.track_id = t.id +JOIN releases r ON t.release_id = r.id +WHERE l.listened_at BETWEEN $1 AND $2 + AND t.release_id = $5 +GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image +ORDER BY listen_count DESC +LIMIT $3 OFFSET $4 +` + +type GetTopTracksInReleasePaginatedParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + Limit int32 + Offset int32 + ReleaseID int32 +} + +type GetTopTracksInReleasePaginatedRow struct { + ID int32 + Title string + MusicBrainzID *uuid.UUID + ReleaseID int32 + Image *uuid.UUID + ListenCount int64 + Artists []byte +} + +func (q *Queries) GetTopTracksInReleasePaginated(ctx context.Context, arg GetTopTracksInReleasePaginatedParams) ([]GetTopTracksInReleasePaginatedRow, error) { + rows, err := q.db.Query(ctx, getTopTracksInReleasePaginated, + arg.ListenedAt, + arg.ListenedAt_2, + arg.Limit, + arg.Offset, + arg.ReleaseID, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetTopTracksInReleasePaginatedRow + for rows.Next() { + var i GetTopTracksInReleasePaginatedRow + if err := rows.Scan( + &i.ID, + &i.Title, + &i.MusicBrainzID, + &i.ReleaseID, + &i.Image, + &i.ListenCount, + &i.Artists, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getTopTracksPaginated = `-- name: GetTopTracksPaginated :many +SELECT + t.id, + t.title, + t.musicbrainz_id, 
+ t.release_id, + r.image, + COUNT(*) AS listen_count, + ( + SELECT json_agg(json_build_object('id', a.id, 'name', a.name)) + FROM artist_tracks at + JOIN artists_with_name a ON a.id = at.artist_id + WHERE at.track_id = t.id + ) AS artists +FROM listens l +JOIN tracks_with_title t ON l.track_id = t.id +JOIN releases r ON t.release_id = r.id +WHERE l.listened_at BETWEEN $1 AND $2 +GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image +ORDER BY listen_count DESC +LIMIT $3 OFFSET $4 +` + +type GetTopTracksPaginatedParams struct { + ListenedAt time.Time + ListenedAt_2 time.Time + Limit int32 + Offset int32 +} + +type GetTopTracksPaginatedRow struct { + ID int32 + Title string + MusicBrainzID *uuid.UUID + ReleaseID int32 + Image *uuid.UUID + ListenCount int64 + Artists []byte +} + +func (q *Queries) GetTopTracksPaginated(ctx context.Context, arg GetTopTracksPaginatedParams) ([]GetTopTracksPaginatedRow, error) { + rows, err := q.db.Query(ctx, getTopTracksPaginated, + arg.ListenedAt, + arg.ListenedAt_2, + arg.Limit, + arg.Offset, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetTopTracksPaginatedRow + for rows.Next() { + var i GetTopTracksPaginatedRow + if err := rows.Scan( + &i.ID, + &i.Title, + &i.MusicBrainzID, + &i.ReleaseID, + &i.Image, + &i.ListenCount, + &i.Artists, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getTrack = `-- name: GetTrack :one +SELECT + t.id, t.musicbrainz_id, t.duration, t.release_id, t.title, + r.image +FROM tracks_with_title t +JOIN releases r ON t.release_id = r.id +WHERE t.id = $1 LIMIT 1 +` + +type GetTrackRow struct { + ID int32 + MusicBrainzID *uuid.UUID + Duration int32 + ReleaseID int32 + Title string + Image *uuid.UUID +} + +func (q *Queries) GetTrack(ctx context.Context, id int32) (GetTrackRow, error) { + row := q.db.QueryRow(ctx, getTrack, id) + var i GetTrackRow + err := row.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Duration, + &i.ReleaseID, + &i.Title, + &i.Image, + ) + return i, err +} + +const getTrackByMbzID = `-- name: GetTrackByMbzID :one +SELECT id, musicbrainz_id, duration, release_id, title FROM tracks_with_title +WHERE musicbrainz_id = $1 LIMIT 1 +` + +func (q *Queries) GetTrackByMbzID(ctx context.Context, musicbrainzID *uuid.UUID) (TracksWithTitle, error) { + row := q.db.QueryRow(ctx, getTrackByMbzID, musicbrainzID) + var i TracksWithTitle + err := row.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Duration, + &i.ReleaseID, + &i.Title, + ) + return i, err +} + +const getTrackByTitleAndArtists = `-- name: GetTrackByTitleAndArtists :one +SELECT t.id, t.musicbrainz_id, t.duration, t.release_id, t.title +FROM tracks_with_title t +JOIN artist_tracks at ON at.track_id = t.id +WHERE t.title = $1 + AND at.artist_id = ANY($2::int[]) +GROUP BY t.id, t.title, t.musicbrainz_id, t.duration, t.release_id +HAVING COUNT(DISTINCT at.artist_id) = cardinality($2::int[]) +` + +type GetTrackByTitleAndArtistsParams struct { + Title string + Column2 []int32 +} + +func (q *Queries) GetTrackByTitleAndArtists(ctx context.Context, arg GetTrackByTitleAndArtistsParams) (TracksWithTitle, error) { + row := q.db.QueryRow(ctx, getTrackByTitleAndArtists, arg.Title, arg.Column2) + var i TracksWithTitle + err := row.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Duration, + &i.ReleaseID, + &i.Title, + ) + return i, err +} + +const insertTrack = `-- name: InsertTrack :one +INSERT INTO tracks (musicbrainz_id, release_id, duration) +VALUES ($1, 
$2, $3) +RETURNING id, musicbrainz_id, duration, release_id +` + +type InsertTrackParams struct { + MusicBrainzID *uuid.UUID + ReleaseID int32 + Duration int32 +} + +func (q *Queries) InsertTrack(ctx context.Context, arg InsertTrackParams) (Track, error) { + row := q.db.QueryRow(ctx, insertTrack, arg.MusicBrainzID, arg.ReleaseID, arg.Duration) + var i Track + err := row.Scan( + &i.ID, + &i.MusicBrainzID, + &i.Duration, + &i.ReleaseID, + ) + return i, err +} + +const updateReleaseForAll = `-- name: UpdateReleaseForAll :exec +UPDATE tracks SET release_id = $2 +WHERE release_id = $1 +` + +type UpdateReleaseForAllParams struct { + ReleaseID int32 + ReleaseID_2 int32 +} + +func (q *Queries) UpdateReleaseForAll(ctx context.Context, arg UpdateReleaseForAllParams) error { + _, err := q.db.Exec(ctx, updateReleaseForAll, arg.ReleaseID, arg.ReleaseID_2) + return err +} + +const updateTrackDuration = `-- name: UpdateTrackDuration :exec +UPDATE tracks SET duration = $2 +WHERE id = $1 +` + +type UpdateTrackDurationParams struct { + ID int32 + Duration int32 +} + +func (q *Queries) UpdateTrackDuration(ctx context.Context, arg UpdateTrackDurationParams) error { + _, err := q.db.Exec(ctx, updateTrackDuration, arg.ID, arg.Duration) + return err +} + +const updateTrackMbzID = `-- name: UpdateTrackMbzID :exec +UPDATE tracks SET musicbrainz_id = $2 +WHERE id = $1 +` + +type UpdateTrackMbzIDParams struct { + ID int32 + MusicBrainzID *uuid.UUID +} + +func (q *Queries) UpdateTrackMbzID(ctx context.Context, arg UpdateTrackMbzIDParams) error { + _, err := q.db.Exec(ctx, updateTrackMbzID, arg.ID, arg.MusicBrainzID) + return err +} diff --git a/internal/repository/users.sql.go b/internal/repository/users.sql.go new file mode 100644 index 0000000..8278f41 --- /dev/null +++ b/internal/repository/users.sql.go @@ -0,0 +1,210 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.29.0 +// source: users.sql + +package repository + +import ( + "context" +) + +const countApiKeys = `-- name: CountApiKeys :one +SELECT COUNT(*) FROM api_keys WHERE user_id = $1 +` + +func (q *Queries) CountApiKeys(ctx context.Context, userID int32) (int64, error) { + row := q.db.QueryRow(ctx, countApiKeys, userID) + var count int64 + err := row.Scan(&count) + return count, err +} + +const countUsers = `-- name: CountUsers :one +SELECT COUNT(*) FROM users +` + +func (q *Queries) CountUsers(ctx context.Context) (int64, error) { + row := q.db.QueryRow(ctx, countUsers) + var count int64 + err := row.Scan(&count) + return count, err +} + +const deleteApiKey = `-- name: DeleteApiKey :exec +DELETE FROM api_keys WHERE id = $1 +` + +func (q *Queries) DeleteApiKey(ctx context.Context, id int32) error { + _, err := q.db.Exec(ctx, deleteApiKey, id) + return err +} + +const deleteUser = `-- name: DeleteUser :exec +DELETE FROM users WHERE id = $1 +` + +func (q *Queries) DeleteUser(ctx context.Context, id int32) error { + _, err := q.db.Exec(ctx, deleteUser, id) + return err +} + +const getAllApiKeysByUserID = `-- name: GetAllApiKeysByUserID :many +SELECT ak.id, ak.key, ak.user_id, ak.created_at, ak.label +FROM api_keys ak +JOIN users u ON ak.user_id = u.id +WHERE u.id = $1 +` + +func (q *Queries) GetAllApiKeysByUserID(ctx context.Context, id int32) ([]ApiKey, error) { + rows, err := q.db.Query(ctx, getAllApiKeysByUserID, id) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ApiKey + for rows.Next() { + var i ApiKey + if err := rows.Scan( + &i.ID, + &i.Key, + &i.UserID, + &i.CreatedAt, + &i.Label, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getUserByApiKey = `-- name: GetUserByApiKey :one +SELECT u.id, u.username, u.role, u.password +FROM users u +JOIN api_keys ak ON u.id = ak.user_id +WHERE ak.key = $1 +` + +func (q *Queries) GetUserByApiKey(ctx context.Context, key string) (User, error) { + row := q.db.QueryRow(ctx, getUserByApiKey, key) + var i User + err := row.Scan( + &i.ID, + &i.Username, + &i.Role, + &i.Password, + ) + return i, err +} + +const getUserByUsername = `-- name: GetUserByUsername :one +SELECT id, username, role, password FROM users WHERE username = $1 +` + +func (q *Queries) GetUserByUsername(ctx context.Context, username string) (User, error) { + row := q.db.QueryRow(ctx, getUserByUsername, username) + var i User + err := row.Scan( + &i.ID, + &i.Username, + &i.Role, + &i.Password, + ) + return i, err +} + +const insertApiKey = `-- name: InsertApiKey :one +INSERT INTO api_keys (user_id, key, label) +VALUES ($1, $2, $3) +RETURNING id, key, user_id, created_at, label +` + +type InsertApiKeyParams struct { + UserID int32 + Key string + Label string +} + +func (q *Queries) InsertApiKey(ctx context.Context, arg InsertApiKeyParams) (ApiKey, error) { + row := q.db.QueryRow(ctx, insertApiKey, arg.UserID, arg.Key, arg.Label) + var i ApiKey + err := row.Scan( + &i.ID, + &i.Key, + &i.UserID, + &i.CreatedAt, + &i.Label, + ) + return i, err +} + +const insertUser = `-- name: InsertUser :one +INSERT INTO users (username, password, role) +VALUES ($1, $2, $3) +RETURNING id, username, role, password +` + +type InsertUserParams struct { + Username string + Password []byte + Role Role +} + +func (q *Queries) InsertUser(ctx context.Context, arg InsertUserParams) (User, error) { + row := q.db.QueryRow(ctx, insertUser, arg.Username, 
arg.Password, arg.Role) + var i User + err := row.Scan( + &i.ID, + &i.Username, + &i.Role, + &i.Password, + ) + return i, err +} + +const updateApiKeyLabel = `-- name: UpdateApiKeyLabel :exec +UPDATE api_keys SET label = $3 WHERE id = $1 AND user_id = $2 +` + +type UpdateApiKeyLabelParams struct { + ID int32 + UserID int32 + Label string +} + +func (q *Queries) UpdateApiKeyLabel(ctx context.Context, arg UpdateApiKeyLabelParams) error { + _, err := q.db.Exec(ctx, updateApiKeyLabel, arg.ID, arg.UserID, arg.Label) + return err +} + +const updateUserPassword = `-- name: UpdateUserPassword :exec +UPDATE users SET password = $2 WHERE id = $1 +` + +type UpdateUserPasswordParams struct { + ID int32 + Password []byte +} + +func (q *Queries) UpdateUserPassword(ctx context.Context, arg UpdateUserPasswordParams) error { + _, err := q.db.Exec(ctx, updateUserPassword, arg.ID, arg.Password) + return err +} + +const updateUserUsername = `-- name: UpdateUserUsername :exec +UPDATE users SET username = $2 WHERE id = $1 +` + +type UpdateUserUsernameParams struct { + ID int32 + Username string +} + +func (q *Queries) UpdateUserUsername(ctx context.Context, arg UpdateUserUsernameParams) error { + _, err := q.db.Exec(ctx, updateUserUsername, arg.ID, arg.Username) + return err +} diff --git a/internal/utils/utils.go b/internal/utils/utils.go new file mode 100644 index 0000000..fdd2b80 --- /dev/null +++ b/internal/utils/utils.go @@ -0,0 +1,313 @@ +package utils + +import ( + "crypto/rand" + "encoding/json" + "errors" + "fmt" + "io" + "math/big" + "net/http" + "os" + "strings" + "time" + + "github.com/gabehf/koito/internal/mbz" + "github.com/gabehf/koito/internal/models" + "github.com/google/uuid" +) + +func IDFromString(s string) string { + s = strings.ToLower(s) + s = strings.ReplaceAll(s, " ", "-") + return s +} + +func ParseUUIDSlice(str []string) ([]uuid.UUID, error) { + ret := make([]uuid.UUID, 0) + for _, s := range str { + parsed, err := uuid.Parse(s) + if err != nil { + continue + } + ret = append(ret, parsed) + } + return ret, nil +} + +func FlattenArtistMbzIDs(artists []*models.Artist) []uuid.UUID { + ids := make([]uuid.UUID, 0) + for _, a := range artists { + if a.MbzID == nil || *a.MbzID == uuid.Nil { + continue + } + ids = append(ids, *a.MbzID) + } + return ids +} + +func FlattenArtistNames(artists []*models.Artist) []string { + names := make([]string, 0) + for _, a := range artists { + names = append(names, a.Aliases...) + } + return names +} + +func FlattenSimpleArtistNames(artists []models.SimpleArtist) []string { + names := make([]string, 0) + for _, a := range artists { + names = append(names, a.Name) + } + return names +} + +func FlattenMbzArtistCreditNames(artists []mbz.MusicBrainzArtistCredit) []string { + names := make([]string, len(artists)) + for i, a := range artists { + names[i] = a.Name + } + return names +} + +func FlattenArtistIDs(artists []*models.Artist) []int32 { + ids := make([]int32, len(artists)) + for i, a := range artists { + ids[i] = a.ID + } + return ids +} + +// DateRange takes optional week, month, and year. If all are 0, it returns the zero time range. +// If only year is provided, it returns the full year. +// If both month and year are provided, it returns the start and end of that month. +// If week and year are provided, it returns the start and end of that week. +// If only week or month is provided without a year, it's considered invalid. 
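+// Illustrative expectations (editor's sketch based on the implementation below,
+// not part of the original commit):
+//
+//	DateRange(0, 0, 0)    // zero start/end, nil error (no filter)
+//	DateRange(0, 0, 2025) // 2025-01-01 to 2026-01-01 (local time)
+//	DateRange(0, 6, 2025) // 2025-06-01 to 2025-07-01
+//	DateRange(2, 0, 2025) // 2025-01-08 to 2025-01-15 (weeks are 7-day blocks counted from Jan 1)
+//	DateRange(2, 6, 2025) // error: cannot specify both week and month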
+func DateRange(week, month, year int) (time.Time, time.Time, error) {
+	if week == 0 && month == 0 && year == 0 {
+		// No filter applied
+		return time.Time{}, time.Time{}, nil
+	}
+
+	if month != 0 && (month < 1 || month > 12) {
+		return time.Time{}, time.Time{}, errors.New("invalid month")
+	}
+
+	if week != 0 && (week < 1 || week > 53) {
+		return time.Time{}, time.Time{}, errors.New("invalid week")
+	}
+
+	if year < 1 {
+		return time.Time{}, time.Time{}, errors.New("invalid year")
+	}
+
+	loc := time.Local
+
+	if week != 0 {
+		if month != 0 {
+			return time.Time{}, time.Time{}, errors.New("cannot specify both week and month")
+		}
+		// Specific week
+		start := time.Date(year, 1, 1, 0, 0, 0, 0, loc)
+		start = start.AddDate(0, 0, (week-1)*7)
+		end := start.AddDate(0, 0, 7)
+		return start, end, nil
+	}
+
+	if month == 0 {
+		// Whole year
+		start := time.Date(year, 1, 1, 0, 0, 0, 0, loc)
+		end := start.AddDate(1, 0, 0)
+		return start, end, nil
+	}
+
+	// Specific month
+	start := time.Date(year, time.Month(month), 1, 0, 0, 0, 0, loc)
+	end := start.AddDate(0, 1, 0)
+	return start, end, nil
+}
+
+// CopyFile copies a file from src to dst. If src and dst files exist, and are
+// the same, then return success. Otherwise, attempt to create a hard link
+// between the two files. If that fails, copy the file contents from src to dst.
+func CopyFile(src, dst string) (err error) {
+	sfi, err := os.Stat(src)
+	if err != nil {
+		return
+	}
+	if !sfi.Mode().IsRegular() {
+		// cannot copy non-regular files (e.g., directories,
+		// symlinks, devices, etc.)
+		return fmt.Errorf("non-regular source file %s (%q)", sfi.Name(), sfi.Mode().String())
+	}
+	dfi, err := os.Stat(dst)
+	if err != nil {
+		if !os.IsNotExist(err) {
+			return
+		}
+	} else {
+		if !(dfi.Mode().IsRegular()) {
+			return fmt.Errorf("non-regular destination file %s (%q)", dfi.Name(), dfi.Mode().String())
+		}
+		if os.SameFile(sfi, dfi) {
+			return
+		}
+	}
+	if err = os.Link(src, dst); err == nil {
+		return
+	}
+	err = copyFileContents(src, dst)
+	return
+}
+
+// copyFileContents copies the contents of the file named src to the file named
+// by dst. The file will be created if it does not already exist. If the
+// destination file exists, all its contents will be replaced by the contents
+// of the source file.
+func copyFileContents(src, dst string) (err error) { + in, err := os.Open(src) + if err != nil { + return + } + defer in.Close() + out, err := os.Create(dst) + if err != nil { + return + } + defer func() { + cerr := out.Close() + if err == nil { + err = cerr + } + }() + if _, err = io.Copy(out, in); err != nil { + return + } + err = out.Sync() + return +} + +// Returns the same slice, but with all strings that are equal (with strings.EqualFold) +// included only once +func UniqueIgnoringCase(s []string) []string { + unique := []string{} + + for _, str := range s { + isDuplicate := false + for _, u := range unique { + if strings.EqualFold(str, u) { + isDuplicate = true + break + } + } + if !isDuplicate { + unique = append(unique, str) + } + } + + return unique +} + +// Removes duplicates in a string set +func Unique(xs *[]string) { + found := make(map[string]bool) + j := 0 + for i, x := range *xs { + if !found[x] { + found[x] = true + (*xs)[j] = (*xs)[i] + j++ + } + } + *xs = (*xs)[:j] +} + +// Returns the same slice, but with all entries that contain non ASCII characters removed +func RemoveNonAscii(s []string) []string { + filtered := []string{} + for _, str := range s { + isAscii := true + for _, r := range str { + if r > 127 { + isAscii = false + break + } + } + if isAscii { + filtered = append(filtered, str) + } + } + return filtered +} + +// Returns only items that are in one slice but not the other +func RemoveInBoth(s, c []string) []string { + result := []string{} + set := make(map[string]struct{}) + + for _, str := range c { + set[str] = struct{}{} + } + + for _, str := range s { + if _, exists := set[str]; !exists { + result = append(result, str) + } + } + + return result +} + +// MoveFirstMatchToFront moves the first string containing the substring to the front of the slice. +func MoveFirstMatchToFront(slice []string, substring string) []string { + for i, s := range slice { + if strings.Contains(s, substring) { + if i == 0 { + return slice // already at the front + } + // Move the matching element to the front + return append([]string{slice[i]}, append(slice[:i], slice[i+1:]...)...) 
+ } + } + // No match found, return unchanged + return slice +} + +// Taken with little modification from +// https://gist.github.com/dopey/c69559607800d2f2f90b1b1ed4e550fb?permalink_comment_id=3527095#gistcomment-3527095 +func GenerateRandomString(length int) (string, error) { + const letters = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-" + ret := make([]byte, length) + for i := range length { + num, err := rand.Int(rand.Reader, big.NewInt(int64(len(letters)))) + if err != nil { + return "", err + } + ret[i] = letters[num.Int64()] + } + + return string(ret), nil +} + +// Essentially the same as utils.WriteError(w, `{"error": "message"}`, code) +func WriteError(w http.ResponseWriter, message string, code int) { + http.Error(w, fmt.Sprintf(`{"error":"%s"}`, message), code) +} + +// Sets content type and status code, and encodes data to json +func WriteJSON(w http.ResponseWriter, status int, data any) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(status) + json.NewEncoder(w).Encode(data) +} + +// Returns true if more than one string is not empty +func MoreThanOneString(s ...string) bool { + count := 0 + for _, str := range s { + if str != "" { + count++ + } + } + return count > 1 +} diff --git a/internal/utils/utils_test.go b/internal/utils/utils_test.go new file mode 100644 index 0000000..c423d16 --- /dev/null +++ b/internal/utils/utils_test.go @@ -0,0 +1,49 @@ +package utils_test + +import ( + "testing" + + "github.com/gabehf/koito/internal/utils" + "github.com/stretchr/testify/assert" +) + +func TestRemoveNonAscii(t *testing.T) { + expected := [][]string{ + []string{"test1", "test2"}, []string{"test1", "test2"}, + []string{"ネクライトーキー", "NECRY TALKIE"}, []string{"NECRY TALKIE"}, + []string{"BFY#& cn&W,KE|"}, []string{"BFY#& cn&W,KE|"}, + []string{"もっさ"}, []string{}, + } + + for i := 0; i < len(expected)/2; i = i + 2 { + r := utils.RemoveNonAscii(expected[i]) + assert.EqualValues(t, expected[i+1], r) + } +} + +func TestUniqueIgnoringCase(t *testing.T) { + expected := [][]string{ + []string{"Necry Talkie", "NECRY TALKIE"}, []string{"Necry Talkie"}, + []string{"ネクライトーキー", "NECRY TALKIE"}, []string{"ネクライトーキー", "NECRY TALKIE"}, + []string{"BFY#& cn&W,KE|"}, []string{"BFY#& cn&W,KE|"}, + []string{"もっさ"}, []string{"もっさ"}, + } + + for i := 0; i < len(expected)/2; i = i + 2 { + r := utils.UniqueIgnoringCase(expected[i]) + assert.EqualValues(t, expected[i+1], r) + } +} + +func TestRemoveInBoth(t *testing.T) { + expected := [][]string{ + {"Necry Talkie", "NECRY TALKIE"}, {"Necry Talkie"}, {"NECRY TALKIE"}, + {"ネクライトーキー", "NECRY TALKIE"}, {"ネクライトーキー", "NECRY TALKIE"}, {}, + {"BFY#& cn&W,KE|", "bleh"}, {"BFY#& cn&W,KE|"}, {"bleh"}, + } + + for i := 0; i < len(expected)/3; i = i + 3 { + r := utils.RemoveInBoth(expected[i], expected[i+1]) + assert.EqualValues(t, expected[i+2], r) + } +} diff --git a/queue/queue.go b/queue/queue.go new file mode 100644 index 0000000..3f1e7fa --- /dev/null +++ b/queue/queue.go @@ -0,0 +1,81 @@ +package queue + +import ( + "context" + "log" + "net/http" + "sync" + "time" + + "golang.org/x/time/rate" +) + +// RequestResult holds the result of a queued request. +type RequestResult struct { + Body []byte + Err error +} + +// RequestFunc is a function that performs an HTTP request using the provided client, +// and sends its result to the given result channel. 
+type RequestFunc func(client *http.Client, done chan<- RequestResult) + +type RequestQueue struct { + client *http.Client + limiter *rate.Limiter + queue chan func(*http.Client) // now this is a wrapped closure + wg sync.WaitGroup + ctx context.Context + cancel context.CancelFunc +} + +// NewRequestQueue creates a new rate-limited request queue. +// `rps` = requests per second, `burst` = burst capacity +func NewRequestQueue(rps int, burst int) *RequestQueue { + ctx, cancel := context.WithCancel(context.Background()) + q := &RequestQueue{ + client: &http.Client{Timeout: 10 * time.Second}, + limiter: rate.NewLimiter(rate.Every(time.Second/time.Duration(rps)), burst), + queue: make(chan func(*http.Client), 100), // accepts wrapped closures + ctx: ctx, + cancel: cancel, + } + q.start() + return q +} + +// Enqueue adds a new request to the queue and returns a result channel. +func (q *RequestQueue) Enqueue(job RequestFunc) <-chan RequestResult { + resultChan := make(chan RequestResult, 1) + q.queue <- func(client *http.Client) { + job(client, resultChan) + } + return resultChan +} + +// start begins the worker loop. +func (q *RequestQueue) start() { + q.wg.Add(1) + go func() { + defer q.wg.Done() + for { + select { + case <-q.ctx.Done(): + return + case job := <-q.queue: + if err := q.limiter.Wait(q.ctx); err != nil { + log.Println("[queue] limiter wait failed:", err) + continue + } + go job(q.client) + } + } + }() +} + +// Shutdown stops the queue and waits for the worker to finish. +func (q *RequestQueue) Shutdown() { + q.cancel() + q.wg.Wait() + close(q.queue) +} diff --git a/romanizer/romanizer.go b/romanizer/romanizer.go new file mode 100644 index 0000000..ec782f3 --- /dev/null +++ b/romanizer/romanizer.go @@ -0,0 +1,27 @@ +// unused +package romanizer + +import ( + "regexp" + "strings" + + "github.com/gosimple/unidecode" +) + +// regex to match only Latin letters, numbers, basic punctuation and spaces +var latinCharset = regexp.MustCompile(`^[\p{Latin}\p{P}\p{N}\p{Zs}]+$`) + +// Romanize returns a romanized version of the input string if it contains non-Latin characters. +// If the input is already in Latin script, it returns an empty string. 
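+// Examples (editor's sketch, not part of the original commit; the exact
+// transliteration is whatever the unidecode package produces):
+//
+//	Romanize("The Story So Far") // "" — already Latin script
+//	Romanize("ネクライトーキー")        // ASCII transliteration of the kana, trimmed
+//	Romanize("   ")              // "" — blank input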
+func Romanize(input string) string { + trimmed := strings.TrimSpace(input) + if trimmed == "" { + return "" + } + + if latinCharset.MatchString(trimmed) { + return "" + } + + return strings.TrimSpace(unidecode.Unidecode(trimmed)) +} diff --git a/sqlc.yaml b/sqlc.yaml new file mode 100644 index 0000000..45095c3 --- /dev/null +++ b/sqlc.yaml @@ -0,0 +1,57 @@ +version: "2" +sql: + - engine: "postgresql" + schema: "./db/migrations" + queries: "./db/queries" + gen: + go: + package: "repository" + out: "internal/repository" + sql_package: "pgx/v5" + rename: + musicbrainz_id: "MusicBrainzID" + overrides: + - column: "artists.id" + go_type: "int32" + - column: "release_groups.id" + go_type: "int32" + - column: "tracks.id" + go_type: "int32" + - column: "listens.id" + go_type: "int32" + - db_type: "uuid" + go_type: + import: "github.com/google/uuid" + type: "UUID" + pointer: true + nullable: true + - db_type: "uuid" + go_type: + import: "github.com/google/uuid" + type: "UUID" + - db_type: "timestamptz" + go_type: + import: "time" + type: "Time" + - db_type: "timestamp" + go_type: + import: "time" + type: "Time" + - column: "bucketed_listens.bucket_start" + go_type: + import: "time" + type: "Time" + - column: "listens.client" + go_type: + type: "string" + pointer: true + nullable: true + - column: "sessions.expires_at" + go_type: + import: "time" + type: "Time" + - column: "sessions.created_at" + go_type: + import: "time" + type: "Time" + diff --git a/static/Streaming_History_Audio_spotify_import_test.json b/static/Streaming_History_Audio_spotify_import_test.json new file mode 100644 index 0000000..71186e4 --- /dev/null +++ b/static/Streaming_History_Audio_spotify_import_test.json @@ -0,0 +1,977 @@ +[ + { + "ts": "2025-04-28T21:04:35Z", + "platform": "windows", + "ms_played": 1603, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "only my railgun", + "master_metadata_album_artist_name": "fripSide", + "master_metadata_album_album_name": "infinite synthesis", + "spotify_track_uri": "spotify:track:3aJ2aJz5xL03hpaqdPS7Ah", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745874272, + "incognito_mode": false + }, + { + "ts": "2025-04-28T21:04:49Z", + "platform": "windows", + "ms_played": 10953, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "LEVEL5-judgelight-", + "master_metadata_album_artist_name": "fripSide", + "master_metadata_album_album_name": "infinite synthesis", + "spotify_track_uri": "spotify:track:0hjif67e3mBkrPIlRDXHLS", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745874274, + "incognito_mode": false + }, + { + "ts": "2025-04-28T21:16:38Z", + "platform": "windows", + "ms_played": 93283, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Red Liberation", + "master_metadata_album_artist_name": "fripSide", + "master_metadata_album_album_name": "infinite Resonance 2", + "spotify_track_uri": 
"spotify:track:2B8geqnq9YIym0ixYn83Pd", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "logout", + "shuffle": false, + "skipped": false, + "offline": false, + "offline_timestamp": 1745874288, + "incognito_mode": false + }, + { + "ts": "2025-04-28T22:29:29Z", + "platform": "android", + "ms_played": 9640, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "New Genesis", + "master_metadata_album_artist_name": "Ado", + "master_metadata_album_album_name": "UTA'S SONGS ONE PIECE FILM RED", + "spotify_track_uri": "spotify:track:6A8NfypDHuwiLWbo4a1yca", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "logout", + "shuffle": true, + "skipped": false, + "offline": false, + "offline_timestamp": 1745878757, + "incognito_mode": false + }, + { + "ts": "2025-04-29T00:37:09Z", + "platform": "windows", + "ms_played": 181028, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Clairvoyant", + "master_metadata_album_artist_name": "The Story So Far", + "master_metadata_album_album_name": "The Story So Far / Stick To Your Guns Split", + "spotify_track_uri": "spotify:track:5fgnsSQYKIlEn2KTQcGjh2", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "playbtn", + "reason_end": "trackdone", + "shuffle": false, + "skipped": false, + "offline": false, + "offline_timestamp": 1745886847, + "incognito_mode": false + }, + { + "ts": "2025-04-29T00:59:36Z", + "platform": "windows", + "ms_played": 824, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Clairvoyant", + "master_metadata_album_artist_name": "The Story So Far", + "master_metadata_album_album_name": "The Story So Far / Stick To Your Guns Split", + "spotify_track_uri": "spotify:track:5fgnsSQYKIlEn2KTQcGjh2", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "trackdone", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745887028, + "incognito_mode": false + }, + { + "ts": "2025-04-29T01:15:07Z", + "platform": "windows", + "ms_played": 4443, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "比翼の羽根", + "master_metadata_album_artist_name": "eufonius", + "master_metadata_album_album_name": "比翼の羽根", + "spotify_track_uri": "spotify:track:6FFshKmfm9h5MBpnsRO73c", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": true, + "skipped": true, + "offline": false, + "offline_timestamp": 1745888374, + "incognito_mode": false + }, + { + "ts": "2025-04-29T01:16:02Z", + "platform": "windows", + "ms_played": 36093, + "conn_country": "US", + "ip_addr": 
"x.x.x.x", + "master_metadata_track_name": "ファジーネーブル", + "master_metadata_album_artist_name": "Conton Candy", + "master_metadata_album_album_name": "melt pop", + "spotify_track_uri": "spotify:track:3FniX6mJvTQWruKp5PDexD", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745889306, + "incognito_mode": false + }, + { + "ts": "2025-04-29T01:25:35Z", + "platform": "windows", + "ms_played": 54615, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Bad Spanish", + "master_metadata_album_artist_name": "Nervous Dater", + "master_metadata_album_album_name": "Don't Be a Stranger", + "spotify_track_uri": "spotify:track:793ILNfrm9dQyp3k0P53HG", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745889361, + "incognito_mode": false + }, + { + "ts": "2025-04-29T01:28:08Z", + "platform": "windows", + "ms_played": 153153, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Fight Song", + "master_metadata_album_artist_name": "Rachel Platten", + "master_metadata_album_album_name": "Wildfire", + "spotify_track_uri": "spotify:track:37f4ITSlgPX81ad2EvmVQr", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745889934, + "incognito_mode": false + }, + { + "ts": "2025-04-29T01:29:19Z", + "platform": "windows", + "ms_played": 70473, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Cake By The Ocean", + "master_metadata_album_artist_name": "DNCE", + "master_metadata_album_album_name": "DNCE", + "spotify_track_uri": "spotify:track:76hfruVvmfQbw0eYn1nmeC", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745890087, + "incognito_mode": false + }, + { + "ts": "2025-04-29T01:31:08Z", + "platform": "windows", + "ms_played": 108465, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "The Sweet Escape", + "master_metadata_album_artist_name": "Gwen Stefani", + "master_metadata_album_album_name": "The Sweet Escape", + "spotify_track_uri": "spotify:track:66ZcOcouenzZEnzTJvoFmH", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745890158, + "incognito_mode": false 
+ }, + { + "ts": "2025-04-29T01:33:19Z", + "platform": "windows", + "ms_played": 130353, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "two", + "master_metadata_album_artist_name": "bbno$", + "master_metadata_album_album_name": "two", + "spotify_track_uri": "spotify:track:6DRZmJa38MaMNthwG3fCBD", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745890267, + "incognito_mode": false + }, + { + "ts": "2025-04-29T01:37:48Z", + "platform": "windows", + "ms_played": 35993, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "C’est La Vie (with bbno$ & Rich Brian)", + "master_metadata_album_artist_name": "Yung Gravy", + "master_metadata_album_album_name": "Marvelous", + "spotify_track_uri": "spotify:track:3QqOcLtTU8zzlQRJCZzttP", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745890398, + "incognito_mode": false + }, + { + "ts": "2025-04-29T02:23:28Z", + "platform": "windows", + "ms_played": 22337, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Metal", + "master_metadata_album_artist_name": "The Beths", + "master_metadata_album_album_name": "Metal", + "spotify_track_uri": "spotify:track:6KF6TkyYpEWKg6BZ3OYJz7", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "logout", + "shuffle": false, + "skipped": false, + "offline": false, + "offline_timestamp": 1745890667, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:14:32Z", + "platform": "windows", + "ms_played": 21414, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "SHINUNA!", + "master_metadata_album_artist_name": "Kocchi no Kento", + "master_metadata_album_album_name": "SHINUNA!", + "spotify_track_uri": "spotify:track:5QSo4Jbok8O9EgeDkumK9q", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "logout", + "shuffle": false, + "skipped": false, + "offline": false, + "offline_timestamp": 1745905178, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:36:54Z", + "platform": "windows", + "ms_played": 53063, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "I'm getting on the bus to the other world, see ya!", + "master_metadata_album_artist_name": "TUYU", + "master_metadata_album_album_name": "It's Raining After All", + "spotify_track_uri": "spotify:track:3rCJptQKkXrTx6qUXqz7dD", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + 
"reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745947869, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:37:41Z", + "platform": "windows", + "ms_played": 28193, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Harukaze", + "master_metadata_album_artist_name": "Matsuri", + "master_metadata_album_album_name": "Harukaze", + "spotify_track_uri": "spotify:track:21Jj7td1D5HQYr18MLZTLS", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745948214, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:38:16Z", + "platform": "windows", + "ms_played": 10763, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Otona no Jijo", + "master_metadata_album_artist_name": "Za Ninngenn", + "master_metadata_album_album_name": "Sanman", + "spotify_track_uri": "spotify:track:6BfDkvp3wJq7cA0xDWDHAI", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745948260, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:39:06Z", + "platform": "windows", + "ms_played": 4393, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Mela!", + "master_metadata_album_artist_name": "Ryokuoushoku Shakai", + "master_metadata_album_album_name": "Mela!", + "spotify_track_uri": "spotify:track:6IO5nn84TKArsi3cjpIqaD", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745948296, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:41:01Z", + "platform": "windows", + "ms_played": 12676, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "moved along", + "master_metadata_album_artist_name": "wilt", + "master_metadata_album_album_name": "moved along", + "spotify_track_uri": "spotify:track:3CZZnpgvHNR71M4QnkQjzl", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": true, + "skipped": true, + "offline": false, + "offline_timestamp": 1745948346, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:42:44Z", + "platform": "windows", + "ms_played": 33263, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Air Guitar", + "master_metadata_album_artist_name": "Sobs", + "master_metadata_album_album_name": "Air Guitar", + "spotify_track_uri": "spotify:track:1ZL73Fic49PdXUSvL69wh8", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + 
"audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": true, + "skipped": true, + "offline": false, + "offline_timestamp": 1745948460, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:42:56Z", + "platform": "windows", + "ms_played": 9943, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Pharmacist", + "master_metadata_album_artist_name": "Alvvays", + "master_metadata_album_album_name": "Blue Rev", + "spotify_track_uri": "spotify:track:3r2vyNnqFKr6IraCqLtoBI", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": true, + "skipped": true, + "offline": false, + "offline_timestamp": 1745948563, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:44:41Z", + "platform": "windows", + "ms_played": 28573, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "freequent letdown", + "master_metadata_album_artist_name": "illuminati hotties", + "master_metadata_album_album_name": "FREE I.H: This Is Not the One You've Been Waiting For", + "spotify_track_uri": "spotify:track:5ZJfOkp2r5AbLjRdnu3UQd", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": true, + "skipped": true, + "offline": false, + "offline_timestamp": 1745948576, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:46:12Z", + "platform": "windows", + "ms_played": 6041, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Choke", + "master_metadata_album_artist_name": "I DONT KNOW HOW BUT THEY FOUND ME", + "master_metadata_album_album_name": "Choke", + "spotify_track_uri": "spotify:track:37mfTcSlX60JtAvAETytGs", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745948680, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:47:27Z", + "platform": "windows", + "ms_played": 28523, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "We Own the Night", + "master_metadata_album_artist_name": "Dance Gavin Dance", + "master_metadata_album_album_name": "Instant Gratification", + "spotify_track_uri": "spotify:track:7xCcUcMcGsIYGKUVgBZUw5", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745948771, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:52:13Z", + "platform": "windows", + "ms_played": 72901, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "鏡面の波", + "master_metadata_album_artist_name": "YURiKA", + "master_metadata_album_album_name": "鏡面の波", + "spotify_track_uri": 
"spotify:track:17pYAFEZjKZFU5PHiUMzqx", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745948847, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:52:34Z", + "platform": "windows", + "ms_played": 15443, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "パレイド", + "master_metadata_album_artist_name": "syh", + "master_metadata_album_album_name": "パレイド", + "spotify_track_uri": "spotify:track:7uXzW6dPhkd4NbRv8sLNS6", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745949133, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:53:33Z", + "platform": "windows", + "ms_played": 40213, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Burning Friday Night", + "master_metadata_album_artist_name": "Lucky Kilimanjaro", + "master_metadata_album_album_name": "FULLCOLOR", + "spotify_track_uri": "spotify:track:1NlkoYEA1ndLQIKzXTPh9V", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745949154, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:53:45Z", + "platform": "windows", + "ms_played": 11946, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Better Things", + "master_metadata_album_artist_name": "aespa", + "master_metadata_album_album_name": "Drama - The 4th Mini Album", + "spotify_track_uri": "spotify:track:330IIz7d75eqAsKq1xhzXR", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745949212, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:53:57Z", + "platform": "windows", + "ms_played": 9953, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Thirsty", + "master_metadata_album_artist_name": "aespa", + "master_metadata_album_album_name": "MY WORLD - The 3rd Mini Album", + "spotify_track_uri": "spotify:track:6nICBdDevG4NZysIqDFPEa", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745949225, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:54:47Z", + "platform": "windows", + "ms_played": 44470, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Lucid Dream", + 
"master_metadata_album_artist_name": "aespa", + "master_metadata_album_album_name": "Savage - The 1st Mini Album", + "spotify_track_uri": "spotify:track:285Bh5EkbxGGE76ge8JDbH", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745949237, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:55:59Z", + "platform": "windows", + "ms_played": 70353, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Girls Never Die", + "master_metadata_album_artist_name": "tripleS", + "master_metadata_album_album_name": "", + "spotify_track_uri": "spotify:track:45OflED18VsURGw2z0Y6Cv", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745949286, + "incognito_mode": false + }, + { + "ts": "2025-04-29T17:58:34Z", + "platform": "windows", + "ms_played": 8546, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Midas Touch", + "master_metadata_album_artist_name": "KISS OF LIFE", + "master_metadata_album_album_name": "Midas Touch", + "spotify_track_uri": "spotify:track:0vaxYDAuAO1nPolC6bQp7V", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745949358, + "incognito_mode": false + }, + { + "ts": "2025-04-29T19:59:23Z", + "platform": "windows", + "ms_played": 3033, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "I Will Go To You Like the First Snow", + "master_metadata_album_artist_name": "AILEE", + "master_metadata_album_album_name": "Guardian (Original Television Soundtrack), Pt. 
9", + "spotify_track_uri": "spotify:track:2BPXILn0MqOe5WroVXlvN1", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "logout", + "shuffle": false, + "skipped": false, + "offline": false, + "offline_timestamp": 1745949513, + "incognito_mode": false + }, + { + "ts": "2025-04-29T21:55:06Z", + "platform": "windows", + "ms_played": 45964, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Ready or Not", + "master_metadata_album_artist_name": "Bridgit Mendler", + "master_metadata_album_album_name": "Hello My Name Is...", + "spotify_track_uri": "spotify:track:5xvUgoVED1F4mBu8FL0HaW", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745963659, + "incognito_mode": false + }, + { + "ts": "2025-04-29T22:04:53Z", + "platform": "windows", + "ms_played": 44523, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "お勉強しといてよ", + "master_metadata_album_artist_name": "ZUTOMAYO", + "master_metadata_album_album_name": "お勉強しといてよ", + "spotify_track_uri": "spotify:track:6k90ibcH1z8Mx9684nfuLW", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745963706, + "incognito_mode": false + }, + { + "ts": "2025-04-29T22:06:52Z", + "platform": "windows", + "ms_played": 8893, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "Finale.", + "master_metadata_album_artist_name": "eill", + "master_metadata_album_album_name": "my dream box", + "spotify_track_uri": "spotify:track:2uGJ89l8tciHkYxzJF3xv6", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "endplay", + "shuffle": false, + "skipped": true, + "offline": false, + "offline_timestamp": 1745964293, + "incognito_mode": false + }, + { + "ts": "2025-04-29T23:12:45Z", + "platform": "windows", + "ms_played": 5883, + "conn_country": "US", + "ip_addr": "x.x.x.x", + "master_metadata_track_name": "ただ君に晴れ", + "master_metadata_album_artist_name": "ヨルシカ", + "master_metadata_album_album_name": "負け犬にアンコールはいらない", + "spotify_track_uri": "spotify:track:3wJHCry960drNlAUGrJLmz", + "episode_name": null, + "episode_show_name": null, + "spotify_episode_uri": null, + "audiobook_title": null, + "audiobook_uri": null, + "audiobook_chapter_uri": null, + "audiobook_chapter_title": null, + "reason_start": "clickrow", + "reason_end": "logout", + "shuffle": false, + "skipped": false, + "offline": false, + "offline_timestamp": 1745964412, + "incognito_mode": false + } +] \ No newline at end of file diff --git a/static/maloja_import_test.json b/static/maloja_import_test.json new file mode 100644 index 0000000..7846020 --- /dev/null +++ 
b/static/maloja_import_test.json @@ -0,0 +1,771 @@ +{ + "maloja": { + "export_time": 1748738994 + }, + "scrobbles": [ + { + "time": 1746434410, + "track": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "title": "\u4f0a\u5439", + "album": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746434682, + "track": { + "artists": [ + "Magnify Tokyo \u2022 J.Rio", + "J.Rio" + ], + "title": "\u673d\u3061\u306a\u3044\u51a0", + "album": { + "artists": [ + "Magnify Tokyo \u2022 J.Rio", + "J.Rio" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746434899, + "track": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "title": "\u30ad\u30df\u3060\u3051\u306e", + "album": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746435135, + "track": { + "artists": [ + "Lauren Horii", + "Magnify Tokyo \u2022 Lauren Horii" + ], + "title": "Kokoro", + "album": { + "artists": [ + "Lauren Horii", + "Magnify Tokyo \u2022 Lauren Horii" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746435351, + "track": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "title": "Tsunagu", + "album": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746435518, + "track": { + "artists": [ + "Magnify Tokyo \u2022 Philip Shibata", + "Philip Shibata" + ], + "title": "\u4e3b\u306e\u5fa1\u540d\u3092", + "album": { + "artists": [ + "Magnify Tokyo \u2022 Philip Shibata", + "Philip Shibata" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746435766, + "track": { + "artists": [ + "Magnify Tokyo \u2022 J.Rio", + "J.Rio" + ], + "title": "\u306a\u3093\u3069\u3067\u3082", + "album": { + "artists": [ + "Magnify Tokyo \u2022 J.Rio", + "J.Rio" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746436009, + "track": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "title": "Yomichi", + "album": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746436289, + "track": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "title": "\u5341\u5b57\u67b6\u306e\u9670\u306b", + "album": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746436515, + "track": { + "artists": [ + "Lauren Horii", + "Philip Shibata", + "Magnify Tokyo \u2022 Philip Shibata \u2022 Lauren Horii" + ], + "title": "\u3042\u306a\u305f\u3060\u3051\u304c", + "album": { + "artists": [ + "Lauren Horii", + "Philip Shibata", + "Magnify Tokyo \u2022 Philip Shibata \u2022 Lauren Horii" + ], + 
"albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746565073, + "track": { + "artists": [ + "Lauren Horii", + "Magnify Tokyo \u2022 Lauren Horii" + ], + "title": "Kokoro", + "album": { + "artists": [ + "Lauren Horii", + "Magnify Tokyo \u2022 Lauren Horii" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746565287, + "track": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "title": "Tsunagu", + "album": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746565454, + "track": { + "artists": [ + "Magnify Tokyo \u2022 Philip Shibata", + "Philip Shibata" + ], + "title": "\u4e3b\u306e\u5fa1\u540d\u3092", + "album": { + "artists": [ + "Magnify Tokyo \u2022 Philip Shibata", + "Philip Shibata" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746565702, + "track": { + "artists": [ + "Magnify Tokyo \u2022 J.Rio", + "J.Rio" + ], + "title": "\u306a\u3093\u3069\u3067\u3082", + "album": { + "artists": [ + "Magnify Tokyo \u2022 J.Rio", + "J.Rio" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746565942, + "track": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "title": "Yomichi", + "album": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746675800, + "track": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "title": "\u4f0a\u5439", + "album": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746676072, + "track": { + "artists": [ + "Magnify Tokyo \u2022 J.Rio", + "J.Rio" + ], + "title": "\u673d\u3061\u306a\u3044\u51a0", + "album": { + "artists": [ + "Magnify Tokyo \u2022 J.Rio", + "J.Rio" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746676289, + "track": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "title": "\u30ad\u30df\u3060\u3051\u306e", + "album": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746676518, + "track": { + "artists": [ + "Lauren Horii", + "Magnify Tokyo \u2022 Lauren Horii" + ], + "title": "Kokoro", + "album": { + "artists": [ + "Lauren Horii", + "Magnify Tokyo \u2022 Lauren Horii" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746676732, + "track": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "title": "Tsunagu", + "album": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + 
"time": 1746838922, + "track": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "title": "\u4f0a\u5439", + "album": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746839194, + "track": { + "artists": [ + "Magnify Tokyo \u2022 J.Rio", + "J.Rio" + ], + "title": "\u673d\u3061\u306a\u3044\u51a0", + "album": { + "artists": [ + "Magnify Tokyo \u2022 J.Rio", + "J.Rio" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746839410, + "track": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "title": "\u30ad\u30df\u3060\u3051\u306e", + "album": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746839640, + "track": { + "artists": [ + "Lauren Horii", + "Magnify Tokyo \u2022 Lauren Horii" + ], + "title": "Kokoro", + "album": { + "artists": [ + "Lauren Horii", + "Magnify Tokyo \u2022 Lauren Horii" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746839853, + "track": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "title": "Tsunagu", + "album": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746840020, + "track": { + "artists": [ + "Magnify Tokyo \u2022 Philip Shibata", + "Philip Shibata" + ], + "title": "\u4e3b\u306e\u5fa1\u540d\u3092", + "album": { + "artists": [ + "Magnify Tokyo \u2022 Philip Shibata", + "Philip Shibata" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746840268, + "track": { + "artists": [ + "Magnify Tokyo \u2022 J.Rio", + "J.Rio" + ], + "title": "\u306a\u3093\u3069\u3067\u3082", + "album": { + "artists": [ + "Magnify Tokyo \u2022 J.Rio", + "J.Rio" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1746840511, + "track": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "title": "Yomichi", + "album": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1747139500, + "track": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "title": "\u4f0a\u5439", + "album": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1747139772, + "track": { + "artists": [ + "Magnify Tokyo \u2022 J.Rio", + "J.Rio" + ], + "title": "\u673d\u3061\u306a\u3044\u51a0", + "album": { + "artists": [ + "Magnify Tokyo \u2022 J.Rio", + "J.Rio" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1747139988, + "track": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "title": 
"\u30ad\u30df\u3060\u3051\u306e", + "album": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1747140218, + "track": { + "artists": [ + "Lauren Horii", + "Magnify Tokyo \u2022 Lauren Horii" + ], + "title": "Kokoro", + "album": { + "artists": [ + "Lauren Horii", + "Magnify Tokyo \u2022 Lauren Horii" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1747140431, + "track": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "title": "Tsunagu", + "album": { + "artists": [ + "Kanade Ishihara", + "Magnify Tokyo \u2022 Kanade Ishihara" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1747140598, + "track": { + "artists": [ + "Magnify Tokyo \u2022 Philip Shibata", + "Philip Shibata" + ], + "title": "\u4e3b\u306e\u5fa1\u540d\u3092", + "album": { + "artists": [ + "Magnify Tokyo \u2022 Philip Shibata", + "Philip Shibata" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1747140846, + "track": { + "artists": [ + "Magnify Tokyo \u2022 J.Rio", + "J.Rio" + ], + "title": "\u306a\u3093\u3069\u3067\u3082", + "album": { + "artists": [ + "Magnify Tokyo \u2022 J.Rio", + "J.Rio" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1747141089, + "track": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "title": "Yomichi", + "album": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1747141369, + "track": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "title": "\u5341\u5b57\u67b6\u306e\u9670\u306b", + "album": { + "artists": [ + "Cherish", + "Magnify Tokyo \u2022 Cherish" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + }, + { + "time": 1747141595, + "track": { + "artists": [ + "Lauren Horii", + "Philip Shibata", + "Magnify Tokyo \u2022 Philip Shibata \u2022 Lauren Horii" + ], + "title": "\u3042\u306a\u305f\u3060\u3051\u304c", + "album": { + "artists": [ + "Lauren Horii", + "Philip Shibata", + "Magnify Tokyo \u2022 Philip Shibata \u2022 Lauren Horii" + ], + "albumtitle": "Magnify Tokyo" + }, + "length": null + }, + "duration": null, + "origin": "client:default" + } + ] + } \ No newline at end of file diff --git a/static/yuu.jpg b/static/yuu.jpg new file mode 100644 index 0000000..fdbaf3d Binary files /dev/null and b/static/yuu.jpg differ diff --git a/testing_init/testing_init.go b/testing_init/testing_init.go new file mode 100644 index 0000000..c1f628c --- /dev/null +++ b/testing_init/testing_init.go @@ -0,0 +1,16 @@ +package testing_init + +import ( + "os" + "path" + "runtime" +) + +func init() { + _, filename, _, _ := runtime.Caller(0) + dir := path.Join(path.Dir(filename), "..") + err := os.Chdir(dir) + if err != nil { + panic(err) + } +}