mirror of
https://github.com/gabehf/Koito.git
synced 2026-04-22 12:01:52 -07:00
chore: initial public commit
This commit is contained in:
commit
fc9054b78c
250 changed files with 32809 additions and 0 deletions
243
internal/catalog/associate_album.go
Normal file
243
internal/catalog/associate_album.go
Normal file
|
|
@ -0,0 +1,243 @@
|
|||
package catalog
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"slices"
|
||||
|
||||
"github.com/gabehf/koito/internal/cfg"
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/images"
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/mbz"
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/gabehf/koito/internal/utils"
|
||||
"github.com/google/uuid"
|
||||
"github.com/jackc/pgx/v5"
|
||||
)
|
||||
|
||||
// AssociateAlbumOpts carries the inputs AssociateAlbum uses to match a listen
// to an existing album or create a new one.
type AssociateAlbumOpts struct {
	// Artists already associated with the listen; used for artist+title matching.
	Artists []*models.Artist
	// ReleaseMbzID is the MusicBrainz release ID, when supplied by the client.
	ReleaseMbzID uuid.UUID
	// ReleaseGroupMbzID is the MusicBrainz release-group ID; when set it is
	// used to fetch alternate release titles to store as aliases.
	ReleaseGroupMbzID uuid.UUID
	// ReleaseName is the submitted release title; when empty, TrackName is
	// used as the release title instead.
	ReleaseName string
	TrackName   string // required
	// Mbzc performs MusicBrainz lookups.
	Mbzc mbz.MusicBrainzCaller
}
|
||||
|
||||
func AssociateAlbum(ctx context.Context, d db.DB, opts AssociateAlbumOpts) (*models.Album, error) {
|
||||
l := logger.FromContext(ctx)
|
||||
if opts.TrackName == "" {
|
||||
return nil, errors.New("required parameter TrackName missing")
|
||||
}
|
||||
releaseTitle := opts.ReleaseName
|
||||
if releaseTitle == "" {
|
||||
releaseTitle = opts.TrackName
|
||||
}
|
||||
if opts.ReleaseMbzID != uuid.Nil {
|
||||
l.Debug().Msgf("Associating album '%s' by MusicBrainz release ID", releaseTitle)
|
||||
return matchAlbumByMbzReleaseID(ctx, d, opts)
|
||||
} else {
|
||||
l.Debug().Msgf("Associating album '%s' by title and artist", releaseTitle)
|
||||
return matchAlbumByTitle(ctx, d, opts)
|
||||
}
|
||||
}
|
||||
|
||||
// matchAlbumByMbzReleaseID looks up an album by its MusicBrainz release ID.
// On a DB hit it returns a copy of the stored album. On pgx.ErrNoRows it
// attempts to create (or back-fill an existing title match) via
// createOrUpdateAlbumWithMbzReleaseID; if that fails it falls back to plain
// title+artist matching. Any other DB error is returned as-is.
func matchAlbumByMbzReleaseID(ctx context.Context, d db.DB, opts AssociateAlbumOpts) (*models.Album, error) {
	l := logger.FromContext(ctx)
	a, err := d.GetAlbum(ctx, db.GetAlbumOpts{MusicBrainzID: opts.ReleaseMbzID})
	if err == nil {
		l.Debug().Msgf("Found release '%s' by MusicBrainz Release ID", a.Title)
		return &models.Album{
			ID:             a.ID,
			MbzID:          &opts.ReleaseMbzID,
			Title:          a.Title,
			VariousArtists: a.VariousArtists,
			Image:          a.Image,
		}, nil
	} else if !errors.Is(err, pgx.ErrNoRows) {
		// Unexpected DB failure; propagate.
		return nil, err
	} else {
		l.Debug().Msgf("Album '%s' could not be found by MusicBrainz Release ID", opts.ReleaseName)
		rg, err := createOrUpdateAlbumWithMbzReleaseID(ctx, d, opts)
		if err != nil {
			// Creation failed (e.g. MusicBrainz error); last resort is a
			// title+artist match that ignores the MusicBrainz ID.
			return matchAlbumByTitle(ctx, d, opts)
		}
		return rg, nil
	}
}
|
||||
|
||||
// createOrUpdateAlbumWithMbzReleaseID resolves a release through MusicBrainz.
// If an album with a matching title already exists for the first artist, it is
// updated with the MusicBrainz release ID; otherwise a new album is created,
// including an image lookup and (when a release-group ID is available) alias
// titles fetched from MusicBrainz.
//
// NOTE(review): this assumes opts.Artists is non-empty — opts.Artists[0] below
// panics otherwise; confirm callers guarantee at least one artist.
func createOrUpdateAlbumWithMbzReleaseID(ctx context.Context, d db.DB, opts AssociateAlbumOpts) (*models.Album, error) {
	l := logger.FromContext(ctx)
	release, err := opts.Mbzc.GetRelease(ctx, opts.ReleaseMbzID)
	if err != nil {
		// MusicBrainz lookup failed; degrade gracefully to title matching.
		l.Warn().Msg("MusicBrainz unreachable, falling back to release title matching")
		return matchAlbumByTitle(ctx, d, opts)
	}
	var album *models.Album
	// Match against both the canonical MusicBrainz title and the submitted one.
	titles := []string{release.Title, opts.ReleaseName}
	utils.Unique(&titles)
	l.Debug().Msgf("Searching for albums '%v' from artist id %d in DB", titles, opts.Artists[0].ID)
	album, err = d.GetAlbum(ctx, db.GetAlbumOpts{
		ArtistID: opts.Artists[0].ID,
		Titles:   titles,
	})
	if err == nil {
		// Existing album found by title: back-fill its MusicBrainz release ID.
		l.Debug().Msgf("Found album %s, updating with MusicBrainz Release ID...", album.Title)
		err := d.UpdateAlbum(ctx, db.UpdateAlbumOpts{
			ID:            album.ID,
			MusicBrainzID: opts.ReleaseMbzID,
		})
		if err != nil {
			l.Err(err).Msg("Failed to update album with MusicBrainz Release ID")
			return nil, err
		}
		l.Debug().Msgf("Updated album '%s' with MusicBrainz Release ID", album.Title)
		if opts.ReleaseGroupMbzID != uuid.Nil {
			// Best effort: store alternate release titles as aliases.
			aliases, err := opts.Mbzc.GetReleaseTitles(ctx, opts.ReleaseGroupMbzID)
			if err == nil {
				l.Debug().Msgf("Associating aliases '%s' with Release '%s'", aliases, album.Title)
				err = d.SaveAlbumAliases(ctx, album.ID, aliases, "MusicBrainz")
				if err != nil {
					l.Err(err).Msg("Failed to save aliases")
				}
			} else {
				l.Info().AnErr("err", err).Msg("Failed to get release group from MusicBrainz")
			}
		}
	} else if !errors.Is(err, pgx.ErrNoRows) {
		l.Err(err).Msg("Error while searching for album by MusicBrainz Release ID")
		return nil, err
	} else {
		// No existing album: create one from the MusicBrainz release data.
		l.Debug().Msgf("Album %s could not be found. Creating...", release.Title)
		var variousArtists bool
		for _, artistCredit := range release.ArtistCredit {
			if artistCredit.Name == "Various Artists" {
				l.Debug().Msgf("MusicBrainz release group '%s' detected as being a Various Artists compilation release", release.Title)
				variousArtists = true
			}
		}
		l.Debug().Msg("Searching for album images...")
		var imgid uuid.UUID
		imgUrl, err := images.GetAlbumImage(ctx, images.AlbumImageOpts{
			Artists:      utils.UniqueIgnoringCase(slices.Concat(utils.FlattenMbzArtistCreditNames(release.ArtistCredit), utils.FlattenArtistNames(opts.Artists))),
			Album:        release.Title,
			ReleaseMbzID: &opts.ReleaseMbzID,
		})
		if err == nil && imgUrl != "" {
			var size ImageSize
			if cfg.FullImageCacheEnabled() {
				size = ImageSizeFull
			} else {
				size = ImageSizeLarge
			}
			imgid = uuid.New()
			l.Debug().Msg("Downloading album image from source...")
			// Image caching failure is non-fatal; the album is still created.
			err = DownloadAndCacheImage(ctx, imgid, imgUrl, size)
			if err != nil {
				l.Err(err).Msg("Failed to cache image")
			}
		}
		if err != nil {
			l.Debug().Msgf("Failed to get album images for %s: %s", release.Title, err.Error())
		}
		album, err = d.SaveAlbum(ctx, db.SaveAlbumOpts{
			Title:          release.Title,
			MusicBrainzID:  opts.ReleaseMbzID,
			ArtistIDs:      utils.FlattenArtistIDs(opts.Artists),
			VariousArtists: variousArtists,
			Image:          imgid,
			ImageSrc:       imgUrl,
		})
		if err != nil {
			return nil, err
		}
		if opts.ReleaseGroupMbzID != uuid.Nil {
			// Best effort: store alternate release titles as aliases.
			aliases, err := opts.Mbzc.GetReleaseTitles(ctx, opts.ReleaseGroupMbzID)
			if err == nil {
				l.Debug().Msgf("Associating aliases '%s' with Release '%s'", aliases, album.Title)
				err = d.SaveAlbumAliases(ctx, album.ID, aliases, "MusicBrainz")
				if err != nil {
					l.Err(err).Msg("Failed to save aliases")
				}
			} else {
				l.Info().AnErr("err", err).Msg("Failed to get release group from MusicBrainz")
			}
		}
		l.Info().Msgf("Created album '%s' with MusicBrainz Release ID", album.Title)
	}
	// NOTE(review): the returned struct does not carry album.Image, unlike the
	// DB-hit path in matchAlbumByMbzReleaseID — confirm whether intentional.
	return &models.Album{
		ID:             album.ID,
		MbzID:          &opts.ReleaseMbzID,
		Title:          album.Title,
		VariousArtists: album.VariousArtists,
	}, nil
}
|
||||
|
||||
func matchAlbumByTitle(ctx context.Context, d db.DB, opts AssociateAlbumOpts) (*models.Album, error) {
|
||||
l := logger.FromContext(ctx)
|
||||
var releaseName string
|
||||
if opts.ReleaseName != "" {
|
||||
releaseName = opts.ReleaseName
|
||||
} else {
|
||||
releaseName = opts.TrackName
|
||||
}
|
||||
a, err := d.GetAlbum(ctx, db.GetAlbumOpts{
|
||||
Title: releaseName,
|
||||
ArtistID: opts.Artists[0].ID,
|
||||
})
|
||||
if err == nil {
|
||||
l.Debug().Msgf("Found album '%s' by artist and title", a.Title)
|
||||
if a.MbzID == nil && opts.ReleaseMbzID != uuid.Nil {
|
||||
l.Debug().Msgf("Updating album with id %d with MusicBrainz ID %s", a.ID, opts.ReleaseMbzID)
|
||||
err = d.UpdateAlbum(ctx, db.UpdateAlbumOpts{
|
||||
ID: a.ID,
|
||||
MusicBrainzID: opts.ReleaseMbzID,
|
||||
})
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to associate existing release with MusicBrainz ID")
|
||||
}
|
||||
}
|
||||
} else if !errors.Is(err, pgx.ErrNoRows) {
|
||||
return nil, err
|
||||
} else {
|
||||
var imgid uuid.UUID
|
||||
imgUrl, err := images.GetAlbumImage(ctx, images.AlbumImageOpts{
|
||||
Artists: utils.FlattenArtistNames(opts.Artists),
|
||||
Album: opts.ReleaseName,
|
||||
ReleaseMbzID: &opts.ReleaseMbzID,
|
||||
})
|
||||
if err == nil && imgUrl != "" {
|
||||
var size ImageSize
|
||||
if cfg.FullImageCacheEnabled() {
|
||||
size = ImageSizeFull
|
||||
} else {
|
||||
size = ImageSizeLarge
|
||||
}
|
||||
imgid = uuid.New()
|
||||
l.Debug().Msg("Downloading album image from source...")
|
||||
err = DownloadAndCacheImage(ctx, imgid, imgUrl, size)
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to cache image")
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
l.Debug().Msgf("Failed to get album images for %s: %s", opts.ReleaseName, err.Error())
|
||||
}
|
||||
a, err = d.SaveAlbum(ctx, db.SaveAlbumOpts{
|
||||
Title: releaseName,
|
||||
ArtistIDs: utils.FlattenArtistIDs(opts.Artists),
|
||||
Image: imgid,
|
||||
MusicBrainzID: opts.ReleaseMbzID,
|
||||
ImageSrc: imgUrl,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
l.Info().Msgf("Created album '%s' with artist and title", a.Title)
|
||||
}
|
||||
return &models.Album{
|
||||
ID: a.ID,
|
||||
Title: a.Title,
|
||||
}, nil
|
||||
}
|
||||
231
internal/catalog/associate_artists.go
Normal file
231
internal/catalog/associate_artists.go
Normal file
|
|
@ -0,0 +1,231 @@
|
|||
package catalog
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"github.com/gabehf/koito/internal/cfg"
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/images"
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/mbz"
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/google/uuid"
|
||||
"github.com/jackc/pgx/v5"
|
||||
)
|
||||
|
||||
// AssociateArtistsOpts carries the inputs AssociateArtists uses to resolve
// the artists credited on a listen.
type AssociateArtistsOpts struct {
	// ArtistMbzIDs are MusicBrainz artist IDs, matched first when present.
	ArtistMbzIDs []uuid.UUID
	// ArtistNames is an explicit list of artist names, matched second.
	ArtistNames []string
	// ArtistName is the raw artist string; parsed (together with TrackTitle)
	// for featured-artist credits as a last resort.
	ArtistName string
	TrackTitle string
	// Mbzc performs MusicBrainz lookups.
	Mbzc mbz.MusicBrainzCaller
}
|
||||
|
||||
func AssociateArtists(ctx context.Context, d db.DB, opts AssociateArtistsOpts) ([]*models.Artist, error) {
|
||||
l := logger.FromContext(ctx)
|
||||
|
||||
var result []*models.Artist
|
||||
|
||||
if len(opts.ArtistMbzIDs) > 0 {
|
||||
l.Debug().Msg("Associating artists by MusicBrainz ID(s)")
|
||||
mbzMatches, err := matchArtistsByMBID(ctx, d, opts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result = append(result, mbzMatches...)
|
||||
}
|
||||
|
||||
if len(opts.ArtistNames) > len(result) {
|
||||
l.Debug().Msg("Associating artists by list of artist names")
|
||||
nameMatches, err := matchArtistsByNames(ctx, opts.ArtistNames, result, d)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result = append(result, nameMatches...)
|
||||
}
|
||||
|
||||
if len(result) < 1 {
|
||||
allArtists := slices.Concat(opts.ArtistNames, ParseArtists(opts.ArtistName, opts.TrackTitle))
|
||||
l.Debug().Msgf("Associating artists by artist name(s) %v and track title '%s'", allArtists, opts.TrackTitle)
|
||||
fallbackMatches, err := matchArtistsByNames(ctx, allArtists, nil, d)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result = append(result, fallbackMatches...)
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// matchArtistsByMBID resolves each provided MusicBrainz artist ID to a DB
// artist, creating missing artists from MusicBrainz alias data. If any ID is
// uuid.Nil, or MusicBrainz cannot be reached, it falls back to name matching
// for the remainder (keeping whatever was already matched).
func matchArtistsByMBID(ctx context.Context, d db.DB, opts AssociateArtistsOpts) ([]*models.Artist, error) {
	l := logger.FromContext(ctx)
	var result []*models.Artist

	for _, id := range opts.ArtistMbzIDs {
		if id == uuid.Nil {
			// A nil ID means the submitted ID list is unreliable; abandon ID
			// matching entirely and match the rest by name.
			l.Warn().Msg("Provided artist has uuid.Nil MusicBrainzID")
			return matchArtistsByNames(ctx, opts.ArtistNames, result, d)
		}
		a, err := d.GetArtist(ctx, db.GetArtistOpts{
			MusicBrainzID: id,
		})
		if err == nil {
			l.Debug().Msgf("Artist '%s' found by MusicBrainz ID", a.Name)
			result = append(result, a)
			continue
		}

		if !errors.Is(err, pgx.ErrNoRows) {
			return nil, err
		}

		// Not in DB: make sure we have candidate names before resolving,
		// parsing them from the raw strings if none were supplied.
		// (opts.ArtistNames is a local copy; mutating it is loop-local state.)
		if len(opts.ArtistNames) < 1 {
			opts.ArtistNames = slices.Concat(opts.ArtistNames, ParseArtists(opts.ArtistName, opts.TrackTitle))
		}
		a, err = resolveAliasOrCreateArtist(ctx, id, opts.ArtistNames, d, opts.Mbzc)
		if err != nil {
			// MusicBrainz failure: degrade to name matching for the rest.
			l.Warn().Msg("MusicBrainz unreachable, falling back to artist name matching")
			return matchArtistsByNames(ctx, opts.ArtistNames, result, d)
		}
		result = append(result, a)
	}
	return result, nil
}
|
||||
func resolveAliasOrCreateArtist(ctx context.Context, mbzID uuid.UUID, names []string, d db.DB, mbz mbz.MusicBrainzCaller) (*models.Artist, error) {
|
||||
l := logger.FromContext(ctx)
|
||||
|
||||
aliases, err := mbz.GetArtistPrimaryAliases(ctx, mbzID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
l.Debug().Msgf("Got aliases %v from MusicBrainz", aliases)
|
||||
|
||||
for _, alias := range aliases {
|
||||
a, err := d.GetArtist(ctx, db.GetArtistOpts{
|
||||
Name: alias,
|
||||
})
|
||||
if err == nil && (a.MbzID == nil || *a.MbzID == uuid.Nil) {
|
||||
a.MbzID = &mbzID
|
||||
l.Debug().Msgf("Alias '%s' found in DB. Associating with MusicBrainz ID...", alias)
|
||||
if updateErr := d.UpdateArtist(ctx, db.UpdateArtistOpts{ID: a.ID, MusicBrainzID: mbzID}); updateErr != nil {
|
||||
return nil, updateErr
|
||||
}
|
||||
if saveAliasErr := d.SaveArtistAliases(ctx, a.ID, aliases, "MusicBrainz"); saveAliasErr != nil {
|
||||
return nil, saveAliasErr
|
||||
}
|
||||
return a, nil
|
||||
}
|
||||
}
|
||||
|
||||
canonical := aliases[0]
|
||||
for _, alias := range aliases {
|
||||
for _, name := range names {
|
||||
if strings.EqualFold(alias, name) {
|
||||
l.Debug().Msgf("Canonical name for artist is '%s'", alias)
|
||||
canonical = alias
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var imgid uuid.UUID
|
||||
imgUrl, err := images.GetArtistImage(ctx, images.ArtistImageOpts{
|
||||
Aliases: aliases,
|
||||
})
|
||||
if err == nil && imgUrl != "" {
|
||||
var size ImageSize
|
||||
if cfg.FullImageCacheEnabled() {
|
||||
size = ImageSizeFull
|
||||
} else {
|
||||
size = ImageSizeLarge
|
||||
}
|
||||
imgid = uuid.New()
|
||||
l.Debug().Msg("Downloading artist image from source...")
|
||||
err = DownloadAndCacheImage(ctx, imgid, imgUrl, size)
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to cache image")
|
||||
}
|
||||
} else if err != nil {
|
||||
l.Warn().Msgf("Failed to get artist image from ImageSrc: %s", err.Error())
|
||||
}
|
||||
|
||||
u, err := d.SaveArtist(ctx, db.SaveArtistOpts{
|
||||
MusicBrainzID: mbzID,
|
||||
Name: canonical,
|
||||
Aliases: aliases,
|
||||
Image: imgid,
|
||||
ImageSrc: imgUrl,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
l.Info().Msgf("Created artist '%s' with MusicBrainz Artist ID", canonical)
|
||||
return u, nil
|
||||
}
|
||||
|
||||
// matchArtistsByNames resolves each name to a DB artist, creating artists
// that do not exist. Names already covered by `existing` (or by an earlier
// match in this call) are skipped, so the returned slice contains only
// artists that are new relative to `existing`.
func matchArtistsByNames(ctx context.Context, names []string, existing []*models.Artist, d db.DB) ([]*models.Artist, error) {
	l := logger.FromContext(ctx)
	var result []*models.Artist

	for _, name := range names {
		// Skip duplicates across both the caller's matches and our own.
		if artistExists(name, existing) || artistExists(name, result) {
			l.Debug().Msgf("Artist '%s' already found, skipping...", name)
			continue
		}
		a, err := d.GetArtist(ctx, db.GetArtistOpts{
			Name: name,
		})
		if err == nil {
			l.Debug().Msgf("Artist '%s' found in DB", name)
			result = append(result, a)
			continue
		}
		if errors.Is(err, pgx.ErrNoRows) {
			// Not in DB: create, with a best-effort image lookup first.
			var imgid uuid.UUID
			imgUrl, err := images.GetArtistImage(ctx, images.ArtistImageOpts{
				Aliases: []string{name},
			})
			if err == nil && imgUrl != "" {
				var size ImageSize
				if cfg.FullImageCacheEnabled() {
					size = ImageSizeFull
				} else {
					size = ImageSizeLarge
				}
				imgid = uuid.New()
				l.Debug().Msg("Downloading artist image from source...")
				// Caching failure is non-fatal; the artist is still created.
				err = DownloadAndCacheImage(ctx, imgid, imgUrl, size)
				if err != nil {
					l.Err(err).Msg("Failed to cache image")
				}
			} else if err != nil {
				l.Debug().Msgf("Failed to get artist images for %s: %s", name, err.Error())
			}
			a, err = d.SaveArtist(ctx, db.SaveArtistOpts{Name: name, Image: imgid, ImageSrc: imgUrl})
			if err != nil {
				return nil, err
			}
			l.Info().Msgf("Created artist '%s' with artist name", name)
			result = append(result, a)
		} else {
			// Unexpected DB failure; propagate.
			return nil, err
		}
	}
	return result, nil
}
|
||||
|
||||
func artistExists(name string, artists []*models.Artist) bool {
|
||||
for _, a := range artists {
|
||||
allAliases := append(a.Aliases, a.Name)
|
||||
for _, alias := range allAliases {
|
||||
if strings.EqualFold(name, alias) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
119
internal/catalog/associate_track.go
Normal file
119
internal/catalog/associate_track.go
Normal file
|
|
@ -0,0 +1,119 @@
|
|||
package catalog
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/mbz"
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/google/uuid"
|
||||
"github.com/jackc/pgx/v5"
|
||||
)
|
||||
|
||||
// AssociateTrackOpts carries the inputs AssociateTrack uses to match a listen
// to an existing track or create a new one.
type AssociateTrackOpts struct {
	// ArtistIDs are the DB IDs of the already-resolved artists (required,
	// at least one).
	ArtistIDs []int32
	// AlbumID is the DB ID of the already-resolved album (required).
	AlbumID int32
	// TrackMbzID is the MusicBrainz recording ID, when supplied.
	TrackMbzID uuid.UUID
	// TrackName is the track title (required).
	TrackName string
	// Duration of the track in seconds.
	Duration int32
	// Mbzc performs MusicBrainz lookups.
	Mbzc mbz.MusicBrainzCaller
}
|
||||
|
||||
func AssociateTrack(ctx context.Context, d db.DB, opts AssociateTrackOpts) (*models.Track, error) {
|
||||
l := logger.FromContext(ctx)
|
||||
if opts.TrackName == "" {
|
||||
return nil, errors.New("missing required parameter 'opts.TrackName'")
|
||||
}
|
||||
if len(opts.ArtistIDs) < 1 {
|
||||
return nil, errors.New("at least one artist id must be specified")
|
||||
}
|
||||
if opts.AlbumID == 0 {
|
||||
return nil, errors.New("release group id must be specified")
|
||||
}
|
||||
// first, try to match track Mbz ID
|
||||
if opts.TrackMbzID != uuid.Nil {
|
||||
l.Debug().Msgf("Associating track '%s' by MusicBrainz recording ID", opts.TrackName)
|
||||
return matchTrackByMbzID(ctx, d, opts)
|
||||
} else {
|
||||
l.Debug().Msgf("Associating track '%s' by title and artist", opts.TrackName)
|
||||
return matchTrackByTitleAndArtist(ctx, d, opts)
|
||||
}
|
||||
}
|
||||
|
||||
// matchTrackByMbzID looks up a track by its MusicBrainz recording ID. If no
// match is found, it falls back to matchTrackByTitleAndArtist and back-fills
// the MusicBrainz ID onto the resulting track.
func matchTrackByMbzID(ctx context.Context, d db.DB, opts AssociateTrackOpts) (*models.Track, error) {
	l := logger.FromContext(ctx)
	track, err := d.GetTrack(ctx, db.GetTrackOpts{
		MusicBrainzID: opts.TrackMbzID,
	})
	if err == nil {
		l.Debug().Msgf("Found track '%s' by MusicBrainz ID", track.Title)
		return track, nil
	} else if !errors.Is(err, pgx.ErrNoRows) {
		// Unexpected DB failure; propagate.
		return nil, err
	} else {
		l.Debug().Msgf("Track '%s' could not be found by MusicBrainz ID", opts.TrackName)
		track, err := matchTrackByTitleAndArtist(ctx, d, opts)
		if err != nil {
			return nil, err
		}
		// Associate the recording ID with the matched/created track so the
		// next lookup hits the fast path above.
		l.Debug().Msgf("Updating track '%s' with MusicBrainz ID %s", opts.TrackName, opts.TrackMbzID)
		err = d.UpdateTrack(ctx, db.UpdateTrackOpts{
			ID:            track.ID,
			MusicBrainzID: opts.TrackMbzID,
		})
		if err != nil {
			return nil, err
		}
		// Reflect the update in the returned value as well.
		track.MbzID = &opts.TrackMbzID
		return track, nil
	}
}
|
||||
|
||||
// matchTrackByTitleAndArtist matches a track by the submitted title and
// artist IDs. On a miss, and when a MusicBrainz recording ID is available, it
// additionally tries the canonical MusicBrainz title before creating a new
// track row.
func matchTrackByTitleAndArtist(ctx context.Context, d db.DB, opts AssociateTrackOpts) (*models.Track, error) {
	l := logger.FromContext(ctx)
	// try provided track title
	track, err := d.GetTrack(ctx, db.GetTrackOpts{
		Title:     opts.TrackName,
		ArtistIDs: opts.ArtistIDs,
	})
	if err == nil {
		l.Debug().Msgf("Track '%s' found by title and artist match", track.Title)
		return track, nil
	} else if !errors.Is(err, pgx.ErrNoRows) {
		return nil, err
	} else {
		// Second chance: the submitted title may differ from the canonical
		// MusicBrainz title; look that up and retry. Errors here are ignored
		// deliberately — we fall through to creation.
		if opts.TrackMbzID != uuid.Nil {
			mbzTrack, err := opts.Mbzc.GetTrack(ctx, opts.TrackMbzID)
			if err == nil {
				track, err := d.GetTrack(ctx, db.GetTrackOpts{
					Title:     mbzTrack.Title,
					ArtistIDs: opts.ArtistIDs,
				})
				if err == nil {
					l.Debug().Msgf("Track '%s' found by MusicBrainz title and artist match", opts.TrackName)
					return track, nil
				}
			}
		}
		l.Debug().Msgf("Track '%s' could not be found by title and artist match", opts.TrackName)
		t, err := d.SaveTrack(ctx, db.SaveTrackOpts{
			RecordingMbzID: opts.TrackMbzID,
			AlbumID:        opts.AlbumID,
			Title:          opts.TrackName,
			ArtistIDs:      opts.ArtistIDs,
			Duration:       opts.Duration,
		})
		if err != nil {
			return nil, err
		}
		if opts.TrackMbzID == uuid.Nil {
			l.Info().Msgf("Created track '%s' with title and artist", opts.TrackName)
		} else {
			l.Info().Msgf("Created track '%s' with MusicBrainz Recording ID", opts.TrackName)
		}
		return t, nil
	}
}
|
||||
228
internal/catalog/catalog.go
Normal file
228
internal/catalog/catalog.go
Normal file
|
|
@ -0,0 +1,228 @@
|
|||
// Package catalog manages the internal metadata of the catalog of music the user has submitted listens for.
|
||||
// This includes artists, releases (album, single, ep, etc), and tracks, as well as ingesting
|
||||
// listens submitted both via the API(s) and other methods.
|
||||
package catalog
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/mbz"
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
// GetListensOpts filters listen queries. Zero-valued fields are ignored;
// Limit caps the number of results.
type GetListensOpts struct {
	ArtistID       int32
	ReleaseGroupID int32
	TrackID        int32
	Limit          int
}
|
||||
|
||||
// SaveListenOpts identifies a single listen event: which track, and when.
type SaveListenOpts struct {
	TrackID int32
	Time    time.Time
}
|
||||
|
||||
// SubmitListenOpts carries everything needed to ingest one listen submission.
type SubmitListenOpts struct {
	// When true, skips registering the listen and only associates or creates the
	// artist, release, release group, and track in DB.
	SkipSaveListen bool

	// MbzCaller performs MusicBrainz lookups during association.
	MbzCaller mbz.MusicBrainzCaller
	// ArtistNames is an explicit list of credited artist names, when provided.
	ArtistNames []string
	// Artist is the raw artist string (required).
	Artist string
	// ArtistMbzIDs are MusicBrainz artist IDs, when provided.
	ArtistMbzIDs []uuid.UUID
	// TrackTitle is the raw track title (required).
	TrackTitle string
	// RecordingMbzID is the MusicBrainz recording ID for the track.
	RecordingMbzID uuid.UUID
	Duration       int32 // in seconds
	// ReleaseTitle is the album/release title, when provided.
	ReleaseTitle string
	// ReleaseMbzID and ReleaseGroupMbzID are the MusicBrainz release and
	// release-group IDs for the album, when provided.
	ReleaseMbzID      uuid.UUID
	ReleaseGroupMbzID uuid.UUID
	// Time is when the listen occurred.
	Time time.Time
	// UserID is the submitting user; Client identifies the submitting app.
	UserID int32
	Client string
}
|
||||
|
||||
// Image source labels recorded alongside stored images.
const (
	ImageSourceUserUpload = "User Upload"
)
|
||||
|
||||
func SubmitListen(ctx context.Context, store db.DB, opts SubmitListenOpts) error {
|
||||
l := logger.FromContext(ctx)
|
||||
|
||||
if opts.Artist == "" || opts.TrackTitle == "" {
|
||||
return errors.New("track name and artist are required")
|
||||
}
|
||||
|
||||
artists, err := AssociateArtists(
|
||||
ctx,
|
||||
store,
|
||||
AssociateArtistsOpts{
|
||||
ArtistMbzIDs: opts.ArtistMbzIDs,
|
||||
ArtistNames: opts.ArtistNames,
|
||||
ArtistName: opts.Artist,
|
||||
Mbzc: opts.MbzCaller,
|
||||
TrackTitle: opts.TrackTitle,
|
||||
})
|
||||
if err != nil {
|
||||
l.Error().Err(err).Msg("Failed to associate artists to listen")
|
||||
return err
|
||||
} else if len(artists) < 1 {
|
||||
l.Debug().Msg("Failed to associate any artists to release")
|
||||
}
|
||||
|
||||
artistIDs := make([]int32, len(artists))
|
||||
|
||||
for i, artist := range artists {
|
||||
artistIDs[i] = artist.ID
|
||||
l.Debug().Any("artist", artist).Msg("Matched listen to artist")
|
||||
}
|
||||
rg, err := AssociateAlbum(ctx, store, AssociateAlbumOpts{
|
||||
ReleaseMbzID: opts.ReleaseMbzID,
|
||||
ReleaseGroupMbzID: opts.ReleaseGroupMbzID,
|
||||
ReleaseName: opts.ReleaseTitle,
|
||||
TrackName: opts.TrackTitle,
|
||||
Mbzc: opts.MbzCaller,
|
||||
Artists: artists,
|
||||
})
|
||||
if err != nil {
|
||||
l.Error().Err(err).Msg("Failed to associate release group to listen")
|
||||
return err
|
||||
}
|
||||
|
||||
// ensure artists are associated with release group
|
||||
store.AddArtistsToAlbum(ctx, db.AddArtistsToAlbumOpts{
|
||||
ArtistIDs: artistIDs,
|
||||
AlbumID: rg.ID,
|
||||
})
|
||||
|
||||
track, err := AssociateTrack(ctx, store, AssociateTrackOpts{
|
||||
ArtistIDs: artistIDs,
|
||||
AlbumID: rg.ID,
|
||||
TrackMbzID: opts.RecordingMbzID,
|
||||
TrackName: opts.TrackTitle,
|
||||
Duration: opts.Duration,
|
||||
Mbzc: opts.MbzCaller,
|
||||
})
|
||||
if err != nil {
|
||||
l.Error().Err(err).Msg("Failed to associate track to listen")
|
||||
return err
|
||||
}
|
||||
|
||||
if track.Duration == 0 && opts.Duration != 0 {
|
||||
err := store.UpdateTrack(ctx, db.UpdateTrackOpts{
|
||||
ID: track.ID,
|
||||
Duration: opts.Duration,
|
||||
})
|
||||
if err != nil {
|
||||
l.Err(err).Msgf("Failed to update duration for track %s", track.Title)
|
||||
}
|
||||
}
|
||||
|
||||
if opts.SkipSaveListen {
|
||||
return nil
|
||||
}
|
||||
|
||||
l.Info().Msgf("Received listen: '%s' by %s, from release '%s'", track.Title, buildArtistStr(artists), rg.Title)
|
||||
|
||||
return store.SaveListen(ctx, db.SaveListenOpts{
|
||||
TrackID: track.ID,
|
||||
Time: opts.Time,
|
||||
UserID: opts.UserID,
|
||||
Client: opts.Client,
|
||||
})
|
||||
}
|
||||
|
||||
func buildArtistStr(artists []*models.Artist) string {
|
||||
artistNames := make([]string, len(artists))
|
||||
for i, artist := range artists {
|
||||
artistNames[i] = artist.Name
|
||||
}
|
||||
return strings.Join(artistNames, " & ")
|
||||
}
|
||||
|
||||
var (
	// Bracketed feat patterns: "(feat. X)" and "[feat. X]".
	bracketFeatPatterns = []*regexp.Regexp{
		regexp.MustCompile(`(?i)\(feat\. ([^)]*)\)`),
		regexp.MustCompile(`(?i)\[feat\. ([^\]]*)\]`),
	}
	// Inline feat (not in brackets), anchored to the end of the string.
	inlineFeatPattern = regexp.MustCompile(`(?i)feat\. ([^()\[\]]+)$`)

	// Delimiters only used inside feat. sections. The word "and" must be
	// surrounded by whitespace: the previous pattern (`\s*and\s*`) matched
	// the letters "and" inside names, splitting e.g. "Alexander" into
	// "Alex"/"er".
	featSplitDelimiters = regexp.MustCompile(`(?i)\s*(?:,|&|·)\s*|\s+and\s+`)

	// Delimiter for separating artists in main string (rare but real usage).
	mainArtistDotSplitter = regexp.MustCompile(`\s+·\s+`)
)

// ParseArtists extracts all contributing artist names from the artist and
// title strings: featured artists (bracketed or inline "feat." credits in
// either string) plus the base artist(s). Names are de-duplicated, preserving
// first-seen order; featured artists from the artist string come first,
// followed by the base artist(s), then features found in the title.
func ParseArtists(artist string, title string) []string {
	seen := make(map[string]struct{})
	var out []string

	// add records a trimmed, non-empty name exactly once, in order.
	add := func(name string) {
		name = strings.TrimSpace(name)
		if name == "" {
			return
		}
		if _, exists := seen[name]; !exists {
			seen[name] = struct{}{}
			out = append(out, name)
		}
	}

	foundFeat := false

	// Extract bracketed features from artist, stripping them from the base.
	for _, re := range bracketFeatPatterns {
		if matches := re.FindStringSubmatch(artist); matches != nil {
			foundFeat = true
			artist = strings.Replace(artist, matches[0], "", 1)
			for _, name := range featSplitDelimiters.Split(matches[1], -1) {
				add(name)
			}
		}
	}
	// Extract inline feat. from artist.
	if matches := inlineFeatPattern.FindStringSubmatch(artist); matches != nil {
		foundFeat = true
		artist = strings.Replace(artist, matches[0], "", 1)
		for _, name := range featSplitDelimiters.Split(matches[1], -1) {
			add(name)
		}
	}

	// Add base artist(s).
	if foundFeat {
		add(strings.TrimSpace(artist))
	} else {
		// Only split on " · " in base artist string.
		for _, name := range mainArtistDotSplitter.Split(artist, -1) {
			add(name)
		}
	}

	// Extract features from title (title text itself is never added).
	for _, re := range bracketFeatPatterns {
		if matches := re.FindStringSubmatch(title); matches != nil {
			for _, name := range featSplitDelimiters.Split(matches[1], -1) {
				add(name)
			}
		}
	}
	if matches := inlineFeatPattern.FindStringSubmatch(title); matches != nil {
		for _, name := range featSplitDelimiters.Split(matches[1], -1) {
			add(name)
		}
	}

	return out
}
|
||||
366
internal/catalog/catalog_test.go
Normal file
366
internal/catalog/catalog_test.go
Normal file
|
|
@ -0,0 +1,366 @@
|
|||
package catalog_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/catalog"
|
||||
"github.com/gabehf/koito/internal/cfg"
|
||||
"github.com/gabehf/koito/internal/db/psql"
|
||||
"github.com/gabehf/koito/internal/mbz"
|
||||
"github.com/gabehf/koito/internal/utils"
|
||||
_ "github.com/gabehf/koito/testing_init"
|
||||
"github.com/google/uuid"
|
||||
"github.com/ory/dockertest/v3"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// Canned MusicBrainz responses, keyed by the synthetic UUIDs used throughout
// these tests; served by the test MusicBrainzCaller in place of the live API.
var (
	// Artist lookups: one artist with a primary non-Latin alias.
	mbzArtistData = map[uuid.UUID]*mbz.MusicBrainzArtist{
		uuid.MustParse("00000000-0000-0000-0000-000000000001"): {
			Name:     "ATARASHII GAKKO!",
			SortName: "Atarashii Gakko",
			Aliases: []mbz.MusicBrainzArtistAlias{
				{
					Name:    "新しい学校のリーダーズ",
					Type:    "Artist name",
					Primary: true,
				},
			},
		},
	}
	// Release-group lookups: one group with two official releases — the
	// primary title and an alternate title.
	mbzReleaseGroupData = map[uuid.UUID]*mbz.MusicBrainzReleaseGroup{
		uuid.MustParse("00000000-0000-0000-0000-000000000011"): {
			Title: "AG! Calling",
			Type:  "Album",
			ArtistCredit: []mbz.MusicBrainzArtistCredit{
				{
					Artist: mbz.MusicBrainzArtist{
						Name: "ATARASHII GAKKO!",
						Aliases: []mbz.MusicBrainzArtistAlias{
							{
								Name:    "新しい学校のリーダーズ",
								Type:    "Artist name",
								Primary: true,
							},
						},
					},
					Name: "ATARASHII GAKKO!",
				},
			},
			Releases: []mbz.MusicBrainzRelease{
				{
					Title: "AG! Calling",
					ID:    "00000000-0000-0000-0000-000000000101",
					ArtistCredit: []mbz.MusicBrainzArtistCredit{
						{
							Artist: mbz.MusicBrainzArtist{
								Name: "ATARASHII GAKKO!",
								Aliases: []mbz.MusicBrainzArtistAlias{
									{
										Name:    "ATARASHII GAKKO!",
										Type:    "Artist name",
										Primary: true,
									},
								},
							},
							Name: "ATARASHII GAKKO!",
						},
					},
					Status: "Official",
				},
				{
					Title: "AG! Calling - Alt Title",
					ID:    "00000000-0000-0000-0000-000000000102",
					ArtistCredit: []mbz.MusicBrainzArtistCredit{
						{
							Artist: mbz.MusicBrainzArtist{
								Name: "ATARASHII GAKKO!",
								Aliases: []mbz.MusicBrainzArtistAlias{
									{
										Name:    "ATARASHII GAKKO!",
										Type:    "Artist name",
										Primary: true,
									},
								},
							},
							Name: "ATARASHII GAKKO!",
						},
					},
					Status: "Official",
				},
			},
		},
	}
	// Release lookups: a normal single-artist release plus a "Various
	// Artists" compilation (exercises the variousArtists detection).
	mbzReleaseData = map[uuid.UUID]*mbz.MusicBrainzRelease{
		uuid.MustParse("00000000-0000-0000-0000-000000000101"): {
			Title: "AG! Calling",
			ID:    "00000000-0000-0000-0000-000000000101",
			ArtistCredit: []mbz.MusicBrainzArtistCredit{
				{
					Artist: mbz.MusicBrainzArtist{
						Name: "ATARASHII GAKKO!",
						Aliases: []mbz.MusicBrainzArtistAlias{
							{
								Name:    "新しい学校のリーダーズ",
								Type:    "Artist name",
								Primary: true,
							},
						},
					},
					Name: "ATARASHII GAKKO!",
				},
			},
			Status: "Official",
		},
		uuid.MustParse("00000000-0000-0000-0000-000000000202"): {
			Title: "EVANGELION FINALLY",
			ID:    "00000000-0000-0000-0000-000000000202",
			ArtistCredit: []mbz.MusicBrainzArtistCredit{
				{
					Artist: mbz.MusicBrainzArtist{
						Name: "Various Artists",
					},
					Name: "Various Artists",
				},
			},
			Status: "Official",
		},
	}
	// Track (recording) lookups.
	mbzTrackData = map[uuid.UUID]*mbz.MusicBrainzTrack{
		uuid.MustParse("00000000-0000-0000-0000-000000001001"): {
			Title: "Tokyo Calling",
		},
	}
)
||||
|
||||
// store is the shared database handle used by every test in this package.
// It is initialized in TestMain against a throwaway dockertest Postgres
// container and torn down when the suite exits.
var store *psql.Psql
|
||||
|
||||
// getTestGetenv returns a getenv-style lookup function wired to the running
// dockertest Postgres resource: the database URL points at the container's
// mapped 5432 port, the config dir is a random per-run directory, and all
// external metadata/image providers are disabled so tests stay offline.
func getTestGetenv(resource *dockertest.Resource) func(string) string {
	// Random directory name so repeated runs don't collide on disk.
	dir, err := utils.GenerateRandomString(8)
	if err != nil {
		panic(err)
	}
	return func(env string) string {
		switch env {
		case cfg.ENABLE_STRUCTURED_LOGGING_ENV:
			return "true"
		case cfg.LOG_LEVEL_ENV:
			return "debug"
		case cfg.DATABASE_URL_ENV:
			return fmt.Sprintf("postgres://postgres:secret@localhost:%s", resource.GetPort("5432/tcp"))
		case cfg.CONFIG_DIR_ENV:
			return dir
		case cfg.DISABLE_DEEZER_ENV, cfg.DISABLE_COVER_ART_ARCHIVE_ENV, cfg.DISABLE_MUSICBRAINZ_ENV, cfg.ENABLE_FULL_IMAGE_CACHE_ENV:
			return "true"
		default:
			// Unset for everything else; cfg falls back to its defaults.
			return ""
		}
	}
}
|
||||
|
||||
// truncateTestData wipes every table the catalog tests touch and resets
// their identity sequences, so each test starts from id 1 on a clean slate.
func truncateTestData(t *testing.T) {
	err := store.Exec(context.Background(),
		`TRUNCATE
			artists,
			artist_aliases,
			tracks,
			artist_tracks,
			releases,
			artist_releases,
			release_aliases,
			listens
		RESTART IDENTITY CASCADE`)
	require.NoError(t, err)
}
|
||||
|
||||
// setupTestDataWithMbzIDs resets the database and seeds one artist, one
// release, and one track (all with id 1) that carry MusicBrainz IDs matching
// the mbz mock fixtures, plus their aliases and join-table rows. Used by
// tests that exercise MBID-based matching.
func setupTestDataWithMbzIDs(t *testing.T) {
	truncateTestData(t)

	err := store.Exec(context.Background(),
		`INSERT INTO artists (musicbrainz_id)
		VALUES ('00000000-0000-0000-0000-000000000001')`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO artist_aliases (artist_id, alias, source, is_primary)
		VALUES (1, 'ATARASHII GAKKO!', 'Testing', true)`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO releases (musicbrainz_id)
		VALUES ('00000000-0000-0000-0000-000000000101')`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO release_aliases (release_id, alias, source, is_primary)
		VALUES (1, 'AG! Calling', 'Testing', true)`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO artist_releases (artist_id, release_id)
		VALUES (1, 1)`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO tracks (release_id, musicbrainz_id)
		VALUES (1, '00000000-0000-0000-0000-000000001001')`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO track_aliases (track_id, alias, source, is_primary)
		VALUES (1, 'Tokyo Calling', 'Testing', true)`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO artist_tracks (artist_id, track_id)
		VALUES (1, 1)`)
	require.NoError(t, err)
}
|
||||
|
||||
// setupTestDataSansMbzIDs resets the database and seeds the same artist /
// release / track rows as setupTestDataWithMbzIDs, but with NULL MusicBrainz
// IDs, for tests that must match by name/title rather than by MBID.
func setupTestDataSansMbzIDs(t *testing.T) {
	truncateTestData(t)

	err := store.Exec(context.Background(),
		`INSERT INTO artists (musicbrainz_id)
		VALUES (NULL)`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO artist_aliases (artist_id, alias, source, is_primary)
		VALUES (1, 'ATARASHII GAKKO!', 'Testing', true)`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO releases (musicbrainz_id)
		VALUES (NULL)`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO release_aliases (release_id, alias, source, is_primary)
		VALUES (1, 'AG! Calling', 'Testing', true)`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO artist_releases (artist_id, release_id)
		VALUES (1, 1)`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO tracks (release_id, musicbrainz_id)
		VALUES (1, NULL)`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO track_aliases (track_id, alias, source, is_primary)
		VALUES (1, 'Tokyo Calling', 'Testing', true)`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO artist_tracks (artist_id, track_id)
		VALUES (1, 1)`)
	require.NoError(t, err)
}
|
||||
|
||||
// TestMain starts a disposable Postgres container via dockertest, loads the
// test configuration, connects the package-level store, seeds a single user
// (id 1) that all tests submit listens as, runs the suite, and then purges
// the container and temporary config directory.
func TestMain(m *testing.M) {
	pool, err := dockertest.NewPool("")
	if err != nil {
		log.Fatalf("Could not construct pool: %s", err)
	}

	if err := pool.Client.Ping(); err != nil {
		log.Fatalf("Could not connect to Docker: %s", err)
	}

	resource, err := pool.Run("postgres", "latest", []string{"POSTGRES_PASSWORD=secret"})
	if err != nil {
		log.Fatalf("Could not start resource: %s", err)
	}

	err = cfg.Load(getTestGetenv(resource))
	if err != nil {
		log.Fatalf("Could not load cfg: %s", err)
	}

	// Postgres needs a moment to accept connections; retry until it responds.
	if err := pool.Retry(func() error {
		var err error
		store, err = psql.New()
		if err != nil {
			log.Println("Failed to connect to test database, retrying...")
			return err
		}
		return store.Ping(context.Background())
	}); err != nil {
		log.Fatalf("Could not connect to database: %s", err)
	}

	// insert a user into the db with id 1 to use for tests
	err = store.Exec(context.Background(), `INSERT INTO users (username, password) VALUES ('test', DECODE('abc123', 'hex'))`)
	if err != nil {
		log.Fatalf("Failed to insert test user: %v", err)
	}

	code := m.Run()

	// You can't defer this because os.Exit doesn't care for defer
	if err := pool.Purge(resource); err != nil {
		log.Fatalf("Could not purge resource: %s", err)
	}

	err = os.RemoveAll(cfg.ConfigDir())
	if err != nil {
		log.Fatalf("Could not remove temporary config dir: %v", err)
	}

	os.Exit(code)
}
|
||||
|
||||
// From: https://brandur.org/fragments/go-equal-time
|
||||
// EqualTime compares two times in a way that's safer and with better fail
|
||||
// output than a call to `require.Equal` would produce.
|
||||
//
|
||||
// It takes care to:
|
||||
//
|
||||
// - Strip off monotonic portions of timestamps so they aren't considered for
|
||||
// purposes of comparison.
|
||||
//
|
||||
// - Truncate nanoseconds in a functionally equivalent way to how pgx would do
|
||||
// it so that times that have round-tripped from Postgres can still be
|
||||
// compared. Postgres only stores times to the microsecond level.
|
||||
//
|
||||
// - Use formatted, human-friendly time outputs so that in case of a failure,
|
||||
// the discrepancy is easier to pick out.
|
||||
func EqualTime(t testing.TB, t1, t2 time.Time) {
|
||||
// Note that leaving off the nanosecond portion will have the effect of
|
||||
// truncating it rather than rounding to the nearest microsecond, which
|
||||
// functionally matches pgx's behavior while persisting.
|
||||
const rfc3339Micro = "2006-01-02T15:04:05.999999Z07:00"
|
||||
|
||||
require.Equal(t,
|
||||
t1.Format(rfc3339Micro),
|
||||
t2.Format(rfc3339Micro),
|
||||
)
|
||||
}
|
||||
|
||||
// TestArtistStringParse table-tests catalog.ParseArtists over (artist string,
// track title) pairs, covering: single artists whose names contain separator
// characters, feature credits embedded in the artist string, feature credits
// embedded in the track title, and credits duplicated across both (which must
// not produce duplicate artists).
func TestArtistStringParse(t *testing.T) {
	type input struct {
		Name  string
		Title string
	}
	cases := map[input][]string{
		// only one artist
		{"NELKE", ""}:                 {"NELKE"},
		{"The Brook & The Bluff", ""}: {"The Brook & The Bluff"},
		{"half·alive", ""}:            {"half·alive"},
		// Earth, Wind, & Fire
		{"Earth, Wind & Fire", "The Very Best of Earth, Wind & Fire"}: {"Earth, Wind & Fire"},
		// only artists in artist string
		{"Carly Rae Jepsen feat. Rufus Wainwright", ""}: {"Carly Rae Jepsen", "Rufus Wainwright"},
		{"Mimi (feat. HATSUNE MIKU & KAFU)", ""}:        {"Mimi", "HATSUNE MIKU", "KAFU"},
		{"Magnify Tokyo · Kanade Ishihara", ""}:         {"Magnify Tokyo", "Kanade Ishihara"},
		{"Daft Punk [feat. Paul Williams]", ""}:         {"Daft Punk", "Paul Williams"},
		// primary artist in artist string, features in title
		{"Tyler, The Creator", "CA (feat. Alice Smith, Leon Ware & Clem Creevy)"}:  {"Tyler, The Creator", "Alice Smith", "Leon Ware", "Clem Creevy"},
		{"ONE OK ROCK", "C.U.R.I.O.S.I.T.Y. (feat. Paledusk and CHICO CARLITO)"}:   {"ONE OK ROCK", "Paledusk", "CHICO CARLITO"},
		{"Rat Tally", "In My Car feat. Madeline Kenney"}:                           {"Rat Tally", "Madeline Kenney"},
		// artists in both
		{"Daft Punk feat. Julian Casablancas", "Instant Crush (feat. Julian Casablancas)"}: {"Daft Punk", "Julian Casablancas"},
		{"Paramore (feat. Joy Williams)", "Hate to See Your Heart Break feat. Joy Williams"}: {"Paramore", "Joy Williams"},
	}

	for in, out := range cases {
		artists := catalog.ParseArtists(in.Name, in.Title)
		// Order-insensitive comparison; ParseArtists ordering is not asserted.
		assert.ElementsMatch(t, out, artists)
	}
}
|
||||
266
internal/catalog/images.go
Normal file
266
internal/catalog/images.go
Normal file
|
|
@ -0,0 +1,266 @@
|
|||
package catalog
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/gabehf/koito/internal/cfg"
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/google/uuid"
|
||||
"github.com/h2non/bimg"
|
||||
)
|
||||
|
||||
// ImageSize names one of the cached image renditions kept on disk.
type ImageSize string

const (
	// Named renditions; each has its own subdirectory under ImageCacheDir.
	ImageSizeSmall  ImageSize = "small"
	ImageSizeMedium ImageSize = "medium"
	ImageSizeLarge  ImageSize = "large"
	// imageSizeXL ImageSize = "xl"
	// ImageSizeFull stores the original image bytes without resizing.
	ImageSizeFull ImageSize = "full"

	// ImageCacheDir is the directory (under the config dir) that holds one
	// subdirectory per ImageSize.
	ImageCacheDir = "image_cache"
)
|
||||
|
||||
func ParseImageSize(size string) (ImageSize, error) {
|
||||
switch strings.ToLower(size) {
|
||||
case "small":
|
||||
return ImageSizeSmall, nil
|
||||
case "medium":
|
||||
return ImageSizeMedium, nil
|
||||
case "large":
|
||||
return ImageSizeLarge, nil
|
||||
// case "xl":
|
||||
// return imageSizeXL, nil
|
||||
case "full":
|
||||
return ImageSizeFull, nil
|
||||
default:
|
||||
return "", fmt.Errorf("unknown image size: %s", size)
|
||||
}
|
||||
}
|
||||
func GetImageSize(size ImageSize) int {
|
||||
var px int
|
||||
switch size {
|
||||
case "small":
|
||||
px = 48
|
||||
case "medium":
|
||||
px = 256
|
||||
case "large":
|
||||
px = 500
|
||||
case "xl":
|
||||
px = 1000
|
||||
}
|
||||
return px
|
||||
}
|
||||
|
||||
func SourceImageDir() string {
|
||||
if cfg.FullImageCacheEnabled() {
|
||||
return path.Join(cfg.ConfigDir(), ImageCacheDir, "full")
|
||||
} else {
|
||||
return path.Join(cfg.ConfigDir(), ImageCacheDir, "large")
|
||||
}
|
||||
}
|
||||
|
||||
// ValidateImageURL checks if the URL points to a valid image by performing a HEAD request.
// It requires a 200 OK response whose Content-Type header starts with
// "image/"; any other outcome is returned as an error.
func ValidateImageURL(url string) error {
	resp, err := http.Head(url)
	if err != nil {
		return fmt.Errorf("failed to perform HEAD request: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("HEAD request failed, status code: %d", resp.StatusCode)
	}

	if ct := resp.Header.Get("Content-Type"); !strings.HasPrefix(ct, "image/") {
		return fmt.Errorf("URL does not point to an image, content type: %s", ct)
	}

	return nil
}
|
||||
|
||||
// DownloadAndCacheImage downloads an image from the given URL, then calls CompressAndSaveImage.
|
||||
func DownloadAndCacheImage(ctx context.Context, id uuid.UUID, url string, size ImageSize) error {
|
||||
l := logger.FromContext(ctx)
|
||||
err := ValidateImageURL(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
l.Debug().Msgf("Downloading image for ID %s", id)
|
||||
resp, err := http.Get(url)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to download image: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return fmt.Errorf("failed to download image, status code: %d", resp.StatusCode)
|
||||
}
|
||||
|
||||
return CompressAndSaveImage(ctx, id.String(), size, resp.Body)
|
||||
}
|
||||
|
||||
// Compresses an image to the specified size, then saves it to the correct cache folder.
|
||||
func CompressAndSaveImage(ctx context.Context, filename string, size ImageSize, body io.Reader) error {
|
||||
l := logger.FromContext(ctx)
|
||||
|
||||
if size == ImageSizeFull {
|
||||
return saveImage(filename, size, body)
|
||||
}
|
||||
|
||||
l.Debug().Msg("Creating resized image")
|
||||
compressed, err := compressImage(size, body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return saveImage(filename, size, compressed)
|
||||
}
|
||||
|
||||
// SaveImage saves an image to the image_cache/{size} folder
|
||||
func saveImage(filename string, size ImageSize, data io.Reader) error {
|
||||
configDir := cfg.ConfigDir()
|
||||
cacheDir := filepath.Join(configDir, ImageCacheDir)
|
||||
|
||||
// Ensure the cache directory exists
|
||||
err := os.MkdirAll(filepath.Join(cacheDir, string(size)), os.ModePerm)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create full image cache directory: %w", err)
|
||||
}
|
||||
|
||||
// Create a file in the cache directory
|
||||
imagePath := filepath.Join(cacheDir, string(size), filename)
|
||||
file, err := os.Create(imagePath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create image file: %w", err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
// Save the image to the file
|
||||
_, err = io.Copy(file, data)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to save image: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// compressImage reads the whole image from data, resizes and crops it to a
// square of the pixel dimension for size, and re-encodes it as WebP
// (quality 85, metadata stripped). It returns a reader over the result.
func compressImage(size ImageSize, data io.Reader) (io.Reader, error) {
	imgBytes, err := io.ReadAll(data)
	if err != nil {
		return nil, err
	}
	px := GetImageSize(size)
	// Resize with bimg
	imgBytes, err = bimg.NewImage(imgBytes).Process(bimg.Options{
		Width:         px,
		Height:        px,
		Crop:          true,
		Quality:       85,
		StripMetadata: true,
		Type:          bimg.WEBP,
	})
	if err != nil {
		return nil, err
	}
	// Guard against bimg returning empty output without an error.
	if len(imgBytes) == 0 {
		return nil, fmt.Errorf("compression failed")
	}
	return bytes.NewReader(imgBytes), nil
}
|
||||
|
||||
func DeleteImage(filename uuid.UUID) error {
|
||||
configDir := cfg.ConfigDir()
|
||||
cacheDir := filepath.Join(configDir, ImageCacheDir)
|
||||
|
||||
// err := os.Remove(path.Join(cacheDir, "xl", filename.String()))
|
||||
// if err != nil && !os.IsNotExist(err) {
|
||||
// return err
|
||||
// }
|
||||
err := os.Remove(path.Join(cacheDir, "full", filename.String()))
|
||||
if err != nil && !os.IsNotExist(err) {
|
||||
return err
|
||||
}
|
||||
err = os.Remove(path.Join(cacheDir, "large", filename.String()))
|
||||
if err != nil && !os.IsNotExist(err) {
|
||||
return err
|
||||
}
|
||||
err = os.Remove(path.Join(cacheDir, "medium", filename.String()))
|
||||
if err != nil && !os.IsNotExist(err) {
|
||||
return err
|
||||
}
|
||||
err = os.Remove(path.Join(cacheDir, "small", filename.String()))
|
||||
if err != nil && !os.IsNotExist(err) {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Finds any images in all image_cache folders and deletes them if they are not associated with
// an album or artist.
func PruneOrphanedImages(ctx context.Context, store db.DB) error {
	l := logger.FromContext(ctx)

	configDir := cfg.ConfigDir()
	cacheDir := filepath.Join(configDir, ImageCacheDir)

	count := 0
	// go through every folder to find orphaned images
	// store already processed images to speed up pruning
	memo := make(map[string]bool)
	for _, dir := range []string{"large", "medium", "small", "full"} {
		c, err := pruneDirImgs(ctx, store, path.Join(cacheDir, dir), memo)
		if err != nil {
			return err
		}
		count += c
	}
	l.Info().Msgf("Purged %d images", count)
	return nil
}
|
||||
|
||||
// returns the number of pruned images
|
||||
func pruneDirImgs(ctx context.Context, store db.DB, path string, memo map[string]bool) (int, error) {
|
||||
l := logger.FromContext(ctx)
|
||||
count := 0
|
||||
files, err := os.ReadDir(path)
|
||||
if err != nil {
|
||||
l.Info().Msgf("Failed to read from directory %s; skipping for prune", path)
|
||||
files = []os.DirEntry{}
|
||||
}
|
||||
for _, file := range files {
|
||||
fn := file.Name()
|
||||
imageid, err := uuid.Parse(fn)
|
||||
if err != nil {
|
||||
l.Debug().Msgf("Filename does not appear to be UUID: %s", fn)
|
||||
continue
|
||||
}
|
||||
exists, err := store.ImageHasAssociation(ctx, imageid)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
} else if exists {
|
||||
continue
|
||||
}
|
||||
// image does not have association
|
||||
l.Debug().Msgf("Deleting image: %s", imageid)
|
||||
err = DeleteImage(imageid)
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Error purging orphaned images")
|
||||
}
|
||||
if memo != nil {
|
||||
memo[fn] = true
|
||||
}
|
||||
count++
|
||||
}
|
||||
return count, nil
|
||||
}
|
||||
74
internal/catalog/images_test.go
Normal file
74
internal/catalog/images_test.go
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
package catalog_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/gabehf/koito/internal/catalog"
|
||||
"github.com/gabehf/koito/internal/cfg"
|
||||
"github.com/google/uuid"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestImageLifecycle exercises the full image cache lifecycle against a local
// HTTP server serving a real JPEG: download+cache (full and medium sizes),
// explicit deletion, and pruning of orphaned images (the image has no DB
// association, so PruneOrphanedImages must remove it).
func TestImageLifecycle(t *testing.T) {

	// serve yuu.jpg as test image
	imageBytes, err := os.ReadFile(filepath.Join("static", "yuu.jpg"))
	require.NoError(t, err)
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "image/jpeg")
		w.WriteHeader(http.StatusOK)
		w.Write(imageBytes)
	}))
	defer server.Close()

	imgID := uuid.New()

	err = catalog.DownloadAndCacheImage(context.Background(), imgID, server.URL, catalog.ImageSizeFull)
	require.NoError(t, err)
	err = catalog.DownloadAndCacheImage(context.Background(), imgID, server.URL, catalog.ImageSizeMedium)
	require.NoError(t, err)

	// ensure download is correct

	imagePath := filepath.Join(cfg.ConfigDir(), catalog.ImageCacheDir, "full", imgID.String())
	_, err = os.Stat(imagePath)
	assert.NoError(t, err)
	imagePath = filepath.Join(cfg.ConfigDir(), catalog.ImageCacheDir, "medium", imgID.String())
	_, err = os.Stat(imagePath)
	assert.NoError(t, err)

	assert.NoError(t, catalog.DeleteImage(imgID))

	// ensure delete works

	imagePath = filepath.Join(cfg.ConfigDir(), catalog.ImageCacheDir, "full", imgID.String())
	_, err = os.Stat(imagePath)
	assert.Error(t, err)
	imagePath = filepath.Join(cfg.ConfigDir(), catalog.ImageCacheDir, "medium", imgID.String())
	_, err = os.Stat(imagePath)
	assert.Error(t, err)

	// re-download for prune

	err = catalog.DownloadAndCacheImage(context.Background(), imgID, server.URL, catalog.ImageSizeFull)
	require.NoError(t, err)
	err = catalog.DownloadAndCacheImage(context.Background(), imgID, server.URL, catalog.ImageSizeMedium)
	require.NoError(t, err)

	assert.NoError(t, catalog.PruneOrphanedImages(context.Background(), store))

	// ensure prune works

	imagePath = filepath.Join(cfg.ConfigDir(), catalog.ImageCacheDir, "full", imgID.String())
	_, err = os.Stat(imagePath)
	assert.Error(t, err)
	imagePath = filepath.Join(cfg.ConfigDir(), catalog.ImageCacheDir, "medium", imgID.String())
	_, err = os.Stat(imagePath)
	assert.Error(t, err)
}
|
||||
858
internal/catalog/submit_listen_test.go
Normal file
858
internal/catalog/submit_listen_test.go
Normal file
|
|
@ -0,0 +1,858 @@
|
|||
package catalog_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/catalog"
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/mbz"
|
||||
"github.com/google/uuid"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// this file is very long
|
||||
|
||||
// TestSubmitListen_CreateAllMbzIDs submits a listen where artist, release,
// release group, and track all carry MusicBrainz IDs and none exist yet in
// the database; every entity should be created and the listen recorded with
// the submitted timestamp.
func TestSubmitListen_CreateAllMbzIDs(t *testing.T) {
	truncateTestData(t)

	// artist gets created with musicbrainz id
	// release group gets created with mbz id
	// track gets created with mbz id
	// test listen time is opts time

	ctx := context.Background()
	mbzc := &mbz.MbzMockCaller{
		Artists:       mbzArtistData,
		ReleaseGroups: mbzReleaseGroupData,
		Releases:      mbzReleaseData,
		Tracks:        mbzTrackData,
	}
	artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001")
	releaseGroupMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000011")
	releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101")
	trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001")
	opts := catalog.SubmitListenOpts{
		MbzCaller:   mbzc,
		ArtistNames: []string{"ATARASHII GAKKO!"},
		Artist:      "ATARASHII GAKKO!",
		ArtistMbzIDs: []uuid.UUID{
			artistMbzID,
		},
		TrackTitle:        "Tokyo Calling",
		RecordingMbzID:    trackMbzID,
		ReleaseTitle:      "AG! Calling",
		ReleaseMbzID:      releaseMbzID,
		ReleaseGroupMbzID: releaseGroupMbzID,
		Time:              time.Now(),
		UserID:            1,
	}

	err := catalog.SubmitListen(ctx, store, opts)
	require.NoError(t, err)

	// Verify that the listen was saved
	exists, err := store.RowExists(ctx, `
		SELECT EXISTS (
			SELECT 1 FROM listens
			WHERE track_id = $1
		)`, 1)
	require.NoError(t, err)
	assert.True(t, exists, "expected listen row to exist")

	// Verify that listen time is correct
	p, err := store.GetListensPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 1})
	require.NoError(t, err)
	require.Len(t, p.Items, 1)
	l := p.Items[0]
	EqualTime(t, opts.Time, l.Time)
}
|
||||
|
||||
// TestSubmitListen_CreateAllMbzIDsNoReleaseGroupID submits a listen with a
// release MBID but no release-group MBID, and verifies the release is still
// created (resolved via the release ID) and the listen saved.
func TestSubmitListen_CreateAllMbzIDsNoReleaseGroupID(t *testing.T) {
	truncateTestData(t)

	// release group gets created with release id

	ctx := context.Background()
	mbzc := &mbz.MbzMockCaller{
		Artists:       mbzArtistData,
		ReleaseGroups: mbzReleaseGroupData,
		Releases:      mbzReleaseData,
		Tracks:        mbzTrackData,
	}
	artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001")
	releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101")
	trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001")
	opts := catalog.SubmitListenOpts{
		MbzCaller:   mbzc,
		ArtistNames: []string{"ATARASHII GAKKO!"},
		Artist:      "ATARASHII GAKKO!",
		ArtistMbzIDs: []uuid.UUID{
			artistMbzID,
		},
		TrackTitle:     "Tokyo Calling",
		RecordingMbzID: trackMbzID,
		ReleaseTitle:   "AG! Calling",
		ReleaseMbzID:   releaseMbzID,
		Time:           time.Now(),
		UserID:         1,
	}

	err := catalog.SubmitListen(ctx, store, opts)
	require.NoError(t, err)

	// Verify that the listen was saved
	exists, err := store.RowExists(ctx, `
		SELECT EXISTS (
			SELECT 1 FROM listens
			WHERE track_id = $1
		)`, 1)
	require.NoError(t, err)
	assert.True(t, exists, "expected listen row to exist")
	exists, err = store.RowExists(ctx, `
		SELECT EXISTS (
			SELECT 1 FROM releases_with_title
			WHERE title = $1
		)`, "AG! Calling")
	require.NoError(t, err)
	assert.True(t, exists, "expected release to be created")
}
|
||||
|
||||
// TestSubmitListen_CreateAllNoMbzIDs submits a listen with no MusicBrainz IDs
// at all; artist, release, and track must be created from names/titles alone
// and the listen saved.
func TestSubmitListen_CreateAllNoMbzIDs(t *testing.T) {
	truncateTestData(t)

	// artist gets created with artist names
	// release group gets created with artist and title
	// track gets created with title and artist

	ctx := context.Background()
	mbzc := &mbz.MbzMockCaller{}
	opts := catalog.SubmitListenOpts{
		MbzCaller:    mbzc,
		ArtistNames:  []string{"ATARASHII GAKKO!"},
		Artist:       "ATARASHII GAKKO!",
		TrackTitle:   "Tokyo Calling",
		ReleaseTitle: "AG! Calling",
		Time:         time.Now(),
		UserID:       1,
	}

	err := catalog.SubmitListen(ctx, store, opts)
	require.NoError(t, err)

	// Verify that the listen was saved
	exists, err := store.RowExists(ctx, `
		SELECT EXISTS (
			SELECT 1 FROM listens
			WHERE track_id = $1
		)`, 1)
	require.NoError(t, err)
	assert.True(t, exists, "expected listen row to exist")
}
|
||||
|
||||
// TestSubmitListen_CreateAllNoMbzIDsNoArtistNamesNoReleaseTitle submits a
// listen with only an artist string and a track title (no artist name list,
// no release title, and an unresolvable artist MBID). The featured artist
// must be parsed out of the track title, and the created release must fall
// back to using the track title as its title.
func TestSubmitListen_CreateAllNoMbzIDsNoArtistNamesNoReleaseTitle(t *testing.T) {
	truncateTestData(t)

	// artists get created with artist and track title
	// release group gets created with artist and track title

	ctx := context.Background()
	mbzc := &mbz.MbzMockCaller{}
	opts := catalog.SubmitListenOpts{
		MbzCaller: mbzc,
		ArtistMbzIDs: []uuid.UUID{
			uuid.MustParse("00000000-0000-0000-0000-000000000000"),
		},
		Artist:     "Rat Tally",
		TrackTitle: "In My Car feat. Madeline Kenney",
		Time:       time.Now(),
		UserID:     1,
	}

	err := catalog.SubmitListen(ctx, store, opts)
	require.NoError(t, err)

	// Verify that the listen was saved
	exists, err := store.RowExists(ctx, `
		SELECT EXISTS (
			SELECT 1 FROM listens
			WHERE track_id = $1
		)`, 1)
	require.NoError(t, err)
	assert.True(t, exists, "expected listen row to exist")
	exists, err = store.RowExists(ctx, `
		SELECT EXISTS (
			SELECT 1 FROM releases_with_title
			WHERE title = $1
		)`, opts.TrackTitle)
	require.NoError(t, err)
	assert.True(t, exists, "expected created release to have track title as title")
	exists, err = store.RowExists(ctx, `
		SELECT EXISTS (
			SELECT 1 FROM artists_with_name
			WHERE name = $1
		)`, "Rat Tally")
	require.NoError(t, err)
	assert.True(t, exists, "expected primary artist to be created")
	exists, err = store.RowExists(ctx, `
		SELECT EXISTS (
			SELECT 1 FROM artists_with_name
			WHERE name = $1
		)`, "Madeline Kenney")
	require.NoError(t, err)
	assert.True(t, exists, "expected featured artist to be created")
}
|
||||
|
||||
// TestSubmitListen_MatchAllMbzIDs seeds artist/release/track rows that carry
// MusicBrainz IDs, submits a listen using those same IDs, and verifies the
// listen attaches to the existing rows without creating duplicates.
func TestSubmitListen_MatchAllMbzIDs(t *testing.T) {
	setupTestDataWithMbzIDs(t)

	// artist gets matched with musicbrainz id
	// release gets matched with mbz id
	// track gets matched with mbz id

	ctx := context.Background()
	mbzc := &mbz.MbzMockCaller{
		Artists:  mbzArtistData,
		Releases: mbzReleaseData,
		Tracks:   mbzTrackData,
	}
	artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001")
	releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101")
	trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001")
	opts := catalog.SubmitListenOpts{
		MbzCaller:   mbzc,
		ArtistNames: []string{"ATARASHII GAKKO!"},
		Artist:      "ATARASHII GAKKO!",
		ArtistMbzIDs: []uuid.UUID{
			artistMbzID,
		},
		TrackTitle:     "Tokyo Calling",
		RecordingMbzID: trackMbzID,
		ReleaseTitle:   "AG! Calling",
		ReleaseMbzID:   releaseMbzID,
		Time:           time.Now(),
		UserID:         1,
	}

	err := catalog.SubmitListen(ctx, store, opts)
	require.NoError(t, err)

	// Verify that the listen was saved
	exists, err := store.RowExists(ctx, `
		SELECT EXISTS (
			SELECT 1 FROM listens
			WHERE track_id = $1
		)`, 1)
	require.NoError(t, err)
	assert.True(t, exists, "expected listen row to exist")

	// verify that track, release group, and artist are existing ones and not duplicates
	count, err := store.Count(ctx, `
		SELECT COUNT(*) FROM tracks_with_title WHERE title = $1
	`, "Tokyo Calling")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate track created")
	count, err = store.Count(ctx, `
		SELECT COUNT(*) FROM releases_with_title WHERE title = $1
	`, "AG! Calling")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate release group created")
	count, err = store.Count(ctx, `
		SELECT COUNT(*) FROM artists_with_name WHERE name = $1
	`, "ATARASHII GAKKO!")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate artist created")
}
|
||||
|
||||
// TestSubmitListen_MatchTrackFromMbzTitle seeds rows WITHOUT MusicBrainz IDs,
// then submits a listen whose track title differs from the stored one but
// whose recording MBID resolves (via the mock MusicBrainz caller) to the
// canonical title. The existing track must be matched, with no duplicates.
func TestSubmitListen_MatchTrackFromMbzTitle(t *testing.T) {
	setupTestDataSansMbzIDs(t)

	ctx := context.Background()
	mbzc := &mbz.MbzMockCaller{
		Tracks: mbzTrackData,
	}
	trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001")
	opts := catalog.SubmitListenOpts{
		MbzCaller:      mbzc,
		ArtistNames:    []string{"ATARASHII GAKKO!"},
		Artist:         "ATARASHII GAKKO!",
		TrackTitle:     "Tokyo Calling - Alt Title",
		RecordingMbzID: trackMbzID,
		ReleaseTitle:   "AG! Calling",
		Time:           time.Now(),
		UserID:         1,
	}

	err := catalog.SubmitListen(ctx, store, opts)
	require.NoError(t, err)

	// Verify that the listen was saved
	exists, err := store.RowExists(ctx, `
		SELECT EXISTS (
			SELECT 1 FROM listens
			WHERE track_id = $1
		)`, 1)
	require.NoError(t, err)
	assert.True(t, exists, "expected listen row to exist")

	// verify that track, release group, and artist are existing ones and not duplicates
	count, err := store.Count(ctx, `
		SELECT COUNT(*) FROM tracks_with_title WHERE title = $1
	`, "Tokyo Calling")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate track created")
	count, err = store.Count(ctx, `
		SELECT COUNT(*) FROM releases_with_title WHERE title = $1
	`, "AG! Calling")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate release group created")
	count, err = store.Count(ctx, `
		SELECT COUNT(*) FROM artists_with_name WHERE name = $1
	`, "ATARASHII GAKKO!")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate artist created")
}
|
||||
|
||||
// TestSubmitListen_VariousArtistsRelease submits a listen whose MusicBrainz
// release fixture (ID ...0202) is a various-artists compilation and verifies
// that the stored release row is flagged various_artists = true.
func TestSubmitListen_VariousArtistsRelease(t *testing.T) {

	ctx := context.Background()
	// Mock MusicBrainz caller backed only by the shared release fixtures.
	mbzc := &mbz.MbzMockCaller{
		Releases: mbzReleaseData,
	}
	releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000202")
	opts := catalog.SubmitListenOpts{
		MbzCaller:    mbzc,
		ArtistNames:  []string{"ARIANNE"},
		Artist:       "ARIANNE",
		TrackTitle:   "KOMM, SUSSER TOD (M-10 Director's Edit version)",
		ReleaseTitle: "Evangelion Finally",
		ReleaseMbzID: releaseMbzID,
		Time:         time.Now(),
		UserID:       1,
	}

	err := catalog.SubmitListen(ctx, store, opts)
	require.NoError(t, err)

	// Verify that the listen was saved
	exists, err := store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM listens
		WHERE track_id = $1
	)`, 1)
	require.NoError(t, err)
	assert.True(t, exists, "expected listen row to exist")

	// verify that track, release group, and artist are existing ones and not duplicates
	count, err := store.Count(ctx, `
		SELECT COUNT(*) FROM releases WHERE various_artists = $1
	`, true)
	require.NoError(t, err)
	assert.EqualValues(t, 1, count)
}
|
||||
|
||||
// TestSubmitListen_MatchOneArtistMbzIDOneArtistName submits a listen that names
// two artists but supplies a MusicBrainz ID for only the first. The seeded
// artist/release/track must be matched by their MBIDs (no duplicates), and the
// featured artist, which has no MBID, must be created from its name alone.
func TestSubmitListen_MatchOneArtistMbzIDOneArtistName(t *testing.T) {
	setupTestDataWithMbzIDs(t)

	// artist gets matched with musicbrainz id
	// release gets matched with mbz id
	// track gets matched with mbz id

	ctx := context.Background()
	mbzc := &mbz.MbzMockCaller{
		Artists:  mbzArtistData,
		Releases: mbzReleaseData,
		Tracks:   mbzTrackData,
	}
	// i really do want to use real tracks for tests but i dont wanna set up all the data for one test
	artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001")
	releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101")
	trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001")
	opts := catalog.SubmitListenOpts{
		MbzCaller:   mbzc,
		ArtistNames: []string{"ATARASHII GAKKO!", "Fake Artist"},
		Artist:      "ATARASHII GAKKO! feat. Fake Artist",
		ArtistMbzIDs: []uuid.UUID{
			artistMbzID,
		},
		TrackTitle:     "Tokyo Calling",
		RecordingMbzID: trackMbzID,
		ReleaseTitle:   "AG! Calling",
		ReleaseMbzID:   releaseMbzID,
		Time:           time.Now(),
		UserID:         1,
	}

	err := catalog.SubmitListen(ctx, store, opts)
	require.NoError(t, err)

	// Verify that the listen was saved
	exists, err := store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM listens
		WHERE track_id = $1
	)`, 1)
	require.NoError(t, err)
	assert.True(t, exists, "expected listen row to exist")

	// verify that track, release group, and artist are existing ones and not duplicates
	count, err := store.Count(ctx, `
		SELECT COUNT(*) FROM tracks_with_title WHERE title = $1
	`, "Tokyo Calling")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate track created")
	count, err = store.Count(ctx, `
		SELECT COUNT(*) FROM releases_with_title WHERE title = $1
	`, "AG! Calling")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate release group created")
	count, err = store.Count(ctx, `
		SELECT COUNT(*) FROM artists_with_name WHERE name = $1
	`, "ATARASHII GAKKO!")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate artist created")
	// the second, MBID-less artist should have been created exactly once
	count, err = store.Count(ctx, `
		SELECT COUNT(*) FROM artists_with_name WHERE name = $1
	`, "Fake Artist")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "expected featured artist to be created")
}
|
||||
|
||||
// TestSubmitListen_MatchAllMbzIDsNoReleaseGroupIDNoTrackID submits a listen
// carrying artist and release MBIDs but neither a release-group MBID nor a
// recording MBID, and verifies that the existing release and track are still
// matched (release via its release MBID, track via title + artist) rather than
// duplicated.
func TestSubmitListen_MatchAllMbzIDsNoReleaseGroupIDNoTrackID(t *testing.T) {
	setupTestDataWithMbzIDs(t)

	// release group gets matched with release id
	// track gets matched with title and artist

	ctx := context.Background()
	mbzc := &mbz.MbzMockCaller{
		Artists:       mbzArtistData,
		ReleaseGroups: mbzReleaseGroupData,
		Releases:      mbzReleaseData,
		Tracks:        mbzTrackData,
	}
	artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001")
	releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101")
	opts := catalog.SubmitListenOpts{
		MbzCaller:   mbzc,
		ArtistNames: []string{"ATARASHII GAKKO!"},
		Artist:      "ATARASHII GAKKO!",
		ArtistMbzIDs: []uuid.UUID{
			artistMbzID,
		},
		TrackTitle:   "Tokyo Calling",
		ReleaseTitle: "AG! Calling",
		ReleaseMbzID: releaseMbzID,
		Time:         time.Now(),
		UserID:       1,
	}

	err := catalog.SubmitListen(ctx, store, opts)
	require.NoError(t, err)

	// Verify that the listen was saved
	exists, err := store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM listens
		WHERE track_id = $1
	)`, 1)
	require.NoError(t, err)
	assert.True(t, exists, "expected listen row to exist")

	// verify that track, release group, and artist are existing ones and not duplicates
	count, err := store.Count(ctx, `
		SELECT COUNT(*) FROM releases_with_title WHERE title = $1
	`, "AG! Calling")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate release created")
	count, err = store.Count(ctx, `
		SELECT COUNT(*) FROM tracks_with_title WHERE title = $1
	`, "Tokyo Calling")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate track created")
}
|
||||
|
||||
// TestSubmitListen_MatchNoMbzIDs submits a listen with no MusicBrainz IDs at
// all against data seeded without MBIDs. The existing artist, release, and
// track must be matched purely by name/title, and — because nothing provided an
// MBID — their musicbrainz_id columns must remain NULL afterwards.
func TestSubmitListen_MatchNoMbzIDs(t *testing.T) {
	setupTestDataSansMbzIDs(t)

	ctx := context.Background()
	// empty mock: any MusicBrainz lookup would return nothing
	mbzc := &mbz.MbzMockCaller{}
	opts := catalog.SubmitListenOpts{
		MbzCaller:    mbzc,
		ArtistNames:  []string{"ATARASHII GAKKO!"},
		Artist:       "ATARASHII GAKKO!",
		TrackTitle:   "Tokyo Calling",
		ReleaseTitle: "AG! Calling",
		Time:         time.Now(),
		UserID:       1,
	}

	err := catalog.SubmitListen(ctx, store, opts)
	require.NoError(t, err)

	// Verify that the listen was saved
	exists, err := store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM listens
		WHERE track_id = $1
	)`, 1)
	require.NoError(t, err)
	assert.True(t, exists, "expected listen row to exist")

	// verify that track, release group, and artist are existing ones and not duplicates
	count, err := store.Count(ctx, `
		SELECT COUNT(*) FROM artists_with_name WHERE name = $1 AND musicbrainz_id IS NULL
	`, "ATARASHII GAKKO!")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate artist created or has been associated with fake musicbrainz id")
	count, err = store.Count(ctx, `
		SELECT COUNT(*) FROM releases_with_title WHERE title = $1 AND musicbrainz_id IS NULL
	`, "AG! Calling")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate release created or has been associated with fake musicbrainz id")
	count, err = store.Count(ctx, `
		SELECT COUNT(*) FROM tracks_with_title WHERE title = $1 AND musicbrainz_id IS NULL
	`, "Tokyo Calling")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate track created or has been associated with fake musicbrainz id")
}
|
||||
|
||||
// TestSubmitListen_UpdateTrackDuration submits a listen for an existing track
// with a Duration of 191 seconds and verifies that the matched track row has
// its duration updated to that value (rather than a duplicate being created).
func TestSubmitListen_UpdateTrackDuration(t *testing.T) {
	setupTestDataSansMbzIDs(t)

	ctx := context.Background()
	mbzc := &mbz.MbzMockCaller{}
	opts := catalog.SubmitListenOpts{
		MbzCaller:    mbzc,
		ArtistNames:  []string{"ATARASHII GAKKO!"},
		Artist:       "ATARASHII GAKKO!",
		TrackTitle:   "Tokyo Calling",
		ReleaseTitle: "AG! Calling",
		Time:         time.Now(),
		Duration:     191,
		UserID:       1,
	}

	err := catalog.SubmitListen(ctx, store, opts)
	require.NoError(t, err)

	// Verify that the listen was saved
	exists, err := store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM listens
		WHERE track_id = $1
	)`, 1)
	require.NoError(t, err)
	assert.True(t, exists, "expected listen row to exist")

	// the existing track should now carry the submitted duration
	count, err := store.Count(ctx, `
		SELECT COUNT(*) FROM tracks_with_title WHERE title = $1 AND duration = 191
	`, "Tokyo Calling")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "expected duration to be updated")
}
|
||||
|
||||
func TestSubmitListen_MatchFromTrackTitleNoMbzIDs(t *testing.T) {
|
||||
setupTestDataSansMbzIDs(t)
|
||||
|
||||
ctx := context.Background()
|
||||
mbzc := &mbz.MbzMockCaller{
|
||||
Artists: mbzArtistData,
|
||||
}
|
||||
opts := catalog.SubmitListenOpts{
|
||||
MbzCaller: mbzc,
|
||||
ArtistMbzIDs: []uuid.UUID{
|
||||
uuid.MustParse("00000000-0000-0000-0000-000000000001"),
|
||||
},
|
||||
Artist: "ATARASHII GAKKO!",
|
||||
TrackTitle: "Tokyo Calling",
|
||||
ReleaseTitle: "AG! Calling",
|
||||
Time: time.Now(),
|
||||
UserID: 1,
|
||||
}
|
||||
|
||||
err := catalog.SubmitListen(ctx, store, opts)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify that the listen was saved
|
||||
exists, err := store.RowExists(ctx, `
|
||||
SELECT EXISTS (
|
||||
SELECT * FROM listens
|
||||
WHERE track_id = $1
|
||||
)`, 1)
|
||||
require.NoError(t, err)
|
||||
assert.True(t, exists, "expected listen row to exist")
|
||||
|
||||
// verify that track, release group, and artist are existing ones and not duplicates
|
||||
count, err := store.Count(ctx, `
|
||||
SELECT COUNT(*) FROM artists_with_name WHERE name = $1
|
||||
`, "ATARASHII GAKKO!")
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, 1, count, "duplicate artist created")
|
||||
count, err = store.Count(ctx, `
|
||||
SELECT COUNT(*) FROM releases_with_title WHERE title = $1
|
||||
`, "AG! Calling")
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, 1, count, "duplicate release created")
|
||||
}
|
||||
|
||||
// TestSubmitListen_AssociateAllMbzIDs submits a listen with artist, release,
// and recording MBIDs against data seeded WITHOUT MBIDs, and verifies that the
// existing rows are matched by name/title (no duplicates) and then back-filled
// with the submitted MusicBrainz IDs.
func TestSubmitListen_AssociateAllMbzIDs(t *testing.T) {
	setupTestDataSansMbzIDs(t)

	// existing artist gets associated with mbz id (also updates aliases)
	// exisiting release gets associated with mbz id
	// existing track gets associated with mbz id (with new artist association)

	ctx := context.Background()
	mbzc := &mbz.MbzMockCaller{
		Artists:  mbzArtistData,
		Releases: mbzReleaseData,
		Tracks:   mbzTrackData,
	}
	artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001")
	releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101")
	trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001")
	opts := catalog.SubmitListenOpts{
		MbzCaller:   mbzc,
		ArtistNames: []string{"ATARASHII GAKKO!"},
		Artist:      "ATARASHII GAKKO!",
		ArtistMbzIDs: []uuid.UUID{
			artistMbzID,
		},
		TrackTitle:     "Tokyo Calling",
		RecordingMbzID: trackMbzID,
		ReleaseTitle:   "AG! Calling",
		ReleaseMbzID:   releaseMbzID,
		Time:           time.Now(),
		UserID:         1,
	}

	err := catalog.SubmitListen(ctx, store, opts)
	require.NoError(t, err)

	// Verify that the listen was saved
	exists, err := store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM listens
		WHERE track_id = $1
	)`, 1)
	require.NoError(t, err)
	assert.True(t, exists, "expected listen row to exist")

	// verify that track, release group, and artist are existing ones and not duplicates
	count, err := store.Count(ctx, `
		SELECT COUNT(*) FROM tracks_with_title WHERE title = $1
	`, "Tokyo Calling")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate track created")
	count, err = store.Count(ctx, `
		SELECT COUNT(*) FROM releases_with_title WHERE title = $1
	`, "AG! Calling")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate release created")
	count, err = store.Count(ctx, `
		SELECT COUNT(*) FROM artists_with_name WHERE name = $1
	`, "ATARASHII GAKKO!")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate artist created")

	// Verify that the mbz ids were saved
	exists, err = store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM tracks
		WHERE musicbrainz_id = $1
	)`, trackMbzID)
	require.NoError(t, err)
	assert.True(t, exists, "expected track row with mbz id to exist")
	exists, err = store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM artists
		WHERE musicbrainz_id = $1
	)`, artistMbzID)
	require.NoError(t, err)
	assert.True(t, exists, "expected artist row with mbz id to exist")
	exists, err = store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM releases
		WHERE musicbrainz_id = $1
	)`, releaseMbzID)
	require.NoError(t, err)
	assert.True(t, exists, "expected release row with mbz id to exist")
}
|
||||
|
||||
// TestSubmitListen_AssociateAllMbzIDsWithMbzUnreachable mirrors the
// AssociateAllMbzIDs test but with a MusicBrainz caller that always errors.
// Track and release MBIDs (supplied directly in the listen) should still be
// saved, but artist MBIDs — whose association requires a MusicBrainz lookup —
// must NOT be attached.
func TestSubmitListen_AssociateAllMbzIDsWithMbzUnreachable(t *testing.T) {
	setupTestDataSansMbzIDs(t)

	// existing artist gets associated with mbz id (also updates aliases)
	// exisiting release gets associated with mbz id
	// existing track gets associated with mbz id (with new artist association)

	ctx := context.Background()
	// every MusicBrainz call made through this caller fails
	mbzc := &mbz.MbzErrorCaller{}
	artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001")
	releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101")
	trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001")
	opts := catalog.SubmitListenOpts{
		MbzCaller:   mbzc,
		ArtistNames: []string{"ATARASHII GAKKO!"},
		Artist:      "ATARASHII GAKKO!",
		ArtistMbzIDs: []uuid.UUID{
			artistMbzID,
		},
		TrackTitle:     "Tokyo Calling",
		RecordingMbzID: trackMbzID,
		ReleaseTitle:   "AG! Calling",
		ReleaseMbzID:   releaseMbzID,
		Time:           time.Now(),
		UserID:         1,
	}

	err := catalog.SubmitListen(ctx, store, opts)
	require.NoError(t, err)

	// Verify that the listen was saved
	exists, err := store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM listens
		WHERE track_id = $1
	)`, 1)
	require.NoError(t, err)
	assert.True(t, exists, "expected listen row to exist")

	// verify that track, release group, and artist are existing ones and not duplicates
	count, err := store.Count(ctx, `
		SELECT COUNT(*) FROM tracks_with_title WHERE title = $1
	`, "Tokyo Calling")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate track created")
	count, err = store.Count(ctx, `
		SELECT COUNT(*) FROM releases_with_title WHERE title = $1
	`, "AG! Calling")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate release created")
	count, err = store.Count(ctx, `
		SELECT COUNT(*) FROM artists_with_name WHERE name = $1
	`, "ATARASHII GAKKO!")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "duplicate artist created")

	// Verify that the mbz ids were saved
	exists, err = store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM tracks
		WHERE musicbrainz_id = $1
	)`, trackMbzID)
	require.NoError(t, err)
	assert.True(t, exists, "expected track row with mbz id to exist")
	exists, err = store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM artists
		WHERE musicbrainz_id = $1
	)`, artistMbzID)
	require.NoError(t, err)
	// as artist names and mbz ids can be ids with unknown order
	assert.False(t, exists, "artists cannot be associated with mbz ids when mbz is unreachable")
	exists, err = store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM releases
		WHERE musicbrainz_id = $1
	)`, releaseMbzID)
	require.NoError(t, err)
	assert.True(t, exists, "expected release row with mbz id to exist")
}
|
||||
|
||||
// TestSubmitListen_AssociateReleaseAliases submits a listen with artist,
// release, release-group, and recording MBIDs and verifies that the alias
// "AG! Calling - Alt Title" from the MusicBrainz fixture data is stored in
// release_aliases for the matched release.
func TestSubmitListen_AssociateReleaseAliases(t *testing.T) {
	setupTestDataSansMbzIDs(t)

	// existing artist gets associated with mbz id (also updates aliases)
	// exisiting release group gets associated with mbz id
	// existing track gets associated with mbz id (with new artist association)

	ctx := context.Background()
	mbzc := &mbz.MbzMockCaller{
		Artists:       mbzArtistData,
		Releases:      mbzReleaseData,
		Tracks:        mbzTrackData,
		ReleaseGroups: mbzReleaseGroupData,
	}
	artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001")
	releaseGroupMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000011")
	releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101")
	trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001")
	opts := catalog.SubmitListenOpts{
		MbzCaller:   mbzc,
		ArtistNames: []string{"ATARASHII GAKKO!"},
		Artist:      "ATARASHII GAKKO!",
		ArtistMbzIDs: []uuid.UUID{
			artistMbzID,
		},
		TrackTitle:        "Tokyo Calling",
		RecordingMbzID:    trackMbzID,
		ReleaseTitle:      "AG! Calling",
		ReleaseMbzID:      releaseMbzID,
		ReleaseGroupMbzID: releaseGroupMbzID,
		Time:              time.Now(),
		UserID:            1,
	}

	err := catalog.SubmitListen(ctx, store, opts)
	require.NoError(t, err)

	// Verify that the listen was saved
	exists, err := store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM listens
		WHERE track_id = $1
	)`, 1)
	require.NoError(t, err)
	assert.True(t, exists, "expected listen row to exist")

	// the alternate title from the fixture should now be a stored alias
	count, err := store.Count(ctx, `
		SELECT COUNT(*) FROM release_aliases WHERE alias = $1
	`, "AG! Calling - Alt Title")
	require.NoError(t, err)
	assert.Equal(t, 1, count, "expected release alias to exist")
}
|
||||
|
||||
// TestSubmitListen_MusicBrainzUnreachable verifies that SubmitListen still
// succeeds and records the listen when every MusicBrainz call fails, starting
// from an empty database (truncateTestData).
func TestSubmitListen_MusicBrainzUnreachable(t *testing.T) {
	truncateTestData(t)

	// test don't fail when mbz unreachable

	ctx := context.Background()
	mbzc := &mbz.MbzErrorCaller{}
	artistMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000001")
	releaseGroupMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000011")
	releaseMbzID := uuid.MustParse("00000000-0000-0000-0000-000000000101")
	trackMbzID := uuid.MustParse("00000000-0000-0000-0000-000000001001")
	opts := catalog.SubmitListenOpts{
		MbzCaller:   mbzc,
		ArtistNames: []string{"ATARASHII GAKKO!"},
		Artist:      "ATARASHII GAKKO!",
		ArtistMbzIDs: []uuid.UUID{
			artistMbzID,
		},
		TrackTitle:        "Tokyo Calling",
		RecordingMbzID:    trackMbzID,
		ReleaseTitle:      "AG! Calling",
		ReleaseMbzID:      releaseMbzID,
		ReleaseGroupMbzID: releaseGroupMbzID,
		Time:              time.Now(),
		UserID:            1,
	}

	err := catalog.SubmitListen(ctx, store, opts)
	require.NoError(t, err)

	// Verify that the listen was saved
	exists, err := store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM listens
		WHERE track_id = $1
	)`, 1)
	require.NoError(t, err)
	assert.True(t, exists, "expected listen row to exist")
}
|
||||
280
internal/cfg/cfg.go
Normal file
280
internal/cfg/cfg.go
Normal file
|
|
@ -0,0 +1,280 @@
|
|||
package cfg
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// Fallback values used when the corresponding environment variable is unset.
const (
	defaultBaseUrl        = "http://127.0.0.1"
	defaultListenPort     = 4110
	defaultMusicBrainzUrl = "https://musicbrainz.org"
)

// Environment variable names read by Load.
const (
	BASE_URL_ENV                  = "KOITO_BASE_URL"
	DATABASE_URL_ENV              = "KOITO_DATABASE_URL"
	BIND_ADDR_ENV                 = "KOITO_BIND_ADDR"
	LISTEN_PORT_ENV               = "KOITO_LISTEN_PORT"
	ENABLE_STRUCTURED_LOGGING_ENV = "KOITO_ENABLE_STRUCTURED_LOGGING"
	ENABLE_FULL_IMAGE_CACHE_ENV   = "KOITO_ENABLE_FULL_IMAGE_CACHE"
	LOG_LEVEL_ENV                 = "KOITO_LOG_LEVEL"
	MUSICBRAINZ_URL_ENV           = "KOITO_MUSICBRAINZ_URL"
	ENABLE_LBZ_RELAY_ENV          = "KOITO_ENABLE_LBZ_RELAY"
	LBZ_RELAY_URL_ENV             = "KOITO_LBZ_RELAY_URL"
	LBZ_RELAY_TOKEN_ENV           = "KOITO_LBZ_RELAY_TOKEN"
	LASTFM_API_KEY_ENV            = "KOITO_LASTFM_API_KEY"
	CONFIG_DIR_ENV                = "KOITO_CONFIG_DIR"
	DEFAULT_USERNAME_ENV          = "KOITO_DEFAULT_USERNAME"
	DEFAULT_PASSWORD_ENV          = "KOITO_DEFAULT_PASSWORD"
	DISABLE_DEEZER_ENV            = "KOITO_DISABLE_DEEZER"
	DISABLE_COVER_ART_ARCHIVE_ENV = "KOITO_DISABLE_COVER_ART_ARCHIVE"
	DISABLE_MUSICBRAINZ_ENV       = "KOITO_DISABLE_MUSICBRAINZ"
	SKIP_IMPORT_ENV               = "KOITO_SKIP_IMPORT"
	ALLOWED_HOSTS_ENV             = "KOITO_ALLOWED_HOSTS"
	DISABLE_RATE_LIMIT_ENV        = "KOITO_DISABLE_RATE_LIMIT"
)

// config holds every runtime setting, populated once by loadConfig and read
// through the package-level accessor functions.
type config struct {
	bindAddr             string   // address the HTTP server binds to (may be empty)
	listenPort           int      // port the HTTP server listens on
	configDir            string   // directory for persistent app data
	baseUrl              string   // externally visible base URL
	databaseUrl          string   // Postgres connection string (required)
	musicBrainzUrl       string   // MusicBrainz API root
	logLevel             int      // numeric log level derived from LOG_LEVEL_ENV
	structuredLogging    bool
	enableFullImageCache bool
	lbzRelayEnabled      bool   // ListenBrainz relay toggle
	lbzRelayUrl          string // only set when the relay is enabled
	lbzRelayToken        string // only set when the relay is enabled
	defaultPw            string // initial admin password
	defaultUsername      string // initial admin username
	disableDeezer        bool
	disableCAA           bool
	disableMusicBrainz   bool
	skipImport           bool
	allowedHosts         []string
	allowAllHosts        bool // true when the first allowed host is "*"
	disableRateLimit     bool
}

var (
	globalConfig *config      // set exactly once by Load
	once         sync.Once    // guards initialization
	lock         sync.RWMutex // guards reads through the accessors
)
|
||||
|
||||
// Load initializes the global configuration using the provided getenv function.
// It is safe to call multiple times; only the first call does any work.
// NOTE(review): if the first call's loadConfig fails, the sync.Once is still
// consumed, so later calls return nil while globalConfig stays nil — confirm
// callers treat the first returned error as fatal.
func Load(getenv func(string) string) error {
	var err error
	once.Do(func() {
		globalConfig, err = loadConfig(getenv)
	})
	return err
}
|
||||
|
||||
// loadConfig loads the configuration from environment variables.
|
||||
func loadConfig(getenv func(string) string) (*config, error) {
|
||||
cfg := new(config)
|
||||
cfg.baseUrl = getenv(BASE_URL_ENV)
|
||||
if cfg.baseUrl == "" {
|
||||
cfg.baseUrl = defaultBaseUrl
|
||||
}
|
||||
cfg.databaseUrl = getenv(DATABASE_URL_ENV)
|
||||
if cfg.databaseUrl == "" {
|
||||
return nil, errors.New("required parameter " + DATABASE_URL_ENV + " not provided")
|
||||
}
|
||||
cfg.bindAddr = getenv(BIND_ADDR_ENV)
|
||||
var err error
|
||||
cfg.listenPort, err = strconv.Atoi(getenv(LISTEN_PORT_ENV))
|
||||
if err != nil {
|
||||
cfg.listenPort = defaultListenPort
|
||||
}
|
||||
cfg.musicBrainzUrl = getenv(MUSICBRAINZ_URL_ENV)
|
||||
if cfg.musicBrainzUrl == "" {
|
||||
cfg.musicBrainzUrl = defaultMusicBrainzUrl
|
||||
}
|
||||
if parseBool(getenv(ENABLE_LBZ_RELAY_ENV)) {
|
||||
cfg.lbzRelayEnabled = true
|
||||
cfg.lbzRelayToken = getenv(LBZ_RELAY_TOKEN_ENV)
|
||||
cfg.lbzRelayUrl = getenv(LBZ_RELAY_URL_ENV)
|
||||
}
|
||||
|
||||
cfg.disableRateLimit = parseBool(getenv(DISABLE_RATE_LIMIT_ENV))
|
||||
|
||||
cfg.structuredLogging = parseBool(getenv(ENABLE_STRUCTURED_LOGGING_ENV))
|
||||
|
||||
cfg.enableFullImageCache = parseBool(getenv(ENABLE_FULL_IMAGE_CACHE_ENV))
|
||||
cfg.disableDeezer = parseBool(getenv(DISABLE_DEEZER_ENV))
|
||||
cfg.disableCAA = parseBool(getenv(DISABLE_COVER_ART_ARCHIVE_ENV))
|
||||
cfg.disableMusicBrainz = parseBool(getenv(DISABLE_MUSICBRAINZ_ENV))
|
||||
cfg.skipImport = parseBool(getenv(SKIP_IMPORT_ENV))
|
||||
|
||||
if getenv(DEFAULT_USERNAME_ENV) == "" {
|
||||
cfg.defaultUsername = "admin"
|
||||
} else {
|
||||
cfg.defaultUsername = getenv(DEFAULT_USERNAME_ENV)
|
||||
}
|
||||
if getenv(DEFAULT_PASSWORD_ENV) == "" {
|
||||
cfg.defaultPw = "changeme"
|
||||
} else {
|
||||
cfg.defaultPw = getenv(DEFAULT_PASSWORD_ENV)
|
||||
}
|
||||
|
||||
cfg.configDir = getenv(CONFIG_DIR_ENV)
|
||||
if cfg.configDir == "" {
|
||||
cfg.configDir = "/etc/koito"
|
||||
}
|
||||
|
||||
rawHosts := getenv(ALLOWED_HOSTS_ENV)
|
||||
cfg.allowedHosts = strings.Split(rawHosts, ",")
|
||||
cfg.allowAllHosts = cfg.allowedHosts[0] == "*"
|
||||
|
||||
switch strings.ToLower(getenv(LOG_LEVEL_ENV)) {
|
||||
case "debug":
|
||||
cfg.logLevel = 0
|
||||
case "warn":
|
||||
cfg.logLevel = 2
|
||||
case "error":
|
||||
cfg.logLevel = 3
|
||||
case "fatal":
|
||||
cfg.logLevel = 4
|
||||
default:
|
||||
cfg.logLevel = 0
|
||||
}
|
||||
return cfg, nil
|
||||
}
|
||||
|
||||
// parseBool reports whether s equals "true", ignoring case.
// Any other value — including "1", "yes", or the empty string — is false,
// matching the original case-insensitive string comparison exactly.
func parseBool(s string) bool {
	return strings.EqualFold(s, "true")
}
|
||||
|
||||
// Global accessors for configuration values.
// Each takes the read lock; values are immutable after Load, so the lock
// mainly guards against reads racing initialization.

// ListenAddr returns the "host:port" address the HTTP server should bind to.
func ListenAddr() string {
	lock.RLock()
	defer lock.RUnlock()
	return fmt.Sprintf("%s:%d", globalConfig.bindAddr, globalConfig.listenPort)
}

// ConfigDir returns the directory used for persistent application data.
func ConfigDir() string {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.configDir
}

// BaseUrl returns the externally visible base URL of the server.
func BaseUrl() string {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.baseUrl
}

// DatabaseUrl returns the Postgres connection string.
func DatabaseUrl() string {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.databaseUrl
}

// MusicBrainzUrl returns the MusicBrainz API root URL.
func MusicBrainzUrl() string {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.musicBrainzUrl
}

// LogLevel returns the numeric log level derived from KOITO_LOG_LEVEL.
func LogLevel() int {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.logLevel
}

// StructuredLogging reports whether structured (JSON) logging is enabled.
func StructuredLogging() bool {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.structuredLogging
}

// LbzRelayEnabled reports whether the ListenBrainz relay is enabled.
func LbzRelayEnabled() bool {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.lbzRelayEnabled
}

// LbzRelayUrl returns the ListenBrainz relay URL (empty when disabled).
func LbzRelayUrl() string {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.lbzRelayUrl
}

// LbzRelayToken returns the ListenBrainz relay token (empty when disabled).
func LbzRelayToken() string {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.lbzRelayToken
}

// DefaultPassword returns the initial admin password.
func DefaultPassword() string {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.defaultPw
}

// DefaultUsername returns the initial admin username.
func DefaultUsername() string {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.defaultUsername
}

// FullImageCacheEnabled reports whether full-size images are cached locally.
func FullImageCacheEnabled() bool {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.enableFullImageCache
}

// DeezerDisabled reports whether the Deezer image source is disabled.
func DeezerDisabled() bool {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.disableDeezer
}

// CoverArtArchiveDisabled reports whether the Cover Art Archive source is disabled.
func CoverArtArchiveDisabled() bool {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.disableCAA
}

// MusicBrainzDisabled reports whether MusicBrainz lookups are disabled.
func MusicBrainzDisabled() bool {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.disableMusicBrainz
}

// SkipImport reports whether the startup import step should be skipped.
func SkipImport() bool {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.skipImport
}

// AllowedHosts returns the list of hosts allowed to reach the server.
func AllowedHosts() []string {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.allowedHosts
}

// AllowAllHosts reports whether the allowed-hosts list is the wildcard "*".
func AllowAllHosts() bool {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.allowAllHosts
}

// RateLimitDisabled reports whether request rate limiting is disabled.
func RateLimitDisabled() bool {
	lock.RLock()
	defer lock.RUnlock()
	return globalConfig.disableRateLimit
}
|
||||
82
internal/db/db.go
Normal file
82
internal/db/db.go
Normal file
|
|
@ -0,0 +1,82 @@
|
|||
// package db defines the database interface
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
// DB is the storage interface implemented by the Postgres layer. It covers
// catalog entities (artists, albums/releases, tracks), listens, users,
// sessions/API keys, aliases, and images.
type DB interface {
	// Get — single-entity and paginated reads.
	GetArtist(ctx context.Context, opts GetArtistOpts) (*models.Artist, error)
	GetAlbum(ctx context.Context, opts GetAlbumOpts) (*models.Album, error)
	GetTrack(ctx context.Context, opts GetTrackOpts) (*models.Track, error)
	GetTopTracksPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[*models.Track], error)
	GetTopArtistsPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[*models.Artist], error)
	GetTopAlbumsPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[*models.Album], error)
	GetListensPaginated(ctx context.Context, opts GetItemsOpts) (*PaginatedResponse[*models.Listen], error)
	GetListenActivity(ctx context.Context, opts ListenActivityOpts) ([]ListenActivityItem, error)
	GetAllArtistAliases(ctx context.Context, id int32) ([]models.Alias, error)
	GetAllAlbumAliases(ctx context.Context, id int32) ([]models.Alias, error)
	GetAllTrackAliases(ctx context.Context, id int32) ([]models.Alias, error)
	GetApiKeysByUserID(ctx context.Context, id int32) ([]models.ApiKey, error)
	GetUserBySession(ctx context.Context, sessionId uuid.UUID) (*models.User, error)
	GetUserByUsername(ctx context.Context, username string) (*models.User, error)
	GetUserByApiKey(ctx context.Context, key string) (*models.User, error)
	// Save — inserts (and insert-or-match upserts for catalog entities).
	SaveArtist(ctx context.Context, opts SaveArtistOpts) (*models.Artist, error)
	SaveArtistAliases(ctx context.Context, id int32, aliases []string, source string) error
	SaveAlbum(ctx context.Context, opts SaveAlbumOpts) (*models.Album, error)
	SaveAlbumAliases(ctx context.Context, id int32, aliases []string, source string) error
	SaveTrack(ctx context.Context, opts SaveTrackOpts) (*models.Track, error)
	SaveTrackAliases(ctx context.Context, id int32, aliases []string, source string) error
	SaveListen(ctx context.Context, opts SaveListenOpts) error
	SaveUser(ctx context.Context, opts SaveUserOpts) (*models.User, error)
	SaveApiKey(ctx context.Context, opts SaveApiKeyOpts) (*models.ApiKey, error)
	SaveSession(ctx context.Context, userId int32, expiresAt time.Time, persistent bool) (*models.Session, error)
	// Update — mutations of existing rows.
	UpdateArtist(ctx context.Context, opts UpdateArtistOpts) error
	UpdateTrack(ctx context.Context, opts UpdateTrackOpts) error
	UpdateAlbum(ctx context.Context, opts UpdateAlbumOpts) error
	AddArtistsToAlbum(ctx context.Context, opts AddArtistsToAlbumOpts) error
	UpdateUser(ctx context.Context, opts UpdateUserOpts) error
	UpdateApiKeyLabel(ctx context.Context, opts UpdateApiKeyLabelOpts) error
	RefreshSession(ctx context.Context, sessionId uuid.UUID, expiresAt time.Time) error
	SetPrimaryArtistAlias(ctx context.Context, id int32, alias string) error
	SetPrimaryAlbumAlias(ctx context.Context, id int32, alias string) error
	SetPrimaryTrackAlias(ctx context.Context, id int32, alias string) error
	// Delete
	DeleteArtist(ctx context.Context, id int32) error
	DeleteAlbum(ctx context.Context, id int32) error
	DeleteTrack(ctx context.Context, id int32) error
	DeleteListen(ctx context.Context, trackId int32, listenedAt time.Time) error
	DeleteArtistAlias(ctx context.Context, id int32, alias string) error
	DeleteAlbumAlias(ctx context.Context, id int32, alias string) error
	DeleteTrackAlias(ctx context.Context, id int32, alias string) error
	DeleteSession(ctx context.Context, sessionId uuid.UUID) error
	DeleteApiKey(ctx context.Context, id int32) error
	// Count — aggregate statistics, scoped to a time Period where applicable.
	CountListens(ctx context.Context, period Period) (int64, error)
	CountTracks(ctx context.Context, period Period) (int64, error)
	CountAlbums(ctx context.Context, period Period) (int64, error)
	CountArtists(ctx context.Context, period Period) (int64, error)
	CountTimeListened(ctx context.Context, period Period) (int64, error)
	CountUsers(ctx context.Context) (int64, error)
	// Search — free-text lookups.
	SearchArtists(ctx context.Context, q string) ([]*models.Artist, error)
	SearchAlbums(ctx context.Context, q string) ([]*models.Album, error)
	SearchTracks(ctx context.Context, q string) ([]*models.Track, error)
	// Merge — fold one entity (and its listens/aliases) into another.
	MergeTracks(ctx context.Context, fromId, toId int32) error
	MergeAlbums(ctx context.Context, fromId, toId int32) error
	MergeArtists(ctx context.Context, fromId, toId int32) error
	// Etc
	ImageHasAssociation(ctx context.Context, image uuid.UUID) (bool, error)
	GetImageSource(ctx context.Context, image uuid.UUID) (string, error)
	AlbumsWithoutImages(ctx context.Context, from int32) ([]*models.Album, error)
	Ping(ctx context.Context) error
	Close(ctx context.Context)
}
|
||||
140
internal/db/opts.go
Normal file
140
internal/db/opts.go
Normal file
|
|
@ -0,0 +1,140 @@
|
|||
package db
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
// GetAlbumOpts selects which lookup key GetAlbum uses. Callers set exactly
// one of: ID, MusicBrainzID, ArtistID+Title, or ArtistID+Titles (checked in
// that order; zero values are treated as "not set").
type GetAlbumOpts struct {
	ID            int32
	MusicBrainzID uuid.UUID
	ArtistID      int32
	Title         string
	Titles        []string
	// Image: lookup by image uuid — not used by the GetAlbum branches
	// visible here; presumably used elsewhere (confirm).
	Image uuid.UUID
}

// GetArtistOpts selects which lookup key GetArtist uses. Exactly one of
// ID, MusicBrainzID, or Name should be set (checked in that order).
type GetArtistOpts struct {
	ID            int32
	MusicBrainzID uuid.UUID
	Name          string
	// Image: lookup by image uuid — not used by the GetArtist branches
	// visible here; presumably used elsewhere (confirm).
	Image uuid.UUID
}

// GetTrackOpts selects which lookup key GetTrack uses (ID, MusicBrainzID,
// or Title together with the contributing ArtistIDs).
type GetTrackOpts struct {
	ID            int32
	MusicBrainzID uuid.UUID
	Title         string
	ArtistIDs     []int32
}
|
||||
|
||||
// SaveTrackOpts carries the fields needed to insert a new track.
type SaveTrackOpts struct {
	Title          string
	AlbumID        int32
	ArtistIDs      []int32
	RecordingMbzID uuid.UUID // MusicBrainz recording id; uuid.Nil means none
	Duration       int32     // track length; presumably seconds — confirm
}

// SaveAlbumOpts carries the fields needed to insert a new album (release).
// ArtistIDs is required and none of its elements may be 0.
type SaveAlbumOpts struct {
	Title          string
	MusicBrainzID  uuid.UUID // uuid.Nil means no MusicBrainz release id
	Type           string
	ArtistIDs      []int32
	VariousArtists bool
	Image          uuid.UUID // uuid.Nil means no image
	ImageSrc       string    // provenance of Image; empty means unset
	Aliases        []string
}

// SaveArtistOpts carries the fields needed to insert a new artist.
// Name becomes the canonical (primary) alias.
type SaveArtistOpts struct {
	Name          string
	MusicBrainzID uuid.UUID // uuid.Nil means no MusicBrainz artist id
	Aliases       []string
	Image         uuid.UUID // uuid.Nil means no image
	ImageSrc      string    // provenance of Image; empty means unset
}

// UpdateApiKeyLabelOpts identifies an API key by owner + id and the new
// label to assign to it.
type UpdateApiKeyLabelOpts struct {
	UserID int32
	ID     int32
	Label  string
}

// SaveUserOpts carries the fields needed to create a user.
type SaveUserOpts struct {
	Username string
	Password string
	Role     models.UserRole
}

// SaveApiKeyOpts carries the fields needed to create an API key for a user.
type SaveApiKeyOpts struct {
	Key    string
	UserID int32
	Label  string
}

// SaveListenOpts records a single listen of a track by a user at a time,
// attributed to the submitting client.
type SaveListenOpts struct {
	TrackID int32
	Time    time.Time
	UserID  int32
	Client  string
}
|
||||
|
||||
// UpdateTrackOpts identifies a track by ID and the fields to update.
// Zero values (uuid.Nil, 0) are presumably treated as "leave unchanged" —
// confirm against the implementation.
type UpdateTrackOpts struct {
	ID            int32
	MusicBrainzID uuid.UUID
	Duration      int32
}

// UpdateArtistOpts identifies an artist by ID and the fields to update.
type UpdateArtistOpts struct {
	ID            int32
	MusicBrainzID uuid.UUID
	Image         uuid.UUID
	ImageSrc      string
}

// UpdateAlbumOpts identifies an album by ID and the fields to update.
// In the Psql implementation, uuid.Nil fields are skipped (left unchanged).
type UpdateAlbumOpts struct {
	ID            int32
	MusicBrainzID uuid.UUID
	Image         uuid.UUID
	ImageSrc      string
}

// UpdateUserOpts identifies a user by ID and the credential fields to update.
type UpdateUserOpts struct {
	ID       int32
	Username string
	Password string
}

// AddArtistsToAlbumOpts lists artists to associate with an existing album.
type AddArtistsToAlbumOpts struct {
	AlbumID   int32
	ArtistIDs []int32
}
|
||||
|
||||
// GetItemsOpts controls pagination and time filtering for top-item and
// listen queries. Week/Month/Year presumably select a specific calendar
// window, otherwise Period applies — confirm against the query layer.
type GetItemsOpts struct {
	Limit  int
	Period Period
	Page   int
	Week   int // 1-52
	Month  int // 1-12
	Year   int

	// Used only for getting top tracks
	ArtistID int
	AlbumID  int

	// Used for getting listens
	TrackID int
}

// ListenActivityOpts configures listen-activity aggregation: either a
// rolling window of Range buckets of size Step ending at the current step,
// or a specific Year (optionally narrowed to Month). See
// ListenActivityOptsToTimes for the exact window semantics. The optional
// IDs restrict activity to one album, artist, or track.
type ListenActivityOpts struct {
	Step     StepInterval
	Range    int
	Month    int
	Year     int
	AlbumID  int32
	ArtistID int32
	TrackID  int32
}
|
||||
108
internal/db/period.go
Normal file
108
internal/db/period.go
Normal file
|
|
@ -0,0 +1,108 @@
|
|||
package db
|
||||
|
||||
import (
|
||||
"time"
|
||||
)
|
||||
|
||||
// should this be in db package ???
|
||||
|
||||
type Period string
|
||||
|
||||
const (
|
||||
PeriodDay Period = "day"
|
||||
PeriodWeek Period = "week"
|
||||
PeriodMonth Period = "month"
|
||||
PeriodYear Period = "year"
|
||||
PeriodAllTime Period = "all_time"
|
||||
PeriodDefault Period = "day"
|
||||
)
|
||||
|
||||
func StartTimeFromPeriod(p Period) time.Time {
|
||||
now := time.Now()
|
||||
switch p {
|
||||
case "day":
|
||||
return now.AddDate(0, 0, -1)
|
||||
case "week":
|
||||
return now.AddDate(0, 0, -7)
|
||||
case "month":
|
||||
return now.AddDate(0, -1, 0)
|
||||
case "year":
|
||||
return now.AddDate(-1, 0, 0)
|
||||
case "all_time":
|
||||
return time.Time{}
|
||||
default:
|
||||
// default 1 day
|
||||
return now.AddDate(0, 0, -1)
|
||||
}
|
||||
}
|
||||
|
||||
// StepInterval is the bucket size used when aggregating listen activity
// over time (one data point per step).
type StepInterval string

const (
	StepDay     StepInterval = "day"
	StepWeek    StepInterval = "week"
	StepMonth   StepInterval = "month"
	StepYear    StepInterval = "year"
	StepDefault StepInterval = "day"

	// DefaultRange is the default number of steps in an activity window.
	DefaultRange int = 12
)
|
||||
|
||||
// ListenActivityOptsToTimes converts activity options to an inclusive
// [start, end] window.
//
// start is the time of 00:00 at the beginning of opts.Range opts.Steps ago,
// end is the end time of the current opts.Step.
// E.g. if step is StepWeek and range is 4, start will be the time 00:00 on Sunday on the 4th week ago,
// and end will be 23:59:59 on Saturday at the end of the current week.
// If opts.Year (or opts.Year + opts.Month) is provided, start and end will simply by the start and end times of that year/month.
func ListenActivityOptsToTimes(opts ListenActivityOpts) (start, end time.Time) {
	now := time.Now()

	// If Year (and optionally Month) are specified, use calendar boundaries
	if opts.Year != 0 {
		if opts.Month != 0 {
			// Specific month of a specific year
			start = time.Date(opts.Year, time.Month(opts.Month), 1, 0, 0, 0, 0, now.Location())
			end = start.AddDate(0, 1, 0).Add(-time.Nanosecond)
		} else {
			// Whole year
			start = time.Date(opts.Year, 1, 1, 0, 0, 0, 0, now.Location())
			end = start.AddDate(1, 0, 0).Add(-time.Nanosecond)
		}
		return start, end
	}

	// X days ago + today = range
	// NOTE(review): an opts.Range of 0 becomes -1 here, which makes the
	// window extend one step into the future — presumably callers always
	// pass Range >= 1 (DefaultRange) — confirm.
	opts.Range = opts.Range - 1

	// Determine step and align accordingly. Each branch anchors to the
	// start of the *current* step, walks back Range steps for the window
	// start, and ends one nanosecond before the next step begins.
	switch opts.Step {
	case StepDay:
		today := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, now.Location())
		start = today.AddDate(0, 0, -opts.Range)
		end = today.AddDate(0, 0, 1).Add(-time.Nanosecond)

	case StepWeek:
		// Align to most recent Sunday
		weekday := int(now.Weekday()) // Sunday = 0
		startOfThisWeek := time.Date(now.Year(), now.Month(), now.Day()-weekday, 0, 0, 0, 0, now.Location())
		start = startOfThisWeek.AddDate(0, 0, -7*opts.Range)
		end = startOfThisWeek.AddDate(0, 0, 7).Add(-time.Nanosecond)

	case StepMonth:
		firstOfThisMonth := time.Date(now.Year(), now.Month(), 1, 0, 0, 0, 0, now.Location())
		start = firstOfThisMonth.AddDate(0, -opts.Range, 0)
		end = firstOfThisMonth.AddDate(0, 1, 0).Add(-time.Nanosecond)

	case StepYear:
		firstOfThisYear := time.Date(now.Year(), 1, 1, 0, 0, 0, 0, now.Location())
		start = firstOfThisYear.AddDate(-opts.Range, 0, 0)
		end = firstOfThisYear.AddDate(1, 0, 0).Add(-time.Nanosecond)

	default:
		// Default to daily
		today := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, now.Location())
		start = today.AddDate(0, 0, -opts.Range)
		end = today.AddDate(0, 0, 1).Add(-time.Nanosecond)
	}

	return start, end
}
|
||||
28
internal/db/period_test.go
Normal file
28
internal/db/period_test.go
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
package db_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
// TestListenActivityOptsToTimes is currently a placeholder; the intended
// assertions are kept below, commented out, until the test is finished.
func TestListenActivityOptsToTimes(t *testing.T) {

	// default range
	// opts := db.ListenActivityOpts{}
	// t1, t2 := db.ListenActivityOptsToTimes(opts)
	// t.Logf("%s to %s", t1, t2)
	// assert.WithinDuration(t, bod(time.Now().Add(-11*24*time.Hour)), t1, 5*time.Second)
	// assert.WithinDuration(t, eod(time.Now()), t2, 5*time.Second)
}
|
||||
|
||||
func eod(t time.Time) time.Time {
|
||||
year, month, day := t.Date()
|
||||
loc := t.Location()
|
||||
return time.Date(year, month, day, 23, 59, 59, 0, loc)
|
||||
}
|
||||
|
||||
func bod(t time.Time) time.Time {
|
||||
year, month, day := t.Date()
|
||||
loc := t.Location()
|
||||
return time.Date(year, month, day, 0, 0, 0, 0, loc)
|
||||
}
|
||||
312
internal/db/psql/album.go
Normal file
312
internal/db/psql/album.go
Normal file
|
|
@ -0,0 +1,312 @@
|
|||
package psql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/gabehf/koito/internal/repository"
|
||||
"github.com/gabehf/koito/internal/utils"
|
||||
"github.com/google/uuid"
|
||||
"github.com/jackc/pgx/v5"
|
||||
"github.com/jackc/pgx/v5/pgtype"
|
||||
)
|
||||
|
||||
// GetAlbum fetches a single album (release) using exactly one lookup key
// from opts, checked in order: ID, MusicBrainz release ID, ArtistID+Title,
// ArtistID+Titles. It returns an error when none of those keys is set.
// The returned Album carries its all-time listen count.
func (d *Psql) GetAlbum(ctx context.Context, opts db.GetAlbumOpts) (*models.Album, error) {
	l := logger.FromContext(ctx)

	var row repository.ReleasesWithTitle
	var err error

	if opts.ID != 0 {
		l.Debug().Msgf("Fetching album from DB with id %d", opts.ID)
		row, err = d.q.GetRelease(ctx, opts.ID)
	} else if opts.MusicBrainzID != uuid.Nil {
		l.Debug().Msgf("Fetching album from DB with MusicBrainz Release ID %s", opts.MusicBrainzID)
		row, err = d.q.GetReleaseByMbzID(ctx, &opts.MusicBrainzID)
	} else if opts.ArtistID != 0 && opts.Title != "" {
		l.Debug().Msgf("Fetching album from DB with artist_id %d and title %s", opts.ArtistID, opts.Title)
		row, err = d.q.GetReleaseByArtistAndTitle(ctx, repository.GetReleaseByArtistAndTitleParams{
			ArtistID: opts.ArtistID,
			Title:    opts.Title,
		})
	} else if opts.ArtistID != 0 && len(opts.Titles) > 0 {
		l.Debug().Msgf("Fetching release group from DB with artist_id %d and titles %v", opts.ArtistID, opts.Titles)
		row, err = d.q.GetReleaseByArtistAndTitles(ctx, repository.GetReleaseByArtistAndTitlesParams{
			ArtistID: opts.ArtistID,
			// Column1 is the sqlc-generated name for the titles array parameter
			Column1: opts.Titles,
		})
	} else {
		return nil, errors.New("insufficient information to get album")
	}

	if err != nil {
		return nil, err
	}

	// All-time listen count: epoch through now.
	count, err := d.q.CountListensFromRelease(ctx, repository.CountListensFromReleaseParams{
		ListenedAt:   time.Unix(0, 0),
		ListenedAt_2: time.Now(),
		ReleaseID:    row.ID,
	})
	if err != nil {
		return nil, err
	}

	return &models.Album{
		ID:             row.ID,
		MbzID:          row.MusicBrainzID,
		Title:          row.Title,
		Image:          row.Image,
		VariousArtists: row.VariousArtists,
		ListenCount:    count,
	}, nil
}
|
||||
|
||||
// SaveAlbum inserts a new release, associates it with every artist in
// opts.ArtistIDs, and stores opts.Title as the canonical (primary) alias,
// all in one transaction. ArtistIDs is required and no element may be 0.
// uuid.Nil MusicBrainzID/Image are stored as NULL.
func (d *Psql) SaveAlbum(ctx context.Context, opts db.SaveAlbumOpts) (*models.Album, error) {
	l := logger.FromContext(ctx)
	// Map uuid.Nil to NULL for the optional columns.
	var insertMbzID *uuid.UUID
	var insertImage *uuid.UUID
	if opts.MusicBrainzID != uuid.Nil {
		insertMbzID = &opts.MusicBrainzID
	}
	if opts.Image != uuid.Nil {
		insertImage = &opts.Image
	}
	if len(opts.ArtistIDs) < 1 {
		return nil, errors.New("required parameter 'ArtistIDs' missing")
	}
	for _, aid := range opts.ArtistIDs {
		if aid == 0 {
			return nil, errors.New("none of 'ArtistIDs' may be 0")
		}
	}
	tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
	if err != nil {
		l.Err(err).Msg("Failed to begin transaction")
		return nil, err
	}
	// Rollback is a no-op after a successful Commit.
	defer tx.Rollback(ctx)
	qtx := d.q.WithTx(tx)
	l.Debug().Msgf("Inserting release '%s' into DB", opts.Title)
	r, err := qtx.InsertRelease(ctx, repository.InsertReleaseParams{
		MusicBrainzID:  insertMbzID,
		VariousArtists: opts.VariousArtists,
		Image:          insertImage,
		ImageSource:    pgtype.Text{String: opts.ImageSrc, Valid: opts.ImageSrc != ""},
	})
	if err != nil {
		return nil, err
	}
	for _, artistId := range opts.ArtistIDs {
		l.Debug().Msgf("Associating release '%s' to artist with ID %d", opts.Title, artistId)
		err = qtx.AssociateArtistToRelease(ctx, repository.AssociateArtistToReleaseParams{
			ArtistID:  artistId,
			ReleaseID: r.ID,
		})
		if err != nil {
			return nil, err
		}
	}
	// The release's title lives in release_aliases as the primary alias.
	l.Debug().Msgf("Saving canonical alias %s for release %d", opts.Title, r.ID)
	err = qtx.InsertReleaseAlias(ctx, repository.InsertReleaseAliasParams{
		ReleaseID: r.ID,
		Alias:     opts.Title,
		Source:    "Canonical",
		IsPrimary: true,
	})
	if err != nil {
		// NOTE(review): this error is logged but not returned, so the
		// transaction still commits and the release can end up without a
		// title alias — confirm this is intentional.
		l.Err(err).Msgf("Failed to save canonical alias for album %d", r.ID)
	}

	err = tx.Commit(ctx)
	if err != nil {
		return nil, err
	}

	return &models.Album{
		ID:             r.ID,
		MbzID:          r.MusicBrainzID,
		Title:          opts.Title,
		Image:          r.Image,
		VariousArtists: r.VariousArtists,
	}, nil
}
|
||||
|
||||
// AddArtistsToAlbum associates each artist in opts.ArtistIDs with the
// release opts.AlbumID inside one transaction.
//
// NOTE(review): individual association failures are logged but not
// returned, and the transaction is committed regardless — confirm this
// best-effort behavior is intentional.
func (d *Psql) AddArtistsToAlbum(ctx context.Context, opts db.AddArtistsToAlbumOpts) error {
	l := logger.FromContext(ctx)
	tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
	if err != nil {
		l.Err(err).Msg("Failed to begin transaction")
		return err
	}
	defer tx.Rollback(ctx)
	qtx := d.q.WithTx(tx)
	for _, id := range opts.ArtistIDs {
		err := qtx.AssociateArtistToRelease(ctx, repository.AssociateArtistToReleaseParams{
			ReleaseID: opts.AlbumID,
			ArtistID:  id,
		})
		if err != nil {
			l.Error().Err(err).Msgf("Failed to associate release %d with artist %d", opts.AlbumID, id)
		}
	}
	return tx.Commit(ctx)
}
|
||||
|
||||
func (d *Psql) UpdateAlbum(ctx context.Context, opts db.UpdateAlbumOpts) error {
|
||||
l := logger.FromContext(ctx)
|
||||
if opts.ID == 0 {
|
||||
return errors.New("missing album id")
|
||||
}
|
||||
tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to begin transaction")
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback(ctx)
|
||||
qtx := d.q.WithTx(tx)
|
||||
if opts.MusicBrainzID != uuid.Nil {
|
||||
l.Debug().Msgf("Updating release with ID %d with MusicBrainz ID %s", opts.ID, opts.MusicBrainzID)
|
||||
err := qtx.UpdateReleaseMbzID(ctx, repository.UpdateReleaseMbzIDParams{
|
||||
ID: opts.ID,
|
||||
MusicBrainzID: &opts.MusicBrainzID,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if opts.Image != uuid.Nil {
|
||||
l.Debug().Msgf("Updating release with ID %d with image %s", opts.ID, opts.Image)
|
||||
err := qtx.UpdateReleaseImage(ctx, repository.UpdateReleaseImageParams{
|
||||
ID: opts.ID,
|
||||
Image: &opts.Image,
|
||||
ImageSource: pgtype.Text{String: opts.ImageSrc, Valid: opts.ImageSrc != ""},
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return tx.Commit(ctx)
|
||||
}
|
||||
|
||||
// SaveAlbumAliases inserts every unique alias for the album with the given
// id, tagged with the given source. Existing aliases are merged into the
// input before deduplication, and each resulting alias is (re-)inserted
// with IsPrimary=false. A blank alias aborts the whole transaction.
//
// NOTE(review): re-inserting existing aliases relies on the insert query's
// conflict handling to not demote the current primary or overwrite its
// source — confirm against the SQL.
func (d *Psql) SaveAlbumAliases(ctx context.Context, id int32, aliases []string, source string) error {
	l := logger.FromContext(ctx)
	if id == 0 {
		return errors.New("album id not specified")
	}
	tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
	if err != nil {
		l.Err(err).Msg("Failed to begin transaction")
		return err
	}
	defer tx.Rollback(ctx)
	qtx := d.q.WithTx(tx)
	existing, err := qtx.GetAllReleaseAliases(ctx, id)
	if err != nil {
		return err
	}
	// Merge existing aliases so the deduplicated set is the union of old
	// and new.
	for _, v := range existing {
		aliases = append(aliases, v.Alias)
	}
	utils.Unique(&aliases)
	for _, alias := range aliases {
		if strings.TrimSpace(alias) == "" {
			return errors.New("aliases cannot be blank")
		}
		err = qtx.InsertReleaseAlias(ctx, repository.InsertReleaseAliasParams{
			Alias:     strings.TrimSpace(alias),
			ReleaseID: id,
			Source:    source,
			IsPrimary: false,
		})
		if err != nil {
			return err
		}
	}
	return tx.Commit(ctx)
}
|
||||
|
||||
// DeleteAlbum removes the release with the given id. Dependent rows
// (tracks, listens, aliases) are presumably removed by ON DELETE CASCADE
// rules in the schema — confirm against the migrations.
func (d *Psql) DeleteAlbum(ctx context.Context, id int32) error {
	return d.q.DeleteRelease(ctx, id)
}
|
||||
// DeleteAlbumAlias removes one alias (matched exactly) from the album with
// the given id. Other aliases are untouched.
func (d *Psql) DeleteAlbumAlias(ctx context.Context, id int32, alias string) error {
	return d.q.DeleteReleaseAlias(ctx, repository.DeleteReleaseAliasParams{
		ReleaseID: id,
		Alias:     alias,
	})
}
|
||||
|
||||
// GetAllAlbumAliases returns every alias row for the album with the given
// id. Note that the ID field of each returned Alias carries the album id
// (the same value for every element), not a per-alias identifier.
func (d *Psql) GetAllAlbumAliases(ctx context.Context, id int32) ([]models.Alias, error) {
	rows, err := d.q.GetAllReleaseAliases(ctx, id)
	if err != nil {
		return nil, err
	}
	aliases := make([]models.Alias, len(rows))
	for i, row := range rows {
		aliases[i] = models.Alias{
			ID:      id,
			Alias:   row.Alias,
			Source:  row.Source,
			Primary: row.IsPrimary,
		}
	}
	return aliases, nil
}
|
||||
|
||||
func (d *Psql) SetPrimaryAlbumAlias(ctx context.Context, id int32, alias string) error {
|
||||
l := logger.FromContext(ctx)
|
||||
if id == 0 {
|
||||
return errors.New("artist id not specified")
|
||||
}
|
||||
tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to begin transaction")
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback(ctx)
|
||||
qtx := d.q.WithTx(tx)
|
||||
// get all aliases
|
||||
aliases, err := qtx.GetAllReleaseAliases(ctx, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
primary := ""
|
||||
exists := false
|
||||
for _, v := range aliases {
|
||||
if v.Alias == alias {
|
||||
exists = true
|
||||
}
|
||||
if v.IsPrimary {
|
||||
primary = v.Alias
|
||||
}
|
||||
}
|
||||
if primary == alias {
|
||||
// no-op rename
|
||||
return nil
|
||||
}
|
||||
if !exists {
|
||||
return errors.New("alias does not exist")
|
||||
}
|
||||
err = qtx.SetReleaseAliasPrimaryStatus(ctx, repository.SetReleaseAliasPrimaryStatusParams{
|
||||
ReleaseID: id,
|
||||
Alias: alias,
|
||||
IsPrimary: true,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = qtx.SetReleaseAliasPrimaryStatus(ctx, repository.SetReleaseAliasPrimaryStatusParams{
|
||||
ReleaseID: id,
|
||||
Alias: primary,
|
||||
IsPrimary: false,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return tx.Commit(ctx)
|
||||
}
|
||||
319
internal/db/psql/album_test.go
Normal file
319
internal/db/psql/album_test.go
Normal file
|
|
@ -0,0 +1,319 @@
|
|||
package psql_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/gabehf/koito/internal/catalog"
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/google/uuid"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// truncateTestData empties every table touched by the album/artist tests
// and resets their identity sequences so fixture rows get ids 1, 2, ...
// again on the next insert.
func truncateTestData(t *testing.T) {
	err := store.Exec(context.Background(),
		`TRUNCATE
		artists,
		artist_aliases,
		tracks,
		artist_tracks,
		releases,
		artist_releases,
		release_aliases,
		listens
	RESTART IDENTITY CASCADE`)
	require.NoError(t, err)
}
|
||||
|
||||
// testDataForRelease resets the database and seeds two artists with primary
// aliases; because identities restart, they receive ids 1 ("ATARASHII
// GAKKO!") and 2 ("Masayuki Suzuki"), which the release tests rely on.
func testDataForRelease(t *testing.T) {
	truncateTestData(t)
	err := store.Exec(context.Background(),
		`INSERT INTO artists (musicbrainz_id)
		VALUES ('00000000-0000-0000-0000-000000000001')`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO artist_aliases (artist_id, alias, source, is_primary)
		VALUES (1, 'ATARASHII GAKKO!', 'MusicBrainz', true)`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO artists (musicbrainz_id)
		VALUES ('00000000-0000-0000-0000-000000000002')`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO artist_aliases (artist_id, alias, source, is_primary)
		VALUES (2, 'Masayuki Suzuki', 'MusicBrainz', true)`)
	require.NoError(t, err)
}
|
||||
|
||||
// TestGetAlbum covers lookup by ID and the error returned when no lookup
// key is provided.
func TestGetAlbum(t *testing.T) {
	testDataForRelease(t)
	ctx := context.Background()

	// Insert test data
	rg, err := store.SaveAlbum(ctx, db.SaveAlbumOpts{
		Title:     "Test Release Group",
		ArtistIDs: []int32{1},
	})
	require.NoError(t, err)

	// Test GetAlbum by ID
	result, err := store.GetAlbum(ctx, db.GetAlbumOpts{ID: rg.ID})
	require.NoError(t, err)
	assert.Equal(t, rg.ID, result.ID)
	assert.Equal(t, "Test Release Group", result.Title)

	// Test GetAlbum with insufficient information
	_, err = store.GetAlbum(ctx, db.GetAlbumOpts{})
	assert.Error(t, err)

	truncateTestData(t)
}
|
||||
|
||||
// TestSaveAlbum verifies that SaveAlbum persists the release (visible via
// the releases_with_title view) and creates one artist_releases row per
// artist id.
func TestSaveAlbum(t *testing.T) {
	testDataForRelease(t)
	ctx := context.Background()

	// Save release group with artist IDs
	artistIDs := []int32{1, 2}
	rg, err := store.SaveAlbum(ctx, db.SaveAlbumOpts{
		Title:     "New Release Group",
		ArtistIDs: artistIDs,
	})
	require.NoError(t, err)

	// Verify release group was saved
	assert.Equal(t, "New Release Group", rg.Title)

	// Verify release was created for release group
	exists, err := store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM releases_with_title
		WHERE title = $1 AND id = $2
	)`, "New Release Group", rg.ID)
	require.NoError(t, err)
	assert.True(t, exists, "expected release to exist")

	// Verify artist associations were created for release group
	for _, aid := range artistIDs {
		exists, err := store.RowExists(ctx, `
		SELECT EXISTS (
			SELECT 1 FROM artist_releases
			WHERE artist_id = $1 AND release_id = $2
		)`, aid, rg.ID)
		require.NoError(t, err)
		assert.True(t, exists, "expected artist association to exist")
	}

	truncateTestData(t)
}
|
||||
|
||||
// TestUpdateAlbum verifies that UpdateAlbum writes a new MusicBrainz id and
// image, both readable back through GetAlbum.
func TestUpdateAlbum(t *testing.T) {
	testDataForRelease(t)
	ctx := context.Background()

	rg, err := store.SaveAlbum(ctx, db.SaveAlbumOpts{
		Title:     "Old Title",
		ArtistIDs: []int32{1},
	})
	require.NoError(t, err)

	newMbzID := uuid.New()
	imgid := uuid.New()
	err = store.UpdateAlbum(ctx, db.UpdateAlbumOpts{
		ID:            rg.ID,
		MusicBrainzID: newMbzID,
		Image:         imgid,
		ImageSrc:      catalog.ImageSourceUserUpload,
	})
	require.NoError(t, err)

	result, err := store.GetAlbum(ctx, db.GetAlbumOpts{ID: rg.ID})
	require.NoError(t, err)
	assert.Equal(t, newMbzID, *result.MbzID)
	assert.Equal(t, imgid, *result.Image)

	truncateTestData(t)
}
|
||||
// TestAddArtistsToAlbum verifies that an additional artist can be
// associated with an existing album.
func TestAddArtistsToAlbum(t *testing.T) {
	testDataForRelease(t)
	ctx := context.Background()

	// Insert test album
	rg, err := store.SaveAlbum(ctx, db.SaveAlbumOpts{
		Title:     "Test Album",
		ArtistIDs: []int32{1},
	})
	require.NoError(t, err)

	// Add additional artists to the album
	err = store.AddArtistsToAlbum(ctx, db.AddArtistsToAlbumOpts{
		AlbumID:   rg.ID,
		ArtistIDs: []int32{2},
	})
	require.NoError(t, err)

	// Verify artist associations were created
	exists, err := store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM artist_releases
		WHERE artist_id = $1 AND release_id = $2
	)`, 2, rg.ID)
	require.NoError(t, err)
	assert.True(t, exists, "expected artist association to exist")

	truncateTestData(t)
}
|
||||
func TestSaveAlbumAliases(t *testing.T) {
|
||||
testDataForRelease(t)
|
||||
ctx := context.Background()
|
||||
|
||||
// Insert test album
|
||||
rg, err := store.SaveAlbum(ctx, db.SaveAlbumOpts{
|
||||
Title: "Test Album",
|
||||
ArtistIDs: []int32{1},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Save aliases for the album
|
||||
aliases := []string{"Alias 1", "Alias 2"}
|
||||
err = store.SaveAlbumAliases(ctx, rg.ID, aliases, "TestSource")
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify aliases were saved
|
||||
for _, alias := range aliases {
|
||||
exists, err := store.RowExists(ctx, `
|
||||
SELECT EXISTS (
|
||||
SELECT 1 FROM release_aliases
|
||||
WHERE release_id = $1 AND alias = $2
|
||||
)`, rg.ID, alias)
|
||||
require.NoError(t, err)
|
||||
assert.True(t, exists, "expected alias to exist")
|
||||
}
|
||||
|
||||
err = store.SetPrimaryAlbumAlias(ctx, 1, "Alias 1")
|
||||
require.NoError(t, err)
|
||||
album, err := store.GetAlbum(ctx, db.GetAlbumOpts{ID: rg.ID})
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "Alias 1", album.Title)
|
||||
|
||||
err = store.SetPrimaryAlbumAlias(ctx, 1, "Fake Alias")
|
||||
require.Error(t, err)
|
||||
|
||||
store.SetPrimaryAlbumAlias(ctx, 1, "Album One")
|
||||
|
||||
truncateTestData(t)
|
||||
}
|
||||
// TestDeleteAlbum verifies that deleting an album also removes its tracks
// and their listens (cascade behavior); it uses the shared
// testDataForTopItems fixture, which is assumed to give album 1 a track 1
// with listens.
func TestDeleteAlbum(t *testing.T) {
	testDataForRelease(t)
	ctx := context.Background()

	testDataForTopItems(t)

	// Delete the album
	err := store.DeleteAlbum(ctx, 1)
	require.NoError(t, err)

	// Verify album was deleted
	exists, err := store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM releases
		WHERE id = $1
	)`, 1)
	require.NoError(t, err)
	assert.False(t, exists, "expected album to be deleted")

	// Verify album's track was deleted
	exists, err = store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM tracks
		WHERE id = $1
	)`, 1)
	require.NoError(t, err)
	assert.False(t, exists, "expected album's tracks to be deleted")

	// Verify album's listens was deleted
	exists, err = store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM listens
		WHERE track_id = $1
	)`, 1)
	require.NoError(t, err)
	assert.False(t, exists, "expected album's listens to be deleted")

	truncateTestData(t)
}
|
||||
// TestDeleteAlbumAlias verifies that deleting one alias removes only that
// alias and leaves the others intact.
func TestDeleteAlbumAlias(t *testing.T) {
	testDataForRelease(t)
	ctx := context.Background()

	// Insert test album
	rg, err := store.SaveAlbum(ctx, db.SaveAlbumOpts{
		Title:     "Test Album",
		ArtistIDs: []int32{1},
	})
	require.NoError(t, err)

	// Save aliases for the album
	aliases := []string{"Alias 1", "Alias 2"}
	err = store.SaveAlbumAliases(ctx, rg.ID, aliases, "TestSource")
	require.NoError(t, err)

	// Delete one alias
	err = store.DeleteAlbumAlias(ctx, rg.ID, "Alias 1")
	require.NoError(t, err)

	// Verify alias was deleted
	exists, err := store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM release_aliases
		WHERE release_id = $1 AND alias = $2
	)`, rg.ID, "Alias 1")
	require.NoError(t, err)
	assert.False(t, exists, "expected alias to be deleted")

	// Verify other alias still exists
	exists, err = store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM release_aliases
		WHERE release_id = $1 AND alias = $2
	)`, rg.ID, "Alias 2")
	require.NoError(t, err)
	assert.True(t, exists, "expected alias to still exist")

	truncateTestData(t)
}
|
||||
// TestGetAllAlbumAliases verifies that all saved aliases plus the canonical
// title alias created by SaveAlbum are returned.
func TestGetAllAlbumAliases(t *testing.T) {
	testDataForRelease(t)
	ctx := context.Background()

	// Insert test album
	rg, err := store.SaveAlbum(ctx, db.SaveAlbumOpts{
		Title:     "Test Album",
		ArtistIDs: []int32{1},
	})
	require.NoError(t, err)

	// Save aliases for the album
	aliases := []string{"Alias 1", "Alias 2"}
	err = store.SaveAlbumAliases(ctx, rg.ID, aliases, "TestSource")
	require.NoError(t, err)

	// Retrieve all aliases
	result, err := store.GetAllAlbumAliases(ctx, rg.ID)
	require.NoError(t, err)
	assert.Len(t, result, len(aliases)+1) // new + canonical

	for _, alias := range aliases {
		found := false
		for _, res := range result {
			if res.Alias == alias {
				found = true
				break
			}
		}
		assert.True(t, found, "expected alias to be retrieved")
	}

	truncateTestData(t)
}
|
||||
309
internal/db/psql/artist.go
Normal file
309
internal/db/psql/artist.go
Normal file
|
|
@ -0,0 +1,309 @@
|
|||
package psql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/gabehf/koito/internal/repository"
|
||||
"github.com/gabehf/koito/internal/utils"
|
||||
"github.com/google/uuid"
|
||||
"github.com/jackc/pgx/v5"
|
||||
"github.com/jackc/pgx/v5/pgtype"
|
||||
)
|
||||
|
||||
func (d *Psql) GetArtist(ctx context.Context, opts db.GetArtistOpts) (*models.Artist, error) {
|
||||
l := logger.FromContext(ctx)
|
||||
if opts.ID != 0 {
|
||||
l.Debug().Msgf("Fetching artist from DB with id %d", opts.ID)
|
||||
row, err := d.q.GetArtist(ctx, opts.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{
|
||||
ListenedAt: time.Unix(0, 0),
|
||||
ListenedAt_2: time.Now(),
|
||||
ArtistID: row.ID,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &models.Artist{
|
||||
ID: row.ID,
|
||||
MbzID: row.MusicBrainzID,
|
||||
Name: row.Name,
|
||||
Aliases: row.Aliases,
|
||||
Image: row.Image,
|
||||
ListenCount: count,
|
||||
}, nil
|
||||
} else if opts.MusicBrainzID != uuid.Nil {
|
||||
l.Debug().Msgf("Fetching artist from DB with MusicBrainz ID %s", opts.MusicBrainzID)
|
||||
row, err := d.q.GetArtistByMbzID(ctx, &opts.MusicBrainzID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{
|
||||
ListenedAt: time.Unix(0, 0),
|
||||
ListenedAt_2: time.Now(),
|
||||
ArtistID: row.ID,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &models.Artist{
|
||||
ID: row.ID,
|
||||
MbzID: row.MusicBrainzID,
|
||||
Name: row.Name,
|
||||
Aliases: row.Aliases,
|
||||
Image: row.Image,
|
||||
ListenCount: count,
|
||||
}, nil
|
||||
} else if opts.Name != "" {
|
||||
l.Debug().Msgf("Fetching artist from DB with name '%s'", opts.Name)
|
||||
row, err := d.q.GetArtistByName(ctx, opts.Name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
count, err := d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{
|
||||
ListenedAt: time.Unix(0, 0),
|
||||
ListenedAt_2: time.Now(),
|
||||
ArtistID: row.ID,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &models.Artist{
|
||||
ID: row.ID,
|
||||
MbzID: row.MusicBrainzID,
|
||||
Name: row.Name,
|
||||
Aliases: row.Aliases,
|
||||
Image: row.Image,
|
||||
ListenCount: count,
|
||||
}, nil
|
||||
} else {
|
||||
return nil, errors.New("insufficient information to get artist")
|
||||
}
|
||||
}
|
||||
|
||||
// SaveArtistAliases inserts all unique aliases into the DB with the
// specified source. Existing aliases are merged into the input before
// deduplication, and each resulting alias is (re-)inserted with
// IsPrimary=false. A blank alias aborts the whole transaction.
//
// NOTE(review): re-inserting existing aliases relies on the insert query's
// conflict handling to not demote the current primary or overwrite its
// source — confirm against the SQL.
func (d *Psql) SaveArtistAliases(ctx context.Context, id int32, aliases []string, source string) error {
	l := logger.FromContext(ctx)
	if id == 0 {
		return errors.New("artist id not specified")
	}
	tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
	if err != nil {
		l.Err(err).Msg("Failed to begin transaction")
		return err
	}
	defer tx.Rollback(ctx)
	qtx := d.q.WithTx(tx)
	existing, err := qtx.GetAllArtistAliases(ctx, id)
	if err != nil {
		return err
	}
	// Merge existing aliases so the deduplicated set is the union of old
	// and new.
	for _, v := range existing {
		aliases = append(aliases, v.Alias)
	}
	utils.Unique(&aliases)
	for _, alias := range aliases {
		if strings.TrimSpace(alias) == "" {
			return errors.New("aliases cannot be blank")
		}
		err = qtx.InsertArtistAlias(ctx, repository.InsertArtistAliasParams{
			Alias:     strings.TrimSpace(alias),
			ArtistID:  id,
			Source:    source,
			IsPrimary: false,
		})
		if err != nil {
			return err
		}
	}
	return tx.Commit(ctx)
}
|
||||
|
||||
func (d *Psql) DeleteArtist(ctx context.Context, id int32) error {
|
||||
return d.q.DeleteArtist(ctx, id)
|
||||
}
|
||||
|
||||
// Equivalent to Psql.SaveArtist, then Psql.SaveMbzAliases
|
||||
func (d *Psql) SaveArtist(ctx context.Context, opts db.SaveArtistOpts) (*models.Artist, error) {
|
||||
l := logger.FromContext(ctx)
|
||||
var insertMbzID *uuid.UUID
|
||||
var insertImage *uuid.UUID
|
||||
if opts.MusicBrainzID != uuid.Nil {
|
||||
insertMbzID = &opts.MusicBrainzID
|
||||
}
|
||||
if opts.Image != uuid.Nil {
|
||||
insertImage = &opts.Image
|
||||
}
|
||||
tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to begin transaction")
|
||||
return nil, err
|
||||
}
|
||||
defer tx.Rollback(ctx)
|
||||
qtx := d.q.WithTx(tx)
|
||||
opts.Name = strings.TrimSpace(opts.Name)
|
||||
if opts.Name == "" {
|
||||
return nil, errors.New("name must not be blank")
|
||||
}
|
||||
l.Debug().Msgf("Inserting artist '%s' into DB", opts.Name)
|
||||
a, err := qtx.InsertArtist(ctx, repository.InsertArtistParams{
|
||||
MusicBrainzID: insertMbzID,
|
||||
Image: insertImage,
|
||||
ImageSource: pgtype.Text{String: opts.ImageSrc, Valid: opts.ImageSrc != ""},
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
l.Debug().Msgf("Inserting canonical alias '%s' into DB for artist with id %d", opts.Name, a.ID)
|
||||
err = qtx.InsertArtistAlias(ctx, repository.InsertArtistAliasParams{
|
||||
ArtistID: a.ID,
|
||||
Alias: opts.Name,
|
||||
Source: "Canonical",
|
||||
IsPrimary: true,
|
||||
})
|
||||
if err != nil {
|
||||
l.Error().Err(err).Msgf("Error inserting canonical alias for artist '%s'", opts.Name)
|
||||
return nil, err
|
||||
}
|
||||
err = tx.Commit(ctx)
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to commit insert artist transaction")
|
||||
return nil, err
|
||||
}
|
||||
artist := &models.Artist{
|
||||
ID: a.ID,
|
||||
Name: opts.Name,
|
||||
Image: a.Image,
|
||||
MbzID: a.MusicBrainzID,
|
||||
Aliases: []string{opts.Name},
|
||||
}
|
||||
if len(opts.Aliases) > 0 {
|
||||
l.Debug().Msgf("Inserting aliases '%v' into DB for artist '%s'", opts.Aliases, opts.Name)
|
||||
err = d.SaveArtistAliases(ctx, a.ID, opts.Aliases, "MusicBrainz")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
artist.Aliases = opts.Aliases
|
||||
}
|
||||
return artist, nil
|
||||
}
|
||||
|
||||
func (d *Psql) UpdateArtist(ctx context.Context, opts db.UpdateArtistOpts) error {
|
||||
l := logger.FromContext(ctx)
|
||||
if opts.ID == 0 {
|
||||
return errors.New("artist id not specified")
|
||||
}
|
||||
tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to begin transaction")
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback(ctx)
|
||||
qtx := d.q.WithTx(tx)
|
||||
if opts.MusicBrainzID != uuid.Nil {
|
||||
l.Debug().Msgf("Updating artist with id %d with MusicBrainz ID %s", opts.ID, opts.MusicBrainzID)
|
||||
err := qtx.UpdateArtistMbzID(ctx, repository.UpdateArtistMbzIDParams{
|
||||
ID: opts.ID,
|
||||
MusicBrainzID: &opts.MusicBrainzID,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if opts.Image != uuid.Nil {
|
||||
l.Debug().Msgf("Updating artist with id %d with image %s", opts.ID, opts.Image)
|
||||
err = qtx.UpdateArtistImage(ctx, repository.UpdateArtistImageParams{
|
||||
ID: opts.ID,
|
||||
Image: &opts.Image,
|
||||
ImageSource: pgtype.Text{String: opts.ImageSrc, Valid: opts.ImageSrc != ""},
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return tx.Commit(ctx)
|
||||
}
|
||||
|
||||
func (d *Psql) DeleteArtistAlias(ctx context.Context, id int32, alias string) error {
|
||||
return d.q.DeleteArtistAlias(ctx, repository.DeleteArtistAliasParams{
|
||||
ArtistID: id,
|
||||
Alias: alias,
|
||||
})
|
||||
}
|
||||
func (d *Psql) GetAllArtistAliases(ctx context.Context, id int32) ([]models.Alias, error) {
|
||||
rows, err := d.q.GetAllArtistAliases(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
aliases := make([]models.Alias, len(rows))
|
||||
for i, row := range rows {
|
||||
aliases[i] = models.Alias{
|
||||
ID: id,
|
||||
Alias: row.Alias,
|
||||
Source: row.Source,
|
||||
Primary: row.IsPrimary,
|
||||
}
|
||||
}
|
||||
return aliases, nil
|
||||
}
|
||||
|
||||
func (d *Psql) SetPrimaryArtistAlias(ctx context.Context, id int32, alias string) error {
|
||||
l := logger.FromContext(ctx)
|
||||
if id == 0 {
|
||||
return errors.New("artist id not specified")
|
||||
}
|
||||
tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to begin transaction")
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback(ctx)
|
||||
qtx := d.q.WithTx(tx)
|
||||
// get all aliases
|
||||
aliases, err := qtx.GetAllArtistAliases(ctx, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
primary := ""
|
||||
exists := false
|
||||
for _, v := range aliases {
|
||||
if v.Alias == alias {
|
||||
exists = true
|
||||
}
|
||||
if v.IsPrimary {
|
||||
primary = v.Alias
|
||||
}
|
||||
}
|
||||
if primary == alias {
|
||||
// no-op rename
|
||||
return nil
|
||||
}
|
||||
if !exists {
|
||||
return errors.New("alias does not exist")
|
||||
}
|
||||
err = qtx.SetArtistAliasPrimaryStatus(ctx, repository.SetArtistAliasPrimaryStatusParams{
|
||||
ArtistID: id,
|
||||
Alias: alias,
|
||||
IsPrimary: true,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = qtx.SetArtistAliasPrimaryStatus(ctx, repository.SetArtistAliasPrimaryStatusParams{
|
||||
ArtistID: id,
|
||||
Alias: primary,
|
||||
IsPrimary: false,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return tx.Commit(ctx)
|
||||
}
|
||||
247
internal/db/psql/artist_test.go
Normal file
247
internal/db/psql/artist_test.go
Normal file
|
|
@ -0,0 +1,247 @@
|
|||
package psql_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"slices"
|
||||
"testing"
|
||||
|
||||
"github.com/gabehf/koito/internal/catalog"
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/google/uuid"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestGetArtist(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
mbzId := uuid.MustParse("00000000-0000-0000-0000-000000000001")
|
||||
// Insert test data
|
||||
artist, err := store.SaveArtist(ctx, db.SaveArtistOpts{
|
||||
Name: "Test Artist",
|
||||
MusicBrainzID: mbzId,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Test GetArtist by ID
|
||||
result, err := store.GetArtist(ctx, db.GetArtistOpts{ID: artist.ID})
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, artist.ID, result.ID)
|
||||
assert.Equal(t, "Test Artist", result.Name)
|
||||
|
||||
// Test GetArtist by Name
|
||||
result, err = store.GetArtist(ctx, db.GetArtistOpts{Name: artist.Name})
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, artist.ID, result.ID)
|
||||
|
||||
// Test GetArtist by MusicBrainzID
|
||||
result, err = store.GetArtist(ctx, db.GetArtistOpts{MusicBrainzID: mbzId})
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, artist.ID, result.ID)
|
||||
|
||||
// Test GetArtist with insufficient information
|
||||
_, err = store.GetArtist(ctx, db.GetArtistOpts{})
|
||||
assert.Error(t, err)
|
||||
|
||||
truncateTestData(t)
|
||||
}
|
||||
|
||||
func TestSaveAliases(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
|
||||
// Insert test artist
|
||||
artist, err := store.SaveArtist(ctx, db.SaveArtistOpts{
|
||||
Name: "Alias Artist",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Save aliases
|
||||
aliases := []string{"Alias1", "Alias2"}
|
||||
err = store.SaveArtistAliases(ctx, artist.ID, aliases, "MusicBrainz")
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify aliases were saved
|
||||
for _, alias := range aliases {
|
||||
exists, err := store.RowExists(ctx, `
|
||||
SELECT EXISTS (
|
||||
SELECT 1 FROM artist_aliases
|
||||
WHERE artist_id = $1 AND alias = $2
|
||||
)`, artist.ID, alias)
|
||||
require.NoError(t, err)
|
||||
assert.True(t, exists, "expected alias to exist")
|
||||
}
|
||||
|
||||
err = store.SetPrimaryArtistAlias(ctx, 1, "Alias1")
|
||||
require.NoError(t, err)
|
||||
artist, err = store.GetArtist(ctx, db.GetArtistOpts{ID: artist.ID})
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "Alias1", artist.Name)
|
||||
|
||||
err = store.SetPrimaryArtistAlias(ctx, 1, "Fake Alias")
|
||||
require.Error(t, err)
|
||||
|
||||
truncateTestData(t)
|
||||
}
|
||||
|
||||
func TestSaveArtist(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
|
||||
// Save artist with aliases
|
||||
aliases := []string{"Alias1", "Alias2"}
|
||||
artist, err := store.SaveArtist(ctx, db.SaveArtistOpts{
|
||||
Name: "New Artist",
|
||||
Aliases: aliases,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify artist was saved
|
||||
assert.Equal(t, "New Artist", artist.Name)
|
||||
|
||||
// Verify aliases were saved
|
||||
for _, alias := range slices.Concat(aliases, []string{"New Artist"}) {
|
||||
exists, err := store.RowExists(ctx, `
|
||||
SELECT EXISTS (
|
||||
SELECT 1 FROM artist_aliases
|
||||
WHERE artist_id = $1 AND alias = $2
|
||||
)`, artist.ID, alias)
|
||||
require.NoError(t, err)
|
||||
assert.True(t, exists, "expected alias '%s' to exist", alias)
|
||||
}
|
||||
|
||||
truncateTestData(t)
|
||||
}
|
||||
|
||||
func TestUpdateArtist(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
|
||||
// Insert test artist
|
||||
artist, err := store.SaveArtist(ctx, db.SaveArtistOpts{
|
||||
Name: "Old Name",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
imgid := uuid.New()
|
||||
err = store.UpdateArtist(ctx, db.UpdateArtistOpts{
|
||||
ID: artist.ID,
|
||||
Image: imgid,
|
||||
ImageSrc: catalog.ImageSourceUserUpload,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
result, err := store.GetArtist(ctx, db.GetArtistOpts{ID: artist.ID})
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, imgid, *result.Image)
|
||||
|
||||
truncateTestData(t)
|
||||
}
|
||||
func TestGetAllArtistAliases(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
|
||||
// Insert test artist
|
||||
artist, err := store.SaveArtist(ctx, db.SaveArtistOpts{
|
||||
Name: "Alias Artist",
|
||||
Aliases: []string{"Alias1", "Alias2"},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Retrieve all aliases
|
||||
result, err := store.GetAllArtistAliases(ctx, artist.ID)
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, result, 3) // Includes canonical alias
|
||||
|
||||
// Verify aliases were retrieved
|
||||
expectedAliases := []string{"Alias Artist", "Alias1", "Alias2"}
|
||||
for _, alias := range expectedAliases {
|
||||
found := false
|
||||
for _, res := range result {
|
||||
if res.Alias == alias {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
assert.True(t, found, "expected alias '%s' to be retrieved", alias)
|
||||
}
|
||||
|
||||
truncateTestData(t)
|
||||
}
|
||||
// TestDeleteArtistAlias verifies that deleting a single alias removes only
// that alias and leaves the artist's other aliases intact.
func TestDeleteArtistAlias(t *testing.T) {
	ctx := context.Background()

	// Insert test artist
	artist, err := store.SaveArtist(ctx, db.SaveArtistOpts{
		Name:    "Alias Artist",
		Aliases: []string{"Alias1", "Alias2"},
	})
	require.NoError(t, err)

	// Delete one alias
	err = store.DeleteArtistAlias(ctx, artist.ID, "Alias1")
	require.NoError(t, err)

	// Verify alias was deleted
	exists, err := store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM artist_aliases
		WHERE artist_id = $1 AND alias = $2
	)`, artist.ID, "Alias1")
	require.NoError(t, err)
	assert.False(t, exists, "expected alias to be deleted")

	// Verify other alias still exists
	exists, err = store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM artist_aliases
		WHERE artist_id = $1 AND alias = $2
	)`, artist.ID, "Alias2")
	require.NoError(t, err)
	assert.True(t, exists, "expected alias to still exist")

	truncateTestData(t)
}
|
||||
// TestDeleteArtist verifies that deleting an artist cascades to its
// releases, tracks, and listens. Relies on the fixture ids created by
// testDataForTopItems (artist/release/track id 1).
func TestDeleteArtist(t *testing.T) {
	ctx := context.Background()

	// set up a lot of test data, 4 artists, 4 albums, 4 tracks, 10 listens
	testDataForTopItems(t)

	// Delete the artist
	err := store.DeleteArtist(ctx, 1)
	require.NoError(t, err)

	// Verify artist was deleted
	exists, err := store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM artists
		WHERE id = $1
	)`, 1)
	require.NoError(t, err)
	assert.False(t, exists, "expected artist to be deleted")

	// Verify artist's release was deleted
	exists, err = store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM releases
		WHERE id = $1
	)`, 1)
	require.NoError(t, err)
	assert.False(t, exists, "expected artist's release to be deleted")

	// Verify artist's track was deleted
	exists, err = store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM tracks
		WHERE id = $1
	)`, 1)
	require.NoError(t, err)
	assert.False(t, exists, "expected artist's tracks to be deleted")

	// Verify artist's listens was deleted
	exists, err = store.RowExists(ctx, `
	SELECT EXISTS (
		SELECT 1 FROM listens
		WHERE track_id = $1
	)`, 1)
	require.NoError(t, err)
	assert.False(t, exists, "expected artist's listens to be deleted")

	truncateTestData(t)
}
|
||||
70
internal/db/psql/counts.go
Normal file
70
internal/db/psql/counts.go
Normal file
|
|
@ -0,0 +1,70 @@
|
|||
package psql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/repository"
|
||||
)
|
||||
|
||||
func (p *Psql) CountListens(ctx context.Context, period db.Period) (int64, error) {
|
||||
t2 := time.Now()
|
||||
t1 := db.StartTimeFromPeriod(period)
|
||||
count, err := p.q.CountListens(ctx, repository.CountListensParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
})
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return count, nil
|
||||
}
|
||||
func (p *Psql) CountTracks(ctx context.Context, period db.Period) (int64, error) {
|
||||
t2 := time.Now()
|
||||
t1 := db.StartTimeFromPeriod(period)
|
||||
count, err := p.q.CountTopTracks(ctx, repository.CountTopTracksParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
})
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return count, nil
|
||||
}
|
||||
func (p *Psql) CountAlbums(ctx context.Context, period db.Period) (int64, error) {
|
||||
t2 := time.Now()
|
||||
t1 := db.StartTimeFromPeriod(period)
|
||||
count, err := p.q.CountTopReleases(ctx, repository.CountTopReleasesParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
})
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return count, nil
|
||||
}
|
||||
func (p *Psql) CountArtists(ctx context.Context, period db.Period) (int64, error) {
|
||||
t2 := time.Now()
|
||||
t1 := db.StartTimeFromPeriod(period)
|
||||
count, err := p.q.CountTopArtists(ctx, repository.CountTopArtistsParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
})
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return count, nil
|
||||
}
|
||||
func (p *Psql) CountTimeListened(ctx context.Context, period db.Period) (int64, error) {
|
||||
t2 := time.Now()
|
||||
t1 := db.StartTimeFromPeriod(period)
|
||||
count, err := p.q.CountTimeListened(ctx, repository.CountTimeListenedParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
})
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return count, nil
|
||||
}
|
||||
76
internal/db/psql/counts_test.go
Normal file
76
internal/db/psql/counts_test.go
Normal file
|
|
@ -0,0 +1,76 @@
|
|||
package psql_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestCountListens(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
testDataForTopItems(t)
|
||||
|
||||
// Test CountListens
|
||||
period := db.PeriodWeek
|
||||
count, err := store.CountListens(ctx, period)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, int64(1), count, "expected listens count to match inserted data")
|
||||
|
||||
truncateTestData(t)
|
||||
}
|
||||
|
||||
func TestCountTracks(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
testDataForTopItems(t)
|
||||
|
||||
// Test CountTracks
|
||||
period := db.PeriodMonth
|
||||
count, err := store.CountTracks(ctx, period)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, int64(2), count, "expected tracks count to match inserted data")
|
||||
|
||||
truncateTestData(t)
|
||||
}
|
||||
|
||||
func TestCountAlbums(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
testDataForTopItems(t)
|
||||
|
||||
// Test CountAlbums
|
||||
period := db.PeriodYear
|
||||
count, err := store.CountAlbums(ctx, period)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, int64(3), count, "expected albums count to match inserted data")
|
||||
|
||||
truncateTestData(t)
|
||||
}
|
||||
|
||||
func TestCountArtists(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
testDataForTopItems(t)
|
||||
|
||||
// Test CountArtists
|
||||
period := db.PeriodAllTime
|
||||
count, err := store.CountArtists(ctx, period)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, int64(4), count, "expected artists count to match inserted data")
|
||||
|
||||
truncateTestData(t)
|
||||
}
|
||||
|
||||
func TestCountTimeListened(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
testDataForTopItems(t)
|
||||
|
||||
// Test CountTimeListened
|
||||
period := db.PeriodMonth
|
||||
count, err := store.CountTimeListened(ctx, period)
|
||||
require.NoError(t, err)
|
||||
// 3 listens in past month, each 100 seconds
|
||||
assert.Equal(t, int64(300), count, "expected total time listened to match inserted data")
|
||||
|
||||
truncateTestData(t)
|
||||
}
|
||||
74
internal/db/psql/images.go
Normal file
74
internal/db/psql/images.go
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
package psql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/gabehf/koito/internal/repository"
|
||||
"github.com/google/uuid"
|
||||
"github.com/jackc/pgx/v5"
|
||||
)
|
||||
|
||||
func (d *Psql) ImageHasAssociation(ctx context.Context, image uuid.UUID) (bool, error) {
|
||||
_, err := d.q.GetReleaseByImageID(ctx, &image)
|
||||
if err == nil {
|
||||
return true, err
|
||||
} else if !errors.Is(err, pgx.ErrNoRows) {
|
||||
return false, err
|
||||
}
|
||||
_, err = d.q.GetArtistByImage(ctx, &image)
|
||||
if err == nil {
|
||||
return true, err
|
||||
} else if !errors.Is(err, pgx.ErrNoRows) {
|
||||
return false, err
|
||||
}
|
||||
return false, nil
|
||||
}
|
||||
|
||||
func (d *Psql) GetImageSource(ctx context.Context, image uuid.UUID) (string, error) {
|
||||
r, err := d.q.GetReleaseByImageID(ctx, &image)
|
||||
if err == nil {
|
||||
return r.ImageSource.String, err
|
||||
} else if !errors.Is(err, pgx.ErrNoRows) {
|
||||
return "", err
|
||||
}
|
||||
rr, err := d.q.GetArtistByImage(ctx, &image)
|
||||
if err == nil {
|
||||
return rr.ImageSource.String, err
|
||||
} else if !errors.Is(err, pgx.ErrNoRows) {
|
||||
return "", err
|
||||
}
|
||||
return "", nil
|
||||
}
|
||||
|
||||
// AlbumsWithoutImages returns up to 20 albums that have no cover image,
// starting from the release id 'from' (cursor-style pagination).
func (d *Psql) AlbumsWithoutImages(ctx context.Context, from int32) ([]*models.Album, error) {
	l := logger.FromContext(ctx)
	rows, err := d.q.GetReleasesWithoutImages(ctx, repository.GetReleasesWithoutImagesParams{
		Limit: 20,
		ID:    from,
	})
	if err != nil {
		return nil, err
	}
	albums := make([]*models.Album, len(rows))
	for i, row := range rows {
		artists := make([]models.SimpleArtist, 0)
		err = json.Unmarshal(row.Artists, &artists)
		if err != nil {
			// Best-effort: log and continue with a nil artist list rather
			// than failing the whole page for one bad row.
			l.Err(err).Msgf("Error unmarshalling artists for release group with id %d", row.ID)
			artists = nil
		}
		albums[i] = &models.Album{
			ID:             row.ID,
			Image:          row.Image,
			Title:          row.Title,
			MbzID:          row.MusicBrainzID,
			VariousArtists: row.VariousArtists,
			Artists:        artists,
		}
	}
	return albums, nil
}
|
||||
106
internal/db/psql/images_test.go
Normal file
106
internal/db/psql/images_test.go
Normal file
|
|
@ -0,0 +1,106 @@
|
|||
package psql_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/gabehf/koito/internal/catalog"
|
||||
"github.com/google/uuid"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// setupTestDataForImages seeds two artists and two releases, one of each
// carrying an image, for the image-association tests below.
func setupTestDataForImages(t *testing.T) {
	truncateTestData(t)

	// Insert artists: artist 1 has an image, artist 2 does not.
	err := store.Exec(context.Background(),
		`INSERT INTO artists (musicbrainz_id, image, image_source)
		VALUES ('00000000-0000-0000-0000-000000000001', '11111111-1111-1111-1111-111111111111', 'User Upload'),
		('00000000-0000-0000-0000-000000000002', NULL, NULL)`)
	require.NoError(t, err)

	// Insert artist aliases
	err = store.Exec(context.Background(),
		`INSERT INTO artist_aliases (artist_id, alias, source, is_primary)
		VALUES (1, 'Artist One', 'Testing', true),
		(2, 'Artist Two', 'Testing', true)`)
	require.NoError(t, err)

	// Insert albums: release 1 has an image, release 2 does not.
	err = store.Exec(context.Background(),
		`INSERT INTO releases (musicbrainz_id, image, image_source)
		VALUES ('22222222-2222-2222-2222-222222222222', '33333333-3333-3333-3333-333333333333', 'Automatic'),
		('44444444-4444-4444-4444-444444444444', NULL, NULL)`)
	require.NoError(t, err)

	// Insert release aliases
	err = store.Exec(context.Background(),
		`INSERT INTO release_aliases (release_id, alias, source, is_primary)
		VALUES (1, 'Album One', 'Testing', true),
		(2, 'Album Two', 'Testing', true)`)
	require.NoError(t, err)

	// Associate albums with artists
	err = store.Exec(context.Background(),
		`INSERT INTO artist_releases (artist_id, release_id)
		VALUES (1, 1), (2, 2)`)
	require.NoError(t, err)
}
|
||||
|
||||
func TestImageHasAssociation(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
setupTestDataForImages(t)
|
||||
|
||||
// Test image with association
|
||||
imageID := uuid.MustParse("11111111-1111-1111-1111-111111111111")
|
||||
hasAssociation, err := store.ImageHasAssociation(ctx, imageID)
|
||||
require.NoError(t, err)
|
||||
assert.True(t, hasAssociation, "expected image to have an association")
|
||||
|
||||
// Test image without association
|
||||
imageID = uuid.MustParse("55555555-5555-5555-5555-555555555555")
|
||||
hasAssociation, err = store.ImageHasAssociation(ctx, imageID)
|
||||
require.NoError(t, err)
|
||||
assert.False(t, hasAssociation, "expected image to have no association")
|
||||
|
||||
truncateTestData(t)
|
||||
}
|
||||
|
||||
func TestGetImageSource(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
setupTestDataForImages(t)
|
||||
|
||||
// Test image source for an album
|
||||
imageID := uuid.MustParse("33333333-3333-3333-3333-333333333333")
|
||||
source, err := store.GetImageSource(ctx, imageID)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "Automatic", source, "expected image source to match")
|
||||
|
||||
// Test image source for an artist
|
||||
imageID = uuid.MustParse("11111111-1111-1111-1111-111111111111")
|
||||
source, err = store.GetImageSource(ctx, imageID)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, catalog.ImageSourceUserUpload, source, "expected image source to match")
|
||||
|
||||
// Test image source for a non-existent image
|
||||
imageID = uuid.MustParse("55555555-5555-5555-5555-555555555555")
|
||||
source, err = store.GetImageSource(ctx, imageID)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "", source, "expected no image source for non-existent image")
|
||||
|
||||
truncateTestData(t)
|
||||
}
|
||||
|
||||
func TestAlbumsWithoutImages(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
setupTestDataForImages(t)
|
||||
|
||||
// Test albums without images
|
||||
albums, err := store.AlbumsWithoutImages(ctx, 0)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, albums, 1, "expected one album without an image")
|
||||
assert.Equal(t, "Album Two", albums[0].Title, "expected album title to match")
|
||||
|
||||
truncateTestData(t)
|
||||
}
|
||||
218
internal/db/psql/listen.go
Normal file
218
internal/db/psql/listen.go
Normal file
|
|
@ -0,0 +1,218 @@
|
|||
package psql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/gabehf/koito/internal/repository"
|
||||
"github.com/gabehf/koito/internal/utils"
|
||||
)
|
||||
|
||||
func (d *Psql) GetListensPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[*models.Listen], error) {
|
||||
l := logger.FromContext(ctx)
|
||||
offset := (opts.Page - 1) * opts.Limit
|
||||
t1, t2, err := utils.DateRange(opts.Week, opts.Month, opts.Year)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if opts.Month == 0 && opts.Year == 0 {
|
||||
// use period, not date range
|
||||
t2 = time.Now()
|
||||
t1 = db.StartTimeFromPeriod(opts.Period)
|
||||
}
|
||||
if opts.Limit == 0 {
|
||||
opts.Limit = DefaultItemsPerPage
|
||||
}
|
||||
var listens []*models.Listen
|
||||
var count int64
|
||||
if opts.TrackID > 0 {
|
||||
l.Debug().Msgf("Fetching %d listens with period %s on page %d from range %v to %v",
|
||||
opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006"))
|
||||
rows, err := d.q.GetLastListensFromTrackPaginated(ctx, repository.GetLastListensFromTrackPaginatedParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
Limit: int32(opts.Limit),
|
||||
Offset: int32(offset),
|
||||
ID: int32(opts.TrackID),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
listens = make([]*models.Listen, len(rows))
|
||||
for i, row := range rows {
|
||||
t := &models.Listen{
|
||||
Track: models.Track{
|
||||
Title: row.TrackTitle,
|
||||
ID: row.TrackID,
|
||||
},
|
||||
Time: row.ListenedAt,
|
||||
}
|
||||
err = json.Unmarshal(row.Artists, &t.Track.Artists)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
listens[i] = t
|
||||
}
|
||||
count, err = d.q.CountListensFromTrack(ctx, repository.CountListensFromTrackParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
TrackID: int32(opts.TrackID),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
} else if opts.AlbumID > 0 {
|
||||
l.Debug().Msgf("Fetching %d listens with period %s on page %d from range %v to %v",
|
||||
opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006"))
|
||||
rows, err := d.q.GetLastListensFromReleasePaginated(ctx, repository.GetLastListensFromReleasePaginatedParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
Limit: int32(opts.Limit),
|
||||
Offset: int32(offset),
|
||||
ReleaseID: int32(opts.AlbumID),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
listens = make([]*models.Listen, len(rows))
|
||||
for i, row := range rows {
|
||||
t := &models.Listen{
|
||||
Track: models.Track{
|
||||
Title: row.TrackTitle,
|
||||
ID: row.TrackID,
|
||||
},
|
||||
Time: row.ListenedAt,
|
||||
}
|
||||
err = json.Unmarshal(row.Artists, &t.Track.Artists)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
listens[i] = t
|
||||
}
|
||||
count, err = d.q.CountListensFromRelease(ctx, repository.CountListensFromReleaseParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
ReleaseID: int32(opts.AlbumID),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
} else if opts.ArtistID > 0 {
|
||||
l.Debug().Msgf("Fetching %d listens with period %s on page %d from range %v to %v",
|
||||
opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006"))
|
||||
rows, err := d.q.GetLastListensFromArtistPaginated(ctx, repository.GetLastListensFromArtistPaginatedParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
Limit: int32(opts.Limit),
|
||||
Offset: int32(offset),
|
||||
ArtistID: int32(opts.ArtistID),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
listens = make([]*models.Listen, len(rows))
|
||||
for i, row := range rows {
|
||||
t := &models.Listen{
|
||||
Track: models.Track{
|
||||
Title: row.TrackTitle,
|
||||
ID: row.TrackID,
|
||||
},
|
||||
Time: row.ListenedAt,
|
||||
}
|
||||
err = json.Unmarshal(row.Artists, &t.Track.Artists)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
listens[i] = t
|
||||
}
|
||||
count, err = d.q.CountListensFromArtist(ctx, repository.CountListensFromArtistParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
ArtistID: int32(opts.ArtistID),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
} else {
|
||||
l.Debug().Msgf("Fetching %d listens with period %s on page %d from range %v to %v",
|
||||
opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006"))
|
||||
rows, err := d.q.GetLastListensPaginated(ctx, repository.GetLastListensPaginatedParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
Limit: int32(opts.Limit),
|
||||
Offset: int32(offset),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
listens = make([]*models.Listen, len(rows))
|
||||
for i, row := range rows {
|
||||
t := &models.Listen{
|
||||
Track: models.Track{
|
||||
Title: row.TrackTitle,
|
||||
ID: row.TrackID,
|
||||
},
|
||||
Time: row.ListenedAt,
|
||||
}
|
||||
err = json.Unmarshal(row.Artists, &t.Track.Artists)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
listens[i] = t
|
||||
}
|
||||
count, err = d.q.CountListens(ctx, repository.CountListensParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
l.Debug().Msgf("Database responded with %d tracks out of a total %d", len(rows), count)
|
||||
}
|
||||
|
||||
return &db.PaginatedResponse[*models.Listen]{
|
||||
Items: listens,
|
||||
TotalCount: count,
|
||||
ItemsPerPage: int32(opts.Limit),
|
||||
HasNextPage: int64(offset+len(listens)) < count,
|
||||
CurrentPage: int32(opts.Page),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *Psql) SaveListen(ctx context.Context, opts db.SaveListenOpts) error {
|
||||
l := logger.FromContext(ctx)
|
||||
if opts.TrackID == 0 {
|
||||
return errors.New("required parameter TrackID missing")
|
||||
}
|
||||
if opts.Time.IsZero() {
|
||||
opts.Time = time.Now()
|
||||
}
|
||||
var client *string
|
||||
if opts.Client != "" {
|
||||
client = &opts.Client
|
||||
}
|
||||
l.Debug().Msgf("Inserting listen for track with id %d at time %v into DB", opts.TrackID, opts.Time)
|
||||
return d.q.InsertListen(ctx, repository.InsertListenParams{
|
||||
TrackID: opts.TrackID,
|
||||
ListenedAt: opts.Time,
|
||||
UserID: opts.UserID,
|
||||
Client: client,
|
||||
})
|
||||
}
|
||||
|
||||
func (d *Psql) DeleteListen(ctx context.Context, trackId int32, listenedAt time.Time) error {
|
||||
l := logger.FromContext(ctx)
|
||||
if trackId == 0 {
|
||||
return errors.New("required parameter 'trackId' missing")
|
||||
}
|
||||
l.Debug().Msgf("Deleting listen from track %d at time %s from DB", trackId, listenedAt)
|
||||
return d.q.DeleteListen(ctx, repository.DeleteListenParams{
|
||||
TrackID: trackId,
|
||||
ListenedAt: listenedAt,
|
||||
})
|
||||
}
|
||||
109
internal/db/psql/listen_activity.go
Normal file
109
internal/db/psql/listen_activity.go
Normal file
|
|
@ -0,0 +1,109 @@
|
|||
package psql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/repository"
|
||||
)
|
||||
|
||||
func (d *Psql) GetListenActivity(ctx context.Context, opts db.ListenActivityOpts) ([]db.ListenActivityItem, error) {
|
||||
l := logger.FromContext(ctx)
|
||||
if opts.Month != 0 && opts.Year == 0 {
|
||||
return nil, errors.New("year must be specified with month")
|
||||
}
|
||||
// Default to range = 12 if not set
|
||||
if opts.Range == 0 {
|
||||
opts.Range = db.DefaultRange
|
||||
}
|
||||
t1, t2 := db.ListenActivityOptsToTimes(opts)
|
||||
var listenActivity []db.ListenActivityItem
|
||||
if opts.AlbumID > 0 {
|
||||
l.Debug().Msgf("Fetching listen activity for %d %s(s) from %v to %v for release group %d",
|
||||
opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05"), t2.Format("Jan 02, 2006 15:04:05"), opts.AlbumID)
|
||||
rows, err := d.q.ListenActivityForRelease(ctx, repository.ListenActivityForReleaseParams{
|
||||
Column1: t1,
|
||||
Column2: t2,
|
||||
Column3: stepToInterval(opts.Step),
|
||||
ReleaseID: opts.AlbumID,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
listenActivity = make([]db.ListenActivityItem, len(rows))
|
||||
for i, row := range rows {
|
||||
t := db.ListenActivityItem{
|
||||
Start: row.BucketStart,
|
||||
Listens: row.ListenCount,
|
||||
}
|
||||
listenActivity[i] = t
|
||||
}
|
||||
l.Debug().Msgf("Database responded with %d steps", len(rows))
|
||||
} else if opts.ArtistID > 0 {
|
||||
l.Debug().Msgf("Fetching listen activity for %d %s(s) from %v to %v for artist %d",
|
||||
opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05"), t2.Format("Jan 02, 2006 15:04:05"), opts.ArtistID)
|
||||
rows, err := d.q.ListenActivityForArtist(ctx, repository.ListenActivityForArtistParams{
|
||||
Column1: t1,
|
||||
Column2: t2,
|
||||
Column3: stepToInterval(opts.Step),
|
||||
ArtistID: opts.ArtistID,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
listenActivity = make([]db.ListenActivityItem, len(rows))
|
||||
for i, row := range rows {
|
||||
t := db.ListenActivityItem{
|
||||
Start: row.BucketStart,
|
||||
Listens: row.ListenCount,
|
||||
}
|
||||
listenActivity[i] = t
|
||||
}
|
||||
l.Debug().Msgf("Database responded with %d steps", len(rows))
|
||||
} else if opts.TrackID > 0 {
|
||||
l.Debug().Msgf("Fetching listen activity for %d %s(s) from %v to %v for track %d",
|
||||
opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05"), t2.Format("Jan 02, 2006 15:04:05"), opts.TrackID)
|
||||
rows, err := d.q.ListenActivityForTrack(ctx, repository.ListenActivityForTrackParams{
|
||||
Column1: t1,
|
||||
Column2: t2,
|
||||
Column3: stepToInterval(opts.Step),
|
||||
ID: opts.TrackID,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
listenActivity = make([]db.ListenActivityItem, len(rows))
|
||||
for i, row := range rows {
|
||||
t := db.ListenActivityItem{
|
||||
Start: row.BucketStart,
|
||||
Listens: row.ListenCount,
|
||||
}
|
||||
listenActivity[i] = t
|
||||
}
|
||||
l.Debug().Msgf("Database responded with %d steps", len(rows))
|
||||
} else {
|
||||
l.Debug().Msgf("Fetching listen activity for %d %s(s) from %v to %v",
|
||||
opts.Range, opts.Step, t1.Format("Jan 02, 2006 15:04:05"), t2.Format("Jan 02, 2006 15:04:05"))
|
||||
rows, err := d.q.ListenActivity(ctx, repository.ListenActivityParams{
|
||||
Column1: t1,
|
||||
Column2: t2,
|
||||
Column3: stepToInterval(opts.Step),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
listenActivity = make([]db.ListenActivityItem, len(rows))
|
||||
for i, row := range rows {
|
||||
t := db.ListenActivityItem{
|
||||
Start: row.BucketStart,
|
||||
Listens: row.ListenCount,
|
||||
}
|
||||
listenActivity[i] = t
|
||||
}
|
||||
l.Debug().Msgf("Database responded with %d steps", len(rows))
|
||||
}
|
||||
|
||||
return listenActivity, nil
|
||||
}
|
||||
211
internal/db/psql/listen_activity_test.go
Normal file
211
internal/db/psql/listen_activity_test.go
Normal file
|
|
@ -0,0 +1,211 @@
|
|||
package psql_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func flattenListenCounts(items []db.ListenActivityItem) []int64 {
|
||||
ret := make([]int64, len(items))
|
||||
for i, v := range items {
|
||||
ret[i] = v.Listens
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
// TestListenActivity exercises GetListenActivity across step sizes (day,
// month, year), an explicit month/year window, per-entity filters (album,
// track, artist), and invalid-option handling. Each phase truncates the
// listens table and re-seeds rows at known offsets from NOW() (or at fixed
// dates) so the bucketed counts can be asserted exactly. The seeding order
// follows foreign-key dependencies: artists -> releases -> tracks ->
// aliases/associations -> listens.
func TestListenActivity(t *testing.T) {
	truncateTestData(t)

	// Seed two artists; display names live in artist_aliases, not here.
	err := store.Exec(context.Background(),
		`INSERT INTO artists (musicbrainz_id)
		VALUES ('00000000-0000-0000-0000-000000000001'),
		('00000000-0000-0000-0000-000000000002')`)
	require.NoError(t, err)

	// Move artist names into artist_aliases
	err = store.Exec(context.Background(),
		`INSERT INTO artist_aliases (artist_id, alias, source, is_primary)
		VALUES (1, 'Artist One', 'Testing', true),
		(2, 'Artist Two', 'Testing', true)`)
	require.NoError(t, err)

	// Insert release groups
	err = store.Exec(context.Background(),
		`INSERT INTO releases (musicbrainz_id)
		VALUES ('00000000-0000-0000-0000-000000000011'),
		('00000000-0000-0000-0000-000000000022')`)
	require.NoError(t, err)

	// Move release titles into release_aliases
	err = store.Exec(context.Background(),
		`INSERT INTO release_aliases (release_id, alias, source, is_primary)
		VALUES (1, 'Release One', 'Testing', true),
		(2, 'Release Two', 'Testing', true)`)
	require.NoError(t, err)

	// Insert tracks
	err = store.Exec(context.Background(),
		`INSERT INTO tracks (musicbrainz_id, release_id)
		VALUES ('11111111-1111-1111-1111-111111111111', 1),
		('22222222-2222-2222-2222-222222222222', 2)`)
	require.NoError(t, err)

	// Move track titles into track_aliases
	err = store.Exec(context.Background(),
		`INSERT INTO track_aliases (track_id, alias, source, is_primary)
		VALUES (1, 'Track One', 'Testing', true),
		(2, 'Track Two', 'Testing', true)`)
	require.NoError(t, err)

	// Associate tracks with artists
	err = store.Exec(context.Background(),
		`INSERT INTO artist_tracks (artist_id, track_id)
		VALUES (1, 1), (2, 2)`)
	require.NoError(t, err)

	// Insert listens at fixed offsets from NOW() so each falls into a
	// predictable day/month/year bucket below.
	err = store.Exec(context.Background(),
		`INSERT INTO listens (user_id, track_id, listened_at)
		VALUES (1, 1, NOW() - INTERVAL '1 day'),
		(1, 1, NOW() - INTERVAL '2 days'),
		(1, 1, NOW() - INTERVAL '1 week 1 day'),
		(1, 1, NOW() - INTERVAL '1 month 1 day'),
		(1, 1, NOW() - INTERVAL '1 year 1 day'),
		(1, 2, NOW() - INTERVAL '1 day'),
		(1, 2, NOW() - INTERVAL '2 days'),
		(1, 2, NOW() - INTERVAL '1 week 1 day'),
		(1, 2, NOW() - INTERVAL '1 month 1 day'),
		(1, 2, NOW() - INTERVAL '1 year 1 day')`)
	require.NoError(t, err)

	ctx := context.Background()

	// Test for opts.Step = db.StepDay
	activity, err := store.GetListenActivity(ctx, db.ListenActivityOpts{Step: db.StepDay})
	require.NoError(t, err)
	require.Len(t, activity, db.DefaultRange)
	assert.Equal(t, []int64{0, 0, 0, 2, 0, 0, 0, 0, 0, 2, 2, 0}, flattenListenCounts(activity))

	// Truncate listens table and insert specific dates for testing opts.Step = db.StepMonth
	err = store.Exec(context.Background(), `TRUNCATE TABLE listens`)
	require.NoError(t, err)

	err = store.Exec(context.Background(),
		`INSERT INTO listens (user_id, track_id, listened_at)
		VALUES (1, 1, NOW() - INTERVAL '1 month'),
		(1, 1, NOW() - INTERVAL '2 months'),
		(1, 1, NOW() - INTERVAL '3 months'),
		(1, 2, NOW() - INTERVAL '1 month'),
		(1, 2, NOW() - INTERVAL '2 months')`)
	require.NoError(t, err)

	// Non-default Range: expect exactly 8 month buckets back.
	activity, err = store.GetListenActivity(ctx, db.ListenActivityOpts{Step: db.StepMonth, Range: 8})
	require.NoError(t, err)
	require.Len(t, activity, 8)
	assert.Equal(t, []int64{0, 0, 0, 0, 1, 2, 2, 0}, flattenListenCounts(activity))

	// Truncate listens table and insert specific dates for testing opts.Step = db.StepYear
	err = store.Exec(context.Background(), `TRUNCATE TABLE listens RESTART IDENTITY`)
	require.NoError(t, err)

	err = store.Exec(context.Background(),
		`INSERT INTO listens (user_id, track_id, listened_at)
		VALUES (1, 1, NOW() - INTERVAL '1 year'),
		(1, 1, NOW() - INTERVAL '2 years'),
		(1, 2, NOW() - INTERVAL '1 year'),
		(1, 2, NOW() - INTERVAL '3 years')`)
	require.NoError(t, err)

	activity, err = store.GetListenActivity(ctx, db.ListenActivityOpts{Step: db.StepYear})
	require.NoError(t, err)
	require.Len(t, activity, db.DefaultRange)
	assert.Equal(t, []int64{0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 0}, flattenListenCounts(activity))
	// Truncate and insert data for a specific month/year
	err = store.Exec(context.Background(), `TRUNCATE TABLE listens RESTART IDENTITY`)
	require.NoError(t, err)

	err = store.Exec(context.Background(), `
		INSERT INTO listens (user_id, track_id, listened_at)
		VALUES (1, 1, DATE '2024-03-10'),
		(1, 2, DATE '2024-03-20')`)
	require.NoError(t, err)

	// A month/year window returns one bucket per calendar day of that month.
	activity, err = store.GetListenActivity(ctx, db.ListenActivityOpts{
		Step:  db.StepDay,
		Month: 3,
		Year:  2024,
	})
	require.NoError(t, err)
	require.Len(t, activity, 31) // number of days in march
	assert.EqualValues(t, 1, activity[8].Listens)
	assert.EqualValues(t, 1, activity[18].Listens)

	// Truncate and insert listens associated with two different albums
	err = store.Exec(context.Background(), `TRUNCATE TABLE listens RESTART IDENTITY`)
	require.NoError(t, err)

	err = store.Exec(context.Background(), `
		INSERT INTO listens (user_id, track_id, listened_at)
		VALUES (1, 1, NOW() - INTERVAL '1 day'), (1, 1, NOW() - INTERVAL '2 days'),
		(1, 2, NOW() - INTERVAL '1 day')`)
	require.NoError(t, err)

	activity, err = store.GetListenActivity(ctx, db.ListenActivityOpts{
		Step:    db.StepDay,
		AlbumID: 1, // Track 1 only
	})
	require.NoError(t, err)
	require.Len(t, activity, db.DefaultRange)
	assert.Equal(t, []int64{0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0}, flattenListenCounts(activity))

	activity, err = store.GetListenActivity(ctx, db.ListenActivityOpts{
		Step:    db.StepDay,
		TrackID: 1, // Track 1 only
	})
	require.NoError(t, err)
	require.Len(t, activity, db.DefaultRange)
	assert.Equal(t, []int64{0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0}, flattenListenCounts(activity))

	activity, err = store.GetListenActivity(ctx, db.ListenActivityOpts{
		Step:     db.StepDay,
		ArtistID: 2, // Should only include listens to Track 2
	})
	require.NoError(t, err)
	require.Len(t, activity, db.DefaultRange)
	assert.Equal(t, []int64{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0}, flattenListenCounts(activity))

	// month without year is disallowed
	_, err = store.GetListenActivity(ctx, db.ListenActivityOpts{
		Step:  db.StepDay,
		Month: 5,
	})
	require.Error(t, err)

	// invalid options
	_, err = store.GetListenActivity(ctx, db.ListenActivityOpts{
		Year: -10,
	})
	require.Error(t, err)
	_, err = store.GetListenActivity(ctx, db.ListenActivityOpts{
		Year:  2025,
		Month: -10,
	})
	require.Error(t, err)
	_, err = store.GetListenActivity(ctx, db.ListenActivityOpts{
		Range: -1,
	})
	require.Error(t, err)
	_, err = store.GetListenActivity(ctx, db.ListenActivityOpts{
		AlbumID: -1,
	})
	require.Error(t, err)
	_, err = store.GetListenActivity(ctx, db.ListenActivityOpts{
		ArtistID: -1,
	})
	require.Error(t, err)

}
|
||||
219
internal/db/psql/listen_test.go
Normal file
219
internal/db/psql/listen_test.go
Normal file
|
|
@ -0,0 +1,219 @@
|
|||
package psql_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// testDataForListens wipes the test database and seeds two artists, two
// releases, and two tracks — with aliases and artist/track associations —
// so listen-related tests have rows to reference. Insert order follows
// foreign-key dependencies; no listens are inserted here.
func testDataForListens(t *testing.T) {
	truncateTestData(t)
	// Insert artists
	err := store.Exec(context.Background(),
		`INSERT INTO artists (musicbrainz_id)
		VALUES ('00000000-0000-0000-0000-000000000001'),
		('00000000-0000-0000-0000-000000000002')`)
	require.NoError(t, err)

	// Insert artist aliases
	err = store.Exec(context.Background(),
		`INSERT INTO artist_aliases (artist_id, alias, source, is_primary)
		VALUES (1, 'Artist One', 'Testing', true),
		(2, 'Artist Two', 'Testing', true)`)
	require.NoError(t, err)

	// Insert release groups
	err = store.Exec(context.Background(),
		`INSERT INTO releases (musicbrainz_id)
		VALUES ('00000000-0000-0000-0000-000000000011'),
		('00000000-0000-0000-0000-000000000022')`)
	require.NoError(t, err)

	// Insert release aliases
	err = store.Exec(context.Background(),
		`INSERT INTO release_aliases (release_id, alias, source, is_primary)
		VALUES (1, 'Release One', 'Testing', true),
		(2, 'Release Two', 'Testing', true)`)
	require.NoError(t, err)

	// Insert tracks
	err = store.Exec(context.Background(),
		`INSERT INTO tracks (musicbrainz_id, release_id)
		VALUES ('11111111-1111-1111-1111-111111111111', 1),
		('22222222-2222-2222-2222-222222222222', 2)`)
	require.NoError(t, err)

	// Insert track aliases
	err = store.Exec(context.Background(),
		`INSERT INTO track_aliases (track_id, alias, source, is_primary)
		VALUES (1, 'Track One', 'Testing', true),
		(2, 'Track Two', 'Testing', true)`)
	require.NoError(t, err)

	// Insert artist track associations
	err = store.Exec(context.Background(),
		`INSERT INTO artist_tracks (track_id, artist_id)
		VALUES (1, 1),
		(2, 2)`)
	require.NoError(t, err)
}
|
||||
|
||||
// TestGetListens exercises GetListensPaginated: default ordering (most
// recent first), pagination metadata, out-of-range pages, invalid inputs,
// the named time periods, entity filters (artist/album/track and their
// combination), and explicit month/year windows. Relies on the fixtures
// created by testDataForTopItems and testDataAbsoluteListenTimes, which
// are defined elsewhere in this test package.
func TestGetListens(t *testing.T) {
	testDataForTopItems(t)
	ctx := context.Background()

	// Test valid
	resp, err := store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime})
	require.NoError(t, err)
	require.Len(t, resp.Items, 10)
	assert.Equal(t, int64(10), resp.TotalCount)
	require.Len(t, resp.Items[0].Track.Artists, 1)
	require.Len(t, resp.Items[1].Track.Artists, 1)
	// ensure tracks are in the right order (time, desc)
	assert.Equal(t, "Artist Four", resp.Items[0].Track.Artists[0].Name)
	assert.Equal(t, "Artist Three", resp.Items[1].Track.Artists[0].Name)

	// Test pagination
	resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Period: db.PeriodAllTime})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	require.Len(t, resp.Items[0].Track.Artists, 1)
	assert.Equal(t, true, resp.HasNextPage)
	assert.EqualValues(t, 2, resp.CurrentPage)
	assert.EqualValues(t, 1, resp.ItemsPerPage)
	assert.EqualValues(t, 10, resp.TotalCount)
	assert.Equal(t, "Artist Three", resp.Items[0].Track.Artists[0].Name)

	// Test page out of range
	resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Limit: 10, Page: 10, Period: db.PeriodAllTime})
	require.NoError(t, err)
	assert.Empty(t, resp.Items)
	assert.False(t, resp.HasNextPage)

	// Test invalid inputs
	_, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Limit: -1, Page: 0})
	assert.Error(t, err)

	_, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: -1})
	assert.Error(t, err)

	// Test specify period
	resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodDay})
	require.NoError(t, err)
	require.Len(t, resp.Items, 0) // empty
	assert.Equal(t, int64(0), resp.TotalCount)
	// should default to PeriodDay
	resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{})
	require.NoError(t, err)
	require.Len(t, resp.Items, 0) // empty
	assert.Equal(t, int64(0), resp.TotalCount)

	resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodWeek})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	assert.Equal(t, int64(1), resp.TotalCount)

	resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodMonth})
	require.NoError(t, err)
	require.Len(t, resp.Items, 3)
	assert.Equal(t, int64(3), resp.TotalCount)

	resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodYear})
	require.NoError(t, err)
	require.Len(t, resp.Items, 6)
	assert.Equal(t, int64(6), resp.TotalCount)

	// Test filter by artists, releases, and tracks
	resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, ArtistID: 1})
	require.NoError(t, err)
	require.Len(t, resp.Items, 4)
	assert.Equal(t, int64(4), resp.TotalCount)

	resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, AlbumID: 2})
	require.NoError(t, err)
	require.Len(t, resp.Items, 3)
	assert.Equal(t, int64(3), resp.TotalCount)

	resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, TrackID: 3})
	require.NoError(t, err)
	require.Len(t, resp.Items, 2)
	assert.Equal(t, int64(2), resp.TotalCount)
	// when both artistID and albumID are specified, artist id is ignored
	resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, AlbumID: 2, ArtistID: 1})
	require.NoError(t, err)
	require.Len(t, resp.Items, 3)
	assert.Equal(t, int64(3), resp.TotalCount)

	// Test specify dates

	testDataAbsoluteListenTimes(t)

	resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Year: 2023})
	require.NoError(t, err)
	require.Len(t, resp.Items, 4)
	assert.Equal(t, int64(4), resp.TotalCount)

	resp, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Month: 6, Year: 2024})
	require.NoError(t, err)
	require.Len(t, resp.Items, 3)
	assert.Equal(t, int64(3), resp.TotalCount)

	// invalid, year required with month
	_, err = store.GetListensPaginated(ctx, db.GetItemsOpts{Month: 10})
	require.Error(t, err)

}
|
||||
|
||||
// TestSaveListen verifies that SaveListen persists a listen row for an
// existing track, and that it rejects a zero TrackID before hitting the
// database.
func TestSaveListen(t *testing.T) {
	testDataForListens(t)
	ctx := context.Background()

	// Test SaveListen with valid inputs
	err := store.SaveListen(ctx, db.SaveListenOpts{
		TrackID: 1,
		Time:    time.Now(),
		UserID:  1,
	})
	require.NoError(t, err)

	// Verify the listen was saved
	exists, err := store.RowExists(ctx, `
		SELECT EXISTS (
			SELECT 1 FROM listens
			WHERE track_id = $1
		)`, 1)
	require.NoError(t, err)
	assert.True(t, exists, "expected listen to exist")

	// Test SaveListen with missing TrackID
	err = store.SaveListen(ctx, db.SaveListenOpts{
		TrackID: 0,
		Time:    time.Now(),
	})
	assert.Error(t, err)
}
|
||||
|
||||
// TestDeleteListen inserts a listen at a fixed Unix timestamp and verifies
// DeleteListen removes exactly that (track_id, listened_at) row. The same
// epoch value is used on both the SQL and Go sides so the keys match.
func TestDeleteListen(t *testing.T) {
	testDataForListens(t)
	ctx := context.Background()

	err := store.Exec(ctx, `
		INSERT INTO listens (user_id, track_id, listened_at)
		VALUES (1, 1, to_timestamp(1749464138.0))`)
	require.NoError(t, err)

	err = store.DeleteListen(ctx, 1, time.Unix(1749464138, 0))
	require.NoError(t, err)

	exists, err := store.RowExists(ctx, `
		SELECT EXISTS (
			SELECT 1 FROM listens
			WHERE track_id = $1
		)`, 1)
	require.NoError(t, err)
	assert.False(t, exists, "expected listen to be deleted")
}
|
||||
109
internal/db/psql/merge.go
Normal file
109
internal/db/psql/merge.go
Normal file
|
|
@ -0,0 +1,109 @@
|
|||
package psql
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/repository"
|
||||
"github.com/jackc/pgx/v5"
|
||||
)
|
||||
|
||||
func (d *Psql) MergeTracks(ctx context.Context, fromId, toId int32) error {
|
||||
l := logger.FromContext(ctx)
|
||||
l.Info().Msgf("Merging track %d into track %d", fromId, toId)
|
||||
tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to begin transaction")
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback(ctx)
|
||||
qtx := d.q.WithTx(tx)
|
||||
err = qtx.UpdateTrackIdForListens(ctx, repository.UpdateTrackIdForListensParams{
|
||||
TrackID: fromId,
|
||||
TrackID_2: toId,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = qtx.CleanOrphanedEntries(ctx)
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to clean orphaned entries")
|
||||
return err
|
||||
}
|
||||
return tx.Commit(ctx)
|
||||
}
|
||||
|
||||
func (d *Psql) MergeAlbums(ctx context.Context, fromId, toId int32) error {
|
||||
l := logger.FromContext(ctx)
|
||||
l.Info().Msgf("Merging album %d into album %d", fromId, toId)
|
||||
tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to begin transaction")
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback(ctx)
|
||||
qtx := d.q.WithTx(tx)
|
||||
err = qtx.UpdateReleaseForAll(ctx, repository.UpdateReleaseForAllParams{
|
||||
ReleaseID: fromId,
|
||||
ReleaseID_2: toId,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = qtx.CleanOrphanedEntries(ctx)
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to clean orphaned entries")
|
||||
return err
|
||||
}
|
||||
return tx.Commit(ctx)
|
||||
}
|
||||
|
||||
func (d *Psql) MergeArtists(ctx context.Context, fromId, toId int32) error {
|
||||
l := logger.FromContext(ctx)
|
||||
l.Info().Msgf("Merging artist %d into artist %d", fromId, toId)
|
||||
tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to begin transaction")
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback(ctx)
|
||||
qtx := d.q.WithTx(tx)
|
||||
err = qtx.DeleteConflictingArtistTracks(ctx, repository.DeleteConflictingArtistTracksParams{
|
||||
ArtistID: fromId,
|
||||
ArtistID_2: toId,
|
||||
})
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to delete conflicting artist tracks")
|
||||
return err
|
||||
}
|
||||
err = qtx.DeleteConflictingArtistReleases(ctx, repository.DeleteConflictingArtistReleasesParams{
|
||||
ArtistID: fromId,
|
||||
ArtistID_2: toId,
|
||||
})
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to delete conflicting artist releases")
|
||||
return err
|
||||
}
|
||||
err = qtx.UpdateArtistTracks(ctx, repository.UpdateArtistTracksParams{
|
||||
ArtistID: fromId,
|
||||
ArtistID_2: toId,
|
||||
})
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to update artist tracks")
|
||||
return err
|
||||
}
|
||||
err = qtx.UpdateArtistReleases(ctx, repository.UpdateArtistReleasesParams{
|
||||
ArtistID: fromId,
|
||||
ArtistID_2: toId,
|
||||
})
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to update artist releases")
|
||||
return err
|
||||
}
|
||||
err = qtx.CleanOrphanedEntries(ctx)
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to clean orphaned entries")
|
||||
return err
|
||||
}
|
||||
return tx.Commit(ctx)
|
||||
}
|
||||
124
internal/db/psql/merge_test.go
Normal file
124
internal/db/psql/merge_test.go
Normal file
|
|
@ -0,0 +1,124 @@
|
|||
package psql_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// setupTestDataForMerge wipes the test database and seeds two of each
// entity — artists, releases, tracks (each with an alias), plus
// artist/release and artist/track associations and one listen per track —
// giving the merge tests a "1" and a "2" of everything to merge together.
func setupTestDataForMerge(t *testing.T) {
	truncateTestData(t)
	// Insert artists
	err := store.Exec(context.Background(),
		`INSERT INTO artists (musicbrainz_id)
		VALUES ('00000000-0000-0000-0000-000000000001'),
		('00000000-0000-0000-0000-000000000002')`)
	require.NoError(t, err)

	err = store.Exec(context.Background(),
		`INSERT INTO artist_aliases (artist_id, alias, source, is_primary)
		VALUES (1, 'Artist One', 'Testing', true),
		(2, 'Artist Two', 'Testing', true)`)
	require.NoError(t, err)

	// Insert albums
	err = store.Exec(context.Background(),
		`INSERT INTO releases (musicbrainz_id)
		VALUES ('11111111-1111-1111-1111-111111111111'),
		('22222222-2222-2222-2222-222222222222')`)
	require.NoError(t, err)

	err = store.Exec(context.Background(),
		`INSERT INTO release_aliases (release_id, alias, source, is_primary)
		VALUES (1, 'Album One', 'Testing', true),
		(2, 'Album Two', 'Testing', true)`)
	require.NoError(t, err)

	// Insert tracks
	err = store.Exec(context.Background(),
		`INSERT INTO tracks (musicbrainz_id, release_id)
		VALUES ('33333333-3333-3333-3333-333333333333', 1),
		('44444444-4444-4444-4444-444444444444', 2)`)
	require.NoError(t, err)

	err = store.Exec(context.Background(),
		`INSERT INTO track_aliases (track_id, alias, source, is_primary)
		VALUES (1, 'Track One', 'Testing', true),
		(2, 'Track Two', 'Testing', true)`)
	require.NoError(t, err)

	// Associate artists with albums and tracks
	err = store.Exec(context.Background(),
		`INSERT INTO artist_releases (artist_id, release_id)
		VALUES (1, 1), (2, 2)`)
	require.NoError(t, err)

	err = store.Exec(context.Background(),
		`INSERT INTO artist_tracks (artist_id, track_id)
		VALUES (1, 1), (2, 2)`)
	require.NoError(t, err)

	// Insert listens
	err = store.Exec(context.Background(),
		`INSERT INTO listens (user_id, track_id, listened_at)
		VALUES (1, 1, NOW() - INTERVAL '1 day'),
		(1, 2, NOW() - INTERVAL '2 days')`)
	require.NoError(t, err)
}
|
||||
|
||||
// TestMergeTracks verifies that merging track 1 into track 2 leaves both
// seeded listens (one per track) attached to track 2.
func TestMergeTracks(t *testing.T) {
	ctx := context.Background()
	setupTestDataForMerge(t)

	// Merge Track 1 into Track 2
	err := store.MergeTracks(ctx, 1, 2)
	require.NoError(t, err)

	// Verify listens are updated
	var count int
	count, err = store.Count(ctx, `SELECT COUNT(*) FROM listens WHERE track_id = 2`)
	require.NoError(t, err)
	assert.Equal(t, 2, count, "expected all listens to be merged into Track 2")

	truncateTestData(t)
}
|
||||
|
||||
// TestMergeAlbums verifies that merging album 1 into album 2 re-points both
// seeded tracks onto release 2.
func TestMergeAlbums(t *testing.T) {
	ctx := context.Background()
	setupTestDataForMerge(t)

	// Merge Album 1 into Album 2
	err := store.MergeAlbums(ctx, 1, 2)
	require.NoError(t, err)

	// Verify tracks are updated
	var count int
	count, err = store.Count(ctx, `SELECT COUNT(*) FROM tracks WHERE release_id = 2`)
	require.NoError(t, err)
	assert.Equal(t, 2, count, "expected all tracks to be merged into Album 2")

	truncateTestData(t)
}
|
||||
|
||||
// TestMergeArtists verifies that merging artist 1 into artist 2 moves both
// track associations and both release associations onto artist 2.
func TestMergeArtists(t *testing.T) {
	ctx := context.Background()
	setupTestDataForMerge(t)

	// Merge Artist 1 into Artist 2
	err := store.MergeArtists(ctx, 1, 2)
	require.NoError(t, err)

	// Verify artist associations are updated
	var count int
	count, err = store.Count(ctx, `SELECT COUNT(*) FROM artist_tracks WHERE artist_id = 2`)
	require.NoError(t, err)
	assert.Equal(t, 2, count, "expected all tracks to be associated with Artist 2")

	count, err = store.Count(ctx, `SELECT COUNT(*) FROM artist_releases WHERE artist_id = 2`)
	require.NoError(t, err)
	assert.Equal(t, 2, count, "expected all releases to be associated with Artist 2")

	truncateTestData(t)
}
|
||||
119
internal/db/psql/psql.go
Normal file
119
internal/db/psql/psql.go
Normal file
|
|
@ -0,0 +1,119 @@
|
|||
// package psql implements the db.DB interface using psx and a sql generated repository
|
||||
package psql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/cfg"
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/repository"
|
||||
"github.com/jackc/pgx/v5"
|
||||
"github.com/jackc/pgx/v5/pgtype"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
_ "github.com/jackc/pgx/v5/stdlib"
|
||||
"github.com/pressly/goose/v3"
|
||||
)
|
||||
|
||||
const (
|
||||
DefaultItemsPerPage = 20
|
||||
)
|
||||
|
||||
type Psql struct {
|
||||
q *repository.Queries
|
||||
conn *pgxpool.Pool
|
||||
}
|
||||
|
||||
func New() (*Psql, error) {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)
|
||||
defer cancel()
|
||||
|
||||
config, err := pgxpool.ParseConfig(cfg.DatabaseUrl())
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse pgx config: %w", err)
|
||||
}
|
||||
|
||||
config.ConnConfig.ConnectTimeout = 15 * time.Second
|
||||
|
||||
pool, err := pgxpool.NewWithConfig(ctx, config)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create pgx pool: %w", err)
|
||||
}
|
||||
|
||||
if err := pool.Ping(ctx); err != nil {
|
||||
pool.Close()
|
||||
return nil, fmt.Errorf("database not reachable: %w", err)
|
||||
}
|
||||
|
||||
sqlDB, err := sql.Open("pgx", cfg.DatabaseUrl())
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open db for migrations: %w", err)
|
||||
}
|
||||
|
||||
_, filename, _, ok := runtime.Caller(0)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unable to get caller info")
|
||||
}
|
||||
migrationsPath := filepath.Join(filepath.Dir(filename), "..", "..", "..", "db", "migrations")
|
||||
|
||||
if err := goose.Up(sqlDB, migrationsPath); err != nil {
|
||||
return nil, fmt.Errorf("goose failed: %w", err)
|
||||
}
|
||||
_ = sqlDB.Close()
|
||||
|
||||
return &Psql{
|
||||
q: repository.New(pool),
|
||||
conn: pool,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Not part of the DB interface this package implements. Only used for testing.
|
||||
func (d *Psql) Exec(ctx context.Context, query string, args ...any) error {
|
||||
_, err := d.conn.Exec(ctx, query, args...)
|
||||
return err
|
||||
}
|
||||
|
||||
// Not part of the DB interface this package implements. Only used for testing.
// RowExists runs a query that is expected to return a single boolean column
// (e.g. SELECT EXISTS(...)) and reports its value.
func (d *Psql) RowExists(ctx context.Context, query string, args ...any) (bool, error) {
	var exists bool
	err := d.conn.QueryRow(ctx, query, args...).Scan(&exists)
	return exists, err
}
|
||||
|
||||
func (p *Psql) Count(ctx context.Context, query string, args ...any) (count int, err error) {
|
||||
err = p.conn.QueryRow(ctx, query, args...).Scan(&count)
|
||||
return
|
||||
}
|
||||
|
||||
// Exposes p.conn.QueryRow. Only used for testing. Not part of the DB interface this package implements.
|
||||
func (p *Psql) QueryRow(ctx context.Context, query string, args ...any) pgx.Row {
|
||||
return p.conn.QueryRow(ctx, query, args...)
|
||||
}
|
||||
|
||||
// Close releases the underlying connection pool. The context parameter is
// unused because pgxpool.Pool.Close takes no context; it exists to satisfy
// the DB interface.
func (d *Psql) Close(ctx context.Context) {
	d.conn.Close()
}
|
||||
|
||||
// Ping verifies the database is reachable by pinging the pool.
func (d *Psql) Ping(ctx context.Context) error {
	return d.conn.Ping(ctx)
}
|
||||
|
||||
func stepToInterval(p db.StepInterval) pgtype.Interval {
|
||||
var interval pgtype.Interval
|
||||
switch p {
|
||||
case db.StepDay:
|
||||
interval.Days = 1
|
||||
case db.StepWeek:
|
||||
interval.Days = 7
|
||||
case db.StepMonth:
|
||||
interval.Months = 1
|
||||
case db.StepYear:
|
||||
interval.Months = 12
|
||||
}
|
||||
interval.Valid = true
|
||||
return interval
|
||||
}
|
||||
186
internal/db/psql/psql_test.go
Normal file
186
internal/db/psql/psql_test.go
Normal file
|
|
@ -0,0 +1,186 @@
|
|||
package psql_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log"
|
||||
"testing"
|
||||
|
||||
"github.com/gabehf/koito/internal/cfg"
|
||||
"github.com/gabehf/koito/internal/db/psql"
|
||||
_ "github.com/gabehf/koito/testing_init"
|
||||
"github.com/ory/dockertest/v3"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
var store *psql.Psql
|
||||
|
||||
func getTestGetenv(resource *dockertest.Resource) func(string) string {
|
||||
return func(env string) string {
|
||||
switch env {
|
||||
case cfg.DATABASE_URL_ENV:
|
||||
return fmt.Sprintf("postgres://postgres:secret@localhost:%s", resource.GetPort("5432/tcp"))
|
||||
default:
|
||||
return ""
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
// uses a sensible default on windows (tcp/http) and linux/osx (socket)
|
||||
pool, err := dockertest.NewPool("")
|
||||
if err != nil {
|
||||
log.Fatalf("Could not construct pool: %s", err)
|
||||
}
|
||||
|
||||
// uses pool to try to connect to Docker
|
||||
err = pool.Client.Ping()
|
||||
if err != nil {
|
||||
log.Fatalf("Could not connect to Docker: %s", err)
|
||||
}
|
||||
|
||||
// pulls an image, creates a container based on it and runs it
|
||||
resource, err := pool.Run("postgres", "latest", []string{"POSTGRES_PASSWORD=secret"})
|
||||
if err != nil {
|
||||
log.Fatalf("Could not start resource: %s", err)
|
||||
}
|
||||
|
||||
err = cfg.Load(getTestGetenv(resource))
|
||||
if err != nil {
|
||||
log.Fatalf("Could not load cfg: %s", err)
|
||||
}
|
||||
|
||||
// exponential backoff-retry, because the application in the container might not be ready to accept connections yet
|
||||
if err := pool.Retry(func() error {
|
||||
var err error
|
||||
store, err = psql.New()
|
||||
if err != nil {
|
||||
log.Println("Failed to connect to test database, retrying...")
|
||||
return err
|
||||
}
|
||||
return store.Ping(context.Background())
|
||||
}); err != nil {
|
||||
log.Fatalf("Could not connect to database: %s", err)
|
||||
}
|
||||
|
||||
// as of go1.15 testing.M returns the exit code of m.Run(), so it is safe to use defer here
|
||||
defer func() {
|
||||
if err := pool.Purge(resource); err != nil {
|
||||
log.Fatalf("Could not purge resource: %s", err)
|
||||
}
|
||||
}()
|
||||
|
||||
// insert a user into the db with id 1 to use for tests
|
||||
err = store.Exec(context.Background(), `INSERT INTO users (username, password) VALUES ('test', DECODE('abc123', 'hex'))`)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to insert test user: %v", err)
|
||||
}
|
||||
|
||||
m.Run()
|
||||
}
|
||||
|
||||
// testDataForTopItems seeds four artists/releases/tracks with listens spread
// across time buckets so the top-items queries have a known ordering per
// period.
func testDataForTopItems(t *testing.T) {
	truncateTestData(t)

	// artist 1 has most listens older than 1 year
	// artist 2 has most listens older than 1 month
	// artist 3 has most listens older than 1 week
	// artist 4 has least listens

	err := store.Exec(context.Background(),
		`INSERT INTO artists (musicbrainz_id)
		VALUES ('00000000-0000-0000-0000-000000000001'),
		('00000000-0000-0000-0000-000000000002'),
		('00000000-0000-0000-0000-000000000003'),
		('00000000-0000-0000-0000-000000000004')`)
	require.NoError(t, err)

	err = store.Exec(context.Background(),
		`INSERT INTO artist_aliases (artist_id, alias, source, is_primary)
		VALUES (1, 'Artist One', 'Testing', true),
		(2, 'Artist Two', 'Testing', true),
		(3, 'Artist Three', 'Testing', true),
		(4, 'Artist Four', 'Testing', true)`)
	require.NoError(t, err)

	// Insert release groups
	err = store.Exec(context.Background(),
		`INSERT INTO releases (musicbrainz_id)
		VALUES ('00000000-0000-0000-0000-000000000011'),
		('00000000-0000-0000-0000-000000000022'),
		('00000000-0000-0000-0000-000000000033'),
		('00000000-0000-0000-0000-000000000044')`)
	require.NoError(t, err)

	err = store.Exec(context.Background(),
		`INSERT INTO release_aliases (release_id, alias, source, is_primary)
		VALUES (1, 'Release One', 'Testing', true),
		(2, 'Release Two', 'Testing', true),
		(3, 'Release Three', 'Testing', true),
		(4, 'Release Four', 'Testing', true)`)
	require.NoError(t, err)

	// Associate releases with artists (one release per artist).
	err = store.Exec(context.Background(),
		`INSERT INTO artist_releases (release_id, artist_id)
		VALUES (1, 1), (2, 2), (3, 3), (4, 4)`)
	require.NoError(t, err)

	// Insert tracks
	err = store.Exec(context.Background(),
		`INSERT INTO tracks (musicbrainz_id, release_id, duration)
		VALUES ('11111111-1111-1111-1111-111111111111', 1, 100),
		('22222222-2222-2222-2222-222222222222', 2, 100),
		('33333333-3333-3333-3333-333333333333', 3, 100),
		('44444444-4444-4444-4444-444444444444', 4, 100)`)
	require.NoError(t, err)

	err = store.Exec(context.Background(),
		`INSERT INTO track_aliases (track_id, alias, source, is_primary)
		VALUES (1, 'Track One', 'Testing', true),
		(2, 'Track Two', 'Testing', true),
		(3, 'Track Three', 'Testing', true),
		(4, 'Track Four', 'Testing', true)`)
	require.NoError(t, err)

	// Associate tracks with artists
	err = store.Exec(context.Background(),
		`INSERT INTO artist_tracks (artist_id, track_id)
		VALUES (1, 1), (2, 2), (3, 3), (4, 4)`)
	require.NoError(t, err)

	// Insert listens
	err = store.Exec(context.Background(),
		`INSERT INTO listens (user_id, track_id, listened_at)
		VALUES (1, 1, NOW() - INTERVAL '2 years 1 day'),
		(1, 1, NOW() - INTERVAL '2 years 2 days'),
		(1, 1, NOW() - INTERVAL '2 years 3 days'),
		(1, 1, NOW() - INTERVAL '2 years 4 days'),
		(1, 2, NOW() - INTERVAL '2 months 1 day'),
		(1, 2, NOW() - INTERVAL '2 months 2 days'),
		(1, 2, NOW() - INTERVAL '2 months 3 days'),
		(1, 3, NOW() - INTERVAL '2 weeks'),
		(1, 3, NOW() - INTERVAL '2 weeks 1 day'),
		(1, 4, NOW() - INTERVAL '2 days')`)
	require.NoError(t, err)
}
|
||||
|
||||
// testDataAbsoluteListenTimes replaces all listens with rows at fixed
// timestamps (2023-2025) so tests can assert on absolute Month/Year ranges
// rather than ranges relative to NOW().
func testDataAbsoluteListenTimes(t *testing.T) {
	err := store.Exec(context.Background(),
		`TRUNCATE listens`)
	require.NoError(t, err)

	err = store.Exec(context.Background(),
		`INSERT INTO listens (user_id, track_id, listened_at)
		VALUES (1, 1, '2023-06-22 19:11:25-07'),
		(1, 1, '2023-06-22 19:12:25-07'),
		(1, 1, '2023-06-22 19:13:25-07'),
		(1, 1, '2023-06-22 19:14:25-07'),
		(1, 2, '2024-06-22 19:15:25-07'),
		(1, 2, '2024-06-22 19:16:25-07'),
		(1, 2, '2024-06-22 19:17:25-07'),
		(1, 3, '2024-10-02 19:18:25-07'),
		(1, 3, '2024-10-02 19:19:25-07'),
		(1, 4, '2025-05-16 19:20:25-07')`)
	require.NoError(t, err)
}
|
||||
151
internal/db/psql/search.go
Normal file
151
internal/db/psql/search.go
Normal file
|
|
@ -0,0 +1,151 @@
|
|||
package psql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/gabehf/koito/internal/repository"
|
||||
"github.com/jackc/pgx/v5/pgtype"
|
||||
)
|
||||
|
||||
// searchItemLimit caps how many results each search query returns.
const searchItemLimit = 5

// substringSearchLength is the query-length threshold: shorter queries use
// substring matching, longer ones use similarity search.
const substringSearchLength = 6
||||
|
||||
func (d *Psql) SearchArtists(ctx context.Context, q string) ([]*models.Artist, error) {
|
||||
if len(q) < substringSearchLength {
|
||||
rows, err := d.q.SearchArtistsBySubstring(ctx, repository.SearchArtistsBySubstringParams{
|
||||
Column1: pgtype.Text{String: q, Valid: true},
|
||||
Limit: searchItemLimit,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret := make([]*models.Artist, len(rows))
|
||||
for i, row := range rows {
|
||||
ret[i] = &models.Artist{
|
||||
ID: row.ID,
|
||||
MbzID: row.MusicBrainzID,
|
||||
Name: row.Name,
|
||||
Image: row.Image,
|
||||
}
|
||||
}
|
||||
return ret, nil
|
||||
} else {
|
||||
rows, err := d.q.SearchArtists(ctx, repository.SearchArtistsParams{
|
||||
Similarity: q,
|
||||
Limit: searchItemLimit,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret := make([]*models.Artist, len(rows))
|
||||
for i, row := range rows {
|
||||
ret[i] = &models.Artist{
|
||||
ID: row.ID,
|
||||
MbzID: row.MusicBrainzID,
|
||||
Name: row.Name,
|
||||
Image: row.Image,
|
||||
}
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
}
|
||||
|
||||
func (d *Psql) SearchAlbums(ctx context.Context, q string) ([]*models.Album, error) {
|
||||
if len(q) < substringSearchLength {
|
||||
rows, err := d.q.SearchReleasesBySubstring(ctx, repository.SearchReleasesBySubstringParams{
|
||||
Column1: pgtype.Text{String: q, Valid: true},
|
||||
Limit: searchItemLimit,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret := make([]*models.Album, len(rows))
|
||||
for i, row := range rows {
|
||||
ret[i] = &models.Album{
|
||||
ID: row.ID,
|
||||
MbzID: row.MusicBrainzID,
|
||||
Title: row.Title,
|
||||
VariousArtists: row.VariousArtists,
|
||||
Image: row.Image,
|
||||
}
|
||||
err = json.Unmarshal(row.Artists, &ret[i].Artists)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return ret, nil
|
||||
} else {
|
||||
rows, err := d.q.SearchReleases(ctx, repository.SearchReleasesParams{
|
||||
Similarity: q,
|
||||
Limit: searchItemLimit,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret := make([]*models.Album, len(rows))
|
||||
for i, row := range rows {
|
||||
ret[i] = &models.Album{
|
||||
ID: row.ID,
|
||||
MbzID: row.MusicBrainzID,
|
||||
Title: row.Title,
|
||||
VariousArtists: row.VariousArtists,
|
||||
Image: row.Image,
|
||||
}
|
||||
err = json.Unmarshal(row.Artists, &ret[i].Artists)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
}
|
||||
|
||||
func (d *Psql) SearchTracks(ctx context.Context, q string) ([]*models.Track, error) {
|
||||
if len(q) < substringSearchLength {
|
||||
rows, err := d.q.SearchTracksBySubstring(ctx, repository.SearchTracksBySubstringParams{
|
||||
Column1: pgtype.Text{String: q, Valid: true},
|
||||
Limit: searchItemLimit,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret := make([]*models.Track, len(rows))
|
||||
for i, row := range rows {
|
||||
ret[i] = &models.Track{
|
||||
ID: row.ID,
|
||||
MbzID: row.MusicBrainzID,
|
||||
Title: row.Title,
|
||||
Image: row.Image,
|
||||
}
|
||||
err = json.Unmarshal(row.Artists, &ret[i].Artists)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return ret, nil
|
||||
} else {
|
||||
rows, err := d.q.SearchTracks(ctx, repository.SearchTracksParams{
|
||||
Similarity: q,
|
||||
Limit: searchItemLimit,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret := make([]*models.Track, len(rows))
|
||||
for i, row := range rows {
|
||||
ret[i] = &models.Track{
|
||||
ID: row.ID,
|
||||
MbzID: row.MusicBrainzID,
|
||||
Title: row.Title,
|
||||
Image: row.Image,
|
||||
}
|
||||
err = json.Unmarshal(row.Artists, &ret[i].Artists)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
}
|
||||
116
internal/db/psql/search_test.go
Normal file
116
internal/db/psql/search_test.go
Normal file
|
|
@ -0,0 +1,116 @@
|
|||
package psql_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// setupTestDataForSearch seeds two artists, two albums, and two tracks —
// one of each with a name longer than the substring-search threshold — so
// both search code paths can be exercised.
func setupTestDataForSearch(t *testing.T) {
	truncateTestData(t)

	// Insert artists
	err := store.Exec(context.Background(),
		`INSERT INTO artists (musicbrainz_id)
		VALUES ('00000000-0000-0000-0000-000000000001'),
		('00000000-0000-0000-0000-000000000002')`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO artist_aliases (artist_id, alias, source, is_primary)
		VALUES (1, 'Artist One With A Long Name', 'Testing', true),
		(2, 'Artist Two', 'Testing', true)`)
	require.NoError(t, err)

	// Insert albums
	err = store.Exec(context.Background(),
		`INSERT INTO releases (musicbrainz_id, various_artists)
		VALUES ('11111111-1111-1111-1111-111111111111', false),
		('22222222-2222-2222-2222-222222222222', true)`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO release_aliases (release_id, alias, source, is_primary)
		VALUES (1, 'Album One With A Long Name', 'Testing', true),
		(2, 'Album Two', 'Testing', true)`)
	require.NoError(t, err)

	// Insert tracks
	err = store.Exec(context.Background(),
		`INSERT INTO tracks (musicbrainz_id, release_id)
		VALUES ('33333333-3333-3333-3333-333333333333', 1),
		('44444444-4444-4444-4444-444444444444', 2)`)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`INSERT INTO track_aliases (track_id, alias, source, is_primary)
		VALUES (1, 'Track One With A Long Name', 'Testing', true),
		(2, 'Track Two', 'Testing', true)`)
	require.NoError(t, err)

	// Associate artists with albums and tracks
	err = store.Exec(context.Background(),
		`INSERT INTO artist_releases (artist_id, release_id)
		VALUES (1, 1), (2, 2)`)
	require.NoError(t, err)

	err = store.Exec(context.Background(),
		`INSERT INTO artist_tracks (artist_id, track_id)
		VALUES (1, 1), (2, 2)`)
	require.NoError(t, err)
}
|
||||
|
||||
// TestSearchArtists covers both search paths: an exact long-name query
// (similarity search) and a short query ("Arti", substring search).
func TestSearchArtists(t *testing.T) {
	ctx := context.Background()
	setupTestDataForSearch(t)

	// Search for "Artist One With A Long Name"
	results, err := store.SearchArtists(ctx, "Artist One With A Long Name")
	require.NoError(t, err)
	require.Len(t, results, 1)
	assert.Equal(t, "Artist One With A Long Name", results[0].Name)

	// Search for substring "Artist"
	results, err = store.SearchArtists(ctx, "Arti")
	require.NoError(t, err)
	require.Len(t, results, 2)

	truncateTestData(t)
}
|
||||
|
||||
// TestSearchAlbums covers both search paths for albums and checks that the
// artists JSON aggregate is populated on substring results.
func TestSearchAlbums(t *testing.T) {
	ctx := context.Background()
	setupTestDataForSearch(t)

	// Search for "Album One With A Long Name"
	results, err := store.SearchAlbums(ctx, "Album One With A Long Name")
	require.NoError(t, err)
	require.Len(t, results, 1)
	assert.Equal(t, "Album One With A Long Name", results[0].Title)

	// Search for substring "Album"
	results, err = store.SearchAlbums(ctx, "Albu")
	require.NoError(t, err)
	require.Len(t, results, 2)
	assert.NotNil(t, results[0].Artists)

	truncateTestData(t)
}
|
||||
|
||||
// TestSearchTracks covers both search paths for tracks and checks that the
// artists JSON aggregate is populated on substring results.
func TestSearchTracks(t *testing.T) {
	ctx := context.Background()
	setupTestDataForSearch(t)

	// Search for "Track One With A Long Name"
	results, err := store.SearchTracks(ctx, "Track One With A Long Name")
	require.NoError(t, err)
	require.Len(t, results, 1)
	assert.Equal(t, "Track One With A Long Name", results[0].Title)

	// Search for substring "Track"
	results, err = store.SearchTracks(ctx, "Trac")
	require.NoError(t, err)
	require.Len(t, results, 2)
	assert.NotNil(t, results[0].Artists)

	truncateTestData(t)
}
|
||||
59
internal/db/psql/sessions.go
Normal file
59
internal/db/psql/sessions.go
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
package psql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/gabehf/koito/internal/repository"
|
||||
"github.com/google/uuid"
|
||||
"github.com/jackc/pgx/v5"
|
||||
)
|
||||
|
||||
func (d *Psql) SaveSession(ctx context.Context, userID int32, expiresAt time.Time, persistent bool) (*models.Session, error) {
|
||||
session, err := d.q.InsertSession(ctx, repository.InsertSessionParams{
|
||||
ID: uuid.New(),
|
||||
UserID: userID,
|
||||
ExpiresAt: expiresAt,
|
||||
Persistent: persistent,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &models.Session{
|
||||
ID: session.ID,
|
||||
UserID: session.UserID,
|
||||
CreatedAt: session.CreatedAt,
|
||||
ExpiresAt: session.ExpiresAt,
|
||||
Persistent: session.Persistent,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *Psql) RefreshSession(ctx context.Context, sessionId uuid.UUID, expiresAt time.Time) error {
|
||||
return d.q.UpdateSessionExpiry(ctx, repository.UpdateSessionExpiryParams{
|
||||
ID: sessionId,
|
||||
ExpiresAt: expiresAt,
|
||||
})
|
||||
}
|
||||
|
||||
func (d *Psql) DeleteSession(ctx context.Context, sessionId uuid.UUID) error {
|
||||
return d.q.DeleteSession(ctx, sessionId)
|
||||
}
|
||||
|
||||
// Returns nil, nil when no database entries are found
|
||||
func (d *Psql) GetUserBySession(ctx context.Context, sessionId uuid.UUID) (*models.User, error) {
|
||||
row, err := d.q.GetUserBySession(ctx, sessionId)
|
||||
if errors.Is(err, pgx.ErrNoRows) {
|
||||
return nil, nil
|
||||
} else if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &models.User{
|
||||
ID: row.ID,
|
||||
Username: row.Username,
|
||||
Password: row.Password,
|
||||
Role: models.UserRole(row.Role),
|
||||
}, nil
|
||||
}
|
||||
101
internal/db/psql/sessions_test.go
Normal file
101
internal/db/psql/sessions_test.go
Normal file
|
|
@ -0,0 +1,101 @@
|
|||
package psql_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// truncateTestDataForSessions clears the sessions table between tests,
// resetting identities and cascading to dependents.
func truncateTestDataForSessions(t *testing.T) {
	err := store.Exec(context.Background(),
		`TRUNCATE
			sessions
		RESTART IDENTITY CASCADE`,
	)
	require.NoError(t, err)
}
|
||||
// TestSaveSession verifies a saved session round-trips user ID, persistence
// flag, and (approximately) the expiry timestamp.
func TestSaveSession(t *testing.T) {
	ctx := context.Background()

	// Save a session for the user
	expiresAt := time.Now().Add(24 * time.Hour).UTC()
	session, err := store.SaveSession(ctx, 1, expiresAt, true)
	require.NoError(t, err)
	require.NotNil(t, session)
	assert.Equal(t, int32(1), session.UserID)
	assert.Equal(t, true, session.Persistent)
	assert.WithinDuration(t, expiresAt, session.ExpiresAt, time.Second)

	truncateTestDataForSessions(t)
}
|
||||
|
||||
// TestRefreshSession saves an already-expired session, extends its expiry,
// and confirms it becomes retrievable again.
func TestRefreshSession(t *testing.T) {
	ctx := context.Background()

	// Save a session first
	expiresAt := time.Now().Add(-1 * time.Minute)
	session, err := store.SaveSession(ctx, 1, expiresAt, true)
	require.NoError(t, err)

	// Refresh the session expiry
	newExpiresAt := time.Now().Add(48 * time.Hour)
	err = store.RefreshSession(ctx, session.ID, newExpiresAt)
	require.NoError(t, err)

	// Can only retrieve a session with an expiresAt > time.Now()
	_, err = store.GetUserBySession(ctx, session.ID)
	require.NoError(t, err)

	truncateTestDataForSessions(t)
}
|
||||
|
||||
// TestDeleteSession saves a session, deletes it, and confirms via a direct
// count that the row is gone.
func TestDeleteSession(t *testing.T) {
	ctx := context.Background()

	// Save a session first
	expiresAt := time.Now().Add(24 * time.Hour)
	session, err := store.SaveSession(ctx, 1, expiresAt, true)
	require.NoError(t, err)

	// Delete the session
	err = store.DeleteSession(ctx, session.ID)
	require.NoError(t, err)

	// Verify the session was deleted
	var count int
	count, err = store.Count(ctx, `SELECT COUNT(*) FROM sessions WHERE id = $1`, session.ID)
	require.NoError(t, err)
	assert.Equal(t, 0, count)

	truncateTestDataForSessions(t)
}
|
||||
|
||||
// TestGetUserBySession verifies the seeded user is returned for a valid
// session and that an unknown session yields nil, nil rather than an error.
func TestGetUserBySession(t *testing.T) {
	ctx := context.Background()

	// Save a session first
	expiresAt := time.Now().Add(24 * time.Hour)
	session, err := store.SaveSession(ctx, 1, expiresAt, true)
	require.NoError(t, err)

	// Get the user by session
	user, err := store.GetUserBySession(ctx, session.ID)
	require.NoError(t, err)
	require.NotNil(t, user)
	assert.Equal(t, int32(1), user.ID)
	assert.Equal(t, "test", user.Username)
	// Password bytes match the DECODE('abc123','hex') seeded in TestMain.
	assert.Equal(t, []uint8([]byte{0xab, 0xc1, 0x23}), user.Password)
	assert.Equal(t, "user", string(user.Role))

	// Test for a non-existent session
	nonExistentSessionID := uuid.New()
	user, err = store.GetUserBySession(ctx, nonExistentSessionID)
	require.NoError(t, err)
	assert.Nil(t, user)

	truncateTestDataForSessions(t)
}
|
||||
119
internal/db/psql/top_albums.go
Normal file
119
internal/db/psql/top_albums.go
Normal file
|
|
@ -0,0 +1,119 @@
|
|||
package psql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/gabehf/koito/internal/repository"
|
||||
"github.com/gabehf/koito/internal/utils"
|
||||
)
|
||||
|
||||
// GetTopAlbumsPaginated returns a page of albums ordered by listen count.
// The time window comes from an explicit Week/Month/Year date range, or —
// when neither Month nor Year is set — from opts.Period relative to now.
// When opts.ArtistID is set, results are restricted to that artist's
// releases. A zero Limit falls back to DefaultItemsPerPage.
func (d *Psql) GetTopAlbumsPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[*models.Album], error) {
	l := logger.FromContext(ctx)
	offset := (opts.Page - 1) * opts.Limit
	t1, t2, err := utils.DateRange(opts.Week, opts.Month, opts.Year)
	if err != nil {
		return nil, err
	}
	if opts.Month == 0 && opts.Year == 0 {
		// use period, not date range
		t2 = time.Now()
		t1 = db.StartTimeFromPeriod(opts.Period)
	}
	if opts.Limit == 0 {
		opts.Limit = DefaultItemsPerPage
	}

	var rgs []*models.Album
	var count int64

	if opts.ArtistID != 0 {
		l.Debug().Msgf("Fetching top %d albums from artist id %d with period %s on page %d from range %v to %v",
			opts.Limit, opts.ArtistID, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006"))

		rows, err := d.q.GetTopReleasesFromArtist(ctx, repository.GetTopReleasesFromArtistParams{
			ArtistID:     int32(opts.ArtistID),
			Limit:        int32(opts.Limit),
			Offset:       int32(offset),
			ListenedAt:   t1,
			ListenedAt_2: t2,
		})
		if err != nil {
			return nil, err
		}
		rgs = make([]*models.Album, len(rows))
		l.Debug().Msgf("Database responded with %d items", len(rows))
		for i, v := range rows {
			artists := make([]models.SimpleArtist, 0)
			err = json.Unmarshal(v.Artists, &artists)
			if err != nil {
				// Decoding failure is logged but non-fatal; the album is
				// still returned without its artist list.
				l.Err(err).Msgf("Error unmarshalling artists for release group with id %d", v.ID)
				artists = nil
			}
			rgs[i] = &models.Album{
				ID:             v.ID,
				MbzID:          v.MusicBrainzID,
				Title:          v.Title,
				Image:          v.Image,
				Artists:        artists,
				VariousArtists: v.VariousArtists,
				ListenCount:    v.ListenCount,
			}
		}
		// NOTE(review): this per-artist total counts all of the artist's
		// releases regardless of the time window, unlike the global branch
		// below which counts within [t1, t2] — confirm this is intended.
		count, err = d.q.CountReleasesFromArtist(ctx, int32(opts.ArtistID))
		if err != nil {
			return nil, err
		}
	} else {
		l.Debug().Msgf("Fetching top %d albums with period %s on page %d from range %v to %v",
			opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006"))
		rows, err := d.q.GetTopReleasesPaginated(ctx, repository.GetTopReleasesPaginatedParams{
			ListenedAt:   t1,
			ListenedAt_2: t2,
			Limit:        int32(opts.Limit),
			Offset:       int32(offset),
		})
		if err != nil {
			return nil, err
		}
		rgs = make([]*models.Album, len(rows))
		l.Debug().Msgf("Database responded with %d items", len(rows))
		for i, row := range rows {
			artists := make([]models.SimpleArtist, 0)
			err = json.Unmarshal(row.Artists, &artists)
			if err != nil {
				l.Err(err).Msgf("Error unmarshalling artists for release group with id %d", row.ID)
				artists = nil
			}
			t := &models.Album{
				Title:          row.Title,
				MbzID:          row.MusicBrainzID,
				ID:             row.ID,
				Image:          row.Image,
				Artists:        artists,
				VariousArtists: row.VariousArtists,
				ListenCount:    row.ListenCount,
			}
			rgs[i] = t
		}
		count, err = d.q.CountTopReleases(ctx, repository.CountTopReleasesParams{
			ListenedAt:   t1,
			ListenedAt_2: t2,
		})
		if err != nil {
			return nil, err
		}
		l.Debug().Msgf("Database responded with %d albums out of a total %d", len(rows), count)
	}
	return &db.PaginatedResponse[*models.Album]{
		Items:        rgs,
		TotalCount:   count,
		ItemsPerPage: int32(opts.Limit),
		HasNextPage:  int64(offset+len(rgs)) < count,
		CurrentPage:  int32(opts.Page),
	}, nil
}
|
||||
103
internal/db/psql/top_albums_test.go
Normal file
103
internal/db/psql/top_albums_test.go
Normal file
|
|
@ -0,0 +1,103 @@
|
|||
package psql_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestGetTopAlbumsPaginated exercises ordering, pagination, invalid inputs,
// period filters, per-artist filtering, and absolute Month/Year ranges
// against the seeded fixture data.
func TestGetTopAlbumsPaginated(t *testing.T) {
	testDataForTopItems(t)
	ctx := context.Background()

	// Test valid
	resp, err := store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime})
	require.NoError(t, err)
	require.Len(t, resp.Items, 4)
	assert.Equal(t, int64(4), resp.TotalCount)
	assert.Equal(t, "Release One", resp.Items[0].Title)
	assert.Equal(t, "Release Two", resp.Items[1].Title)
	assert.Equal(t, "Release Three", resp.Items[2].Title)
	assert.Equal(t, "Release Four", resp.Items[3].Title)

	// Test pagination
	resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Period: db.PeriodAllTime})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	assert.Equal(t, "Release Two", resp.Items[0].Title)

	// Test page out of range
	resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 10, Period: db.PeriodAllTime})
	require.NoError(t, err)
	require.Empty(t, resp.Items)
	assert.False(t, resp.HasNextPage)

	// Test invalid inputs
	_, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Limit: -1, Page: 0})
	assert.Error(t, err)

	_, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: -1})
	assert.Error(t, err)

	// Test specify period
	resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodDay})
	require.NoError(t, err)
	require.Len(t, resp.Items, 0) // empty
	assert.Equal(t, int64(0), resp.TotalCount)
	// should default to PeriodDay
	resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{})
	require.NoError(t, err)
	require.Len(t, resp.Items, 0) // empty
	assert.Equal(t, int64(0), resp.TotalCount)

	resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodWeek})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	assert.Equal(t, int64(1), resp.TotalCount)
	assert.Equal(t, "Release Four", resp.Items[0].Title)

	resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodMonth})
	require.NoError(t, err)
	require.Len(t, resp.Items, 2)
	assert.Equal(t, int64(2), resp.TotalCount)
	assert.Equal(t, "Release Three", resp.Items[0].Title)
	assert.Equal(t, "Release Four", resp.Items[1].Title)

	resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodYear})
	require.NoError(t, err)
	require.Len(t, resp.Items, 3)
	assert.Equal(t, int64(3), resp.TotalCount)
	assert.Equal(t, "Release Two", resp.Items[0].Title)
	assert.Equal(t, "Release Three", resp.Items[1].Title)
	assert.Equal(t, "Release Four", resp.Items[2].Title)

	// test specific artist
	resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodYear, ArtistID: 2})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	assert.Equal(t, int64(1), resp.TotalCount)
	assert.Equal(t, "Release Two", resp.Items[0].Title)

	// Test specify dates

	testDataAbsoluteListenTimes(t)

	resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Year: 2023})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	assert.Equal(t, int64(1), resp.TotalCount)
	assert.Equal(t, "Release One", resp.Items[0].Title)

	resp, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Month: 6, Year: 2024})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	assert.Equal(t, int64(1), resp.TotalCount)
	assert.Equal(t, "Release Two", resp.Items[0].Title)

	// invalid, year required with month
	_, err = store.GetTopAlbumsPaginated(ctx, db.GetItemsOpts{Month: 10})
	require.Error(t, err)
}
|
||||
67
internal/db/psql/top_artists.go
Normal file
67
internal/db/psql/top_artists.go
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
package psql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/gabehf/koito/internal/repository"
|
||||
"github.com/gabehf/koito/internal/utils"
|
||||
)
|
||||
|
||||
// GetTopArtistsPaginated returns a page of artists ordered by listen count
// within the resolved time window: an explicit Week/Month/Year date range,
// or — when Month and Year are both unset — opts.Period relative to now.
// A zero Limit falls back to DefaultItemsPerPage.
func (d *Psql) GetTopArtistsPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[*models.Artist], error) {
	l := logger.FromContext(ctx)
	offset := (opts.Page - 1) * opts.Limit
	t1, t2, err := utils.DateRange(opts.Week, opts.Month, opts.Year)
	if err != nil {
		return nil, err
	}
	if opts.Month == 0 && opts.Year == 0 {
		// use period, not date range
		t2 = time.Now()
		t1 = db.StartTimeFromPeriod(opts.Period)
	}
	if opts.Limit == 0 {
		opts.Limit = DefaultItemsPerPage
	}
	l.Debug().Msgf("Fetching top %d artists with period %s on page %d from range %v to %v",
		opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006"))
	rows, err := d.q.GetTopArtistsPaginated(ctx, repository.GetTopArtistsPaginatedParams{
		ListenedAt:   t1,
		ListenedAt_2: t2,
		Limit:        int32(opts.Limit),
		Offset:       int32(offset),
	})
	if err != nil {
		return nil, err
	}
	rgs := make([]*models.Artist, len(rows))
	for i, row := range rows {
		t := &models.Artist{
			Name:        row.Name,
			MbzID:       row.MusicBrainzID,
			ID:          row.ID,
			Image:       row.Image,
			ListenCount: row.ListenCount,
		}
		rgs[i] = t
	}
	// Total within the same window, for pagination metadata.
	count, err := d.q.CountTopArtists(ctx, repository.CountTopArtistsParams{
		ListenedAt:   t1,
		ListenedAt_2: t2,
	})
	if err != nil {
		return nil, err
	}
	l.Debug().Msgf("Database responded with %d artists out of a total %d", len(rows), count)

	return &db.PaginatedResponse[*models.Artist]{
		Items:        rgs,
		TotalCount:   count,
		ItemsPerPage: int32(opts.Limit),
		HasNextPage:  int64(offset+len(rgs)) < count,
		CurrentPage:  int32(opts.Page),
	}, nil
}
|
||||
96
internal/db/psql/top_artists_test.go
Normal file
96
internal/db/psql/top_artists_test.go
Normal file
|
|
@ -0,0 +1,96 @@
|
|||
package psql_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestGetTopArtistsPaginated exercises GetTopArtistsPaginated against the
// shared test store: ordering, pagination, out-of-range pages, invalid
// limit/page values, relative periods, and absolute month/year ranges.
// Assertions depend on the fixture rows seeded by testDataForTopItems and,
// later, testDataAbsoluteListenTimes — the call order matters.
func TestGetTopArtistsPaginated(t *testing.T) {
	testDataForTopItems(t)
	ctx := context.Background()

	// Test valid
	resp, err := store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime})
	require.NoError(t, err)
	require.Len(t, resp.Items, 4)
	assert.Equal(t, int64(4), resp.TotalCount)
	assert.Equal(t, "Artist One", resp.Items[0].Name)
	assert.Equal(t, "Artist Two", resp.Items[1].Name)
	assert.Equal(t, "Artist Three", resp.Items[2].Name)
	assert.Equal(t, "Artist Four", resp.Items[3].Name)

	// Test pagination
	resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Period: db.PeriodAllTime})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	assert.Equal(t, "Artist Two", resp.Items[0].Name)

	// Test page out of range
	resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 10, Period: db.PeriodAllTime})
	require.NoError(t, err)
	assert.Empty(t, resp.Items)
	assert.False(t, resp.HasNextPage)

	// Test invalid inputs
	_, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Limit: -1, Page: 0})
	assert.Error(t, err)

	_, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: -1})
	assert.Error(t, err)

	// Test specify period
	resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodDay})
	require.NoError(t, err)
	require.Len(t, resp.Items, 0) // empty
	assert.Equal(t, int64(0), resp.TotalCount)
	// should default to PeriodDay
	resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{})
	require.NoError(t, err)
	require.Len(t, resp.Items, 0) // empty
	assert.Equal(t, int64(0), resp.TotalCount)

	resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodWeek})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	assert.Equal(t, int64(1), resp.TotalCount)
	assert.Equal(t, "Artist Four", resp.Items[0].Name)

	resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodMonth})
	require.NoError(t, err)
	require.Len(t, resp.Items, 2)
	assert.Equal(t, int64(2), resp.TotalCount)
	assert.Equal(t, "Artist Three", resp.Items[0].Name)
	assert.Equal(t, "Artist Four", resp.Items[1].Name)

	resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Period: db.PeriodYear})
	require.NoError(t, err)
	require.Len(t, resp.Items, 3)
	assert.Equal(t, int64(3), resp.TotalCount)
	assert.Equal(t, "Artist Two", resp.Items[0].Name)
	assert.Equal(t, "Artist Three", resp.Items[1].Name)
	assert.Equal(t, "Artist Four", resp.Items[2].Name)

	// Test specify dates
	// Reseeds listens at fixed absolute timestamps; assertions below depend on it.
	testDataAbsoluteListenTimes(t)

	resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Year: 2023})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	assert.Equal(t, int64(1), resp.TotalCount)
	assert.Equal(t, "Artist One", resp.Items[0].Name)

	resp, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Month: 6, Year: 2024})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	assert.Equal(t, int64(1), resp.TotalCount)
	assert.Equal(t, "Artist Two", resp.Items[0].Name)

	// invalid, year required with month
	_, err = store.GetTopArtistsPaginated(ctx, db.GetItemsOpts{Month: 10})
	require.Error(t, err)
}
|
||||
160
internal/db/psql/top_tracks.go
Normal file
160
internal/db/psql/top_tracks.go
Normal file
|
|
@ -0,0 +1,160 @@
|
|||
package psql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/gabehf/koito/internal/repository"
|
||||
"github.com/gabehf/koito/internal/utils"
|
||||
)
|
||||
|
||||
func (d *Psql) GetTopTracksPaginated(ctx context.Context, opts db.GetItemsOpts) (*db.PaginatedResponse[*models.Track], error) {
|
||||
l := logger.FromContext(ctx)
|
||||
offset := (opts.Page - 1) * opts.Limit
|
||||
t1, t2, err := utils.DateRange(opts.Week, opts.Month, opts.Year)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if opts.Month == 0 && opts.Year == 0 {
|
||||
// use period, not date range
|
||||
t2 = time.Now()
|
||||
t1 = db.StartTimeFromPeriod(opts.Period)
|
||||
}
|
||||
if opts.Limit == 0 {
|
||||
opts.Limit = DefaultItemsPerPage
|
||||
}
|
||||
var tracks []*models.Track
|
||||
var count int64
|
||||
if opts.AlbumID > 0 {
|
||||
l.Debug().Msgf("Fetching top %d tracks with period %s on page %d from range %v to %v",
|
||||
opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006"))
|
||||
rows, err := d.q.GetTopTracksInReleasePaginated(ctx, repository.GetTopTracksInReleasePaginatedParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
Limit: int32(opts.Limit),
|
||||
Offset: int32(offset),
|
||||
ReleaseID: int32(opts.AlbumID),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tracks = make([]*models.Track, len(rows))
|
||||
for i, row := range rows {
|
||||
artists := make([]models.SimpleArtist, 0)
|
||||
err = json.Unmarshal(row.Artists, &artists)
|
||||
if err != nil {
|
||||
l.Err(err).Msgf("Error unmarshalling artists for track with id %d", row.ID)
|
||||
artists = nil
|
||||
}
|
||||
t := &models.Track{
|
||||
Title: row.Title,
|
||||
MbzID: row.MusicBrainzID,
|
||||
ID: row.ID,
|
||||
ListenCount: row.ListenCount,
|
||||
Image: row.Image,
|
||||
AlbumID: row.ReleaseID,
|
||||
Artists: artists,
|
||||
}
|
||||
tracks[i] = t
|
||||
}
|
||||
count, err = d.q.CountTopTracksByRelease(ctx, repository.CountTopTracksByReleaseParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
ReleaseID: int32(opts.AlbumID),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
} else if opts.ArtistID > 0 {
|
||||
l.Debug().Msgf("Fetching top %d tracks with period %s on page %d from range %v to %v",
|
||||
opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006"))
|
||||
rows, err := d.q.GetTopTracksByArtistPaginated(ctx, repository.GetTopTracksByArtistPaginatedParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
Limit: int32(opts.Limit),
|
||||
Offset: int32(offset),
|
||||
ArtistID: int32(opts.ArtistID),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tracks = make([]*models.Track, len(rows))
|
||||
for i, row := range rows {
|
||||
artists := make([]models.SimpleArtist, 0)
|
||||
err = json.Unmarshal(row.Artists, &artists)
|
||||
if err != nil {
|
||||
l.Err(err).Msgf("Error unmarshalling artists for track with id %d", row.ID)
|
||||
artists = nil
|
||||
}
|
||||
t := &models.Track{
|
||||
Title: row.Title,
|
||||
MbzID: row.MusicBrainzID,
|
||||
ID: row.ID,
|
||||
Image: row.Image,
|
||||
ListenCount: row.ListenCount,
|
||||
AlbumID: row.ReleaseID,
|
||||
Artists: artists,
|
||||
}
|
||||
tracks[i] = t
|
||||
}
|
||||
count, err = d.q.CountTopTracksByArtist(ctx, repository.CountTopTracksByArtistParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
ArtistID: int32(opts.ArtistID),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
} else {
|
||||
l.Debug().Msgf("Fetching top %d tracks with period %s on page %d from range %v to %v",
|
||||
opts.Limit, opts.Period, opts.Page, t1.Format("Jan 02, 2006"), t2.Format("Jan 02, 2006"))
|
||||
rows, err := d.q.GetTopTracksPaginated(ctx, repository.GetTopTracksPaginatedParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
Limit: int32(opts.Limit),
|
||||
Offset: int32(offset),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tracks = make([]*models.Track, len(rows))
|
||||
for i, row := range rows {
|
||||
artists := make([]models.SimpleArtist, 0)
|
||||
err = json.Unmarshal(row.Artists, &artists)
|
||||
if err != nil {
|
||||
l.Err(err).Msgf("Error unmarshalling artists for track with id %d", row.ID)
|
||||
artists = nil
|
||||
}
|
||||
t := &models.Track{
|
||||
Title: row.Title,
|
||||
MbzID: row.MusicBrainzID,
|
||||
ID: row.ID,
|
||||
Image: row.Image,
|
||||
ListenCount: row.ListenCount,
|
||||
AlbumID: row.ReleaseID,
|
||||
Artists: artists,
|
||||
}
|
||||
tracks[i] = t
|
||||
}
|
||||
count, err = d.q.CountTopTracks(ctx, repository.CountTopTracksParams{
|
||||
ListenedAt: t1,
|
||||
ListenedAt_2: t2,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
l.Debug().Msgf("Database responded with %d tracks out of a total %d", len(rows), count)
|
||||
}
|
||||
|
||||
return &db.PaginatedResponse[*models.Track]{
|
||||
Items: tracks,
|
||||
TotalCount: count,
|
||||
ItemsPerPage: int32(opts.Limit),
|
||||
HasNextPage: int64(offset+len(tracks)) < count,
|
||||
CurrentPage: int32(opts.Page),
|
||||
}, nil
|
||||
}
|
||||
118
internal/db/psql/top_tracks_test.go
Normal file
118
internal/db/psql/top_tracks_test.go
Normal file
|
|
@ -0,0 +1,118 @@
|
|||
package psql_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestGetTopTracksPaginated exercises GetTopTracksPaginated against the shared
// test store: ordering, artist inclusion, pagination, out-of-range pages,
// invalid inputs, relative periods, artist/album filters (and their
// precedence), and absolute month/year ranges. Assertions depend on the
// fixture rows seeded by testDataForTopItems and, later,
// testDataAbsoluteListenTimes — the call order matters.
func TestGetTopTracksPaginated(t *testing.T) {
	testDataForTopItems(t)
	ctx := context.Background()

	// Test valid
	resp, err := store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime})
	require.NoError(t, err)
	require.Len(t, resp.Items, 4)
	assert.Equal(t, int64(4), resp.TotalCount)
	assert.Equal(t, "Track One", resp.Items[0].Title)
	assert.Equal(t, "Track Two", resp.Items[1].Title)
	assert.Equal(t, "Track Three", resp.Items[2].Title)
	assert.Equal(t, "Track Four", resp.Items[3].Title)
	// ensure artists are included
	require.Len(t, resp.Items[0].Artists, 1)
	assert.Equal(t, "Artist One", resp.Items[0].Artists[0].Name)

	// Test pagination
	resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 2, Period: db.PeriodAllTime})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	assert.Equal(t, "Track Two", resp.Items[0].Title)

	// Test page out of range
	resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: 10, Period: db.PeriodAllTime})
	require.NoError(t, err)
	assert.Empty(t, resp.Items)
	assert.False(t, resp.HasNextPage)

	// Test invalid inputs
	_, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Limit: -1, Page: 0})
	assert.Error(t, err)

	_, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Limit: 1, Page: -1})
	assert.Error(t, err)

	// Test specify period
	resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodDay})
	require.NoError(t, err)
	require.Len(t, resp.Items, 0) // empty
	assert.Equal(t, int64(0), resp.TotalCount)
	// should default to PeriodDay
	resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{})
	require.NoError(t, err)
	require.Len(t, resp.Items, 0) // empty
	assert.Equal(t, int64(0), resp.TotalCount)

	resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodWeek})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	assert.Equal(t, int64(1), resp.TotalCount)
	assert.Equal(t, "Track Four", resp.Items[0].Title)

	resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodMonth})
	require.NoError(t, err)
	require.Len(t, resp.Items, 2)
	assert.Equal(t, int64(2), resp.TotalCount)
	assert.Equal(t, "Track Three", resp.Items[0].Title)
	assert.Equal(t, "Track Four", resp.Items[1].Title)

	resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodYear})
	require.NoError(t, err)
	require.Len(t, resp.Items, 3)
	assert.Equal(t, int64(3), resp.TotalCount)
	assert.Equal(t, "Track Two", resp.Items[0].Title)
	assert.Equal(t, "Track Three", resp.Items[1].Title)
	assert.Equal(t, "Track Four", resp.Items[2].Title)

	// Test filter by artists and releases
	resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, ArtistID: 1})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	assert.Equal(t, int64(1), resp.TotalCount)
	assert.Equal(t, "Track One", resp.Items[0].Title)

	resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, AlbumID: 2})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	assert.Equal(t, int64(1), resp.TotalCount)
	assert.Equal(t, "Track Two", resp.Items[0].Title)
	// when both artistID and albumID are specified, artist id is ignored
	resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Period: db.PeriodAllTime, AlbumID: 2, ArtistID: 1})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	assert.Equal(t, int64(1), resp.TotalCount)
	assert.Equal(t, "Track Two", resp.Items[0].Title)

	// Test specify dates
	// Reseeds listens at fixed absolute timestamps; assertions below depend on it.
	testDataAbsoluteListenTimes(t)

	resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Year: 2023})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	assert.Equal(t, int64(1), resp.TotalCount)
	assert.Equal(t, "Track One", resp.Items[0].Title)

	resp, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Month: 6, Year: 2024})
	require.NoError(t, err)
	require.Len(t, resp.Items, 1)
	assert.Equal(t, int64(1), resp.TotalCount)
	assert.Equal(t, "Track Two", resp.Items[0].Title)

	// invalid, year required with month
	_, err = store.GetTopTracksPaginated(ctx, db.GetItemsOpts{Month: 10})
	require.Error(t, err)
}
|
||||
298
internal/db/psql/track.go
Normal file
298
internal/db/psql/track.go
Normal file
|
|
@ -0,0 +1,298 @@
|
|||
package psql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/gabehf/koito/internal/repository"
|
||||
"github.com/gabehf/koito/internal/utils"
|
||||
"github.com/google/uuid"
|
||||
"github.com/jackc/pgx/v5"
|
||||
)
|
||||
|
||||
// GetTrack fetches a single track, resolved (in priority order) by internal
// ID, MusicBrainz recording ID, or title + artist IDs. The all-time listen
// count is attached before returning; a failure to count is logged but not
// fatal, in which case ListenCount is zero.
func (d *Psql) GetTrack(ctx context.Context, opts db.GetTrackOpts) (*models.Track, error) {
	l := logger.FromContext(ctx)
	var track models.Track

	if opts.ID != 0 {
		l.Debug().Msgf("Fetching track from DB with id %d", opts.ID)
		t, err := d.q.GetTrack(ctx, opts.ID)
		if err != nil {
			return nil, err
		}
		track = models.Track{
			ID:       t.ID,
			MbzID:    t.MusicBrainzID,
			Title:    t.Title,
			AlbumID:  t.ReleaseID,
			Image:    t.Image,
			Duration: t.Duration,
		}
	} else if opts.MusicBrainzID != uuid.Nil {
		l.Debug().Msgf("Fetching track from DB with MusicBrainz ID %s", opts.MusicBrainzID)
		t, err := d.q.GetTrackByMbzID(ctx, &opts.MusicBrainzID)
		if err != nil {
			return nil, err
		}
		// NOTE(review): unlike the ID branch, Image is not populated here —
		// possibly because the generated row lacks it; confirm whether this
		// asymmetry is intentional.
		track = models.Track{
			ID:       t.ID,
			MbzID:    t.MusicBrainzID,
			Title:    t.Title,
			AlbumID:  t.ReleaseID,
			Duration: t.Duration,
		}
	} else if len(opts.ArtistIDs) > 0 {
		l.Debug().Msgf("Fetching track from DB with title '%s' and artist id(s) '%v'", opts.Title, opts.ArtistIDs)
		t, err := d.q.GetTrackByTitleAndArtists(ctx, repository.GetTrackByTitleAndArtistsParams{
			Title:   opts.Title,
			Column2: opts.ArtistIDs,
		})
		if err != nil {
			return nil, err
		}
		// NOTE(review): Image is not populated in this branch either.
		track = models.Track{
			ID:       t.ID,
			MbzID:    t.MusicBrainzID,
			Title:    t.Title,
			AlbumID:  t.ReleaseID,
			Duration: t.Duration,
		}
	} else {
		return nil, errors.New("insufficient information to get track")
	}

	// All-time listen count: from the Unix epoch until now.
	count, err := d.q.CountListensFromTrack(ctx, repository.CountListensFromTrackParams{
		ListenedAt:   time.Unix(0, 0),
		ListenedAt_2: time.Now(),
		TrackID:      track.ID,
	})
	if err != nil {
		// Non-fatal: return the track with a zero listen count.
		l.Err(err).Msgf("Failed to get listen count for track with id %d", track.ID)
	}

	track.ListenCount = count

	return &track, nil
}
|
||||
|
||||
func (d *Psql) SaveTrack(ctx context.Context, opts db.SaveTrackOpts) (*models.Track, error) {
|
||||
// create track in DB
|
||||
l := logger.FromContext(ctx)
|
||||
var insertMbzID *uuid.UUID
|
||||
if opts.RecordingMbzID != uuid.Nil {
|
||||
insertMbzID = &opts.RecordingMbzID
|
||||
}
|
||||
if len(opts.ArtistIDs) < 1 {
|
||||
return nil, errors.New("required parameter 'ArtistIDs' missing")
|
||||
}
|
||||
for _, aid := range opts.ArtistIDs {
|
||||
if aid == 0 {
|
||||
return nil, errors.New("none of 'ArtistIDs' may be 0")
|
||||
}
|
||||
}
|
||||
if opts.AlbumID == 0 {
|
||||
return nil, errors.New("required parameter 'AlbumID' missing")
|
||||
}
|
||||
tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to begin transaction")
|
||||
return nil, err
|
||||
}
|
||||
defer tx.Rollback(ctx)
|
||||
qtx := d.q.WithTx(tx)
|
||||
l.Debug().Msgf("Inserting new track '%s' into DB", opts.Title)
|
||||
trackRow, err := qtx.InsertTrack(ctx, repository.InsertTrackParams{
|
||||
MusicBrainzID: insertMbzID,
|
||||
ReleaseID: opts.AlbumID,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// insert associated artists
|
||||
for _, aid := range opts.ArtistIDs {
|
||||
err = qtx.AssociateArtistToTrack(ctx, repository.AssociateArtistToTrackParams{
|
||||
ArtistID: aid,
|
||||
TrackID: trackRow.ID,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
// insert primary alias
|
||||
err = qtx.InsertTrackAlias(ctx, repository.InsertTrackAliasParams{
|
||||
TrackID: trackRow.ID,
|
||||
Alias: opts.Title,
|
||||
Source: "Canonical",
|
||||
IsPrimary: true,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
err = tx.Commit(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &models.Track{
|
||||
ID: trackRow.ID,
|
||||
MbzID: insertMbzID,
|
||||
Title: opts.Title,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *Psql) UpdateTrack(ctx context.Context, opts db.UpdateTrackOpts) error {
|
||||
l := logger.FromContext(ctx)
|
||||
if opts.ID == 0 {
|
||||
return errors.New("track id not specified")
|
||||
}
|
||||
tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to begin transaction")
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback(ctx)
|
||||
qtx := d.q.WithTx(tx)
|
||||
if opts.MusicBrainzID != uuid.Nil {
|
||||
l.Debug().Msgf("Updating MusicBrainz ID for track %d", opts.ID)
|
||||
err := qtx.UpdateTrackMbzID(ctx, repository.UpdateTrackMbzIDParams{
|
||||
ID: opts.ID,
|
||||
MusicBrainzID: &opts.MusicBrainzID,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if opts.Duration != 0 {
|
||||
l.Debug().Msgf("Updating duration for track %d", opts.ID)
|
||||
err := qtx.UpdateTrackDuration(ctx, repository.UpdateTrackDurationParams{
|
||||
ID: opts.ID,
|
||||
Duration: opts.Duration,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return tx.Commit(ctx)
|
||||
}
|
||||
|
||||
func (d *Psql) SaveTrackAliases(ctx context.Context, id int32, aliases []string, source string) error {
|
||||
l := logger.FromContext(ctx)
|
||||
if id == 0 {
|
||||
return errors.New("track id not specified")
|
||||
}
|
||||
tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to begin transaction")
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback(ctx)
|
||||
qtx := d.q.WithTx(tx)
|
||||
existing, err := qtx.GetAllTrackAliases(ctx, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, v := range existing {
|
||||
aliases = append(aliases, v.Alias)
|
||||
}
|
||||
utils.Unique(&aliases)
|
||||
for _, alias := range aliases {
|
||||
if strings.TrimSpace(alias) == "" {
|
||||
return errors.New("aliases cannot be blank")
|
||||
}
|
||||
err = qtx.InsertTrackAlias(ctx, repository.InsertTrackAliasParams{
|
||||
Alias: strings.TrimSpace(alias),
|
||||
TrackID: id,
|
||||
Source: source,
|
||||
IsPrimary: false,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return tx.Commit(ctx)
|
||||
}
|
||||
|
||||
// DeleteTrack removes the track with the given id.
func (d *Psql) DeleteTrack(ctx context.Context, id int32) error {
	return d.q.DeleteTrack(ctx, id)
}
|
||||
|
||||
// DeleteTrackAlias removes a single alias from the track with the given id.
func (d *Psql) DeleteTrackAlias(ctx context.Context, id int32, alias string) error {
	return d.q.DeleteTrackAlias(ctx, repository.DeleteTrackAliasParams{
		TrackID: id,
		Alias:   alias,
	})
}
|
||||
|
||||
func (d *Psql) GetAllTrackAliases(ctx context.Context, id int32) ([]models.Alias, error) {
|
||||
rows, err := d.q.GetAllTrackAliases(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
aliases := make([]models.Alias, len(rows))
|
||||
for i, row := range rows {
|
||||
aliases[i] = models.Alias{
|
||||
ID: id,
|
||||
Alias: row.Alias,
|
||||
Source: row.Source,
|
||||
Primary: row.IsPrimary,
|
||||
}
|
||||
}
|
||||
return aliases, nil
|
||||
}
|
||||
|
||||
func (d *Psql) SetPrimaryTrackAlias(ctx context.Context, id int32, alias string) error {
|
||||
l := logger.FromContext(ctx)
|
||||
if id == 0 {
|
||||
return errors.New("artist id not specified")
|
||||
}
|
||||
tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to begin transaction")
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback(ctx)
|
||||
qtx := d.q.WithTx(tx)
|
||||
// get all aliases
|
||||
aliases, err := qtx.GetAllTrackAliases(ctx, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
primary := ""
|
||||
exists := false
|
||||
for _, v := range aliases {
|
||||
if v.Alias == alias {
|
||||
exists = true
|
||||
}
|
||||
if v.IsPrimary {
|
||||
primary = v.Alias
|
||||
}
|
||||
}
|
||||
if primary == alias {
|
||||
// no-op rename
|
||||
return nil
|
||||
}
|
||||
if !exists {
|
||||
return errors.New("alias does not exist")
|
||||
}
|
||||
err = qtx.SetTrackAliasPrimaryStatus(ctx, repository.SetTrackAliasPrimaryStatusParams{
|
||||
TrackID: id,
|
||||
Alias: alias,
|
||||
IsPrimary: true,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = qtx.SetTrackAliasPrimaryStatus(ctx, repository.SetTrackAliasPrimaryStatusParams{
|
||||
TrackID: id,
|
||||
Alias: primary,
|
||||
IsPrimary: false,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return tx.Commit(ctx)
|
||||
}
|
||||
213
internal/db/psql/track_test.go
Normal file
213
internal/db/psql/track_test.go
Normal file
|
|
@ -0,0 +1,213 @@
|
|||
package psql_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/google/uuid"
|
||||
"github.com/jackc/pgx/v5"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// testDataForTracks resets the shared test store and seeds a minimal fixture:
// two artists (ids 1, 2), two releases (ids 1, 2), and two tracks (ids 1, 2),
// each with a primary alias, with track N associated to artist N and release N.
// Track/test assertions elsewhere rely on these exact ids and alias strings.
func testDataForTracks(t *testing.T) {
	truncateTestData(t)

	// Insert artists
	err := store.Exec(context.Background(),
		`INSERT INTO artists (musicbrainz_id)
		VALUES ('00000000-0000-0000-0000-000000000001'),
		('00000000-0000-0000-0000-000000000002')`)
	require.NoError(t, err)

	// Insert artist aliases
	err = store.Exec(context.Background(),
		`INSERT INTO artist_aliases (artist_id, alias, source, is_primary)
		VALUES (1, 'Artist One', 'Testing', true),
		(2, 'Artist Two', 'Testing', true)`)
	require.NoError(t, err)

	// Insert release groups
	err = store.Exec(context.Background(),
		`INSERT INTO releases (musicbrainz_id)
		VALUES ('00000000-0000-0000-0000-000000000011'),
		('00000000-0000-0000-0000-000000000022')`)
	require.NoError(t, err)

	// Insert release aliases
	err = store.Exec(context.Background(),
		`INSERT INTO release_aliases (release_id, alias, source, is_primary)
		VALUES (1, 'Release Group One', 'Testing', true),
		(2, 'Release Group Two', 'Testing', true)`)
	require.NoError(t, err)

	// Insert tracks
	err = store.Exec(context.Background(),
		`INSERT INTO tracks (musicbrainz_id, release_id)
		VALUES ('11111111-1111-1111-1111-111111111111', 1),
		('22222222-2222-2222-2222-222222222222', 2)`)
	require.NoError(t, err)

	// Insert track aliases
	err = store.Exec(context.Background(),
		`INSERT INTO track_aliases (track_id, alias, source, is_primary)
		VALUES (1, 'Track One', 'Testing', true),
		(2, 'Track Two', 'Testing', true)`)
	require.NoError(t, err)

	// Associate tracks with artists
	err = store.Exec(context.Background(),
		`INSERT INTO artist_tracks (artist_id, track_id)
		VALUES (1, 1), (2, 2)`)
	require.NoError(t, err)
}
|
||||
|
||||
// TestGetTrack covers each lookup path of GetTrack — by internal ID, by
// MusicBrainz ID, and by title + artist IDs — plus the error returned when no
// identifying information is supplied. Relies on the testDataForTracks fixture.
func TestGetTrack(t *testing.T) {
	testDataForTracks(t)
	ctx := context.Background()

	// Test GetTrack by ID
	track, err := store.GetTrack(ctx, db.GetTrackOpts{ID: 1})
	require.NoError(t, err)
	assert.Equal(t, int32(1), track.ID)
	assert.Equal(t, "Track One", track.Title)
	assert.Equal(t, uuid.MustParse("11111111-1111-1111-1111-111111111111"), *track.MbzID)

	// Test GetTrack by MusicBrainzID
	track, err = store.GetTrack(ctx, db.GetTrackOpts{MusicBrainzID: uuid.MustParse("22222222-2222-2222-2222-222222222222")})
	require.NoError(t, err)
	assert.Equal(t, int32(2), track.ID)
	assert.Equal(t, "Track Two", track.Title)

	// Test GetTrack by Title and ArtistIDs
	track, err = store.GetTrack(ctx, db.GetTrackOpts{
		Title:     "Track One",
		ArtistIDs: []int32{1},
	})
	require.NoError(t, err)
	assert.Equal(t, int32(1), track.ID)
	assert.Equal(t, "Track One", track.Title)

	// Test GetTrack with insufficient information
	_, err = store.GetTrack(ctx, db.GetTrackOpts{})
	assert.Error(t, err)
}
|
||||
// TestSaveTrack verifies that SaveTrack persists the track, its artist
// associations, and its primary alias, and that missing or zero ArtistIDs are
// rejected. Relies on the testDataForTracks fixture.
func TestSaveTrack(t *testing.T) {
	testDataForTracks(t)
	ctx := context.Background()

	// Test SaveTrack with valid inputs
	track, err := store.SaveTrack(ctx, db.SaveTrackOpts{
		Title:          "New Track",
		ArtistIDs:      []int32{1},
		RecordingMbzID: uuid.MustParse("33333333-3333-3333-3333-333333333333"),
		AlbumID:        1,
	})
	require.NoError(t, err)
	assert.Equal(t, "New Track", track.Title)
	assert.Equal(t, uuid.MustParse("33333333-3333-3333-3333-333333333333"), *track.MbzID)

	// Verify artist associations exist
	exists, err := store.RowExists(ctx, `
		SELECT EXISTS (
			SELECT 1 FROM artist_tracks
			WHERE artist_id = $1 AND track_id = $2
		)`, 1, track.ID)
	require.NoError(t, err)
	assert.True(t, exists, "expected artist association to exist")

	// Verify alias exists
	exists, err = store.RowExists(ctx, `
		SELECT EXISTS (
			SELECT 1 FROM track_aliases
			WHERE track_id = $1 AND is_primary = true
		)`, track.ID)
	require.NoError(t, err)
	assert.True(t, exists, "expected primary alias to exist")

	// Test SaveTrack with missing ArtistIDs
	_, err = store.SaveTrack(ctx, db.SaveTrackOpts{
		Title:          "Invalid Track",
		ArtistIDs:      []int32{},
		RecordingMbzID: uuid.MustParse("44444444-4444-4444-4444-444444444444"),
	})
	assert.Error(t, err)

	// Test SaveTrack with invalid ArtistIDs
	_, err = store.SaveTrack(ctx, db.SaveTrackOpts{
		Title:          "Invalid Track",
		ArtistIDs:      []int32{0},
		RecordingMbzID: uuid.MustParse("55555555-5555-5555-5555-555555555555"),
	})
	assert.Error(t, err)
}
|
||||
|
||||
// TestUpdateTrack verifies that UpdateTrack persists a new MusicBrainz ID and
// duration, rejects a zero ID, and treats uuid.Nil as "leave unchanged".
// Relies on the testDataForTracks fixture.
func TestUpdateTrack(t *testing.T) {
	testDataForTracks(t)
	ctx := context.Background()

	newMbzID := uuid.MustParse("66666666-6666-6666-6666-666666666666")
	newDuration := 100
	err := store.UpdateTrack(ctx, db.UpdateTrackOpts{
		ID:            1,
		MusicBrainzID: newMbzID,
		Duration:      int32(newDuration),
	})
	require.NoError(t, err)

	// Verify the update
	track, err := store.GetTrack(ctx, db.GetTrackOpts{ID: 1})
	require.NoError(t, err)
	require.Equal(t, newMbzID, *track.MbzID)
	require.EqualValues(t, newDuration, track.Duration)

	// Test UpdateTrack with missing ID
	err = store.UpdateTrack(ctx, db.UpdateTrackOpts{
		ID:            0,
		MusicBrainzID: newMbzID,
		Duration:      int32(newDuration),
	})
	assert.Error(t, err)

	// Test UpdateTrack with nil MusicBrainz ID
	err = store.UpdateTrack(ctx, db.UpdateTrackOpts{
		ID:            1,
		MusicBrainzID: uuid.Nil,
		Duration:      int32(newDuration),
	})
	assert.NoError(t, err) // No update should occur
}
|
||||
|
||||
func TestTrackAliases(t *testing.T) {
|
||||
testDataForTracks(t)
|
||||
ctx := context.Background()
|
||||
|
||||
err := store.SaveTrackAliases(ctx, 1, []string{"Alias One", "Alias Two"}, "Testing")
|
||||
require.NoError(t, err)
|
||||
aliases, err := store.GetAllTrackAliases(ctx, 1)
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, aliases, 3)
|
||||
|
||||
err = store.SetPrimaryTrackAlias(ctx, 1, "Alias One")
|
||||
require.NoError(t, err)
|
||||
track, err := store.GetTrack(ctx, db.GetTrackOpts{ID: 1})
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "Alias One", track.Title)
|
||||
|
||||
err = store.SetPrimaryTrackAlias(ctx, 1, "Fake Alias")
|
||||
require.Error(t, err)
|
||||
|
||||
store.SetPrimaryTrackAlias(ctx, 1, "Track One")
|
||||
}
|
||||
|
||||
func TestDeleteTrack(t *testing.T) {
|
||||
testDataForTracks(t)
|
||||
ctx := context.Background()
|
||||
|
||||
err := store.DeleteTrack(ctx, 2)
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = store.Count(ctx, `SELECT * FROM tracks WHERE id = 2`)
|
||||
require.ErrorIs(t, err, pgx.ErrNoRows) // no rows error
|
||||
}
|
||||
219
internal/db/psql/user.go
Normal file
219
internal/db/psql/user.go
Normal file
|
|
@ -0,0 +1,219 @@
|
|||
package psql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"regexp"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/gabehf/koito/internal/repository"
|
||||
"github.com/jackc/pgx/v5"
|
||||
"golang.org/x/crypto/bcrypt"
|
||||
)
|
||||
|
||||
// Returns nil, nil when no database entries are found
|
||||
// GetUserByUsername looks up a user by username. The input is lowercased
// before the query, so lookups are case-insensitive relative to how usernames
// are stored (SaveUser also lowercases on insert).
// Returns nil, nil when no database entries are found.
func (d *Psql) GetUserByUsername(ctx context.Context, username string) (*models.User, error) {
	row, err := d.q.GetUserByUsername(ctx, strings.ToLower(username))
	if errors.Is(err, pgx.ErrNoRows) {
		// Absence is not an error for callers; they check for a nil user.
		return nil, nil
	} else if err != nil {
		return nil, err
	}
	return &models.User{
		ID:       row.ID,
		Username: row.Username,
		Password: row.Password,
		Role:     models.UserRole(row.Role),
	}, nil
}

// GetUserByApiKey looks up the user that owns the given API key.
// Returns nil, nil when no database entries are found.
func (d *Psql) GetUserByApiKey(ctx context.Context, key string) (*models.User, error) {
	row, err := d.q.GetUserByApiKey(ctx, key)
	if errors.Is(err, pgx.ErrNoRows) {
		return nil, nil
	} else if err != nil {
		return nil, err
	}
	return &models.User{
		ID:       row.ID,
		Username: row.Username,
		Password: row.Password,
		Role:     models.UserRole(row.Role),
	}, nil
}
|
||||
|
||||
// SaveUser validates the username and password, hashes the password with
// bcrypt, and inserts the new user. The username is stored lowercased.
// Role defaults to models.UserRoleUser when unset. The returned User does not
// include the password hash.
func (d *Psql) SaveUser(ctx context.Context, opts db.SaveUserOpts) (*models.User, error) {
	l := logger.FromContext(ctx)
	err := ValidateUsername(opts.Username)
	if err != nil {
		l.Debug().AnErr("validator_notice", err).Msgf("Username failed validation: %s", opts.Username)
		return nil, err
	}
	// Validation may also normalize (truncate) an over-long password.
	pw, err := ValidateAndNormalizePassword(opts.Password)
	if err != nil {
		l.Debug().AnErr("validator_notice", err).Msgf("Password failed validation")
		return nil, err
	}
	if opts.Role == "" {
		opts.Role = models.UserRoleUser
	}
	hashPw, err := bcrypt.GenerateFromPassword([]byte(pw), bcrypt.DefaultCost)
	if err != nil {
		l.Err(err).Msg("Failed to generate hashed password")
		return nil, err
	}
	u, err := d.q.InsertUser(ctx, repository.InsertUserParams{
		Username: strings.ToLower(opts.Username),
		Password: hashPw,
		Role:     repository.Role(opts.Role),
	})
	if err != nil {
		return nil, err
	}
	return &models.User{
		ID:       u.ID,
		Username: u.Username,
		Role:     models.UserRole(u.Role),
	}, nil
}

// SaveApiKey inserts a new API key row for a user and returns the stored key.
func (d *Psql) SaveApiKey(ctx context.Context, opts db.SaveApiKeyOpts) (*models.ApiKey, error) {
	row, err := d.q.InsertApiKey(ctx, repository.InsertApiKeyParams{
		Key:    opts.Key,
		Label:  opts.Label,
		UserID: opts.UserID,
	})
	if err != nil {
		return nil, err
	}
	return &models.ApiKey{
		ID:        row.ID,
		UserID:    row.UserID,
		Key:       row.Key,
		Label:     row.Label,
		CreatedAt: row.CreatedAt.Time,
	}, nil
}
|
||||
|
||||
// UpdateUser updates a user's username and/or password inside a single
// transaction. Empty fields in opts are skipped, so callers can update either
// field independently. The password is re-validated and bcrypt-hashed before
// being written. Returns an error if opts.ID is zero.
func (d *Psql) UpdateUser(ctx context.Context, opts db.UpdateUserOpts) error {
	l := logger.FromContext(ctx)
	if opts.ID == 0 {
		return errors.New("user id is required")
	}
	tx, err := d.conn.BeginTx(ctx, pgx.TxOptions{})
	if err != nil {
		l.Err(err).Msg("Failed to begin transaction")
		return err
	}
	// Rollback is a no-op after a successful Commit.
	defer tx.Rollback(ctx)
	qtx := d.q.WithTx(tx)
	if opts.Username != "" {
		err := ValidateUsername(opts.Username)
		if err != nil {
			l.Debug().AnErr("validator_notice", err).Msgf("Username failed validation: %s", opts.Username)
			return err
		}
		err = qtx.UpdateUserUsername(ctx, repository.UpdateUserUsernameParams{
			ID:       opts.ID,
			Username: opts.Username,
		})
		if err != nil {
			return err
		}
	}
	if opts.Password != "" {
		pw, err := ValidateAndNormalizePassword(opts.Password)
		if err != nil {
			l.Debug().AnErr("validator_notice", err).Msgf("Password failed validation")
			return err
		}
		hashPw, err := bcrypt.GenerateFromPassword([]byte(pw), bcrypt.DefaultCost)
		if err != nil {
			l.Err(err).Msg("Failed to generate hashed password")
			return err
		}
		err = qtx.UpdateUserPassword(ctx, repository.UpdateUserPasswordParams{
			ID:       opts.ID,
			Password: hashPw,
		})
		if err != nil {
			return err
		}
	}
	return tx.Commit(ctx)
}
|
||||
|
||||
// GetApiKeysByUserID returns every API key belonging to the given user.
func (d *Psql) GetApiKeysByUserID(ctx context.Context, id int32) ([]models.ApiKey, error) {
	rows, err := d.q.GetAllApiKeysByUserID(ctx, id)
	if err != nil {
		return nil, err
	}
	keys := make([]models.ApiKey, len(rows))
	for i, row := range rows {
		keys[i] = models.ApiKey{
			ID:     row.ID,
			Key:    row.Key,
			Label:  row.Label,
			UserID: row.UserID,
		}
	}
	return keys, nil
}

// UpdateApiKeyLabel renames an API key. UserID is part of the update filter,
// so a user can only relabel keys they own.
func (d *Psql) UpdateApiKeyLabel(ctx context.Context, opts db.UpdateApiKeyLabelOpts) error {
	return d.q.UpdateApiKeyLabel(ctx, repository.UpdateApiKeyLabelParams{
		ID:     opts.ID,
		Label:  opts.Label,
		UserID: opts.UserID,
	})
}

// DeleteApiKey removes an API key by its ID.
func (d *Psql) DeleteApiKey(ctx context.Context, id int32) error {
	return d.q.DeleteApiKey(ctx, id)
}

// CountUsers returns the total number of user rows.
func (d *Psql) CountUsers(ctx context.Context) (int64, error) {
	return d.q.CountUsers(ctx)
}
|
||||
|
||||
const (
	maxUsernameLength = 32
	minUsernameLength = 1
	maxPasswordLength = 128
	minPasswordLength = 8
)

// usernameRegex restricts usernames to ASCII letters, digits, '_', '.' and '-'.
var usernameRegex = regexp.MustCompile(`^[a-zA-Z0-9_.-]+$`)

// ValidateUsername returns an error when the username is outside the allowed
// length range (counted in runes) or contains disallowed characters.
func ValidateUsername(username string) error {
	length := utf8.RuneCountInString(username)
	if length < minUsernameLength || length > maxUsernameLength {
		return errors.New("username must be between 1 and 32 characters")
	}
	if !usernameRegex.MatchString(username) {
		return errors.New("username can only contain [a-zA-Z0-9_.-]")
	}
	return nil
}

// ValidateAndNormalizePassword rejects passwords shorter than
// minPasswordLength runes and silently truncates passwords longer than
// maxPasswordLength runes (bcrypt input is bounded, so over-long passwords
// are clamped rather than rejected). Returns the possibly-truncated password.
func ValidateAndNormalizePassword(password string) (string, error) {
	length := utf8.RuneCountInString(password)
	if length < minPasswordLength {
		return "", errors.New("password must be at least 8 characters long")
	}
	if length > maxPasswordLength {
		// Truncate to exactly maxPasswordLength runes. The previous
		// implementation compared the byte offset produced by `range` against
		// maxPasswordLength, so multi-byte passwords were cut far short of
		// 128 runes; converting to []rune counts runes correctly and is
		// consistent with the RuneCountInString check above.
		runes := []rune(password)
		password = string(runes[:maxPasswordLength])
	}
	return password, nil
}
|
||||
199
internal/db/psql/user_test.go
Normal file
199
internal/db/psql/user_test.go
Normal file
|
|
@ -0,0 +1,199 @@
|
|||
package psql_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"golang.org/x/crypto/bcrypt"
|
||||
)
|
||||
|
||||
// setupTestDataForUsers resets user-related tables and inserts two known test
// users ('test_user' and 'admin_user'), which receive ids 2 and 3 because the
// sequence is restarted at 2 (id 1 is a pre-existing special user).
func setupTestDataForUsers(t *testing.T) {
	truncateTestDataForUsers(t)
	// Insert additional test users
	err := store.Exec(context.Background(),
		`INSERT INTO users (username, password, role)
		VALUES ('test_user', $1, 'user'),
		('admin_user', $1, 'admin')`, []byte("hashed_password"))
	require.NoError(t, err)
}

// truncateTestDataForUsers removes every user except id 1, restarts the user
// id sequence at 2 so inserts are deterministic, and clears all API keys.
func truncateTestDataForUsers(t *testing.T) {
	err := store.Exec(context.Background(),
		`DELETE FROM users WHERE id NOT IN (1)`,
	)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`ALTER SEQUENCE users_id_seq RESTART WITH 2`,
	)
	require.NoError(t, err)
	err = store.Exec(context.Background(),
		`TRUNCATE api_keys RESTART IDENTITY CASCADE`,
	)
	require.NoError(t, err)
}
|
||||
|
||||
// TestGetUserByUsername verifies lookup of an existing user and the
// (nil, nil) contract for a missing user.
func TestGetUserByUsername(t *testing.T) {
	ctx := context.Background()
	setupTestDataForUsers(t)

	// Test fetching an existing user
	user, err := store.GetUserByUsername(ctx, "test_user")
	require.NoError(t, err)
	require.NotNil(t, user)
	assert.Equal(t, "test_user", user.Username)
	assert.Equal(t, "user", string(user.Role))

	// Test fetching a non-existent user: no error, nil user.
	user, err = store.GetUserByUsername(ctx, "nonexistent_user")
	require.NoError(t, err)
	assert.Nil(t, user)
}

// TestGetUserByApiKey verifies resolving a user through an API key and the
// (nil, nil) contract for an unknown key.
func TestGetUserByApiKey(t *testing.T) {
	ctx := context.Background()
	setupTestDataForUsers(t)

	// Insert an API key for the test user (id 2 per setup fixture).
	err := store.Exec(ctx, `INSERT INTO api_keys (key, label, user_id) VALUES ('test_key', 'Test Key', 2)`)
	require.NoError(t, err)

	// Test fetching a user by API key
	user, err := store.GetUserByApiKey(ctx, "test_key")
	require.NoError(t, err)
	require.NotNil(t, user)
	assert.Equal(t, int32(2), user.ID)
	assert.Equal(t, "test_user", user.Username)

	// Test fetching a user with a non-existent API key: no error, nil user.
	user, err = store.GetUserByApiKey(ctx, "nonexistent_key")
	require.NoError(t, err)
	assert.Nil(t, user)
}
||||
|
||||
// TestSaveUser verifies successful user creation (with bcrypt-hashed password
// persisted) and that invalid usernames/passwords are rejected.
func TestSaveUser(t *testing.T) {
	ctx := context.Background()
	setupTestDataForUsers(t)

	// Save a new user
	opts := db.SaveUserOpts{
		Username: "new_user",
		Password: "secure_password",
		Role:     "user",
	}
	user, err := store.SaveUser(ctx, opts)
	require.NoError(t, err)
	require.NotNil(t, user)
	assert.Equal(t, "new_user", user.Username)
	assert.Equal(t, "user", string(user.Role))

	// Verify the password was hashed (never stored in plaintext).
	var hashedPassword []byte
	err = store.QueryRow(ctx, `SELECT password FROM users WHERE username = $1`, "new_user").Scan(&hashedPassword)
	require.NoError(t, err)
	assert.NoError(t, bcrypt.CompareHashAndPassword(hashedPassword, []byte(opts.Password)))

	// Test validation failures: illegal username characters, then a
	// too-short password.
	_, err = store.SaveUser(ctx, db.SaveUserOpts{
		Username: "Q!@JH(F_H@#!*HF#*)&@",
		Password: "testpassword12345",
	})
	assert.Error(t, err)
	_, err = store.SaveUser(ctx, db.SaveUserOpts{
		Username: "test_user",
		Password: "<3",
	})
	assert.Error(t, err)
}

// TestSaveApiKey verifies that SaveApiKey persists a key row for the user.
func TestSaveApiKey(t *testing.T) {
	ctx := context.Background()
	setupTestDataForUsers(t)

	// Save an API key for the test user
	label := "New API Key"
	opts := db.SaveApiKeyOpts{
		Key:    "new_api_key",
		Label:  label,
		UserID: 2,
	}
	_, err := store.SaveApiKey(ctx, opts)
	require.NoError(t, err)

	// Verify the API key was saved
	count, err := store.Count(ctx, `SELECT COUNT(*) FROM api_keys WHERE key = $1 AND user_id = $2`, opts.Key, opts.UserID)
	require.NoError(t, err)
	assert.Equal(t, 1, count)
}
|
||||
|
||||
// TestGetApiKeysByUserID verifies that all keys owned by a user are returned.
func TestGetApiKeysByUserID(t *testing.T) {
	ctx := context.Background()
	setupTestDataForUsers(t)

	// Insert API keys for the test user
	err := store.Exec(ctx, `INSERT INTO api_keys (key, label, user_id) VALUES
		('key1', 'Key 1', 2),
		('key2', 'Key 2', 2)`)
	require.NoError(t, err)

	// Fetch API keys for the test user
	keys, err := store.GetApiKeysByUserID(ctx, 2)
	require.NoError(t, err)
	require.Len(t, keys, 2)
	assert.Equal(t, "key1", keys[0].Key)
	assert.Equal(t, "key2", keys[1].Key)
}

// TestUpdateApiKeyLabel verifies relabeling an existing key.
func TestUpdateApiKeyLabel(t *testing.T) {
	ctx := context.Background()
	setupTestDataForUsers(t)

	// Insert an API key for the test user; api_keys identity restarts at 1 in
	// the fixture, so this row gets id 1.
	err := store.Exec(ctx, `INSERT INTO api_keys (key, label, user_id) VALUES ('key_to_update', 'Old Label', 2)`)
	require.NoError(t, err)

	// Update the API key label
	opts := db.UpdateApiKeyLabelOpts{
		ID:     1,
		Label:  "Updated Label",
		UserID: 2,
	}
	err = store.UpdateApiKeyLabel(ctx, opts)
	require.NoError(t, err)

	// Verify the label was updated
	var label string
	err = store.QueryRow(ctx, `SELECT label FROM api_keys WHERE id = $1`, opts.ID).Scan(&label)
	require.NoError(t, err)
	assert.Equal(t, "Updated Label", label)
}

// TestDeleteApiKey verifies that a deleted key no longer exists.
func TestDeleteApiKey(t *testing.T) {
	ctx := context.Background()
	setupTestDataForUsers(t)

	// Insert an API key for the test user
	err := store.Exec(ctx, `INSERT INTO api_keys (key, label, user_id) VALUES ('key_to_delete', 'Label', 2)`)
	require.NoError(t, err)

	// Delete the API key
	err = store.DeleteApiKey(ctx, 1) // Assuming the ID is auto-generated and starts from 1
	require.NoError(t, err)

	// Verify the API key was deleted
	count, err := store.Count(ctx, `SELECT COUNT(*) FROM api_keys WHERE id = $1`, 1)
	require.NoError(t, err)
	assert.Equal(t, 0, count)
}

// TestCountUsers verifies the user count includes fixture users plus the
// pre-existing special user (id 1).
func TestCountUsers(t *testing.T) {
	ctx := context.Background()
	setupTestDataForUsers(t)

	// Count the number of users
	count, err := store.Count(ctx, `SELECT COUNT(*) FROM users`)
	require.NoError(t, err)
	assert.GreaterOrEqual(t, count, 3) // Special user + test users
}
|
||||
26
internal/db/types.go
Normal file
26
internal/db/types.go
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
package db
|
||||
|
||||
import (
|
||||
"time"
|
||||
)
|
||||
|
||||
// InformationSource identifies where a piece of catalog metadata originated.
type InformationSource string

const (
	// InformationSourceInferred marks data derived heuristically by the app.
	InformationSourceInferred InformationSource = "Inferred"
	// InformationSourceMusicBrainz marks data fetched from MusicBrainz.
	InformationSourceMusicBrainz InformationSource = "MusicBrainz"
	// InformationSourceUserProvided marks data entered by a user.
	InformationSourceUserProvided InformationSource = "User"
)

// ListenActivityItem is one bucket in a listen-activity time series: the
// bucket's start time and the number of listens that fall in it.
type ListenActivityItem struct {
	Start   time.Time `json:"start_time"`
	Listens int64     `json:"listens"`
}

// PaginatedResponse is the generic envelope for paginated list endpoints.
type PaginatedResponse[T any] struct {
	Items        []T   `json:"items"`
	TotalCount   int64 `json:"total_record_count"`
	ItemsPerPage int32 `json:"items_per_page"`
	HasNextPage  bool  `json:"has_next_page"`
	CurrentPage  int32 `json:"current_page"`
}
|
||||
189
internal/images/deezer.go
Normal file
189
internal/images/deezer.go
Normal file
|
|
@ -0,0 +1,189 @@
|
|||
package images
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/utils"
|
||||
"github.com/gabehf/koito/queue"
|
||||
)
|
||||
|
||||
// DeezerClient fetches artist and album artwork from the public Deezer API,
// serializing requests through an internal rate-limited queue.
type DeezerClient struct {
	url          string              // API base URL (deezerBaseUrl)
	userAgent    string              // User-Agent header sent on every request
	requestQueue *queue.RequestQueue // serializes/rate-limits outgoing requests
}

// DeezerAlbumResponse is the JSON envelope of Deezer's album search endpoint.
type DeezerAlbumResponse struct {
	Data []DeezerAlbum `json:"data"`
}

// DeezerAlbum is one album search result; the Cover* fields are image URLs
// at different resolutions.
type DeezerAlbum struct {
	Title    string `json:"title"`
	CoverXL  string `json:"cover_xl"`
	CoverSm  string `json:"cover_small"`
	CoverMd  string `json:"cover_medium"`
	CoverBig string `json:"cover_big"`
}

// DeezerArtistResponse is the JSON envelope of Deezer's artist search endpoint.
type DeezerArtistResponse struct {
	Data []DeezerArtist `json:"data"`
}

// DeezerArtist is one artist search result; the Picture* fields are image
// URLs at different resolutions.
type DeezerArtist struct {
	Name       string `json:"name"`
	PictureXL  string `json:"picture_xl"`
	PictureSm  string `json:"picture_small"`
	PictureMd  string `json:"picture_medium"`
	PictureBig string `json:"picture_big"`
}

const (
	deezerBaseUrl = "https://api.deezer.com"
	// Endpoints take a single %s: the URL-escaped search query.
	albumImageEndpoint  = "/search/album?q=%s"
	artistImageEndpoint = "/search/artist?q=%s"
)
|
||||
|
||||
func NewDeezerClient(useragent string) *DeezerClient {
|
||||
ret := new(DeezerClient)
|
||||
ret.url = deezerBaseUrl
|
||||
ret.userAgent = useragent
|
||||
ret.requestQueue = queue.NewRequestQueue(1, 1)
|
||||
return ret
|
||||
}
|
||||
|
||||
func (c *DeezerClient) Shutdown() {
|
||||
c.requestQueue.Shutdown()
|
||||
}
|
||||
|
||||
func (c *DeezerClient) queue(ctx context.Context, req *http.Request) ([]byte, error) {
|
||||
l := logger.FromContext(ctx)
|
||||
req.Header.Set("User-Agent", c.userAgent)
|
||||
req.Header.Set("Accept", "application/json")
|
||||
|
||||
resultChan := c.requestQueue.Enqueue(func(client *http.Client, done chan<- queue.RequestResult) {
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
l.Debug().Err(err).Str("url", req.RequestURI).Msg("Failed to contact ImageSrc")
|
||||
done <- queue.RequestResult{Err: err}
|
||||
return
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
done <- queue.RequestResult{Body: body, Err: err}
|
||||
})
|
||||
|
||||
result := <-resultChan
|
||||
return result.Body, result.Err
|
||||
}
|
||||
|
||||
func (c *DeezerClient) getEntity(ctx context.Context, endpoint string, result any) error {
|
||||
l := logger.FromContext(ctx)
|
||||
url := deezerBaseUrl + endpoint
|
||||
l.Debug().Msgf("Sending request to ImageSrc: GET %s", url)
|
||||
req, err := http.NewRequest("GET", url, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
l.Debug().Msg("Adding ImageSrc request to queue")
|
||||
body, err := c.queue(ctx, req)
|
||||
if err != nil {
|
||||
l.Debug().Err(err)
|
||||
return err
|
||||
}
|
||||
|
||||
err = json.Unmarshal(body, result)
|
||||
if err != nil {
|
||||
l.Debug().Err(err)
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *DeezerClient) GetArtistImages(ctx context.Context, aliases []string) (string, error) {
|
||||
l := logger.FromContext(ctx)
|
||||
resp := new(DeezerArtistResponse)
|
||||
|
||||
aliasesUniq := utils.UniqueIgnoringCase(aliases)
|
||||
aliasesAscii := utils.RemoveNonAscii(aliasesUniq)
|
||||
|
||||
// Deezer very often uses romanized names for foreign artists, so check those first
|
||||
for _, a := range aliasesAscii {
|
||||
err := c.getEntity(ctx, fmt.Sprintf(artistImageEndpoint, url.QueryEscape(fmt.Sprintf("artist:\"%s\"", a))), resp)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if len(resp.Data) < 1 {
|
||||
return "", errors.New("artist image not found")
|
||||
}
|
||||
for _, v := range resp.Data {
|
||||
if strings.EqualFold(v.Name, a) {
|
||||
img := v.PictureXL
|
||||
l.Debug().Msgf("Found artist images for %s: %v", a, img)
|
||||
return img, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// if no romanized name exists or couldn't be found, check the rest
|
||||
for _, a := range utils.RemoveInBoth(aliasesUniq, aliasesAscii) {
|
||||
err := c.getEntity(ctx, fmt.Sprintf(artistImageEndpoint, url.QueryEscape(fmt.Sprintf("artist:\"%s\"", a))), resp)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if len(resp.Data) < 1 {
|
||||
return "", errors.New("artist image not found")
|
||||
}
|
||||
for _, v := range resp.Data {
|
||||
if strings.EqualFold(v.Name, a) {
|
||||
img := v.PictureXL
|
||||
l.Debug().Msgf("Found artist images for %s: %v", a, img)
|
||||
return img, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
return "", errors.New("artist image not found")
|
||||
}
|
||||
|
||||
func (c *DeezerClient) GetAlbumImages(ctx context.Context, artists []string, album string) (string, error) {
|
||||
l := logger.FromContext(ctx)
|
||||
resp := new(DeezerAlbumResponse)
|
||||
l.Debug().Msgf("Finding album image for %s from artist(s) %v", album, artists)
|
||||
// try to find artist + album match for all artists
|
||||
for _, alias := range artists {
|
||||
err := c.getEntity(ctx, fmt.Sprintf(albumImageEndpoint, url.QueryEscape(fmt.Sprintf("artist:\"%s\"album:\"%s\"", alias, album))), resp)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if len(resp.Data) > 0 {
|
||||
for _, v := range resp.Data {
|
||||
if strings.EqualFold(v.Title, album) {
|
||||
img := v.CoverXL
|
||||
l.Debug().Msgf("Found album images for %s: %v", album, img)
|
||||
return img, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// if none are found, try to find an album just by album title
|
||||
err := c.getEntity(ctx, fmt.Sprintf(albumImageEndpoint, url.QueryEscape(fmt.Sprintf("album:\"%s\"", album))), resp)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
for _, v := range resp.Data {
|
||||
if strings.EqualFold(v.Title, album) {
|
||||
img := v.CoverXL
|
||||
l.Debug().Msgf("Found album images for %s: %v", album, img)
|
||||
return img, nil
|
||||
}
|
||||
}
|
||||
|
||||
return "", errors.New("album image not found")
|
||||
}
|
||||
103
internal/images/imagesrc.go
Normal file
103
internal/images/imagesrc.go
Normal file
|
|
@ -0,0 +1,103 @@
|
|||
// package imagesrc defines interfaces for album and artist image providers
|
||||
package images
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"sync"
|
||||
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
// ImageSource holds the globally-configured image providers. Fields are set
// once by Initialize and read by GetArtistImage/GetAlbumImage.
type ImageSource struct {
	deezerEnabled bool          // whether Deezer lookups are allowed
	deezerC       *DeezerClient // Deezer client; nil unless Deezer is enabled
	caaEnabled    bool          // whether CoverArtArchive lookups are allowed
}

// ImageSourceOpts configures Initialize.
type ImageSourceOpts struct {
	UserAgent    string // User-Agent for outgoing provider requests
	EnableCAA    bool   // enable CoverArtArchive album-art lookups
	EnableDeezer bool   // enable Deezer artist/album image lookups
}

// once guards the one-time initialization of the package-level imgsrc state.
var once sync.Once
var imgsrc ImageSource

// ArtistImageOpts describes an artist image lookup.
type ArtistImageOpts struct {
	// Aliases are all known names for the artist; providers try each in turn.
	Aliases []string
}

// AlbumImageOpts describes an album image lookup.
type AlbumImageOpts struct {
	Artists           []string   // candidate artist names for the album
	Album             string     // album title
	ReleaseMbzID      *uuid.UUID // MusicBrainz release ID, if known
	ReleaseGroupMbzID *uuid.UUID // MusicBrainz release-group ID, if known
}

const caaBaseUrl = "https://coverartarchive.org"

// Initialize configures the package-level image source exactly once.
// Subsequent calls are no-ops (sync.Once).
// all functions are no-op if no providers are enabled
func Initialize(opts ImageSourceOpts) {
	once.Do(func() {
		if opts.EnableCAA {
			imgsrc.caaEnabled = true
		}
		if opts.EnableDeezer {
			imgsrc.deezerEnabled = true
			imgsrc.deezerC = NewDeezerClient(opts.UserAgent)
		}
	})
}
|
||||
|
||||
// GetArtistImage returns an artist image URL from the first enabled provider.
// Only Deezer supplies artist images; when no provider is configured it logs
// a warning and returns "" with no error.
func GetArtistImage(ctx context.Context, opts ArtistImageOpts) (string, error) {
	l := logger.FromContext(ctx)
	if imgsrc.deezerC != nil {
		img, err := imgsrc.deezerC.GetArtistImages(ctx, opts.Aliases)
		if err != nil {
			return "", err
		}
		return img, nil
	}
	l.Warn().Msg("No image providers are enabled")
	return "", nil
}
|
||||
func GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) {
|
||||
l := logger.FromContext(ctx)
|
||||
if imgsrc.caaEnabled {
|
||||
l.Debug().Msg("Attempting to find album image from CoverArtArchive")
|
||||
if opts.ReleaseMbzID != nil && *opts.ReleaseMbzID != uuid.Nil {
|
||||
url := fmt.Sprintf(caaBaseUrl+"/release/%s/front", opts.ReleaseMbzID.String())
|
||||
resp, err := http.DefaultClient.Head(url)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if resp.StatusCode == 200 {
|
||||
return url, nil
|
||||
}
|
||||
l.Debug().Str("url", url).Str("status", resp.Status).Msg("Could not find album cover from CoverArtArchive with MusicBrainz release ID")
|
||||
}
|
||||
if opts.ReleaseGroupMbzID != nil && *opts.ReleaseGroupMbzID != uuid.Nil {
|
||||
url := fmt.Sprintf(caaBaseUrl+"/release-group/%s/front", opts.ReleaseGroupMbzID.String())
|
||||
resp, err := http.DefaultClient.Head(url)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if resp.StatusCode == 200 {
|
||||
return url, nil
|
||||
}
|
||||
l.Debug().Str("url", url).Str("status", resp.Status).Msg("Could not find album cover from CoverArtArchive with MusicBrainz release group ID")
|
||||
}
|
||||
}
|
||||
if imgsrc.deezerEnabled {
|
||||
l.Debug().Msg("Attempting to find album image from Deezer")
|
||||
img, err := imgsrc.deezerC.GetAlbumImages(ctx, opts.Artists, opts.Album)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return img, nil
|
||||
}
|
||||
l.Warn().Msg("No image providers are enabled")
|
||||
return "", nil
|
||||
}
|
||||
28
internal/images/mock.go
Normal file
28
internal/images/mock.go
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
package images
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
)
|
||||
|
||||
// MockFinder is a no-op image finder for tests: every lookup succeeds with an
// empty image URL.
type MockFinder struct{}

func (m *MockFinder) GetArtistImage(ctx context.Context, opts ArtistImageOpts) (string, error) {
	return "", nil
}

func (m *MockFinder) GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) {
	return "", nil
}
func (m *MockFinder) Shutdown() {}

// ErrorFinder is a test double whose lookups always fail, for exercising
// error-handling paths.
type ErrorFinder struct{}

func (m *ErrorFinder) GetArtistImage(ctx context.Context, opts ArtistImageOpts) (string, error) {
	return "", errors.New("mock error")
}

func (m *ErrorFinder) GetAlbumImage(ctx context.Context, opts AlbumImageOpts) (string, error) {
	return "", errors.New("mock error")
}
func (m *ErrorFinder) Shutdown() {}
|
||||
1
internal/importer/importer.go
Normal file
1
internal/importer/importer.go
Normal file
|
|
@ -0,0 +1 @@
|
|||
package importer
|
||||
90
internal/importer/maloja.go
Normal file
90
internal/importer/maloja.go
Normal file
|
|
@ -0,0 +1,90 @@
|
|||
package importer
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"os"
|
||||
"path"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/catalog"
|
||||
"github.com/gabehf/koito/internal/cfg"
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/mbz"
|
||||
"github.com/gabehf/koito/internal/utils"
|
||||
)
|
||||
|
||||
// MalojaExport mirrors the top-level structure of a Maloja JSON export file.
type MalojaExport struct {
	Scrobbles []MalojaExportItem `json:"scrobbles"`
}

// MalojaExportItem is a single scrobble: a Unix timestamp plus track info.
type MalojaExportItem struct {
	Time  int64       `json:"time"` // Unix seconds
	Track MalojaTrack `json:"track"`
}

// MalojaTrack describes the track portion of a Maloja scrobble.
type MalojaTrack struct {
	Artists []string `json:"artists"`
	Title   string   `json:"title"`
	Album   struct {
		Title string `json:"albumtitle"`
	} `json:"album"`
}
|
||||
|
||||
func ImportMalojaFile(ctx context.Context, store db.DB, filename string) error {
|
||||
l := logger.FromContext(ctx)
|
||||
l.Info().Msgf("Beginning maloja import on file: %s", filename)
|
||||
file, err := os.Open(path.Join(cfg.ConfigDir(), "import", filename))
|
||||
if err != nil {
|
||||
l.Err(err).Msgf("Failed to read import file: %s", filename)
|
||||
return err
|
||||
}
|
||||
export := new(MalojaExport)
|
||||
err = json.NewDecoder(file).Decode(&export)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, item := range export.Scrobbles {
|
||||
martists := make([]string, 0)
|
||||
// Maloja has a tendency to have the the artist order ['feature', 'main \u2022 feature'], so
|
||||
// here we try to turn that artist array into ['main', 'feature']
|
||||
item.Track.Artists = utils.MoveFirstMatchToFront(item.Track.Artists, " \u2022 ")
|
||||
for _, an := range item.Track.Artists {
|
||||
ans := strings.Split(an, " \u2022 ")
|
||||
martists = append(martists, ans...)
|
||||
}
|
||||
artists := utils.UniqueIgnoringCase(martists)
|
||||
if len(item.Track.Artists) < 1 || item.Track.Title == "" {
|
||||
l.Debug().Msg("Skipping invalid maloja import item")
|
||||
continue
|
||||
}
|
||||
ts := time.Unix(item.Time, 0)
|
||||
opts := catalog.SubmitListenOpts{
|
||||
MbzCaller: &mbz.MusicBrainzClient{},
|
||||
Artist: item.Track.Artists[0],
|
||||
ArtistNames: artists,
|
||||
TrackTitle: item.Track.Title,
|
||||
ReleaseTitle: item.Track.Album.Title,
|
||||
Time: ts,
|
||||
UserID: 1,
|
||||
}
|
||||
err = catalog.SubmitListen(ctx, store, opts)
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to import maloja playback item")
|
||||
return err
|
||||
}
|
||||
}
|
||||
_, err = os.Stat(path.Join(cfg.ConfigDir(), "import_complete"))
|
||||
if err != nil {
|
||||
err = os.Mkdir(path.Join(cfg.ConfigDir(), "import_complete"), 0744)
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to create import_complete dir! Import files must be removed from the import directory manually, or else the importer will run on every app start")
|
||||
}
|
||||
}
|
||||
err = os.Rename(path.Join(cfg.ConfigDir(), "import", filename), path.Join(cfg.ConfigDir(), "import_complete", filename))
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to move file to import_complete dir! Import files must be removed from the import directory manually, or else the importer will run on every app start")
|
||||
}
|
||||
l.Info().Msgf("Finished importing %s; imported %d items", filename, len(export.Scrobbles))
|
||||
return nil
|
||||
}
|
||||
76
internal/importer/spotify.go
Normal file
76
internal/importer/spotify.go
Normal file
|
|
@ -0,0 +1,76 @@
|
|||
package importer
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"os"
|
||||
"path"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/catalog"
|
||||
"github.com/gabehf/koito/internal/cfg"
|
||||
"github.com/gabehf/koito/internal/db"
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/internal/mbz"
|
||||
)
|
||||
|
||||
// SpotifyExportItem is one entry in a Spotify extended streaming history
// export (endsong/Streaming_History JSON files).
type SpotifyExportItem struct {
	Timestamp  time.Time `json:"ts"`                                // when playback ended
	TrackName  string    `json:"master_metadata_track_name"`        // empty for podcasts/non-track items
	ArtistName string    `json:"master_metadata_album_artist_name"` // album artist
	AlbumName  string    `json:"master_metadata_album_album_name"`
	ReasonEnd  string    `json:"reason_end"` // e.g. "trackdone" when played to completion
	MsPlayed   int32     `json:"ms_played"`  // playback duration in milliseconds
}
|
||||
|
||||
func ImportSpotifyFile(ctx context.Context, store db.DB, filename string) error {
|
||||
l := logger.FromContext(ctx)
|
||||
l.Info().Msgf("Beginning spotify import on file: %s", filename)
|
||||
file, err := os.Open(path.Join(cfg.ConfigDir(), "import", filename))
|
||||
if err != nil {
|
||||
l.Err(err).Msgf("Failed to read import file: %s", filename)
|
||||
return err
|
||||
}
|
||||
export := make([]SpotifyExportItem, 0)
|
||||
err = json.NewDecoder(file).Decode(&export)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, item := range export {
|
||||
if item.ReasonEnd != "trackdone" {
|
||||
continue
|
||||
}
|
||||
dur := item.MsPlayed
|
||||
if item.TrackName == "" || item.ArtistName == "" {
|
||||
l.Debug().Msg("Skipping non-track item")
|
||||
continue
|
||||
}
|
||||
opts := catalog.SubmitListenOpts{
|
||||
MbzCaller: &mbz.MusicBrainzClient{},
|
||||
Artist: item.ArtistName,
|
||||
TrackTitle: item.TrackName,
|
||||
ReleaseTitle: item.AlbumName,
|
||||
Duration: dur / 1000,
|
||||
Time: item.Timestamp,
|
||||
UserID: 1,
|
||||
}
|
||||
err = catalog.SubmitListen(ctx, store, opts)
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to import spotify playback item")
|
||||
return err
|
||||
}
|
||||
}
|
||||
_, err = os.Stat(path.Join(cfg.ConfigDir(), "import_complete"))
|
||||
if err != nil {
|
||||
err = os.Mkdir(path.Join(cfg.ConfigDir(), "import_complete"), 0744)
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to create import_complete dir! Import files must be removed from the import directory manually, or else the importer will run on every app start")
|
||||
}
|
||||
}
|
||||
err = os.Rename(path.Join(cfg.ConfigDir(), "import", filename), path.Join(cfg.ConfigDir(), "import_complete", filename))
|
||||
if err != nil {
|
||||
l.Err(err).Msg("Failed to move file to import_complete dir! Import files must be removed from the import directory manually, or else the importer will run on every app start")
|
||||
}
|
||||
l.Info().Msgf("Finished importing %s; imported %d items", filename, len(export))
|
||||
return nil
|
||||
}
|
||||
57
internal/logger/logger.go
Normal file
57
internal/logger/logger.go
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
package logger
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"os"
|
||||
"sync"
|
||||
|
||||
"github.com/gabehf/koito/internal/cfg"
|
||||
"github.com/rs/zerolog"
|
||||
)
|
||||
|
||||
// once guards the one-time construction of the package-level logger in Get.
var once sync.Once

// logger is the lazily-initialized process-wide logger; access it via Get.
var logger zerolog.Logger

// Define a key type to avoid context key collisions
type contextKey string

// loggerKey is the context key under which Inject/NewContext store the logger.
const loggerKey contextKey = "logger"
|
||||
|
||||
func Get() *zerolog.Logger {
|
||||
once.Do(func() {
|
||||
zerolog.TimeFieldFormat = zerolog.TimeFormatUnixMs
|
||||
|
||||
logLevel := cfg.LogLevel()
|
||||
|
||||
logger = zerolog.New(os.Stdout).
|
||||
Level(zerolog.Level(logLevel)).
|
||||
With().
|
||||
Timestamp().
|
||||
// Caller().
|
||||
Logger()
|
||||
})
|
||||
return &logger
|
||||
}
|
||||
|
||||
// injects the logger into context
|
||||
func Inject(r *http.Request, l *zerolog.Logger) *http.Request {
|
||||
ctx := context.WithValue(r.Context(), loggerKey, l)
|
||||
r = r.WithContext(ctx)
|
||||
return r
|
||||
}
|
||||
|
||||
func NewContext(l *zerolog.Logger) context.Context {
|
||||
ctx := context.WithValue(context.Background(), loggerKey, l)
|
||||
return ctx
|
||||
}
|
||||
|
||||
// retrieves the logger from context
|
||||
func FromContext(ctx context.Context) *zerolog.Logger {
|
||||
logger, ok := ctx.Value(loggerKey).(*zerolog.Logger)
|
||||
if !ok || logger == nil {
|
||||
defaultLogger := zerolog.New(os.Stdout)
|
||||
return &defaultLogger
|
||||
}
|
||||
return logger
|
||||
}
|
||||
57
internal/mbz/artist.go
Normal file
57
internal/mbz/artist.go
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
package mbz
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"slices"
|
||||
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
type MusicBrainzArtist struct {
|
||||
Name string `json:"name"`
|
||||
SortName string `json:"sort_name"`
|
||||
Gender string `json:"gender"`
|
||||
Area MusicBrainzArea `json:"area"`
|
||||
Aliases []MusicBrainzArtistAlias `json:"aliases"`
|
||||
}
|
||||
type MusicBrainzArtistAlias struct {
|
||||
Name string `json:"name"`
|
||||
Type string `json:"type"`
|
||||
Primary bool `json:"primary"`
|
||||
}
|
||||
|
||||
const artistAliasFmtStr = "%s/ws/2/artist/%s?inc=aliases"
|
||||
|
||||
func (c *MusicBrainzClient) getArtist(ctx context.Context, id uuid.UUID) (*MusicBrainzArtist, error) {
|
||||
var mbzArtist *MusicBrainzArtist
|
||||
err := c.getEntity(ctx, artistAliasFmtStr, id, mbzArtist)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return mbzArtist, nil
|
||||
}
|
||||
|
||||
// Returns the artist name at index 0, and all primary aliases after.
|
||||
func (c *MusicBrainzClient) GetArtistPrimaryAliases(ctx context.Context, id uuid.UUID) ([]string, error) {
|
||||
l := logger.FromContext(ctx)
|
||||
artist, err := c.getArtist(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if artist == nil {
|
||||
return nil, errors.New("artist could not be found by musicbrainz")
|
||||
}
|
||||
used := make(map[string]bool)
|
||||
ret := make([]string, 1)
|
||||
ret[0] = artist.Name
|
||||
used[artist.Name] = true
|
||||
for _, alias := range artist.Aliases {
|
||||
if alias.Primary && !slices.Contains(ret, alias.Name) {
|
||||
l.Debug().Msgf("Found primary alias '%s' for artist '%s'", alias.Name, artist.Name)
|
||||
ret = append(ret, alias.Name)
|
||||
}
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
92
internal/mbz/mbz.go
Normal file
92
internal/mbz/mbz.go
Normal file
|
|
@ -0,0 +1,92 @@
|
|||
// package mbz provides functions for interacting with the musicbrainz api
|
||||
package mbz
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
|
||||
"github.com/gabehf/koito/internal/cfg"
|
||||
"github.com/gabehf/koito/internal/logger"
|
||||
"github.com/gabehf/koito/queue"
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
// MusicBrainzArea is the area (typically a country) attached to a
// MusicBrainz artist.
type MusicBrainzArea struct {
	Name           string   `json:"name"`
	Iso3166_1Codes []string `json:"iso-3166-1-codes"` // ISO 3166-1 country codes
}
|
||||
|
||||
// MusicBrainzClient talks to a MusicBrainz-compatible web service,
// serializing all HTTP requests through an internal request queue.
type MusicBrainzClient struct {
	url          string              // base API URL (from config)
	userAgent    string              // sent as User-Agent, per MusicBrainz API etiquette
	requestQueue *queue.RequestQueue // rate-limits outgoing requests
}
|
||||
|
||||
// MusicBrainzCaller abstracts the MusicBrainz client so tests can
// substitute the mock implementations in mock.go.
type MusicBrainzCaller interface {
	// GetArtistPrimaryAliases returns the artist name followed by its primary aliases.
	GetArtistPrimaryAliases(ctx context.Context, id uuid.UUID) ([]string, error)
	// GetReleaseTitles returns the distinct release titles within a release group.
	GetReleaseTitles(ctx context.Context, RGID uuid.UUID) ([]string, error)
	// GetTrack fetches a recording by MBID.
	GetTrack(ctx context.Context, id uuid.UUID) (*MusicBrainzTrack, error)
	// GetReleaseGroup fetches a release group (with releases and artists) by MBID.
	GetReleaseGroup(ctx context.Context, id uuid.UUID) (*MusicBrainzReleaseGroup, error)
	// GetRelease fetches a release (with artists) by MBID.
	GetRelease(ctx context.Context, id uuid.UUID) (*MusicBrainzRelease, error)
	// Shutdown stops any background request processing.
	Shutdown()
}
|
||||
|
||||
func NewMusicBrainzClient() *MusicBrainzClient {
|
||||
ret := new(MusicBrainzClient)
|
||||
ret.url = cfg.MusicBrainzUrl()
|
||||
ret.userAgent = "Koito/0.0.1 (contact@koito.app)"
|
||||
ret.requestQueue = queue.NewRequestQueue(1, 1)
|
||||
return ret
|
||||
}
|
||||
|
||||
// Shutdown stops the client's underlying request queue.
func (c *MusicBrainzClient) Shutdown() {
	c.requestQueue.Shutdown()
}
|
||||
|
||||
func (c *MusicBrainzClient) getEntity(ctx context.Context, fmtStr string, id uuid.UUID, result any) error {
|
||||
l := logger.FromContext(ctx)
|
||||
url := fmt.Sprintf(fmtStr, c.url, id.String())
|
||||
req, err := http.NewRequest("GET", url, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
l.Debug().Msg("Adding MusicBrainz request to queue")
|
||||
body, err := c.queue(ctx, req)
|
||||
if err != nil {
|
||||
l.Debug().Err(err)
|
||||
return err
|
||||
}
|
||||
|
||||
err = json.Unmarshal(body, result)
|
||||
if err != nil {
|
||||
l.Debug().Err(err)
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *MusicBrainzClient) queue(ctx context.Context, req *http.Request) ([]byte, error) {
|
||||
l := logger.FromContext(ctx)
|
||||
req.Header.Set("User-Agent", c.userAgent)
|
||||
req.Header.Set("Accept", "application/json")
|
||||
|
||||
resultChan := c.requestQueue.Enqueue(func(client *http.Client, done chan<- queue.RequestResult) {
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
l.Debug().Err(err).Str("url", req.RequestURI).Msg("Failed to contact MusicBrainz")
|
||||
done <- queue.RequestResult{Err: err}
|
||||
return
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
done <- queue.RequestResult{Body: body, Err: err}
|
||||
})
|
||||
|
||||
result := <-resultChan
|
||||
return result.Body, result.Err
|
||||
}
|
||||
93
internal/mbz/mock.go
Normal file
93
internal/mbz/mock.go
Normal file
|
|
@ -0,0 +1,93 @@
|
|||
package mbz
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"slices"
|
||||
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
// MbzMockCaller implements MusicBrainzCaller backed by in-memory maps,
// for use in tests. Populate the maps with the entities a test expects;
// lookups for missing IDs return errors.
type MbzMockCaller struct {
	Artists       map[uuid.UUID]*MusicBrainzArtist
	ReleaseGroups map[uuid.UUID]*MusicBrainzReleaseGroup
	Releases      map[uuid.UUID]*MusicBrainzRelease
	Tracks        map[uuid.UUID]*MusicBrainzTrack
}
|
||||
|
||||
func (m *MbzMockCaller) GetReleaseGroup(ctx context.Context, id uuid.UUID) (*MusicBrainzReleaseGroup, error) {
|
||||
releaseGroup, exists := m.ReleaseGroups[id]
|
||||
if !exists {
|
||||
return nil, fmt.Errorf("release group with ID %s not found", id)
|
||||
}
|
||||
return releaseGroup, nil
|
||||
}
|
||||
|
||||
func (m *MbzMockCaller) GetRelease(ctx context.Context, id uuid.UUID) (*MusicBrainzRelease, error) {
|
||||
release, exists := m.Releases[id]
|
||||
if !exists {
|
||||
return nil, fmt.Errorf("release group with ID %s not found", id)
|
||||
}
|
||||
return release, nil
|
||||
}
|
||||
|
||||
func (m *MbzMockCaller) GetReleaseTitles(ctx context.Context, RGID uuid.UUID) ([]string, error) {
|
||||
rg, exists := m.ReleaseGroups[RGID]
|
||||
if !exists {
|
||||
return nil, fmt.Errorf("release with ID %s not found", RGID)
|
||||
}
|
||||
|
||||
var titles []string
|
||||
for _, release := range rg.Releases {
|
||||
if !slices.Contains(titles, release.Title) {
|
||||
titles = append(titles, release.Title)
|
||||
}
|
||||
}
|
||||
return titles, nil
|
||||
}
|
||||
|
||||
func (m *MbzMockCaller) GetTrack(ctx context.Context, id uuid.UUID) (*MusicBrainzTrack, error) {
|
||||
track, exists := m.Tracks[id]
|
||||
if !exists {
|
||||
return nil, fmt.Errorf("track with ID %s not found", id)
|
||||
}
|
||||
return track, nil
|
||||
}
|
||||
|
||||
func (m *MbzMockCaller) GetArtistPrimaryAliases(ctx context.Context, id uuid.UUID) ([]string, error) {
|
||||
name := m.Artists[id].Name
|
||||
ss := make([]string, len(m.Artists[id].Aliases)+1)
|
||||
ss[0] = name
|
||||
for i, alias := range m.Artists[id].Aliases {
|
||||
ss[i+1] = alias.Name
|
||||
}
|
||||
return ss, nil
|
||||
}
|
||||
|
||||
// Shutdown satisfies MusicBrainzCaller; the mock holds no resources.
func (m *MbzMockCaller) Shutdown() {}
|
||||
|
||||
// MbzErrorCaller implements MusicBrainzCaller with every lookup failing,
// for tests that exercise error-handling paths.
type MbzErrorCaller struct{}

// GetReleaseGroup always returns an error.
func (m *MbzErrorCaller) GetReleaseGroup(ctx context.Context, id uuid.UUID) (*MusicBrainzReleaseGroup, error) {
	return nil, fmt.Errorf("error: GetReleaseGroup not implemented")
}

// GetRelease always returns an error.
func (m *MbzErrorCaller) GetRelease(ctx context.Context, id uuid.UUID) (*MusicBrainzRelease, error) {
	return nil, fmt.Errorf("error: GetRelease not implemented")
}

// GetReleaseTitles always returns an error.
func (m *MbzErrorCaller) GetReleaseTitles(ctx context.Context, RGID uuid.UUID) ([]string, error) {
	return nil, fmt.Errorf("error: GetReleaseTitles not implemented")
}

// GetTrack always returns an error.
func (m *MbzErrorCaller) GetTrack(ctx context.Context, id uuid.UUID) (*MusicBrainzTrack, error) {
	return nil, fmt.Errorf("error: GetTrack not implemented")
}

// GetArtistPrimaryAliases always returns an error.
func (m *MbzErrorCaller) GetArtistPrimaryAliases(ctx context.Context, id uuid.UUID) ([]string, error) {
	return nil, fmt.Errorf("error: GetArtistPrimaryAliases not implemented")
}

// Shutdown is a no-op.
func (m *MbzErrorCaller) Shutdown() {}
|
||||
92
internal/mbz/release.go
Normal file
92
internal/mbz/release.go
Normal file
|
|
@ -0,0 +1,92 @@
|
|||
package mbz
|
||||
|
||||
import (
|
||||
"context"
|
||||
"slices"
|
||||
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
type MusicBrainzReleaseGroup struct {
|
||||
Title string `json:"title"`
|
||||
Type string `json:"primary_type"`
|
||||
ArtistCredit []MusicBrainzArtistCredit `json:"artist-credit"`
|
||||
Releases []MusicBrainzRelease `json:"releases"`
|
||||
}
|
||||
type MusicBrainzRelease struct {
|
||||
Title string `json:"title"`
|
||||
ID string `json:"id"`
|
||||
ArtistCredit []MusicBrainzArtistCredit `json:"artist-credit"`
|
||||
Status string `json:"status"`
|
||||
TextRepresentation TextRepresentation `json:"text-representation"`
|
||||
}
|
||||
type MusicBrainzArtistCredit struct {
|
||||
Artist MusicBrainzArtist `json:"artist"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
type TextRepresentation struct {
|
||||
Language string `json:"language"`
|
||||
Script string `json:"script"`
|
||||
}
|
||||
|
||||
// releaseGroupFmtStr expands to <base>/ws/2/release-group/<mbid>?inc=releases+artists.
const releaseGroupFmtStr = "%s/ws/2/release-group/%s?inc=releases+artists"

// releaseFmtStr expands to <base>/ws/2/release/<mbid>?inc=artists.
const releaseFmtStr = "%s/ws/2/release/%s?inc=artists"
|
||||
|
||||
func (c *MusicBrainzClient) GetReleaseGroup(ctx context.Context, id uuid.UUID) (*MusicBrainzReleaseGroup, error) {
|
||||
mbzRG := new(MusicBrainzReleaseGroup)
|
||||
err := c.getEntity(ctx, releaseGroupFmtStr, id, mbzRG)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return mbzRG, nil
|
||||
}
|
||||
|
||||
func (c *MusicBrainzClient) GetRelease(ctx context.Context, id uuid.UUID) (*MusicBrainzRelease, error) {
|
||||
mbzRelease := new(MusicBrainzRelease)
|
||||
err := c.getEntity(ctx, releaseFmtStr, id, mbzRelease)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return mbzRelease, nil
|
||||
}
|
||||
|
||||
func (c *MusicBrainzClient) GetReleaseTitles(ctx context.Context, RGID uuid.UUID) ([]string, error) {
|
||||
releaseGroup, err := c.GetReleaseGroup(ctx, RGID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var titles []string
|
||||
for _, release := range releaseGroup.Releases {
|
||||
if !slices.Contains(titles, release.Title) {
|
||||
titles = append(titles, release.Title)
|
||||
}
|
||||
}
|
||||
|
||||
return titles, nil
|
||||
}
|
||||
|
||||
func ReleaseGroupToTitles(rg *MusicBrainzReleaseGroup) []string {
|
||||
var titles []string
|
||||
for _, release := range rg.Releases {
|
||||
if !slices.Contains(titles, release.Title) {
|
||||
titles = append(titles, release.Title)
|
||||
}
|
||||
}
|
||||
return titles
|
||||
}
|
||||
|
||||
// Searches for Pseudo-Releases of release groups with Latin script, and returns them as an array
|
||||
func (c *MusicBrainzClient) GetLatinTitles(ctx context.Context, id uuid.UUID) ([]string, error) {
|
||||
rg, err := c.GetReleaseGroup(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
titles := make([]string, 0)
|
||||
for _, r := range rg.Releases {
|
||||
if r.Status == "Pseudo-Release" && r.TextRepresentation.Script == "Latn" { // not a typo
|
||||
titles = append(titles, r.Title)
|
||||
}
|
||||
}
|
||||
return titles, nil
|
||||
}
|
||||
23
internal/mbz/track.go
Normal file
23
internal/mbz/track.go
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
package mbz
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
// MusicBrainzTrack is the subset of the MusicBrainz recording entity this
// application consumes.
type MusicBrainzTrack struct {
	Title string `json:"title"`
}
|
||||
|
||||
// recordingFmtStr expands to <base>/ws/2/recording/<mbid>.
const recordingFmtStr = "%s/ws/2/recording/%s"

// GetTrack fetches the recording for the given MBID.
// (The previous comment here was copy-pasted from the artist-alias
// function and described the wrong behavior.)
func (c *MusicBrainzClient) GetTrack(ctx context.Context, id uuid.UUID) (*MusicBrainzTrack, error) {
	track := new(MusicBrainzTrack)
	err := c.getEntity(ctx, recordingFmtStr, id, track)
	if err != nil {
		return nil, err
	}
	return track, nil
}
|
||||
20
internal/models/album.go
Normal file
20
internal/models/album.go
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
package models
|
||||
|
||||
import "github.com/google/uuid"
|
||||
|
||||
// Album is the API representation of a release together with its credited
// artists and aggregate listen count.
type Album struct {
	ID             int32          `json:"id"`
	MbzID          *uuid.UUID     `json:"musicbrainz_id"` // nil when not matched to MusicBrainz
	Title          string         `json:"title"`
	Image          *uuid.UUID     `json:"image"` // nil when no artwork is stored
	Artists        []SimpleArtist `json:"artists"`
	VariousArtists bool           `json:"is_various_artists"`
	ListenCount    int64          `json:"listen_count"`
}
|
||||
|
||||
// type SimpleAlbum struct {
|
||||
// ID int32 `json:"id"`
|
||||
// Title string `json:"title"`
|
||||
// VariousArtists bool `json:"is_various_artists"`
|
||||
// Image uuid.UUID `json:"image"`
|
||||
// }
|
||||
8
internal/models/alias.go
Normal file
8
internal/models/alias.go
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
package models
|
||||
|
||||
// Alias is an alternate name for an artist, release, or track, with the
// source it came from (e.g. MusicBrainz) and whether it is the primary
// display name.
type Alias struct {
	ID      int32  `json:"id"`
	Alias   string `json:"alias"`
	Source  string `json:"source"`
	Primary bool   `json:"is_primary"`
}
|
||||
17
internal/models/artist.go
Normal file
17
internal/models/artist.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
package models
|
||||
|
||||
import "github.com/google/uuid"
|
||||
|
||||
// Artist is the API representation of an artist with its aliases and
// aggregate listen count.
type Artist struct {
	ID          int32      `json:"id"`
	MbzID       *uuid.UUID `json:"musicbrainz_id"` // nil when not matched to MusicBrainz
	Name        string     `json:"name"`
	Aliases     []string   `json:"aliases"`
	Image       *uuid.UUID `json:"image"` // nil when no image is stored
	ListenCount int64      `json:"listen_count"`
}

// SimpleArtist is a lightweight artist reference embedded in albums,
// tracks, and listens.
type SimpleArtist struct {
	ID   int32  `json:"id"`
	Name string `json:"name"`
}
|
||||
11
internal/models/listen.go
Normal file
11
internal/models/listen.go
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
package models
|
||||
|
||||
import (
|
||||
"time"
|
||||
)
|
||||
|
||||
// Listen records a single playback of a track at a point in time
// (equivalent to what other services call a "scrobble").
type Listen struct {
	Time  time.Time `json:"time"`
	Track Track     `json:"track"`
}
|
||||
14
internal/models/track.go
Normal file
14
internal/models/track.go
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
package models
|
||||
|
||||
import "github.com/google/uuid"
|
||||
|
||||
// Track is the API representation of a track with its credited artists
// and aggregate listen count.
type Track struct {
	ID          int32          `json:"id"`
	Title       string         `json:"title"`
	Artists     []SimpleArtist `json:"artists"`
	MbzID       *uuid.UUID     `json:"musicbrainz_id"` // nil when not matched to MusicBrainz
	ListenCount int64          `json:"listen_count"`
	Duration    int32          `json:"duration"` // seconds
	Image       *uuid.UUID     `json:"image"`    // nil when no artwork is stored
	AlbumID     int32          `json:"album_id"`
}
|
||||
37
internal/models/user.go
Normal file
37
internal/models/user.go
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
package models
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
// UserRole distinguishes privilege levels for users.
type UserRole string

const (
	UserRoleUser  UserRole = "user"
	UserRoleAdmin UserRole = "admin"
)

// User is an account that can log in and own listens.
type User struct {
	ID       int32    `json:"id"`
	Username string   `json:"username"`
	Role     UserRole `json:"role"` // 'admin' | 'user'
	Password []byte   `json:"-"`    // password hash; never serialized
}

// ApiKey is a token that authenticates API requests on behalf of a user.
type ApiKey struct {
	ID        int32     `json:"id"`
	Key       string    `json:"key"`
	Label     string    `json:"label"` // human-readable name chosen by the user
	UserID    int32     `json:"user_id"`
	CreatedAt time.Time `json:"created_at"`
}

// Session is a browser login session; Persistent sessions outlive the
// browser session ("remember me").
type Session struct {
	ID         uuid.UUID
	UserID     int32
	CreatedAt  time.Time
	ExpiresAt  time.Time
	Persistent bool
}
|
||||
316
internal/repository/alias.sql.go
Normal file
316
internal/repository/alias.sql.go
Normal file
|
|
@ -0,0 +1,316 @@
|
|||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
// source: alias.sql
|
||||
|
||||
package repository
|
||||
|
||||
import (
|
||||
"context"
|
||||
)
|
||||
|
||||
const deleteArtistAlias = `-- name: DeleteArtistAlias :exec
|
||||
DELETE FROM artist_aliases
|
||||
WHERE artist_id = $1
|
||||
AND alias = $2
|
||||
AND is_primary = false
|
||||
`
|
||||
|
||||
type DeleteArtistAliasParams struct {
|
||||
ArtistID int32
|
||||
Alias string
|
||||
}
|
||||
|
||||
func (q *Queries) DeleteArtistAlias(ctx context.Context, arg DeleteArtistAliasParams) error {
|
||||
_, err := q.db.Exec(ctx, deleteArtistAlias, arg.ArtistID, arg.Alias)
|
||||
return err
|
||||
}
|
||||
|
||||
const deleteReleaseAlias = `-- name: DeleteReleaseAlias :exec
|
||||
DELETE FROM release_aliases
|
||||
WHERE release_id = $1
|
||||
AND alias = $2
|
||||
AND is_primary = false
|
||||
`
|
||||
|
||||
type DeleteReleaseAliasParams struct {
|
||||
ReleaseID int32
|
||||
Alias string
|
||||
}
|
||||
|
||||
func (q *Queries) DeleteReleaseAlias(ctx context.Context, arg DeleteReleaseAliasParams) error {
|
||||
_, err := q.db.Exec(ctx, deleteReleaseAlias, arg.ReleaseID, arg.Alias)
|
||||
return err
|
||||
}
|
||||
|
||||
const deleteTrackAlias = `-- name: DeleteTrackAlias :exec
|
||||
DELETE FROM track_aliases
|
||||
WHERE track_id = $1
|
||||
AND alias = $2
|
||||
AND is_primary = false
|
||||
`
|
||||
|
||||
type DeleteTrackAliasParams struct {
|
||||
TrackID int32
|
||||
Alias string
|
||||
}
|
||||
|
||||
func (q *Queries) DeleteTrackAlias(ctx context.Context, arg DeleteTrackAliasParams) error {
|
||||
_, err := q.db.Exec(ctx, deleteTrackAlias, arg.TrackID, arg.Alias)
|
||||
return err
|
||||
}
|
||||
|
||||
const getAllArtistAliases = `-- name: GetAllArtistAliases :many
|
||||
SELECT artist_id, alias, source, is_primary FROM artist_aliases
|
||||
WHERE artist_id = $1 ORDER BY is_primary DESC
|
||||
`
|
||||
|
||||
func (q *Queries) GetAllArtistAliases(ctx context.Context, artistID int32) ([]ArtistAlias, error) {
|
||||
rows, err := q.db.Query(ctx, getAllArtistAliases, artistID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []ArtistAlias
|
||||
for rows.Next() {
|
||||
var i ArtistAlias
|
||||
if err := rows.Scan(
|
||||
&i.ArtistID,
|
||||
&i.Alias,
|
||||
&i.Source,
|
||||
&i.IsPrimary,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const getAllReleaseAliases = `-- name: GetAllReleaseAliases :many
|
||||
SELECT release_id, alias, source, is_primary FROM release_aliases
|
||||
WHERE release_id = $1 ORDER BY is_primary DESC
|
||||
`
|
||||
|
||||
func (q *Queries) GetAllReleaseAliases(ctx context.Context, releaseID int32) ([]ReleaseAlias, error) {
|
||||
rows, err := q.db.Query(ctx, getAllReleaseAliases, releaseID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []ReleaseAlias
|
||||
for rows.Next() {
|
||||
var i ReleaseAlias
|
||||
if err := rows.Scan(
|
||||
&i.ReleaseID,
|
||||
&i.Alias,
|
||||
&i.Source,
|
||||
&i.IsPrimary,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const getAllTrackAliases = `-- name: GetAllTrackAliases :many
|
||||
SELECT track_id, alias, is_primary, source FROM track_aliases
|
||||
WHERE track_id = $1 ORDER BY is_primary DESC
|
||||
`
|
||||
|
||||
func (q *Queries) GetAllTrackAliases(ctx context.Context, trackID int32) ([]TrackAlias, error) {
|
||||
rows, err := q.db.Query(ctx, getAllTrackAliases, trackID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []TrackAlias
|
||||
for rows.Next() {
|
||||
var i TrackAlias
|
||||
if err := rows.Scan(
|
||||
&i.TrackID,
|
||||
&i.Alias,
|
||||
&i.IsPrimary,
|
||||
&i.Source,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const getArtistAlias = `-- name: GetArtistAlias :one
|
||||
SELECT artist_id, alias, source, is_primary FROM artist_aliases
|
||||
WHERE alias = $1 LIMIT 1
|
||||
`
|
||||
|
||||
func (q *Queries) GetArtistAlias(ctx context.Context, alias string) (ArtistAlias, error) {
|
||||
row := q.db.QueryRow(ctx, getArtistAlias, alias)
|
||||
var i ArtistAlias
|
||||
err := row.Scan(
|
||||
&i.ArtistID,
|
||||
&i.Alias,
|
||||
&i.Source,
|
||||
&i.IsPrimary,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getReleaseAlias = `-- name: GetReleaseAlias :one
|
||||
SELECT release_id, alias, source, is_primary FROM release_aliases
|
||||
WHERE alias = $1 LIMIT 1
|
||||
`
|
||||
|
||||
func (q *Queries) GetReleaseAlias(ctx context.Context, alias string) (ReleaseAlias, error) {
|
||||
row := q.db.QueryRow(ctx, getReleaseAlias, alias)
|
||||
var i ReleaseAlias
|
||||
err := row.Scan(
|
||||
&i.ReleaseID,
|
||||
&i.Alias,
|
||||
&i.Source,
|
||||
&i.IsPrimary,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getTrackAlias = `-- name: GetTrackAlias :one
|
||||
SELECT track_id, alias, is_primary, source FROM track_aliases
|
||||
WHERE alias = $1 LIMIT 1
|
||||
`
|
||||
|
||||
func (q *Queries) GetTrackAlias(ctx context.Context, alias string) (TrackAlias, error) {
|
||||
row := q.db.QueryRow(ctx, getTrackAlias, alias)
|
||||
var i TrackAlias
|
||||
err := row.Scan(
|
||||
&i.TrackID,
|
||||
&i.Alias,
|
||||
&i.IsPrimary,
|
||||
&i.Source,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const insertArtistAlias = `-- name: InsertArtistAlias :exec
|
||||
INSERT INTO artist_aliases (artist_id, alias, source, is_primary)
|
||||
VALUES ($1, $2, $3, $4)
|
||||
ON CONFLICT DO NOTHING
|
||||
`
|
||||
|
||||
type InsertArtistAliasParams struct {
|
||||
ArtistID int32
|
||||
Alias string
|
||||
Source string
|
||||
IsPrimary bool
|
||||
}
|
||||
|
||||
func (q *Queries) InsertArtistAlias(ctx context.Context, arg InsertArtistAliasParams) error {
|
||||
_, err := q.db.Exec(ctx, insertArtistAlias,
|
||||
arg.ArtistID,
|
||||
arg.Alias,
|
||||
arg.Source,
|
||||
arg.IsPrimary,
|
||||
)
|
||||
return err
|
||||
}
|
||||
|
||||
const insertReleaseAlias = `-- name: InsertReleaseAlias :exec
|
||||
INSERT INTO release_aliases (release_id, alias, source, is_primary)
|
||||
VALUES ($1, $2, $3, $4)
|
||||
ON CONFLICT DO NOTHING
|
||||
`
|
||||
|
||||
type InsertReleaseAliasParams struct {
|
||||
ReleaseID int32
|
||||
Alias string
|
||||
Source string
|
||||
IsPrimary bool
|
||||
}
|
||||
|
||||
func (q *Queries) InsertReleaseAlias(ctx context.Context, arg InsertReleaseAliasParams) error {
|
||||
_, err := q.db.Exec(ctx, insertReleaseAlias,
|
||||
arg.ReleaseID,
|
||||
arg.Alias,
|
||||
arg.Source,
|
||||
arg.IsPrimary,
|
||||
)
|
||||
return err
|
||||
}
|
||||
|
||||
const insertTrackAlias = `-- name: InsertTrackAlias :exec
|
||||
INSERT INTO track_aliases (track_id, alias, source, is_primary)
|
||||
VALUES ($1, $2, $3, $4)
|
||||
ON CONFLICT DO NOTHING
|
||||
`
|
||||
|
||||
type InsertTrackAliasParams struct {
|
||||
TrackID int32
|
||||
Alias string
|
||||
Source string
|
||||
IsPrimary bool
|
||||
}
|
||||
|
||||
func (q *Queries) InsertTrackAlias(ctx context.Context, arg InsertTrackAliasParams) error {
|
||||
_, err := q.db.Exec(ctx, insertTrackAlias,
|
||||
arg.TrackID,
|
||||
arg.Alias,
|
||||
arg.Source,
|
||||
arg.IsPrimary,
|
||||
)
|
||||
return err
|
||||
}
|
||||
|
||||
const setArtistAliasPrimaryStatus = `-- name: SetArtistAliasPrimaryStatus :exec
|
||||
UPDATE artist_aliases SET is_primary = $1 WHERE artist_id = $2 AND alias = $3
|
||||
`
|
||||
|
||||
type SetArtistAliasPrimaryStatusParams struct {
|
||||
IsPrimary bool
|
||||
ArtistID int32
|
||||
Alias string
|
||||
}
|
||||
|
||||
func (q *Queries) SetArtistAliasPrimaryStatus(ctx context.Context, arg SetArtistAliasPrimaryStatusParams) error {
|
||||
_, err := q.db.Exec(ctx, setArtistAliasPrimaryStatus, arg.IsPrimary, arg.ArtistID, arg.Alias)
|
||||
return err
|
||||
}
|
||||
|
||||
const setReleaseAliasPrimaryStatus = `-- name: SetReleaseAliasPrimaryStatus :exec
|
||||
UPDATE release_aliases SET is_primary = $1 WHERE release_id = $2 AND alias = $3
|
||||
`
|
||||
|
||||
type SetReleaseAliasPrimaryStatusParams struct {
|
||||
IsPrimary bool
|
||||
ReleaseID int32
|
||||
Alias string
|
||||
}
|
||||
|
||||
func (q *Queries) SetReleaseAliasPrimaryStatus(ctx context.Context, arg SetReleaseAliasPrimaryStatusParams) error {
|
||||
_, err := q.db.Exec(ctx, setReleaseAliasPrimaryStatus, arg.IsPrimary, arg.ReleaseID, arg.Alias)
|
||||
return err
|
||||
}
|
||||
|
||||
const setTrackAliasPrimaryStatus = `-- name: SetTrackAliasPrimaryStatus :exec
|
||||
UPDATE track_aliases SET is_primary = $1 WHERE track_id = $2 AND alias = $3
|
||||
`
|
||||
|
||||
type SetTrackAliasPrimaryStatusParams struct {
|
||||
IsPrimary bool
|
||||
TrackID int32
|
||||
Alias string
|
||||
}
|
||||
|
||||
func (q *Queries) SetTrackAliasPrimaryStatus(ctx context.Context, arg SetTrackAliasPrimaryStatusParams) error {
|
||||
_, err := q.db.Exec(ctx, setTrackAliasPrimaryStatus, arg.IsPrimary, arg.TrackID, arg.Alias)
|
||||
return err
|
||||
}
|
||||
418
internal/repository/artist.sql.go
Normal file
418
internal/repository/artist.sql.go
Normal file
|
|
@ -0,0 +1,418 @@
|
|||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
// source: artist.sql
|
||||
|
||||
package repository
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/jackc/pgx/v5/pgtype"
|
||||
)
|
||||
|
||||
const countTopArtists = `-- name: CountTopArtists :one
|
||||
SELECT COUNT(DISTINCT at.artist_id) AS total_count
|
||||
FROM listens l
|
||||
JOIN artist_tracks at ON l.track_id = at.track_id
|
||||
WHERE l.listened_at BETWEEN $1 AND $2
|
||||
`
|
||||
|
||||
type CountTopArtistsParams struct {
|
||||
ListenedAt time.Time
|
||||
ListenedAt_2 time.Time
|
||||
}
|
||||
|
||||
func (q *Queries) CountTopArtists(ctx context.Context, arg CountTopArtistsParams) (int64, error) {
|
||||
row := q.db.QueryRow(ctx, countTopArtists, arg.ListenedAt, arg.ListenedAt_2)
|
||||
var total_count int64
|
||||
err := row.Scan(&total_count)
|
||||
return total_count, err
|
||||
}
|
||||
|
||||
const deleteArtist = `-- name: DeleteArtist :exec
|
||||
DELETE FROM artists WHERE id = $1
|
||||
`
|
||||
|
||||
func (q *Queries) DeleteArtist(ctx context.Context, id int32) error {
|
||||
_, err := q.db.Exec(ctx, deleteArtist, id)
|
||||
return err
|
||||
}
|
||||
|
||||
const deleteConflictingArtistReleases = `-- name: DeleteConflictingArtistReleases :exec
|
||||
DELETE FROM artist_releases ar
|
||||
WHERE ar.artist_id = $1
|
||||
AND release_id IN (
|
||||
SELECT ar.release_id FROM artist_releases ar WHERE ar.artist_id = $2
|
||||
)
|
||||
`
|
||||
|
||||
type DeleteConflictingArtistReleasesParams struct {
|
||||
ArtistID int32
|
||||
ArtistID_2 int32
|
||||
}
|
||||
|
||||
func (q *Queries) DeleteConflictingArtistReleases(ctx context.Context, arg DeleteConflictingArtistReleasesParams) error {
|
||||
_, err := q.db.Exec(ctx, deleteConflictingArtistReleases, arg.ArtistID, arg.ArtistID_2)
|
||||
return err
|
||||
}
|
||||
|
||||
const deleteConflictingArtistTracks = `-- name: DeleteConflictingArtistTracks :exec
|
||||
DELETE FROM artist_tracks at
|
||||
WHERE at.artist_id = $1
|
||||
AND track_id IN (
|
||||
SELECT at.track_id FROM artist_tracks at WHERE at.artist_id = $2
|
||||
)
|
||||
`
|
||||
|
||||
type DeleteConflictingArtistTracksParams struct {
|
||||
ArtistID int32
|
||||
ArtistID_2 int32
|
||||
}
|
||||
|
||||
func (q *Queries) DeleteConflictingArtistTracks(ctx context.Context, arg DeleteConflictingArtistTracksParams) error {
|
||||
_, err := q.db.Exec(ctx, deleteConflictingArtistTracks, arg.ArtistID, arg.ArtistID_2)
|
||||
return err
|
||||
}
|
||||
|
||||
// getArtist fetches one artist by id together with all of its aliases,
// aggregated into a single text[] column.
//
// NOTE(review): because of the LEFT JOIN, an artist with no aliases makes
// array_agg produce a one-element array containing NULL ({NULL}), not an
// empty array. Scanning that into []string may fail or yield an empty
// string — confirm, and consider array_agg(...) FILTER (WHERE aa.alias IS
// NOT NULL) in the source query.sql (this file is generated; do not edit).
const getArtist = `-- name: GetArtist :one
SELECT
    a.id, a.musicbrainz_id, a.image, a.image_source, a.name,
    array_agg(aa.alias)::text[] AS aliases
FROM artists_with_name a
LEFT JOIN artist_aliases aa ON a.id = aa.artist_id
WHERE a.id = $1
GROUP BY a.id, a.musicbrainz_id, a.image, a.image_source, a.name
`

// GetArtistRow is the scan target for getArtist; Aliases holds the
// aggregated alias array.
type GetArtistRow struct {
	ID            int32
	MusicBrainzID *uuid.UUID
	Image         *uuid.UUID
	ImageSource   pgtype.Text
	Name          string
	Aliases       []string
}

// GetArtist returns the artist with the given id, including its aliases.
func (q *Queries) GetArtist(ctx context.Context, id int32) (GetArtistRow, error) {
	row := q.db.QueryRow(ctx, getArtist, id)
	var i GetArtistRow
	err := row.Scan(
		&i.ID,
		&i.MusicBrainzID,
		&i.Image,
		&i.ImageSource,
		&i.Name,
		&i.Aliases,
	)
	return i, err
}
|
||||
|
||||
// getArtistByImage looks up a single artist by its image UUID — presumably
// used to map an image file back to its owning artist; LIMIT 1 means ties
// (if images were ever shared) resolve arbitrarily.
const getArtistByImage = `-- name: GetArtistByImage :one
SELECT id, musicbrainz_id, image, image_source FROM artists WHERE image = $1 LIMIT 1
`

// GetArtistByImage returns the first artist whose image column matches.
// Note this reads the base artists table (no name/aliases), unlike the
// artists_with_name queries above.
func (q *Queries) GetArtistByImage(ctx context.Context, image *uuid.UUID) (Artist, error) {
	row := q.db.QueryRow(ctx, getArtistByImage, image)
	var i Artist
	err := row.Scan(
		&i.ID,
		&i.MusicBrainzID,
		&i.Image,
		&i.ImageSource,
	)
	return i, err
}
|
||||
|
||||
// getArtistByMbzID fetches one artist by MusicBrainz id, with aliases
// aggregated into a text[] column.
//
// NOTE(review): same LEFT JOIN / array_agg caveat as getArtist — an artist
// without aliases yields {NULL}, not an empty array; verify scan behavior
// and fix in the source query.sql if needed (generated file; do not edit).
const getArtistByMbzID = `-- name: GetArtistByMbzID :one
SELECT
    a.id, a.musicbrainz_id, a.image, a.image_source, a.name,
    array_agg(aa.alias)::text[] AS aliases
FROM artists_with_name a
LEFT JOIN artist_aliases aa ON a.id = aa.artist_id
WHERE a.musicbrainz_id = $1
GROUP BY a.id, a.musicbrainz_id, a.image, a.image_source, a.name
`

// GetArtistByMbzIDRow is the scan target for getArtistByMbzID.
type GetArtistByMbzIDRow struct {
	ID            int32
	MusicBrainzID *uuid.UUID
	Image         *uuid.UUID
	ImageSource   pgtype.Text
	Name          string
	Aliases       []string
}

// GetArtistByMbzID returns the artist with the given MusicBrainz id,
// including its aliases.
func (q *Queries) GetArtistByMbzID(ctx context.Context, musicbrainzID *uuid.UUID) (GetArtistByMbzIDRow, error) {
	row := q.db.QueryRow(ctx, getArtistByMbzID, musicbrainzID)
	var i GetArtistByMbzIDRow
	err := row.Scan(
		&i.ID,
		&i.MusicBrainzID,
		&i.Image,
		&i.ImageSource,
		&i.Name,
		&i.Aliases,
	)
	return i, err
}
|
||||
|
||||
// getArtistByName resolves an artist through the artist_aliases table: any
// artist owning an alias row equal to $1 matches, and all of that artist's
// aliases are returned aggregated. This assumes the canonical name is also
// stored as an alias row — confirm against the schema.
//
// NOTE(review): the COALESCE guards against a NULL aggregate but does not
// remove a NULL element produced by the LEFT JOIN when an artist has no
// alias rows; see the getArtist note. Fix belongs in query.sql (generated
// file; do not edit). Also note this is a :one query — if two artists share
// the alias, only one row is returned.
const getArtistByName = `-- name: GetArtistByName :one
WITH artist_with_aliases AS (
    SELECT
        a.id, a.musicbrainz_id, a.image, a.image_source, a.name,
        COALESCE(array_agg(aa.alias), '{}')::text[] AS aliases
    FROM artists_with_name a
    LEFT JOIN artist_aliases aa ON a.id = aa.artist_id
    WHERE a.id IN (
        SELECT aa2.artist_id FROM artist_aliases aa2 WHERE aa2.alias = $1
    )
    GROUP BY a.id, a.musicbrainz_id, a.image, a.image_source, a.name
)
SELECT id, musicbrainz_id, image, image_source, name, aliases FROM artist_with_aliases
`

// GetArtistByNameRow is the scan target for getArtistByName.
type GetArtistByNameRow struct {
	ID            int32
	MusicBrainzID *uuid.UUID
	Image         *uuid.UUID
	ImageSource   pgtype.Text
	Name          string
	Aliases       []string
}

// GetArtistByName returns the artist that owns the given alias, including
// all of its aliases.
func (q *Queries) GetArtistByName(ctx context.Context, alias string) (GetArtistByNameRow, error) {
	row := q.db.QueryRow(ctx, getArtistByName, alias)
	var i GetArtistByNameRow
	err := row.Scan(
		&i.ID,
		&i.MusicBrainzID,
		&i.Image,
		&i.ImageSource,
		&i.Name,
		&i.Aliases,
	)
	return i, err
}
|
||||
|
||||
// getReleaseArtists lists every artist credited on a release. The WHERE
// clause on ar.release_id makes the LEFT JOIN behave as an inner join; the
// GROUP BY deduplicates should an artist appear in multiple link rows.
const getReleaseArtists = `-- name: GetReleaseArtists :many
SELECT
    a.id, a.musicbrainz_id, a.image, a.image_source, a.name
FROM artists_with_name a
LEFT JOIN artist_releases ar ON a.id = ar.artist_id
WHERE ar.release_id = $1
GROUP BY a.id, a.musicbrainz_id, a.image, a.image_source, a.name
`

// GetReleaseArtists returns the artists linked to the given release id.
func (q *Queries) GetReleaseArtists(ctx context.Context, releaseID int32) ([]ArtistsWithName, error) {
	rows, err := q.db.Query(ctx, getReleaseArtists, releaseID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []ArtistsWithName
	for rows.Next() {
		var i ArtistsWithName
		if err := rows.Scan(
			&i.ID,
			&i.MusicBrainzID,
			&i.Image,
			&i.ImageSource,
			&i.Name,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// getTopArtistsPaginated ranks artists by listen count within a time window.
// A listen on a multi-artist track counts once for each credited artist,
// since the artist_tracks join fans each listen out per artist.
//
// NOTE(review): the GROUP BY lists a.name twice and includes a.image_source,
// which is not selected. Harmless, but should be tidied in the source
// query.sql (this file is generated; do not edit directly).
const getTopArtistsPaginated = `-- name: GetTopArtistsPaginated :many
SELECT
    a.id,
    a.name,
    a.musicbrainz_id,
    a.image,
    COUNT(*) AS listen_count
FROM listens l
JOIN tracks t ON l.track_id = t.id
JOIN artist_tracks at ON at.track_id = t.id
JOIN artists_with_name a ON a.id = at.artist_id
WHERE l.listened_at BETWEEN $1 AND $2
GROUP BY a.id, a.name, a.musicbrainz_id, a.image, a.image_source, a.name
ORDER BY listen_count DESC
LIMIT $3 OFFSET $4
`

// GetTopArtistsPaginatedParams bounds the window ($1 = start, $2 = end,
// inclusive via BETWEEN) and pages the result with Limit/Offset.
type GetTopArtistsPaginatedParams struct {
	ListenedAt   time.Time
	ListenedAt_2 time.Time
	Limit        int32
	Offset       int32
}

// GetTopArtistsPaginatedRow is one ranked artist with its listen count.
type GetTopArtistsPaginatedRow struct {
	ID            int32
	Name          string
	MusicBrainzID *uuid.UUID
	Image         *uuid.UUID
	ListenCount   int64
}

// GetTopArtistsPaginated returns artists ordered by descending listen count
// within the given window, one page at a time.
func (q *Queries) GetTopArtistsPaginated(ctx context.Context, arg GetTopArtistsPaginatedParams) ([]GetTopArtistsPaginatedRow, error) {
	rows, err := q.db.Query(ctx, getTopArtistsPaginated,
		arg.ListenedAt,
		arg.ListenedAt_2,
		arg.Limit,
		arg.Offset,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetTopArtistsPaginatedRow
	for rows.Next() {
		var i GetTopArtistsPaginatedRow
		if err := rows.Scan(
			&i.ID,
			&i.Name,
			&i.MusicBrainzID,
			&i.Image,
			&i.ListenCount,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// getTrackArtists lists every artist credited on a track; the WHERE on the
// joined table makes the LEFT JOIN effectively inner, and the GROUP BY
// deduplicates repeated link rows.
const getTrackArtists = `-- name: GetTrackArtists :many
SELECT
    a.id, a.musicbrainz_id, a.image, a.image_source, a.name
FROM artists_with_name a
LEFT JOIN artist_tracks at ON a.id = at.artist_id
WHERE at.track_id = $1
GROUP BY a.id, a.musicbrainz_id, a.image, a.image_source, a.name
`

// GetTrackArtists returns the artists linked to the given track id.
func (q *Queries) GetTrackArtists(ctx context.Context, trackID int32) ([]ArtistsWithName, error) {
	rows, err := q.db.Query(ctx, getTrackArtists, trackID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []ArtistsWithName
	for rows.Next() {
		var i ArtistsWithName
		if err := rows.Scan(
			&i.ID,
			&i.MusicBrainzID,
			&i.Image,
			&i.ImageSource,
			&i.Name,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// insertArtist creates a bare artists row. The name is not part of this
// table — names are presumably stored elsewhere (artists_with_name is a
// view and GetArtistByName resolves via artist_aliases); confirm callers
// insert the name/alias separately.
const insertArtist = `-- name: InsertArtist :one
INSERT INTO artists (musicbrainz_id, image, image_source)
VALUES ($1, $2, $3)
RETURNING id, musicbrainz_id, image, image_source
`

// InsertArtistParams holds the nullable identity/image fields for a new artist.
type InsertArtistParams struct {
	MusicBrainzID *uuid.UUID
	Image         *uuid.UUID
	ImageSource   pgtype.Text
}

// InsertArtist inserts an artist and returns the stored row (including the
// generated id).
func (q *Queries) InsertArtist(ctx context.Context, arg InsertArtistParams) (Artist, error) {
	row := q.db.QueryRow(ctx, insertArtist, arg.MusicBrainzID, arg.Image, arg.ImageSource)
	var i Artist
	err := row.Scan(
		&i.ID,
		&i.MusicBrainzID,
		&i.Image,
		&i.ImageSource,
	)
	return i, err
}
|
||||
|
||||
// updateArtistImage sets (or clears, via NULLs) an artist's image and its
// provenance string.
const updateArtistImage = `-- name: UpdateArtistImage :exec
UPDATE artists SET image = $2, image_source = $3
WHERE id = $1
`

// UpdateArtistImageParams identifies the artist ($1) and the new image
// fields ($2, $3).
type UpdateArtistImageParams struct {
	ID          int32
	Image       *uuid.UUID
	ImageSource pgtype.Text
}

// UpdateArtistImage executes the statement above.
func (q *Queries) UpdateArtistImage(ctx context.Context, arg UpdateArtistImageParams) error {
	_, err := q.db.Exec(ctx, updateArtistImage, arg.ID, arg.Image, arg.ImageSource)
	return err
}

// updateArtistMbzID sets (or clears) an artist's MusicBrainz id.
const updateArtistMbzID = `-- name: UpdateArtistMbzID :exec
UPDATE artists SET musicbrainz_id = $2
WHERE id = $1
`

// UpdateArtistMbzIDParams identifies the artist ($1) and the new id ($2).
type UpdateArtistMbzIDParams struct {
	ID            int32
	MusicBrainzID *uuid.UUID
}

// UpdateArtistMbzID executes the statement above.
func (q *Queries) UpdateArtistMbzID(ctx context.Context, arg UpdateArtistMbzIDParams) error {
	_, err := q.db.Exec(ctx, updateArtistMbzID, arg.ID, arg.MusicBrainzID)
	return err
}

// updateArtistReleases re-points every artist_releases row from artist $1 to
// artist $2 — the merge step paired with DeleteConflictingArtistReleases,
// which must run first to avoid duplicate links.
const updateArtistReleases = `-- name: UpdateArtistReleases :exec
UPDATE artist_releases
SET artist_id = $2
WHERE artist_id = $1
`

// UpdateArtistReleasesParams: ArtistID is the source ($1), ArtistID_2 the
// target ($2).
type UpdateArtistReleasesParams struct {
	ArtistID   int32
	ArtistID_2 int32
}

// UpdateArtistReleases executes the statement above.
func (q *Queries) UpdateArtistReleases(ctx context.Context, arg UpdateArtistReleasesParams) error {
	_, err := q.db.Exec(ctx, updateArtistReleases, arg.ArtistID, arg.ArtistID_2)
	return err
}

// updateArtistTracks re-points every artist_tracks row from artist $1 to
// artist $2 — paired with DeleteConflictingArtistTracks, which must run
// first to avoid duplicate links.
const updateArtistTracks = `-- name: UpdateArtistTracks :exec
UPDATE artist_tracks
SET artist_id = $2
WHERE artist_id = $1
`

// UpdateArtistTracksParams: ArtistID is the source ($1), ArtistID_2 the
// target ($2).
type UpdateArtistTracksParams struct {
	ArtistID   int32
	ArtistID_2 int32
}

// UpdateArtistTracks executes the statement above.
func (q *Queries) UpdateArtistTracks(ctx context.Context, arg UpdateArtistTracksParams) error {
	_, err := q.db.Exec(ctx, updateArtistTracks, arg.ArtistID, arg.ArtistID_2)
	return err
}
|
||||
32
internal/repository/db.go
Normal file
32
internal/repository/db.go
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
|
||||
package repository
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/jackc/pgx/v5"
|
||||
"github.com/jackc/pgx/v5/pgconn"
|
||||
)
|
||||
|
||||
// DBTX is the minimal query-execution surface Queries needs. It is
// satisfied by pgx connections/pools and by pgx.Tx (see WithTx), so the
// same generated code runs inside or outside a transaction.
type DBTX interface {
	Exec(context.Context, string, ...interface{}) (pgconn.CommandTag, error)
	Query(context.Context, string, ...interface{}) (pgx.Rows, error)
	QueryRow(context.Context, string, ...interface{}) pgx.Row
}

// New returns a Queries bound to the given connection, pool, or transaction.
func New(db DBTX) *Queries {
	return &Queries{db: db}
}

// Queries holds the DBTX that all generated query methods execute against.
type Queries struct {
	db DBTX
}

// WithTx returns a copy of q whose queries execute within the transaction tx.
func (q *Queries) WithTx(tx pgx.Tx) *Queries {
	return &Queries{
		db: tx,
	}
}
|
||||
26
internal/repository/etc.sql.go
Normal file
26
internal/repository/etc.sql.go
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
// source: etc.sql
|
||||
|
||||
package repository
|
||||
|
||||
import (
|
||||
"context"
|
||||
)
|
||||
|
||||
// cleanOrphanedEntries deletes rows nothing references anymore, inside a
// single DO block so the three deletes run in order within one statement:
// tracks without listens first, then releases orphaned by that delete, then
// artists with no remaining artist_tracks links.
//
// NOTE(review): NOT IN (subquery) matches nothing if the subquery yields any
// NULL — if e.g. tracks.release_id is nullable, the releases delete silently
// becomes a no-op. Confirm the referenced columns are NOT NULL, or switch to
// NOT EXISTS in the source etc.sql (this file is generated; do not edit).
const cleanOrphanedEntries = `-- name: CleanOrphanedEntries :exec
DO $$
BEGIN
DELETE FROM tracks WHERE id NOT IN (SELECT l.track_id FROM listens l);
DELETE FROM releases WHERE id NOT IN (SELECT t.release_id FROM tracks t);
DELETE FROM artists WHERE id NOT IN (SELECT at.artist_id FROM artist_tracks at);
END $$
`

// DELETE FROM releases WHERE release_group_id NOT IN (SELECT t.release_group_id FROM tracks t);
// DELETE FROM releases WHERE release_group_id NOT IN (SELECT rg.id FROM release_groups rg);
func (q *Queries) CleanOrphanedEntries(ctx context.Context) error {
	_, err := q.db.Exec(ctx, cleanOrphanedEntries)
	return err
}
|
||||
742
internal/repository/listen.sql.go
Normal file
742
internal/repository/listen.sql.go
Normal file
|
|
@ -0,0 +1,742 @@
|
|||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
// source: listen.sql
|
||||
|
||||
package repository
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/jackc/pgx/v5/pgtype"
|
||||
)
|
||||
|
||||
// countListens counts all listens whose listened_at falls in [$1, $2]
// (BETWEEN is inclusive on both ends).
const countListens = `-- name: CountListens :one
SELECT COUNT(*) AS total_count
FROM listens l
WHERE l.listened_at BETWEEN $1 AND $2
`

// CountListensParams bounds the window: ListenedAt = start, ListenedAt_2 = end.
type CountListensParams struct {
	ListenedAt   time.Time
	ListenedAt_2 time.Time
}

// CountListens returns the total number of listens in the window.
func (q *Queries) CountListens(ctx context.Context, arg CountListensParams) (int64, error) {
	row := q.db.QueryRow(ctx, countListens, arg.ListenedAt, arg.ListenedAt_2)
	var total_count int64
	err := row.Scan(&total_count)
	return total_count, err
}

// countListensFromArtist counts listens in the window whose track is
// credited to artist $3. The artist_tracks pair is unique per (artist,
// track), so each listen is counted at most once.
const countListensFromArtist = `-- name: CountListensFromArtist :one
SELECT COUNT(*) AS total_count
FROM listens l
JOIN artist_tracks at ON l.track_id = at.track_id
WHERE l.listened_at BETWEEN $1 AND $2
AND at.artist_id = $3
`

// CountListensFromArtistParams bounds the window and selects the artist.
type CountListensFromArtistParams struct {
	ListenedAt   time.Time
	ListenedAt_2 time.Time
	ArtistID     int32
}

// CountListensFromArtist returns the artist's listen count in the window.
func (q *Queries) CountListensFromArtist(ctx context.Context, arg CountListensFromArtistParams) (int64, error) {
	row := q.db.QueryRow(ctx, countListensFromArtist, arg.ListenedAt, arg.ListenedAt_2, arg.ArtistID)
	var total_count int64
	err := row.Scan(&total_count)
	return total_count, err
}

// countListensFromRelease counts listens in the window whose track belongs
// to release $3.
const countListensFromRelease = `-- name: CountListensFromRelease :one
SELECT COUNT(*) AS total_count
FROM listens l
JOIN tracks t ON l.track_id = t.id
WHERE l.listened_at BETWEEN $1 AND $2
AND t.release_id = $3
`

// CountListensFromReleaseParams bounds the window and selects the release.
type CountListensFromReleaseParams struct {
	ListenedAt   time.Time
	ListenedAt_2 time.Time
	ReleaseID    int32
}

// CountListensFromRelease returns the release's listen count in the window.
func (q *Queries) CountListensFromRelease(ctx context.Context, arg CountListensFromReleaseParams) (int64, error) {
	row := q.db.QueryRow(ctx, countListensFromRelease, arg.ListenedAt, arg.ListenedAt_2, arg.ReleaseID)
	var total_count int64
	err := row.Scan(&total_count)
	return total_count, err
}

// countListensFromTrack counts listens of a single track in the window; no
// join is needed since listens carries track_id directly.
const countListensFromTrack = `-- name: CountListensFromTrack :one
SELECT COUNT(*) AS total_count
FROM listens l
WHERE l.listened_at BETWEEN $1 AND $2
AND l.track_id = $3
`

// CountListensFromTrackParams bounds the window and selects the track.
type CountListensFromTrackParams struct {
	ListenedAt   time.Time
	ListenedAt_2 time.Time
	TrackID      int32
}

// CountListensFromTrack returns the track's listen count in the window.
func (q *Queries) CountListensFromTrack(ctx context.Context, arg CountListensFromTrackParams) (int64, error) {
	row := q.db.QueryRow(ctx, countListensFromTrack, arg.ListenedAt, arg.ListenedAt_2, arg.TrackID)
	var total_count int64
	err := row.Scan(&total_count)
	return total_count, err
}
|
||||
|
||||
// countTimeListened sums track durations over all listens in the window.
// COALESCE(..., 0) makes an empty window return 0 rather than NULL; the
// ::BIGINT cast keeps the scan target an int64. Duration is presumably in
// seconds, per the seconds_listened alias.
const countTimeListened = `-- name: CountTimeListened :one
SELECT COALESCE(SUM(t.duration), 0)::BIGINT AS seconds_listened
FROM listens l
JOIN tracks t ON l.track_id = t.id
WHERE l.listened_at BETWEEN $1 AND $2
`

// CountTimeListenedParams bounds the inclusive window.
type CountTimeListenedParams struct {
	ListenedAt   time.Time
	ListenedAt_2 time.Time
}

// CountTimeListened returns the total seconds listened in the window.
func (q *Queries) CountTimeListened(ctx context.Context, arg CountTimeListenedParams) (int64, error) {
	row := q.db.QueryRow(ctx, countTimeListened, arg.ListenedAt, arg.ListenedAt_2)
	var seconds_listened int64
	err := row.Scan(&seconds_listened)
	return seconds_listened, err
}

// countTimeListenedToArtist sums listened duration restricted to tracks
// credited to artist $3.
const countTimeListenedToArtist = `-- name: CountTimeListenedToArtist :one
SELECT COALESCE(SUM(t.duration), 0)::BIGINT AS seconds_listened
FROM listens l
JOIN tracks t ON l.track_id = t.id
JOIN artist_tracks at ON t.id = at.track_id
WHERE l.listened_at BETWEEN $1 AND $2
AND at.artist_id = $3
`

// CountTimeListenedToArtistParams bounds the window and selects the artist.
type CountTimeListenedToArtistParams struct {
	ListenedAt   time.Time
	ListenedAt_2 time.Time
	ArtistID     int32
}

// CountTimeListenedToArtist returns the artist's seconds listened in the window.
func (q *Queries) CountTimeListenedToArtist(ctx context.Context, arg CountTimeListenedToArtistParams) (int64, error) {
	row := q.db.QueryRow(ctx, countTimeListenedToArtist, arg.ListenedAt, arg.ListenedAt_2, arg.ArtistID)
	var seconds_listened int64
	err := row.Scan(&seconds_listened)
	return seconds_listened, err
}

// countTimeListenedToRelease sums listened duration restricted to tracks on
// release $3.
const countTimeListenedToRelease = `-- name: CountTimeListenedToRelease :one
SELECT COALESCE(SUM(t.duration), 0)::BIGINT AS seconds_listened
FROM listens l
JOIN tracks t ON l.track_id = t.id
WHERE l.listened_at BETWEEN $1 AND $2
AND t.release_id = $3
`

// CountTimeListenedToReleaseParams bounds the window and selects the release.
type CountTimeListenedToReleaseParams struct {
	ListenedAt   time.Time
	ListenedAt_2 time.Time
	ReleaseID    int32
}

// CountTimeListenedToRelease returns the release's seconds listened in the window.
func (q *Queries) CountTimeListenedToRelease(ctx context.Context, arg CountTimeListenedToReleaseParams) (int64, error) {
	row := q.db.QueryRow(ctx, countTimeListenedToRelease, arg.ListenedAt, arg.ListenedAt_2, arg.ReleaseID)
	var seconds_listened int64
	err := row.Scan(&seconds_listened)
	return seconds_listened, err
}

// countTimeListenedToTrack sums listened duration for a single track $3.
const countTimeListenedToTrack = `-- name: CountTimeListenedToTrack :one
SELECT COALESCE(SUM(t.duration), 0)::BIGINT AS seconds_listened
FROM listens l
JOIN tracks t ON l.track_id = t.id
WHERE l.listened_at BETWEEN $1 AND $2
AND t.id = $3
`

// CountTimeListenedToTrackParams bounds the window and selects the track (ID).
type CountTimeListenedToTrackParams struct {
	ListenedAt   time.Time
	ListenedAt_2 time.Time
	ID           int32
}

// CountTimeListenedToTrack returns the track's seconds listened in the window.
func (q *Queries) CountTimeListenedToTrack(ctx context.Context, arg CountTimeListenedToTrackParams) (int64, error) {
	row := q.db.QueryRow(ctx, countTimeListenedToTrack, arg.ListenedAt, arg.ListenedAt_2, arg.ID)
	var seconds_listened int64
	err := row.Scan(&seconds_listened)
	return seconds_listened, err
}
|
||||
|
||||
// deleteListen removes a listen identified by its (track_id, listened_at)
// pair.
//
// NOTE(review): user_id is not part of the predicate — if the schema allows
// multiple users to have a listen of the same track at the same instant,
// this deletes all of them. Confirm the single-user assumption or add
// user_id in the source listen.sql (this file is generated; do not edit).
const deleteListen = `-- name: DeleteListen :exec
DELETE FROM listens WHERE track_id = $1 AND listened_at = $2
`

// DeleteListenParams identifies the listen to delete.
type DeleteListenParams struct {
	TrackID    int32
	ListenedAt time.Time
}

// DeleteListen executes the statement above.
func (q *Queries) DeleteListen(ctx context.Context, arg DeleteListenParams) error {
	_, err := q.db.Exec(ctx, deleteListen, arg.TrackID, arg.ListenedAt)
	return err
}
|
||||
|
||||
// getLastListensFromArtistPaginated returns the most recent listens (within
// a window, newest first, paginated) for tracks credited to artist $5. Each
// row also carries the track's full artist credit list as a JSON array
// (json_agg of {id, name}), which the Go side leaves as raw bytes.
const getLastListensFromArtistPaginated = `-- name: GetLastListensFromArtistPaginated :many
SELECT
    l.track_id, l.listened_at, l.client, l.user_id,
    t.title AS track_title,
    t.release_id AS release_id,
    (
        SELECT json_agg(json_build_object('id', a.id, 'name', a.name))
        FROM artist_tracks at
        JOIN artists_with_name a ON a.id = at.artist_id
        WHERE at.track_id = t.id
    ) AS artists
FROM listens l
JOIN tracks_with_title t ON l.track_id = t.id
JOIN artist_tracks at ON t.id = at.track_id
WHERE at.artist_id = $5
AND l.listened_at BETWEEN $1 AND $2
ORDER BY l.listened_at DESC
LIMIT $3 OFFSET $4
`

// GetLastListensFromArtistPaginatedParams bounds the window ($1, $2), pages
// with Limit/Offset ($3, $4), and selects the artist ($5).
type GetLastListensFromArtistPaginatedParams struct {
	ListenedAt   time.Time
	ListenedAt_2 time.Time
	Limit        int32
	Offset       int32
	ArtistID     int32
}

// GetLastListensFromArtistPaginatedRow is one listen; Artists is the raw
// JSON credit array produced by json_agg (may be SQL NULL / nil bytes if a
// track has no artist links).
type GetLastListensFromArtistPaginatedRow struct {
	TrackID    int32
	ListenedAt time.Time
	Client     *string
	UserID     int32
	TrackTitle string
	ReleaseID  int32
	Artists    []byte
}

// GetLastListensFromArtistPaginated returns one page of the artist's most
// recent listens in the window.
func (q *Queries) GetLastListensFromArtistPaginated(ctx context.Context, arg GetLastListensFromArtistPaginatedParams) ([]GetLastListensFromArtistPaginatedRow, error) {
	rows, err := q.db.Query(ctx, getLastListensFromArtistPaginated,
		arg.ListenedAt,
		arg.ListenedAt_2,
		arg.Limit,
		arg.Offset,
		arg.ArtistID,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetLastListensFromArtistPaginatedRow
	for rows.Next() {
		var i GetLastListensFromArtistPaginatedRow
		if err := rows.Scan(
			&i.TrackID,
			&i.ListenedAt,
			&i.Client,
			&i.UserID,
			&i.TrackTitle,
			&i.ReleaseID,
			&i.Artists,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// getLastListensFromReleasePaginated returns the most recent listens (within
// a window, newest first, paginated) for tracks on release $5, with each
// track's artist credits embedded as a JSON array.
const getLastListensFromReleasePaginated = `-- name: GetLastListensFromReleasePaginated :many
SELECT
    l.track_id, l.listened_at, l.client, l.user_id,
    t.title AS track_title,
    t.release_id AS release_id,
    (
        SELECT json_agg(json_build_object('id', a.id, 'name', a.name))
        FROM artist_tracks at
        JOIN artists_with_name a ON a.id = at.artist_id
        WHERE at.track_id = t.id
    ) AS artists
FROM listens l
JOIN tracks_with_title t ON l.track_id = t.id
WHERE l.listened_at BETWEEN $1 AND $2
AND t.release_id = $5
ORDER BY l.listened_at DESC
LIMIT $3 OFFSET $4
`

// GetLastListensFromReleasePaginatedParams bounds the window ($1, $2), pages
// with Limit/Offset ($3, $4), and selects the release ($5).
type GetLastListensFromReleasePaginatedParams struct {
	ListenedAt   time.Time
	ListenedAt_2 time.Time
	Limit        int32
	Offset       int32
	ReleaseID    int32
}

// GetLastListensFromReleasePaginatedRow is one listen; Artists is the raw
// JSON credit array from json_agg.
type GetLastListensFromReleasePaginatedRow struct {
	TrackID    int32
	ListenedAt time.Time
	Client     *string
	UserID     int32
	TrackTitle string
	ReleaseID  int32
	Artists    []byte
}

// GetLastListensFromReleasePaginated returns one page of the release's most
// recent listens in the window.
func (q *Queries) GetLastListensFromReleasePaginated(ctx context.Context, arg GetLastListensFromReleasePaginatedParams) ([]GetLastListensFromReleasePaginatedRow, error) {
	rows, err := q.db.Query(ctx, getLastListensFromReleasePaginated,
		arg.ListenedAt,
		arg.ListenedAt_2,
		arg.Limit,
		arg.Offset,
		arg.ReleaseID,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetLastListensFromReleasePaginatedRow
	for rows.Next() {
		var i GetLastListensFromReleasePaginatedRow
		if err := rows.Scan(
			&i.TrackID,
			&i.ListenedAt,
			&i.Client,
			&i.UserID,
			&i.TrackTitle,
			&i.ReleaseID,
			&i.Artists,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// getLastListensFromTrackPaginated returns the most recent listens (within a
// window, newest first, paginated) of a single track $5, with the track's
// artist credits embedded as a JSON array.
const getLastListensFromTrackPaginated = `-- name: GetLastListensFromTrackPaginated :many
SELECT
    l.track_id, l.listened_at, l.client, l.user_id,
    t.title AS track_title,
    t.release_id AS release_id,
    (
        SELECT json_agg(json_build_object('id', a.id, 'name', a.name))
        FROM artist_tracks at
        JOIN artists_with_name a ON a.id = at.artist_id
        WHERE at.track_id = t.id
    ) AS artists
FROM listens l
JOIN tracks_with_title t ON l.track_id = t.id
WHERE l.listened_at BETWEEN $1 AND $2
AND t.id = $5
ORDER BY l.listened_at DESC
LIMIT $3 OFFSET $4
`

// GetLastListensFromTrackPaginatedParams bounds the window ($1, $2), pages
// with Limit/Offset ($3, $4), and selects the track ($5, field ID).
type GetLastListensFromTrackPaginatedParams struct {
	ListenedAt   time.Time
	ListenedAt_2 time.Time
	Limit        int32
	Offset       int32
	ID           int32
}

// GetLastListensFromTrackPaginatedRow is one listen; Artists is the raw
// JSON credit array from json_agg.
type GetLastListensFromTrackPaginatedRow struct {
	TrackID    int32
	ListenedAt time.Time
	Client     *string
	UserID     int32
	TrackTitle string
	ReleaseID  int32
	Artists    []byte
}

// GetLastListensFromTrackPaginated returns one page of the track's most
// recent listens in the window.
func (q *Queries) GetLastListensFromTrackPaginated(ctx context.Context, arg GetLastListensFromTrackPaginatedParams) ([]GetLastListensFromTrackPaginatedRow, error) {
	rows, err := q.db.Query(ctx, getLastListensFromTrackPaginated,
		arg.ListenedAt,
		arg.ListenedAt_2,
		arg.Limit,
		arg.Offset,
		arg.ID,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetLastListensFromTrackPaginatedRow
	for rows.Next() {
		var i GetLastListensFromTrackPaginatedRow
		if err := rows.Scan(
			&i.TrackID,
			&i.ListenedAt,
			&i.Client,
			&i.UserID,
			&i.TrackTitle,
			&i.ReleaseID,
			&i.Artists,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// getLastListensPaginated returns the most recent listens across all tracks
// in a window, newest first, paginated, with each track's artist credits
// embedded as a JSON array.
const getLastListensPaginated = `-- name: GetLastListensPaginated :many
SELECT
    l.track_id, l.listened_at, l.client, l.user_id,
    t.title AS track_title,
    t.release_id AS release_id,
    (
        SELECT json_agg(json_build_object('id', a.id, 'name', a.name))
        FROM artist_tracks at
        JOIN artists_with_name a ON a.id = at.artist_id
        WHERE at.track_id = t.id
    ) AS artists
FROM listens l
JOIN tracks_with_title t ON l.track_id = t.id
WHERE l.listened_at BETWEEN $1 AND $2
ORDER BY l.listened_at DESC
LIMIT $3 OFFSET $4
`

// GetLastListensPaginatedParams bounds the window ($1, $2) and pages with
// Limit/Offset ($3, $4).
type GetLastListensPaginatedParams struct {
	ListenedAt   time.Time
	ListenedAt_2 time.Time
	Limit        int32
	Offset       int32
}

// GetLastListensPaginatedRow is one listen; Artists is the raw JSON credit
// array from json_agg.
type GetLastListensPaginatedRow struct {
	TrackID    int32
	ListenedAt time.Time
	Client     *string
	UserID     int32
	TrackTitle string
	ReleaseID  int32
	Artists    []byte
}

// GetLastListensPaginated returns one page of the most recent listens in
// the window.
func (q *Queries) GetLastListensPaginated(ctx context.Context, arg GetLastListensPaginatedParams) ([]GetLastListensPaginatedRow, error) {
	rows, err := q.db.Query(ctx, getLastListensPaginated,
		arg.ListenedAt,
		arg.ListenedAt_2,
		arg.Limit,
		arg.Offset,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetLastListensPaginatedRow
	for rows.Next() {
		var i GetLastListensPaginatedRow
		if err := rows.Scan(
			&i.TrackID,
			&i.ListenedAt,
			&i.Client,
			&i.UserID,
			&i.TrackTitle,
			&i.ReleaseID,
			&i.Artists,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// insertListen records a listen; ON CONFLICT DO NOTHING makes re-submission
// of the same listen (same conflict key, presumably track_id + listened_at
// and/or user_id — confirm against the schema) an idempotent no-op.
const insertListen = `-- name: InsertListen :exec
INSERT INTO listens (track_id, listened_at, user_id, client)
VALUES ($1, $2, $3, $4)
ON CONFLICT DO NOTHING
`

// InsertListenParams holds one listen event; Client is nullable.
type InsertListenParams struct {
	TrackID    int32
	ListenedAt time.Time
	UserID     int32
	Client     *string
}

// InsertListen inserts a listen, silently ignoring duplicates.
func (q *Queries) InsertListen(ctx context.Context, arg InsertListenParams) error {
	_, err := q.db.Exec(ctx, insertListen,
		arg.TrackID,
		arg.ListenedAt,
		arg.UserID,
		arg.Client,
	)
	return err
}
|
||||
|
||||
// listenActivity buckets all listens into fixed-width time intervals between
// $1 and $2 (step $3). generate_series emits every bucket start, so empty
// buckets appear with a zero count (COUNT over the LEFT JOIN's NULLs);
// buckets are half-open: [start, start + interval).
const listenActivity = `-- name: ListenActivity :many
WITH buckets AS (
    SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start
),
bucketed_listens AS (
    SELECT
        b.bucket_start,
        COUNT(l.listened_at) AS listen_count
    FROM buckets b
    LEFT JOIN listens l
        ON l.listened_at >= b.bucket_start
        AND l.listened_at < b.bucket_start + $3::interval
    GROUP BY b.bucket_start
    ORDER BY b.bucket_start
)
SELECT bucket_start, listen_count FROM bucketed_listens
`

// ListenActivityParams: Column1/Column2 are the series bounds, Column3 the
// bucket width (sqlc's generic names come from the ::casts in the query).
type ListenActivityParams struct {
	Column1 time.Time
	Column2 time.Time
	Column3 pgtype.Interval
}

// ListenActivityRow is one bucket: its start time and listen count.
type ListenActivityRow struct {
	BucketStart time.Time
	ListenCount int64
}

// ListenActivity returns the per-bucket listen counts, in bucket order.
func (q *Queries) ListenActivity(ctx context.Context, arg ListenActivityParams) ([]ListenActivityRow, error) {
	rows, err := q.db.Query(ctx, listenActivity, arg.Column1, arg.Column2, arg.Column3)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []ListenActivityRow
	for rows.Next() {
		var i ListenActivityRow
		if err := rows.Scan(&i.BucketStart, &i.ListenCount); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// listenActivityForArtist is listenActivity restricted to listens of tracks
// credited to artist $4: listens are pre-filtered through artist_tracks in
// a CTE, then bucketed exactly like ListenActivity (empty buckets included,
// half-open [start, start + interval)).
const listenActivityForArtist = `-- name: ListenActivityForArtist :many
WITH buckets AS (
    SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start
),
filtered_listens AS (
    SELECT l.track_id, l.listened_at, l.client, l.user_id
    FROM listens l
    JOIN artist_tracks t ON l.track_id = t.track_id
    WHERE t.artist_id = $4
),
bucketed_listens AS (
    SELECT
        b.bucket_start,
        COUNT(l.listened_at) AS listen_count
    FROM buckets b
    LEFT JOIN filtered_listens l
        ON l.listened_at >= b.bucket_start
        AND l.listened_at < b.bucket_start + $3::interval
    GROUP BY b.bucket_start
    ORDER BY b.bucket_start
)
SELECT bucket_start, listen_count FROM bucketed_listens
`

// ListenActivityForArtistParams: series bounds, bucket width, and the artist.
type ListenActivityForArtistParams struct {
	Column1  time.Time
	Column2  time.Time
	Column3  pgtype.Interval
	ArtistID int32
}

// ListenActivityForArtistRow is one bucket: start time and listen count.
type ListenActivityForArtistRow struct {
	BucketStart time.Time
	ListenCount int64
}

// ListenActivityForArtist returns the artist's per-bucket listen counts.
func (q *Queries) ListenActivityForArtist(ctx context.Context, arg ListenActivityForArtistParams) ([]ListenActivityForArtistRow, error) {
	rows, err := q.db.Query(ctx, listenActivityForArtist,
		arg.Column1,
		arg.Column2,
		arg.Column3,
		arg.ArtistID,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []ListenActivityForArtistRow
	for rows.Next() {
		var i ListenActivityForArtistRow
		if err := rows.Scan(&i.BucketStart, &i.ListenCount); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// listenActivityForRelease is listenActivity restricted to listens of tracks
// on release $4, pre-filtered in a CTE and then bucketed identically (empty
// buckets included, half-open [start, start + interval)).
const listenActivityForRelease = `-- name: ListenActivityForRelease :many
WITH buckets AS (
    SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start
),
filtered_listens AS (
    SELECT l.track_id, l.listened_at, l.client, l.user_id
    FROM listens l
    JOIN tracks t ON l.track_id = t.id
    WHERE t.release_id = $4
),
bucketed_listens AS (
    SELECT
        b.bucket_start,
        COUNT(l.listened_at) AS listen_count
    FROM buckets b
    LEFT JOIN filtered_listens l
        ON l.listened_at >= b.bucket_start
        AND l.listened_at < b.bucket_start + $3::interval
    GROUP BY b.bucket_start
    ORDER BY b.bucket_start
)
SELECT bucket_start, listen_count FROM bucketed_listens
`

// ListenActivityForReleaseParams: series bounds, bucket width, and the release.
type ListenActivityForReleaseParams struct {
	Column1   time.Time
	Column2   time.Time
	Column3   pgtype.Interval
	ReleaseID int32
}

// ListenActivityForReleaseRow is one bucket: start time and listen count.
type ListenActivityForReleaseRow struct {
	BucketStart time.Time
	ListenCount int64
}

// ListenActivityForRelease returns the release's per-bucket listen counts.
func (q *Queries) ListenActivityForRelease(ctx context.Context, arg ListenActivityForReleaseParams) ([]ListenActivityForReleaseRow, error) {
	rows, err := q.db.Query(ctx, listenActivityForRelease,
		arg.Column1,
		arg.Column2,
		arg.Column3,
		arg.ReleaseID,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []ListenActivityForReleaseRow
	for rows.Next() {
		var i ListenActivityForReleaseRow
		if err := rows.Scan(&i.BucketStart, &i.ListenCount); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// listenActivityForTrack counts listens per fixed-width time bucket for a
// single track. Identical in shape to listenActivityForRelease, but filtered
// on t.id instead of t.release_id; empty buckets survive via the LEFT JOIN.
const listenActivityForTrack = `-- name: ListenActivityForTrack :many
WITH buckets AS (
SELECT generate_series($1::timestamptz, $2::timestamptz, $3::interval) AS bucket_start
),
filtered_listens AS (
SELECT l.track_id, l.listened_at, l.client, l.user_id
FROM listens l
JOIN tracks t ON l.track_id = t.id
WHERE t.id = $4
),
bucketed_listens AS (
SELECT
b.bucket_start,
COUNT(l.listened_at) AS listen_count
FROM buckets b
LEFT JOIN filtered_listens l
ON l.listened_at >= b.bucket_start
AND l.listened_at < b.bucket_start + $3::interval
GROUP BY b.bucket_start
ORDER BY b.bucket_start
)
SELECT bucket_start, listen_count FROM bucketed_listens
`

// ListenActivityForTrackParams carries the positional arguments for
// ListenActivityForTrack.
type ListenActivityForTrackParams struct {
	Column1 time.Time       // series start ($1)
	Column2 time.Time       // series end ($2)
	Column3 pgtype.Interval // bucket width ($3)
	ID      int32           // track id ($4)
}

// ListenActivityForTrackRow is one time bucket and its listen count.
type ListenActivityForTrackRow struct {
	BucketStart time.Time
	ListenCount int64
}

// ListenActivityForTrack runs the bucketed listen-count query for one track
// and returns rows in ascending bucket order, including zero-count buckets.
func (q *Queries) ListenActivityForTrack(ctx context.Context, arg ListenActivityForTrackParams) ([]ListenActivityForTrackRow, error) {
	rows, err := q.db.Query(ctx, listenActivityForTrack,
		arg.Column1,
		arg.Column2,
		arg.Column3,
		arg.ID,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []ListenActivityForTrackRow
	for rows.Next() {
		var i ListenActivityForTrackRow
		if err := rows.Scan(&i.BucketStart, &i.ListenCount); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// updateTrackIdForListens re-points every listen from one track to another
// (used when tracks are merged/deduplicated).
const updateTrackIdForListens = `-- name: UpdateTrackIdForListens :exec
UPDATE listens SET track_id = $2
WHERE track_id = $1
`

// UpdateTrackIdForListensParams carries the positional arguments for
// UpdateTrackIdForListens. Note the ordering: TrackID is the OLD id being
// replaced ($1, the WHERE clause) and TrackID_2 is the NEW id written ($2).
type UpdateTrackIdForListensParams struct {
	TrackID   int32 // old track id ($1)
	TrackID_2 int32 // new track id ($2)
}

// UpdateTrackIdForListens moves all listens from arg.TrackID to arg.TrackID_2.
func (q *Queries) UpdateTrackIdForListens(ctx context.Context, arg UpdateTrackIdForListensParams) error {
	_, err := q.db.Exec(ctx, updateTrackIdForListens, arg.TrackID, arg.TrackID_2)
	return err
}
|
||||
164
internal/repository/models.go
Normal file
164
internal/repository/models.go
Normal file
|
|
@ -0,0 +1,164 @@
|
|||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
|
||||
package repository
|
||||
|
||||
import (
|
||||
"database/sql/driver"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/jackc/pgx/v5/pgtype"
|
||||
)
|
||||
|
||||
// Role is the database enum of user access levels.
type Role string

const (
	RoleAdmin Role = "admin"
	RoleUser  Role = "user"
)

// Scan implements sql.Scanner so a Role can be read straight from a query
// result; both text and []byte column representations are accepted.
func (e *Role) Scan(src interface{}) error {
	switch s := src.(type) {
	case []byte:
		*e = Role(s)
	case string:
		*e = Role(s)
	default:
		return fmt.Errorf("unsupported scan type for Role: %T", src)
	}
	return nil
}
|
||||
|
||||
// NullRole is a nullable Role, mirroring the sql.Null* pattern for columns
// that may be NULL.
type NullRole struct {
	Role  Role
	Valid bool // Valid is true if Role is not NULL
}

// Scan implements the Scanner interface.
// A nil value maps to the zero Role with Valid=false; anything else is
// delegated to Role.Scan.
func (ns *NullRole) Scan(value interface{}) error {
	if value == nil {
		ns.Role, ns.Valid = "", false
		return nil
	}
	ns.Valid = true
	return ns.Role.Scan(value)
}

// Value implements the driver Valuer interface.
// An invalid (NULL) NullRole serializes as SQL NULL, otherwise as the
// role's string form.
func (ns NullRole) Value() (driver.Value, error) {
	if !ns.Valid {
		return nil, nil
	}
	return string(ns.Role), nil
}
|
||||
|
||||
// ApiKey is a row of the api_keys table: a labeled token owned by a user.
type ApiKey struct {
	ID        int32
	Key       string
	UserID    int32
	CreatedAt pgtype.Timestamp
	Label     string
}

// Artist is a row of the artists table. The display name lives in
// artist_aliases; see ArtistsWithName for the joined form.
type Artist struct {
	ID            int32
	MusicBrainzID *uuid.UUID
	Image         *uuid.UUID
	ImageSource   pgtype.Text
}

// ArtistAlias is one known name for an artist; IsPrimary marks the
// display name.
type ArtistAlias struct {
	ArtistID  int32
	Alias     string
	Source    string
	IsPrimary bool
}

// ArtistRelease links an artist to a release (many-to-many join table).
type ArtistRelease struct {
	ArtistID  int32
	ReleaseID int32
}

// ArtistTrack links an artist to a track (many-to-many join table).
type ArtistTrack struct {
	ArtistID int32
	TrackID  int32
}

// ArtistsWithName is a row of the artists_with_name relation — presumably a
// view of artists joined to their primary alias (verify against the schema).
type ArtistsWithName struct {
	ID            int32
	MusicBrainzID *uuid.UUID
	Image         *uuid.UUID
	ImageSource   pgtype.Text
	Name          string
}

// Listen is one playback event of a track by a user; Client is the
// nullable name of the submitting application.
type Listen struct {
	TrackID    int32
	ListenedAt time.Time
	Client     *string
	UserID     int32
}

// Release is a row of the releases table (an album/EP/single). The title
// lives in release_aliases; see ReleasesWithTitle for the joined form.
type Release struct {
	ID             int32
	MusicBrainzID  *uuid.UUID
	Image          *uuid.UUID
	VariousArtists bool
	ImageSource    pgtype.Text
}

// ReleaseAlias is one known title for a release; IsPrimary marks the
// display title.
type ReleaseAlias struct {
	ReleaseID int32
	Alias     string
	Source    string
	IsPrimary bool
}

// ReleasesWithTitle is a row of the releases_with_title relation — presumably
// a view of releases joined to their primary alias (verify against the schema).
type ReleasesWithTitle struct {
	ID             int32
	MusicBrainzID  *uuid.UUID
	Image          *uuid.UUID
	VariousArtists bool
	ImageSource    pgtype.Text
	Title          string
}

// Session is a server-side login session; Persistent distinguishes
// "remember me" sessions from browser-lifetime ones.
type Session struct {
	ID         uuid.UUID
	UserID     int32
	CreatedAt  time.Time
	ExpiresAt  time.Time
	Persistent bool
}

// Track is a row of the tracks table; Duration is in whole units (seconds,
// presumably — confirm against the ingest code). Title lives in track_aliases.
type Track struct {
	ID            int32
	MusicBrainzID *uuid.UUID
	Duration      int32
	ReleaseID     int32
}

// TrackAlias is one known title for a track; IsPrimary marks the display
// title. NOTE(review): field order (IsPrimary before Source) differs from
// ArtistAlias/ReleaseAlias — harmless, but reflects column order drift.
type TrackAlias struct {
	TrackID   int32
	Alias     string
	IsPrimary bool
	Source    string
}

// TracksWithTitle is a row of the tracks_with_title relation — presumably a
// view of tracks joined to their primary alias (verify against the schema).
type TracksWithTitle struct {
	ID            int32
	MusicBrainzID *uuid.UUID
	Duration      int32
	ReleaseID     int32
	Title         string
}

// User is an account; Password holds the hashed credential bytes.
type User struct {
	ID       int32
	Username string
	Role     Role
	Password []byte
}
|
||||
462
internal/repository/release.sql.go
Normal file
462
internal/repository/release.sql.go
Normal file
|
|
@ -0,0 +1,462 @@
|
|||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
// source: release.sql
|
||||
|
||||
package repository
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/jackc/pgx/v5/pgtype"
|
||||
)
|
||||
|
||||
// associateArtistToRelease inserts an artist↔release link; duplicate links
// are silently ignored via ON CONFLICT DO NOTHING.
const associateArtistToRelease = `-- name: AssociateArtistToRelease :exec
INSERT INTO artist_releases (artist_id, release_id)
VALUES ($1, $2)
ON CONFLICT DO NOTHING
`

// AssociateArtistToReleaseParams carries the two ids for the join row.
type AssociateArtistToReleaseParams struct {
	ArtistID  int32
	ReleaseID int32
}

// AssociateArtistToRelease links an artist to a release; idempotent.
func (q *Queries) AssociateArtistToRelease(ctx context.Context, arg AssociateArtistToReleaseParams) error {
	_, err := q.db.Exec(ctx, associateArtistToRelease, arg.ArtistID, arg.ReleaseID)
	return err
}
|
||||
|
||||
// countReleasesFromArtist counts releases linked to one artist through the
// artist_releases join table.
const countReleasesFromArtist = `-- name: CountReleasesFromArtist :one
SELECT COUNT(*)
FROM releases r
JOIN artist_releases ar ON r.id = ar.release_id
WHERE ar.artist_id = $1
`

// CountReleasesFromArtist returns how many releases are associated with the
// given artist.
func (q *Queries) CountReleasesFromArtist(ctx context.Context, artistID int32) (int64, error) {
	row := q.db.QueryRow(ctx, countReleasesFromArtist, artistID)
	var count int64
	err := row.Scan(&count)
	return count, err
}
|
||||
|
||||
// countTopReleases counts the DISTINCT releases that received at least one
// listen inside the time window — the total-pages denominator for
// GetTopReleasesPaginated.
const countTopReleases = `-- name: CountTopReleases :one
SELECT COUNT(DISTINCT r.id) AS total_count
FROM listens l
JOIN tracks t ON l.track_id = t.id
JOIN releases r ON t.release_id = r.id
WHERE l.listened_at BETWEEN $1 AND $2
`

// CountTopReleasesParams bounds the window: ListenedAt is the start ($1)
// and ListenedAt_2 the end ($2), both inclusive (BETWEEN).
type CountTopReleasesParams struct {
	ListenedAt   time.Time
	ListenedAt_2 time.Time
}

// CountTopReleases returns the number of distinct releases listened to
// within [ListenedAt, ListenedAt_2].
func (q *Queries) CountTopReleases(ctx context.Context, arg CountTopReleasesParams) (int64, error) {
	row := q.db.QueryRow(ctx, countTopReleases, arg.ListenedAt, arg.ListenedAt_2)
	var total_count int64
	err := row.Scan(&total_count)
	return total_count, err
}
|
||||
|
||||
// deleteRelease removes one release row by primary key.
const deleteRelease = `-- name: DeleteRelease :exec
DELETE FROM releases WHERE id = $1
`

// DeleteRelease deletes the release with the given id. Cascading of
// dependent rows (tracks, aliases, join rows) is left to the schema's FK
// rules — not visible here.
func (q *Queries) DeleteRelease(ctx context.Context, id int32) error {
	_, err := q.db.Exec(ctx, deleteRelease, id)
	return err
}
|
||||
|
||||
// deleteReleasesFromArtist deletes every release linked to the artist.
// NOTE(review): this removes a release if the artist appears on it at all,
// including releases shared with other artists — confirm that callers only
// use this when fully deleting an artist and that behavior is intended.
const deleteReleasesFromArtist = `-- name: DeleteReleasesFromArtist :exec
DELETE FROM releases r
USING artist_releases ar
WHERE ar.release_id = r.id
AND ar.artist_id = $1
`

// DeleteReleasesFromArtist deletes all releases associated with artistID.
func (q *Queries) DeleteReleasesFromArtist(ctx context.Context, artistID int32) error {
	_, err := q.db.Exec(ctx, deleteReleasesFromArtist, artistID)
	return err
}
|
||||
|
||||
// getRelease fetches one release (with its resolved title) by primary key.
const getRelease = `-- name: GetRelease :one
SELECT id, musicbrainz_id, image, various_artists, image_source, title FROM releases_with_title
WHERE id = $1 LIMIT 1
`

// GetRelease returns the release row with the given id from the
// releases_with_title relation.
func (q *Queries) GetRelease(ctx context.Context, id int32) (ReleasesWithTitle, error) {
	row := q.db.QueryRow(ctx, getRelease, id)
	var i ReleasesWithTitle
	err := row.Scan(
		&i.ID,
		&i.MusicBrainzID,
		&i.Image,
		&i.VariousArtists,
		&i.ImageSource,
		&i.Title,
	)
	return i, err
}
|
||||
|
||||
// getReleaseByArtistAndTitle looks up a release by its exact (case-sensitive)
// title together with one of its artists.
const getReleaseByArtistAndTitle = `-- name: GetReleaseByArtistAndTitle :one
SELECT r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title
FROM releases_with_title r
JOIN artist_releases ar ON r.id = ar.release_id
WHERE r.title = $1 AND ar.artist_id = $2
LIMIT 1
`

// GetReleaseByArtistAndTitleParams carries the exact title ($1) and the
// artist id ($2).
type GetReleaseByArtistAndTitleParams struct {
	Title    string
	ArtistID int32
}

// GetReleaseByArtistAndTitle returns at most one matching release; the
// underlying driver reports no-rows as an error from Scan.
func (q *Queries) GetReleaseByArtistAndTitle(ctx context.Context, arg GetReleaseByArtistAndTitleParams) (ReleasesWithTitle, error) {
	row := q.db.QueryRow(ctx, getReleaseByArtistAndTitle, arg.Title, arg.ArtistID)
	var i ReleasesWithTitle
	err := row.Scan(
		&i.ID,
		&i.MusicBrainzID,
		&i.Image,
		&i.VariousArtists,
		&i.ImageSource,
		&i.Title,
	)
	return i, err
}
|
||||
|
||||
// getReleaseByArtistAndTitles is the multi-title variant of
// getReleaseByArtistAndTitle: it matches if the release title equals ANY of
// the candidate strings (e.g. alternative spellings of the same album).
const getReleaseByArtistAndTitles = `-- name: GetReleaseByArtistAndTitles :one
SELECT r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title
FROM releases_with_title r
JOIN artist_releases ar ON r.id = ar.release_id
WHERE r.title = ANY ($1::TEXT[]) AND ar.artist_id = $2
LIMIT 1
`

// GetReleaseByArtistAndTitlesParams carries the candidate titles ($1, a
// text array — sqlc could not name it, hence Column1) and the artist id ($2).
type GetReleaseByArtistAndTitlesParams struct {
	Column1  []string
	ArtistID int32
}

// GetReleaseByArtistAndTitles returns the first release by the artist whose
// title matches any candidate. With LIMIT 1 and no ORDER BY, which match wins
// when several titles hit is unspecified.
func (q *Queries) GetReleaseByArtistAndTitles(ctx context.Context, arg GetReleaseByArtistAndTitlesParams) (ReleasesWithTitle, error) {
	row := q.db.QueryRow(ctx, getReleaseByArtistAndTitles, arg.Column1, arg.ArtistID)
	var i ReleasesWithTitle
	err := row.Scan(
		&i.ID,
		&i.MusicBrainzID,
		&i.Image,
		&i.VariousArtists,
		&i.ImageSource,
		&i.Title,
	)
	return i, err
}
|
||||
|
||||
// getReleaseByImageID finds the release that owns a given stored image
// (reverse lookup from image uuid to release).
const getReleaseByImageID = `-- name: GetReleaseByImageID :one
SELECT id, musicbrainz_id, image, various_artists, image_source FROM releases
WHERE image = $1 LIMIT 1
`

// GetReleaseByImageID returns the release whose image column equals the
// given uuid. Queries the bare releases table, so no Title is available.
func (q *Queries) GetReleaseByImageID(ctx context.Context, image *uuid.UUID) (Release, error) {
	row := q.db.QueryRow(ctx, getReleaseByImageID, image)
	var i Release
	err := row.Scan(
		&i.ID,
		&i.MusicBrainzID,
		&i.Image,
		&i.VariousArtists,
		&i.ImageSource,
	)
	return i, err
}
|
||||
|
||||
// getReleaseByMbzID fetches one release (with title) by its MusicBrainz
// release id.
const getReleaseByMbzID = `-- name: GetReleaseByMbzID :one
SELECT id, musicbrainz_id, image, various_artists, image_source, title FROM releases_with_title
WHERE musicbrainz_id = $1 LIMIT 1
`

// GetReleaseByMbzID returns the release whose musicbrainz_id matches.
func (q *Queries) GetReleaseByMbzID(ctx context.Context, musicbrainzID *uuid.UUID) (ReleasesWithTitle, error) {
	row := q.db.QueryRow(ctx, getReleaseByMbzID, musicbrainzID)
	var i ReleasesWithTitle
	err := row.Scan(
		&i.ID,
		&i.MusicBrainzID,
		&i.Image,
		&i.VariousArtists,
		&i.ImageSource,
		&i.Title,
	)
	return i, err
}
|
||||
|
||||
// getReleasesWithoutImages pages through releases that have no cover image,
// using keyset pagination (r.id > $2 ... ORDER BY r.id LIMIT $1). The
// subquery aggregates each release's artists into a JSON array so callers
// can build image-search queries without extra round trips.
const getReleasesWithoutImages = `-- name: GetReleasesWithoutImages :many
SELECT
r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title,
(
SELECT json_agg(DISTINCT jsonb_build_object('id', a.id, 'name', a.name))
FROM artists_with_name a
JOIN artist_releases ar ON a.id = ar.artist_id
WHERE ar.release_id = r.id
) AS artists
FROM releases_with_title r
WHERE r.image IS NULL
AND r.id > $2
ORDER BY r.id ASC
LIMIT $1
`

// GetReleasesWithoutImagesParams: Limit is the page size ($1); ID is the
// keyset cursor — pass the last id from the previous page, or 0 to start.
type GetReleasesWithoutImagesParams struct {
	Limit int32
	ID    int32
}

// GetReleasesWithoutImagesRow is one imageless release plus its artists as
// raw JSON ([{"id":..,"name":..},...]); Artists is NULL-able JSON bytes.
type GetReleasesWithoutImagesRow struct {
	ID             int32
	MusicBrainzID  *uuid.UUID
	Image          *uuid.UUID
	VariousArtists bool
	ImageSource    pgtype.Text
	Title          string
	Artists        []byte
}

// GetReleasesWithoutImages returns the next page (by ascending id) of
// releases lacking an image.
func (q *Queries) GetReleasesWithoutImages(ctx context.Context, arg GetReleasesWithoutImagesParams) ([]GetReleasesWithoutImagesRow, error) {
	rows, err := q.db.Query(ctx, getReleasesWithoutImages, arg.Limit, arg.ID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetReleasesWithoutImagesRow
	for rows.Next() {
		var i GetReleasesWithoutImagesRow
		if err := rows.Scan(
			&i.ID,
			&i.MusicBrainzID,
			&i.Image,
			&i.VariousArtists,
			&i.ImageSource,
			&i.Title,
			&i.Artists,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// getTopReleasesFromArtist ranks one artist's releases by listen count inside
// a time window, paginated. Each row also carries ALL artists on the release
// (not just the queried one) as an aggregated JSON array.
const getTopReleasesFromArtist = `-- name: GetTopReleasesFromArtist :many
SELECT
r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title,
COUNT(*) AS listen_count,
(
SELECT json_agg(DISTINCT jsonb_build_object('id', a.id, 'name', a.name))
FROM artists_with_name a
JOIN artist_releases ar ON ar.artist_id = a.id
WHERE ar.release_id = r.id
) AS artists
FROM listens l
JOIN tracks t ON l.track_id = t.id
JOIN releases_with_title r ON t.release_id = r.id
JOIN artist_releases ar ON r.id = ar.release_id
WHERE ar.artist_id = $5
AND l.listened_at BETWEEN $1 AND $2
GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source
ORDER BY listen_count DESC
LIMIT $3 OFFSET $4
`

// GetTopReleasesFromArtistParams: window start/end ($1/$2, inclusive),
// page size/offset ($3/$4), and the artist ($5).
type GetTopReleasesFromArtistParams struct {
	ListenedAt   time.Time
	ListenedAt_2 time.Time
	Limit        int32
	Offset       int32
	ArtistID     int32
}

// GetTopReleasesFromArtistRow is one ranked release; Artists is the raw
// JSON array of contributing artists.
type GetTopReleasesFromArtistRow struct {
	ID             int32
	MusicBrainzID  *uuid.UUID
	Image          *uuid.UUID
	VariousArtists bool
	ImageSource    pgtype.Text
	Title          string
	ListenCount    int64
	Artists        []byte
}

// GetTopReleasesFromArtist returns the artist's releases ordered by listen
// count (descending) within the window.
func (q *Queries) GetTopReleasesFromArtist(ctx context.Context, arg GetTopReleasesFromArtistParams) ([]GetTopReleasesFromArtistRow, error) {
	rows, err := q.db.Query(ctx, getTopReleasesFromArtist,
		arg.ListenedAt,
		arg.ListenedAt_2,
		arg.Limit,
		arg.Offset,
		arg.ArtistID,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetTopReleasesFromArtistRow
	for rows.Next() {
		var i GetTopReleasesFromArtistRow
		if err := rows.Scan(
			&i.ID,
			&i.MusicBrainzID,
			&i.Image,
			&i.VariousArtists,
			&i.ImageSource,
			&i.Title,
			&i.ListenCount,
			&i.Artists,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// getTopReleasesPaginated ranks ALL releases by listen count inside a time
// window with LIMIT/OFFSET pagination; CountTopReleases provides the total
// for the same window. Artists are aggregated per release as JSON.
const getTopReleasesPaginated = `-- name: GetTopReleasesPaginated :many
SELECT
r.id, r.musicbrainz_id, r.image, r.various_artists, r.image_source, r.title,
COUNT(*) AS listen_count,
(
SELECT json_agg(DISTINCT jsonb_build_object('id', a.id, 'name', a.name))
FROM artists_with_name a
JOIN artist_releases ar ON ar.artist_id = a.id
WHERE ar.release_id = r.id
) AS artists
FROM listens l
JOIN tracks t ON l.track_id = t.id
JOIN releases_with_title r ON t.release_id = r.id
WHERE l.listened_at BETWEEN $1 AND $2
GROUP BY r.id, r.title, r.musicbrainz_id, r.various_artists, r.image, r.image_source
ORDER BY listen_count DESC
LIMIT $3 OFFSET $4
`

// GetTopReleasesPaginatedParams: window start/end ($1/$2, inclusive) and
// page size/offset ($3/$4).
type GetTopReleasesPaginatedParams struct {
	ListenedAt   time.Time
	ListenedAt_2 time.Time
	Limit        int32
	Offset       int32
}

// GetTopReleasesPaginatedRow is one ranked release; Artists is the raw JSON
// array of contributing artists.
type GetTopReleasesPaginatedRow struct {
	ID             int32
	MusicBrainzID  *uuid.UUID
	Image          *uuid.UUID
	VariousArtists bool
	ImageSource    pgtype.Text
	Title          string
	ListenCount    int64
	Artists        []byte
}

// GetTopReleasesPaginated returns one page of releases ordered by listen
// count (descending) within the window.
func (q *Queries) GetTopReleasesPaginated(ctx context.Context, arg GetTopReleasesPaginatedParams) ([]GetTopReleasesPaginatedRow, error) {
	rows, err := q.db.Query(ctx, getTopReleasesPaginated,
		arg.ListenedAt,
		arg.ListenedAt_2,
		arg.Limit,
		arg.Offset,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetTopReleasesPaginatedRow
	for rows.Next() {
		var i GetTopReleasesPaginatedRow
		if err := rows.Scan(
			&i.ID,
			&i.MusicBrainzID,
			&i.Image,
			&i.VariousArtists,
			&i.ImageSource,
			&i.Title,
			&i.ListenCount,
			&i.Artists,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// insertRelease creates a release row and returns it (RETURNING gives back
// the generated id alongside the inserted columns).
const insertRelease = `-- name: InsertRelease :one
INSERT INTO releases (musicbrainz_id, various_artists, image, image_source)
VALUES ($1, $2, $3, $4)
RETURNING id, musicbrainz_id, image, various_artists, image_source
`

// InsertReleaseParams are the nullable-by-pointer insert columns; the title
// is stored separately in release_aliases, not here.
type InsertReleaseParams struct {
	MusicBrainzID  *uuid.UUID
	VariousArtists bool
	Image          *uuid.UUID
	ImageSource    pgtype.Text
}

// InsertRelease inserts a release and returns the stored row, including its
// newly assigned id.
func (q *Queries) InsertRelease(ctx context.Context, arg InsertReleaseParams) (Release, error) {
	row := q.db.QueryRow(ctx, insertRelease,
		arg.MusicBrainzID,
		arg.VariousArtists,
		arg.Image,
		arg.ImageSource,
	)
	var i Release
	err := row.Scan(
		&i.ID,
		&i.MusicBrainzID,
		&i.Image,
		&i.VariousArtists,
		&i.ImageSource,
	)
	return i, err
}
|
||||
|
||||
// updateReleaseImage sets (or clears, via nil) a release's cover image and
// records where the image came from.
const updateReleaseImage = `-- name: UpdateReleaseImage :exec
UPDATE releases SET image = $2, image_source = $3
WHERE id = $1
`

// UpdateReleaseImageParams: target release ($1), new image uuid ($2,
// nullable), and the image's source ($3).
type UpdateReleaseImageParams struct {
	ID          int32
	Image       *uuid.UUID
	ImageSource pgtype.Text
}

// UpdateReleaseImage updates the image columns of one release.
func (q *Queries) UpdateReleaseImage(ctx context.Context, arg UpdateReleaseImageParams) error {
	_, err := q.db.Exec(ctx, updateReleaseImage, arg.ID, arg.Image, arg.ImageSource)
	return err
}
|
||||
|
||||
// updateReleaseMbzID sets (or clears, via nil) a release's MusicBrainz id.
const updateReleaseMbzID = `-- name: UpdateReleaseMbzID :exec
UPDATE releases SET musicbrainz_id = $2
WHERE id = $1
`

// UpdateReleaseMbzIDParams: target release ($1) and the new MusicBrainz id
// ($2, nullable).
type UpdateReleaseMbzIDParams struct {
	ID            int32
	MusicBrainzID *uuid.UUID
}

// UpdateReleaseMbzID updates the musicbrainz_id of one release.
func (q *Queries) UpdateReleaseMbzID(ctx context.Context, arg UpdateReleaseMbzIDParams) error {
	_, err := q.db.Exec(ctx, updateReleaseMbzID, arg.ID, arg.MusicBrainzID)
	return err
}
|
||||
431
internal/repository/search.sql.go
Normal file
431
internal/repository/search.sql.go
Normal file
|
|
@ -0,0 +1,431 @@
|
|||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
// source: search.sql
|
||||
|
||||
package repository
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/jackc/pgx/v5/pgtype"
|
||||
)
|
||||
|
||||
// searchArtists does fuzzy artist search via pg_trgm similarity() over every
// alias, keeping only each artist's best-scoring alias (ROW_NUMBER ... rn = 1)
// so an artist never appears twice. 0.28 is the hard-coded minimum trigram
// similarity; requires the pg_trgm extension.
const searchArtists = `-- name: SearchArtists :many
SELECT id, name, musicbrainz_id, image, score
FROM (
SELECT
a.id,
a.name,
a.musicbrainz_id,
a.image,
similarity(aa.alias, $1) AS score,
ROW_NUMBER() OVER (PARTITION BY a.id ORDER BY similarity(aa.alias, $1) DESC) AS rn
FROM artist_aliases aa
JOIN artists_with_name a ON aa.artist_id = a.id
WHERE similarity(aa.alias, $1) > 0.28
) ranked
WHERE rn = 1
ORDER BY score DESC
LIMIT $2
`

// SearchArtistsParams: Similarity is the user's search text ($1); Limit caps
// the result count ($2).
type SearchArtistsParams struct {
	Similarity string
	Limit      int32
}

// SearchArtistsRow is one matched artist with its best trigram score.
type SearchArtistsRow struct {
	ID            int32
	Name          string
	MusicBrainzID *uuid.UUID
	Image         *uuid.UUID
	Score         float32
}

// SearchArtists returns artists whose best alias similarity exceeds the
// threshold, ordered best-match first.
func (q *Queries) SearchArtists(ctx context.Context, arg SearchArtistsParams) ([]SearchArtistsRow, error) {
	rows, err := q.db.Query(ctx, searchArtists, arg.Similarity, arg.Limit)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []SearchArtistsRow
	for rows.Next() {
		var i SearchArtistsRow
		if err := rows.Scan(
			&i.ID,
			&i.Name,
			&i.MusicBrainzID,
			&i.Image,
			&i.Score,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// searchArtistsBySubstring is the prefix-match fallback to searchArtists:
// case-insensitive "alias starts with $1" via ILIKE, one row per artist.
// All matches get the constant score 1.0, so ORDER BY score is effectively a
// no-op and result order is unspecified beyond the LIMIT.
// NOTE(review): % and _ in the user input are ILIKE wildcards and are not
// escaped here — confirm that is acceptable for search input.
const searchArtistsBySubstring = `-- name: SearchArtistsBySubstring :many
SELECT id, name, musicbrainz_id, image, score
FROM (
SELECT
a.id,
a.name,
a.musicbrainz_id,
a.image,
1.0 AS score, -- why
ROW_NUMBER() OVER (PARTITION BY a.id ORDER BY aa.alias) AS rn
FROM artist_aliases aa
JOIN artists_with_name a ON aa.artist_id = a.id
WHERE aa.alias ILIKE $1 || '%'
) ranked
WHERE rn = 1
ORDER BY score DESC
LIMIT $2
`

// SearchArtistsBySubstringParams: Column1 is the search prefix ($1);
// Limit caps the result count ($2).
type SearchArtistsBySubstringParams struct {
	Column1 pgtype.Text
	Limit   int32
}

// SearchArtistsBySubstringRow is one matched artist; Score is always 1.0.
type SearchArtistsBySubstringRow struct {
	ID            int32
	Name          string
	MusicBrainzID *uuid.UUID
	Image         *uuid.UUID
	Score         float64
}

// SearchArtistsBySubstring returns artists having an alias with the given
// case-insensitive prefix.
func (q *Queries) SearchArtistsBySubstring(ctx context.Context, arg SearchArtistsBySubstringParams) ([]SearchArtistsBySubstringRow, error) {
	rows, err := q.db.Query(ctx, searchArtistsBySubstring, arg.Column1, arg.Limit)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []SearchArtistsBySubstringRow
	for rows.Next() {
		var i SearchArtistsBySubstringRow
		if err := rows.Scan(
			&i.ID,
			&i.Name,
			&i.MusicBrainzID,
			&i.Image,
			&i.Score,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// searchReleases does fuzzy release search via pg_trgm similarity() over
// release aliases, deduplicated to each release's best alias, with the
// release's artists aggregated as JSON. Same 0.28 threshold as searchArtists.
const searchReleases = `-- name: SearchReleases :many
SELECT
ranked.id,
ranked.title,
ranked.musicbrainz_id,
ranked.image,
ranked.various_artists,
ranked.score,
(
SELECT json_agg(DISTINCT jsonb_build_object('id', a.id, 'name', a.name))
FROM artists_with_name a
JOIN artist_releases ar ON ar.artist_id = a.id
WHERE ar.release_id = ranked.id
) AS artists
FROM (
SELECT
r.id,
r.title,
r.musicbrainz_id,
r.image,
r.various_artists,
similarity(ra.alias, $1) AS score,
ROW_NUMBER() OVER (PARTITION BY r.id ORDER BY similarity(ra.alias, $1) DESC) AS rn
FROM release_aliases ra
JOIN releases_with_title r ON ra.release_id = r.id
WHERE similarity(ra.alias, $1) > 0.28
) ranked
WHERE rn = 1
ORDER BY score DESC, title
LIMIT $2
`

// SearchReleasesParams: Similarity is the search text ($1); Limit caps the
// result count ($2).
type SearchReleasesParams struct {
	Similarity string
	Limit      int32
}

// SearchReleasesRow is one matched release with its best trigram score and
// its artists as raw JSON.
type SearchReleasesRow struct {
	ID             int32
	Title          string
	MusicBrainzID  *uuid.UUID
	Image          *uuid.UUID
	VariousArtists bool
	Score          float32
	Artists        []byte
}

// SearchReleases returns releases ordered by score (descending), title.
func (q *Queries) SearchReleases(ctx context.Context, arg SearchReleasesParams) ([]SearchReleasesRow, error) {
	rows, err := q.db.Query(ctx, searchReleases, arg.Similarity, arg.Limit)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []SearchReleasesRow
	for rows.Next() {
		var i SearchReleasesRow
		if err := rows.Scan(
			&i.ID,
			&i.Title,
			&i.MusicBrainzID,
			&i.Image,
			&i.VariousArtists,
			&i.Score,
			&i.Artists,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// searchReleasesBySubstring is the case-insensitive prefix variant of
// searchReleases (alias ILIKE $1 || '%'). Score is the constant 1.0, so the
// effective ordering is by title. NOTE(review): % and _ in the input act as
// unescaped ILIKE wildcards.
const searchReleasesBySubstring = `-- name: SearchReleasesBySubstring :many
SELECT
ranked.id,
ranked.title,
ranked.musicbrainz_id,
ranked.image,
ranked.various_artists,
ranked.score,
(
SELECT json_agg(DISTINCT jsonb_build_object('id', a.id, 'name', a.name))
FROM artists_with_name a
JOIN artist_releases ar ON ar.artist_id = a.id
WHERE ar.release_id = ranked.id
) AS artists
FROM (
SELECT
r.id,
r.title,
r.musicbrainz_id,
r.image,
r.various_artists,
1.0 AS score, -- idk why
ROW_NUMBER() OVER (PARTITION BY r.id ORDER BY ra.alias) AS rn
FROM release_aliases ra
JOIN releases_with_title r ON ra.release_id = r.id
WHERE ra.alias ILIKE $1 || '%'
) ranked
WHERE rn = 1
ORDER BY score DESC, title
LIMIT $2
`

// SearchReleasesBySubstringParams: Column1 is the search prefix ($1);
// Limit caps the result count ($2).
type SearchReleasesBySubstringParams struct {
	Column1 pgtype.Text
	Limit   int32
}

// SearchReleasesBySubstringRow is one matched release; Score is always 1.0
// and Artists is the raw JSON array of contributing artists.
type SearchReleasesBySubstringRow struct {
	ID             int32
	Title          string
	MusicBrainzID  *uuid.UUID
	Image          *uuid.UUID
	VariousArtists bool
	Score          float64
	Artists        []byte
}

// SearchReleasesBySubstring returns releases having an alias with the given
// case-insensitive prefix, ordered by title.
func (q *Queries) SearchReleasesBySubstring(ctx context.Context, arg SearchReleasesBySubstringParams) ([]SearchReleasesBySubstringRow, error) {
	rows, err := q.db.Query(ctx, searchReleasesBySubstring, arg.Column1, arg.Limit)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []SearchReleasesBySubstringRow
	for rows.Next() {
		var i SearchReleasesBySubstringRow
		if err := rows.Scan(
			&i.ID,
			&i.Title,
			&i.MusicBrainzID,
			&i.Image,
			&i.VariousArtists,
			&i.Score,
			&i.Artists,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// searchTracks does fuzzy track search via pg_trgm similarity() over track
// aliases, deduplicated to each track's best alias, joining releases to
// surface the cover image and aggregating the track's artists as JSON.
// Same 0.28 threshold as the other fuzzy searches.
const searchTracks = `-- name: SearchTracks :many
SELECT
ranked.id,
ranked.title,
ranked.musicbrainz_id,
ranked.release_id,
ranked.image,
ranked.score,
(
SELECT json_agg(json_build_object('id', a.id, 'name', a.name))
FROM artist_tracks at
JOIN artists_with_name a ON a.id = at.artist_id
WHERE at.track_id = ranked.id
) AS artists
FROM (
SELECT
t.id,
t.title,
t.musicbrainz_id,
t.release_id,
r.image,
similarity(ta.alias, $1) AS score,
ROW_NUMBER() OVER (PARTITION BY t.id ORDER BY similarity(ta.alias, $1) DESC) AS rn
FROM track_aliases ta
JOIN tracks_with_title t ON ta.track_id = t.id
JOIN releases r ON t.release_id = r.id
WHERE similarity(ta.alias, $1) > 0.28
) ranked
WHERE rn = 1
ORDER BY score DESC, title
LIMIT $2
`

// SearchTracksParams: Similarity is the search text ($1); Limit caps the
// result count ($2).
type SearchTracksParams struct {
	Similarity string
	Limit      int32
}

// SearchTracksRow is one matched track with its release's image, its best
// trigram score, and its artists as raw JSON.
type SearchTracksRow struct {
	ID            int32
	Title         string
	MusicBrainzID *uuid.UUID
	ReleaseID     int32
	Image         *uuid.UUID
	Score         float32
	Artists       []byte
}

// SearchTracks returns tracks ordered by score (descending), title.
func (q *Queries) SearchTracks(ctx context.Context, arg SearchTracksParams) ([]SearchTracksRow, error) {
	rows, err := q.db.Query(ctx, searchTracks, arg.Similarity, arg.Limit)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []SearchTracksRow
	for rows.Next() {
		var i SearchTracksRow
		if err := rows.Scan(
			&i.ID,
			&i.Title,
			&i.MusicBrainzID,
			&i.ReleaseID,
			&i.Image,
			&i.Score,
			&i.Artists,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
|
||||
// searchTracksBySubstring is the case-insensitive prefix variant of
// searchTracks (alias ILIKE $1 || '%'). Score is the constant 1.0, so the
// effective ordering is by title. NOTE(review): % and _ in the input act as
// unescaped ILIKE wildcards.
const searchTracksBySubstring = `-- name: SearchTracksBySubstring :many
SELECT
ranked.id,
ranked.title,
ranked.musicbrainz_id,
ranked.release_id,
ranked.image,
ranked.score,
(
SELECT json_agg(json_build_object('id', a.id, 'name', a.name))
FROM artist_tracks at
JOIN artists_with_name a ON a.id = at.artist_id
WHERE at.track_id = ranked.id
) AS artists
FROM (
SELECT
t.id,
t.title,
t.musicbrainz_id,
t.release_id,
r.image,
1.0 AS score,
ROW_NUMBER() OVER (PARTITION BY t.id ORDER BY ta.alias) AS rn
FROM track_aliases ta
JOIN tracks_with_title t ON ta.track_id = t.id
JOIN releases r ON t.release_id = r.id
WHERE ta.alias ILIKE $1 || '%'
) ranked
WHERE rn = 1
ORDER BY score DESC, title
LIMIT $2
`

// SearchTracksBySubstringParams: Column1 is the search prefix ($1);
// Limit caps the result count ($2).
type SearchTracksBySubstringParams struct {
	Column1 pgtype.Text
	Limit   int32
}

// SearchTracksBySubstringRow is one matched track; Score is always 1.0 and
// Artists is the raw JSON array of contributing artists.
type SearchTracksBySubstringRow struct {
	ID            int32
	Title         string
	MusicBrainzID *uuid.UUID
	ReleaseID     int32
	Image         *uuid.UUID
	Score         float64
	Artists       []byte
}

// SearchTracksBySubstring returns tracks having an alias with the given
// case-insensitive prefix, ordered by title.
func (q *Queries) SearchTracksBySubstring(ctx context.Context, arg SearchTracksBySubstringParams) ([]SearchTracksBySubstringRow, error) {
	rows, err := q.db.Query(ctx, searchTracksBySubstring, arg.Column1, arg.Limit)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []SearchTracksBySubstringRow
	for rows.Next() {
		var i SearchTracksBySubstringRow
		if err := rows.Scan(
			&i.ID,
			&i.Title,
			&i.MusicBrainzID,
			&i.ReleaseID,
			&i.Image,
			&i.Score,
			&i.Artists,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
||||
120
internal/repository/sessions.sql.go
Normal file
120
internal/repository/sessions.sql.go
Normal file
|
|
@ -0,0 +1,120 @@
|
|||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
// source: sessions.sql
|
||||
|
||||
package repository
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
const deleteSession = `-- name: DeleteSession :exec
|
||||
DELETE FROM sessions WHERE id = $1
|
||||
`
|
||||
|
||||
func (q *Queries) DeleteSession(ctx context.Context, id uuid.UUID) error {
|
||||
_, err := q.db.Exec(ctx, deleteSession, id)
|
||||
return err
|
||||
}
|
||||
|
||||
const getSession = `-- name: GetSession :one
|
||||
SELECT id, user_id, created_at, expires_at, persistent FROM sessions WHERE id = $1 AND expires_at > NOW()
|
||||
`
|
||||
|
||||
func (q *Queries) GetSession(ctx context.Context, id uuid.UUID) (Session, error) {
|
||||
row := q.db.QueryRow(ctx, getSession, id)
|
||||
var i Session
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.UserID,
|
||||
&i.CreatedAt,
|
||||
&i.ExpiresAt,
|
||||
&i.Persistent,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getUserBySession = `-- name: GetUserBySession :one
|
||||
SELECT u.id, username, role, password, s.id, user_id, created_at, expires_at, persistent
|
||||
FROM users u
|
||||
JOIN sessions s ON u.id = s.user_id
|
||||
WHERE s.id = $1
|
||||
`
|
||||
|
||||
type GetUserBySessionRow struct {
|
||||
ID int32
|
||||
Username string
|
||||
Role Role
|
||||
Password []byte
|
||||
ID_2 uuid.UUID
|
||||
UserID int32
|
||||
CreatedAt time.Time
|
||||
ExpiresAt time.Time
|
||||
Persistent bool
|
||||
}
|
||||
|
||||
func (q *Queries) GetUserBySession(ctx context.Context, id uuid.UUID) (GetUserBySessionRow, error) {
|
||||
row := q.db.QueryRow(ctx, getUserBySession, id)
|
||||
var i GetUserBySessionRow
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.Username,
|
||||
&i.Role,
|
||||
&i.Password,
|
||||
&i.ID_2,
|
||||
&i.UserID,
|
||||
&i.CreatedAt,
|
||||
&i.ExpiresAt,
|
||||
&i.Persistent,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const insertSession = `-- name: InsertSession :one
|
||||
INSERT INTO sessions (id, user_id, expires_at, persistent)
|
||||
VALUES ($1, $2, $3, $4)
|
||||
RETURNING id, user_id, created_at, expires_at, persistent
|
||||
`
|
||||
|
||||
type InsertSessionParams struct {
|
||||
ID uuid.UUID
|
||||
UserID int32
|
||||
ExpiresAt time.Time
|
||||
Persistent bool
|
||||
}
|
||||
|
||||
func (q *Queries) InsertSession(ctx context.Context, arg InsertSessionParams) (Session, error) {
|
||||
row := q.db.QueryRow(ctx, insertSession,
|
||||
arg.ID,
|
||||
arg.UserID,
|
||||
arg.ExpiresAt,
|
||||
arg.Persistent,
|
||||
)
|
||||
var i Session
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.UserID,
|
||||
&i.CreatedAt,
|
||||
&i.ExpiresAt,
|
||||
&i.Persistent,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const updateSessionExpiry = `-- name: UpdateSessionExpiry :exec
|
||||
UPDATE sessions SET expires_at = $2 WHERE id = $1
|
||||
`
|
||||
|
||||
type UpdateSessionExpiryParams struct {
|
||||
ID uuid.UUID
|
||||
ExpiresAt time.Time
|
||||
}
|
||||
|
||||
func (q *Queries) UpdateSessionExpiry(ctx context.Context, arg UpdateSessionExpiryParams) error {
|
||||
_, err := q.db.Exec(ctx, updateSessionExpiry, arg.ID, arg.ExpiresAt)
|
||||
return err
|
||||
}
|
||||
504
internal/repository/track.sql.go
Normal file
504
internal/repository/track.sql.go
Normal file
|
|
@ -0,0 +1,504 @@
|
|||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
// source: track.sql
|
||||
|
||||
package repository
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
const associateArtistToTrack = `-- name: AssociateArtistToTrack :exec
|
||||
INSERT INTO artist_tracks (artist_id, track_id)
|
||||
VALUES ($1, $2)
|
||||
ON CONFLICT DO NOTHING
|
||||
`
|
||||
|
||||
type AssociateArtistToTrackParams struct {
|
||||
ArtistID int32
|
||||
TrackID int32
|
||||
}
|
||||
|
||||
func (q *Queries) AssociateArtistToTrack(ctx context.Context, arg AssociateArtistToTrackParams) error {
|
||||
_, err := q.db.Exec(ctx, associateArtistToTrack, arg.ArtistID, arg.TrackID)
|
||||
return err
|
||||
}
|
||||
|
||||
const countTopTracks = `-- name: CountTopTracks :one
|
||||
SELECT COUNT(DISTINCT l.track_id) AS total_count
|
||||
FROM listens l
|
||||
WHERE l.listened_at BETWEEN $1 AND $2
|
||||
`
|
||||
|
||||
type CountTopTracksParams struct {
|
||||
ListenedAt time.Time
|
||||
ListenedAt_2 time.Time
|
||||
}
|
||||
|
||||
func (q *Queries) CountTopTracks(ctx context.Context, arg CountTopTracksParams) (int64, error) {
|
||||
row := q.db.QueryRow(ctx, countTopTracks, arg.ListenedAt, arg.ListenedAt_2)
|
||||
var total_count int64
|
||||
err := row.Scan(&total_count)
|
||||
return total_count, err
|
||||
}
|
||||
|
||||
const countTopTracksByArtist = `-- name: CountTopTracksByArtist :one
|
||||
SELECT COUNT(DISTINCT l.track_id) AS total_count
|
||||
FROM listens l
|
||||
JOIN artist_tracks at ON l.track_id = at.track_id
|
||||
WHERE l.listened_at BETWEEN $1 AND $2
|
||||
AND at.artist_id = $3
|
||||
`
|
||||
|
||||
type CountTopTracksByArtistParams struct {
|
||||
ListenedAt time.Time
|
||||
ListenedAt_2 time.Time
|
||||
ArtistID int32
|
||||
}
|
||||
|
||||
func (q *Queries) CountTopTracksByArtist(ctx context.Context, arg CountTopTracksByArtistParams) (int64, error) {
|
||||
row := q.db.QueryRow(ctx, countTopTracksByArtist, arg.ListenedAt, arg.ListenedAt_2, arg.ArtistID)
|
||||
var total_count int64
|
||||
err := row.Scan(&total_count)
|
||||
return total_count, err
|
||||
}
|
||||
|
||||
const countTopTracksByRelease = `-- name: CountTopTracksByRelease :one
|
||||
SELECT COUNT(DISTINCT l.track_id) AS total_count
|
||||
FROM listens l
|
||||
JOIN tracks t ON l.track_id = t.id
|
||||
WHERE l.listened_at BETWEEN $1 AND $2
|
||||
AND t.release_id = $3
|
||||
`
|
||||
|
||||
type CountTopTracksByReleaseParams struct {
|
||||
ListenedAt time.Time
|
||||
ListenedAt_2 time.Time
|
||||
ReleaseID int32
|
||||
}
|
||||
|
||||
func (q *Queries) CountTopTracksByRelease(ctx context.Context, arg CountTopTracksByReleaseParams) (int64, error) {
|
||||
row := q.db.QueryRow(ctx, countTopTracksByRelease, arg.ListenedAt, arg.ListenedAt_2, arg.ReleaseID)
|
||||
var total_count int64
|
||||
err := row.Scan(&total_count)
|
||||
return total_count, err
|
||||
}
|
||||
|
||||
const deleteTrack = `-- name: DeleteTrack :exec
|
||||
DELETE FROM tracks WHERE id = $1
|
||||
`
|
||||
|
||||
func (q *Queries) DeleteTrack(ctx context.Context, id int32) error {
|
||||
_, err := q.db.Exec(ctx, deleteTrack, id)
|
||||
return err
|
||||
}
|
||||
|
||||
const getAllTracksFromArtist = `-- name: GetAllTracksFromArtist :many
|
||||
SELECT t.id, t.musicbrainz_id, t.duration, t.release_id, t.title
|
||||
FROM tracks_with_title t
|
||||
JOIN artist_tracks at ON t.id = at.track_id
|
||||
WHERE at.artist_id = $1
|
||||
`
|
||||
|
||||
func (q *Queries) GetAllTracksFromArtist(ctx context.Context, artistID int32) ([]TracksWithTitle, error) {
|
||||
rows, err := q.db.Query(ctx, getAllTracksFromArtist, artistID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []TracksWithTitle
|
||||
for rows.Next() {
|
||||
var i TracksWithTitle
|
||||
if err := rows.Scan(
|
||||
&i.ID,
|
||||
&i.MusicBrainzID,
|
||||
&i.Duration,
|
||||
&i.ReleaseID,
|
||||
&i.Title,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const getTopTracksByArtistPaginated = `-- name: GetTopTracksByArtistPaginated :many
|
||||
SELECT
|
||||
t.id,
|
||||
t.title,
|
||||
t.musicbrainz_id,
|
||||
t.release_id,
|
||||
r.image,
|
||||
COUNT(*) AS listen_count,
|
||||
(
|
||||
SELECT json_agg(json_build_object('id', a.id, 'name', a.name))
|
||||
FROM artist_tracks at2
|
||||
JOIN artists_with_name a ON a.id = at2.artist_id
|
||||
WHERE at2.track_id = t.id
|
||||
) AS artists
|
||||
FROM listens l
|
||||
JOIN tracks_with_title t ON l.track_id = t.id
|
||||
JOIN releases r ON t.release_id = r.id
|
||||
JOIN artist_tracks at ON at.track_id = t.id
|
||||
WHERE l.listened_at BETWEEN $1 AND $2
|
||||
AND at.artist_id = $5
|
||||
GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image
|
||||
ORDER BY listen_count DESC
|
||||
LIMIT $3 OFFSET $4
|
||||
`
|
||||
|
||||
type GetTopTracksByArtistPaginatedParams struct {
|
||||
ListenedAt time.Time
|
||||
ListenedAt_2 time.Time
|
||||
Limit int32
|
||||
Offset int32
|
||||
ArtistID int32
|
||||
}
|
||||
|
||||
type GetTopTracksByArtistPaginatedRow struct {
|
||||
ID int32
|
||||
Title string
|
||||
MusicBrainzID *uuid.UUID
|
||||
ReleaseID int32
|
||||
Image *uuid.UUID
|
||||
ListenCount int64
|
||||
Artists []byte
|
||||
}
|
||||
|
||||
func (q *Queries) GetTopTracksByArtistPaginated(ctx context.Context, arg GetTopTracksByArtistPaginatedParams) ([]GetTopTracksByArtistPaginatedRow, error) {
|
||||
rows, err := q.db.Query(ctx, getTopTracksByArtistPaginated,
|
||||
arg.ListenedAt,
|
||||
arg.ListenedAt_2,
|
||||
arg.Limit,
|
||||
arg.Offset,
|
||||
arg.ArtistID,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []GetTopTracksByArtistPaginatedRow
|
||||
for rows.Next() {
|
||||
var i GetTopTracksByArtistPaginatedRow
|
||||
if err := rows.Scan(
|
||||
&i.ID,
|
||||
&i.Title,
|
||||
&i.MusicBrainzID,
|
||||
&i.ReleaseID,
|
||||
&i.Image,
|
||||
&i.ListenCount,
|
||||
&i.Artists,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const getTopTracksInReleasePaginated = `-- name: GetTopTracksInReleasePaginated :many
|
||||
SELECT
|
||||
t.id,
|
||||
t.title,
|
||||
t.musicbrainz_id,
|
||||
t.release_id,
|
||||
r.image,
|
||||
COUNT(*) AS listen_count,
|
||||
(
|
||||
SELECT json_agg(json_build_object('id', a.id, 'name', a.name))
|
||||
FROM artist_tracks at2
|
||||
JOIN artists_with_name a ON a.id = at2.artist_id
|
||||
WHERE at2.track_id = t.id
|
||||
) AS artists
|
||||
FROM listens l
|
||||
JOIN tracks_with_title t ON l.track_id = t.id
|
||||
JOIN releases r ON t.release_id = r.id
|
||||
WHERE l.listened_at BETWEEN $1 AND $2
|
||||
AND t.release_id = $5
|
||||
GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image
|
||||
ORDER BY listen_count DESC
|
||||
LIMIT $3 OFFSET $4
|
||||
`
|
||||
|
||||
type GetTopTracksInReleasePaginatedParams struct {
|
||||
ListenedAt time.Time
|
||||
ListenedAt_2 time.Time
|
||||
Limit int32
|
||||
Offset int32
|
||||
ReleaseID int32
|
||||
}
|
||||
|
||||
type GetTopTracksInReleasePaginatedRow struct {
|
||||
ID int32
|
||||
Title string
|
||||
MusicBrainzID *uuid.UUID
|
||||
ReleaseID int32
|
||||
Image *uuid.UUID
|
||||
ListenCount int64
|
||||
Artists []byte
|
||||
}
|
||||
|
||||
func (q *Queries) GetTopTracksInReleasePaginated(ctx context.Context, arg GetTopTracksInReleasePaginatedParams) ([]GetTopTracksInReleasePaginatedRow, error) {
|
||||
rows, err := q.db.Query(ctx, getTopTracksInReleasePaginated,
|
||||
arg.ListenedAt,
|
||||
arg.ListenedAt_2,
|
||||
arg.Limit,
|
||||
arg.Offset,
|
||||
arg.ReleaseID,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []GetTopTracksInReleasePaginatedRow
|
||||
for rows.Next() {
|
||||
var i GetTopTracksInReleasePaginatedRow
|
||||
if err := rows.Scan(
|
||||
&i.ID,
|
||||
&i.Title,
|
||||
&i.MusicBrainzID,
|
||||
&i.ReleaseID,
|
||||
&i.Image,
|
||||
&i.ListenCount,
|
||||
&i.Artists,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const getTopTracksPaginated = `-- name: GetTopTracksPaginated :many
|
||||
SELECT
|
||||
t.id,
|
||||
t.title,
|
||||
t.musicbrainz_id,
|
||||
t.release_id,
|
||||
r.image,
|
||||
COUNT(*) AS listen_count,
|
||||
(
|
||||
SELECT json_agg(json_build_object('id', a.id, 'name', a.name))
|
||||
FROM artist_tracks at
|
||||
JOIN artists_with_name a ON a.id = at.artist_id
|
||||
WHERE at.track_id = t.id
|
||||
) AS artists
|
||||
FROM listens l
|
||||
JOIN tracks_with_title t ON l.track_id = t.id
|
||||
JOIN releases r ON t.release_id = r.id
|
||||
WHERE l.listened_at BETWEEN $1 AND $2
|
||||
GROUP BY t.id, t.title, t.musicbrainz_id, t.release_id, r.image
|
||||
ORDER BY listen_count DESC
|
||||
LIMIT $3 OFFSET $4
|
||||
`
|
||||
|
||||
type GetTopTracksPaginatedParams struct {
|
||||
ListenedAt time.Time
|
||||
ListenedAt_2 time.Time
|
||||
Limit int32
|
||||
Offset int32
|
||||
}
|
||||
|
||||
type GetTopTracksPaginatedRow struct {
|
||||
ID int32
|
||||
Title string
|
||||
MusicBrainzID *uuid.UUID
|
||||
ReleaseID int32
|
||||
Image *uuid.UUID
|
||||
ListenCount int64
|
||||
Artists []byte
|
||||
}
|
||||
|
||||
func (q *Queries) GetTopTracksPaginated(ctx context.Context, arg GetTopTracksPaginatedParams) ([]GetTopTracksPaginatedRow, error) {
|
||||
rows, err := q.db.Query(ctx, getTopTracksPaginated,
|
||||
arg.ListenedAt,
|
||||
arg.ListenedAt_2,
|
||||
arg.Limit,
|
||||
arg.Offset,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []GetTopTracksPaginatedRow
|
||||
for rows.Next() {
|
||||
var i GetTopTracksPaginatedRow
|
||||
if err := rows.Scan(
|
||||
&i.ID,
|
||||
&i.Title,
|
||||
&i.MusicBrainzID,
|
||||
&i.ReleaseID,
|
||||
&i.Image,
|
||||
&i.ListenCount,
|
||||
&i.Artists,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const getTrack = `-- name: GetTrack :one
|
||||
SELECT
|
||||
t.id, t.musicbrainz_id, t.duration, t.release_id, t.title,
|
||||
r.image
|
||||
FROM tracks_with_title t
|
||||
JOIN releases r ON t.release_id = r.id
|
||||
WHERE t.id = $1 LIMIT 1
|
||||
`
|
||||
|
||||
type GetTrackRow struct {
|
||||
ID int32
|
||||
MusicBrainzID *uuid.UUID
|
||||
Duration int32
|
||||
ReleaseID int32
|
||||
Title string
|
||||
Image *uuid.UUID
|
||||
}
|
||||
|
||||
func (q *Queries) GetTrack(ctx context.Context, id int32) (GetTrackRow, error) {
|
||||
row := q.db.QueryRow(ctx, getTrack, id)
|
||||
var i GetTrackRow
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.MusicBrainzID,
|
||||
&i.Duration,
|
||||
&i.ReleaseID,
|
||||
&i.Title,
|
||||
&i.Image,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getTrackByMbzID = `-- name: GetTrackByMbzID :one
|
||||
SELECT id, musicbrainz_id, duration, release_id, title FROM tracks_with_title
|
||||
WHERE musicbrainz_id = $1 LIMIT 1
|
||||
`
|
||||
|
||||
func (q *Queries) GetTrackByMbzID(ctx context.Context, musicbrainzID *uuid.UUID) (TracksWithTitle, error) {
|
||||
row := q.db.QueryRow(ctx, getTrackByMbzID, musicbrainzID)
|
||||
var i TracksWithTitle
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.MusicBrainzID,
|
||||
&i.Duration,
|
||||
&i.ReleaseID,
|
||||
&i.Title,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getTrackByTitleAndArtists = `-- name: GetTrackByTitleAndArtists :one
|
||||
SELECT t.id, t.musicbrainz_id, t.duration, t.release_id, t.title
|
||||
FROM tracks_with_title t
|
||||
JOIN artist_tracks at ON at.track_id = t.id
|
||||
WHERE t.title = $1
|
||||
AND at.artist_id = ANY($2::int[])
|
||||
GROUP BY t.id, t.title, t.musicbrainz_id, t.duration, t.release_id
|
||||
HAVING COUNT(DISTINCT at.artist_id) = cardinality($2::int[])
|
||||
`
|
||||
|
||||
type GetTrackByTitleAndArtistsParams struct {
|
||||
Title string
|
||||
Column2 []int32
|
||||
}
|
||||
|
||||
func (q *Queries) GetTrackByTitleAndArtists(ctx context.Context, arg GetTrackByTitleAndArtistsParams) (TracksWithTitle, error) {
|
||||
row := q.db.QueryRow(ctx, getTrackByTitleAndArtists, arg.Title, arg.Column2)
|
||||
var i TracksWithTitle
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.MusicBrainzID,
|
||||
&i.Duration,
|
||||
&i.ReleaseID,
|
||||
&i.Title,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const insertTrack = `-- name: InsertTrack :one
|
||||
INSERT INTO tracks (musicbrainz_id, release_id, duration)
|
||||
VALUES ($1, $2, $3)
|
||||
RETURNING id, musicbrainz_id, duration, release_id
|
||||
`
|
||||
|
||||
type InsertTrackParams struct {
|
||||
MusicBrainzID *uuid.UUID
|
||||
ReleaseID int32
|
||||
Duration int32
|
||||
}
|
||||
|
||||
func (q *Queries) InsertTrack(ctx context.Context, arg InsertTrackParams) (Track, error) {
|
||||
row := q.db.QueryRow(ctx, insertTrack, arg.MusicBrainzID, arg.ReleaseID, arg.Duration)
|
||||
var i Track
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.MusicBrainzID,
|
||||
&i.Duration,
|
||||
&i.ReleaseID,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const updateReleaseForAll = `-- name: UpdateReleaseForAll :exec
|
||||
UPDATE tracks SET release_id = $2
|
||||
WHERE release_id = $1
|
||||
`
|
||||
|
||||
type UpdateReleaseForAllParams struct {
|
||||
ReleaseID int32
|
||||
ReleaseID_2 int32
|
||||
}
|
||||
|
||||
func (q *Queries) UpdateReleaseForAll(ctx context.Context, arg UpdateReleaseForAllParams) error {
|
||||
_, err := q.db.Exec(ctx, updateReleaseForAll, arg.ReleaseID, arg.ReleaseID_2)
|
||||
return err
|
||||
}
|
||||
|
||||
const updateTrackDuration = `-- name: UpdateTrackDuration :exec
|
||||
UPDATE tracks SET duration = $2
|
||||
WHERE id = $1
|
||||
`
|
||||
|
||||
type UpdateTrackDurationParams struct {
|
||||
ID int32
|
||||
Duration int32
|
||||
}
|
||||
|
||||
func (q *Queries) UpdateTrackDuration(ctx context.Context, arg UpdateTrackDurationParams) error {
|
||||
_, err := q.db.Exec(ctx, updateTrackDuration, arg.ID, arg.Duration)
|
||||
return err
|
||||
}
|
||||
|
||||
const updateTrackMbzID = `-- name: UpdateTrackMbzID :exec
|
||||
UPDATE tracks SET musicbrainz_id = $2
|
||||
WHERE id = $1
|
||||
`
|
||||
|
||||
type UpdateTrackMbzIDParams struct {
|
||||
ID int32
|
||||
MusicBrainzID *uuid.UUID
|
||||
}
|
||||
|
||||
func (q *Queries) UpdateTrackMbzID(ctx context.Context, arg UpdateTrackMbzIDParams) error {
|
||||
_, err := q.db.Exec(ctx, updateTrackMbzID, arg.ID, arg.MusicBrainzID)
|
||||
return err
|
||||
}
|
||||
210
internal/repository/users.sql.go
Normal file
210
internal/repository/users.sql.go
Normal file
|
|
@ -0,0 +1,210 @@
|
|||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
// source: users.sql
|
||||
|
||||
package repository
|
||||
|
||||
import (
|
||||
"context"
|
||||
)
|
||||
|
||||
const countApiKeys = `-- name: CountApiKeys :one
|
||||
SELECT COUNT(*) FROM api_keys WHERE user_id = $1
|
||||
`
|
||||
|
||||
func (q *Queries) CountApiKeys(ctx context.Context, userID int32) (int64, error) {
|
||||
row := q.db.QueryRow(ctx, countApiKeys, userID)
|
||||
var count int64
|
||||
err := row.Scan(&count)
|
||||
return count, err
|
||||
}
|
||||
|
||||
const countUsers = `-- name: CountUsers :one
|
||||
SELECT COUNT(*) FROM users
|
||||
`
|
||||
|
||||
func (q *Queries) CountUsers(ctx context.Context) (int64, error) {
|
||||
row := q.db.QueryRow(ctx, countUsers)
|
||||
var count int64
|
||||
err := row.Scan(&count)
|
||||
return count, err
|
||||
}
|
||||
|
||||
const deleteApiKey = `-- name: DeleteApiKey :exec
|
||||
DELETE FROM api_keys WHERE id = $1
|
||||
`
|
||||
|
||||
func (q *Queries) DeleteApiKey(ctx context.Context, id int32) error {
|
||||
_, err := q.db.Exec(ctx, deleteApiKey, id)
|
||||
return err
|
||||
}
|
||||
|
||||
const deleteUser = `-- name: DeleteUser :exec
|
||||
DELETE FROM users WHERE id = $1
|
||||
`
|
||||
|
||||
func (q *Queries) DeleteUser(ctx context.Context, id int32) error {
|
||||
_, err := q.db.Exec(ctx, deleteUser, id)
|
||||
return err
|
||||
}
|
||||
|
||||
const getAllApiKeysByUserID = `-- name: GetAllApiKeysByUserID :many
|
||||
SELECT ak.id, ak.key, ak.user_id, ak.created_at, ak.label
|
||||
FROM api_keys ak
|
||||
JOIN users u ON ak.user_id = u.id
|
||||
WHERE u.id = $1
|
||||
`
|
||||
|
||||
func (q *Queries) GetAllApiKeysByUserID(ctx context.Context, id int32) ([]ApiKey, error) {
|
||||
rows, err := q.db.Query(ctx, getAllApiKeysByUserID, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []ApiKey
|
||||
for rows.Next() {
|
||||
var i ApiKey
|
||||
if err := rows.Scan(
|
||||
&i.ID,
|
||||
&i.Key,
|
||||
&i.UserID,
|
||||
&i.CreatedAt,
|
||||
&i.Label,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const getUserByApiKey = `-- name: GetUserByApiKey :one
|
||||
SELECT u.id, u.username, u.role, u.password
|
||||
FROM users u
|
||||
JOIN api_keys ak ON u.id = ak.user_id
|
||||
WHERE ak.key = $1
|
||||
`
|
||||
|
||||
func (q *Queries) GetUserByApiKey(ctx context.Context, key string) (User, error) {
|
||||
row := q.db.QueryRow(ctx, getUserByApiKey, key)
|
||||
var i User
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.Username,
|
||||
&i.Role,
|
||||
&i.Password,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getUserByUsername = `-- name: GetUserByUsername :one
|
||||
SELECT id, username, role, password FROM users WHERE username = $1
|
||||
`
|
||||
|
||||
func (q *Queries) GetUserByUsername(ctx context.Context, username string) (User, error) {
|
||||
row := q.db.QueryRow(ctx, getUserByUsername, username)
|
||||
var i User
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.Username,
|
||||
&i.Role,
|
||||
&i.Password,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const insertApiKey = `-- name: InsertApiKey :one
|
||||
INSERT INTO api_keys (user_id, key, label)
|
||||
VALUES ($1, $2, $3)
|
||||
RETURNING id, key, user_id, created_at, label
|
||||
`
|
||||
|
||||
type InsertApiKeyParams struct {
|
||||
UserID int32
|
||||
Key string
|
||||
Label string
|
||||
}
|
||||
|
||||
func (q *Queries) InsertApiKey(ctx context.Context, arg InsertApiKeyParams) (ApiKey, error) {
|
||||
row := q.db.QueryRow(ctx, insertApiKey, arg.UserID, arg.Key, arg.Label)
|
||||
var i ApiKey
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.Key,
|
||||
&i.UserID,
|
||||
&i.CreatedAt,
|
||||
&i.Label,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const insertUser = `-- name: InsertUser :one
|
||||
INSERT INTO users (username, password, role)
|
||||
VALUES ($1, $2, $3)
|
||||
RETURNING id, username, role, password
|
||||
`
|
||||
|
||||
type InsertUserParams struct {
|
||||
Username string
|
||||
Password []byte
|
||||
Role Role
|
||||
}
|
||||
|
||||
func (q *Queries) InsertUser(ctx context.Context, arg InsertUserParams) (User, error) {
|
||||
row := q.db.QueryRow(ctx, insertUser, arg.Username, arg.Password, arg.Role)
|
||||
var i User
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.Username,
|
||||
&i.Role,
|
||||
&i.Password,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const updateApiKeyLabel = `-- name: UpdateApiKeyLabel :exec
|
||||
UPDATE api_keys SET label = $3 WHERE id = $1 AND user_id = $2
|
||||
`
|
||||
|
||||
type UpdateApiKeyLabelParams struct {
|
||||
ID int32
|
||||
UserID int32
|
||||
Label string
|
||||
}
|
||||
|
||||
func (q *Queries) UpdateApiKeyLabel(ctx context.Context, arg UpdateApiKeyLabelParams) error {
|
||||
_, err := q.db.Exec(ctx, updateApiKeyLabel, arg.ID, arg.UserID, arg.Label)
|
||||
return err
|
||||
}
|
||||
|
||||
const updateUserPassword = `-- name: UpdateUserPassword :exec
|
||||
UPDATE users SET password = $2 WHERE id = $1
|
||||
`
|
||||
|
||||
type UpdateUserPasswordParams struct {
|
||||
ID int32
|
||||
Password []byte
|
||||
}
|
||||
|
||||
func (q *Queries) UpdateUserPassword(ctx context.Context, arg UpdateUserPasswordParams) error {
|
||||
_, err := q.db.Exec(ctx, updateUserPassword, arg.ID, arg.Password)
|
||||
return err
|
||||
}
|
||||
|
||||
const updateUserUsername = `-- name: UpdateUserUsername :exec
|
||||
UPDATE users SET username = $2 WHERE id = $1
|
||||
`
|
||||
|
||||
type UpdateUserUsernameParams struct {
|
||||
ID int32
|
||||
Username string
|
||||
}
|
||||
|
||||
func (q *Queries) UpdateUserUsername(ctx context.Context, arg UpdateUserUsernameParams) error {
|
||||
_, err := q.db.Exec(ctx, updateUserUsername, arg.ID, arg.Username)
|
||||
return err
|
||||
}
|
||||
313
internal/utils/utils.go
Normal file
313
internal/utils/utils.go
Normal file
|
|
@ -0,0 +1,313 @@
|
|||
package utils
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"math/big"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gabehf/koito/internal/mbz"
|
||||
"github.com/gabehf/koito/internal/models"
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
// IDFromString converts an arbitrary display string into a URL-friendly
// identifier: lowercased, with spaces replaced by hyphens.
func IDFromString(s string) string {
	return strings.ReplaceAll(strings.ToLower(s), " ", "-")
}
|
||||
|
||||
func ParseUUIDSlice(str []string) ([]uuid.UUID, error) {
|
||||
ret := make([]uuid.UUID, 0)
|
||||
for _, s := range str {
|
||||
parsed, err := uuid.Parse(s)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
ret = append(ret, parsed)
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func FlattenArtistMbzIDs(artists []*models.Artist) []uuid.UUID {
|
||||
ids := make([]uuid.UUID, 0)
|
||||
for _, a := range artists {
|
||||
if a.MbzID == nil || *a.MbzID == uuid.Nil {
|
||||
continue
|
||||
}
|
||||
ids = append(ids, *a.MbzID)
|
||||
}
|
||||
return ids
|
||||
}
|
||||
|
||||
func FlattenArtistNames(artists []*models.Artist) []string {
|
||||
names := make([]string, 0)
|
||||
for _, a := range artists {
|
||||
names = append(names, a.Aliases...)
|
||||
}
|
||||
return names
|
||||
}
|
||||
|
||||
func FlattenSimpleArtistNames(artists []models.SimpleArtist) []string {
|
||||
names := make([]string, 0)
|
||||
for _, a := range artists {
|
||||
names = append(names, a.Name)
|
||||
}
|
||||
return names
|
||||
}
|
||||
|
||||
func FlattenMbzArtistCreditNames(artists []mbz.MusicBrainzArtistCredit) []string {
|
||||
names := make([]string, len(artists))
|
||||
for i, a := range artists {
|
||||
names[i] = a.Name
|
||||
}
|
||||
return names
|
||||
}
|
||||
|
||||
func FlattenArtistIDs(artists []*models.Artist) []int32 {
|
||||
ids := make([]int32, len(artists))
|
||||
for i, a := range artists {
|
||||
ids[i] = a.ID
|
||||
}
|
||||
return ids
|
||||
}
|
||||
|
||||
// DateRange takes optional week, month, and year. If all are 0, it returns the zero time range.
|
||||
// If only year is provided, it returns the full year.
|
||||
// If both month and year are provided, it returns the start and end of that month.
|
||||
// If week and year are provided, it returns the start and end of that week.
|
||||
// If only week or month is provided without a year, it's considered invalid.
|
||||
func DateRange(week, month, year int) (time.Time, time.Time, error) {
|
||||
if week == 0 && month == 0 && year == 0 {
|
||||
// No filter applied
|
||||
return time.Time{}, time.Time{}, nil
|
||||
}
|
||||
|
||||
if month != 0 && (month < 1 || month > 12) {
|
||||
return time.Time{}, time.Time{}, errors.New("invalid month")
|
||||
}
|
||||
|
||||
if week != 0 && (week < 1 || week > 53) {
|
||||
return time.Time{}, time.Time{}, errors.New("invalid week")
|
||||
}
|
||||
|
||||
if year < 1 {
|
||||
return time.Time{}, time.Time{}, errors.New("invalid year")
|
||||
}
|
||||
|
||||
loc := time.Local
|
||||
|
||||
if week != 0 {
|
||||
if month != 0 {
|
||||
return time.Time{}, time.Time{}, errors.New("cannot specify both week and month")
|
||||
}
|
||||
// Specific week
|
||||
start := time.Date(year, 1, 1, 0, 0, 0, 0, loc)
|
||||
start = start.AddDate(0, 0, (week-1)*7)
|
||||
end := start.AddDate(0, 0, 7)
|
||||
return start, end, nil
|
||||
}
|
||||
|
||||
if month == 0 {
|
||||
// Whole year
|
||||
start := time.Date(year, 1, 1, 0, 0, 0, 0, loc)
|
||||
end := start.AddDate(1, 0, 0)
|
||||
return start, end, nil
|
||||
}
|
||||
|
||||
// Specific month
|
||||
start := time.Date(year, time.Month(month), 1, 0, 0, 0, 0, loc)
|
||||
end := start.AddDate(0, 1, 0)
|
||||
return start, end, nil
|
||||
}
|
||||
|
||||
// CopyFile copies a file from src to dst. If src and dst files exist, and are
|
||||
// the same, then return success. Otherise, attempt to create a hard link
|
||||
// between the two files. If that fail, copy the file contents from src to dst.
|
||||
func CopyFile(src, dst string) (err error) {
|
||||
sfi, err := os.Stat(src)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
if !sfi.Mode().IsRegular() {
|
||||
// cannot copy non-regular files (e.g., directories,
|
||||
// symlinks, devices, etc.)
|
||||
return fmt.Errorf("non-regular source file %s (%q)", sfi.Name(), sfi.Mode().String())
|
||||
}
|
||||
dfi, err := os.Stat(dst)
|
||||
if err != nil {
|
||||
if !os.IsNotExist(err) {
|
||||
return
|
||||
}
|
||||
} else {
|
||||
if !(dfi.Mode().IsRegular()) {
|
||||
return fmt.Errorf("non-regular destination file %s (%q)", dfi.Name(), dfi.Mode().String())
|
||||
}
|
||||
if os.SameFile(sfi, dfi) {
|
||||
return
|
||||
}
|
||||
}
|
||||
if err = os.Link(src, dst); err == nil {
|
||||
return
|
||||
}
|
||||
err = copyFileContents(src, dst)
|
||||
return
|
||||
}
|
||||
|
||||
// copyFileContents copies the contents of the file named src to the file named
|
||||
// by dst. The file will be created if it does not already exist. If the
|
||||
// destination file exists, all it's contents will be replaced by the contents
|
||||
// of the source file.
|
||||
func copyFileContents(src, dst string) (err error) {
|
||||
in, err := os.Open(src)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
defer in.Close()
|
||||
out, err := os.Create(dst)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
defer func() {
|
||||
cerr := out.Close()
|
||||
if err == nil {
|
||||
err = cerr
|
||||
}
|
||||
}()
|
||||
if _, err = io.Copy(out, in); err != nil {
|
||||
return
|
||||
}
|
||||
err = out.Sync()
|
||||
return
|
||||
}
|
||||
|
||||
// UniqueIgnoringCase returns s with case-insensitive duplicates removed
// (per strings.EqualFold), keeping the first occurrence of each value and
// preserving order. The result is never nil.
func UniqueIgnoringCase(s []string) []string {
	unique := []string{}
	for _, candidate := range s {
		// EqualFold handles Unicode case folding that a ToLower-keyed map
		// would miss, so we keep the pairwise comparison.
		seen := false
		for _, kept := range unique {
			if strings.EqualFold(kept, candidate) {
				seen = true
				break
			}
		}
		if !seen {
			unique = append(unique, candidate)
		}
	}
	return unique
}
|
||||
|
||||
// Unique removes duplicate strings from *xs in place, keeping the first
// occurrence of each value and preserving order. The backing array is
// reused, so no new slice is allocated.
func Unique(xs *[]string) {
	seen := make(map[string]bool, len(*xs))
	keep := (*xs)[:0]
	for _, x := range *xs {
		if seen[x] {
			continue
		}
		seen[x] = true
		keep = append(keep, x)
	}
	*xs = keep
}
|
||||
|
||||
// RemoveNonAscii returns the entries of s that consist entirely of ASCII
// runes; any entry containing a rune above 127 (including invalid UTF-8,
// which decodes to the replacement rune) is dropped. Order is preserved and
// the result is never nil.
func RemoveNonAscii(s []string) []string {
	filtered := []string{}
	for _, str := range s {
		if !strings.ContainsFunc(str, func(r rune) bool { return r > 127 }) {
			filtered = append(filtered, str)
		}
	}
	return filtered
}
|
||||
|
||||
// RemoveInBoth returns the elements of s that do not appear in c
// (the one-directional set difference s \ c). Comparison is exact and
// case-sensitive.
func RemoveInBoth(s, c []string) []string {
	exclude := make(map[string]struct{}, len(c))
	for _, str := range c {
		exclude[str] = struct{}{}
	}

	result := []string{}
	for _, str := range s {
		if _, skip := exclude[str]; !skip {
			result = append(result, str)
		}
	}
	return result
}
|
||||
|
||||
// MoveFirstMatchToFront returns slice with the first element containing
// substring moved to the front; the relative order of the other elements
// is preserved. If no element matches, or the match is already first, the
// input slice is returned as-is. The input slice is never modified.
func MoveFirstMatchToFront(slice []string, substring string) []string {
	for i, s := range slice {
		if !strings.Contains(s, substring) {
			continue
		}
		if i == 0 {
			return slice // already at the front
		}
		// BUG FIX: the previous implementation did
		//   append([]string{slice[i]}, append(slice[:i], slice[i+1:]...)...)
		// The inner append writes into slice's backing array, shifting the
		// caller's elements in place and corrupting the input. Build a
		// fresh slice instead so the input is left untouched.
		result := make([]string, 0, len(slice))
		result = append(result, slice[i])
		result = append(result, slice[:i]...)
		result = append(result, slice[i+1:]...)
		return result
	}
	// No match found; return unchanged.
	return slice
}
|
||||
|
||||
// GenerateRandomString returns a cryptographically random string of the
// given length, drawn from the alphabet [0-9A-Za-z-]. It returns an error
// only if the system's secure random source fails.
//
// Taken with little modification from
// https://gist.github.com/dopey/c69559607800d2f2f90b1b1ed4e550fb?permalink_comment_id=3527095#gistcomment-3527095
func GenerateRandomString(length int) (string, error) {
	const alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-"
	// rand.Int draws uniformly in [0, len(alphabet)), avoiding modulo bias.
	limit := big.NewInt(int64(len(alphabet)))
	out := make([]byte, length)
	for i := 0; i < length; i++ {
		idx, err := rand.Int(rand.Reader, limit)
		if err != nil {
			return "", err
		}
		out[i] = alphabet[idx.Int64()]
	}
	return string(out), nil
}
|
||||
|
||||
// Essentially the same as utils.WriteError(w, `{"error": "message"}`, code)
|
||||
func WriteError(w http.ResponseWriter, message string, code int) {
|
||||
http.Error(w, fmt.Sprintf(`{"error":"%s"}`, message), code)
|
||||
}
|
||||
|
||||
// Sets content type and status code, and encodes data to json
|
||||
func WriteJSON(w http.ResponseWriter, status int, data any) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(status)
|
||||
json.NewEncoder(w).Encode(data)
|
||||
}
|
||||
|
||||
// MoreThanOneString reports whether at least two of the given strings are
// non-empty.
func MoreThanOneString(s ...string) bool {
	nonEmpty := 0
	for _, str := range s {
		if str == "" {
			continue
		}
		nonEmpty++
		if nonEmpty > 1 {
			// Two non-empty strings found; no need to scan further.
			return true
		}
	}
	return false
}
|
||||
49
internal/utils/utils_test.go
Normal file
49
internal/utils/utils_test.go
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
package utils_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/gabehf/koito/internal/utils"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestRemoveNonAscii(t *testing.T) {
|
||||
expected := [][]string{
|
||||
[]string{"test1", "test2"}, []string{"test1", "test2"},
|
||||
[]string{"ネクライトーキー", "NECRY TALKIE"}, []string{"NECRY TALKIE"},
|
||||
[]string{"BFY#& cn&W,KE|"}, []string{"BFY#& cn&W,KE|"},
|
||||
[]string{"もっさ"}, []string{},
|
||||
}
|
||||
|
||||
for i := 0; i < len(expected)/2; i = i + 2 {
|
||||
r := utils.RemoveNonAscii(expected[i])
|
||||
assert.EqualValues(t, expected[i+1], r)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUniqueIgnoringCase(t *testing.T) {
|
||||
expected := [][]string{
|
||||
[]string{"Necry Talkie", "NECRY TALKIE"}, []string{"Necry Talkie"},
|
||||
[]string{"ネクライトーキー", "NECRY TALKIE"}, []string{"ネクライトーキー", "NECRY TALKIE"},
|
||||
[]string{"BFY#& cn&W,KE|"}, []string{"BFY#& cn&W,KE|"},
|
||||
[]string{"もっさ"}, []string{"もっさ"},
|
||||
}
|
||||
|
||||
for i := 0; i < len(expected)/2; i = i + 2 {
|
||||
r := utils.UniqueIgnoringCase(expected[i])
|
||||
assert.EqualValues(t, expected[i+1], r)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRemoveInBoth(t *testing.T) {
|
||||
expected := [][]string{
|
||||
{"Necry Talkie", "NECRY TALKIE"}, {"Necry Talkie"}, {"NECRY TALKIE"},
|
||||
{"ネクライトーキー", "NECRY TALKIE"}, {"ネクライトーキー", "NECRY TALKIE"}, {},
|
||||
{"BFY#& cn&W,KE|", "bleh"}, {"BFY#& cn&W,KE|"}, {"bleh"},
|
||||
}
|
||||
|
||||
for i := 0; i < len(expected)/3; i = i + 3 {
|
||||
r := utils.RemoveInBoth(expected[i], expected[i+1])
|
||||
assert.EqualValues(t, expected[i+2], r)
|
||||
}
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue