Anilist -> AniList

Author: Gabe Farrell
Date:   2025-04-07 02:07:15 -04:00
Parent: e96577b5bc
Commit: 5f2917d8a0
3 changed files with 36 additions and 36 deletions

README.md

@@ -1,13 +1,13 @@
 # sonarr-anime-importer
-Easily create import lists in sonarr with MyAnimeList or Anilist queries!
-This is basically a wrapper for [Jikan](jikan.moe) and the Anilist API that maps IDs to a list with TVDB IDs so that Sonarr can import the results.
+Easily create import lists in sonarr with MyAnimeList or AniList queries!
+This is basically a wrapper for [Jikan](jikan.moe) and the AniList API that maps IDs to a list with TVDB IDs so that Sonarr can import the results.
 **Until v1.0.0, breaking changes can happen at any time. Multiple have happened already! Be wary updating.**
 **The "limit" parameter is required for all requests!**
-Pulls MyAnimeList, Anilist, and TVDB ID associations from https://raw.githubusercontent.com/Kometa-Team/Anime-IDs/master/anime_ids.json.
+Pulls MyAnimeList, AniList, and TVDB ID associations from https://raw.githubusercontent.com/Kometa-Team/Anime-IDs/master/anime_ids.json.
 ## Supported Requests
 ### GET /v1/mal/anime
@@ -24,7 +24,7 @@ Example request:
 curl "http://localhost:3333/v1/mal/anime?type=tv&status=airing&order_by=popularity&sort=asc&limit=10"
 ```
 ### GET /v1/anilist/anime
-Searches anime from Anilist
+Searches anime from AniList
 Parameters:
 - isAdult: Boolean
@@ -58,7 +58,7 @@ curl "http://localhost:3333/v1/anilist/anime?format=TV,ONA&sort=TRENDING_DESC&is
 ## Environment
 One configuration environment variable is supported:
 - `ALWAYS_SKIP_MAL_IDS`: Comma-separated list of MyAnimeList IDs to always skip. These do not count towards the return limit.
-- `ALWAYS_SKIP_ANILIST_IDS`: Comma-separated list of Anilist IDs to always skip. These do not count towards the return limit.
+- `ALWAYS_SKIP_ANILIST_IDS`: Comma-separated list of AniList IDs to always skip. These do not count towards the return limit.
 ## Docker Compose
 ```yaml
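For reference, the endpoints documented above return a flat JSON array whose field names come from the `ResponseItem` struct in main.go, further down in this diff. A minimal Go sketch of that shape, using placeholder values rather than real IDs:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// ResponseItem mirrors the struct in main.go; the JSON tags are the
// field names Sonarr sees when it fetches the import list.
type ResponseItem struct {
	Title     string `json:"title"`
	TitleEng  string `json:"titleEnglish,omitempty"`
	MalId     int    `json:"malId,omitempty"`
	AniListId int    `json:"anilistId,omitempty"`
	TvdbId    int    `json:"tvdbId"`
}

func main() {
	// Placeholder entry; real responses are built by joining AniList/Jikan
	// results against the Kometa anime_ids.json TVDB mapping.
	items := []ResponseItem{{
		Title:     "Example Romaji Title",
		TitleEng:  "Example English Title",
		MalId:     1,
		AniListId: 2,
		TvdbId:    3,
	}}
	out, _ := json.MarshalIndent(items, "", "  ")
	fmt.Println(string(out))
}
```

Note the `omitempty` tags: `titleEnglish`, `malId`, and `anilistId` drop out of the JSON when unset, so only `title` and `tvdbId` are guaranteed to be present.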


@@ -74,32 +74,32 @@ query (
 }
 `
-type AnilistPageInfo struct {
+type AniListPageInfo struct {
 	HasNextPage bool `json:"hasNextPage"`
 }
-type AnilistMediaItem struct {
+type AniListMediaItem struct {
 	Id int `json:"id"`
 	IdMal int `json:"idMal"`
-	Title AnilistTitle `json:"title"`
+	Title AniListTitle `json:"title"`
 }
-type AnilistTitle struct {
+type AniListTitle struct {
 	Romaji string `json:"romaji"`
 	English string `json:"english"`
 }
-type AnilistResponsePage struct {
-	PageInfo AnilistPageInfo `json:"pageInfo"`
-	Media []AnilistMediaItem `json:"media"`
+type AniListResponsePage struct {
+	PageInfo AniListPageInfo `json:"pageInfo"`
+	Media []AniListMediaItem `json:"media"`
 }
-type AnilistResponseData struct {
-	Page AnilistResponsePage `json:"Page"`
+type AniListResponseData struct {
+	Page AniListResponsePage `json:"Page"`
 }
-type AnilistApiResponse struct {
-	Data AnilistResponseData `json:"data"`
+type AniListApiResponse struct {
+	Data AniListResponseData `json:"data"`
 }
-func handleAnilistAnimeSearch(idMap *ConcurrentMap, permaSkipIds []string) http.HandlerFunc {
+func handleAniListAnimeSearch(idMap *ConcurrentMap, permaSkipIds []string) http.HandlerFunc {
 	return func(w http.ResponseWriter, r *http.Request) {
-		search, err := getAnilistAnimeSearch(idMap, permaSkipIds, r)
+		search, err := getAniListAnimeSearch(idMap, permaSkipIds, r)
 		if err != nil {
 			w.WriteHeader(500)
 			w.Write([]byte(err.Error()))
@@ -109,7 +109,7 @@ func handleAnilistAnimeSearch(idMap *ConcurrentMap, permaSkipIds []string) http.
 	}
 }
-func getAnilistAnimeSearch(idMap *ConcurrentMap, permaSkipAnilistIds []string, r *http.Request) ([]byte, error) {
+func getAniListAnimeSearch(idMap *ConcurrentMap, permaSkipAniListIds []string, r *http.Request) ([]byte, error) {
 	q := r.URL.Query()
 	// set default params
@@ -119,7 +119,7 @@ func getAnilistAnimeSearch(idMap *ConcurrentMap, permaSkipAnilistIds []string, r
 	}
 	q.Set("type", "ANIME")
-	// dont include limit in the Anilist api call as its already hard coded at 20 per page
+	// dont include limit in the AniList api call as its already hard coded at 20 per page
 	q.Del("limit")
 	skipDedup := parseBoolParam(q, "allowDuplicates")
@@ -132,24 +132,24 @@ func getAnilistAnimeSearch(idMap *ConcurrentMap, permaSkipAnilistIds []string, r
 	for hasNextPage {
 		page++
 		q.Set("page", strconv.Itoa(page))
-		result, err := makeAnilistApiCall(q)
+		result, err := makeAniListApiCall(q)
 		if err != nil {
-			log.Println("Error sending request to Anilist: ", err)
+			log.Println("Error sending request to AniList: ", err)
 			return nil, err
 		}
 		// map the data
 		for _, item := range result.Data.Page.Media {
 			if idMap.GetByMalId(item.IdMal) == 0 {
-				log.Printf("Anilist ID %d (%s) has no associated TVDB ID, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
+				log.Printf("AniList ID %d (%s) has no associated TVDB ID, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
 				continue
 			}
 			if usedIds[item.Id] && !skipDedup {
-				log.Printf("Anilist ID %d (%s) is a duplicate, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
+				log.Printf("AniList ID %d (%s) is a duplicate, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
 				continue
 			}
-			if slices.Contains(permaSkipAnilistIds, strconv.Itoa(item.Id)) {
-				log.Printf("Anilist ID %d (%s) is set to always skip, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
+			if slices.Contains(permaSkipAniListIds, strconv.Itoa(item.Id)) {
+				log.Printf("AniList ID %d (%s) is set to always skip, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
 				continue
 			}
 			count++
@@ -183,7 +183,7 @@ func getAnilistAnimeSearch(idMap *ConcurrentMap, permaSkipAnilistIds []string, r
 	return respJson, nil
 }
-func makeAnilistApiCall(q url.Values) (*AnilistApiResponse, error) {
+func makeAniListApiCall(q url.Values) (*AniListApiResponse, error) {
 	// Build the GraphQL request body
 	variables := BuildGraphQLVariables(q)
@@ -203,7 +203,7 @@ func makeAnilistApiCall(q url.Values) (*AnilistApiResponse, error) {
 	}
 	defer resp.Body.Close()
-	respData := new(AnilistApiResponse)
+	respData := new(AniListApiResponse)
 	err = json.NewDecoder(resp.Body).Decode(respData)
 	if err != nil {
 		return nil, err
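The hunks above only touch identifier casing, but together they outline the AniList flow: POST a GraphQL query, decode the response into the `AniList*` structs, and keep requesting pages while `pageInfo.hasNextPage` is true (the comment in the diff notes AniList's page size is hard coded at 20). Below is a self-contained sketch of that pagination pattern against the public AniList endpoint at https://graphql.anilist.co; the real query string, `BuildGraphQLVariables`, and the surrounding error handling are outside this diff, so the details here are illustrative:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

// These mirror the renamed response structs from the diff above.
type AniListPageInfo struct {
	HasNextPage bool `json:"hasNextPage"`
}

type AniListTitle struct {
	Romaji  string `json:"romaji"`
	English string `json:"english"`
}

type AniListMediaItem struct {
	Id    int          `json:"id"`
	IdMal int          `json:"idMal"`
	Title AniListTitle `json:"title"`
}

type AniListResponsePage struct {
	PageInfo AniListPageInfo    `json:"pageInfo"`
	Media    []AniListMediaItem `json:"media"`
}

type AniListResponseData struct {
	Page AniListResponsePage `json:"Page"`
}

type AniListApiResponse struct {
	Data AniListResponseData `json:"data"`
}

// A reduced query with just the fields the structs above decode.
const pageQuery = `
query ($page: Int) {
  Page(page: $page, perPage: 20) {
    pageInfo { hasNextPage }
    media(type: ANIME, sort: TRENDING_DESC) {
      id
      idMal
      title { romaji english }
    }
  }
}`

func fetchPage(page int) (*AniListApiResponse, error) {
	body, err := json.Marshal(map[string]interface{}{
		"query":     pageQuery,
		"variables": map[string]interface{}{"page": page},
	})
	if err != nil {
		return nil, err
	}
	resp, err := http.Post("https://graphql.anilist.co", "application/json", bytes.NewReader(body))
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	result := new(AniListApiResponse)
	return result, json.NewDecoder(resp.Body).Decode(result)
}

func main() {
	// Same loop shape as getAniListAnimeSearch: advance the page until
	// AniList reports there is no next page (capped here for the demo).
	hasNextPage := true
	for page := 1; hasNextPage && page <= 2; page++ {
		result, err := fetchPage(page)
		if err != nil {
			log.Fatal("Error sending request to AniList: ", err)
		}
		hasNextPage = result.Data.Page.PageInfo.HasNextPage
		for _, item := range result.Data.Page.Media {
			fmt.Printf("AniList %d / MAL %d: %s\n", item.Id, item.IdMal, item.Title.Romaji)
		}
	}
}
```

Because AniList fixes the page size, the handler appears to enforce the request's `limit` itself (the `count++` in the hunk above) rather than forwarding it, which is presumably why `q.Del("limit")` runs before the API call.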

main.go

@@ -17,13 +17,13 @@ type ResponseItem struct {
 	Title string `json:"title"`
 	TitleEng string `json:"titleEnglish,omitempty"`
 	MalId int `json:"malId,omitempty"`
-	AnilistId int `json:"anilistId,omitempty"`
+	AniListId int `json:"anilistId,omitempty"`
 	TvdbId int `json:"tvdbId"`
 }
 type AnimeEntry struct {
 	TvdbId int `json:"tvdb_id"`
 	MalId interface{} `json:"mal_id"`
-	AnilistId int `json:"anilist_id"`
+	AniListId int `json:"anilist_id"`
 }
 type ConcurrentMap struct {
 	mal map[int]int
@@ -48,14 +48,14 @@ func main() {
 	if permaSkipMalStr != "" {
 		log.Printf("Always skipping MAL IDs: %v\n", permaSkipMalIds)
 	}
-	permaSkipAnilistStr := os.Getenv("ALWAYS_SKIP_ANILIST_IDS")
-	permaSkipAnilistIds := strings.Split(permaSkipAnilistStr, ",")
-	if permaSkipAnilistStr != "" {
-		log.Printf("Always skipping Anilist IDs: %v\n", permaSkipAnilistIds)
+	permaSkipAniListStr := os.Getenv("ALWAYS_SKIP_ANILIST_IDS")
+	permaSkipAniListIds := strings.Split(permaSkipAniListStr, ",")
+	if permaSkipAniListStr != "" {
+		log.Printf("Always skipping AniList IDs: %v\n", permaSkipAniListIds)
 	}
 	buildIdMapMiddleware := newRebuildStaleIdMapMiddleware(idMap)
 	http.HandleFunc("/v1/mal/anime", loggerMiddleware(buildIdMapMiddleware(handleMalAnimeSearch(idMap, permaSkipMalIds))))
-	http.HandleFunc("/v1/anilist/anime", loggerMiddleware(buildIdMapMiddleware(handleAnilistAnimeSearch(idMap, permaSkipAnilistIds))))
+	http.HandleFunc("/v1/anilist/anime", loggerMiddleware(buildIdMapMiddleware(handleAniListAnimeSearch(idMap, permaSkipAniListIds))))
 	log.Println("Listening on :3333")
 	log.Fatal(http.ListenAndServe(":3333", nil))
 }
@@ -102,7 +102,7 @@ func buildIdMap(idMap *ConcurrentMap) {
 		for _, val := range malIdList {
 			idMap.mal[val] = entry.TvdbId
 		}
-		if entry.AnilistId == 0 {
+		if entry.AniListId == 0 {
 			continue
 		}
 }
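`ConcurrentMap` is only partially visible here; the first main.go hunk cuts off right after its `mal` field. The handlers do pin down two behaviors: `GetByMalId` returns 0 when an ID has no TVDB association, and `buildIdMap` assigns to `idMap.mal` directly. A plausible sketch under those constraints, where the `sync.RWMutex` and the `anilist` field are assumptions rather than code from this commit:

```go
package main

import (
	"fmt"
	"sync"
)

// Assumed shape: the diff shows only the mal field and the GetByMalId
// call site; the RWMutex and the anilist map are guesses.
type ConcurrentMap struct {
	mu      sync.RWMutex
	mal     map[int]int // MAL ID -> TVDB ID
	anilist map[int]int // AniList ID -> TVDB ID (assumed)
}

// GetByMalId returns the mapped TVDB ID, or 0 when no association
// exists; 0 is the sentinel the AniList handler checks before skipping.
func (m *ConcurrentMap) GetByMalId(malId int) int {
	m.mu.RLock()
	defer m.mu.RUnlock()
	return m.mal[malId]
}

func main() {
	m := &ConcurrentMap{mal: map[int]int{1: 100}} // placeholder IDs
	fmt.Println(m.GetByMalId(1)) // 100
	fmt.Println(m.GetByMalId(2)) // 0: no association, the handler would skip
}
```

An `RWMutex` seems to fit the access pattern the diff implies: many concurrent readers in the request handlers, with writes confined to the rebuild path behind `newRebuildStaleIdMapMiddleware`.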