Anilist -> AniList

Branch: pull/5/head
Author: Gabe Farrell, 8 months ago
Parent: e96577b5bc
Commit: 5f2917d8a0

@@ -1,13 +1,13 @@
 # sonarr-anime-importer
-Easily create import lists in sonarr with MyAnimeList or Anilist queries!
+Easily create import lists in sonarr with MyAnimeList or AniList queries!
-This is basically a wrapper for [Jikan](jikan.moe) and the Anilist API that maps IDs to a list with TVDB IDs so that Sonarr can import the results.
+This is basically a wrapper for [Jikan](jikan.moe) and the AniList API that maps IDs to a list with TVDB IDs so that Sonarr can import the results.
 **Until v1.0.0, breaking changes can happen at any time. Multiple have happened already! Be wary updating.**
 **The "limit" parameter is required for all requests!**
-Pulls MyAnimeList, Anilist, and TVDB ID associations from https://raw.githubusercontent.com/Kometa-Team/Anime-IDs/master/anime_ids.json.
+Pulls MyAnimeList, AniList, and TVDB ID associations from https://raw.githubusercontent.com/Kometa-Team/Anime-IDs/master/anime_ids.json.
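
As an aside on the ID-mapping step described above: a minimal, self-contained Go sketch of fetching that `anime_ids.json` file and building a MyAnimeList-to-TVDB lookup might look like the following. It is an illustration only; the importer's real logic lives in `buildIdMap` and `ConcurrentMap` further down in this diff, and the handling of `mal_id` here (number or comma-separated string) is an assumption inferred from the `interface{}` field in that code.

```go
// Hypothetical sketch: build a MyAnimeList-ID -> TVDB-ID lookup from the
// Kometa anime_ids.json file referenced in the README. The field handling is
// an assumption based on the AnimeEntry struct later in this diff, not the
// importer's actual code.
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
	"strconv"
	"strings"
)

func main() {
	resp, err := http.Get("https://raw.githubusercontent.com/Kometa-Team/Anime-IDs/master/anime_ids.json")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// The file is a JSON object keyed by an ID, with per-entry fields such as
	// tvdb_id and mal_id; only those two fields are used here.
	var raw map[string]struct {
		TvdbId int         `json:"tvdb_id"`
		MalId  interface{} `json:"mal_id"` // sometimes a number, sometimes a string
	}
	if err := json.NewDecoder(resp.Body).Decode(&raw); err != nil {
		log.Fatal(err)
	}

	malToTvdb := make(map[int]int)
	for _, entry := range raw {
		if entry.TvdbId == 0 {
			continue
		}
		switch v := entry.MalId.(type) {
		case float64: // JSON numbers decode to float64
			malToTvdb[int(v)] = entry.TvdbId
		case string: // assumption: may hold comma-separated IDs
			for _, s := range strings.Split(v, ",") {
				if id, err := strconv.Atoi(strings.TrimSpace(s)); err == nil {
					malToTvdb[id] = entry.TvdbId
				}
			}
		}
	}
	fmt.Printf("mapped %d MAL IDs to TVDB IDs\n", len(malToTvdb))
}
```

The real service additionally keeps an AniList-ID index and rebuilds the map when it goes stale (see `newRebuildStaleIdMapMiddleware` below).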
 ## Supported Requests
 ### GET /v1/mal/anime
@@ -24,7 +24,7 @@ Example request:
 curl "http://localhost:3333/v1/mal/anime?type=tv&status=airing&order_by=popularity&sort=asc&limit=10"
 ```
 ### GET /v1/anilist/anime
-Searches anime from Anilist
+Searches anime from AniList
 Parameters:
 - isAdult: Boolean
@@ -58,7 +58,7 @@ curl "http://localhost:3333/v1/anilist/anime?format=TV,ONA&sort=TRENDING_DESC&is
 ## Environment
 One configuration environment variable is supported:
 - `ALWAYS_SKIP_MAL_IDS`: Comma-separated list of MyAnimeList IDs to always skip. These do not count towards the return limit.
-- `ALWAYS_SKIP_ANILIST_IDS`: Comma-separated list of Anilist IDs to always skip. These do not count towards the return limit.
+- `ALWAYS_SKIP_ANILIST_IDS`: Comma-separated list of AniList IDs to always skip. These do not count towards the return limit.
 ## Docker Compose
 ```yaml

@@ -74,32 +74,32 @@ query (
 }
 `
-type AnilistPageInfo struct {
+type AniListPageInfo struct {
 	HasNextPage bool `json:"hasNextPage"`
 }
-type AnilistMediaItem struct {
+type AniListMediaItem struct {
 	Id int `json:"id"`
 	IdMal int `json:"idMal"`
-	Title AnilistTitle `json:"title"`
+	Title AniListTitle `json:"title"`
 }
-type AnilistTitle struct {
+type AniListTitle struct {
 	Romaji string `json:"romaji"`
 	English string `json:"english"`
 }
-type AnilistResponsePage struct {
-	PageInfo AnilistPageInfo `json:"pageInfo"`
-	Media []AnilistMediaItem `json:"media"`
+type AniListResponsePage struct {
+	PageInfo AniListPageInfo `json:"pageInfo"`
+	Media []AniListMediaItem `json:"media"`
 }
-type AnilistResponseData struct {
-	Page AnilistResponsePage `json:"Page"`
+type AniListResponseData struct {
+	Page AniListResponsePage `json:"Page"`
 }
-type AnilistApiResponse struct {
-	Data AnilistResponseData `json:"data"`
+type AniListApiResponse struct {
+	Data AniListResponseData `json:"data"`
 }
-func handleAnilistAnimeSearch(idMap *ConcurrentMap, permaSkipIds []string) http.HandlerFunc {
+func handleAniListAnimeSearch(idMap *ConcurrentMap, permaSkipIds []string) http.HandlerFunc {
 	return func(w http.ResponseWriter, r *http.Request) {
-		search, err := getAnilistAnimeSearch(idMap, permaSkipIds, r)
+		search, err := getAniListAnimeSearch(idMap, permaSkipIds, r)
 		if err != nil {
 			w.WriteHeader(500)
 			w.Write([]byte(err.Error()))
@@ -109,7 +109,7 @@ func handleAnilistAnimeSearch(idMap *ConcurrentMap, permaSkipIds []string) http.
 	}
 }
-func getAnilistAnimeSearch(idMap *ConcurrentMap, permaSkipAnilistIds []string, r *http.Request) ([]byte, error) {
+func getAniListAnimeSearch(idMap *ConcurrentMap, permaSkipAniListIds []string, r *http.Request) ([]byte, error) {
 	q := r.URL.Query()
 	// set default params
@@ -119,7 +119,7 @@ func getAnilistAnimeSearch(idMap *ConcurrentMap, permaSkipAnilistIds []string, r
 	}
 	q.Set("type", "ANIME")
-	// dont include limit in the Anilist api call as its already hard coded at 20 per page
+	// dont include limit in the AniList api call as its already hard coded at 20 per page
 	q.Del("limit")
 	skipDedup := parseBoolParam(q, "allowDuplicates")
@@ -132,24 +132,24 @@ func getAnilistAnimeSearch(idMap *ConcurrentMap, permaSkipAnilistIds []string, r
 	for hasNextPage {
 		page++
 		q.Set("page", strconv.Itoa(page))
-		result, err := makeAnilistApiCall(q)
+		result, err := makeAniListApiCall(q)
 		if err != nil {
-			log.Println("Error sending request to Anilist: ", err)
+			log.Println("Error sending request to AniList: ", err)
 			return nil, err
 		}
 		// map the data
 		for _, item := range result.Data.Page.Media {
 			if idMap.GetByMalId(item.IdMal) == 0 {
-				log.Printf("Anilist ID %d (%s) has no associated TVDB ID, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
+				log.Printf("AniList ID %d (%s) has no associated TVDB ID, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
 				continue
 			}
 			if usedIds[item.Id] && !skipDedup {
-				log.Printf("Anilist ID %d (%s) is a duplicate, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
+				log.Printf("AniList ID %d (%s) is a duplicate, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
 				continue
 			}
-			if slices.Contains(permaSkipAnilistIds, strconv.Itoa(item.Id)) {
-				log.Printf("Anilist ID %d (%s) is set to always skip, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
+			if slices.Contains(permaSkipAniListIds, strconv.Itoa(item.Id)) {
+				log.Printf("AniList ID %d (%s) is set to always skip, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
 				continue
 			}
 			count++
@@ -183,7 +183,7 @@ func getAnilistAnimeSearch(idMap *ConcurrentMap, permaSkipAnilistIds []string, r
 	return respJson, nil
 }
-func makeAnilistApiCall(q url.Values) (*AnilistApiResponse, error) {
+func makeAniListApiCall(q url.Values) (*AniListApiResponse, error) {
 	// Build the GraphQL request body
 	variables := BuildGraphQLVariables(q)
@@ -203,7 +203,7 @@ func makeAnilistApiCall(q url.Values) (*AnilistApiResponse, error) {
 	}
 	defer resp.Body.Close()
-	respData := new(AnilistApiResponse)
+	respData := new(AniListApiResponse)
 	err = json.NewDecoder(resp.Body).Decode(respData)
 	if err != nil {
 		return nil, err
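
The handler above pages through AniList results until `pageInfo.hasNextPage` is false; the project's actual GraphQL query and its `BuildGraphQLVariables` helper fall outside the hunks shown in this diff. As a rough, standalone illustration of the same pagination pattern against AniList's public GraphQL endpoint (https://graphql.anilist.co), with a query written for this sketch rather than taken from the project:

```go
// Standalone pagination sketch against the public AniList GraphQL API.
// The query below is written for this example; the importer's own query,
// variable building, and error handling are not shown in this diff.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

const query = `query ($page: Int) {
  Page(page: $page, perPage: 20) {
    pageInfo { hasNextPage }
    media(type: ANIME, sort: TRENDING_DESC) {
      id
      idMal
      title { romaji english }
    }
  }
}`

type apiResponse struct {
	Data struct {
		Page struct {
			PageInfo struct {
				HasNextPage bool `json:"hasNextPage"`
			} `json:"pageInfo"`
			Media []struct {
				Id    int `json:"id"`
				IdMal int `json:"idMal"`
				Title struct {
					Romaji  string `json:"romaji"`
					English string `json:"english"`
				} `json:"title"`
			} `json:"media"`
		} `json:"Page"`
	} `json:"data"`
}

func main() {
	for page, hasNext := 1, true; hasNext && page <= 3; page++ {
		body, _ := json.Marshal(map[string]any{
			"query":     query,
			"variables": map[string]any{"page": page},
		})
		resp, err := http.Post("https://graphql.anilist.co", "application/json", bytes.NewReader(body))
		if err != nil {
			log.Fatal(err)
		}
		var out apiResponse
		if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
			resp.Body.Close()
			log.Fatal(err)
		}
		resp.Body.Close()

		for _, m := range out.Data.Page.Media {
			fmt.Printf("AniList %d / MAL %d: %s\n", m.Id, m.IdMal, m.Title.Romaji)
		}
		hasNext = out.Data.Page.PageInfo.HasNextPage
	}
}
```

The `page <= 3` cap only keeps the example bounded; the importer relies on the required `limit` parameter and `hasNextPage` instead.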

@@ -17,13 +17,13 @@ type ResponseItem struct {
 	Title string `json:"title"`
 	TitleEng string `json:"titleEnglish,omitempty"`
 	MalId int `json:"malId,omitempty"`
-	AnilistId int `json:"anilistId,omitempty"`
+	AniListId int `json:"anilistId,omitempty"`
 	TvdbId int `json:"tvdbId"`
 }
 type AnimeEntry struct {
 	TvdbId int `json:"tvdb_id"`
 	MalId interface{} `json:"mal_id"`
-	AnilistId int `json:"anilist_id"`
+	AniListId int `json:"anilist_id"`
 }
 type ConcurrentMap struct {
 	mal map[int]int
@@ -48,14 +48,14 @@ func main() {
 	if permaSkipMalStr != "" {
 		log.Printf("Always skipping MAL IDs: %v\n", permaSkipMalIds)
 	}
-	permaSkipAnilistStr := os.Getenv("ALWAYS_SKIP_ANILIST_IDS")
-	permaSkipAnilistIds := strings.Split(permaSkipAnilistStr, ",")
-	if permaSkipAnilistStr != "" {
-		log.Printf("Always skipping Anilist IDs: %v\n", permaSkipAnilistIds)
+	permaSkipAniListStr := os.Getenv("ALWAYS_SKIP_ANILIST_IDS")
+	permaSkipAniListIds := strings.Split(permaSkipAniListStr, ",")
+	if permaSkipAniListStr != "" {
+		log.Printf("Always skipping AniList IDs: %v\n", permaSkipAniListIds)
 	}
 	buildIdMapMiddleware := newRebuildStaleIdMapMiddleware(idMap)
 	http.HandleFunc("/v1/mal/anime", loggerMiddleware(buildIdMapMiddleware(handleMalAnimeSearch(idMap, permaSkipMalIds))))
-	http.HandleFunc("/v1/anilist/anime", loggerMiddleware(buildIdMapMiddleware(handleAnilistAnimeSearch(idMap, permaSkipAnilistIds))))
+	http.HandleFunc("/v1/anilist/anime", loggerMiddleware(buildIdMapMiddleware(handleAniListAnimeSearch(idMap, permaSkipAniListIds))))
 	log.Println("Listening on :3333")
 	log.Fatal(http.ListenAndServe(":3333", nil))
 }
@@ -102,7 +102,7 @@ func buildIdMap(idMap *ConcurrentMap) {
 		for _, val := range malIdList {
 			idMap.mal[val] = entry.TvdbId
 		}
-		if entry.AnilistId == 0 {
+		if entry.AniListId == 0 {
 			continue
 		}
 	}
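
In `main()` above, each route is wrapped as `loggerMiddleware(buildIdMapMiddleware(handler))`. The middleware bodies are not part of this diff; the sketch below only illustrates the general `func(http.HandlerFunc) http.HandlerFunc` chaining pattern that wiring implies, with hypothetical middleware names.

```go
// Generic sketch of the middleware-chaining pattern used in main() above.
// withLogging and withRefresh are hypothetical stand-ins for the project's
// loggerMiddleware and newRebuildStaleIdMapMiddleware(idMap).
package main

import (
	"log"
	"net/http"
	"time"
)

// withLogging wraps a handler and logs method, path, and duration.
func withLogging(next http.HandlerFunc) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		start := time.Now()
		next(w, r)
		log.Printf("%s %s (%s)", r.Method, r.URL.Path, time.Since(start))
	}
}

// withRefresh runs some preparatory work (e.g. refreshing a cached ID map)
// before handing the request to the wrapped handler.
func withRefresh(refresh func()) func(http.HandlerFunc) http.HandlerFunc {
	return func(next http.HandlerFunc) http.HandlerFunc {
		return func(w http.ResponseWriter, r *http.Request) {
			refresh()
			next(w, r)
		}
	}
}

func main() {
	refresh := withRefresh(func() { log.Println("refreshing ID map (placeholder)") })
	hello := func(w http.ResponseWriter, r *http.Request) { w.Write([]byte("ok")) }

	// The outermost wrapper runs first: logging around the refresh-then-handle chain.
	http.HandleFunc("/v1/example", withLogging(refresh(hello)))
	log.Println("Listening on :3333")
	log.Fatal(http.ListenAndServe(":3333", nil))
}
```

Chaining reads inside-out: the innermost function is the route handler, and each wrapper adds behavior around it.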
