feat: Anilist support

pull/5/head v0.2.0
Gabe Farrell 8 months ago
parent ec6c1cc0e0
commit cdd7b67003

@ -4,5 +4,5 @@ WORKDIR /app
COPY go.mod go.sum ./
RUN go mod download
COPY *.go ./
RUN CGO_ENABLED=0 GOOS=linux go build -o /sonarr-mal-importer
CMD ["/sonarr-mal-importer"]
RUN CGO_ENABLED=0 GOOS=linux go build -o /sonarr-anime-importer
CMD ["/sonarr-anime-importer"]

@ -1,12 +1,18 @@
# sonarr-mal-importer
This is basically a wrapper for [Jikan](jikan.moe) that converts a Jikan API call to a list with TVDB IDs that Sonarr can import the results.
# sonarr-anime-importer
Easily create import lists in sonarr with MyAnimeList or Anilist queries!
**This API will spam calls that have pagination so make sure you set a limit in the query parameters so you don't get rate limited or IP banned!!**
This is basically a wrapper for [Jikan](https://jikan.moe) and the Anilist API that maps IDs to a list with TVDB IDs so that Sonarr can import the results.
Pulls MyAnimeList and TVDB ID associations from https://raw.githubusercontent.com/Kometa-Team/Anime-IDs/master/anime_ids.json.
**Until v1.0.0, breaking changes can happen at any time. Multiple have happened already! Be wary updating.**
**The "limit" parameter is required for all requests!**
Pulls MyAnimeList, Anilist, and TVDB ID associations from https://raw.githubusercontent.com/Kometa-Team/Anime-IDs/master/anime_ids.json.
## Supported Requests
### GET /anime
### GET /v1/mal/anime
Searches anime from MyAnimeList
See https://docs.api.jikan.moe/#tag/anime/operation/getAnimeSearch for parameters.
Additional parameters supported:
@ -15,36 +21,70 @@ Additional parameters supported:
Example request:
```bash
# fetches the top 10 most popular currently airing tv anime
curl "http://localhost:3333/anime?type=tv&status=airing&order_by=popularity&sort=asc&limit=10"
curl "http://localhost:3333/v1/mal/anime?type=tv&status=airing&order_by=popularity&sort=asc&limit=10"
```
### GET /v1/anilist/anime
Searches anime from Anilist
Parameters:
- isAdult: Boolean
- search: String
- format: [[MediaFormat]](https://studio.apollographql.com/sandbox/schema/reference/enums/MediaFormat)
- status: [MediaStatus](https://studio.apollographql.com/sandbox/schema/reference/enums/MediaStatus)
- countryOfOrigin: [CountryCode](https://studio.apollographql.com/sandbox/schema/reference/scalars/CountryCode)
- season: [MediaSeason](https://studio.apollographql.com/sandbox/schema/reference/enums/MediaSeason)
- seasonYear: Int
- year: String
- onList: Boolean
- yearLesser: [FuzzyDateInt](https://studio.apollographql.com/sandbox/schema/reference/scalars/FuzzyDateInt)
- yearGreater: [FuzzyDateInt](https://studio.apollographql.com/sandbox/schema/reference/scalars/FuzzyDateInt)
- averageScoreGreater: Int
- averageScoreLesser: Int
- genres: [String]
- excludedGenres: [String]
- tags: [String]
- excludedTags: [String]
- minimumTagRank: Int
- sort: [[MediaSort]](https://studio.apollographql.com/sandbox/schema/reference/enums/MediaSort)
- limit: Int
- allowDuplicates: Boolean
Example request:
```bash
# fetch the top 20, non-adult trending anime that are either TV or ONA and are made in Japan after 2020
curl "http://localhost:3333/v1/anilist/anime?format=TV,ONA&sort=TRENDING_DESC&isAdult=false&countryOfOrigin=JP&yearGreater=20200000&limit=20"
```
## Environment
The following configuration environment variables are supported:
- `ALWAYS_SKIP_MAL_IDS`: Comma-separated list of MyAnimeList IDs to always skip. These do not count towards the return limit.
- `ALWAYS_SKIP_ANILIST_IDS`: Comma-separated list of Anilist IDs to always skip. These do not count towards the return limit.
## Docker Compose
```yaml
services:
sonarr-mal-importer:
image: gabehf/sonarr-mal-importer:latest
container_name: sonarr-mal-importer
sonarr-anime-importer:
image: gabehf/sonarr-anime-importer:latest
container_name: sonarr-anime-importer
ports:
- 3333:3333
environment:
- ALWAYS_SKIP_MAL_IDS=12345,67890 # Comma-separated
- ALWAYS_SKIP_ANILIST_IDS=01234,56789 # Comma-separated
restart: unless-stopped
```
# TODO
- [x] Add de-duplication and a query param to disable it
- [x] Add perma-skip by MALId option in environment variable
- [ ] Only do "a.k.a." when logging if the anime has different romanized and english titles
- [x] Only do "a.k.a." when logging if the anime has different romanized and english titles
- [ ] Prevent spamming calls when few/no IDs are mapped to TVDB
# Albums that fueled development
| Album | Artist |
|-------------------------|------------------------------|
| ZOO!! | Necry Talkie (ネクライトーキー) |
| FREAK | Necry Talkie (ネクライトーキー) |
| Expert In A Dying Field | The Beths |
| Vivid | ADOY |
| Album | Artist |
|-------------------------|---------------------------------|
| ZOO!! | Necry Talkie (ネクライトーキー) |
| FREAK | Necry Talkie (ネクライトーキー) |
| Expert In A Dying Field | The Beths |
| Vivid | ADOY |
| CHUU | Strawberry Rush |
| MIMI | Hug (feat. HATSUNE MIKU & KAFU) |

@ -0,0 +1,288 @@
package main
import (
"bytes"
"encoding/json"
"errors"
"log"
"net/http"
"net/url"
"slices"
"strconv"
"strings"
"time"
)
// anilistQuery is the GraphQL document sent to the Anilist API for every
// search page. The page size is hard-coded at 20 (perPage: 20); pagination is
// driven by the $page variable and the returned pageInfo.hasNextPage flag.
// Only the media id and titles are requested — the TVDB mapping is done
// locally from the Kometa anime_ids table.
const anilistQuery = `
query (
	$page: Int
	$type: MediaType
	$isAdult: Boolean
	$search: String
	$format: [MediaFormat]
	$status: MediaStatus
	$countryOfOrigin: CountryCode
	$season: MediaSeason
	$seasonYear: Int
	$year: String
	$onList: Boolean
	$yearLesser: FuzzyDateInt
	$yearGreater: FuzzyDateInt
	$averageScoreGreater: Int
	$averageScoreLesser: Int
	$genres: [String]
	$excludedGenres: [String]
	$tags: [String]
	$excludedTags: [String]
	$minimumTagRank: Int
	$sort: [MediaSort]
) {
	Page(page: $page, perPage: 20) {
		pageInfo {
			hasNextPage
		}
		media(
			type: $type
			season: $season
			format_in: $format
			status: $status
			countryOfOrigin: $countryOfOrigin
			search: $search
			onList: $onList
			seasonYear: $seasonYear
			startDate_like: $year
			startDate_lesser: $yearLesser
			startDate_greater: $yearGreater
			averageScore_greater: $averageScoreGreater
			averageScore_lesser: $averageScoreLesser
			genre_in: $genres
			genre_not_in: $excludedGenres
			tag_in: $tags
			tag_not_in: $excludedTags
			minimumTagRank: $minimumTagRank
			sort: $sort
			isAdult: $isAdult
		) {
			id
			title {
				romaji
				english
			}
		}
	}
}
`
// AnilistPageInfo holds the pagination flag returned with each Anilist page.
type AnilistPageInfo struct {
	HasNextPage bool `json:"hasNextPage"`
}

// AnilistMediaItem is a single media entry (one anime) in an Anilist page.
type AnilistMediaItem struct {
	Id    int          `json:"id"`
	Title AnilistTitle `json:"title"`
}

// AnilistTitle holds the romanized and English titles of a media entry.
// English may be empty when Anilist has no English title.
type AnilistTitle struct {
	Romaji  string `json:"romaji"`
	English string `json:"english"`
}

// AnilistResponsePage mirrors the Page object of the GraphQL response.
type AnilistResponsePage struct {
	PageInfo AnilistPageInfo    `json:"pageInfo"`
	Media    []AnilistMediaItem `json:"media"`
}

// AnilistResponseData mirrors the "data" payload of the GraphQL response.
type AnilistResponseData struct {
	Page AnilistResponsePage `json:"Page"`
}

// AnilistApiResponse is the top-level envelope returned by the Anilist API.
type AnilistApiResponse struct {
	Data AnilistResponseData `json:"data"`
}
// handleAnilistAnimeSearch builds the HTTP handler for GET /v1/anilist/anime.
// On success it writes the JSON produced by getAnilistAnimeSearch; on failure
// it responds 500 with the error text as the body.
func handleAnilistAnimeSearch(idMap *ConcurrentMap, permaSkipIds []string) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		body, err := getAnilistAnimeSearch(idMap, permaSkipIds, r)
		if err != nil {
			w.WriteHeader(http.StatusInternalServerError)
			w.Write([]byte(err.Error()))
			return
		}
		w.Write(body)
	}
}
// getAnilistAnimeSearch runs a paginated Anilist search driven by the
// request's query parameters and returns a JSON array of ResponseItem entries
// whose Anilist IDs could be mapped to TVDB IDs.
//
// The "limit" query parameter is required and caps how many mapped items are
// returned. Entries skipped for having no TVDB mapping, being duplicates, or
// appearing in permaSkipAnilistIds do NOT count toward that limit, so several
// Anilist pages may be fetched to fill it.
func getAnilistAnimeSearch(idMap *ConcurrentMap, permaSkipAnilistIds []string, r *http.Request) ([]byte, error) {
	q := r.URL.Query()
	// set default params
	limit, err := strconv.Atoi(q.Get("limit"))
	if err != nil {
		// NOTE(review): the message has a leading space — looks unintentional.
		return nil, errors.New(" Required parameter \"limit\" not specified")
	}
	// This service only deals with anime, never manga.
	q.Set("type", "ANIME")
	// dont include limit in the Anilist api call as its already hard coded at 20 per page
	q.Del("limit")
	// allowDuplicates=true disables the de-duplication check below.
	skipDedup := parseBoolParam(q, "allowDuplicates")
	hasNextPage := true
	page := 0
	resp := []ResponseItem{}
	count := 0
	usedIds := make(map[int]bool, 0)
	// Walk Anilist's pages until the limit is reached or pages run out.
	for hasNextPage {
		page++
		q.Set("page", strconv.Itoa(page))
		result, err := makeAnilistApiCall(q)
		if err != nil {
			log.Println("Error sending request to Anilist: ", err)
			return nil, err
		}
		// map the data
		for _, item := range result.Data.Page.Media {
			// Entries without a TVDB mapping are useless to Sonarr — skip.
			if idMap.GetByAnilistId(item.Id) == 0 {
				log.Printf("Anilist ID %d (%s) has no associated TVDB ID, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
				continue
			}
			if usedIds[item.Id] && !skipDedup {
				log.Printf("Anilist ID %d (%s) is a duplicate, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
				continue
			}
			if slices.Contains(permaSkipAnilistIds, strconv.Itoa(item.Id)) {
				log.Printf("Anilist ID %d (%s) is set to always skip, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
				continue
			}
			count++
			// Once count exceeds the limit the current item is NOT appended.
			if count > limit {
				break
			}
			resp = append(resp,
				ResponseItem{
					item.Title.Romaji,
					item.Title.English,
					0, // MalId: unknown for Anilist results
					item.Id,
					idMap.GetByAnilistId(item.Id),
				})
			usedIds[item.Id] = true
		}
		hasNextPage = result.Data.Page.PageInfo.HasNextPage
		if count > limit {
			break
		}
		if hasNextPage {
			time.Sleep(500 * time.Millisecond) // sleep between requests for new page to try and avoid rate limits
		}
	}
	respJson, err := json.MarshalIndent(resp, "", " ")
	if err != nil {
		log.Println("Error marshalling response: ", err)
		return nil, err
	}
	return respJson, nil
}
// makeAnilistApiCall POSTs the anilistQuery GraphQL document to the Anilist
// API with variables derived from the given query parameters, and decodes the
// JSON response.
//
// Fix: the original never inspected resp.StatusCode, so a non-200 reply
// (notably Anilist's 429 rate-limit responses) was silently decoded into an
// empty AnilistApiResponse and treated as "no more results". Now a non-200
// status is surfaced as an error so callers stop paginating and report it.
func makeAnilistApiCall(q url.Values) (*AnilistApiResponse, error) {
	// Build the GraphQL request body.
	variables := BuildGraphQLVariables(q)
	body := map[string]interface{}{
		"query":     anilistQuery,
		"variables": variables,
	}
	jsonBody, err := json.Marshal(body)
	if err != nil {
		return nil, err
	}
	// Make the POST request.
	resp, err := http.Post("https://graphql.anilist.co", "application/json", bytes.NewBuffer(jsonBody))
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	// Anilist signals errors (bad variables, rate limiting) via the status code.
	if resp.StatusCode != http.StatusOK {
		return nil, errors.New("anilist API returned non-OK status: " + resp.Status)
	}
	respData := new(AnilistApiResponse)
	if err := json.NewDecoder(resp.Body).Decode(respData); err != nil {
		return nil, err
	}
	return respData, nil
}
// BuildGraphQLVariables converts URL query parameters into a GraphQL variables map.
func BuildGraphQLVariables(params url.Values) map[string]interface{} {
vars := make(map[string]interface{})
// Helper to convert comma-separated strings into slices
parseList := func(key string) []string {
if val := params.Get(key); val != "" {
return strings.Split(val, ",")
}
return nil
}
// Helper to convert integer parameters
parseInt := func(key string) *int {
if val := params.Get(key); val != "" {
if i, err := strconv.Atoi(val); err == nil {
return &i
}
}
return nil
}
// Helper to convert boolean parameters
parseBool := func(key string) *bool {
if val := params.Get(key); val != "" {
if b, err := strconv.ParseBool(val); err == nil {
return &b
}
}
return nil
}
// Basic int and bool params
if v := parseInt("page"); v != nil {
vars["page"] = *v
}
if v := parseInt("seasonYear"); v != nil {
vars["seasonYear"] = *v
}
if v := parseInt("yearLesser"); v != nil {
vars["yearLesser"] = *v
}
if v := parseInt("yearGreater"); v != nil {
vars["yearGreater"] = *v
}
if v := parseInt("averageScoreGreater"); v != nil {
vars["averageScoreGreater"] = *v
}
if v := parseInt("averageScoreLesser"); v != nil {
vars["averageScoreLesser"] = *v
}
if v := parseInt("minimumTagRank"); v != nil {
vars["minimumTagRank"] = *v
}
if v := parseBool("onList"); v != nil {
vars["onList"] = *v
}
if v := parseBool("isAdult"); v != nil {
vars["isAdult"] = *v
}
// Simple string params
for _, key := range []string{"type", "search", "status", "countryOfOrigin", "season", "year"} {
if val := params.Get(key); val != "" {
vars[key] = val
}
}
// List-type string params
for _, key := range []string{"format", "genres", "excludedGenres", "tags", "excludedTags", "sort"} {
if list := parseList(key); list != nil {
vars[key] = list
}
}
return vars
}

@ -1,4 +1,4 @@
module github.com/gabehf/sonarr-mal-importer
module github.com/gabehf/sonarr-anime-importer
go 1.23.0

@ -0,0 +1,30 @@
package main
import (
"net/url"
"strconv"
)
// parses the boolean param "name" from url.Values "values"
func parseBoolParam(values url.Values, name string) bool {
param := values.Get(name)
if param != "" {
val, err := strconv.ParseBool(param)
if err == nil {
return val
}
} else if _, exists := values[name]; exists {
return true
}
return false
}
// just the title, or "title a.k.a. english title" if both exist
func FullAnimeTitle(title, engtitle string) string {
if engtitle != "" {
return title + " a.k.a. " + engtitle
} else {
return title
}
}

@ -5,142 +5,67 @@ import (
"io"
"log"
"net/http"
"net/url"
"os"
"reflect"
"slices"
"strconv"
"strings"
"sync"
"time"
"github.com/darenliang/jikan-go"
)
type ResponseItem struct {
Title string `json:"title"`
MalId int `json:"malId"`
TvdbId int `json:"tvdbId"`
Title string `json:"title"`
TitleEng string `json:"titleEnglish,omitempty"`
MalId int `json:"malId,omitempty"`
AnilistId int `json:"anilistId,omitempty"`
TvdbId int `json:"tvdbId"`
}
type AnimeEntry struct {
TvdbId int `json:"tvdb_id"`
MalId interface{} `json:"mal_id"`
TvdbId int `json:"tvdb_id"`
MalId interface{} `json:"mal_id"`
AnilistId int `json:"anilist_id"`
}
type ConcurrentMap struct {
m map[int]int
mut sync.RWMutex
mal map[int]int
anilist map[int]int
mut sync.RWMutex
}
func (m *ConcurrentMap) Get(i int) int {
func (m *ConcurrentMap) GetByMalId(i int) int {
m.mut.RLock()
defer m.mut.RUnlock()
return m.m[i]
return m.mal[i]
}
// GetByAnilistId returns the TVDB ID mapped to the given Anilist ID, or 0
// when no mapping exists. Safe for concurrent use (read-locked).
func (m *ConcurrentMap) GetByAnilistId(i int) int {
	m.mut.RLock()
	defer m.mut.RUnlock()
	return m.anilist[i]
}
var lastBuiltAnimeIdList time.Time
func main() {
log.Println("sonarr-mal-importer v0.1.1")
log.Println("sonarr-anime-importer v0.2.0")
log.Println("Building Anime ID Associations...")
var malToTvdb = new(ConcurrentMap)
buildIdMap(malToTvdb)
var idMap = new(ConcurrentMap)
buildIdMap(idMap)
permaSkipMalStr := os.Getenv("ALWAYS_SKIP_MAL_IDS")
permaSkipMalIds := strings.Split(permaSkipMalStr, ",")
if permaSkipMalStr != "" {
log.Printf("Always skipping: %v\n", permaSkipMalIds)
log.Printf("Always skipping MAL IDs: %v\n", permaSkipMalIds)
}
permaSkipAnilistStr := os.Getenv("ALWAYS_SKIP_ANILIST_IDS")
permaSkipAnilistIds := strings.Split(permaSkipAnilistStr, ",")
if permaSkipAnilistStr != "" {
log.Printf("Always skipping Anilist IDs: %v\n", permaSkipAnilistIds)
}
http.HandleFunc("/anime", handleAnimeSearch(malToTvdb, permaSkipMalIds))
buildIdMapMiddleware := newRebuildStaleIdMapMiddleware(idMap)
http.HandleFunc("/v1/mal/anime", loggerMiddleware(buildIdMapMiddleware(handleMalAnimeSearch(idMap, permaSkipMalIds))))
http.HandleFunc("/v1/anilist/anime", loggerMiddleware(buildIdMapMiddleware(handleAnilistAnimeSearch(idMap, permaSkipAnilistIds))))
log.Println("Listening on :3333")
log.Fatal(http.ListenAndServe(":3333", nil))
}
func handleAnimeSearch(malToTvdb *ConcurrentMap, permaSkipMalIds []string) func(w http.ResponseWriter, r *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
log.Printf("%s %s?%s", r.Method, r.URL.Path, r.URL.RawQuery)
if time.Since(lastBuiltAnimeIdList) > 24*time.Hour {
log.Println("Anime ID association table expired, building new table...")
buildIdMap(malToTvdb)
}
search, err := getAnimeSearch(malToTvdb, permaSkipMalIds, r)
if err != nil {
w.WriteHeader(500)
} else {
w.Write([]byte(search))
}
}
}
func getAnimeSearch(malToTvdb *ConcurrentMap, permaSkipMalIds []string, r *http.Request) (string, error) {
q := r.URL.Query()
limit, err := strconv.Atoi(q.Get("limit"))
if err != nil {
limit = 9999 // limit not specified or invalid
}
skipDedup := parseBoolParam(q, "allow_duplicates")
// for some reason Jikan responds with 400 Bad Request for any limit >25
// so instead, we just limit when mapping the data and remove the limit from the Jikan request
q.Del("limit")
hasNextPage := true
page := 0
resp := []ResponseItem{}
count := 0
usedIds := make(map[int]bool, 0)
for hasNextPage {
page++
q.Set("page", strconv.Itoa(page))
result, err := jikan.GetAnimeSearch(q)
if err != nil {
log.Println("Error sending request to Jikan: ", err)
return "", err
}
// map the data
for _, item := range result.Data {
if malToTvdb.Get(item.MalId) == 0 {
log.Printf("MyAnimeList ID %d (%s a.k.a. %s) has no associated TVDB ID, skipping...\n", item.MalId, item.Title, item.TitleEnglish)
continue
}
if usedIds[item.MalId] && !skipDedup {
log.Printf("MyAnimeList ID %d (%s a.k.a. %s) is a duplicate, skipping...\n", item.MalId, item.Title, item.TitleEnglish)
continue
}
if slices.Contains(permaSkipMalIds, strconv.Itoa(item.MalId)) {
log.Printf("MyAnimeList ID %d (%s a.k.a. %s) is set to always skip, skipping...\n", item.MalId, item.Title, item.TitleEnglish)
continue
}
count++
if count > limit {
break
}
resp = append(resp,
ResponseItem{
item.Title,
item.MalId,
malToTvdb.Get(item.MalId),
})
usedIds[item.MalId] = true
}
hasNextPage = result.Pagination.HasNextPage
if count > limit {
break
}
if hasNextPage {
time.Sleep(500 * time.Millisecond) // sleep between requests for new page to try and avoid rate limits
}
}
respJson, err := json.MarshalIndent(resp, "", " ")
if err != nil {
log.Println("Error marshalling response: ", err)
return "", err
}
return string(respJson), nil
}
func buildIdMap(idMap *ConcurrentMap) {
// build/re-build the mal -> tvdb association table
idMap.mut.Lock()
@ -150,6 +75,7 @@ func buildIdMap(idMap *ConcurrentMap) {
if err != nil {
log.Fatal("Error fetching anime_ids.json: ", err)
}
defer resp.Body.Close()
idListBytes, err = io.ReadAll(resp.Body)
if err != nil {
log.Fatal("Error reading anime_ids.json: ", err)
@ -160,7 +86,8 @@ func buildIdMap(idMap *ConcurrentMap) {
if err != nil {
log.Fatal("Error unmarshalling anime_ids.json: ", err)
}
idMap.m = make(map[int]int, 0)
idMap.mal = make(map[int]int, 0)
idMap.anilist = make(map[int]int, 0)
for _, entry := range animeMap {
if entry.MalId == nil {
continue
@ -180,23 +107,12 @@ func buildIdMap(idMap *ConcurrentMap) {
malIdList = append(malIdList, int(entry.MalId.(float64)))
}
for _, val := range malIdList {
idMap.m[val] = entry.TvdbId
idMap.mal[val] = entry.TvdbId
}
}
lastBuiltAnimeIdList = time.Now()
}
// parses the boolean param "name" from url.Values "values"
func parseBoolParam(values url.Values, name string) bool {
param := values.Get(name)
if param != "" {
val, err := strconv.ParseBool(param)
if err == nil {
return val
if entry.AnilistId == 0 {
continue
}
} else if _, exists := values[name]; exists {
return true
idMap.anilist[entry.AnilistId] = entry.TvdbId
}
return false
lastBuiltAnimeIdList = time.Now()
}

@ -0,0 +1,98 @@
package main
import (
"encoding/json"
"errors"
"log"
"net/http"
"slices"
"strconv"
"time"
"github.com/darenliang/jikan-go"
)
// handleMalAnimeSearch builds the HTTP handler for GET /v1/mal/anime.
// On success it writes the JSON produced by getJikanAnimeSearch; on failure
// it responds 500 with the error text as the body.
func handleMalAnimeSearch(idMap *ConcurrentMap, permaSkipMalIds []string) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		body, err := getJikanAnimeSearch(idMap, permaSkipMalIds, r)
		if err != nil {
			w.WriteHeader(http.StatusInternalServerError)
			w.Write([]byte(err.Error()))
			return
		}
		w.Write([]byte(body))
	}
}
// getJikanAnimeSearch runs a paginated MyAnimeList search through the Jikan
// API and returns a JSON array of ResponseItem entries whose MAL IDs could be
// mapped to TVDB IDs.
//
// The "limit" query parameter is required and caps how many mapped items are
// returned. Entries skipped for having no TVDB mapping, being duplicates, or
// appearing in permaSkipMalIds do NOT count toward that limit, so several
// Jikan pages may be fetched to fill it.
func getJikanAnimeSearch(idMap *ConcurrentMap, permaSkipMalIds []string, r *http.Request) (string, error) {
	q := r.URL.Query()
	limit, err := strconv.Atoi(q.Get("limit"))
	if err != nil {
		// NOTE(review): the message has a leading space — looks unintentional.
		return "", errors.New(" Required parameter \"limit\" not specified")
	}
	// allow_duplicates=true disables the de-duplication check below.
	skipDedup := parseBoolParam(q, "allow_duplicates")
	// for some reason Jikan responds with 400 Bad Request for any limit >25
	// so instead, we just limit when mapping the data and remove the limit from the Jikan request
	q.Del("limit")
	hasNextPage := true
	page := 0
	resp := []ResponseItem{}
	count := 0
	usedIds := make(map[int]bool, 0)
	// Walk Jikan's result pages until the limit is reached or pages run out.
	for hasNextPage {
		page++
		q.Set("page", strconv.Itoa(page))
		result, err := jikan.GetAnimeSearch(q)
		if err != nil {
			log.Println("Error sending request to Jikan: ", err)
			return "", err
		}
		// map the data
		for _, item := range result.Data {
			// Entries without a TVDB mapping are useless to Sonarr — skip.
			if idMap.GetByMalId(item.MalId) == 0 {
				log.Printf("MyAnimeList ID %d (%s) has no associated TVDB ID, skipping...\n", item.MalId, FullAnimeTitle(item.Title, item.TitleEnglish))
				continue
			}
			if usedIds[item.MalId] && !skipDedup {
				log.Printf("MyAnimeList ID %d (%s) is a duplicate, skipping...\n", item.MalId, FullAnimeTitle(item.Title, item.TitleEnglish))
				continue
			}
			if slices.Contains(permaSkipMalIds, strconv.Itoa(item.MalId)) {
				log.Printf("MyAnimeList ID %d (%s) is set to always skip, skipping...\n", item.MalId, FullAnimeTitle(item.Title, item.TitleEnglish))
				continue
			}
			count++
			// Once count exceeds the limit the current item is NOT appended.
			if count > limit {
				break
			}
			resp = append(resp,
				ResponseItem{
					item.Title,
					item.TitleEnglish,
					item.MalId,
					0, // AnilistId: unknown for MAL results
					idMap.GetByMalId(item.MalId),
				})
			usedIds[item.MalId] = true
		}
		hasNextPage = result.Pagination.HasNextPage
		if count > limit {
			break
		}
		if hasNextPage {
			time.Sleep(500 * time.Millisecond) // sleep between requests for new page to try and avoid rate limits
		}
	}
	respJson, err := json.MarshalIndent(resp, "", " ")
	if err != nil {
		log.Println("Error marshalling response: ", err)
		return "", err
	}
	return string(respJson), nil
}

@ -0,0 +1,26 @@
package main
import (
"log"
"net/http"
"time"
)
// newRebuildStaleIdMapMiddleware returns middleware that transparently
// rebuilds the anime ID association table whenever it is more than 24 hours
// old, before handing the request to the next handler.
func newRebuildStaleIdMapMiddleware(idMap *ConcurrentMap) func(http.Handler) http.Handler {
	return func(next http.Handler) http.Handler {
		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			const maxIdMapAge = 24 * time.Hour
			if time.Since(lastBuiltAnimeIdList) > maxIdMapAge {
				log.Println("Anime ID association table expired, building new table...")
				buildIdMap(idMap)
			}
			next.ServeHTTP(w, r)
		})
	}
}
func loggerMiddleware(next http.Handler) http.HandlerFunc {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
log.Printf("%s %s?%s", r.Method, r.URL.Path, r.URL.RawQuery)
next.ServeHTTP(w, r)
})
}
Loading…
Cancel
Save