diff --git a/anilist.go b/anilist.go
index 6b67bf8..5534e51 100644
--- a/anilist.go
+++ b/anilist.go
@@ -7,10 +7,8 @@ import (
 	"log"
 	"net/http"
 	"net/url"
-	"slices"
 	"strconv"
 	"strings"
-	"time"
 )
 
 const anilistQuery = `
@@ -97,9 +95,15 @@ type AniListApiResponse struct {
 	Data AniListResponseData `json:"data"`
 }
 
-func handleAniListAnimeSearch(idMap *ConcurrentMap, permaSkipIds []string) http.HandlerFunc {
+func handleAniListAnimeSearch(idMap *ConcurrentMap) http.HandlerFunc {
 	return func(w http.ResponseWriter, r *http.Request) {
-		search, err := getAniListAnimeSearch(idMap, permaSkipIds, r)
+		opts, err := SearchOptsFromAniListRequest(r)
+		if err != nil {
+			log.Printf("Error creating search options: %v", err)
+			w.WriteHeader(http.StatusBadRequest) // bad params are a client error, not a silent 200
+			return
+		}
+		search, err := makeApiRequest(idMap, AniList, opts)
 		if err != nil {
 			w.WriteHeader(500)
 			if _, writeErr := w.Write([]byte(err.Error())); writeErr != nil {
@@ -114,7 +118,7 @@ func handleAniListAnimeSearch(idMap *ConcurrentMap, permaSkipIds []string) http.
 	}
 }
 
-func getAniListAnimeSearch(idMap *ConcurrentMap, permaSkipAniListIds []string, r *http.Request) ([]byte, error) {
+func SearchOptsFromAniListRequest(r *http.Request) (*SearchOpts, error) {
 	q := r.URL.Query()
 
 	// set default params
@@ -122,69 +126,20 @@ func getAniListAnimeSearch(idMap *ConcurrentMap, permaSkipAniListIds []string, r
 	if err != nil {
 		return nil, errors.New(" Required parameter \"limit\" not specified")
 	}
 
-	q.Set("type", "ANIME")
 	// dont include limit in the AniList api call as its already hard coded at 20 per page
 	q.Del("limit")
-	skipDedup := parseBoolParam(q, "allowDuplicates")
-
-	hasNextPage := true
-	page := 0
-	resp := []ResponseItem{}
-	count := 0
-	usedIds := make(map[int]bool, 0)
-	for hasNextPage {
-		page++
-		q.Set("page", strconv.Itoa(page))
-		result, err := makeAniListApiCall(q)
-		if err != nil {
-			log.Println("Error sending request to AniList: ", err)
-			return nil, err
-		}
-
-		// map the data
-		for _, item := range result.Data.Page.Media {
-			if idMap.GetByMalId(item.IdMal) == 0 {
-				log.Printf("AniList ID %d (%s) has no associated TVDB ID, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
-				continue
-			}
-			if usedIds[item.Id] && !skipDedup {
-				log.Printf("AniList ID %d (%s) is a duplicate, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
-				continue
-			}
-			if slices.Contains(permaSkipAniListIds, strconv.Itoa(item.Id)) {
-				log.Printf("AniList ID %d (%s) is set to always skip, skipping...\n", item.Id, FullAnimeTitle(item.Title.Romaji, item.Title.English))
-				continue
-			}
-			count++
-			if count > limit {
-				break
-			}
-			resp = append(resp,
-				ResponseItem{
-					item.Title.Romaji,
-					item.Title.English,
-					item.IdMal,
-					item.Id,
-					idMap.GetByMalId(item.IdMal),
-				})
-			usedIds[item.Id] = true
-		}
-		hasNextPage = result.Data.Page.PageInfo.HasNextPage
-		if count > limit {
-			break
-		}
-		if hasNextPage {
-			time.Sleep(500 * time.Millisecond) // sleep between requests for new page to try and avoid rate limits
-		}
-	}
+	q.Set("type", "ANIME")
 
-	respJson, err := json.MarshalIndent(resp, "", " ")
-	if err != nil {
-		log.Println("Error marshalling response: ", err)
-		return nil, err
-	}
-	return respJson, nil
+	skipDedup := parseBoolParam(q, "allowDuplicates")
+	mergeSeasons := parseBoolParam(q, "mergeSeasons")
+
+	return &SearchOpts{
+		AllowDuplicates: skipDedup,
+		MergeSeasons:    mergeSeasons,
+		Query:           q,
+		Limit:           limit,
+	}, nil
 }
 
 func makeAniListApiCall(q url.Values) (*AniListApiResponse, error) {
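With the fetch loop moved to consumer.go, the handler above reduces to option parsing plus delegation. A minimal sketch of how SearchOptsFromAniListRequest could be exercised, assuming a test file in package main next to anilist.go and that parseBoolParam reads "true" as true and an absent parameter as false; the test is illustrative, not part of the patch:

package main

import (
    "net/http"
    "net/http/httptest"
    "testing"
)

func TestSearchOptsFromAniListRequest(t *testing.T) {
    r := httptest.NewRequest(http.MethodGet, "/v1/anilist/anime?limit=5&mergeSeasons=true", nil)
    opts, err := SearchOptsFromAniListRequest(r)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if opts.Limit != 5 || !opts.MergeSeasons || opts.AllowDuplicates {
        t.Errorf("unexpected opts: %+v", opts)
    }
    // "limit" must be stripped and "type" pinned before the query reaches AniList
    if opts.Query.Get("limit") != "" || opts.Query.Get("type") != "ANIME" {
        t.Errorf("query not normalized: %v", opts.Query)
    }
}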
diff --git a/consumer.go b/consumer.go
new file mode 100644
index 0000000..d26ac2f
--- /dev/null
+++ b/consumer.go
@@ -0,0 +1,161 @@
+package main
+
+import (
+	"encoding/json"
+	"errors"
+	"fmt"
+	"log"
+	"net/url"
+	"slices"
+	"strconv"
+	"time"
+
+	"github.com/darenliang/jikan-go"
+	"github.com/patrickmn/go-cache"
+)
+
+type SupportedAPI int
+
+const (
+	AniList SupportedAPI = iota
+	MyAnimeList
+)
+
+type ResponseItem struct {
+	Title     string `json:"title"`
+	TitleEng  string `json:"titleEnglish,omitempty"`
+	MalId     int    `json:"malId,omitempty"`
+	AniListId int    `json:"anilistId,omitempty"`
+	TvdbId    int    `json:"tvdbId"`
+}
+
+type SearchOpts struct {
+	AllowDuplicates bool
+	MergeSeasons    bool
+	Query           url.Values
+	Limit           int
+}
+
+func ResponseItemFromAPI(target SupportedAPI, item any) *ResponseItem {
+	switch target {
+	case AniList:
+		aniListItem, ok := item.(AniListMediaItem)
+		if !ok {
+			return nil
+		}
+		return &ResponseItem{
+			Title:     aniListItem.Title.Romaji,
+			TitleEng:  aniListItem.Title.English,
+			MalId:     aniListItem.IdMal,
+			AniListId: aniListItem.Id,
+		}
+	case MyAnimeList:
+		malItem, ok := item.(jikan.AnimeBase)
+		if !ok {
+			return nil
+		}
+		return &ResponseItem{
+			Title:    malItem.Title,
+			TitleEng: malItem.TitleEnglish,
+			MalId:    malItem.MalId,
+		}
+	default:
+		return nil
+	}
+}
+
+func makeApiRequest(idMap *ConcurrentMap, target SupportedAPI, opts *SearchOpts) ([]byte, error) {
+	hasNextPage := true
+	page := 0
+	resp := []ResponseItem{}
+	count := 0
+	usedIds := make(map[int]bool)
+	usedTvdbIds := make(map[int]bool)
+
+	for hasNextPage {
+		page++
+		opts.Query.Set("page", strconv.Itoa(page))
+		apiItems := []*ResponseItem{} // reset each page so earlier pages aren't processed twice
+		if target == MyAnimeList {
+			var result *jikan.AnimeSearch
+			if cachedResult, found := Cache.Get(fmt.Sprint(MyAnimeList) + opts.Query.Encode()); found {
+				result = cachedResult.(*jikan.AnimeSearch)
+				log.Println("Jikan cache hit!")
+			} else {
+				log.Println("Querying Jikan:", opts.Query.Encode())
+				newResult, err := jikan.GetAnimeSearch(opts.Query)
+				if err != nil {
+					log.Println("Error sending request to Jikan: ", err)
+					return nil, err
+				}
+				result = newResult
+				Cache.Set(fmt.Sprint(MyAnimeList)+opts.Query.Encode(), newResult, cache.DefaultExpiration)
+			}
+			for _, item := range result.Data {
+				respItem := ResponseItemFromAPI(MyAnimeList, item)
+				if respItem == nil {
+					return nil, errors.New("failed to parse item from mal api")
+				}
+				apiItems = append(apiItems, respItem)
+			}
+			hasNextPage = result.Pagination.HasNextPage
+		} else if target == AniList {
+			result, err := makeAniListApiCall(opts.Query)
+			if err != nil {
+				log.Println("Error sending request to AniList: ", err)
+				return nil, err
+			}
+			for _, item := range result.Data.Page.Media {
+				respItem := ResponseItemFromAPI(AniList, item)
+				if respItem == nil {
+					return nil, errors.New("failed to parse item from anilist api")
+				}
+				apiItems = append(apiItems, respItem)
+			}
+			hasNextPage = result.Data.Page.PageInfo.HasNextPage
+		} else {
+			return nil, errors.New("unsupported api")
+		}
+
+		// map the data; items from both APIs are keyed and logged by their MAL ID
+		for _, item := range apiItems {
+			item.TvdbId = idMap.GetByMalId(item.MalId)
+			if item.TvdbId == 0 {
+				log.Printf("MyAnimeList ID %d (%s) has no associated TVDB ID, skipping...\n", item.MalId, FullAnimeTitle(item.Title, item.TitleEng))
+				continue
+			}
+			if usedTvdbIds[item.TvdbId] && opts.MergeSeasons {
+				log.Printf("MyAnimeList ID %d (%s) is a season of an already included anime, skipping...\n", item.MalId, FullAnimeTitle(item.Title, item.TitleEng))
+				continue
+			}
+			if usedIds[item.MalId] && !opts.AllowDuplicates {
+				log.Printf("MyAnimeList ID %d (%s) is a duplicate, skipping...\n", item.MalId, FullAnimeTitle(item.Title, item.TitleEng))
+				continue
+			}
+			if slices.Contains(PermaSkipIds, strconv.Itoa(item.TvdbId)) {
+				log.Printf("MyAnimeList ID %d (%s) is set to always skip, skipping...\n", item.MalId, FullAnimeTitle(item.Title, item.TitleEng))
+				continue
+			}
+			count++
+			if count > opts.Limit {
+				break
+			}
+			resp = append(resp, *item)
+			usedIds[item.MalId] = true
+			usedTvdbIds[item.TvdbId] = true
+		}
+		if count > opts.Limit {
+			break
+		}
+		if hasNextPage {
+			time.Sleep(500 * time.Millisecond) // sleep between requests for new page to try and avoid rate limits
+		}
+	}
+
+	respJson, err := json.MarshalIndent(resp, "", " ")
+	if err != nil {
+		log.Println("Error marshalling response: ", err)
+		return nil, err
+	}
+	return respJson, nil
+}
diff --git a/main.go b/main.go
index 5c3535f..2de1858 100644
--- a/main.go
+++ b/main.go
@@ -15,14 +15,6 @@ import (
 	"github.com/patrickmn/go-cache"
 )
 
-type ResponseItem struct {
-	Title     string `json:"title"`
-	TitleEng  string `json:"titleEnglish,omitempty"`
-	MalId     int    `json:"malId,omitempty"`
-	AniListId int    `json:"anilistId,omitempty"`
-	TvdbId    int    `json:"tvdbId"`
-}
-
 type AnimeEntry struct {
 	TvdbId int `json:"tvdb_id"`
 	MalId  any `json:"mal_id"`
@@ -40,6 +32,10 @@ func (m *ConcurrentMap) GetByMalId(i int) int {
 	return m.mal[i]
 }
 
+var PermaSkipIds []string
+
+var Cache = cache.New(10*time.Minute, 15*time.Minute)
+
 var lastBuiltAnimeIdList time.Time
 
 const Version = "v0.2.2"
@@ -49,25 +45,18 @@ func main() {
 	log.Println("Building Anime ID Associations...")
 	var idMap = new(ConcurrentMap)
 	buildIdMap(idMap)
-	permaSkipMalStr := os.Getenv("ALWAYS_SKIP_MAL_IDS")
-	permaSkipMalIds := strings.Split(permaSkipMalStr, ",")
-	if permaSkipMalStr != "" {
-		log.Printf("Always skipping MAL IDs: %v\n", permaSkipMalIds)
-	}
-	permaSkipAniListStr := os.Getenv("ALWAYS_SKIP_ANILIST_IDS")
-	permaSkipAniListIds := strings.Split(permaSkipAniListStr, ",")
-	if permaSkipAniListStr != "" {
-		log.Printf("Always skipping AniList IDs: %v\n", permaSkipAniListIds)
+	permaSkipStr := os.Getenv("ALWAYS_SKIP_TVDB_IDS")
+	PermaSkipIds = strings.Split(permaSkipStr, ",")
+	if permaSkipStr != "" {
+		log.Printf("Always skipping TVDB IDs: %v\n", PermaSkipIds)
 	}
-	log.Printf("Preparing cache...")
-	c := cache.New(10*time.Minute, 15*time.Minute)
 	middleware := []Middleware{
 		loggerMiddleware,
-		newCacheMiddleware(c),
+		cacheMiddleware,
 		newRebuildStaleIdMapMiddleware(idMap),
 	}
-	http.HandleFunc("/v1/mal/anime", ChainMiddleware(handleMalAnimeSearch(idMap, permaSkipMalIds), middleware...))
-	http.HandleFunc("/v1/anilist/anime", ChainMiddleware(handleAniListAnimeSearch(idMap, permaSkipAniListIds), middleware...))
+	http.HandleFunc("/v1/mal/anime", ChainMiddleware(handleMalAnimeSearch(idMap), middleware...))
+	http.HandleFunc("/v1/anilist/anime", ChainMiddleware(handleAniListAnimeSearch(idMap), middleware...))
 
 	log.Println("Listening on :3333")
 	srv := &http.Server{
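The two per-API skip lists collapse into one package-level PermaSkipIds holding TVDB IDs, next to the shared package-level Cache used by both the middleware and the Jikan page cache. A standalone sketch of how the new environment variable behaves (the IDs shown are placeholders, not real entries):

package main

import (
    "fmt"
    "os"
    "strings"
)

func main() {
    os.Setenv("ALWAYS_SKIP_TVDB_IDS", "81797,305074") // placeholder IDs
    ids := strings.Split(os.Getenv("ALWAYS_SKIP_TVDB_IDS"), ",")
    fmt.Println(ids) // [81797 305074]
    // with the variable unset, strings.Split("", ",") yields [""], which can
    // never match a numeric TVDB ID string, so the empty case needs no guard
}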
diff --git a/mal.go b/mal.go
index afe170c..476e015 100644
--- a/mal.go
+++ b/mal.go
@@ -1,20 +1,21 @@
 package main
 
 import (
-	"encoding/json"
 	"errors"
 	"log"
 	"net/http"
-	"slices"
 	"strconv"
-	"time"
-
-	"github.com/darenliang/jikan-go"
 )
 
-func handleMalAnimeSearch(idMap *ConcurrentMap, permaSkipMalIds []string) http.HandlerFunc {
+func handleMalAnimeSearch(idMap *ConcurrentMap) http.HandlerFunc {
 	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
-		search, err := getJikanAnimeSearch(idMap, permaSkipMalIds, r)
+		opts, err := SearchOptsFromMalRequest(r)
+		if err != nil {
+			log.Printf("Error creating search options: %v", err)
+			w.WriteHeader(http.StatusBadRequest) // bad params are a client error, not a silent 200
+			return
+		}
+		search, err := makeApiRequest(idMap, MyAnimeList, opts)
 		if err != nil {
 			w.WriteHeader(500)
 			if _, writeErr := w.Write([]byte(err.Error())); writeErr != nil {
@@ -29,73 +30,23 @@ func handleMalAnimeSearch(idMap *ConcurrentMap, permaSkipMalIds []string) http.H
 	})
 }
 
-func getJikanAnimeSearch(idMap *ConcurrentMap, permaSkipMalIds []string, r *http.Request) (string, error) {
+func SearchOptsFromMalRequest(r *http.Request) (*SearchOpts, error) {
 	q := r.URL.Query()
 	limit, err := strconv.Atoi(q.Get("limit"))
 	if err != nil {
-		return "", errors.New(" Required parameter \"limit\" not specified")
+		return nil, errors.New(" Required parameter \"limit\" not specified")
 	}
 	skipDedup := parseBoolParam(q, "allow_duplicates")
+	mergeSeasons := parseBoolParam(q, "merge_seasons")
 
 	// for some reason Jikan responds with 400 Bad Request for any limit >25
 	// so instead, we just limit when mapping the data and remove the limit from the Jikan request
 	q.Del("limit")
 
-	hasNextPage := true
-	page := 0
-	resp := []ResponseItem{}
-	count := 0
-	usedIds := make(map[int]bool, 0)
-	for hasNextPage {
-		page++
-		q.Set("page", strconv.Itoa(page))
-		result, err := jikan.GetAnimeSearch(q)
-		if err != nil {
-			log.Println("Error sending request to Jikan: ", err)
-			return "", err
-		}
-
-		// map the data
-		for _, item := range result.Data {
-			if idMap.GetByMalId(item.MalId) == 0 {
-				log.Printf("MyAnimeList ID %d (%s) has no associated TVDB ID, skipping...\n", item.MalId, FullAnimeTitle(item.Title, item.TitleEnglish))
-				continue
-			}
-			if usedIds[item.MalId] && !skipDedup {
-				log.Printf("MyAnimeList ID %d (%s) is a duplicate, skipping...\n", item.MalId, FullAnimeTitle(item.Title, item.TitleEnglish))
-				continue
-			}
-			if slices.Contains(permaSkipMalIds, strconv.Itoa(item.MalId)) {
-				log.Printf("MyAnimeList ID %d (%s) is set to always skip, skipping...\n", item.MalId, FullAnimeTitle(item.Title, item.TitleEnglish))
-				continue
-			}
-			count++
-			if count > limit {
-				break
-			}
-			resp = append(resp,
-				ResponseItem{
-					item.Title,
-					item.TitleEnglish,
-					item.MalId,
-					0,
-					idMap.GetByMalId(item.MalId),
-				})
-			usedIds[item.MalId] = true
-		}
-		hasNextPage = result.Pagination.HasNextPage
-		if count > limit {
-			break
-		}
-		if hasNextPage {
-			time.Sleep(500 * time.Millisecond) // sleep between requests for new page to try and avoid rate limits
-		}
-	}
-
-	respJson, err := json.MarshalIndent(resp, "", " ")
-	if err != nil {
-		log.Println("Error marshalling response: ", err)
-		return "", err
-	}
-	return string(respJson), nil
+	return &SearchOpts{
+		AllowDuplicates: skipDedup,
+		MergeSeasons:    mergeSeasons,
+		Query:           q,
+		Limit:           limit,
+	}, nil
 }
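Both endpoints now share makeApiRequest and accept the same controls: limit, plus allow_duplicates/merge_seasons on the MAL route and allowDuplicates/mergeSeasons on the AniList route; any other query parameters pass through to the upstream API. A hypothetical client call against a locally running instance (the URL and query values are examples only):

package main

import (
    "fmt"
    "io"
    "log"
    "net/http"
)

func main() {
    // q is forwarded to Jikan unchanged; limit is enforced server-side
    resp, err := http.Get("http://localhost:3333/v1/mal/anime?limit=10&q=one+piece")
    if err != nil {
        log.Fatal(err)
    }
    defer resp.Body.Close()
    body, err := io.ReadAll(resp.Body)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(string(body)) // JSON array of ResponseItem
}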
diff --git a/middleware.go b/middleware.go
index 50a4cf5..0bf04eb 100644
--- a/middleware.go
+++ b/middleware.go
@@ -74,24 +74,22 @@ func (w *cacheResponseWriter) Write(b []byte) (int, error) {
 	return w.ResponseWriter.Write(b)
 }
 
-func newCacheMiddleware(c *cache.Cache) func(http.HandlerFunc) http.HandlerFunc {
-	return func(next http.HandlerFunc) http.HandlerFunc {
-		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
-			key := RequestString(r)
-			if cachedResp, found := c.Get(key); found {
-				log.Println("Responding with cached response")
-				w.WriteHeader(http.StatusOK)
-				w.Write(cachedResp.([]byte))
-				return
-			}
-			crw := &cacheResponseWriter{
-				ResponseWriter: w,
-				body:           &bytes.Buffer{},
-			}
-			next.ServeHTTP(crw, r)
-			if crw.status == http.StatusOK {
-				c.Set(key, crw.body.Bytes(), cache.DefaultExpiration)
-			}
-		})
-	}
+func cacheMiddleware(next http.HandlerFunc) http.HandlerFunc {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		key := RequestString(r)
+		if cachedResp, found := Cache.Get(key); found {
+			log.Println("Responding with cached response")
+			w.WriteHeader(http.StatusOK)
+			w.Write(cachedResp.([]byte))
+			return
+		}
+		crw := &cacheResponseWriter{
+			ResponseWriter: w,
+			body:           &bytes.Buffer{},
+		}
+		next.ServeHTTP(crw, r)
+		if crw.status == http.StatusOK {
+			Cache.Set(key, crw.body.Bytes(), cache.DefaultExpiration)
+		}
+	})
 }
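cacheMiddleware now closes over the package-level Cache rather than a captured *cache.Cache, so the HTTP response cache and the raw Jikan page cache in consumer.go share one store; their keys stay disjoint because Jikan entries are prefixed with fmt.Sprint(MyAnimeList) while the middleware keys on RequestString(r). A self-contained sketch of the same caching pattern with go-cache (the handler, key scheme, and port are illustrative; it captures the downstream response with a test recorder for brevity instead of a custom ResponseWriter):

package main

import (
    "fmt"
    "net/http"
    "net/http/httptest"
    "time"

    "github.com/patrickmn/go-cache"
)

var demoCache = cache.New(10*time.Minute, 15*time.Minute)

// cached replays stored bodies and records successful responses
func cached(next http.HandlerFunc) http.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) {
        key := r.Method + " " + r.URL.String()
        if hit, ok := demoCache.Get(key); ok {
            w.Write(hit.([]byte))
            return
        }
        rec := httptest.NewRecorder() // capture status and body from the handler
        next(rec, r)
        if rec.Code == http.StatusOK {
            demoCache.Set(key, rec.Body.Bytes(), cache.DefaultExpiration)
        }
        w.WriteHeader(rec.Code)
        w.Write(rec.Body.Bytes())
    }
}

func main() {
    http.HandleFunc("/demo", cached(func(w http.ResponseWriter, r *http.Request) {
        fmt.Fprintln(w, "expensive result")
    }))
    http.ListenAndServe(":8080", nil)
}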