fix limit param

pull/5/head
Gabe Farrell 9 months ago
parent 78c2173197
commit 84b1184061

@@ -48,9 +48,15 @@ func handleAnimeSearch(malToTvdb map[int]int) func(w http.ResponseWriter, r *htt
 func getAnimeSearch(malToTvdb map[int]int, r *http.Request) (string, error) {
 	q := r.URL.Query()
+	limit, err := strconv.Atoi(q.Get("limit"))
+	if err != nil {
+		limit = 9999 // limit not specified or invalid
+	}
 	hasNextPage := true
 	page := 0
 	resp := []ResponseItem{}
+	count := 0
 	for hasNextPage {
 		page++
 		q.Set("page", strconv.Itoa(page))
@@ -62,6 +68,10 @@ func getAnimeSearch(malToTvdb map[int]int, r *http.Request) (string, error) {
 		// map the data
 		for _, item := range result.Data {
+			count++
+			if count > limit {
+				break
+			}
 			resp = append(resp,
 				ResponseItem{
 					item.Title,
@@ -70,6 +80,9 @@ func getAnimeSearch(malToTvdb map[int]int, r *http.Request) (string, error) {
 				})
 		}
 		hasNextPage = result.Pagination.HasNextPage
+		if count > limit {
+			break
+		}
 		if hasNextPage {
 			time.Sleep(1 * time.Second) // sleep between requests for new page to try and avoid rate limits
 		}
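For context, a minimal standalone sketch of the limit-parsing pattern this commit introduces: read "limit" from the query string and fall back to a large default when it is absent or not an integer. The parseLimit helper and the /anime URLs below are illustrative names only, not part of this repository.

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
	"strconv"
)

// parseLimit mirrors the pattern added in this commit: take ?limit= from the
// query string and default to 9999 when it is missing or not an integer.
func parseLimit(r *http.Request) int {
	limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
	if err != nil {
		limit = 9999 // limit not specified or invalid
	}
	return limit
}

func main() {
	// Exercise the three cases: valid limit, missing limit, non-numeric limit.
	for _, u := range []string{"/anime?q=test&limit=5", "/anime?q=test", "/anime?q=test&limit=abc"} {
		r := httptest.NewRequest(http.MethodGet, u, nil)
		fmt.Printf("%-26s limit=%d\n", u, parseLimit(r))
	}
}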
