Refactor torrent fetching
This commit is contained in:
@@ -115,89 +115,88 @@ func (rd *RealDebrid) UnrestrictLink(link string, checkFirstByte bool) (*Downloa
|
||||
return &response, nil
|
||||
}
|
||||
|
||||
// GetTorrents returns all torrents, paginated
|
||||
// if customLimit is 0, the default limit of 500 is used
|
||||
func (rd *RealDebrid) GetTorrents(customLimit int, active bool) ([]Torrent, int, error) {
|
||||
const MAX_PARALLEL = 4
|
||||
// getTorrentsResult bundles the outcome of fetching a single page of
// torrents from the Real-Debrid API: the decoded items, any error that
// occurred along the way, and the total torrent count the API reports.
type getTorrentsResult struct {
	torrents   []Torrent // torrents decoded from this page's response body
	err        error     // non-nil if building, performing, or decoding the request failed
	totalCount int       // total count reported by the x-total-count response header (0 if absent)
}
|
||||
|
||||
func (rd *RealDebrid) getPageOfTorrents(page, limit int) getTorrentsResult {
|
||||
baseURL := "https://api.real-debrid.com/rest/1.0/torrents"
|
||||
|
||||
params := url.Values{}
|
||||
params.Set("page", fmt.Sprintf("%d", page))
|
||||
params.Set("limit", fmt.Sprintf("%d", limit))
|
||||
|
||||
reqURL := baseURL + "?" + params.Encode()
|
||||
req, err := http.NewRequest("GET", reqURL, nil)
|
||||
if err != nil {
|
||||
return getTorrentsResult{nil, err, 0}
|
||||
}
|
||||
|
||||
resp, err := rd.apiClient.Do(req)
|
||||
if err != nil {
|
||||
return getTorrentsResult{nil, err, 0}
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode == http.StatusNoContent {
|
||||
return getTorrentsResult{nil, nil, 0}
|
||||
}
|
||||
|
||||
var torrents []Torrent
|
||||
decoder := json.NewDecoder(resp.Body)
|
||||
err = decoder.Decode(&torrents)
|
||||
if err != nil {
|
||||
return getTorrentsResult{nil, err, 0}
|
||||
}
|
||||
|
||||
countHeader := resp.Header.Get("x-total-count")
|
||||
count, _ := strconv.Atoi(countHeader) // In real use, handle this error
|
||||
|
||||
return getTorrentsResult{torrents, nil, count}
|
||||
}
|
||||
|
||||
func (rd *RealDebrid) GetTorrents(onlyOne bool) ([]Torrent, int, error) {
|
||||
var allTorrents []Torrent
|
||||
|
||||
// fetch 1 to get total count
|
||||
result := rd.getPageOfTorrents(1, 1)
|
||||
allTorrents = append(allTorrents, result.torrents...)
|
||||
totalCount := result.totalCount
|
||||
|
||||
if onlyOne || totalCount == len(allTorrents) {
|
||||
return allTorrents, totalCount, nil
|
||||
}
|
||||
|
||||
const MAX_PARALLEL = 4
|
||||
page := 1
|
||||
limit := customLimit
|
||||
if limit == 0 {
|
||||
limit = rd.cfg.GetTorrentsCount()
|
||||
}
|
||||
totalCount := 0
|
||||
|
||||
type fetchResult struct {
|
||||
torrents []Torrent
|
||||
err error
|
||||
count int
|
||||
}
|
||||
|
||||
maxPages := totalCount / rd.cfg.GetTorrentsCount()
|
||||
for {
|
||||
results := make(chan fetchResult, 4) // Channel to collect results from goroutines
|
||||
for i := 0; i < MAX_PARALLEL; i++ { // Launch GET_PARALLEL concurrent fetches
|
||||
go func(p int) {
|
||||
params := url.Values{}
|
||||
params.Set("page", fmt.Sprintf("%d", p))
|
||||
params.Set("limit", fmt.Sprintf("%d", limit))
|
||||
if active {
|
||||
params.Set("filter", "active")
|
||||
}
|
||||
|
||||
reqURL := baseURL + "?" + params.Encode()
|
||||
req, err := http.NewRequest("GET", reqURL, nil)
|
||||
if err != nil {
|
||||
results <- fetchResult{nil, err, 0}
|
||||
allResults := make(chan getTorrentsResult, MAX_PARALLEL) // Channel to collect results from goroutines
|
||||
for i := 0; i < MAX_PARALLEL; i++ { // Launch GET_PARALLEL concurrent fetches
|
||||
go func(add int) {
|
||||
if page > maxPages {
|
||||
allResults <- getTorrentsResult{nil, nil, 0}
|
||||
return
|
||||
}
|
||||
|
||||
resp, err := rd.apiClient.Do(req)
|
||||
if err != nil {
|
||||
results <- fetchResult{nil, err, 0}
|
||||
return
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode == http.StatusNoContent {
|
||||
results <- fetchResult{nil, nil, 0}
|
||||
return
|
||||
}
|
||||
|
||||
var torrents []Torrent
|
||||
decoder := json.NewDecoder(resp.Body)
|
||||
err = decoder.Decode(&torrents)
|
||||
if err != nil {
|
||||
results <- fetchResult{nil, err, 0}
|
||||
return
|
||||
}
|
||||
|
||||
countHeader := resp.Header.Get("x-total-count")
|
||||
count, _ := strconv.Atoi(countHeader) // In real use, handle this error
|
||||
|
||||
results <- fetchResult{torrents, nil, count}
|
||||
}(page + i)
|
||||
allResults <- rd.getPageOfTorrents(page+add, rd.cfg.GetTorrentsCount())
|
||||
}(i)
|
||||
}
|
||||
|
||||
// Collect results from all goroutines
|
||||
for i := 0; i < MAX_PARALLEL; i++ {
|
||||
result := <-results
|
||||
if result.err != nil {
|
||||
return nil, 0, result.err
|
||||
}
|
||||
allTorrents = append(allTorrents, result.torrents...)
|
||||
if totalCount == 0 { // Set totalCount from the first successful fetch
|
||||
totalCount = result.count
|
||||
res := <-allResults
|
||||
if res.err != nil {
|
||||
return nil, 0, res.err
|
||||
}
|
||||
allTorrents = append(allTorrents, res.torrents...)
|
||||
}
|
||||
|
||||
// Increment page by GET_PARALLEL for the next iteration of GET_PARALLEL concurrent fetches
|
||||
page += MAX_PARALLEL
|
||||
|
||||
// Break loop if all torrents fetched or the limit is reached
|
||||
if len(allTorrents) >= totalCount || (customLimit != 0 && len(allTorrents) >= customLimit) {
|
||||
if len(allTorrents) >= totalCount {
|
||||
break
|
||||
}
|
||||
|
||||
page += MAX_PARALLEL
|
||||
}
|
||||
|
||||
return allTorrents, totalCount, nil
|
||||
|
||||
Reference in New Issue
Block a user