Add download cache again
This commit is contained in:
@@ -356,9 +356,73 @@ func (rd *RealDebrid) GetActiveTorrentCount() (*ActiveTorrentCountResponse, erro
|
||||
}
|
||||
|
||||
// GetDownloads returns all torrents, paginated
|
||||
func (rd *RealDebrid) GetDownloads(page, offset int) ([]Download, int, error) {
|
||||
func (rd *RealDebrid) GetDownloads() ([]Download, int, error) {
|
||||
_, totalCount, err := rd.fetchPageOfDownloads(1, 0)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
// reset allDownloads
|
||||
allDownloads := []Download{}
|
||||
page := 1
|
||||
offset := 0
|
||||
limit := 100
|
||||
|
||||
// compute ceiling of totalCount / limit
|
||||
maxPages := (totalCount + limit - 1) / limit
|
||||
// rd.log.Debugf("Total count is %d, max pages is %d", totalCount, maxPages)
|
||||
maxParallelThreads := 8
|
||||
if maxPages < maxParallelThreads {
|
||||
maxParallelThreads = maxPages
|
||||
}
|
||||
for {
|
||||
allResults := make(chan []Download, maxParallelThreads) // Channel to collect results from goroutines
|
||||
errChan := make(chan error, maxParallelThreads) // Channel to collect errors from goroutines
|
||||
for i := 0; i < maxParallelThreads; i++ { // Launch GET_PARALLEL concurrent fetches
|
||||
go func(add int) {
|
||||
if page+add > maxPages {
|
||||
allResults <- nil
|
||||
errChan <- nil
|
||||
return
|
||||
}
|
||||
result, _, err := rd.fetchPageOfDownloads(page+add, offset+add*limit)
|
||||
if err != nil {
|
||||
allResults <- nil
|
||||
errChan <- err
|
||||
return
|
||||
}
|
||||
allResults <- result
|
||||
errChan <- nil
|
||||
}(i)
|
||||
}
|
||||
// Collect results from all goroutines
|
||||
for i := 0; i < maxParallelThreads; i++ {
|
||||
res := <-allResults
|
||||
err := <-errChan
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
allDownloads = append(allDownloads, res...)
|
||||
}
|
||||
|
||||
// rd.log.Debugf("Got %d/%d downloads", len(allDownloads), totalCount)
|
||||
|
||||
if len(allDownloads) >= totalCount || page >= maxPages {
|
||||
break
|
||||
}
|
||||
|
||||
page += maxParallelThreads
|
||||
offset += maxParallelThreads * limit
|
||||
}
|
||||
|
||||
rd.log.Debugf("Got %d downloads", len(allDownloads))
|
||||
|
||||
return allDownloads, totalCount, nil
|
||||
}
|
||||
|
||||
func (rd *RealDebrid) fetchPageOfDownloads(page, offset int) ([]Download, int, error) {
|
||||
baseURL := "https://api.real-debrid.com/rest/1.0/downloads"
|
||||
var allDownloads []Download
|
||||
var downloads []Download
|
||||
limit := 500
|
||||
totalCount := 0
|
||||
|
||||
@@ -384,12 +448,15 @@ func (rd *RealDebrid) GetDownloads(page, offset int) ([]Download, int, error) {
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode == http.StatusNoContent {
|
||||
return allDownloads, 0, nil
|
||||
return downloads, 0, nil
|
||||
}
|
||||
|
||||
// If the status code is not 2xx, return an error.
|
||||
if resp.StatusCode < 200 || resp.StatusCode >= 300 {
|
||||
err := fmt.Errorf("unexpected status code: %d", resp.StatusCode)
|
||||
rd.log.Errorf("Error when executing the get downloads request: %v", err)
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
var downloads []Download
|
||||
decoder := json.NewDecoder(resp.Body)
|
||||
err = decoder.Decode(&downloads)
|
||||
if err != nil {
|
||||
@@ -397,16 +464,13 @@ func (rd *RealDebrid) GetDownloads(page, offset int) ([]Download, int, error) {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
allDownloads = append(allDownloads, downloads...)
|
||||
|
||||
totalCountHeader := resp.Header.Get("x-total-count")
|
||||
totalCount, err = strconv.Atoi(totalCountHeader)
|
||||
if err != nil {
|
||||
totalCount = 0
|
||||
}
|
||||
|
||||
rd.log.Debugf("Got %d downloads (page %d), total count is %d", len(allDownloads)+offset, page, totalCount)
|
||||
return allDownloads, totalCount, nil
|
||||
return downloads, totalCount, nil
|
||||
}
|
||||
|
||||
// GetUserInformation gets the current user information.
|
||||
|
||||
Reference in New Issue
Block a user