Optimize torrent fetching
This commit is contained in:
@@ -5,7 +5,6 @@ import (
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/debridmediamanager/zurg/internal/config"
|
||||
@@ -15,6 +14,7 @@ import (
|
||||
)
|
||||
|
||||
type RealDebrid struct {
|
||||
torrentsCache []Torrent
|
||||
apiClient *zurghttp.HTTPClient
|
||||
unrestrictClient *zurghttp.HTTPClient
|
||||
downloadClient *zurghttp.HTTPClient
|
||||
@@ -25,6 +25,7 @@ type RealDebrid struct {
|
||||
|
||||
func NewRealDebrid(apiClient, unrestrictClient, downloadClient *zurghttp.HTTPClient, workerPool *ants.Pool, cfg config.ConfigInterface, log *logutil.Logger) *RealDebrid {
|
||||
return &RealDebrid{
|
||||
torrentsCache: []Torrent{},
|
||||
apiClient: apiClient,
|
||||
unrestrictClient: unrestrictClient,
|
||||
downloadClient: downloadClient,
|
||||
@@ -118,104 +119,6 @@ func (rd *RealDebrid) UnrestrictLink(link string, checkFirstByte bool) (*Downloa
|
||||
return &response, nil
|
||||
}
|
||||
|
||||
// getTorrentsResult bundles the outcome of fetching one page of torrents:
// the page's torrents, any transport/decode error, and the total torrent
// count reported by the API's "x-total-count" response header.
//
// NOTE(review): instances are constructed with positional field values
// elsewhere in this file (e.g. getTorrentsResult{nil, err, 0}), so the
// field order must not change.
type getTorrentsResult struct {
	torrents   []Torrent // one page of torrents (nil on error or empty page)
	err        error     // non-nil if the page could not be fetched/decoded
	totalCount int       // total count across all pages, from x-total-count
}
|
||||
|
||||
func (rd *RealDebrid) getPageOfTorrents(page, limit int) getTorrentsResult {
|
||||
baseURL := "https://api.real-debrid.com/rest/1.0/torrents"
|
||||
|
||||
params := url.Values{}
|
||||
params.Set("page", fmt.Sprintf("%d", page))
|
||||
params.Set("limit", fmt.Sprintf("%d", limit))
|
||||
|
||||
reqURL := baseURL + "?" + params.Encode()
|
||||
req, err := http.NewRequest("GET", reqURL, nil)
|
||||
if err != nil {
|
||||
return getTorrentsResult{nil, err, 0}
|
||||
}
|
||||
|
||||
resp, err := rd.apiClient.Do(req)
|
||||
if err != nil {
|
||||
return getTorrentsResult{nil, err, 0}
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode == http.StatusNoContent {
|
||||
return getTorrentsResult{nil, nil, 0}
|
||||
}
|
||||
|
||||
var torrents []Torrent
|
||||
decoder := json.NewDecoder(resp.Body)
|
||||
err = decoder.Decode(&torrents)
|
||||
if err != nil {
|
||||
return getTorrentsResult{nil, err, 0}
|
||||
}
|
||||
|
||||
countHeader := resp.Header.Get("x-total-count")
|
||||
count, _ := strconv.Atoi(countHeader) // In real use, handle this error
|
||||
|
||||
return getTorrentsResult{torrents, nil, count}
|
||||
}
|
||||
|
||||
func (rd *RealDebrid) GetTorrents(onlyOne bool) ([]Torrent, int, error) {
|
||||
var allTorrents []Torrent
|
||||
|
||||
// fetch 1 to get total count
|
||||
result := rd.getPageOfTorrents(1, 1)
|
||||
allTorrents = append(allTorrents, result.torrents...)
|
||||
totalCount := result.totalCount
|
||||
|
||||
if onlyOne {
|
||||
return allTorrents, totalCount, nil
|
||||
}
|
||||
|
||||
// reset allTorrents
|
||||
allTorrents = []Torrent{}
|
||||
page := 1
|
||||
// compute ceiling of totalCount / limit
|
||||
maxPages := (totalCount + 250 - 1) / 250
|
||||
rd.log.Debugf("Torrents total count is %d", totalCount)
|
||||
maxParallelThreads := 4
|
||||
if maxPages < maxParallelThreads {
|
||||
maxParallelThreads = maxPages
|
||||
}
|
||||
for {
|
||||
allResults := make(chan getTorrentsResult, maxParallelThreads) // Channel to collect results from goroutines
|
||||
for i := 0; i < maxParallelThreads; i++ { // Launch GET_PARALLEL concurrent fetches
|
||||
idx := i
|
||||
rd.workerPool.Submit(func() {
|
||||
if page > maxPages {
|
||||
allResults <- getTorrentsResult{nil, nil, 0}
|
||||
return
|
||||
}
|
||||
allResults <- rd.getPageOfTorrents(page+idx, 250)
|
||||
})
|
||||
}
|
||||
// Collect results from all goroutines
|
||||
for i := 0; i < maxParallelThreads; i++ {
|
||||
res := <-allResults
|
||||
if res.err != nil {
|
||||
rd.log.Warnf("Ignoring error when fetching torrents: %v", res.err)
|
||||
continue
|
||||
}
|
||||
allTorrents = append(allTorrents, res.torrents...)
|
||||
}
|
||||
|
||||
rd.log.Debugf("Got %d/%d torrents", len(allTorrents), totalCount)
|
||||
|
||||
if len(allTorrents) >= totalCount || page >= maxPages {
|
||||
break
|
||||
}
|
||||
|
||||
page += maxParallelThreads
|
||||
}
|
||||
|
||||
return allTorrents, totalCount, nil
|
||||
}
|
||||
|
||||
func (rd *RealDebrid) GetTorrentInfo(id string) (*TorrentInfo, error) {
|
||||
url := "https://api.real-debrid.com/rest/1.0/torrents/info/" + id
|
||||
|
||||
@@ -360,120 +263,6 @@ func (rd *RealDebrid) GetActiveTorrentCount() (*ActiveTorrentCountResponse, erro
|
||||
return &response, nil
|
||||
}
|
||||
|
||||
// GetDownloads returns all torrents, paginated
|
||||
func (rd *RealDebrid) GetDownloads() []Download {
|
||||
_, totalCount, err := rd.fetchPageOfDownloads(1, 1)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// reset allDownloads
|
||||
allDownloads := []Download{}
|
||||
page := 1
|
||||
limit := 250
|
||||
|
||||
// compute ceiling of totalCount / limit
|
||||
maxPages := (totalCount + limit - 1) / limit
|
||||
rd.log.Debugf("Total downloads count is %d", totalCount)
|
||||
maxParallelThreads := 4
|
||||
if maxPages < maxParallelThreads {
|
||||
maxParallelThreads = maxPages
|
||||
}
|
||||
for {
|
||||
allResults := make(chan []Download, maxParallelThreads) // Channel to collect results from goroutines
|
||||
errChan := make(chan error, maxParallelThreads) // Channel to collect errors from goroutines
|
||||
for i := 0; i < maxParallelThreads; i++ { // Launch GET_PARALLEL concurrent fetches
|
||||
idx := i
|
||||
rd.workerPool.Submit(func() {
|
||||
if page+idx > maxPages {
|
||||
allResults <- nil
|
||||
errChan <- nil
|
||||
return
|
||||
}
|
||||
result, _, err := rd.fetchPageOfDownloads(page+idx, limit)
|
||||
if err != nil {
|
||||
allResults <- nil
|
||||
errChan <- err
|
||||
return
|
||||
}
|
||||
allResults <- result
|
||||
errChan <- nil
|
||||
})
|
||||
}
|
||||
// Collect results from all goroutines
|
||||
for i := 0; i < maxParallelThreads; i++ {
|
||||
res := <-allResults
|
||||
err := <-errChan
|
||||
if err != nil {
|
||||
rd.log.Warnf("Ignoring error when fetching downloads: %v", err)
|
||||
continue
|
||||
}
|
||||
allDownloads = append(allDownloads, res...)
|
||||
}
|
||||
|
||||
rd.log.Debugf("Got %d/%d downloads", len(allDownloads), totalCount)
|
||||
|
||||
if len(allDownloads) >= totalCount || page >= maxPages {
|
||||
break
|
||||
}
|
||||
|
||||
page += maxParallelThreads
|
||||
}
|
||||
|
||||
return allDownloads
|
||||
}
|
||||
|
||||
func (rd *RealDebrid) fetchPageOfDownloads(page, limit int) ([]Download, int, error) {
|
||||
baseURL := "https://api.real-debrid.com/rest/1.0/downloads"
|
||||
var downloads []Download
|
||||
totalCount := 0
|
||||
|
||||
params := url.Values{}
|
||||
params.Set("page", fmt.Sprintf("%d", page))
|
||||
params.Set("limit", fmt.Sprintf("%d", limit))
|
||||
// params.Set("filter", "active")
|
||||
|
||||
reqURL := baseURL + "?" + params.Encode()
|
||||
|
||||
req, err := http.NewRequest("GET", reqURL, nil)
|
||||
if err != nil {
|
||||
rd.log.Errorf("Error when creating a get downloads request: %v", err)
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
resp, err := rd.apiClient.Do(req)
|
||||
if err != nil {
|
||||
rd.log.Errorf("Error when executing the get downloads request: %v", err)
|
||||
return nil, 0, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode == http.StatusNoContent {
|
||||
return downloads, 0, nil
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
err := fmt.Errorf("unexpected status code: %d", resp.StatusCode)
|
||||
rd.log.Errorf("Error when executing the get downloads request: %v", err)
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
decoder := json.NewDecoder(resp.Body)
|
||||
err = decoder.Decode(&downloads)
|
||||
if err != nil {
|
||||
rd.log.Errorf("Error when decoding get downloads JSON: %v", err)
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
totalCountHeader := resp.Header.Get("x-total-count")
|
||||
totalCount, err = strconv.Atoi(totalCountHeader)
|
||||
if err != nil {
|
||||
totalCount = 0
|
||||
}
|
||||
|
||||
return downloads, totalCount, nil
|
||||
}
|
||||
|
||||
// GetUserInformation gets the current user information.
|
||||
func (rd *RealDebrid) GetUserInformation() (*User, error) {
|
||||
// Construct request URL
|
||||
|
||||
Reference in New Issue
Block a user