A lot of rewriting here
This commit is contained in:
@@ -8,7 +8,6 @@ import (
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
func UnrestrictCheck(accessToken, link string) (*UnrestrictResponse, error) {
|
||||
@@ -89,7 +88,7 @@ func GetTorrents(accessToken string) ([]Torrent, error) {
|
||||
baseURL := "https://api.real-debrid.com/rest/1.0/torrents"
|
||||
var allTorrents []Torrent
|
||||
page := 1
|
||||
limit := 100
|
||||
limit := 2500
|
||||
|
||||
for {
|
||||
params := url.Values{}
|
||||
@@ -125,7 +124,7 @@ func GetTorrents(accessToken string) ([]Torrent, error) {
|
||||
|
||||
allTorrents = append(allTorrents, torrents...)
|
||||
|
||||
totalCountHeader := "100" // resp.Header.Get("x-total-count")
|
||||
totalCountHeader := resp.Header.Get("x-total-count")
|
||||
totalCount, err := strconv.Atoi(totalCountHeader)
|
||||
if err != nil {
|
||||
break
|
||||
@@ -138,7 +137,7 @@ func GetTorrents(accessToken string) ([]Torrent, error) {
|
||||
page++
|
||||
}
|
||||
|
||||
return deduplicateTorrents(allTorrents), nil
|
||||
return allTorrents, nil
|
||||
}
|
||||
|
||||
func GetTorrentInfo(accessToken, id string) (*Torrent, error) {
|
||||
@@ -175,69 +174,3 @@ func GetTorrentInfo(accessToken, id string) (*Torrent, error) {
|
||||
|
||||
return &response, nil
|
||||
}
|
||||
|
||||
func deduplicateTorrents(torrents []Torrent) []Torrent {
|
||||
mappedTorrents := make(map[string]Torrent)
|
||||
|
||||
for _, t := range torrents {
|
||||
torrentName := t.Filename
|
||||
if existing, ok := mappedTorrents[torrentName]; ok {
|
||||
if existing.Hash == t.Hash {
|
||||
// If hash is the same, combine the links
|
||||
existing.ID += "," + t.ID
|
||||
// existing.Links = append(existing.Links, t.Links...)
|
||||
for _, link := range t.Links {
|
||||
existing.Links = appendIfNotExists(existing.Links, link)
|
||||
}
|
||||
existing.Bytes += t.Bytes
|
||||
existing.Added = moreRecent(existing.Added, t.Added)
|
||||
mappedTorrents[torrentName] = existing
|
||||
} else {
|
||||
// If hash is different, delete old entry and create two new entries
|
||||
delete(mappedTorrents, torrentName)
|
||||
newKey1 := fmt.Sprintf("%s - %s", torrentName, t.Hash[:4])
|
||||
mappedTorrents[newKey1] = t
|
||||
newKey2 := fmt.Sprintf("%s - %s", existing.Filename, existing.Hash[:4])
|
||||
mappedTorrents[newKey2] = existing
|
||||
}
|
||||
} else {
|
||||
mappedTorrents[torrentName] = t
|
||||
}
|
||||
}
|
||||
|
||||
// Convert the map back to a slice
|
||||
deduplicated := make([]Torrent, 0, len(mappedTorrents))
|
||||
for _, value := range mappedTorrents {
|
||||
deduplicated = append(deduplicated, value)
|
||||
}
|
||||
|
||||
return deduplicated
|
||||
}
|
||||
|
||||
// contains reports whether str occurs anywhere in slice.
func contains(slice []string, str string) bool {
	for i := range slice {
		if slice[i] == str {
			return true
		}
	}
	return false
}
|
||||
|
||||
func appendIfNotExists(slice []string, str string) []string {
|
||||
if !contains(slice, str) {
|
||||
slice = append(slice, str)
|
||||
}
|
||||
return slice
|
||||
}
|
||||
|
||||
// moreRecent returns whichever of two RFC 3339 timestamp strings is later.
// If either string fails to parse, time1 is returned as-is.
func moreRecent(time1, time2 string) string {
	t1, err1 := time.Parse(time.RFC3339, time1)
	t2, err2 := time.Parse(time.RFC3339, time2)
	switch {
	case err1 != nil || err2 != nil:
		return time1
	case t2.After(t1):
		return time2
	default:
		return time1
	}
}
|
||||
|
||||
@@ -27,6 +27,6 @@ type Torrent struct {
|
||||
// File describes a single file inside a torrent as reported by the
// Real-Debrid API.
type File struct {
	ID       int    `json:"id"`
	Path     string `json:"path"`
	Bytes    int64  `json:"bytes"` // int64: file sizes can exceed the 32-bit range
	Selected int    `json:"selected"`
}
|
||||
|
||||
@@ -1,211 +0,0 @@
|
||||
package repo
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"database/sql"
|
||||
"encoding/gob"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/url"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/debridmediamanager.com/zurg/pkg/davextra"
|
||||
"github.com/debridmediamanager.com/zurg/pkg/realdebrid"
|
||||
_ "github.com/go-sql-driver/mysql"
|
||||
"github.com/qianbin/directcache"
|
||||
"github.com/zeebo/xxh3"
|
||||
)
|
||||
|
||||
// Database bundles a SQL connection with an in-process cache that is used to
// avoid repeated database lookups of link records.
type Database struct {
	Connection *sql.DB
	Cache      *directcache.Cache
}
|
||||
|
||||
func GenerateID(segment1, segment2, segment3 string) string {
|
||||
fullPath := path.Join(segment1, segment2, segment3)
|
||||
hash := xxh3.HashString(fullPath)
|
||||
return fmt.Sprintf("%016x", hash)
|
||||
}
|
||||
|
||||
func NewDatabase(dsn string) (*Database, error) {
|
||||
db, err := sql.Open("mysql", dsn)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cache := directcache.New(10 << 20) // This initializes a cache with 10 MB
|
||||
|
||||
return &Database{Connection: db, Cache: cache}, nil
|
||||
}
|
||||
|
||||
// Insert stores one unrestricted link record under parentHash, skipping the
// write when a record with the same derived ID already exists. Failures are
// logged rather than returned (best-effort write). A successful insert
// invalidates the cache entry for parentHash.
func (db *Database) Insert(parentHash, torrentName string, resp realdebrid.UnrestrictResponse) {
	// Generate the ID for the link
	var id string
	if resp.Filename == "" {
		// alternative ID for 404 links
		id = GenerateID(parentHash, resp.Link, "")
	} else {
		id = GenerateID(parentHash, resp.Filename, davextra.GetLinkFragment(resp.Link))
	}
	// Check if the link already exists in the database
	var exists int
	err := db.Connection.QueryRow("SELECT COUNT(*) FROM Links WHERE ID = ?", id).Scan(&exists)
	if err != nil {
		// NOTE(review): on a query error exists stays 0, so the insert below
		// still runs — confirm this fall-through is intended.
		log.Printf("failed to check existence: %v", err)
	}

	// If link does not exist in the database, insert the new record
	if exists == 0 {
		_, err = db.Connection.Exec(`
		INSERT INTO Links (ID, ParentHash, Directory, Filename, Filesize, Link, Host)
		VALUES (?, ?, ?, ?, ?, ?, ?)`,
			id,
			parentHash,
			torrentName,
			resp.Filename,
			resp.Filesize,
			resp.Link,
			resp.Host,
		)
		if err != nil {
			log.Printf("failed to insert record: %v", err)
		}

		// Clear cache for parentHash
		db.Cache.Del([]byte(parentHash))
	}
}
|
||||
|
||||
// Get returns the DavFile for filename under parentHash. It consults the
// gob-encoded in-process cache first and falls back to the database on a
// miss, populating the cache on the way out.
func (db *Database) Get(parentHash, filename string) (*DavFile, error) {
	// Strip the embedded "DMM<id>" marker (if any) to recover the original
	// filename and the link fragment used to derive the record ID.
	filenameV2, linkFragment := extractIDFromFilename(filename)
	id := GenerateID(parentHash, filenameV2, linkFragment)
	data, ok := db.Cache.Get([]byte(id))
	if !ok {
		// Cache miss: hit the database.
		resp, err := fetchFromDatabaseByID(db.Connection, id, linkFragment)
		if err != nil {
			return nil, err
		}

		// NOTE(review): fetchFromDatabaseByID returns (nil, nil) when no row
		// matches; that nil then flows into gob encoding here — verify the
		// no-row path behaves as intended.
		buffer := &bytes.Buffer{}
		encoder := gob.NewEncoder(buffer)
		if err := encoder.Encode(resp); err != nil {
			return nil, err
		}

		db.Cache.Set([]byte(id), buffer.Bytes())
		return resp, nil
	}

	// Cache hit: decode the stored gob blob back into a DavFile.
	buffer := bytes.NewBuffer(data)
	decoder := gob.NewDecoder(buffer)
	var resp DavFile
	if err := decoder.Decode(&resp); err != nil {
		return nil, err
	}
	return &resp, nil
}
|
||||
|
||||
// dmmIDRe matches the " DMM<id>" marker embedded in filenames; the capture
// group is the id itself. Compiled once at package scope instead of on every
// call.
var dmmIDRe = regexp.MustCompile(`\sDMM(\w+)`)

// extractIDFromFilename splits a (possibly URL-escaped) filename into the
// original filename and the embedded DMM link-fragment ID. When no marker is
// present, the unescaped filename and an empty ID are returned.
func extractIDFromFilename(filename string) (string, string) {
	unescaped, err := url.PathUnescape(filename)
	if err != nil {
		// Not valid percent-encoding; fall back to the raw name.
		unescaped = filename
	}
	ext := filepath.Ext(unescaped)
	name := strings.TrimSuffix(unescaped, ext)

	matches := dmmIDRe.FindStringSubmatch(name)
	if len(matches) < 2 {
		// No ID found
		return unescaped, ""
	}

	// Remove the marker (including its leading whitespace) from the name.
	originalName := strings.Replace(name, matches[0], "", 1)
	return originalName + ext, matches[1]
}
|
||||
|
||||
// GetMultiple returns all DavFiles stored under parentHash. The gob cache is
// keyed by the hash itself; on a miss the full set is fetched from the
// database and cached.
func (db *Database) GetMultiple(parentHash string) (*DavFiles, error) {
	key := []byte(parentHash)
	data, ok := db.Cache.Get(key)
	if !ok {
		// Cache miss: load every record for this hash from the database.
		resps, err := fetchMultipleFromDatabase(db.Connection, parentHash)
		if err != nil {
			return nil, err
		}

		// Cache the freshly fetched set as a gob blob for next time.
		buffer := &bytes.Buffer{}
		encoder := gob.NewEncoder(buffer)
		if err := encoder.Encode(resps); err != nil {
			return nil, err
		}

		db.Cache.Set(key, buffer.Bytes())
		return resps, nil
	}

	// Cache hit: decode the stored blob.
	buffer := bytes.NewBuffer(data)
	decoder := gob.NewDecoder(buffer)
	var resps DavFiles
	if err := decoder.Decode(&resps); err != nil {
		return nil, err
	}

	return &resps, nil
}
|
||||
|
||||
// fetchFromDatabaseByID loads a single link record by its ID, additionally
// constraining the Link column to the real-debrid download URL prefix built
// from linkFragment. Returns (nil, nil) when no row matches — callers must
// check for a nil result as well as the error.
func fetchFromDatabaseByID(conn *sql.DB, id, linkFragment string) (*DavFile, error) {
	log.Printf("fetching from database: %s", id)
	var resp DavFile

	query := `
	SELECT Filename, Filesize, Link
	FROM Links WHERE ID = ? AND Link LIKE ?`
	row := conn.QueryRow(query, id, "https://real-debrid.com/d/"+linkFragment+"%")

	err := row.Scan(&resp.Filename, &resp.Filesize, &resp.Link)
	if err != nil {
		if err == sql.ErrNoRows {
			// No matching row: signalled with a nil DavFile and nil error.
			return nil, nil
		}
		log.Printf("failed to fetch record: %v", err)
		return nil, err
	}

	return &resp, nil
}
|
||||
|
||||
// fetchMultipleFromDatabase loads every link record stored under parentHash
// and wraps them in a DavFiles container.
func fetchMultipleFromDatabase(conn *sql.DB, parentHash string) (*DavFiles, error) {
	log.Printf("fetching multiple from database: %s", parentHash)
	rows, err := conn.Query(`
	SELECT Filename, Filesize, Link
	FROM Links WHERE ParentHash = ?`,
		parentHash,
	)
	if err != nil {
		return nil, fmt.Errorf("failed to fetch records: %v", err)
	}
	defer rows.Close()

	var responses []*DavFile

	for rows.Next() {
		resp := &DavFile{}
		if err := rows.Scan(&resp.Filename, &resp.Filesize, &resp.Link); err != nil {
			return nil, fmt.Errorf("failed to scan row: %v", err)
		}
		responses = append(responses, resp)
	}
	// Surface any error that terminated the row iteration early.
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("error while iterating over rows: %v", err)
	}

	result := &DavFiles{
		Files: responses,
	}

	return result, nil
}
|
||||
@@ -1,11 +0,0 @@
|
||||
package repo
|
||||
|
||||
// DavFile is the minimal link record exposed to the WebDAV layer: a filename,
// its size in bytes, and the unrestricted download link.
type DavFile struct {
	Filename string
	Filesize int64
	Link     string
}
|
||||
|
||||
// DavFiles is a collection of DavFile records belonging to one torrent.
type DavFiles struct {
	Files []*DavFile
}
|
||||
Reference in New Issue
Block a user