Add debug symbols
This commit is contained in:
30
bench.py
Normal file
30
bench.py
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
import aiohttp
|
||||||
|
import asyncio
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from urllib.parse import urljoin
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
async def extract_links(url):
    """Yield absolute URLs for every hyperlink on the page at *url*.

    Fetches the page once with aiohttp, parses it with BeautifulSoup, and
    resolves each anchor's href against the page URL.

    Args:
        url: Page to fetch and scan for ``<a>`` tags.

    Yields:
        str: Absolute URL for each anchor that carries an href attribute.
    """
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            soup = BeautifulSoup(await response.text(), 'html.parser')
            # href=True skips anchors without an href attribute;
            # urljoin(url, None) would raise TypeError on such anchors.
            for link in soup.find_all('a', href=True):
                yield urljoin(url, link['href'])
|
||||||
|
|
||||||
|
async def benchmark(url):
    """Run the 'hey' HTTP load generator against *url* without blocking.

    Launches ``hey -n 100 -c 10 <url>`` as an asyncio subprocess and waits
    for it to finish, so other tasks on the event loop keep running while
    the benchmark executes (the previous subprocess.run call blocked the
    whole loop).

    Args:
        url: Target URL handed to the 'hey' binary — assumed to be on PATH.
    """
    proc = await asyncio.create_subprocess_exec(
        'hey', '-n', '100', '-c', '10', url
    )
    await proc.wait()
|
||||||
|
|
||||||
|
# Root listing page whose anchors become the benchmark targets.
# NOTE(review): assumes zen.box:9999 serves an index at '/http/' — confirm reachability.
url = 'http://zen.box:9999/http/'
|
||||||
|
|
||||||
|
async def main():
    """Crawl the root listing page and benchmark up to 50 discovered links.

    Each link is benchmarked twice: once as-is, and once with the '/http/'
    path segment collapsed to '/'.
    """
    processed = 0
    async for link in extract_links(url):
        # Stop after 50 links, matching the original cap.
        if processed >= 50:
            break
        await benchmark(link)
        await benchmark(link.replace('/http/', '/'))
        processed += 1
|
||||||
|
|
||||||
|
# Guard the entry point so importing this module doesn't start the crawl.
if __name__ == '__main__':
    # asyncio.run requires Python 3.7+.
    asyncio.run(main())
|
||||||
@@ -15,6 +15,8 @@ import (
|
|||||||
zurghttp "github.com/debridmediamanager.com/zurg/pkg/http"
|
zurghttp "github.com/debridmediamanager.com/zurg/pkg/http"
|
||||||
"github.com/debridmediamanager.com/zurg/pkg/logutil"
|
"github.com/debridmediamanager.com/zurg/pkg/logutil"
|
||||||
"github.com/debridmediamanager.com/zurg/pkg/realdebrid"
|
"github.com/debridmediamanager.com/zurg/pkg/realdebrid"
|
||||||
|
|
||||||
|
_ "net/http/pprof"
|
||||||
)
|
)
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
@@ -55,6 +57,10 @@ func main() {
|
|||||||
downloadClient := zurghttp.NewHTTPClient(config.GetToken(), config.GetRetriesUntilFailed(), 0, config, log.Named("dlclient"))
|
downloadClient := zurghttp.NewHTTPClient(config.GetToken(), config.GetRetriesUntilFailed(), 0, config, log.Named("dlclient"))
|
||||||
getfile := universal.NewGetFile(downloadClient)
|
getfile := universal.NewGetFile(downloadClient)
|
||||||
|
|
||||||
|
go func() {
|
||||||
|
http.ListenAndServe("[::]:6060", nil)
|
||||||
|
}()
|
||||||
|
|
||||||
mux := http.NewServeMux()
|
mux := http.NewServeMux()
|
||||||
net.Router(mux, getfile, config, torrentMgr, log.Named("net"))
|
net.Router(mux, getfile, config, torrentMgr, log.Named("net"))
|
||||||
|
|
||||||
|
|||||||
@@ -72,20 +72,10 @@ func handleListTorrents(w http.ResponseWriter, requestPath string, t *torrent.To
|
|||||||
// initial response is the directory itself
|
// initial response is the directory itself
|
||||||
fmt.Fprint(w, dav.BaseDirectory(basePath, ""))
|
fmt.Fprint(w, dav.BaseDirectory(basePath, ""))
|
||||||
|
|
||||||
var allTorrents []torrent.Torrent
|
allTorrents := torrents.Keys()
|
||||||
torrents.IterCb(func(key string, tor *torrent.Torrent) {
|
sort.Strings(allTorrents)
|
||||||
if tor.AllInProgress() {
|
for _, accessKey := range allTorrents {
|
||||||
return
|
tor, _ := torrents.Get(accessKey)
|
||||||
}
|
|
||||||
copy := *tor
|
|
||||||
copy.AccessKey = key
|
|
||||||
allTorrents = append(allTorrents, copy)
|
|
||||||
})
|
|
||||||
sort.Slice(allTorrents, func(i, j int) bool {
|
|
||||||
return allTorrents[i].AccessKey < allTorrents[j].AccessKey
|
|
||||||
})
|
|
||||||
|
|
||||||
for _, tor := range allTorrents {
|
|
||||||
fmt.Fprint(w, dav.Directory(tor.AccessKey, tor.LatestAdded))
|
fmt.Fprint(w, dav.Directory(tor.AccessKey, tor.LatestAdded))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -73,21 +73,10 @@ func handleListOfTorrents(requestPath string, t *torrent.TorrentManager) (*strin
|
|||||||
|
|
||||||
htmlDoc := "<ol>"
|
htmlDoc := "<ol>"
|
||||||
|
|
||||||
var allTorrents []torrent.Torrent
|
allTorrents := torrents.Keys()
|
||||||
torrents.IterCb(func(key string, tor *torrent.Torrent) {
|
sort.Strings(allTorrents)
|
||||||
if tor.AllInProgress() {
|
for _, accessKey := range allTorrents {
|
||||||
return
|
htmlDoc = htmlDoc + fmt.Sprintf("<li><a href=\"%s/\">%s</a></li>", filepath.Join(requestPath, url.PathEscape(accessKey)), accessKey)
|
||||||
}
|
|
||||||
copy := *tor
|
|
||||||
copy.AccessKey = key
|
|
||||||
allTorrents = append(allTorrents, copy)
|
|
||||||
})
|
|
||||||
sort.Slice(allTorrents, func(i, j int) bool {
|
|
||||||
return allTorrents[i].AccessKey < allTorrents[j].AccessKey
|
|
||||||
})
|
|
||||||
|
|
||||||
for _, tor := range allTorrents {
|
|
||||||
htmlDoc = htmlDoc + fmt.Sprintf("<li><a href=\"%s/\">%s</a></li>", filepath.Join(requestPath, url.PathEscape(tor.AccessKey)), tor.AccessKey)
|
|
||||||
}
|
}
|
||||||
return &htmlDoc, nil
|
return &htmlDoc, nil
|
||||||
}
|
}
|
||||||
|
|||||||
Reference in New Issue
Block a user