From bb41c87b2a4ec28ff5e081f7f046879211f38ccb Mon Sep 17 00:00:00 2001
From: Fesaa <77553571+Fesaa@users.noreply.github.com>
Date: Fri, 5 Jul 2024 19:21:45 +0200
Subject: [PATCH] Remove utils, use go-tools

---
 Dockerfile                 |  1 -
 api/routes/torrent.go      |  3 +-
 go.mod                     |  5 +--
 go.sum                     |  6 +++
 limetorrents/search.go     |  6 +--
 mangadex/client.go         | 18 ++++-----
 mangadex/manga.go          | 18 ++++++---
 mangadex/search.go         |  9 +++--
 mangadex/setup.go          |  2 +-
 {utils => mangadex}/zip.go |  4 +-
 providers/nyaaWrapper.go   |  6 +--
 subsplease/search.go       |  6 +--
 utils/cache.go             | 68 --------------------------------
 utils/functions.go         | 35 -----------------
 utils/map.go               | 19 ---------
 utils/queue.go             | 54 --------------------------
 utils/safeMap.go           | 79 --------------------------------------
 yoitsu/torrent.go          | 22 +++++++++--
 yoitsu/types.go            |  4 +-
 yoitsu/yoitsu.go           | 29 +++++++-------
 yts/search.go              |  9 ++---
 21 files changed, 88 insertions(+), 315 deletions(-)
 rename {utils => mangadex}/zip.go (93%)
 delete mode 100644 utils/cache.go
 delete mode 100644 utils/functions.go
 delete mode 100644 utils/map.go
 delete mode 100644 utils/queue.go
 delete mode 100644 utils/safeMap.go

diff --git a/Dockerfile b/Dockerfile
index 91e28b8..11ae136 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -29,7 +29,6 @@ COPY ./mangadex ./mangadex
 COPY ./payload ./payload
 COPY ./providers ./providers
 COPY ./subsplease ./subsplease
-COPY ./utils ./utils
 COPY ./yoitsu ./yoitsu
 COPY ./yts ./yts
 COPY ./*.go ./
diff --git a/api/routes/torrent.go b/api/routes/torrent.go
index de714a1..17cce00 100644
--- a/api/routes/torrent.go
+++ b/api/routes/torrent.go
@@ -60,8 +60,9 @@ func Stats(ctx *fiber.Ctx) error {
 		Running: []payload.InfoStat{},
 		Queued:  []payload.QueueStat{},
 	}
-	yoitsu.I().GetRunningTorrents().ForEachSafe(func(key string, torrent yoitsu.Torrent) {
+	yoitsu.I().GetRunningTorrents().ForEach(func(key string, torrent yoitsu.Torrent) bool {
 		statsResponse.Running = append(statsResponse.Running, torrent.GetInfo())
+		return true
 	})
 	manga := mangadex.I().GetCurrentManga()
 	if manga != nil {
diff --git a/go.mod b/go.mod
index e66c06a..fbc91c1 100644
--- a/go.mod
+++ b/go.mod
@@ -1,8 +1,6 @@
 module github.com/Fesaa/Media-Provider
 
-go 1.22
-
-toolchain go1.22.2
+go 1.22.2
 
 require (
 	github.com/PuerkitoBio/goquery v1.9.2
@@ -14,6 +12,7 @@ require (
 )
 
 require (
+	github.com/Fesaa/go-tools v0.0.3 // indirect
 	github.com/RoaringBitmap/roaring v1.9.4 // indirect
 	github.com/ajwerner/btree v0.0.0-20211221152037-f427b3e689c0 // indirect
 	github.com/alecthomas/atomic v0.1.0-alpha2 // indirect
diff --git a/go.sum b/go.sum
index 8c165c2..6ec9bfa 100644
--- a/go.sum
+++ b/go.sum
@@ -5,6 +5,12 @@ crawshaw.io/sqlite v0.3.2/go.mod h1:igAO5JulrQ1DbdZdtVq48mnZUBAPOeFzer7VhDWNtW4=
 filippo.io/edwards25519 v1.0.0-rc.1 h1:m0VOOB23frXZvAOK44usCgLWvtsxIoMCTBGJZlpmGfU=
 filippo.io/edwards25519 v1.0.0-rc.1/go.mod h1:N1IkdkCkiLB6tki+MYJoSx2JTY9NUlxZE7eHn5EwJns=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/Fesaa/go-tools v0.0.1 h1:YnOM/nkD0R/d+ryIMqWJW4OUMbfDCddokX0QSsIHdCI=
+github.com/Fesaa/go-tools v0.0.1/go.mod h1:cqt4m4QkWtNlVN+NGvWjNKai6/hHJYbKPpS6VnrORiA=
+github.com/Fesaa/go-tools v0.0.2 h1:z1XUKbb7GGMOppUEy8CC1tH8x1TPD+QN43wpRSYeNwI=
+github.com/Fesaa/go-tools v0.0.2/go.mod h1:cqt4m4QkWtNlVN+NGvWjNKai6/hHJYbKPpS6VnrORiA=
+github.com/Fesaa/go-tools v0.0.3 h1:eq5H03eowQV4SYGxIVWa1HUAnmAQ/yl39l4p/wTk9Gw=
+github.com/Fesaa/go-tools v0.0.3/go.mod h1:cqt4m4QkWtNlVN+NGvWjNKai6/hHJYbKPpS6VnrORiA=
 github.com/OneOfOne/xxhash v1.2.2 h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE=
 github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
 github.com/PuerkitoBio/goquery v1.5.1/go.mod h1:GsLWisAFVj4WgDibEWF4pvYnkVQBpKBKeU+7zCJoLcc=
diff --git a/limetorrents/search.go b/limetorrents/search.go
index 6ba39e8..c07b191 100644
--- a/limetorrents/search.go
+++ b/limetorrents/search.go
@@ -3,26 +3,26 @@ package limetorrents
 import (
 	"fmt"
 	"github.com/Fesaa/Media-Provider/log"
+	tools "github.com/Fesaa/go-tools"
 	"net/http"
 	"net/url"
 	"strings"
 	"time"
 
-	"github.com/Fesaa/Media-Provider/utils"
 	"github.com/PuerkitoBio/goquery"
 )
 
 const BASE_URl string = "https://www.limetorrents.lol"
 const SEARCH_URL string = BASE_URl + "/search/%s/%s/%d/"
 
-var cache utils.Cache[[]SearchResult] = *utils.NewCache[[]SearchResult](5 * time.Minute)
+var cache = tools.NewCache[[]SearchResult](5 * time.Minute)
 
 func Search(searchOptions SearchOptions) ([]SearchResult, error) {
 	searchUrl := formatUrl(searchOptions)
 	log.Debug("searching lime for torrents", "url", searchUrl)
 	if res := cache.Get(searchUrl); res != nil {
 		log.Trace("Limetorrents Cache hit", "url", searchUrl)
-		return *res, nil
+		return res.Get(), nil
 	}
 
 	doc, err := getSearch(searchUrl)
diff --git a/mangadex/client.go b/mangadex/client.go
index c84c67b..380809d 100644
--- a/mangadex/client.go
+++ b/mangadex/client.go
@@ -5,7 +5,7 @@ import (
 	"github.com/Fesaa/Media-Provider/config"
 	"github.com/Fesaa/Media-Provider/log"
 	"github.com/Fesaa/Media-Provider/payload"
-	"github.com/Fesaa/Media-Provider/utils"
+	tools "github.com/Fesaa/go-tools"
 	"os"
 	"path"
 	"slices"
@@ -22,8 +22,8 @@ func newClient(c MangadexConfig) MangadexClient {
 	return &mangadexClientImpl{
 		dir:         config.OrDefault(c.GetRootDir(), "temp"),
 		maxImages:   c.GetMaxConcurrentMangadexImages(),
-		mangas:      utils.NewSafeMap[string, Manga](),
-		queue:       utils.NewQueue[payload.QueueStat](),
+		mangas:      tools.NewSafeMap[string, Manga](),
+		queue:       tools.NewQueue[payload.QueueStat](),
 		downloading: nil,
 		mu:          sync.Mutex{},
 	}
@@ -32,8 +32,8 @@ type mangadexClientImpl struct {
 	dir         string
 	maxImages   int
-	mangas      *utils.SafeMap[string, Manga]
-	queue       utils.Queue[payload.QueueStat]
+	mangas      tools.SafeMap[string, Manga]
+	queue       tools.Queue[payload.QueueStat]
 	downloading Manga
 	mu          sync.Mutex
 }
 
@@ -51,7 +51,7 @@ func (m *mangadexClientImpl) GetQueuedMangas() []payload.QueueStat {
 }
 
 func (m *mangadexClientImpl) Download(req payload.DownloadRequest) (Manga, error) {
-	if m.mangas.Has(req.Id) {
+	if m.mangas.Contains(req.Id) {
 		return nil, fmt.Errorf("manga already exists: %s", req.Id)
 	}
 
@@ -64,7 +64,7 @@ func (m *mangadexClientImpl) Download(req payload.DownloadRequest) (Manga, error
 
 	log.Info("downloading manga", "mangaId", req.Id, "into", req.BaseDir, "title?", req.TempTitle)
 	manga := newManga(req, m.maxImages, m)
-	m.mangas.Set(req.Id, manga)
+	m.mangas.Put(req.Id, manga)
 	m.downloading = manga
 	manga.WaitForInfoAndDownload()
 	return manga, nil
@@ -85,7 +85,7 @@ func (m *mangadexClientImpl) RemoveDownload(req payload.StopRequest) error {
 
 	log.Info("Dropping manga", "mangaId", req.Id, "title", manga.Title(), "deleteFiles", req.DeleteFiles)
 	go func() {
-		m.mangas.Delete(req.Id)
+		m.mangas.Remove(req.Id)
 		manga.Cancel()
 		m.mu.Lock()
 		m.downloading = nil
@@ -159,7 +159,7 @@ func (m *mangadexClientImpl) cleanup(manga Manga) {
 		if !entry.IsDir() {
 			continue
 		}
-		err = utils.ZipFolder(path.Join(dir, entry.Name()), path.Join(dir, entry.Name()+".cbz"))
+		err = zipFolder(path.Join(dir, entry.Name()), path.Join(dir, entry.Name()+".cbz"))
 		if err != nil {
 			log.Error("error while zipping directory", "dir", dir, "mangaId", manga.Id(), "err", err)
 			continue
diff --git a/mangadex/manga.go b/mangadex/manga.go
index 4683605..4339f41 100644
--- a/mangadex/manga.go
+++ b/mangadex/manga.go
@@ -8,7 +8,7 @@ import (
 	"github.com/Fesaa/Media-Provider/config"
 	"github.com/Fesaa/Media-Provider/log"
 	"github.com/Fesaa/Media-Provider/payload"
-	"github.com/Fesaa/Media-Provider/utils"
+	tools "github.com/Fesaa/go-tools"
 	"io"
 	"log/slog"
 	"net/http"
@@ -37,7 +37,7 @@ type mangaImpl struct {
 
 	info     *MangaSearchData
 	chapters ChapterSearchResponse
-	covers   *utils.SafeMap[string, string]
+	covers   tools.SafeMap[string, string]
 
 	volumeMetadata           []string
 	alreadyDownloadedVolumes []string
@@ -120,7 +120,7 @@ func (m *mangaImpl) GetInfo() payload.InfoStat {
 		}(),
 		Size:        strconv.Itoa(len(m.chapters.Data)) + " Chapters",
 		Downloading: m.wg != nil,
-		Progress:    utils.Percent(int64(m.chaptersDownloaded), int64(len(m.chapters.Data))),
+		Progress:    Percent(int64(m.chaptersDownloaded), int64(len(m.chapters.Data))),
 		SpeedType:   payload.IMAGES,
 		Speed:       payload.SpeedData{T: time.Now().Unix(), Speed: speed},
 		DownloadDir: m.GetDownloadDir(),
@@ -182,9 +182,9 @@ func (m *mangaImpl) loadInfo() chan struct{} {
 		covers, err := GetCoverImages(m.id)
 		if err != nil || covers == nil {
 			m.log.Warn("error while loading manga covers, ignoring", "err", err)
-			m.covers = &utils.SafeMap[string, string]{}
+			m.covers = tools.NewSafeMap[string, string]()
 		} else {
-			m.covers = utils.NewSafeMap(covers.GetUrlsPerVolume(m.id))
+			m.covers = tools.NewSafeMap(covers.GetUrlsPerVolume(m.id))
 		}
 
 		close(out)
@@ -403,7 +403,7 @@ func (m *mangaImpl) comicInfo(chapter ChapterSearchData) *comicinfo.ComicInfo {
 		m.log.Trace("unable to parse volume number", "volume", chapter.Attributes.Volume, "err", err)
 	}
 
-	ci.Tags = strings.Join(utils.MaybeMap(m.info.Attributes.Tags, func(t TagData) (string, bool) {
+	ci.Tags = strings.Join(tools.TransformMaybeArray(m.info.Attributes.Tags, func(t TagData) (string, bool) {
 		n, ok := t.Attributes.Name["en"]
 		if !ok {
 			return "", false
@@ -468,3 +468,9 @@ func downloadAndWrite(url string, path string) error {
 
 	return nil
 }
+
+func Percent(a, b int64) int64 {
+	b = max(b, 1)
+	ratio := (float64)(a) / (float64)(b)
+	return (int64)(ratio * 100)
+}
diff --git a/mangadex/search.go b/mangadex/search.go
index 8f1ec76..e173dc9 100644
--- a/mangadex/search.go
+++ b/mangadex/search.go
@@ -4,15 +4,15 @@ import (
 	"encoding/json"
 	"fmt"
 	"github.com/Fesaa/Media-Provider/log"
-	"github.com/Fesaa/Media-Provider/utils"
+	tools "github.com/Fesaa/go-tools"
 	"io"
 	"net/http"
 	"time"
 )
 
-var tags = utils.NewSafeMap[string, string]()
+var tags = tools.NewSafeMap[string, string]()
 
-var cache = utils.NewCache[MangaSearchResponse](5 * time.Minute)
+var cache = tools.NewCache[*MangaSearchResponse](5 * time.Minute)
 
 func mapTags(in []string, skip bool) ([]string, error) {
 	mappedTags := make([]string, 0)
@@ -48,7 +48,7 @@ func SearchManga(options SearchOptions) (*MangaSearchResponse, error) {
 	log.Trace("searching Mangadex for Manga", "options", fmt.Sprintf("%#v", options), "url", url)
 	if hit := cache.Get(url); hit != nil {
 		log.Trace("Mangadex Cache hit", "url", url)
-		return hit, nil
+		return hit.Get(), nil
 	}
 
 	var searchResponse MangaSearchResponse
@@ -56,6 +56,7 @@
 	if err != nil {
 		return nil, err
 	}
+	cache.Set(url, &searchResponse)
 
 	return &searchResponse, nil
 }
diff --git a/mangadex/setup.go b/mangadex/setup.go
index 231cc7c..d00acf3 100644
--- a/mangadex/setup.go
+++ b/mangadex/setup.go
@@ -46,7 +46,7 @@ func loadTags() error {
 		if !ok {
 			continue
 		}
-		tags.Set(enName, tag.Id)
+		tags.Put(enName, tag.Id)
 	}
 	return nil
 }
diff --git a/utils/zip.go b/mangadex/zip.go
similarity index 93%
rename from utils/zip.go
rename to mangadex/zip.go
index 55e7831..638e621 100644
--- a/utils/zip.go
+++ b/mangadex/zip.go
@@ -1,4 +1,4 @@
-package utils
+package mangadex
 
 import (
 	"archive/zip"
@@ -40,7 +40,7 @@ func addFileToZip(zipWriter *zip.Writer, filename string, baseDir string) error
 	return err
 }
 
-func ZipFolder(folderPath string, zipFileName string) error {
+func zipFolder(folderPath string, zipFileName string) error {
 	log.Trace("zipping folder", "path", folderPath, "filename", zipFileName)
 	zipFile, err := os.Create(zipFileName)
 	if err != nil {
diff --git a/providers/nyaaWrapper.go b/providers/nyaaWrapper.go
index 2f11c42..a50f5aa 100644
--- a/providers/nyaaWrapper.go
+++ b/providers/nyaaWrapper.go
@@ -3,13 +3,13 @@ package providers
 import (
 	"fmt"
 	"github.com/Fesaa/Media-Provider/log"
-	"github.com/Fesaa/Media-Provider/utils"
+	tools "github.com/Fesaa/go-tools"
 	"github.com/irevenko/go-nyaa/nyaa"
 	"github.com/irevenko/go-nyaa/types"
 	"time"
 )
 
-var cache = *utils.NewCache[[]types.Torrent](5 * time.Minute)
+var cache = tools.NewCache[[]types.Torrent](5 * time.Minute)
 
 func cacheKey(opts nyaa.SearchOptions) string {
 	return fmt.Sprintf("%s_%s_%s_%s_%s", opts.Provider, opts.Filter, opts.SortBy, opts.Category, opts.Query)
@@ -21,7 +21,7 @@ func nyaaSearch(opts nyaa.SearchOptions) ([]types.Torrent, error) {
 
 	if hit := cache.Get(key); hit != nil {
 		log.Trace("Nyaa Cache hit", "key", key)
-		return *hit, nil
+		return hit.Get(), nil
 	}
 
 	search, err := nyaa.Search(opts)
diff --git a/subsplease/search.go b/subsplease/search.go
index 02c5fcc..1daa7b8 100644
--- a/subsplease/search.go
+++ b/subsplease/search.go
@@ -4,7 +4,7 @@ import (
 	"encoding/json"
 	"fmt"
 	"github.com/Fesaa/Media-Provider/log"
-	"github.com/Fesaa/Media-Provider/utils"
+	tools "github.com/Fesaa/go-tools"
 	"io"
 	"net/http"
 	"net/url"
@@ -13,7 +13,7 @@ import (
 
 const URL string = "https://subsplease.org/api/?f=search&tz=Europe/Brussels&s=%s"
 
-var cache = *utils.NewCache[SearchResult](5 * time.Minute)
+var cache = tools.NewCache[SearchResult](5 * time.Minute)
 
 type SearchOptions struct {
 	Query string
@@ -29,7 +29,7 @@ func Search(options SearchOptions) (SearchResult, error) {
 
 	if res := cache.Get(u); res != nil {
 		log.Trace("Cache hit", "url", u)
-		return *res, nil
+		return res.Get(), nil
 	}
 
 	req, err := http.Get(u)
diff --git a/utils/cache.go b/utils/cache.go
deleted file mode 100644
index e448070..0000000
--- a/utils/cache.go
+++ /dev/null
@@ -1,68 +0,0 @@
-package utils
-
-import (
-	"sync"
-	"time"
-)
-
-type Cache[T any] struct {
-	objects map[string]*CacheObject[T]
-	lock    *sync.RWMutex
-	expiry  time.Duration
-}
-
-func NewCache[T any](expiry time.Duration) *Cache[T] {
-	c := &Cache[T]{
-		objects: make(map[string]*CacheObject[T]),
-		lock:    &sync.RWMutex{},
-		expiry:  expiry,
-	}
-	go c.cleaner()
-	return c
-}
-
-func (c *Cache[T]) Get(key string) *T {
-	c.lock.RLock()
-	defer c.lock.RUnlock()
-	obj, ok := c.objects[key]
-	if !ok {
-		return nil
-	}
-	return &obj.Obj
-}
-
-func (c *Cache[T]) Set(key string, obj T) {
-	c.lock.Lock()
-	defer c.lock.Unlock()
-	c.objects[key] = newCacheObject(obj, c.expiry)
-}
-
-func (c *Cache[T]) cleaner() {
-	for range time.Tick(c.expiry) {
-		c.lock.Lock()
-		for k, v := range c.objects {
-			if v.Expired() {
-				delete(c.objects, k)
-			}
-		}
-		c.lock.Unlock()
-	}
-}
-
-type CacheObject[T any] struct {
-	Obj    T
-	insert time.Time
-	expiry time.Duration
-}
-
-func newCacheObject[T any](obj T, expiry time.Duration) *CacheObject[T] {
-	return &CacheObject[T]{
-		Obj:    obj,
-		insert: time.Now(),
-		expiry: expiry,
-	}
-}
-
-func (c *CacheObject[T]) Expired() bool {
-	return time.Since(c.insert) > c.expiry
-}
diff --git a/utils/functions.go b/utils/functions.go
deleted file mode 100644
index bf994ed..0000000
--- a/utils/functions.go
+++ /dev/null
@@ -1,35 +0,0 @@
-package utils
-
-import (
-	"fmt"
-	"math"
-)
-
-func HumanReadableSpeed(s int64) string {
-	speed := float64(s)
-	if speed < 1024 {
-		return fmt.Sprintf("%.2f B/s", speed)
-	}
-	speed /= 1024
-	if speed < 1024 {
-		return fmt.Sprintf("%.2f KB/s", speed)
-	}
-	speed /= 1024
-	return fmt.Sprintf("%.2f MB/s", speed)
-}
-
-func Percent(a, b int64) int64 {
-	b = max(b, 1)
-	ratio := (float64)(a) / (float64)(b)
-	return (int64)(ratio * 100)
-}
-
-var sizes = [...]string{"Bytes", "KB", "MB", "GB", "TB"}
-
-func BytesToSize(bytes float64) string {
-	if bytes == 0 {
-		return "0 Byte"
-	}
-	i := math.Floor(math.Log(bytes) / math.Log(1024))
-	return fmt.Sprintf("%.2f %s", bytes/math.Pow(1024, i), sizes[int(i)])
-}
diff --git a/utils/map.go b/utils/map.go
deleted file mode 100644
index 49efc24..0000000
--- a/utils/map.go
+++ /dev/null
@@ -1,19 +0,0 @@
-package utils
-
-func Map[T, S any](in []T, f func(T) S) []S {
-	out := make([]S, len(in))
-	for i, t := range in {
-		out[i] = f(t)
-	}
-	return out
-}
-
-func MaybeMap[T, S any](in []T, f func(T) (S, bool)) []S {
-	out := make([]S, 0)
-	for _, t := range in {
-		if s, ok := f(t); ok {
-			out = append(out, s)
-		}
-	}
-	return out
-}
diff --git a/utils/queue.go b/utils/queue.go
deleted file mode 100644
index 1e48e52..0000000
--- a/utils/queue.go
+++ /dev/null
@@ -1,54 +0,0 @@
-package utils
-
-import (
-	"errors"
-	"slices"
-)
-
-type Queue[T comparable] interface {
-	Enqueue(item T)
-	Dequeue() (*T, error)
-	IsEmpty() bool
-	Size() int
-	Items() []T
-	RemoveFunc(func(T) bool) bool
-}
-
-type queueImpl[T comparable] struct {
-	items []T
-}
-
-func NewQueue[T comparable]() Queue[T] {
-	return &queueImpl[T]{items: []T{}}
-}
-
-func (q *queueImpl[T]) Enqueue(item T) {
-	q.items = append(q.items, item)
-}
-
-func (q *queueImpl[T]) Dequeue() (*T, error) {
-	if q.IsEmpty() {
-		return nil, errors.New("queue is empty")
-	}
-	item := q.items[0]
-	q.items = q.items[1:]
-	return &item, nil
-}
-
-func (q *queueImpl[T]) IsEmpty() bool {
-	return len(q.items) == 0
-}
-
-func (q *queueImpl[T]) Size() int {
-	return len(q.items)
-}
-
-func (q *queueImpl[T]) Items() []T {
-	return slices.Clone(q.items)
-}
-
-func (q *queueImpl[T]) RemoveFunc(f func(T) bool) bool {
-	size := q.Size()
-	q.items = slices.DeleteFunc(q.items, f)
-	return size != q.Size()
-}
diff --git a/utils/safeMap.go b/utils/safeMap.go
deleted file mode 100644
index 4441d43..0000000
--- a/utils/safeMap.go
+++ /dev/null
@@ -1,79 +0,0 @@
-package utils
-
-import "sync"
-
-type SafeMap[K comparable, V any] struct {
-	lock sync.RWMutex
-	m    map[K]V
-}
-
-func NewSafeMap[K comparable, V any](m ...map[K]V) *SafeMap[K, V] {
-	var startMap map[K]V
-	if len(m) > 0 {
-		startMap = m[0]
-	} else {
-		startMap = make(map[K]V)
-	}
-	return &SafeMap[K, V]{
-		m:    startMap,
-		lock: sync.RWMutex{},
-	}
-}
-
-func (s *SafeMap[K, V]) Has(k K) bool {
-	s.lock.RLock()
-	defer s.lock.RUnlock()
-	_, ok := s.m[k]
-	return ok
-}
-
-func (s *SafeMap[K, V]) Get(k K) (V, bool) {
-	s.lock.RLock()
-	defer s.lock.RUnlock()
-	v, ok := s.m[k]
-	return v, ok
-}
-
-func (s *SafeMap[K, V]) Set(k K, v V) {
-	s.lock.Lock()
-	defer s.lock.Unlock()
-	s.m[k] = v
-}
-
-func (s *SafeMap[K, V]) Delete(k K) {
-	s.lock.Lock()
-	defer s.lock.Unlock()
-	delete(s.m, k)
-}
-
-func (s *SafeMap[K, V]) Len() int {
-	s.lock.RLock()
-	defer s.lock.RUnlock()
-	return len(s.m)
-}
-
-func (s *SafeMap[K, V]) ForEachSafe(f func(K, V)) {
-	s.lock.RLock()
-	defer s.lock.RUnlock()
-	for k, v := range s.m {
-		f(k, v)
-	}
-}
-
-func (s *SafeMap[K, V]) ForEach(f func(K, V)) {
-	s.lock.Lock()
-	for k, v := range s.m {
-		s.lock.Unlock()
-		f(k, v)
-		s.lock.Lock()
-	}
-	s.lock.Unlock()
-}
-
-func (s *SafeMap[K, V]) Lock() {
-	s.lock.Lock()
-}
-
-func (s *SafeMap[K, V]) Unlock() {
-	s.lock.Unlock()
-}
diff --git a/yoitsu/torrent.go b/yoitsu/torrent.go
index 308a69f..140f1bc 100644
--- a/yoitsu/torrent.go
+++ b/yoitsu/torrent.go
@@ -6,9 +6,9 @@ import (
 	"github.com/Fesaa/Media-Provider/config"
 	"github.com/Fesaa/Media-Provider/log"
 	"github.com/Fesaa/Media-Provider/payload"
-	"github.com/Fesaa/Media-Provider/utils"
 	"github.com/anacrolix/torrent"
 	"log/slog"
+	"math"
 	"path"
 	"time"
 )
@@ -104,11 +104,27 @@ func (t *torrentImpl) GetInfo() payload.InfoStat {
 			}
 			return t.tempTitle
 		}(),
-		Size:        utils.BytesToSize(float64(t.t.Length())),
+		Size:        BytesToSize(float64(t.t.Length())),
 		Downloading: t.t.Info() != nil,
-		Progress:    utils.Percent(t.t.BytesCompleted(), t.t.Length()),
+		Progress:    Percent(t.t.BytesCompleted(), t.t.Length()),
 		SpeedType:   payload.BYTES,
 		Speed:       payload.SpeedData{T: time.Now().Unix(), Speed: speed},
 		DownloadDir: t.GetDownloadDir(),
 	}
 }
+
+func Percent(a, b int64) int64 {
+	b = max(b, 1)
+	ratio := (float64)(a) / (float64)(b)
+	return (int64)(ratio * 100)
+}
+
+var sizes = [...]string{"Bytes", "KB", "MB", "GB", "TB"}
+
+func BytesToSize(bytes float64) string {
+	if bytes == 0 {
+		return "0 Byte"
+	}
+	i := math.Floor(math.Log(bytes) / math.Log(1024))
+	return fmt.Sprintf("%.2f %s", bytes/math.Pow(1024, i), sizes[int(i)])
+}
diff --git a/yoitsu/types.go b/yoitsu/types.go
index fee58b8..892190a 100644
--- a/yoitsu/types.go
+++ b/yoitsu/types.go
@@ -2,7 +2,7 @@ package yoitsu
 
 import (
 	"github.com/Fesaa/Media-Provider/payload"
-	"github.com/Fesaa/Media-Provider/utils"
+	tools "github.com/Fesaa/go-tools"
 	"github.com/anacrolix/torrent"
 )
 
@@ -40,7 +40,7 @@ type Yoitsu interface {
 	RemoveDownload(request payload.StopRequest) error
 
 	// GetRunningTorrents returns a map of all running torrents, indexed by their info hash
-	GetRunningTorrents() *utils.SafeMap[string, Torrent]
+	GetRunningTorrents() tools.SafeMap[string, Torrent]
 	GetQueuedTorrents() []payload.QueueStat
 
 	GetBaseDir() string
diff --git a/yoitsu/yoitsu.go b/yoitsu/yoitsu.go
index 39909e0..5c268bf 100644
--- a/yoitsu/yoitsu.go
+++ b/yoitsu/yoitsu.go
@@ -5,7 +5,7 @@ import (
 	"github.com/Fesaa/Media-Provider/config"
 	"github.com/Fesaa/Media-Provider/log"
 	"github.com/Fesaa/Media-Provider/payload"
-	"github.com/Fesaa/Media-Provider/utils"
+	tools "github.com/Fesaa/go-tools"
 	"github.com/anacrolix/torrent"
 	"github.com/anacrolix/torrent/metainfo"
 	"github.com/anacrolix/torrent/storage"
@@ -37,9 +37,9 @@ type yoitsuImpl struct {
 	maxTorrents int
 	client      *torrent.Client
 
-	torrents *utils.SafeMap[string, Torrent]
-	baseDirs *utils.SafeMap[string, string]
-	queue    utils.Queue[payload.QueueStat]
+	torrents tools.SafeMap[string, Torrent]
+	baseDirs tools.SafeMap[string, string]
+	queue    tools.Queue[payload.QueueStat]
 }
 
 func newYoitsu(c YoitsuConfig) (Yoitsu, error) {
@@ -49,9 +49,9 @@ func newYoitsu(c YoitsuConfig) (Yoitsu, error) {
 
 		dir:         dir,
 		maxTorrents: c.GetMaxConcurrentTorrents(),
-		torrents:    utils.NewSafeMap[string, Torrent](),
-		baseDirs:    utils.NewSafeMap[string, string](),
-		queue:       utils.NewQueue[payload.QueueStat](),
+		torrents:    tools.NewSafeMap[string, Torrent](),
+		baseDirs:    tools.NewSafeMap[string, string](),
+		queue:       tools.NewQueue[payload.QueueStat](),
 	}
 
 	opts := storage.NewFileClientOpts{
@@ -84,7 +84,7 @@ func (y *yoitsuImpl) AddDownload(req payload.DownloadRequest) (Torrent, error) {
 	if y.maxTorrents <= 0 {
 		return y.addDownload(req)
 	}
-	if y.torrents.Len() >= y.maxTorrents {
+	if y.torrents.Size() >= y.maxTorrents {
 		y.queue.Enqueue(req.ToQueueStat())
 		return nil, nil
 	}
@@ -103,8 +103,8 @@ func (y *yoitsuImpl) addDownload(req payload.DownloadRequest) (Torrent, error) {
 
 func (y *yoitsuImpl) processTorrent(torrentInfo *torrent.Torrent, req payload.DownloadRequest) Torrent {
 	nTorrent := newTorrent(torrentInfo, req)
-	y.torrents.Set(torrentInfo.InfoHash().String(), nTorrent)
-	y.baseDirs.Set(torrentInfo.InfoHash().String(), req.BaseDir)
+	y.torrents.Put(torrentInfo.InfoHash().String(), nTorrent)
+	y.baseDirs.Put(torrentInfo.InfoHash().String(), req.BaseDir)
 	nTorrent.WaitForInfoAndDownload()
 	return nTorrent
 }
@@ -140,8 +140,8 @@ func (y *yoitsuImpl) RemoveDownload(req payload.StopRequest) error {
 		"total", backingTorrent.Length())
 
 	backingTorrent.Drop()
-	y.torrents.Delete(infoHashString)
-	y.baseDirs.Delete(infoHashString)
+	y.torrents.Remove(infoHashString)
+	y.baseDirs.Remove(infoHashString)
 	if req.DeleteFiles {
 		go y.deleteTorrentFiles(backingTorrent, baseDir)
 	} else {
@@ -226,7 +226,7 @@ func (y *yoitsuImpl) deleteTorrentFiles(tor *torrent.Torrent, baseDir string) {
 	}
 }
 
-func (y *yoitsuImpl) GetRunningTorrents() *utils.SafeMap[string, Torrent] {
+func (y *yoitsuImpl) GetRunningTorrents() tools.SafeMap[string, Torrent] {
 	return y.torrents
 }
 
@@ -248,7 +248,7 @@ func (y *yoitsuImpl) GetTorrentDirFilePathMaker() storage.TorrentDirFilePathMake
 func (y *yoitsuImpl) cleaner() {
 	for range time.Tick(time.Second * 5) {
 		i := 0
-		y.torrents.ForEach(func(s string, m Torrent) {
+		y.torrents.ForEach(func(s string, m Torrent) bool {
 			tor := m.GetTorrent()
 			if tor.BytesCompleted() == tor.Length() && tor.BytesCompleted() > 0 {
 				i++
@@ -261,6 +261,7 @@ func (y *yoitsuImpl) cleaner() {
 					log.Error("error while cleaning up torrent", "file", s, "err", err)
 				}
 			}
+			return true
 		})
 		if i > 0 {
 			log.Trace("auto removing torrents", "amount", i)
diff --git a/yts/search.go b/yts/search.go
index dfde801..808f04f 100644
--- a/yts/search.go
+++ b/yts/search.go
@@ -4,16 +4,15 @@ import (
 	"encoding/json"
 	"fmt"
 	"github.com/Fesaa/Media-Provider/log"
+	tools "github.com/Fesaa/go-tools"
 	"io"
 	"net/http"
 	"time"
-
-	"github.com/Fesaa/Media-Provider/utils"
 )
 
 const URL string = "https://yts.mx/api/v2/list_movies.json?query_term=%s&page=%d&sort_by=%s"
 
-var cache = *utils.NewCache[SearchResult](5 * time.Minute)
+var cache = tools.NewCache[*SearchResult](5 * time.Minute)
 
 type SearchOptions struct {
 	Query string
@@ -39,7 +38,7 @@ func Search(options SearchOptions) (*SearchResult, error) {
 
 	if res := cache.Get(url); res != nil {
 		log.Trace("YTS cache hit", "url", url)
-		return res, nil
+		return res.Get(), nil
 	}
 
 	req, err := http.Get(url)
@@ -59,6 +58,6 @@
 		return nil, err
 	}
 
-	cache.Set(url, r)
+	cache.Set(url, &r)
 	return &r, nil
 }
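
Note (illustrative sketch, not part of the patch): the snippet below shows how the deleted utils helpers map onto the go-tools types as this diff uses them. It assumes the github.com/Fesaa/go-tools v0.0.3 API exactly as exercised by the hunks above (NewCache/Get/Set, NewSafeMap/Put/Contains/Remove/Size/ForEach, NewQueue/Enqueue); anything beyond those calls is an assumption.

// sketch.go - illustrative only; assumes the go-tools API as used in the patch above.
package main

import (
	"fmt"
	"time"

	tools "github.com/Fesaa/go-tools"
)

func main() {
	// Cache: Get returns a wrapper that is nil on a miss; callers unwrap with .Get()
	// (the old utils.Cache returned *T directly, hence the changed call sites).
	cache := tools.NewCache[[]string](5 * time.Minute)
	cache.Set("query", []string{"a", "b"})
	if hit := cache.Get("query"); hit != nil {
		fmt.Println(hit.Get())
	}

	// SafeMap: Put/Contains/Remove/Size replace the old Set/Has/Delete/Len, and
	// ForEach takes a func returning bool (presumably false stops the iteration).
	m := tools.NewSafeMap[string, int]()
	m.Put("answer", 42)
	if m.Contains("answer") {
		m.ForEach(func(k string, v int) bool {
			fmt.Println(k, v)
			return true
		})
	}
	m.Remove("answer")
	fmt.Println(m.Size())

	// Queue keeps the same shape as the deleted utils.Queue.
	q := tools.NewQueue[int]()
	q.Enqueue(1)
}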