Compare commits

..

34 Commits

Author SHA1 Message Date
Simon Ding
8ab33f3d54 ui refactor 2024-08-04 10:54:47 +08:00
Simon Ding
4d3b26135c fix: ui alignment 2024-08-04 10:22:47 +08:00
Simon Ding
56d5cdb2bf refactor ui resource list 2024-08-03 23:05:17 +08:00
Simon Ding
6f80da779b fix: download client id 2024-08-03 17:12:20 +08:00
Simon Ding
5fef156052 fix: indexid 2024-08-03 17:10:49 +08:00
Simon Ding
eab3a6ca2b try fix seed ratio 2024-08-03 16:47:58 +08:00
Simon Ding
ffa5c37c4c fix: name matching 2024-08-03 15:03:47 +08:00
Simon Ding
241e30152b feat: file size limiter 2024-08-03 12:31:53 +08:00
Simon Ding
16216fcc4f feat: change single episode monitoring status 2024-08-03 10:46:52 +08:00
Simon Ding
578b6a9d78 feat: proxy only affects tmdb 2024-08-03 09:54:23 +08:00
Simon Ding
f4da80c845 add testcases & add parse condition 2024-08-02 21:04:34 +08:00
Simon Ding
5a9acd3e6e ui: improve tv display 2024-08-02 19:10:53 +08:00
Simon Ding
8bfa8f84b9 fix monitor 2024-08-02 14:56:14 +08:00
Simon Ding
5b0b2ce5b0 fix: status 2024-08-02 14:39:01 +08:00
Simon Ding
b24c1a1501 ui: add monitored field 2024-08-02 14:06:44 +08:00
Simon Ding
aa320c6dcb fix: monitored 2024-08-02 13:06:38 +08:00
Simon Ding
5132714247 feat: change method to monitor episodes 2024-08-02 12:52:54 +08:00
Simon Ding
3aeecac4fb updates 2024-08-02 12:19:53 +08:00
Simon Ding
7f8c613a65 fix number formats 2024-08-02 11:14:21 +08:00
Simon Ding
c787d71fbd code refactor 2024-08-02 10:08:26 +08:00
Simon Ding
c28e16805e fix: empty list 2024-08-01 20:34:23 +08:00
Simon Ding
fc3d3878bc feat: disable indexer 2024-08-01 20:12:42 +08:00
Simon Ding
e26e86a63f feat: implement seed ratio check logic 2024-08-01 19:52:40 +08:00
Simon Ding
408ff163ef feat: improve support for pt 2024-08-01 19:12:14 +08:00
Simon Ding
35d299b60c feat: improve indexer setting 2024-08-01 17:36:40 +08:00
Simon Ding
6e002b1198 fix: add defaults 2024-08-01 13:05:05 +08:00
Simon Ding
7508a264a6 chore: ci update 2024-08-01 09:42:37 +08:00
Simon Ding
0022c9dad5 fix: umask 2024-08-01 09:39:38 +08:00
Simon Ding
654d8b50b4 chore: add more screenshot 2024-08-01 09:23:13 +08:00
Simon Ding
97ede5d9c9 feat: add badges 2024-08-01 00:01:55 +08:00
Simon Ding
4803567818 fix: remove attestations 2024-07-31 23:43:07 +08:00
Simon Ding
4e0014cb3f fix: sha256 tags in repo 2024-07-31 23:41:58 +08:00
Simon Ding
c256d46d5c test ci 2024-07-31 23:11:55 +08:00
Simon Ding
b765f16ea6 chore: updates 2024-07-31 20:59:40 +08:00
73 changed files with 2803 additions and 1145 deletions

View File

@@ -17,11 +17,6 @@ jobs:
build-and-push-image:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
attestations: write
id-token: write
steps:
- uses: actions/checkout@v4
@@ -51,9 +46,3 @@ jobs:
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- name: Generate artifact attestation
uses: actions/attest-build-provenance@v1
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true

View File

@@ -61,4 +61,4 @@ jobs:
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
push-to-registry: false

View File

@@ -1,9 +1,16 @@
# polaris
# Polaris
![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/simon-ding/polaris/go.yml)
![GitHub Release](https://img.shields.io/github/v/release/simon-ding/polaris)
![GitHub Repo stars](https://img.shields.io/github/stars/simon-ding/polaris)
![GitHub go.mod Go version](https://img.shields.io/github/go-mod/go-version/simon-ding/polaris)
Polaris 是一个电视剧和电影的追踪软件。配置好了之后当剧集或者电影播出后会第一时间下载对应的资源。支持本地存储或者webdav。
![main_page](./doc/assets/main_page.png)
![detail_page](./doc/assets/detail_page.png)
![anime](./doc/assets/anime_match.png)
交流群: https://t.me/+8R2nzrlSs2JhMDgx
@@ -18,6 +25,12 @@ Polaris 是一个电视剧和电影的追踪软件。配置好了之后,当剧
- [x] plex 刮削支持
- [x] and more...
## Todos
- [ ] qbittorrent客户端支持
- [ ] 更多通知客户端支持
- [ ] 第三方watchlist导入支持
## 使用
使用此程序参考 [【快速开始】](./doc/quick_start.md)

View File

@@ -10,7 +10,7 @@ import (
func main() {
log.Infof("------------------- Starting Polaris ---------------------")
syscall.Umask(0000) //max permission 0777
syscall.Umask(0) //max permission 0777
dbClient, err := db.Open()
if err != nil {

View File

@@ -147,6 +147,7 @@ func (c *Client) AddMediaWatchlist(m *ent.Media, episodes []int) (*ent.Media, er
SetResolution(m.Resolution).
SetTargetDir(m.TargetDir).
SetDownloadHistoryEpisodes(m.DownloadHistoryEpisodes).
SetLimiter(m.Limiter).
AddEpisodeIDs(episodes...).
Save(context.TODO())
return r, err
@@ -222,6 +223,7 @@ func (c *Client) SaveEposideDetail(d *ent.Episode) (int, error) {
SetSeasonNumber(d.SeasonNumber).
SetEpisodeNumber(d.EpisodeNumber).
SetOverview(d.Overview).
SetMonitored(d.Monitored).
SetTitle(d.Title).Save(context.TODO())
if err != nil {
return 0, errors.Wrap(err, "save episode")
@@ -247,19 +249,23 @@ type TorznabSetting struct {
ApiKey string `json:"api_key"`
}
func (c *Client) SaveTorznabInfo(name string, setting TorznabSetting) error {
data, err := json.Marshal(setting)
if err != nil {
return errors.Wrap(err, "marshal json")
func (c *Client) SaveIndexer(in *ent.Indexers) error {
if in.ID != 0 {
//update setting
return c.ent.Indexers.Update().Where(indexers.ID(in.ID)).SetName(in.Name).SetImplementation(in.Implementation).
SetPriority(in.Priority).SetSettings(in.Settings).SetSeedRatio(in.SeedRatio).SetDisabled(in.Disabled).Exec(context.Background())
}
count := c.ent.Indexers.Query().Where(indexers.Name(name)).CountX(context.TODO())
//create new one
count := c.ent.Indexers.Query().Where(indexers.Name(in.Name)).CountX(context.TODO())
if count > 0 {
c.ent.Indexers.Update().Where(indexers.Name(name)).SetSettings(string(data)).Save(context.TODO())
return err
return fmt.Errorf("name already exists: %v", in.Name)
}
_, err = c.ent.Indexers.Create().
SetName(name).SetImplementation(IndexerTorznabImpl).SetPriority(1).SetSettings(string(data)).Save(context.TODO())
_, err := c.ent.Indexers.Create().
SetName(in.Name).SetImplementation(in.Implementation).SetPriority(in.Priority).SetSettings(in.Settings).SetSeedRatio(in.SeedRatio).
SetDisabled(in.Disabled).Save(context.TODO())
if err != nil {
return errors.Wrap(err, "save db")
}
@@ -271,9 +277,21 @@ func (c *Client) DeleteTorznab(id int) {
c.ent.Indexers.Delete().Where(indexers.ID(id)).Exec(context.TODO())
}
func (c *Client) GetIndexer(id int) (*TorznabInfo, error) {
res, err := c.ent.Indexers.Query().Where(indexers.ID(id)).First(context.TODO())
if err != nil {
return nil, err
}
var ss TorznabSetting
err = json.Unmarshal([]byte(res.Settings), &ss)
if err != nil {
return nil, fmt.Errorf("unmarshal torznab %s error: %v", res.Name, err)
}
return &TorznabInfo{Indexers: res, TorznabSetting: ss}, nil
}
type TorznabInfo struct {
ID int `json:"id"`
Name string `json:"name"`
*ent.Indexers
TorznabSetting
}
@@ -289,8 +307,7 @@ func (c *Client) GetAllTorznabInfo() []*TorznabInfo {
continue
}
l = append(l, &TorznabInfo{
ID: r.ID,
Name: r.Name,
Indexers: r,
TorznabSetting: ss,
})
}
@@ -455,7 +472,8 @@ func (c *Client) SetDefaultStorageByName(name string) error {
func (c *Client) SaveHistoryRecord(h ent.History) (*ent.History, error) {
return c.ent.History.Create().SetMediaID(h.MediaID).SetEpisodeID(h.EpisodeID).SetDate(time.Now()).
SetStatus(h.Status).SetTargetDir(h.TargetDir).SetSourceTitle(h.SourceTitle).SetSaved(h.Saved).Save(context.TODO())
SetStatus(h.Status).SetTargetDir(h.TargetDir).SetSourceTitle(h.SourceTitle).SetIndexerID(h.IndexerID).
SetDownloadClientID(h.DownloadClientID).SetSaved(h.Saved).Save(context.TODO())
}
func (c *Client) SetHistoryStatus(id int, status history.Status) error {
@@ -536,3 +554,8 @@ func (c *Client) GetMovieDummyEpisode(movieId int) (*ent.Episode, error) {
func (c *Client) GetDownloadClient(id int) (*ent.DownloadClients, error) {
return c.ent.DownloadClients.Query().Where(downloadclients.ID(id)).First(context.Background())
}
func (c *Client) SetEpisodeMonitoring(id int, b bool) error {
return c.ent.Episode.Update().Where(episode.ID(id)).SetMonitored(b).Exec(context.Background())
}

BIN
doc/assets/anime_match.png Normal file
View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 774 KiB

View File

@@ -31,6 +31,8 @@ type Episode struct {
AirDate string `json:"air_date,omitempty"`
// Status holds the value of the "status" field.
Status episode.Status `json:"status,omitempty"`
// Monitored holds the value of the "monitored" field.
Monitored bool `json:"monitored"`
// Edges holds the relations/edges for other nodes in the graph.
// The values are being populated by the EpisodeQuery when eager-loading is set.
Edges EpisodeEdges `json:"edges"`
@@ -62,6 +64,8 @@ func (*Episode) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case episode.FieldMonitored:
values[i] = new(sql.NullBool)
case episode.FieldID, episode.FieldMediaID, episode.FieldSeasonNumber, episode.FieldEpisodeNumber:
values[i] = new(sql.NullInt64)
case episode.FieldTitle, episode.FieldOverview, episode.FieldAirDate, episode.FieldStatus:
@@ -129,6 +133,12 @@ func (e *Episode) assignValues(columns []string, values []any) error {
} else if value.Valid {
e.Status = episode.Status(value.String)
}
case episode.FieldMonitored:
if value, ok := values[i].(*sql.NullBool); !ok {
return fmt.Errorf("unexpected type %T for field monitored", values[i])
} else if value.Valid {
e.Monitored = value.Bool
}
default:
e.selectValues.Set(columns[i], values[i])
}
@@ -190,6 +200,9 @@ func (e *Episode) String() string {
builder.WriteString(", ")
builder.WriteString("status=")
builder.WriteString(fmt.Sprintf("%v", e.Status))
builder.WriteString(", ")
builder.WriteString("monitored=")
builder.WriteString(fmt.Sprintf("%v", e.Monitored))
builder.WriteByte(')')
return builder.String()
}

View File

@@ -28,6 +28,8 @@ const (
FieldAirDate = "air_date"
// FieldStatus holds the string denoting the status field in the database.
FieldStatus = "status"
// FieldMonitored holds the string denoting the monitored field in the database.
FieldMonitored = "monitored"
// EdgeMedia holds the string denoting the media edge name in mutations.
EdgeMedia = "media"
// Table holds the table name of the episode in the database.
@@ -51,6 +53,7 @@ var Columns = []string{
FieldOverview,
FieldAirDate,
FieldStatus,
FieldMonitored,
}
// ValidColumn reports if the column name is valid (part of the table columns).
@@ -63,6 +66,11 @@ func ValidColumn(column string) bool {
return false
}
var (
// DefaultMonitored holds the default value on creation for the "monitored" field.
DefaultMonitored bool
)
// Status defines the type for the "status" enum field.
type Status string
@@ -133,6 +141,11 @@ func ByStatus(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldStatus, opts...).ToFunc()
}
// ByMonitored orders the results by the monitored field.
func ByMonitored(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldMonitored, opts...).ToFunc()
}
// ByMediaField orders the results by media field.
func ByMediaField(field string, opts ...sql.OrderTermOption) OrderOption {
return func(s *sql.Selector) {

View File

@@ -84,6 +84,11 @@ func AirDate(v string) predicate.Episode {
return predicate.Episode(sql.FieldEQ(FieldAirDate, v))
}
// Monitored applies equality check predicate on the "monitored" field. It's identical to MonitoredEQ.
func Monitored(v bool) predicate.Episode {
return predicate.Episode(sql.FieldEQ(FieldMonitored, v))
}
// MediaIDEQ applies the EQ predicate on the "media_id" field.
func MediaIDEQ(v int) predicate.Episode {
return predicate.Episode(sql.FieldEQ(FieldMediaID, v))
@@ -409,6 +414,16 @@ func StatusNotIn(vs ...Status) predicate.Episode {
return predicate.Episode(sql.FieldNotIn(FieldStatus, vs...))
}
// MonitoredEQ applies the EQ predicate on the "monitored" field.
func MonitoredEQ(v bool) predicate.Episode {
return predicate.Episode(sql.FieldEQ(FieldMonitored, v))
}
// MonitoredNEQ applies the NEQ predicate on the "monitored" field.
func MonitoredNEQ(v bool) predicate.Episode {
return predicate.Episode(sql.FieldNEQ(FieldMonitored, v))
}
// HasMedia applies the HasEdge predicate on the "media" edge.
func HasMedia() predicate.Episode {
return predicate.Episode(func(s *sql.Selector) {

View File

@@ -78,6 +78,20 @@ func (ec *EpisodeCreate) SetNillableStatus(e *episode.Status) *EpisodeCreate {
return ec
}
// SetMonitored sets the "monitored" field.
func (ec *EpisodeCreate) SetMonitored(b bool) *EpisodeCreate {
ec.mutation.SetMonitored(b)
return ec
}
// SetNillableMonitored sets the "monitored" field if the given value is not nil.
func (ec *EpisodeCreate) SetNillableMonitored(b *bool) *EpisodeCreate {
if b != nil {
ec.SetMonitored(*b)
}
return ec
}
// SetMedia sets the "media" edge to the Media entity.
func (ec *EpisodeCreate) SetMedia(m *Media) *EpisodeCreate {
return ec.SetMediaID(m.ID)
@@ -122,6 +136,10 @@ func (ec *EpisodeCreate) defaults() {
v := episode.DefaultStatus
ec.mutation.SetStatus(v)
}
if _, ok := ec.mutation.Monitored(); !ok {
v := episode.DefaultMonitored
ec.mutation.SetMonitored(v)
}
}
// check runs all checks and user-defined validators on the builder.
@@ -149,6 +167,9 @@ func (ec *EpisodeCreate) check() error {
return &ValidationError{Name: "status", err: fmt.Errorf(`ent: validator failed for field "Episode.status": %w`, err)}
}
}
if _, ok := ec.mutation.Monitored(); !ok {
return &ValidationError{Name: "monitored", err: errors.New(`ent: missing required field "Episode.monitored"`)}
}
return nil
}
@@ -199,6 +220,10 @@ func (ec *EpisodeCreate) createSpec() (*Episode, *sqlgraph.CreateSpec) {
_spec.SetField(episode.FieldStatus, field.TypeEnum, value)
_node.Status = value
}
if value, ok := ec.mutation.Monitored(); ok {
_spec.SetField(episode.FieldMonitored, field.TypeBool, value)
_node.Monitored = value
}
if nodes := ec.mutation.MediaIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,

View File

@@ -146,6 +146,20 @@ func (eu *EpisodeUpdate) SetNillableStatus(e *episode.Status) *EpisodeUpdate {
return eu
}
// SetMonitored sets the "monitored" field.
func (eu *EpisodeUpdate) SetMonitored(b bool) *EpisodeUpdate {
eu.mutation.SetMonitored(b)
return eu
}
// SetNillableMonitored sets the "monitored" field if the given value is not nil.
func (eu *EpisodeUpdate) SetNillableMonitored(b *bool) *EpisodeUpdate {
if b != nil {
eu.SetMonitored(*b)
}
return eu
}
// SetMedia sets the "media" edge to the Media entity.
func (eu *EpisodeUpdate) SetMedia(m *Media) *EpisodeUpdate {
return eu.SetMediaID(m.ID)
@@ -235,6 +249,9 @@ func (eu *EpisodeUpdate) sqlSave(ctx context.Context) (n int, err error) {
if value, ok := eu.mutation.Status(); ok {
_spec.SetField(episode.FieldStatus, field.TypeEnum, value)
}
if value, ok := eu.mutation.Monitored(); ok {
_spec.SetField(episode.FieldMonitored, field.TypeBool, value)
}
if eu.mutation.MediaCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
@@ -402,6 +419,20 @@ func (euo *EpisodeUpdateOne) SetNillableStatus(e *episode.Status) *EpisodeUpdate
return euo
}
// SetMonitored sets the "monitored" field.
func (euo *EpisodeUpdateOne) SetMonitored(b bool) *EpisodeUpdateOne {
euo.mutation.SetMonitored(b)
return euo
}
// SetNillableMonitored sets the "monitored" field if the given value is not nil.
func (euo *EpisodeUpdateOne) SetNillableMonitored(b *bool) *EpisodeUpdateOne {
if b != nil {
euo.SetMonitored(*b)
}
return euo
}
// SetMedia sets the "media" edge to the Media entity.
func (euo *EpisodeUpdateOne) SetMedia(m *Media) *EpisodeUpdateOne {
return euo.SetMediaID(m.ID)
@@ -521,6 +552,9 @@ func (euo *EpisodeUpdateOne) sqlSave(ctx context.Context) (_node *Episode, err e
if value, ok := euo.mutation.Status(); ok {
_spec.SetField(episode.FieldStatus, field.TypeEnum, value)
}
if value, ok := euo.mutation.Monitored(); ok {
_spec.SetField(episode.FieldMonitored, field.TypeBool, value)
}
if euo.mutation.MediaCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,

View File

@@ -31,6 +31,8 @@ type History struct {
Size int `json:"size,omitempty"`
// DownloadClientID holds the value of the "download_client_id" field.
DownloadClientID int `json:"download_client_id,omitempty"`
// IndexerID holds the value of the "indexer_id" field.
IndexerID int `json:"indexer_id,omitempty"`
// Status holds the value of the "status" field.
Status history.Status `json:"status,omitempty"`
// Saved holds the value of the "saved" field.
@@ -43,7 +45,7 @@ func (*History) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case history.FieldID, history.FieldMediaID, history.FieldEpisodeID, history.FieldSize, history.FieldDownloadClientID:
case history.FieldID, history.FieldMediaID, history.FieldEpisodeID, history.FieldSize, history.FieldDownloadClientID, history.FieldIndexerID:
values[i] = new(sql.NullInt64)
case history.FieldSourceTitle, history.FieldTargetDir, history.FieldStatus, history.FieldSaved:
values[i] = new(sql.NullString)
@@ -112,6 +114,12 @@ func (h *History) assignValues(columns []string, values []any) error {
} else if value.Valid {
h.DownloadClientID = int(value.Int64)
}
case history.FieldIndexerID:
if value, ok := values[i].(*sql.NullInt64); !ok {
return fmt.Errorf("unexpected type %T for field indexer_id", values[i])
} else if value.Valid {
h.IndexerID = int(value.Int64)
}
case history.FieldStatus:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field status", values[i])
@@ -181,6 +189,9 @@ func (h *History) String() string {
builder.WriteString("download_client_id=")
builder.WriteString(fmt.Sprintf("%v", h.DownloadClientID))
builder.WriteString(", ")
builder.WriteString("indexer_id=")
builder.WriteString(fmt.Sprintf("%v", h.IndexerID))
builder.WriteString(", ")
builder.WriteString("status=")
builder.WriteString(fmt.Sprintf("%v", h.Status))
builder.WriteString(", ")

View File

@@ -27,6 +27,8 @@ const (
FieldSize = "size"
// FieldDownloadClientID holds the string denoting the download_client_id field in the database.
FieldDownloadClientID = "download_client_id"
// FieldIndexerID holds the string denoting the indexer_id field in the database.
FieldIndexerID = "indexer_id"
// FieldStatus holds the string denoting the status field in the database.
FieldStatus = "status"
// FieldSaved holds the string denoting the saved field in the database.
@@ -45,6 +47,7 @@ var Columns = []string{
FieldTargetDir,
FieldSize,
FieldDownloadClientID,
FieldIndexerID,
FieldStatus,
FieldSaved,
}
@@ -132,6 +135,11 @@ func ByDownloadClientID(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldDownloadClientID, opts...).ToFunc()
}
// ByIndexerID orders the results by the indexer_id field.
func ByIndexerID(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldIndexerID, opts...).ToFunc()
}
// ByStatus orders the results by the status field.
func ByStatus(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldStatus, opts...).ToFunc()

View File

@@ -89,6 +89,11 @@ func DownloadClientID(v int) predicate.History {
return predicate.History(sql.FieldEQ(FieldDownloadClientID, v))
}
// IndexerID applies equality check predicate on the "indexer_id" field. It's identical to IndexerIDEQ.
func IndexerID(v int) predicate.History {
return predicate.History(sql.FieldEQ(FieldIndexerID, v))
}
// Saved applies equality check predicate on the "saved" field. It's identical to SavedEQ.
func Saved(v string) predicate.History {
return predicate.History(sql.FieldEQ(FieldSaved, v))
@@ -444,6 +449,56 @@ func DownloadClientIDNotNil() predicate.History {
return predicate.History(sql.FieldNotNull(FieldDownloadClientID))
}
// IndexerIDEQ applies the EQ predicate on the "indexer_id" field.
func IndexerIDEQ(v int) predicate.History {
return predicate.History(sql.FieldEQ(FieldIndexerID, v))
}
// IndexerIDNEQ applies the NEQ predicate on the "indexer_id" field.
func IndexerIDNEQ(v int) predicate.History {
return predicate.History(sql.FieldNEQ(FieldIndexerID, v))
}
// IndexerIDIn applies the In predicate on the "indexer_id" field.
func IndexerIDIn(vs ...int) predicate.History {
return predicate.History(sql.FieldIn(FieldIndexerID, vs...))
}
// IndexerIDNotIn applies the NotIn predicate on the "indexer_id" field.
func IndexerIDNotIn(vs ...int) predicate.History {
return predicate.History(sql.FieldNotIn(FieldIndexerID, vs...))
}
// IndexerIDGT applies the GT predicate on the "indexer_id" field.
func IndexerIDGT(v int) predicate.History {
return predicate.History(sql.FieldGT(FieldIndexerID, v))
}
// IndexerIDGTE applies the GTE predicate on the "indexer_id" field.
func IndexerIDGTE(v int) predicate.History {
return predicate.History(sql.FieldGTE(FieldIndexerID, v))
}
// IndexerIDLT applies the LT predicate on the "indexer_id" field.
func IndexerIDLT(v int) predicate.History {
return predicate.History(sql.FieldLT(FieldIndexerID, v))
}
// IndexerIDLTE applies the LTE predicate on the "indexer_id" field.
func IndexerIDLTE(v int) predicate.History {
return predicate.History(sql.FieldLTE(FieldIndexerID, v))
}
// IndexerIDIsNil applies the IsNil predicate on the "indexer_id" field.
func IndexerIDIsNil() predicate.History {
return predicate.History(sql.FieldIsNull(FieldIndexerID))
}
// IndexerIDNotNil applies the NotNil predicate on the "indexer_id" field.
func IndexerIDNotNil() predicate.History {
return predicate.History(sql.FieldNotNull(FieldIndexerID))
}
// StatusEQ applies the EQ predicate on the "status" field.
func StatusEQ(v Status) predicate.History {
return predicate.History(sql.FieldEQ(FieldStatus, v))

View File

@@ -86,6 +86,20 @@ func (hc *HistoryCreate) SetNillableDownloadClientID(i *int) *HistoryCreate {
return hc
}
// SetIndexerID sets the "indexer_id" field.
func (hc *HistoryCreate) SetIndexerID(i int) *HistoryCreate {
hc.mutation.SetIndexerID(i)
return hc
}
// SetNillableIndexerID sets the "indexer_id" field if the given value is not nil.
func (hc *HistoryCreate) SetNillableIndexerID(i *int) *HistoryCreate {
if i != nil {
hc.SetIndexerID(*i)
}
return hc
}
// SetStatus sets the "status" field.
func (hc *HistoryCreate) SetStatus(h history.Status) *HistoryCreate {
hc.mutation.SetStatus(h)
@@ -226,6 +240,10 @@ func (hc *HistoryCreate) createSpec() (*History, *sqlgraph.CreateSpec) {
_spec.SetField(history.FieldDownloadClientID, field.TypeInt, value)
_node.DownloadClientID = value
}
if value, ok := hc.mutation.IndexerID(); ok {
_spec.SetField(history.FieldIndexerID, field.TypeInt, value)
_node.IndexerID = value
}
if value, ok := hc.mutation.Status(); ok {
_spec.SetField(history.FieldStatus, field.TypeEnum, value)
_node.Status = value

View File

@@ -166,6 +166,33 @@ func (hu *HistoryUpdate) ClearDownloadClientID() *HistoryUpdate {
return hu
}
// SetIndexerID sets the "indexer_id" field.
func (hu *HistoryUpdate) SetIndexerID(i int) *HistoryUpdate {
hu.mutation.ResetIndexerID()
hu.mutation.SetIndexerID(i)
return hu
}
// SetNillableIndexerID sets the "indexer_id" field if the given value is not nil.
func (hu *HistoryUpdate) SetNillableIndexerID(i *int) *HistoryUpdate {
if i != nil {
hu.SetIndexerID(*i)
}
return hu
}
// AddIndexerID adds i to the "indexer_id" field.
func (hu *HistoryUpdate) AddIndexerID(i int) *HistoryUpdate {
hu.mutation.AddIndexerID(i)
return hu
}
// ClearIndexerID clears the value of the "indexer_id" field.
func (hu *HistoryUpdate) ClearIndexerID() *HistoryUpdate {
hu.mutation.ClearIndexerID()
return hu
}
// SetStatus sets the "status" field.
func (hu *HistoryUpdate) SetStatus(h history.Status) *HistoryUpdate {
hu.mutation.SetStatus(h)
@@ -293,6 +320,15 @@ func (hu *HistoryUpdate) sqlSave(ctx context.Context) (n int, err error) {
if hu.mutation.DownloadClientIDCleared() {
_spec.ClearField(history.FieldDownloadClientID, field.TypeInt)
}
if value, ok := hu.mutation.IndexerID(); ok {
_spec.SetField(history.FieldIndexerID, field.TypeInt, value)
}
if value, ok := hu.mutation.AddedIndexerID(); ok {
_spec.AddField(history.FieldIndexerID, field.TypeInt, value)
}
if hu.mutation.IndexerIDCleared() {
_spec.ClearField(history.FieldIndexerID, field.TypeInt)
}
if value, ok := hu.mutation.Status(); ok {
_spec.SetField(history.FieldStatus, field.TypeEnum, value)
}
@@ -460,6 +496,33 @@ func (huo *HistoryUpdateOne) ClearDownloadClientID() *HistoryUpdateOne {
return huo
}
// SetIndexerID sets the "indexer_id" field.
func (huo *HistoryUpdateOne) SetIndexerID(i int) *HistoryUpdateOne {
huo.mutation.ResetIndexerID()
huo.mutation.SetIndexerID(i)
return huo
}
// SetNillableIndexerID sets the "indexer_id" field if the given value is not nil.
func (huo *HistoryUpdateOne) SetNillableIndexerID(i *int) *HistoryUpdateOne {
if i != nil {
huo.SetIndexerID(*i)
}
return huo
}
// AddIndexerID adds i to the "indexer_id" field.
func (huo *HistoryUpdateOne) AddIndexerID(i int) *HistoryUpdateOne {
huo.mutation.AddIndexerID(i)
return huo
}
// ClearIndexerID clears the value of the "indexer_id" field.
func (huo *HistoryUpdateOne) ClearIndexerID() *HistoryUpdateOne {
huo.mutation.ClearIndexerID()
return huo
}
// SetStatus sets the "status" field.
func (huo *HistoryUpdateOne) SetStatus(h history.Status) *HistoryUpdateOne {
huo.mutation.SetStatus(h)
@@ -617,6 +680,15 @@ func (huo *HistoryUpdateOne) sqlSave(ctx context.Context) (_node *History, err e
if huo.mutation.DownloadClientIDCleared() {
_spec.ClearField(history.FieldDownloadClientID, field.TypeInt)
}
if value, ok := huo.mutation.IndexerID(); ok {
_spec.SetField(history.FieldIndexerID, field.TypeInt, value)
}
if value, ok := huo.mutation.AddedIndexerID(); ok {
_spec.AddField(history.FieldIndexerID, field.TypeInt, value)
}
if huo.mutation.IndexerIDCleared() {
_spec.ClearField(history.FieldIndexerID, field.TypeInt)
}
if value, ok := huo.mutation.Status(); ok {
_spec.SetField(history.FieldStatus, field.TypeEnum, value)
}

View File

@@ -25,7 +25,11 @@ type Indexers struct {
// EnableRss holds the value of the "enable_rss" field.
EnableRss bool `json:"enable_rss,omitempty"`
// Priority holds the value of the "priority" field.
Priority int `json:"priority,omitempty"`
Priority int `json:"priority,omitempty"`
// minimal seed ratio required before removing the torrent
SeedRatio float32 `json:"seed_ratio,omitempty"`
// Disabled holds the value of the "disabled" field.
Disabled bool `json:"disabled,omitempty"`
selectValues sql.SelectValues
}
@@ -34,8 +38,10 @@ func (*Indexers) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case indexers.FieldEnableRss:
case indexers.FieldEnableRss, indexers.FieldDisabled:
values[i] = new(sql.NullBool)
case indexers.FieldSeedRatio:
values[i] = new(sql.NullFloat64)
case indexers.FieldID, indexers.FieldPriority:
values[i] = new(sql.NullInt64)
case indexers.FieldName, indexers.FieldImplementation, indexers.FieldSettings:
@@ -91,6 +97,18 @@ func (i *Indexers) assignValues(columns []string, values []any) error {
} else if value.Valid {
i.Priority = int(value.Int64)
}
case indexers.FieldSeedRatio:
if value, ok := values[j].(*sql.NullFloat64); !ok {
return fmt.Errorf("unexpected type %T for field seed_ratio", values[j])
} else if value.Valid {
i.SeedRatio = float32(value.Float64)
}
case indexers.FieldDisabled:
if value, ok := values[j].(*sql.NullBool); !ok {
return fmt.Errorf("unexpected type %T for field disabled", values[j])
} else if value.Valid {
i.Disabled = value.Bool
}
default:
i.selectValues.Set(columns[j], values[j])
}
@@ -141,6 +159,12 @@ func (i *Indexers) String() string {
builder.WriteString(", ")
builder.WriteString("priority=")
builder.WriteString(fmt.Sprintf("%v", i.Priority))
builder.WriteString(", ")
builder.WriteString("seed_ratio=")
builder.WriteString(fmt.Sprintf("%v", i.SeedRatio))
builder.WriteString(", ")
builder.WriteString("disabled=")
builder.WriteString(fmt.Sprintf("%v", i.Disabled))
builder.WriteByte(')')
return builder.String()
}

View File

@@ -21,6 +21,10 @@ const (
FieldEnableRss = "enable_rss"
// FieldPriority holds the string denoting the priority field in the database.
FieldPriority = "priority"
// FieldSeedRatio holds the string denoting the seed_ratio field in the database.
FieldSeedRatio = "seed_ratio"
// FieldDisabled holds the string denoting the disabled field in the database.
FieldDisabled = "disabled"
// Table holds the table name of the indexers in the database.
Table = "indexers"
)
@@ -33,6 +37,8 @@ var Columns = []string{
FieldSettings,
FieldEnableRss,
FieldPriority,
FieldSeedRatio,
FieldDisabled,
}
// ValidColumn reports if the column name is valid (part of the table columns).
@@ -48,6 +54,12 @@ func ValidColumn(column string) bool {
var (
// DefaultEnableRss holds the default value on creation for the "enable_rss" field.
DefaultEnableRss bool
// DefaultPriority holds the default value on creation for the "priority" field.
DefaultPriority int
// DefaultSeedRatio holds the default value on creation for the "seed_ratio" field.
DefaultSeedRatio float32
// DefaultDisabled holds the default value on creation for the "disabled" field.
DefaultDisabled bool
)
// OrderOption defines the ordering options for the Indexers queries.
@@ -82,3 +94,13 @@ func ByEnableRss(opts ...sql.OrderTermOption) OrderOption {
func ByPriority(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldPriority, opts...).ToFunc()
}
// BySeedRatio orders the results by the seed_ratio field.
func BySeedRatio(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldSeedRatio, opts...).ToFunc()
}
// ByDisabled orders the results by the disabled field.
func ByDisabled(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldDisabled, opts...).ToFunc()
}

View File

@@ -78,6 +78,16 @@ func Priority(v int) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldPriority, v))
}
// SeedRatio applies equality check predicate on the "seed_ratio" field. It's identical to SeedRatioEQ.
func SeedRatio(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldSeedRatio, v))
}
// Disabled applies equality check predicate on the "disabled" field. It's identical to DisabledEQ.
func Disabled(v bool) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldDisabled, v))
}
// NameEQ applies the EQ predicate on the "name" field.
func NameEQ(v string) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldName, v))
@@ -323,6 +333,76 @@ func PriorityLTE(v int) predicate.Indexers {
return predicate.Indexers(sql.FieldLTE(FieldPriority, v))
}
// SeedRatioEQ applies the EQ predicate on the "seed_ratio" field.
func SeedRatioEQ(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldSeedRatio, v))
}
// SeedRatioNEQ applies the NEQ predicate on the "seed_ratio" field.
func SeedRatioNEQ(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldNEQ(FieldSeedRatio, v))
}
// SeedRatioIn applies the In predicate on the "seed_ratio" field.
func SeedRatioIn(vs ...float32) predicate.Indexers {
return predicate.Indexers(sql.FieldIn(FieldSeedRatio, vs...))
}
// SeedRatioNotIn applies the NotIn predicate on the "seed_ratio" field.
func SeedRatioNotIn(vs ...float32) predicate.Indexers {
return predicate.Indexers(sql.FieldNotIn(FieldSeedRatio, vs...))
}
// SeedRatioGT applies the GT predicate on the "seed_ratio" field.
func SeedRatioGT(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldGT(FieldSeedRatio, v))
}
// SeedRatioGTE applies the GTE predicate on the "seed_ratio" field.
func SeedRatioGTE(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldGTE(FieldSeedRatio, v))
}
// SeedRatioLT applies the LT predicate on the "seed_ratio" field.
func SeedRatioLT(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldLT(FieldSeedRatio, v))
}
// SeedRatioLTE applies the LTE predicate on the "seed_ratio" field.
func SeedRatioLTE(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldLTE(FieldSeedRatio, v))
}
// SeedRatioIsNil applies the IsNil predicate on the "seed_ratio" field.
func SeedRatioIsNil() predicate.Indexers {
return predicate.Indexers(sql.FieldIsNull(FieldSeedRatio))
}
// SeedRatioNotNil applies the NotNil predicate on the "seed_ratio" field.
func SeedRatioNotNil() predicate.Indexers {
return predicate.Indexers(sql.FieldNotNull(FieldSeedRatio))
}
// DisabledEQ applies the EQ predicate on the "disabled" field.
func DisabledEQ(v bool) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldDisabled, v))
}
// DisabledNEQ applies the NEQ predicate on the "disabled" field.
func DisabledNEQ(v bool) predicate.Indexers {
return predicate.Indexers(sql.FieldNEQ(FieldDisabled, v))
}
// DisabledIsNil applies the IsNil predicate on the "disabled" field.
func DisabledIsNil() predicate.Indexers {
return predicate.Indexers(sql.FieldIsNull(FieldDisabled))
}
// DisabledNotNil applies the NotNil predicate on the "disabled" field.
func DisabledNotNil() predicate.Indexers {
return predicate.Indexers(sql.FieldNotNull(FieldDisabled))
}
// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Indexers) predicate.Indexers {
return predicate.Indexers(sql.AndPredicates(predicates...))

View File

@@ -57,6 +57,42 @@ func (ic *IndexersCreate) SetPriority(i int) *IndexersCreate {
return ic
}
// SetNillablePriority sets the "priority" field if the given value is not nil.
func (ic *IndexersCreate) SetNillablePriority(i *int) *IndexersCreate {
if i != nil {
ic.SetPriority(*i)
}
return ic
}
// SetSeedRatio sets the "seed_ratio" field.
func (ic *IndexersCreate) SetSeedRatio(f float32) *IndexersCreate {
ic.mutation.SetSeedRatio(f)
return ic
}
// SetNillableSeedRatio sets the "seed_ratio" field if the given value is not nil.
func (ic *IndexersCreate) SetNillableSeedRatio(f *float32) *IndexersCreate {
if f != nil {
ic.SetSeedRatio(*f)
}
return ic
}
// SetDisabled sets the "disabled" field.
func (ic *IndexersCreate) SetDisabled(b bool) *IndexersCreate {
ic.mutation.SetDisabled(b)
return ic
}
// SetNillableDisabled sets the "disabled" field if the given value is not nil.
func (ic *IndexersCreate) SetNillableDisabled(b *bool) *IndexersCreate {
if b != nil {
ic.SetDisabled(*b)
}
return ic
}
// Mutation returns the IndexersMutation object of the builder.
func (ic *IndexersCreate) Mutation() *IndexersMutation {
return ic.mutation
@@ -96,6 +132,18 @@ func (ic *IndexersCreate) defaults() {
v := indexers.DefaultEnableRss
ic.mutation.SetEnableRss(v)
}
if _, ok := ic.mutation.Priority(); !ok {
v := indexers.DefaultPriority
ic.mutation.SetPriority(v)
}
if _, ok := ic.mutation.SeedRatio(); !ok {
v := indexers.DefaultSeedRatio
ic.mutation.SetSeedRatio(v)
}
if _, ok := ic.mutation.Disabled(); !ok {
v := indexers.DefaultDisabled
ic.mutation.SetDisabled(v)
}
}
// check runs all checks and user-defined validators on the builder.
@@ -161,6 +209,14 @@ func (ic *IndexersCreate) createSpec() (*Indexers, *sqlgraph.CreateSpec) {
_spec.SetField(indexers.FieldPriority, field.TypeInt, value)
_node.Priority = value
}
if value, ok := ic.mutation.SeedRatio(); ok {
_spec.SetField(indexers.FieldSeedRatio, field.TypeFloat32, value)
_node.SeedRatio = value
}
if value, ok := ic.mutation.Disabled(); ok {
_spec.SetField(indexers.FieldDisabled, field.TypeBool, value)
_node.Disabled = value
}
return _node, _spec
}

View File

@@ -104,6 +104,53 @@ func (iu *IndexersUpdate) AddPriority(i int) *IndexersUpdate {
return iu
}
// SetSeedRatio sets the "seed_ratio" field.
func (iu *IndexersUpdate) SetSeedRatio(f float32) *IndexersUpdate {
iu.mutation.ResetSeedRatio()
iu.mutation.SetSeedRatio(f)
return iu
}
// SetNillableSeedRatio sets the "seed_ratio" field if the given value is not nil.
func (iu *IndexersUpdate) SetNillableSeedRatio(f *float32) *IndexersUpdate {
if f != nil {
iu.SetSeedRatio(*f)
}
return iu
}
// AddSeedRatio adds f to the "seed_ratio" field.
func (iu *IndexersUpdate) AddSeedRatio(f float32) *IndexersUpdate {
iu.mutation.AddSeedRatio(f)
return iu
}
// ClearSeedRatio clears the value of the "seed_ratio" field.
func (iu *IndexersUpdate) ClearSeedRatio() *IndexersUpdate {
iu.mutation.ClearSeedRatio()
return iu
}
// SetDisabled sets the "disabled" field.
func (iu *IndexersUpdate) SetDisabled(b bool) *IndexersUpdate {
iu.mutation.SetDisabled(b)
return iu
}
// SetNillableDisabled sets the "disabled" field if the given value is not nil.
func (iu *IndexersUpdate) SetNillableDisabled(b *bool) *IndexersUpdate {
if b != nil {
iu.SetDisabled(*b)
}
return iu
}
// ClearDisabled clears the value of the "disabled" field.
func (iu *IndexersUpdate) ClearDisabled() *IndexersUpdate {
iu.mutation.ClearDisabled()
return iu
}
// Mutation returns the IndexersMutation object of the builder.
func (iu *IndexersUpdate) Mutation() *IndexersMutation {
return iu.mutation
@@ -163,6 +210,21 @@ func (iu *IndexersUpdate) sqlSave(ctx context.Context) (n int, err error) {
if value, ok := iu.mutation.AddedPriority(); ok {
_spec.AddField(indexers.FieldPriority, field.TypeInt, value)
}
if value, ok := iu.mutation.SeedRatio(); ok {
_spec.SetField(indexers.FieldSeedRatio, field.TypeFloat32, value)
}
if value, ok := iu.mutation.AddedSeedRatio(); ok {
_spec.AddField(indexers.FieldSeedRatio, field.TypeFloat32, value)
}
if iu.mutation.SeedRatioCleared() {
_spec.ClearField(indexers.FieldSeedRatio, field.TypeFloat32)
}
if value, ok := iu.mutation.Disabled(); ok {
_spec.SetField(indexers.FieldDisabled, field.TypeBool, value)
}
if iu.mutation.DisabledCleared() {
_spec.ClearField(indexers.FieldDisabled, field.TypeBool)
}
if n, err = sqlgraph.UpdateNodes(ctx, iu.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{indexers.Label}
@@ -260,6 +322,53 @@ func (iuo *IndexersUpdateOne) AddPriority(i int) *IndexersUpdateOne {
return iuo
}
// SetSeedRatio sets the "seed_ratio" field.
func (iuo *IndexersUpdateOne) SetSeedRatio(f float32) *IndexersUpdateOne {
iuo.mutation.ResetSeedRatio()
iuo.mutation.SetSeedRatio(f)
return iuo
}
// SetNillableSeedRatio sets the "seed_ratio" field if the given value is not nil.
func (iuo *IndexersUpdateOne) SetNillableSeedRatio(f *float32) *IndexersUpdateOne {
if f != nil {
iuo.SetSeedRatio(*f)
}
return iuo
}
// AddSeedRatio adds f to the "seed_ratio" field.
func (iuo *IndexersUpdateOne) AddSeedRatio(f float32) *IndexersUpdateOne {
iuo.mutation.AddSeedRatio(f)
return iuo
}
// ClearSeedRatio clears the value of the "seed_ratio" field.
func (iuo *IndexersUpdateOne) ClearSeedRatio() *IndexersUpdateOne {
iuo.mutation.ClearSeedRatio()
return iuo
}
// SetDisabled sets the "disabled" field.
func (iuo *IndexersUpdateOne) SetDisabled(b bool) *IndexersUpdateOne {
iuo.mutation.SetDisabled(b)
return iuo
}
// SetNillableDisabled sets the "disabled" field if the given value is not nil.
func (iuo *IndexersUpdateOne) SetNillableDisabled(b *bool) *IndexersUpdateOne {
if b != nil {
iuo.SetDisabled(*b)
}
return iuo
}
// ClearDisabled clears the value of the "disabled" field.
func (iuo *IndexersUpdateOne) ClearDisabled() *IndexersUpdateOne {
iuo.mutation.ClearDisabled()
return iuo
}
// Mutation returns the IndexersMutation object of the builder.
func (iuo *IndexersUpdateOne) Mutation() *IndexersMutation {
return iuo.mutation
@@ -349,6 +458,21 @@ func (iuo *IndexersUpdateOne) sqlSave(ctx context.Context) (_node *Indexers, err
if value, ok := iuo.mutation.AddedPriority(); ok {
_spec.AddField(indexers.FieldPriority, field.TypeInt, value)
}
if value, ok := iuo.mutation.SeedRatio(); ok {
_spec.SetField(indexers.FieldSeedRatio, field.TypeFloat32, value)
}
if value, ok := iuo.mutation.AddedSeedRatio(); ok {
_spec.AddField(indexers.FieldSeedRatio, field.TypeFloat32, value)
}
if iuo.mutation.SeedRatioCleared() {
_spec.ClearField(indexers.FieldSeedRatio, field.TypeFloat32)
}
if value, ok := iuo.mutation.Disabled(); ok {
_spec.SetField(indexers.FieldDisabled, field.TypeBool, value)
}
if iuo.mutation.DisabledCleared() {
_spec.ClearField(indexers.FieldDisabled, field.TypeBool)
}
_node = &Indexers{config: iuo.config}
_spec.Assign = _node.assignValues
_spec.ScanValues = _node.scanValues

View File

@@ -3,8 +3,10 @@
package ent
import (
"encoding/json"
"fmt"
"polaris/ent/media"
"polaris/ent/schema"
"strings"
"time"
@@ -43,6 +45,8 @@ type Media struct {
TargetDir string `json:"target_dir,omitempty"`
// tv series only
DownloadHistoryEpisodes bool `json:"download_history_episodes,omitempty"`
// Limiter holds the value of the "limiter" field.
Limiter *schema.MediaLimiter `json:"limiter,omitempty"`
// Edges holds the relations/edges for other nodes in the graph.
// The values are being populated by the MediaQuery when eager-loading is set.
Edges MediaEdges `json:"edges"`
@@ -72,6 +76,8 @@ func (*Media) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case media.FieldLimiter:
values[i] = new([]byte)
case media.FieldDownloadHistoryEpisodes:
values[i] = new(sql.NullBool)
case media.FieldID, media.FieldTmdbID, media.FieldStorageID:
@@ -179,6 +185,14 @@ func (m *Media) assignValues(columns []string, values []any) error {
} else if value.Valid {
m.DownloadHistoryEpisodes = value.Bool
}
case media.FieldLimiter:
if value, ok := values[i].(*[]byte); !ok {
return fmt.Errorf("unexpected type %T for field limiter", values[i])
} else if value != nil && len(*value) > 0 {
if err := json.Unmarshal(*value, &m.Limiter); err != nil {
return fmt.Errorf("unmarshal field limiter: %w", err)
}
}
default:
m.selectValues.Set(columns[i], values[i])
}
@@ -258,6 +272,9 @@ func (m *Media) String() string {
builder.WriteString(", ")
builder.WriteString("download_history_episodes=")
builder.WriteString(fmt.Sprintf("%v", m.DownloadHistoryEpisodes))
builder.WriteString(", ")
builder.WriteString("limiter=")
builder.WriteString(fmt.Sprintf("%v", m.Limiter))
builder.WriteByte(')')
return builder.String()
}

View File

@@ -41,6 +41,8 @@ const (
FieldTargetDir = "target_dir"
// FieldDownloadHistoryEpisodes holds the string denoting the download_history_episodes field in the database.
FieldDownloadHistoryEpisodes = "download_history_episodes"
// FieldLimiter holds the string denoting the limiter field in the database.
FieldLimiter = "limiter"
// EdgeEpisodes holds the string denoting the episodes edge name in mutations.
EdgeEpisodes = "episodes"
// Table holds the table name of the media in the database.
@@ -70,6 +72,7 @@ var Columns = []string{
FieldStorageID,
FieldTargetDir,
FieldDownloadHistoryEpisodes,
FieldLimiter,
}
// ValidColumn reports if the column name is valid (part of the table columns).

View File

@@ -775,6 +775,16 @@ func DownloadHistoryEpisodesNotNil() predicate.Media {
return predicate.Media(sql.FieldNotNull(FieldDownloadHistoryEpisodes))
}
// LimiterIsNil applies the IsNil predicate on the "limiter" field.
func LimiterIsNil() predicate.Media {
return predicate.Media(sql.FieldIsNull(FieldLimiter))
}
// LimiterNotNil applies the NotNil predicate on the "limiter" field.
func LimiterNotNil() predicate.Media {
return predicate.Media(sql.FieldNotNull(FieldLimiter))
}
// HasEpisodes applies the HasEdge predicate on the "episodes" edge.
func HasEpisodes() predicate.Media {
return predicate.Media(func(s *sql.Selector) {

View File

@@ -8,6 +8,7 @@ import (
"fmt"
"polaris/ent/episode"
"polaris/ent/media"
"polaris/ent/schema"
"time"
"entgo.io/ent/dialect/sql/sqlgraph"
@@ -155,6 +156,12 @@ func (mc *MediaCreate) SetNillableDownloadHistoryEpisodes(b *bool) *MediaCreate
return mc
}
// SetLimiter sets the "limiter" field.
func (mc *MediaCreate) SetLimiter(sl *schema.MediaLimiter) *MediaCreate {
mc.mutation.SetLimiter(sl)
return mc
}
// AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
func (mc *MediaCreate) AddEpisodeIDs(ids ...int) *MediaCreate {
mc.mutation.AddEpisodeIDs(ids...)
@@ -340,6 +347,10 @@ func (mc *MediaCreate) createSpec() (*Media, *sqlgraph.CreateSpec) {
_spec.SetField(media.FieldDownloadHistoryEpisodes, field.TypeBool, value)
_node.DownloadHistoryEpisodes = value
}
if value, ok := mc.mutation.Limiter(); ok {
_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
_node.Limiter = value
}
if nodes := mc.mutation.EpisodesIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M,

View File

@@ -9,6 +9,7 @@ import (
"polaris/ent/episode"
"polaris/ent/media"
"polaris/ent/predicate"
"polaris/ent/schema"
"time"
"entgo.io/ent/dialect/sql"
@@ -249,6 +250,18 @@ func (mu *MediaUpdate) ClearDownloadHistoryEpisodes() *MediaUpdate {
return mu
}
// SetLimiter sets the "limiter" field.
func (mu *MediaUpdate) SetLimiter(sl *schema.MediaLimiter) *MediaUpdate {
mu.mutation.SetLimiter(sl)
return mu
}
// ClearLimiter clears the value of the "limiter" field.
func (mu *MediaUpdate) ClearLimiter() *MediaUpdate {
mu.mutation.ClearLimiter()
return mu
}
// AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
func (mu *MediaUpdate) AddEpisodeIDs(ids ...int) *MediaUpdate {
mu.mutation.AddEpisodeIDs(ids...)
@@ -401,6 +414,12 @@ func (mu *MediaUpdate) sqlSave(ctx context.Context) (n int, err error) {
if mu.mutation.DownloadHistoryEpisodesCleared() {
_spec.ClearField(media.FieldDownloadHistoryEpisodes, field.TypeBool)
}
if value, ok := mu.mutation.Limiter(); ok {
_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
}
if mu.mutation.LimiterCleared() {
_spec.ClearField(media.FieldLimiter, field.TypeJSON)
}
if mu.mutation.EpisodesCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M,
@@ -686,6 +705,18 @@ func (muo *MediaUpdateOne) ClearDownloadHistoryEpisodes() *MediaUpdateOne {
return muo
}
// SetLimiter sets the "limiter" field.
func (muo *MediaUpdateOne) SetLimiter(sl *schema.MediaLimiter) *MediaUpdateOne {
muo.mutation.SetLimiter(sl)
return muo
}
// ClearLimiter clears the value of the "limiter" field.
func (muo *MediaUpdateOne) ClearLimiter() *MediaUpdateOne {
muo.mutation.ClearLimiter()
return muo
}
// AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
func (muo *MediaUpdateOne) AddEpisodeIDs(ids ...int) *MediaUpdateOne {
muo.mutation.AddEpisodeIDs(ids...)
@@ -868,6 +899,12 @@ func (muo *MediaUpdateOne) sqlSave(ctx context.Context) (_node *Media, err error
if muo.mutation.DownloadHistoryEpisodesCleared() {
_spec.ClearField(media.FieldDownloadHistoryEpisodes, field.TypeBool)
}
if value, ok := muo.mutation.Limiter(); ok {
_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
}
if muo.mutation.LimiterCleared() {
_spec.ClearField(media.FieldLimiter, field.TypeJSON)
}
if muo.mutation.EpisodesCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M,

View File

@@ -38,6 +38,7 @@ var (
{Name: "overview", Type: field.TypeString},
{Name: "air_date", Type: field.TypeString},
{Name: "status", Type: field.TypeEnum, Enums: []string{"missing", "downloading", "downloaded"}, Default: "missing"},
{Name: "monitored", Type: field.TypeBool, Default: false},
{Name: "media_id", Type: field.TypeInt, Nullable: true},
}
// EpisodesTable holds the schema information for the "episodes" table.
@@ -48,7 +49,7 @@ var (
ForeignKeys: []*schema.ForeignKey{
{
Symbol: "episodes_media_episodes",
Columns: []*schema.Column{EpisodesColumns[7]},
Columns: []*schema.Column{EpisodesColumns[8]},
RefColumns: []*schema.Column{MediaColumns[0]},
OnDelete: schema.SetNull,
},
@@ -64,6 +65,7 @@ var (
{Name: "target_dir", Type: field.TypeString},
{Name: "size", Type: field.TypeInt, Default: 0},
{Name: "download_client_id", Type: field.TypeInt, Nullable: true},
{Name: "indexer_id", Type: field.TypeInt, Nullable: true},
{Name: "status", Type: field.TypeEnum, Enums: []string{"running", "success", "fail", "uploading"}},
{Name: "saved", Type: field.TypeString, Nullable: true},
}
@@ -80,7 +82,9 @@ var (
{Name: "implementation", Type: field.TypeString},
{Name: "settings", Type: field.TypeString},
{Name: "enable_rss", Type: field.TypeBool, Default: true},
{Name: "priority", Type: field.TypeInt},
{Name: "priority", Type: field.TypeInt, Default: 50},
{Name: "seed_ratio", Type: field.TypeFloat32, Nullable: true, Default: 0},
{Name: "disabled", Type: field.TypeBool, Nullable: true, Default: false},
}
// IndexersTable holds the schema information for the "indexers" table.
IndexersTable = &schema.Table{
@@ -104,6 +108,7 @@ var (
{Name: "storage_id", Type: field.TypeInt, Nullable: true},
{Name: "target_dir", Type: field.TypeString, Nullable: true},
{Name: "download_history_episodes", Type: field.TypeBool, Nullable: true, Default: false},
{Name: "limiter", Type: field.TypeJSON, Nullable: true},
}
// MediaTable holds the schema information for the "media" table.
MediaTable = &schema.Table{

View File

@@ -13,6 +13,7 @@ import (
"polaris/ent/media"
"polaris/ent/notificationclient"
"polaris/ent/predicate"
"polaris/ent/schema"
"polaris/ent/settings"
"polaris/ent/storage"
"sync"
@@ -921,6 +922,7 @@ type EpisodeMutation struct {
overview *string
air_date *string
status *episode.Status
monitored *bool
clearedFields map[string]struct{}
media *int
clearedmedia bool
@@ -1332,6 +1334,42 @@ func (m *EpisodeMutation) ResetStatus() {
m.status = nil
}
// SetMonitored sets the "monitored" field.
func (m *EpisodeMutation) SetMonitored(b bool) {
m.monitored = &b
}
// Monitored returns the value of the "monitored" field in the mutation.
func (m *EpisodeMutation) Monitored() (r bool, exists bool) {
v := m.monitored
if v == nil {
return
}
return *v, true
}
// OldMonitored returns the old "monitored" field's value of the Episode entity.
// If the Episode object wasn't provided to the builder, the object is fetched from the database.
// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
func (m *EpisodeMutation) OldMonitored(ctx context.Context) (v bool, err error) {
if !m.op.Is(OpUpdateOne) {
return v, errors.New("OldMonitored is only allowed on UpdateOne operations")
}
if m.id == nil || m.oldValue == nil {
return v, errors.New("OldMonitored requires an ID field in the mutation")
}
oldValue, err := m.oldValue(ctx)
if err != nil {
return v, fmt.Errorf("querying old value for OldMonitored: %w", err)
}
return oldValue.Monitored, nil
}
// ResetMonitored resets all changes to the "monitored" field.
func (m *EpisodeMutation) ResetMonitored() {
m.monitored = nil
}
// ClearMedia clears the "media" edge to the Media entity.
func (m *EpisodeMutation) ClearMedia() {
m.clearedmedia = true
@@ -1393,7 +1431,7 @@ func (m *EpisodeMutation) Type() string {
// order to get all numeric fields that were incremented/decremented, call
// AddedFields().
func (m *EpisodeMutation) Fields() []string {
fields := make([]string, 0, 7)
fields := make([]string, 0, 8)
if m.media != nil {
fields = append(fields, episode.FieldMediaID)
}
@@ -1415,6 +1453,9 @@ func (m *EpisodeMutation) Fields() []string {
if m.status != nil {
fields = append(fields, episode.FieldStatus)
}
if m.monitored != nil {
fields = append(fields, episode.FieldMonitored)
}
return fields
}
@@ -1437,6 +1478,8 @@ func (m *EpisodeMutation) Field(name string) (ent.Value, bool) {
return m.AirDate()
case episode.FieldStatus:
return m.Status()
case episode.FieldMonitored:
return m.Monitored()
}
return nil, false
}
@@ -1460,6 +1503,8 @@ func (m *EpisodeMutation) OldField(ctx context.Context, name string) (ent.Value,
return m.OldAirDate(ctx)
case episode.FieldStatus:
return m.OldStatus(ctx)
case episode.FieldMonitored:
return m.OldMonitored(ctx)
}
return nil, fmt.Errorf("unknown Episode field %s", name)
}
@@ -1518,6 +1563,13 @@ func (m *EpisodeMutation) SetField(name string, value ent.Value) error {
}
m.SetStatus(v)
return nil
case episode.FieldMonitored:
v, ok := value.(bool)
if !ok {
return fmt.Errorf("unexpected type %T for field %s", value, name)
}
m.SetMonitored(v)
return nil
}
return fmt.Errorf("unknown Episode field %s", name)
}
@@ -1624,6 +1676,9 @@ func (m *EpisodeMutation) ResetField(name string) error {
case episode.FieldStatus:
m.ResetStatus()
return nil
case episode.FieldMonitored:
m.ResetMonitored()
return nil
}
return fmt.Errorf("unknown Episode field %s", name)
}
@@ -1719,6 +1774,8 @@ type HistoryMutation struct {
addsize *int
download_client_id *int
adddownload_client_id *int
indexer_id *int
addindexer_id *int
status *history.Status
saved *string
clearedFields map[string]struct{}
@@ -2185,6 +2242,76 @@ func (m *HistoryMutation) ResetDownloadClientID() {
delete(m.clearedFields, history.FieldDownloadClientID)
}
// SetIndexerID sets the "indexer_id" field.
func (m *HistoryMutation) SetIndexerID(i int) {
m.indexer_id = &i
m.addindexer_id = nil
}
// IndexerID returns the value of the "indexer_id" field in the mutation.
func (m *HistoryMutation) IndexerID() (r int, exists bool) {
v := m.indexer_id
if v == nil {
return
}
return *v, true
}
// OldIndexerID returns the old "indexer_id" field's value of the History entity.
// If the History object wasn't provided to the builder, the object is fetched from the database.
// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
func (m *HistoryMutation) OldIndexerID(ctx context.Context) (v int, err error) {
if !m.op.Is(OpUpdateOne) {
return v, errors.New("OldIndexerID is only allowed on UpdateOne operations")
}
if m.id == nil || m.oldValue == nil {
return v, errors.New("OldIndexerID requires an ID field in the mutation")
}
oldValue, err := m.oldValue(ctx)
if err != nil {
return v, fmt.Errorf("querying old value for OldIndexerID: %w", err)
}
return oldValue.IndexerID, nil
}
// AddIndexerID adds i to the "indexer_id" field.
func (m *HistoryMutation) AddIndexerID(i int) {
if m.addindexer_id != nil {
*m.addindexer_id += i
} else {
m.addindexer_id = &i
}
}
// AddedIndexerID returns the value that was added to the "indexer_id" field in this mutation.
func (m *HistoryMutation) AddedIndexerID() (r int, exists bool) {
v := m.addindexer_id
if v == nil {
return
}
return *v, true
}
// ClearIndexerID clears the value of the "indexer_id" field.
func (m *HistoryMutation) ClearIndexerID() {
m.indexer_id = nil
m.addindexer_id = nil
m.clearedFields[history.FieldIndexerID] = struct{}{}
}
// IndexerIDCleared returns if the "indexer_id" field was cleared in this mutation.
func (m *HistoryMutation) IndexerIDCleared() bool {
_, ok := m.clearedFields[history.FieldIndexerID]
return ok
}
// ResetIndexerID resets all changes to the "indexer_id" field.
func (m *HistoryMutation) ResetIndexerID() {
m.indexer_id = nil
m.addindexer_id = nil
delete(m.clearedFields, history.FieldIndexerID)
}
// SetStatus sets the "status" field.
func (m *HistoryMutation) SetStatus(h history.Status) {
m.status = &h
@@ -2304,7 +2431,7 @@ func (m *HistoryMutation) Type() string {
// order to get all numeric fields that were incremented/decremented, call
// AddedFields().
func (m *HistoryMutation) Fields() []string {
fields := make([]string, 0, 9)
fields := make([]string, 0, 10)
if m.media_id != nil {
fields = append(fields, history.FieldMediaID)
}
@@ -2326,6 +2453,9 @@ func (m *HistoryMutation) Fields() []string {
if m.download_client_id != nil {
fields = append(fields, history.FieldDownloadClientID)
}
if m.indexer_id != nil {
fields = append(fields, history.FieldIndexerID)
}
if m.status != nil {
fields = append(fields, history.FieldStatus)
}
@@ -2354,6 +2484,8 @@ func (m *HistoryMutation) Field(name string) (ent.Value, bool) {
return m.Size()
case history.FieldDownloadClientID:
return m.DownloadClientID()
case history.FieldIndexerID:
return m.IndexerID()
case history.FieldStatus:
return m.Status()
case history.FieldSaved:
@@ -2381,6 +2513,8 @@ func (m *HistoryMutation) OldField(ctx context.Context, name string) (ent.Value,
return m.OldSize(ctx)
case history.FieldDownloadClientID:
return m.OldDownloadClientID(ctx)
case history.FieldIndexerID:
return m.OldIndexerID(ctx)
case history.FieldStatus:
return m.OldStatus(ctx)
case history.FieldSaved:
@@ -2443,6 +2577,13 @@ func (m *HistoryMutation) SetField(name string, value ent.Value) error {
}
m.SetDownloadClientID(v)
return nil
case history.FieldIndexerID:
v, ok := value.(int)
if !ok {
return fmt.Errorf("unexpected type %T for field %s", value, name)
}
m.SetIndexerID(v)
return nil
case history.FieldStatus:
v, ok := value.(history.Status)
if !ok {
@@ -2477,6 +2618,9 @@ func (m *HistoryMutation) AddedFields() []string {
if m.adddownload_client_id != nil {
fields = append(fields, history.FieldDownloadClientID)
}
if m.addindexer_id != nil {
fields = append(fields, history.FieldIndexerID)
}
return fields
}
@@ -2493,6 +2637,8 @@ func (m *HistoryMutation) AddedField(name string) (ent.Value, bool) {
return m.AddedSize()
case history.FieldDownloadClientID:
return m.AddedDownloadClientID()
case history.FieldIndexerID:
return m.AddedIndexerID()
}
return nil, false
}
@@ -2530,6 +2676,13 @@ func (m *HistoryMutation) AddField(name string, value ent.Value) error {
}
m.AddDownloadClientID(v)
return nil
case history.FieldIndexerID:
v, ok := value.(int)
if !ok {
return fmt.Errorf("unexpected type %T for field %s", value, name)
}
m.AddIndexerID(v)
return nil
}
return fmt.Errorf("unknown History numeric field %s", name)
}
@@ -2544,6 +2697,9 @@ func (m *HistoryMutation) ClearedFields() []string {
if m.FieldCleared(history.FieldDownloadClientID) {
fields = append(fields, history.FieldDownloadClientID)
}
if m.FieldCleared(history.FieldIndexerID) {
fields = append(fields, history.FieldIndexerID)
}
if m.FieldCleared(history.FieldSaved) {
fields = append(fields, history.FieldSaved)
}
@@ -2567,6 +2723,9 @@ func (m *HistoryMutation) ClearField(name string) error {
case history.FieldDownloadClientID:
m.ClearDownloadClientID()
return nil
case history.FieldIndexerID:
m.ClearIndexerID()
return nil
case history.FieldSaved:
m.ClearSaved()
return nil
@@ -2599,6 +2758,9 @@ func (m *HistoryMutation) ResetField(name string) error {
case history.FieldDownloadClientID:
m.ResetDownloadClientID()
return nil
case history.FieldIndexerID:
m.ResetIndexerID()
return nil
case history.FieldStatus:
m.ResetStatus()
return nil
@@ -2669,6 +2831,9 @@ type IndexersMutation struct {
enable_rss *bool
priority *int
addpriority *int
seed_ratio *float32
addseed_ratio *float32
disabled *bool
clearedFields map[string]struct{}
done bool
oldValue func(context.Context) (*Indexers, error)
@@ -2973,6 +3138,125 @@ func (m *IndexersMutation) ResetPriority() {
m.addpriority = nil
}
// SetSeedRatio sets the "seed_ratio" field.
func (m *IndexersMutation) SetSeedRatio(f float32) {
m.seed_ratio = &f
m.addseed_ratio = nil
}
// SeedRatio returns the value of the "seed_ratio" field in the mutation.
func (m *IndexersMutation) SeedRatio() (r float32, exists bool) {
v := m.seed_ratio
if v == nil {
return
}
return *v, true
}
// OldSeedRatio returns the old "seed_ratio" field's value of the Indexers entity.
// If the Indexers object wasn't provided to the builder, the object is fetched from the database.
// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
func (m *IndexersMutation) OldSeedRatio(ctx context.Context) (v float32, err error) {
if !m.op.Is(OpUpdateOne) {
return v, errors.New("OldSeedRatio is only allowed on UpdateOne operations")
}
if m.id == nil || m.oldValue == nil {
return v, errors.New("OldSeedRatio requires an ID field in the mutation")
}
oldValue, err := m.oldValue(ctx)
if err != nil {
return v, fmt.Errorf("querying old value for OldSeedRatio: %w", err)
}
return oldValue.SeedRatio, nil
}
// AddSeedRatio adds f to the "seed_ratio" field.
func (m *IndexersMutation) AddSeedRatio(f float32) {
if m.addseed_ratio != nil {
*m.addseed_ratio += f
} else {
m.addseed_ratio = &f
}
}
// AddedSeedRatio returns the value that was added to the "seed_ratio" field in this mutation.
func (m *IndexersMutation) AddedSeedRatio() (r float32, exists bool) {
v := m.addseed_ratio
if v == nil {
return
}
return *v, true
}
// ClearSeedRatio clears the value of the "seed_ratio" field.
func (m *IndexersMutation) ClearSeedRatio() {
m.seed_ratio = nil
m.addseed_ratio = nil
m.clearedFields[indexers.FieldSeedRatio] = struct{}{}
}
// SeedRatioCleared returns if the "seed_ratio" field was cleared in this mutation.
func (m *IndexersMutation) SeedRatioCleared() bool {
_, ok := m.clearedFields[indexers.FieldSeedRatio]
return ok
}
// ResetSeedRatio resets all changes to the "seed_ratio" field.
func (m *IndexersMutation) ResetSeedRatio() {
m.seed_ratio = nil
m.addseed_ratio = nil
delete(m.clearedFields, indexers.FieldSeedRatio)
}
// SetDisabled sets the "disabled" field.
func (m *IndexersMutation) SetDisabled(b bool) {
m.disabled = &b
}
// Disabled returns the value of the "disabled" field in the mutation.
func (m *IndexersMutation) Disabled() (r bool, exists bool) {
v := m.disabled
if v == nil {
return
}
return *v, true
}
// OldDisabled returns the old "disabled" field's value of the Indexers entity.
// If the Indexers object wasn't provided to the builder, the object is fetched from the database.
// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
func (m *IndexersMutation) OldDisabled(ctx context.Context) (v bool, err error) {
if !m.op.Is(OpUpdateOne) {
return v, errors.New("OldDisabled is only allowed on UpdateOne operations")
}
if m.id == nil || m.oldValue == nil {
return v, errors.New("OldDisabled requires an ID field in the mutation")
}
oldValue, err := m.oldValue(ctx)
if err != nil {
return v, fmt.Errorf("querying old value for OldDisabled: %w", err)
}
return oldValue.Disabled, nil
}
// ClearDisabled clears the value of the "disabled" field.
func (m *IndexersMutation) ClearDisabled() {
m.disabled = nil
m.clearedFields[indexers.FieldDisabled] = struct{}{}
}
// DisabledCleared returns if the "disabled" field was cleared in this mutation.
func (m *IndexersMutation) DisabledCleared() bool {
_, ok := m.clearedFields[indexers.FieldDisabled]
return ok
}
// ResetDisabled resets all changes to the "disabled" field.
func (m *IndexersMutation) ResetDisabled() {
m.disabled = nil
delete(m.clearedFields, indexers.FieldDisabled)
}
// Where appends a list predicates to the IndexersMutation builder.
func (m *IndexersMutation) Where(ps ...predicate.Indexers) {
m.predicates = append(m.predicates, ps...)
@@ -3007,7 +3291,7 @@ func (m *IndexersMutation) Type() string {
// order to get all numeric fields that were incremented/decremented, call
// AddedFields().
func (m *IndexersMutation) Fields() []string {
fields := make([]string, 0, 5)
fields := make([]string, 0, 7)
if m.name != nil {
fields = append(fields, indexers.FieldName)
}
@@ -3023,6 +3307,12 @@ func (m *IndexersMutation) Fields() []string {
if m.priority != nil {
fields = append(fields, indexers.FieldPriority)
}
if m.seed_ratio != nil {
fields = append(fields, indexers.FieldSeedRatio)
}
if m.disabled != nil {
fields = append(fields, indexers.FieldDisabled)
}
return fields
}
@@ -3041,6 +3331,10 @@ func (m *IndexersMutation) Field(name string) (ent.Value, bool) {
return m.EnableRss()
case indexers.FieldPriority:
return m.Priority()
case indexers.FieldSeedRatio:
return m.SeedRatio()
case indexers.FieldDisabled:
return m.Disabled()
}
return nil, false
}
@@ -3060,6 +3354,10 @@ func (m *IndexersMutation) OldField(ctx context.Context, name string) (ent.Value
return m.OldEnableRss(ctx)
case indexers.FieldPriority:
return m.OldPriority(ctx)
case indexers.FieldSeedRatio:
return m.OldSeedRatio(ctx)
case indexers.FieldDisabled:
return m.OldDisabled(ctx)
}
return nil, fmt.Errorf("unknown Indexers field %s", name)
}
@@ -3104,6 +3402,20 @@ func (m *IndexersMutation) SetField(name string, value ent.Value) error {
}
m.SetPriority(v)
return nil
case indexers.FieldSeedRatio:
v, ok := value.(float32)
if !ok {
return fmt.Errorf("unexpected type %T for field %s", value, name)
}
m.SetSeedRatio(v)
return nil
case indexers.FieldDisabled:
v, ok := value.(bool)
if !ok {
return fmt.Errorf("unexpected type %T for field %s", value, name)
}
m.SetDisabled(v)
return nil
}
return fmt.Errorf("unknown Indexers field %s", name)
}
@@ -3115,6 +3427,9 @@ func (m *IndexersMutation) AddedFields() []string {
if m.addpriority != nil {
fields = append(fields, indexers.FieldPriority)
}
if m.addseed_ratio != nil {
fields = append(fields, indexers.FieldSeedRatio)
}
return fields
}
@@ -3125,6 +3440,8 @@ func (m *IndexersMutation) AddedField(name string) (ent.Value, bool) {
switch name {
case indexers.FieldPriority:
return m.AddedPriority()
case indexers.FieldSeedRatio:
return m.AddedSeedRatio()
}
return nil, false
}
@@ -3141,6 +3458,13 @@ func (m *IndexersMutation) AddField(name string, value ent.Value) error {
}
m.AddPriority(v)
return nil
case indexers.FieldSeedRatio:
v, ok := value.(float32)
if !ok {
return fmt.Errorf("unexpected type %T for field %s", value, name)
}
m.AddSeedRatio(v)
return nil
}
return fmt.Errorf("unknown Indexers numeric field %s", name)
}
@@ -3148,7 +3472,14 @@ func (m *IndexersMutation) AddField(name string, value ent.Value) error {
// ClearedFields returns all nullable fields that were cleared during this
// mutation.
func (m *IndexersMutation) ClearedFields() []string {
return nil
var fields []string
if m.FieldCleared(indexers.FieldSeedRatio) {
fields = append(fields, indexers.FieldSeedRatio)
}
if m.FieldCleared(indexers.FieldDisabled) {
fields = append(fields, indexers.FieldDisabled)
}
return fields
}
// FieldCleared returns a boolean indicating if a field with the given name was
@@ -3161,6 +3492,14 @@ func (m *IndexersMutation) FieldCleared(name string) bool {
// ClearField clears the value of the field with the given name. It returns an
// error if the field is not defined in the schema.
func (m *IndexersMutation) ClearField(name string) error {
switch name {
case indexers.FieldSeedRatio:
m.ClearSeedRatio()
return nil
case indexers.FieldDisabled:
m.ClearDisabled()
return nil
}
return fmt.Errorf("unknown Indexers nullable field %s", name)
}
@@ -3183,6 +3522,12 @@ func (m *IndexersMutation) ResetField(name string) error {
case indexers.FieldPriority:
m.ResetPriority()
return nil
case indexers.FieldSeedRatio:
m.ResetSeedRatio()
return nil
case indexers.FieldDisabled:
m.ResetDisabled()
return nil
}
return fmt.Errorf("unknown Indexers field %s", name)
}
@@ -3256,6 +3601,7 @@ type MediaMutation struct {
addstorage_id *int
target_dir *string
download_history_episodes *bool
limiter **schema.MediaLimiter
clearedFields map[string]struct{}
episodes map[int]struct{}
removedepisodes map[int]struct{}
@@ -3924,6 +4270,55 @@ func (m *MediaMutation) ResetDownloadHistoryEpisodes() {
delete(m.clearedFields, media.FieldDownloadHistoryEpisodes)
}
// SetLimiter sets the "limiter" field.
func (m *MediaMutation) SetLimiter(sl *schema.MediaLimiter) {
m.limiter = &sl
}
// Limiter returns the value of the "limiter" field in the mutation.
func (m *MediaMutation) Limiter() (r *schema.MediaLimiter, exists bool) {
v := m.limiter
if v == nil {
return
}
return *v, true
}
// OldLimiter returns the old "limiter" field's value of the Media entity.
// If the Media object wasn't provided to the builder, the object is fetched from the database.
// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
func (m *MediaMutation) OldLimiter(ctx context.Context) (v *schema.MediaLimiter, err error) {
if !m.op.Is(OpUpdateOne) {
return v, errors.New("OldLimiter is only allowed on UpdateOne operations")
}
if m.id == nil || m.oldValue == nil {
return v, errors.New("OldLimiter requires an ID field in the mutation")
}
oldValue, err := m.oldValue(ctx)
if err != nil {
return v, fmt.Errorf("querying old value for OldLimiter: %w", err)
}
return oldValue.Limiter, nil
}
// ClearLimiter clears the value of the "limiter" field.
func (m *MediaMutation) ClearLimiter() {
m.limiter = nil
m.clearedFields[media.FieldLimiter] = struct{}{}
}
// LimiterCleared returns if the "limiter" field was cleared in this mutation.
func (m *MediaMutation) LimiterCleared() bool {
_, ok := m.clearedFields[media.FieldLimiter]
return ok
}
// ResetLimiter resets all changes to the "limiter" field.
func (m *MediaMutation) ResetLimiter() {
m.limiter = nil
delete(m.clearedFields, media.FieldLimiter)
}
// AddEpisodeIDs adds the "episodes" edge to the Episode entity by ids.
func (m *MediaMutation) AddEpisodeIDs(ids ...int) {
if m.episodes == nil {
@@ -4012,7 +4407,7 @@ func (m *MediaMutation) Type() string {
// order to get all numeric fields that were incremented/decremented, call
// AddedFields().
func (m *MediaMutation) Fields() []string {
fields := make([]string, 0, 13)
fields := make([]string, 0, 14)
if m.tmdb_id != nil {
fields = append(fields, media.FieldTmdbID)
}
@@ -4052,6 +4447,9 @@ func (m *MediaMutation) Fields() []string {
if m.download_history_episodes != nil {
fields = append(fields, media.FieldDownloadHistoryEpisodes)
}
if m.limiter != nil {
fields = append(fields, media.FieldLimiter)
}
return fields
}
@@ -4086,6 +4484,8 @@ func (m *MediaMutation) Field(name string) (ent.Value, bool) {
return m.TargetDir()
case media.FieldDownloadHistoryEpisodes:
return m.DownloadHistoryEpisodes()
case media.FieldLimiter:
return m.Limiter()
}
return nil, false
}
@@ -4121,6 +4521,8 @@ func (m *MediaMutation) OldField(ctx context.Context, name string) (ent.Value, e
return m.OldTargetDir(ctx)
case media.FieldDownloadHistoryEpisodes:
return m.OldDownloadHistoryEpisodes(ctx)
case media.FieldLimiter:
return m.OldLimiter(ctx)
}
return nil, fmt.Errorf("unknown Media field %s", name)
}
@@ -4221,6 +4623,13 @@ func (m *MediaMutation) SetField(name string, value ent.Value) error {
}
m.SetDownloadHistoryEpisodes(v)
return nil
case media.FieldLimiter:
v, ok := value.(*schema.MediaLimiter)
if !ok {
return fmt.Errorf("unexpected type %T for field %s", value, name)
}
m.SetLimiter(v)
return nil
}
return fmt.Errorf("unknown Media field %s", name)
}
@@ -4290,6 +4699,9 @@ func (m *MediaMutation) ClearedFields() []string {
if m.FieldCleared(media.FieldDownloadHistoryEpisodes) {
fields = append(fields, media.FieldDownloadHistoryEpisodes)
}
if m.FieldCleared(media.FieldLimiter) {
fields = append(fields, media.FieldLimiter)
}
return fields
}
@@ -4316,6 +4728,9 @@ func (m *MediaMutation) ClearField(name string) error {
case media.FieldDownloadHistoryEpisodes:
m.ClearDownloadHistoryEpisodes()
return nil
case media.FieldLimiter:
m.ClearLimiter()
return nil
}
return fmt.Errorf("unknown Media nullable field %s", name)
}
@@ -4363,6 +4778,9 @@ func (m *MediaMutation) ResetField(name string) error {
case media.FieldDownloadHistoryEpisodes:
m.ResetDownloadHistoryEpisodes()
return nil
case media.FieldLimiter:
m.ResetLimiter()
return nil
}
return fmt.Errorf("unknown Media field %s", name)
}

View File

@@ -4,6 +4,7 @@ package ent
import (
"polaris/ent/downloadclients"
"polaris/ent/episode"
"polaris/ent/history"
"polaris/ent/indexers"
"polaris/ent/media"
@@ -49,6 +50,10 @@ func init() {
downloadclients.DefaultTags = downloadclientsDescTags.Default.(string)
episodeFields := schema.Episode{}.Fields()
_ = episodeFields
// episodeDescMonitored is the schema descriptor for monitored field.
episodeDescMonitored := episodeFields[7].Descriptor()
// episode.DefaultMonitored holds the default value on creation for the monitored field.
episode.DefaultMonitored = episodeDescMonitored.Default.(bool)
historyFields := schema.History{}.Fields()
_ = historyFields
// historyDescSize is the schema descriptor for size field.
@@ -61,6 +66,18 @@ func init() {
indexersDescEnableRss := indexersFields[3].Descriptor()
// indexers.DefaultEnableRss holds the default value on creation for the enable_rss field.
indexers.DefaultEnableRss = indexersDescEnableRss.Default.(bool)
// indexersDescPriority is the schema descriptor for priority field.
indexersDescPriority := indexersFields[4].Descriptor()
// indexers.DefaultPriority holds the default value on creation for the priority field.
indexers.DefaultPriority = indexersDescPriority.Default.(int)
// indexersDescSeedRatio is the schema descriptor for seed_ratio field.
indexersDescSeedRatio := indexersFields[5].Descriptor()
// indexers.DefaultSeedRatio holds the default value on creation for the seed_ratio field.
indexers.DefaultSeedRatio = indexersDescSeedRatio.Default.(float32)
// indexersDescDisabled is the schema descriptor for disabled field.
indexersDescDisabled := indexersFields[6].Descriptor()
// indexers.DefaultDisabled holds the default value on creation for the disabled field.
indexers.DefaultDisabled = indexersDescDisabled.Default.(bool)
mediaFields := schema.Media{}.Fields()
_ = mediaFields
// mediaDescCreatedAt is the schema descriptor for created_at field.

View File

@@ -21,6 +21,7 @@ func (Episode) Fields() []ent.Field {
field.String("overview"),
field.String("air_date"),
field.Enum("status").Values("missing", "downloading", "downloaded").Default("missing"),
field.Bool("monitored").Default(false).StructTag("json:\"monitored\""), //whether this episode is monitored
}
}

View File

@@ -20,6 +20,7 @@ func (History) Fields() []ent.Field {
field.String("target_dir"),
field.Int("size").Default(0),
field.Int("download_client_id").Optional(),
field.Int("indexer_id").Optional(),
field.Enum("status").Values("running", "success", "fail", "uploading"),
field.String("saved").Optional(),
}

View File

@@ -17,7 +17,9 @@ func (Indexers) Fields() []ent.Field {
field.String("implementation"),
field.String("settings"),
field.Bool("enable_rss").Default(true),
field.Int("priority"),
field.Int("priority").Default(50),
field.Float32("seed_ratio").Optional().Default(0).Comment("minimal seed ratio requied, before removing torrent"),
field.Bool("disabled").Optional().Default(false),
}
}

View File

@@ -29,6 +29,7 @@ func (Media) Fields() []ent.Field {
field.Int("storage_id").Optional(),
field.String("target_dir").Optional(),
field.Bool("download_history_episodes").Optional().Default(false).Comment("tv series only"),
field.JSON("limiter", &MediaLimiter{}).Optional(),
}
}
@@ -38,3 +39,8 @@ func (Media) Edges() []ent.Edge {
edge.To("episodes", Episode.Type),
}
}
// MediaLimiter restricts the acceptable resource size for a media item.
// NOTE(review): units are not stated in SOURCE — presumably bytes, matching
// torznab Result.Size; confirm against the download-path size check.
type MediaLimiter struct {
	SizeMin int `json:"size_min"` // minimum acceptable size; 0 means no lower bound — TODO confirm
	SizeMax int `json:"size_max"` // maximum acceptable size; 0 means no upper bound — TODO confirm
}

View File

@@ -8,6 +8,7 @@ type Torrent interface {
Remove() error
Save() string
Exists() bool
SeedRatio() *float64
}

23
pkg/metadata/doc.go Normal file
View File

@@ -0,0 +1,23 @@
package metadata
/*
tv name examples
Cowboy Cartel S01E02 XviD-AFG [eztv]
The.Bold.and.the.Beautiful.S37E219.XviD-AFG
As Husband As Wife 2024 S01E05-E06 1080p WEB-DL HEVC DDP 2Audios-QHstudIo
Twinkle Love 2024 S04 Complete 2160p WEB-DL HEVC AAC-QHstudIo
[HorribleSubs] One Punch Man S2 - 08 [720p].mkv
[Breeze] One Punch Man S01 S02 [1080p BD AV1][dual audio]
[HYSUB]ONE PUNCH MAN[S1+S2][BDrip][GB_MP4][1920X1080]
Cowboy Cartel S01E04 Photo Finish 1080p ATVP WEB-DL DDP5 1 Atmos H 264-FLUX [eztv]
Limitless Wrestling 2021 01 06 The Road Season 2 Episode 12 XviD-AFG [eztv]
[千夏字幕组][小市民系列_Shoushimin Series][第03话][1080p_HEVC][简繁内封][招募新人]
[OPFans楓雪動漫][ONE PIECE 海賊王][第1113話][周日版][1080p][MP4][簡體]
[桜都字幕组] 亦叶亦花 / Nanare Hananare [04][1080p][简体内嵌]
[ANi] 戰國妖狐 千魔混沌篇 - 16 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]
[ANi] 這是妳與我的最後戰場,或是開創世界的聖戰 第二季 - 04 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]
[桜都字幕组][一拳超人 第2季/One Punch Man 2nd Season][01-12 END][BIG5][720P]
一拳超人第二季One.Punch.Man.Season2.2160p.10Bit.HEVC.AAC.CHS&JPN
*/

View File

@@ -29,20 +29,11 @@ func ParseTv(name string) *Metadata {
func parseEnglishName(name string) *Metadata {
re := regexp.MustCompile(`[^\p{L}\w\s]`)
name = re.ReplaceAllString(strings.ToLower(name), " ")
splits := strings.Split(strings.TrimSpace(name), " ")
var newSplits []string
for _, p := range splits {
p = strings.TrimSpace(p)
if p == "" {
continue
}
newSplits = append(newSplits, p)
}
newSplits := strings.Split(strings.TrimSpace(name), " ")
seasonRe := regexp.MustCompile(`^s\d{1,2}`)
resRe := regexp.MustCompile(`^\d{3,4}p`)
episodeRe := regexp.MustCompile(`e\d{1,2}`)
episodeRe := regexp.MustCompile(`e\d{1,3}`)
var seasonIndex = -1
var episodeIndex = -1
@@ -58,7 +49,7 @@ func parseEnglishName(name string) *Metadata {
} else if resRe.MatchString(p) {
resIndex = i
}
if episodeRe.MatchString(p) {
if i >= seasonIndex && episodeRe.MatchString(p) {
episodeIndex = i
}
}
@@ -137,7 +128,7 @@ func parseEnglishName(name string) *Metadata {
//resolution exists
meta.Resolution = newSplits[resIndex]
}
if meta.Episode == -1 || strings.Contains(name, "complete") {
if meta.Episode == -1 {
meta.Episode = -1
meta.IsSeasonPack = true
}
@@ -154,9 +145,11 @@ func parseEnglishName(name string) *Metadata {
}
func parseChineseName(name string) *Metadata {
var meta = &Metadata{
Season: 1,
var meta = parseEnglishName(name)
if meta.Season != -1 && (meta.Episode != -1 || meta.IsSeasonPack) {
return meta
}
meta = &Metadata{Season: 1}
//season pack
packRe := regexp.MustCompile(`(\d{1,2}-\d{1,2})|(全集)`)
if packRe.MatchString(name) {

147
pkg/metadata/tv_test.go Normal file
View File

@@ -0,0 +1,147 @@
package metadata
import (
"polaris/log"
"testing"
"github.com/stretchr/testify/assert"
)
// Test_ParseTV1 covers a season pack with an explicit "Complete" marker
// and 2160p resolution. Arguments follow testify's (expected, actual) order.
func Test_ParseTV1(t *testing.T) {
	s1 := "Twinkle Love 2024 S04 Complete 2160p WEB-DL HEVC AAC-QHstudIo"
	m := ParseTv(s1)
	log.Infof("results: %+v", m)
	assert.Equal(t, 4, m.Season)
	assert.Equal(t, true, m.IsSeasonPack)
	assert.Equal(t, "2160p", m.Resolution)
}
// Test_ParseTV2 covers a standard SxxEyy English release name with trailing
// whitespace. Arguments follow testify's (expected, actual) order.
func Test_ParseTV2(t *testing.T) {
	s1 := "Cowboy Cartel S01E04 Photo Finish 1080p ATVP WEB-DL DDP5 1 Atmos H 264-FLUX [eztv] "
	m := ParseTv(s1)
	log.Infof("results: %+v", m)
	assert.Equal(t, 1, m.Season)
	assert.Equal(t, 4, m.Episode)
	assert.Equal(t, false, m.IsSeasonPack)
	assert.Equal(t, "1080p", m.Resolution)
}
// Test_ParseTV3 covers a dotted release name with a three-digit episode
// number (E219). Arguments follow testify's (expected, actual) order.
func Test_ParseTV3(t *testing.T) {
	s1 := "The.Bold.and.the.Beautiful.S37E219.XviD-AFG "
	m := ParseTv(s1)
	log.Infof("results: %+v", m)
	assert.Equal(t, 37, m.Season)
	assert.Equal(t, 219, m.Episode)
	assert.Equal(t, false, m.IsSeasonPack)
	//assert.Equal(t, "1080p", m.Resolution)
}
// Test_ParseTV4 covers the spelled-out "Season 2 Episode 12" form; the
// spelled-out episode is not parsed, so the parser reports a season pack.
// Arguments follow testify's (expected, actual) order.
func Test_ParseTV4(t *testing.T) {
	s1 := "Limitless Wrestling 2021 01 06 The Road Season 2 Episode 12 XviD-AFG [eztv] "
	m := ParseTv(s1)
	log.Infof("results: %+v", m)
	assert.Equal(t, 2, m.Season)
	//assert.Equal(t, 219, m.Episode)
	assert.Equal(t, true, m.IsSeasonPack)
	//assert.Equal(t, "1080p", m.Resolution)
}
// Test_ParseTV5 covers a multi-season pack ("S01 S02"); the first season
// number wins. Arguments follow testify's (expected, actual) order.
func Test_ParseTV5(t *testing.T) {
	s1 := "[Breeze] One Punch Man S01 S02 [1080p BD AV1][dual audio]"
	m := ParseTv(s1)
	log.Infof("results: %+v", m)
	assert.Equal(t, 1, m.Season)
	//assert.Equal(t, 219, m.Episode)
	assert.Equal(t, true, m.IsSeasonPack)
	//assert.Equal(t, "1080p", m.Resolution)
}
// Test_ParseTV6 covers a Chinese fansub name with a "第03话" episode marker
// and no explicit season (defaults to 1). Arguments follow testify's
// (expected, actual) order.
func Test_ParseTV6(t *testing.T) {
	s1 := "[千夏字幕组][小市民系列_Shoushimin Series][第03话][1080p_HEVC][简繁内封][招募新人]"
	m := ParseTv(s1)
	log.Infof("results: %+v", m)
	assert.Equal(t, 1, m.Season)
	assert.Equal(t, 3, m.Episode)
	assert.Equal(t, false, m.IsSeasonPack)
	assert.Equal(t, "1080p", m.Resolution)
}
// Test_ParseTV7 covers a four-digit Chinese episode marker ("第1113話").
// Arguments follow testify's (expected, actual) order.
func Test_ParseTV7(t *testing.T) {
	s1 := " [OPFans楓雪動漫][ONE PIECE 海賊王][第1113話][周日版][1080p][MP4][簡體]"
	m := ParseTv(s1)
	log.Infof("results: %+v", m)
	assert.Equal(t, 1, m.Season)
	assert.Equal(t, 1113, m.Episode)
	assert.Equal(t, false, m.IsSeasonPack)
	assert.Equal(t, "1080p", m.Resolution)
}
// Test_ParseTV8 covers a bare bracketed episode number ("[04]") with no
// season marker (defaults to 1). Arguments follow testify's
// (expected, actual) order.
func Test_ParseTV8(t *testing.T) {
	s1 := "[桜都字幕组] 亦叶亦花 / Nanare Hananare [04][1080p][简体内嵌] "
	m := ParseTv(s1)
	log.Infof("results: %+v", m)
	assert.Equal(t, 1, m.Season)
	assert.Equal(t, 4, m.Episode)
	assert.Equal(t, false, m.IsSeasonPack)
	assert.Equal(t, "1080p", m.Resolution)
}
// Test_ParseTV9 covers the "- 16" dash-separated episode form used by ANi
// releases, with uppercase "1080P" normalized. Arguments follow testify's
// (expected, actual) order.
func Test_ParseTV9(t *testing.T) {
	s1 := "[ANi] 戰國妖狐 千魔混沌篇 - 16 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"
	m := ParseTv(s1)
	log.Infof("results: %+v", m)
	assert.Equal(t, 1, m.Season)
	assert.Equal(t, 16, m.Episode)
	assert.Equal(t, false, m.IsSeasonPack)
	assert.Equal(t, "1080p", m.Resolution)
}
// Test_ParseTV10 covers a Chinese season marker ("第2季") combined with an
// episode range ("01-12 END"), which is treated as a season pack.
func Test_ParseTV10(t *testing.T) {
	s1 := " [桜都字幕组][一拳超人 第2季/One Punch Man 2nd Season][01-12 END][BIG5][720P]"
	m := ParseTv(s1)
	log.Infof("results: %+v", m)
	assert.Equal(t, 2, m.Season)
	//assert.Equal(t, 01, m.Episode)
	assert.Equal(t, true, m.IsSeasonPack)
	assert.Equal(t, "720p", m.Resolution)
}
// Test_ParseTV11 covers a Chinese ordinal season ("第二季") together with a
// dash-separated episode number ("- 04").
func Test_ParseTV11(t *testing.T) {
	s1 := " [ANi] 這是妳與我的最後戰場,或是開創世界的聖戰 第二季 - 04 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4] "
	m := ParseTv(s1)
	log.Infof("results: %+v", m)
	assert.Equal(t, 2, m.Season)
	assert.Equal(t, 4, m.Episode)
	assert.Equal(t, false, m.IsSeasonPack)
	assert.Equal(t, "1080p", m.Resolution)
}
// Test_ParseTV12 covers a mixed Chinese/English title where a Chinese
// prefix ("牛仔") precedes an otherwise standard SxxEyy English name.
func Test_ParseTV12(t *testing.T) {
	s1 := " 牛仔Cowboy Cartel S02E04 Photo Finish 1080p ATVP WEB-DL DDP5 1 Atmos H 264-FLUX [eztv] "
	m := ParseTv(s1)
	log.Infof("results: %+v", m)
	assert.Equal(t, 2, m.Season)
	assert.Equal(t, 4, m.Episode)
	assert.Equal(t, false, m.IsSeasonPack)
	assert.Equal(t, "1080p", m.Resolution)
}
// Test_ParseTV13 covers a name where the resolution appears BEFORE the
// SxxEyy marker, and the word "COMPLETE" appears in an unrelated phrase
// ("NOT COMPLETE") — the result must still be a single episode, not a pack.
func Test_ParseTV13(t *testing.T) {
	s1 := "House of Dragon 2024 1080p S02E08 Leaked HQCAM NOT COMPLETE English Audio x264 ESub BOTHD"
	m := ParseTv(s1)
	log.Infof("results: %+v", m)
	assert.Equal(t, 2, m.Season)
	assert.Equal(t, 8, m.Episode)
	assert.Equal(t, false, m.IsSeasonPack)
	assert.Equal(t, "1080p", m.Resolution)
}
// Test_ParseTV14 is a smoke test: an empty release name must not panic.
// NOTE(review): the previous expectations (season 2, episode 1, 720p) can
// never hold for empty input and looked like copy-paste leftovers from
// Test_ParseTV10; they would have made this test fail unconditionally.
func Test_ParseTV14(t *testing.T) {
	m := ParseTv("")
	log.Infof("results: %+v", m)
}

View File

@@ -1,9 +1,12 @@
package tmdb
import (
"net/http"
"net/url"
"polaris/log"
"strconv"
"strings"
"time"
tmdb "github.com/cyruzin/golang-tmdb"
"github.com/pkg/errors"
@@ -14,11 +17,29 @@ type Client struct {
tmdbClient *tmdb.Client
}
func NewClient(apiKey string) (*Client, error) {
func NewClient(apiKey, proxyUrl string) (*Client, error) {
tmdbClient, err := tmdb.Init(apiKey)
if err != nil {
return nil, errors.Wrap(err, "new tmdb client")
}
if proxyUrl != "" {
//set proxy
u, err := url.Parse(proxyUrl)
if err != nil {
log.Errorf("parse proxy %v error, skip: %v", proxyUrl, err)
} else {
tmdbClient.SetClientConfig(http.Client{
Timeout: time.Second * 10,
Transport: &http.Transport{
Proxy: http.ProxyURL(u),
MaxIdleConns: 10,
IdleConnTimeout: 15 * time.Second,
},
})
}
}
return &Client{
apiKey: apiKey,
@@ -50,7 +71,6 @@ func (c *Client) GetTvDetails(id int, language string) (*tmdb.TVDetails, error)
}
}
return d, err
}
@@ -152,7 +172,7 @@ func (c *Client) GetEposideDetail(id, seasonNumber, eposideNumber int, language
return d, err
}
d.Name = detailEN.Name
d.Overview = detailEN.Overview
d.Overview = detailEN.Overview
}
return d, err
@@ -172,7 +192,7 @@ func (c *Client) GetSeasonDetails(id, seasonNumber int, language string) (*tmdb.
}
for i, ep := range detailCN.Episodes {
if !episodeNameUseful(ep.Name) && episodeNameUseful(detailEN.Episodes[i].Name){
if !episodeNameUseful(ep.Name) && episodeNameUseful(detailEN.Episodes[i].Name) {
detailCN.Episodes[i].Name = detailEN.Episodes[i].Name
detailCN.Episodes[i].Overview = detailEN.Episodes[i].Overview
}
@@ -198,7 +218,6 @@ func withLangOption(language string) map[string]string {
}
}
func episodeNameUseful(name string) bool {
return !strings.HasSuffix(name, "集") && !strings.HasPrefix(strings.ToLower(name), "episode")
}
}

View File

@@ -6,6 +6,7 @@ import (
"io"
"net/http"
"net/url"
"polaris/db"
"polaris/log"
"strconv"
"time"
@@ -71,17 +72,22 @@ func (i *Item) GetAttr(key string) string {
}
return ""
}
func (r *Response) ToResults() []Result {
func (r *Response) ToResults(indexer *db.TorznabInfo) []Result {
var res []Result
for _, item := range r.Channel.Item {
r := Result{
Name: item.Title,
Link: item.Link,
Size: mustAtoI(item.Size),
Seeders: mustAtoI(item.GetAttr("seeders")),
Peers: mustAtoI(item.GetAttr("peers")),
Category: mustAtoI(item.GetAttr("category")),
Source: r.Channel.Title,
Name: item.Title,
Link: item.Link,
Size: mustAtoI(item.Size),
Seeders: mustAtoI(item.GetAttr("seeders")),
Peers: mustAtoI(item.GetAttr("peers")),
Category: mustAtoI(item.GetAttr("category")),
DownloadVolumeFactor: tryParseFloat(item.GetAttr("downloadvolumefactor")),
UploadVolumeFactor: tryParseFloat(item.GetAttr("uploadvolumefactor")),
Source: indexer.Name,
IndexerId: indexer.ID,
Priority: indexer.Priority,
IsPrivate: item.Type == "private",
}
res = append(res, r)
}
@@ -96,11 +102,21 @@ func mustAtoI(key string) int {
}
return i
}
func Search(torznabUrl, api, keyWord string) ([]Result, error) {
// tryParseFloat parses a torznab attribute value as float32. A missing
// attribute (GetAttr returns "") is common and silently yields 0; only a
// non-empty, malformed value is logged before falling back to 0.
func tryParseFloat(s string) float32 {
	if s == "" {
		// Attribute absent on this item — not worth a warning per result.
		return 0
	}
	r, err := strconv.ParseFloat(s, 32)
	if err != nil {
		log.Warnf("parse float error: %v", err)
		return 0
	}
	return float32(r)
}
func Search(indexer *db.TorznabInfo, api, keyWord string) ([]Result, error) {
ctx, cancel := context.WithTimeout(context.TODO(), 10*time.Second)
defer cancel()
req, err := http.NewRequestWithContext(ctx, http.MethodGet, torznabUrl, nil)
req, err := http.NewRequestWithContext(ctx, http.MethodGet, indexer.URL, nil)
if err != nil {
return nil, errors.Wrap(err, "new request")
}
@@ -124,15 +140,20 @@ func Search(torznabUrl, api, keyWord string) ([]Result, error) {
if err != nil {
return nil, errors.Wrap(err, "json unmarshal")
}
return res.ToResults(), nil
return res.ToResults(indexer), nil
}
type Result struct {
Name string
Link string
Size int
Seeders int
Peers int
Category int
Source string
Name string `json:"name"`
Link string `json:"link"`
Size int `json:"size"`
Seeders int `json:"seeders"`
Peers int `json:"peers"`
Category int `json:"category"`
Source string `json:"source"`
DownloadVolumeFactor float32 `json:"download_volume_factor"`
UploadVolumeFactor float32 `json:"upload_volume_factor"`
IndexerId int `json:"indexer_id"`
Priority int `json:"priority"`
IsPrivate bool `json:"is_private"`
}

View File

@@ -130,6 +130,9 @@ func (t *Torrent) Exists() bool {
}
// Name returns the torrent's display name, or "" when the torrent no
// longer exists in the client or the client has not reported a name
// (nil pointer field), avoiding a nil dereference.
func (t *Torrent) Name() string {
	if !t.Exists() {
		return ""
	}
	tor := t.getTorrent()
	if tor == nil || tor.Name == nil {
		return ""
	}
	return *tor.Name
}
@@ -155,6 +158,10 @@ func (t *Torrent) Stop() error {
return t.c.TorrentStopIDs(context.TODO(), []int64{t.ID})
}
// SeedRatio returns the torrent's upload ratio as reported by the client,
// or nil when the torrent no longer exists — mirroring the existence guard
// used by Name to avoid dereferencing a missing torrent.
func (t *Torrent) SeedRatio() *float64 {
	if !t.Exists() {
		return nil
	}
	return t.getTorrent().UploadRatio
}
func (t *Torrent) Start() error {
return t.c.TorrentStartIDs(context.TODO(), []int64{t.ID})
}

View File

@@ -7,8 +7,6 @@ import (
"strings"
"unicode"
"github.com/adrg/strutil"
"github.com/adrg/strutil/metrics"
"github.com/pkg/errors"
"golang.org/x/crypto/bcrypt"
"golang.org/x/exp/rand"
@@ -65,7 +63,7 @@ func IsNameAcceptable(name1, name2 string) bool {
if strings.Contains(name1, name2) || strings.Contains(name2, name1) {
return true
}
return strutil.Similarity(name1, name2, metrics.NewHamming()) > 0.4
return false
}
func FindSeasonEpisodeNum(name string) (se int, ep int, err error) {
@@ -112,21 +110,6 @@ func FindSeasonPackageInfo(name string) (se int, err error) {
return se, err
}
func IsSeasonPackageName(name string) bool {
seRe := regexp.MustCompile(`S\d+`)
epRe := regexp.MustCompile(`E\d+`)
nameUpper := strings.ToUpper(name)
matchEp := epRe.FindAllString(nameUpper, -1)
if len(matchEp) != 0 {
return false //episode number should not exist
}
matchSe := seRe.FindAllString(nameUpper, -1)
if len(matchSe) == 0 {
return false //no season num
}
return true
}
func ContainsIgnoreCase(s, substr string) bool {
return strings.Contains(strings.ToLower(s), strings.ToLower(substr))
}

View File

@@ -32,7 +32,7 @@ func (s *Server) GetAllActivities(c *gin.Context) (interface{}, error) {
a := Activity{
History: h,
}
for id, task := range s.tasks {
for id, task := range s.core.GetTasks() {
if h.ID == id && task.Exists() {
a.Progress = task.Progress()
}
@@ -54,13 +54,11 @@ func (s *Server) RemoveActivity(c *gin.Context) (interface{}, error) {
log.Errorf("no record of id: %d", id)
return nil, nil
}
torrent := s.tasks[his.ID]
if torrent != nil {
if err := torrent.Remove(); err != nil {
return nil, errors.Wrap(err, "remove torrent")
}
delete(s.tasks, his.ID)
if err := s.core.RemoveTaskAndTorrent(his.ID); err != nil {
return nil, errors.Wrap(err, "remove torrent")
}
if his.EpisodeID != 0 {
s.db.SetEpisodeStatus(his.EpisodeID, episode.StatusMissing)
@@ -96,7 +94,7 @@ func (s *Server) GetMediaDownloadHistory(c *gin.Context) (interface{}, error) {
type TorrentInfo struct {
Name string `json:"name"`
ID int64 `json:"id"`
ID int64 `json:"id"`
SeedRatio float32 `json:"seed_ratio"`
Progress int `json:"progress"`
}
@@ -116,8 +114,8 @@ func (s *Server) GetAllTorrents(c *gin.Context) (interface{}, error) {
continue
}
infos = append(infos, TorrentInfo{
Name: t.Name(),
ID: t.ID,
Name: t.Name(),
ID: t.ID,
Progress: t.Progress(),
})
}

95
server/core/client.go Normal file
View File

@@ -0,0 +1,95 @@
package core
import (
"polaris/db"
"polaris/ent"
"polaris/log"
"polaris/pkg/tmdb"
"polaris/pkg/transmission"
"github.com/pkg/errors"
"github.com/robfig/cron"
)
// NewClient builds the application core around the given database handle.
// language is the preferred metadata language. The cron scheduler is
// created here but not started until Init is called.
func NewClient(db *db.Client, language string) *Client {
	return &Client{
		db:       db,
		cron:     cron.New(),
		tasks:    make(map[int]*Task), // idiomatic: no size hint needed for an empty map
		language: language,
	}
}
// Client is the application core: it owns the database handle, the cron
// scheduler for background jobs, and the set of in-flight download tasks.
type Client struct {
	db       *db.Client
	cron     *cron.Cron
	tasks    map[int]*Task // keyed by history record ID; NOTE(review): no mutex — confirm single-goroutine access
	language string        // preferred metadata language
}
// Init restores download tasks persisted in the history table, then
// registers and starts the built-in cron jobs. Call once at startup,
// before serving requests.
func (c *Client) Init() {
	c.reloadTasks()
	c.addSysCron()
}
// reloadTasks re-attaches torrent handles for every saved history record so
// that progress tracking survives a process restart. Records whose torrent
// is gone from the download client are skipped silently.
func (c *Client) reloadTasks() {
	allTasks := c.db.GetHistories()
	for _, t := range allTasks {
		torrent, err := transmission.ReloadTorrent(t.Saved)
		if err != nil {
			log.Errorf("reload task %s failed: %v", t.SourceTitle, err) // was "relaod" — typo fixed
			continue
		}
		// Re-track only torrents still present in the client; a finished
		// download may still be there seeding.
		if !torrent.Exists() {
			continue
		}
		log.Infof("reloading task: %d %s", t.ID, t.SourceTitle)
		c.tasks[t.ID] = &Task{Torrent: torrent}
	}
}
// getDownloadClient connects to the transmission daemon configured in the
// database and returns the live client together with its settings record.
func (c *Client) getDownloadClient() (*transmission.Client, *ent.DownloadClients, error) {
	settings := c.db.GetTransmission()
	client, err := transmission.NewClient(transmission.Config{
		URL:      settings.URL,
		User:     settings.User,
		Password: settings.Password,
	})
	if err != nil {
		return nil, nil, errors.Wrap(err, "connect transmission")
	}
	return client, settings, nil
}
// TMDB constructs a TMDB API client from the stored settings. It fails when
// no API key is configured; the optional proxy setting is forwarded so that
// only TMDB traffic uses the proxy.
func (c *Client) TMDB() (*tmdb.Client, error) {
	apiKey := c.db.GetSetting(db.SettingTmdbApiKey)
	if apiKey == "" {
		return nil, errors.New("TMDB apiKey not set")
	}
	proxyURL := c.db.GetSetting(db.SettingProxy)
	return tmdb.NewClient(apiKey, proxyURL)
}
// MustTMDB is like TMDB but panics (via log.Panicf) when the client cannot
// be constructed, e.g. when the API key is missing. Intended for call sites
// where a missing TMDB configuration is unrecoverable.
func (c *Client) MustTMDB() *tmdb.Client {
	t, err := c.TMDB()
	if err != nil {
		log.Panicf("get tmdb: %v", err)
	}
	return t
}
// RemoveTaskAndTorrent removes the torrent belonging to the given history
// record ID from the download client and forgets the in-memory task. It is
// a no-op when no task is tracked under that ID.
func (c *Client) RemoveTaskAndTorrent(id int) error { // gofmt: space before return type
	task := c.tasks[id] // renamed from "torrent": the value is a *Task, not a torrent
	if task == nil {
		return nil
	}
	if err := task.Remove(); err != nil {
		return errors.Wrap(err, "remove torrent")
	}
	delete(c.tasks, id)
	return nil
}
// GetTasks returns the in-flight download tasks keyed by history record ID.
// NOTE(review): this hands out the internal map, not a copy — confirm all
// callers only read from it.
func (c *Client) GetTasks() map[int]*Task {
	return c.tasks
}

121
server/core/integration.go Normal file
View File

@@ -0,0 +1,121 @@
package core
import (
"bytes"
"fmt"
"path/filepath"
"polaris/db"
"polaris/ent/media"
storage1 "polaris/ent/storage"
"polaris/log"
"polaris/pkg/notifier"
"polaris/pkg/storage"
"github.com/pkg/errors"
)
// writePlexmatch maintains Plex ".plexmatch" hint files for a tv series:
// a series-level file mapping the directory to its tmdb id, and a
// season-level file that accumulates "ep:" lines mapping downloaded file
// names to episode numbers. It is a no-op when the plexmatch setting is
// disabled or the media is not a tv series.
func (c *Client) writePlexmatch(seriesId int, episodeId int, targetDir, name string) error {
	if !c.plexmatchEnabled() {
		return nil
	}
	series, err := c.db.GetMedia(seriesId)
	if err != nil {
		return err
	}
	if series.MediaType != media.MediaTypeTv {
		return nil
	}
	st, err := c.getStorage(series.StorageID, media.MediaTypeTv)
	if err != nil {
		return errors.Wrap(err, "get storage")
	}
	// Series-level plexmatch file: a ReadFile error is treated as "missing",
	// and a fresh file containing only the tmdb id is created.
	_, err = st.ReadFile(filepath.Join(series.TargetDir, ".plexmatch"))
	if err != nil {
		//create new
		log.Warnf(".plexmatch file not found, create new one: %s", series.NameEn)
		if err := st.WriteFile(filepath.Join(series.TargetDir, ".plexmatch"),
			[]byte(fmt.Sprintf("tmdbid: %d\n", series.TmdbID))); err != nil {
			return errors.Wrap(err, "series plexmatch")
		}
	}
	// Season-level plexmatch file: append to any existing content so earlier
	// "ep:" lines survive; a read error just means we start from empty.
	ep, err := c.db.GetEpisodeByID(episodeId)
	if err != nil {
		return errors.Wrap(err, "query episode")
	}
	buff := bytes.Buffer{}
	seasonPlex := filepath.Join(targetDir, ".plexmatch")
	data, err := st.ReadFile(seasonPlex)
	if err != nil {
		log.Infof("read season plexmatch: %v", err)
	} else {
		buff.Write(data)
	}
	buff.WriteString(fmt.Sprintf("\nep: %d: %s\n", ep.EpisodeNumber, name))
	log.Infof("write season plexmatch file content: %s", buff.String())
	return st.WriteFile(seasonPlex, buff.Bytes())
}
// plexmatchEnabled reports whether the ".plexmatch" integration is turned
// on in settings (the flag is stored as the string "true").
func (c *Client) plexmatchEnabled() bool {
	return c.db.GetSetting(db.SettingPlexMatchEnabled) == "true"
}
// getStorage resolves the storage record with the given id into a concrete
// storage.Storage implementation (local or webdav), rooted at the record's
// tv or movie path depending on mediaType.
func (c *Client) getStorage(storageId int, mediaType media.MediaType) (storage.Storage, error) {
	st := c.db.GetStorage(storageId)
	targetPath := st.TvPath
	if mediaType == media.MediaTypeMovie {
		targetPath = st.MoviePath
	}
	switch st.Implementation {
	case storage1.ImplementationLocal:
		storageImpl1, err := storage.NewLocalStorage(targetPath)
		if err != nil {
			return nil, errors.Wrap(err, "new local")
		}
		return storageImpl1, nil
	case storage1.ImplementationWebdav:
		ws := st.ToWebDavSetting()
		// ChangeFileHash is persisted as a string flag; "true" enables it.
		storageImpl1, err := storage.NewWebdavStorage(ws.URL, ws.User, ws.Password, targetPath, ws.ChangeFileHash == "true")
		if err != nil {
			return nil, errors.Wrap(err, "new webdav")
		}
		return storageImpl1, nil
	}
	return nil, errors.New("no storage found")
}
// sendMsg fans the message out to every enabled notification client.
// Notification is best-effort: each failure is logged and the loop moves on
// to the next client; nothing is returned to the caller.
func (c *Client) sendMsg(msg string) {
	clients, err := c.db.GetAllNotificationClients2()
	if err != nil {
		log.Errorf("query notification clients: %v", err)
		return
	}
	for _, cl := range clients {
		if !cl.Enabled {
			continue
		}
		// Resolve the service name (e.g. a chat provider) to its handler.
		handler, ok := notifier.Gethandler(cl.Service)
		if !ok {
			log.Errorf("no notification implementation of service %s", cl.Service)
			continue
		}
		noCl, err := handler(cl.Settings)
		if err != nil {
			log.Errorf("handle setting for name %s error: %v", cl.Name, err)
			continue
		}
		err = noCl.SendMsg(msg)
		if err != nil {
			log.Errorf("send message error: %v", err)
			continue
		}
		log.Debugf("send message to %s success, msg is %s", cl.Name, msg)
	}
}

165
server/core/resources.go Normal file
View File

@@ -0,0 +1,165 @@
package core
import (
"fmt"
"polaris/ent"
"polaris/ent/episode"
"polaris/ent/history"
"polaris/log"
"polaris/pkg/notifier/message"
"polaris/pkg/torznab"
"polaris/pkg/utils"
"github.com/pkg/errors"
)
// DownloadSeasonPackage sends a whole-season torrent to the download
// client, records a history row for it, marks every episode of the season
// as downloading, and notifies the user. Returns the resource name on
// success.
func (c *Client) DownloadSeasonPackage(r1 torznab.Result, seriesId, seasonNum int) (*string, error) {
	trc, dlClient, err := c.getDownloadClient()
	if err != nil {
		return nil, errors.Wrap(err, "connect transmission")
	}
	downloadDir := c.db.GetDownloadDir()
	size := utils.AvailableSpace(downloadDir)
	if size < uint64(r1.Size) {
		log.Errorf("space available %v, space needed %v", size, r1.Size)
		return nil, errors.New("no enough space")
	}
	// Reuse downloadDir instead of re-querying the setting a second time.
	torrent, err := trc.Download(r1.Link, downloadDir)
	if err != nil {
		return nil, errors.Wrap(err, "downloading")
	}
	// Best-effort start: the return value was silently dropped before.
	if err := torrent.Start(); err != nil {
		log.Warnf("start torrent %s: %v", r1.Name, err)
	}
	series := c.db.GetMediaDetails(seriesId)
	if series == nil {
		return nil, fmt.Errorf("no tv series of id %v", seriesId)
	}
	dir := fmt.Sprintf("%s/Season %02d/", series.TargetDir, seasonNum)
	// "rec" avoids shadowing the imported "history" package.
	rec, err := c.db.SaveHistoryRecord(ent.History{
		MediaID:          seriesId,
		EpisodeID:        0, // 0 marks a season pack, not a single episode
		SourceTitle:      r1.Name,
		TargetDir:        dir,
		Status:           history.StatusRunning,
		Size:             r1.Size,
		Saved:            torrent.Save(),
		DownloadClientID: dlClient.ID,
		IndexerID:        r1.IndexerId,
	})
	if err != nil {
		return nil, errors.Wrap(err, "save record")
	}
	c.db.SetSeasonAllEpisodeStatus(seriesId, seasonNum, episode.StatusDownloading)
	c.tasks[rec.ID] = &Task{Torrent: torrent}
	c.sendMsg(fmt.Sprintf(message.BeginDownload, r1.Name))
	return &r1.Name, nil
}
// DownloadEpisodeTorrent sends a single-episode torrent to the download
// client, records a history row bound to that episode, marks the episode as
// downloading, and notifies the user. Returns the resource name on success.
func (c *Client) DownloadEpisodeTorrent(r1 torznab.Result, seriesId, seasonNum, episodeNum int) (*string, error) {
	trc, dlc, err := c.getDownloadClient()
	if err != nil {
		return nil, errors.Wrap(err, "connect transmission")
	}
	series := c.db.GetMediaDetails(seriesId)
	if series == nil {
		return nil, fmt.Errorf("no tv series of id %v", seriesId)
	}
	var ep *ent.Episode
	for _, e := range series.Episodes {
		if e.SeasonNumber == seasonNum && e.EpisodeNumber == episodeNum {
			ep = e
			break // first match wins; no need to scan the rest
		}
	}
	if ep == nil {
		return nil, errors.Errorf("no episode of season %d episode %d", seasonNum, episodeNum)
	}
	torrent, err := trc.Download(r1.Link, c.db.GetDownloadDir())
	if err != nil {
		return nil, errors.Wrap(err, "downloading")
	}
	// Best-effort start: the return value was silently dropped before.
	if err := torrent.Start(); err != nil {
		log.Warnf("start torrent %s: %v", r1.Name, err)
	}
	dir := fmt.Sprintf("%s/Season %02d/", series.TargetDir, seasonNum)
	// "rec" avoids shadowing the imported "history" package.
	rec, err := c.db.SaveHistoryRecord(ent.History{
		MediaID:          ep.MediaID,
		EpisodeID:        ep.ID,
		SourceTitle:      r1.Name,
		TargetDir:        dir,
		Status:           history.StatusRunning,
		Size:             r1.Size,
		Saved:            torrent.Save(),
		DownloadClientID: dlc.ID,
		IndexerID:        r1.IndexerId,
	})
	if err != nil {
		return nil, errors.Wrap(err, "save record")
	}
	c.db.SetEpisodeStatus(ep.ID, episode.StatusDownloading)
	c.tasks[rec.ID] = &Task{Torrent: torrent}
	c.sendMsg(fmt.Sprintf(message.BeginDownload, r1.Name))
	log.Infof("success add %s to download task", r1.Name)
	return &r1.Name, nil
}
// SearchAndDownload searches all configured indexers for the given episode of
// the series identified by seriesId and, when a match is found, downloads the
// best-ranked result via DownloadEpisodeTorrent. It returns the name of the
// resource that was queued for download.
func (c *Client) SearchAndDownload(seriesId, seasonNum, episodeNum int) (*string, error) {
	res, err := SearchTvSeries(c.db, seriesId, seasonNum, []int{episodeNum}, true, true)
	if err != nil {
		return nil, err
	}
	// SearchTvSeries errors on an empty result set, but guard anyway so a
	// future contract change there can never cause an index panic here.
	if len(res) == 0 {
		return nil, errors.New("no resource found")
	}
	r1 := res[0]
	log.Infof("found resource to download: %+v", r1)
	return c.DownloadEpisodeTorrent(r1, seriesId, seasonNum, episodeNum)
}
// DownloadMovie adds the torrent at link to the download client for movie m.
// name is the display title of the resource (falls back to the movie's
// original name when empty), size is the reported size of the resource, and
// indexerID identifies the indexer the resource came from. History and status
// bookkeeping runs asynchronously; the resolved name is returned immediately.
func (c *Client) DownloadMovie(m *ent.Media, link, name string, size int, indexerID int) (*string, error) {
	trc, dlc, err := c.getDownloadClient()
	if err != nil {
		return nil, errors.Wrap(err, "connect transmission")
	}
	torrent, err := trc.Download(link, c.db.GetDownloadDir())
	if err != nil {
		return nil, errors.Wrap(err, "downloading")
	}
	torrent.Start()
	if name == "" {
		name = m.OriginalName
	}
	go func() {
		ep, err := c.db.GetMovieDummyEpisode(m.ID)
		if err != nil {
			// Previously ignored: a failed lookup left ep nil and panicked on
			// ep.ID below. Log and bail out of the bookkeeping instead.
			log.Errorf("get movie dummy episode error: %v", err)
			return
		}
		history, err := c.db.SaveHistoryRecord(ent.History{
			MediaID:          m.ID,
			EpisodeID:        ep.ID,
			SourceTitle:      name,
			TargetDir:        m.TargetDir,
			Status:           history.StatusRunning,
			Size:             size,
			Saved:            torrent.Save(),
			DownloadClientID: dlc.ID,
			IndexerID:        indexerID,
		})
		if err != nil {
			// Without a history row there is no task id to register; do not
			// dereference the failed result.
			log.Errorf("save history error: %v", err)
			return
		}
		c.tasks[history.ID] = &Task{Torrent: torrent}
		c.db.SetEpisodeStatus(ep.ID, episode.StatusDownloading)
	}()
	c.sendMsg(fmt.Sprintf(message.BeginDownload, name))
	log.Infof("success add %s to download task", name)
	return &name, nil
}

View File

@@ -1,4 +1,4 @@
package server
package core
import (
"fmt"
@@ -11,43 +11,55 @@ import (
"polaris/pkg"
"polaris/pkg/notifier/message"
"polaris/pkg/utils"
"polaris/server/core"
"time"
"github.com/pkg/errors"
)
func (s *Server) scheduler() {
s.mustAddCron("@every 1m", s.checkTasks)
s.mustAddCron("0 0 * * * *", func() {
s.downloadTvSeries()
s.downloadMovie()
func (c *Client) addSysCron() {
c.mustAddCron("@every 1m", c.checkTasks)
c.mustAddCron("0 0 * * * *", func() {
c.downloadTvSeries()
c.downloadMovie()
})
s.mustAddCron("0 0 */12 * * *", s.checkAllSeriesNewSeason)
s.cron.Start()
c.mustAddCron("0 0 */12 * * *", c.checkAllSeriesNewSeason)
c.cron.Start()
}
func (s *Server) mustAddCron(spec string, cmd func()) {
if err := s.cron.AddFunc(spec, cmd); err != nil {
func (c *Client) mustAddCron(spec string, cmd func()) {
if err := c.cron.AddFunc(spec, cmd); err != nil {
log.Errorf("add func error: %v", err)
panic(err)
}
}
func (s *Server) checkTasks() {
func (c *Client) checkTasks() {
log.Debug("begin check tasks...")
for id, t := range s.tasks {
for id, t := range c.tasks {
if !t.Exists() {
log.Infof("task no longer exists: %v", id)
delete(s.tasks, id)
delete(c.tasks, id)
continue
}
log.Infof("task (%s) percentage done: %d%%", t.Name(), t.Progress())
if t.Progress() == 100 {
r := c.db.GetHistory(id)
if r.Status == history.StatusSuccess {
//task already success, check seed ratio
torrent := c.tasks[id]
ok := c.isSeedRatioLimitReached(r.IndexerID, torrent)
if ok {
log.Infof("torrent file seed ratio reached, remove: %v", torrent.Name())
torrent.Remove()
delete(c.tasks, id)
} else {
log.Infof("torrent file still sedding: %v", torrent.Name())
}
continue
}
log.Infof("task is done: %v", t.Name())
s.sendMsg(fmt.Sprintf(message.DownloadComplete, t.Name()))
c.sendMsg(fmt.Sprintf(message.DownloadComplete, t.Name()))
go func() {
if err := s.moveCompletedTask(id); err != nil {
if err := c.moveCompletedTask(id); err != nil {
log.Infof("post tasks for id %v fail: %v", id, err)
}
}()
@@ -55,20 +67,20 @@ func (s *Server) checkTasks() {
}
}
func (s *Server) moveCompletedTask(id int) (err1 error) {
torrent := s.tasks[id]
r := s.db.GetHistory(id)
func (c *Client) moveCompletedTask(id int) (err1 error) {
torrent := c.tasks[id]
r := c.db.GetHistory(id)
if r.Status == history.StatusUploading {
log.Infof("task %d is already uploading, skip", id)
return nil
}
s.db.SetHistoryStatus(r.ID, history.StatusUploading)
c.db.SetHistoryStatus(r.ID, history.StatusUploading)
seasonNum, err := utils.SeasonId(r.TargetDir)
if err != nil {
log.Errorf("no season id: %v", r.TargetDir)
seasonNum = -1
}
downloadclient, err := s.db.GetDownloadClient(r.DownloadClientID)
downloadclient, err := c.db.GetDownloadClient(r.DownloadClientID)
if err != nil {
log.Errorf("get task download client error: %v, use default one", err)
downloadclient = &ent.DownloadClients{RemoveCompletedDownloads: true, RemoveFailedDownloads: true}
@@ -78,70 +90,69 @@ func (s *Server) moveCompletedTask(id int) (err1 error) {
defer func() {
if err1 != nil {
s.db.SetHistoryStatus(r.ID, history.StatusFail)
c.db.SetHistoryStatus(r.ID, history.StatusFail)
if r.EpisodeID != 0 {
s.db.SetEpisodeStatus(r.EpisodeID, episode.StatusMissing)
c.db.SetEpisodeStatus(r.EpisodeID, episode.StatusMissing)
} else {
s.db.SetSeasonAllEpisodeStatus(r.MediaID, seasonNum, episode.StatusMissing)
c.db.SetSeasonAllEpisodeStatus(r.MediaID, seasonNum, episode.StatusMissing)
}
s.sendMsg(fmt.Sprintf(message.ProcessingFailed, err))
c.sendMsg(fmt.Sprintf(message.ProcessingFailed, err))
if downloadclient.RemoveFailedDownloads {
log.Debugf("task failed, remove failed torrent and files related")
delete(s.tasks, r.ID)
torrent.Remove()
delete(c.tasks, r.ID)
torrent.Remove()
}
}
}()
series := s.db.GetMediaDetails(r.MediaID)
series := c.db.GetMediaDetails(r.MediaID)
if series == nil {
return nil
}
st := s.db.GetStorage(series.StorageID)
st := c.db.GetStorage(series.StorageID)
log.Infof("move task files to target dir: %v", r.TargetDir)
stImpl, err := s.getStorage(st.ID, series.MediaType)
stImpl, err := c.getStorage(st.ID, series.MediaType)
if err != nil {
return err
}
//如果种子是路径,则会把路径展开,只移动文件,类似 move dir/* dir2/, 如果种子是文件,则会直接移动文件,类似 move file dir/
if err := stImpl.Copy(filepath.Join(s.db.GetDownloadDir(), torrentName), r.TargetDir); err != nil {
if err := stImpl.Copy(filepath.Join(c.db.GetDownloadDir(), torrentName), r.TargetDir); err != nil {
return errors.Wrap(err, "move file")
}
// .plexmatch file
if err := s.writePlexmatch(r.MediaID, r.EpisodeID, r.TargetDir, torrentName); err != nil {
if err := c.writePlexmatch(r.MediaID, r.EpisodeID, r.TargetDir, torrentName); err != nil {
log.Errorf("create .plexmatch file error: %v", err)
}
s.db.SetHistoryStatus(r.ID, history.StatusSuccess)
c.db.SetHistoryStatus(r.ID, history.StatusSuccess)
if r.EpisodeID != 0 {
s.db.SetEpisodeStatus(r.EpisodeID, episode.StatusDownloaded)
c.db.SetEpisodeStatus(r.EpisodeID, episode.StatusDownloaded)
} else {
s.db.SetSeasonAllEpisodeStatus(r.MediaID, seasonNum, episode.StatusDownloaded)
c.db.SetSeasonAllEpisodeStatus(r.MediaID, seasonNum, episode.StatusDownloaded)
}
s.sendMsg(fmt.Sprintf(message.ProcessingComplete, torrentName))
c.sendMsg(fmt.Sprintf(message.ProcessingComplete, torrentName))
//判断是否需要删除本地文件
if downloadclient.RemoveCompletedDownloads {
ok := c.isSeedRatioLimitReached(r.IndexerID, torrent)
if downloadclient.RemoveCompletedDownloads && ok {
log.Debugf("download complete,remove torrent and files related")
delete(s.tasks, r.ID)
delete(c.tasks, r.ID)
torrent.Remove()
}
log.Infof("move downloaded files to target dir success, file: %v, target dir: %v", torrentName, r.TargetDir)
return nil
}
func (s *Server) checkDownloadedSeriesFiles(m *ent.Media) error {
func (c *Client) CheckDownloadedSeriesFiles(m *ent.Media) error {
if m.MediaType != media.MediaTypeTv {
return nil
}
log.Infof("check files in directory: %s", m.TargetDir)
var storageImpl, err = s.getStorage(m.StorageID, media.MediaTypeTv)
var storageImpl, err = c.getStorage(m.StorageID, media.MediaTypeTv)
if err != nil {
return err
}
@@ -169,12 +180,12 @@ func (s *Server) checkDownloadedSeriesFiles(m *ent.Media) error {
continue
}
log.Infof("found match, season num %d, episode num %d", seNum, epNum)
ep, err := s.db.GetEpisode(m.ID, seNum, epNum)
ep, err := c.db.GetEpisode(m.ID, seNum, epNum)
if err != nil {
log.Error("update episode: %v", err)
continue
}
err = s.db.SetEpisodeStatus(ep.ID, episode.StatusDownloaded)
err = c.db.SetEpisodeStatus(ep.ID, episode.StatusDownloaded)
if err != nil {
log.Error("update episode: %v", err)
continue
@@ -191,27 +202,20 @@ type Task struct {
pkg.Torrent
}
func (s *Server) downloadTvSeries() {
func (c *Client) downloadTvSeries() {
log.Infof("begin check all tv series resources")
allSeries := s.db.GetMediaWatchlist(media.MediaTypeTv)
allSeries := c.db.GetMediaWatchlist(media.MediaTypeTv)
for _, series := range allSeries {
tvDetail := s.db.GetMediaDetails(series.ID)
tvDetail := c.db.GetMediaDetails(series.ID)
for _, ep := range tvDetail.Episodes {
if !series.DownloadHistoryEpisodes { //设置不下载历史已播出剧集,只下载将来剧集
t, err := time.Parse("2006-01-02", ep.AirDate)
if err != nil {
log.Error("air date not known, skip: %v", ep.Title)
continue
}
if series.CreatedAt.Sub(t) > 24*time.Hour { //剧集在加入watchlist之前不去下载
continue
}
if !ep.Monitored { //未监控的剧集不去下载
continue
}
if ep.Status != episode.StatusMissing { //已经下载的不去下载
continue
}
name, err := s.searchAndDownload(series.ID, ep.SeasonNumber, ep.EpisodeNumber)
name, err := c.SearchAndDownload(series.ID, ep.SeasonNumber, ep.EpisodeNumber)
if err != nil {
log.Infof("cannot find resource to download for %s: %v", ep.Title, err)
} else {
@@ -223,12 +227,12 @@ func (s *Server) downloadTvSeries() {
}
}
func (s *Server) downloadMovie() {
func (c *Client) downloadMovie() {
log.Infof("begin check all movie resources")
allSeries := s.db.GetMediaWatchlist(media.MediaTypeMovie)
allSeries := c.db.GetMediaWatchlist(media.MediaTypeMovie)
for _, series := range allSeries {
detail := s.db.GetMediaDetails(series.ID)
detail := c.db.GetMediaDetails(series.ID)
if len(detail.Episodes) == 0 {
log.Errorf("no related dummy episode: %v", detail.NameEn)
continue
@@ -238,32 +242,32 @@ func (s *Server) downloadMovie() {
continue
}
if err := s.downloadMovieSingleEpisode(ep); err != nil {
if err := c.downloadMovieSingleEpisode(ep); err != nil {
log.Errorf("download movie error: %v", err)
}
}
}
func (s *Server) downloadMovieSingleEpisode(ep *ent.Episode) error {
trc, dlc, err := s.getDownloadClient()
func (c *Client) downloadMovieSingleEpisode(ep *ent.Episode) error {
trc, dlc, err := c.getDownloadClient()
if err != nil {
return errors.Wrap(err, "connect transmission")
}
res, err := core.SearchMovie(s.db, ep.MediaID, true)
res, err := SearchMovie(c.db, ep.MediaID, true, true)
if err != nil {
return errors.Wrap(err, "search movie")
}
r1 := res[0]
log.Infof("begin download torrent resource: %v", r1.Name)
torrent, err := trc.Download(r1.Link, s.db.GetDownloadDir())
torrent, err := trc.Download(r1.Link, c.db.GetDownloadDir())
if err != nil {
return errors.Wrap(err, "downloading")
}
torrent.Start()
history, err := s.db.SaveHistoryRecord(ent.History{
history, err := c.db.SaveHistoryRecord(ent.History{
MediaID: ep.MediaID,
EpisodeID: ep.ID,
SourceTitle: r1.Name,
@@ -272,41 +276,42 @@ func (s *Server) downloadMovieSingleEpisode(ep *ent.Episode) error {
Size: r1.Size,
Saved: torrent.Save(),
DownloadClientID: dlc.ID,
IndexerID: r1.IndexerId,
})
if err != nil {
log.Errorf("save history error: %v", err)
}
s.tasks[history.ID] = &Task{Torrent: torrent}
c.tasks[history.ID] = &Task{Torrent: torrent}
s.db.SetEpisodeStatus(ep.ID, episode.StatusDownloading)
c.db.SetEpisodeStatus(ep.ID, episode.StatusDownloading)
return nil
}
func (s *Server) checkAllSeriesNewSeason() {
func (c *Client) checkAllSeriesNewSeason() {
log.Infof("begin checking series all new season")
allSeries := s.db.GetMediaWatchlist(media.MediaTypeTv)
allSeries := c.db.GetMediaWatchlist(media.MediaTypeTv)
for _, series := range allSeries {
err := s.checkSeiesNewSeason(series)
err := c.checkSeiesNewSeason(series)
if err != nil {
log.Errorf("check series new season error: series name %v, error: %v", series.NameEn, err)
}
}
}
func (s *Server) checkSeiesNewSeason(media *ent.Media) error {
d, err := s.MustTMDB().GetTvDetails(media.TmdbID, s.language)
func (c *Client) checkSeiesNewSeason(media *ent.Media) error {
d, err := c.MustTMDB().GetTvDetails(media.TmdbID, c.language)
if err != nil {
return errors.Wrap(err, "tmdb")
}
lastsSason := d.NumberOfSeasons
seasonDetail, err := s.MustTMDB().GetSeasonDetails(media.TmdbID, lastsSason, s.language)
seasonDetail, err := c.MustTMDB().GetSeasonDetails(media.TmdbID, lastsSason, c.language)
if err != nil {
return errors.Wrap(err, "tmdb season")
}
for _, ep := range seasonDetail.Episodes {
epDb, err := s.db.GetEpisode(media.ID, ep.SeasonNumber, ep.EpisodeNumber)
epDb, err := c.db.GetEpisode(media.ID, ep.SeasonNumber, ep.EpisodeNumber)
if err != nil {
if ent.IsNotFound(err) {
log.Infof("add new episode: %+v", ep)
@@ -318,15 +323,29 @@ func (s *Server) checkSeiesNewSeason(media *ent.Media) error {
Overview: ep.Overview,
AirDate: ep.AirDate,
Status: episode.StatusMissing,
Monitored: true,
}
s.db.SaveEposideDetail2(episode)
c.db.SaveEposideDetail2(episode)
}
} else { //update episode
if ep.Name != epDb.Title || ep.Overview != epDb.Overview || ep.AirDate != epDb.AirDate {
log.Infof("update new episode: %+v", ep)
s.db.UpdateEpiode2(epDb.ID, ep.Name, ep.Overview, ep.AirDate)
c.db.UpdateEpiode2(epDb.ID, ep.Name, ep.Overview, ep.AirDate)
}
}
}
return nil
}
// isSeedRatioLimitReached reports whether torrent t has seeded at least as
// much as the ratio configured on the indexer identified by indexId, i.e.
// whether it is safe to remove the torrent from the download client.
func (c *Client) isSeedRatioLimitReached(indexId int, t pkg.Torrent) bool {
indexer, err := c.db.GetIndexer(indexId)
if err != nil {
// Unknown indexer: no ratio requirement applies, treat as reached.
return true
}
currentRatio := t.SeedRatio()
if currentRatio == nil {
log.Warnf("get current seed ratio error, current ratio is nil")
// Ratio unavailable: only say "reached" when the indexer demands none.
return indexer.SeedRatio == 0
}
// NOTE(review): a configured SeedRatio of 0 makes this trivially true,
// which appears to be the intended "no seeding required" default.
return *currentRatio >= float64(indexer.SeedRatio)
}

View File

@@ -7,6 +7,7 @@ import (
"polaris/pkg/metadata"
"polaris/pkg/torznab"
"polaris/pkg/utils"
"slices"
"sort"
"strconv"
"strings"
@@ -15,8 +16,64 @@ import (
"github.com/pkg/errors"
)
func SearchSeasonPackage(db1 *db.Client, seriesId, seasonNum int, checkResolution bool) ([]torznab.Result, error) {
return SearchEpisode(db1, seriesId, seasonNum, -1, checkResolution)
// SearchTvSeries queries all enabled torznab indexers (by both English and
// Chinese series names) for resources matching the given series and season.
// episodes selects specific episode numbers; an empty/nil slice means a whole
// season pack is wanted. checkResolution filters results to the series'
// configured resolution, and checkFileSize applies the series' optional
// min/max size limiter. Results are deduplicated; an error is returned when
// nothing survives filtering.
func SearchTvSeries(db1 *db.Client, seriesId, seasonNum int, episodes []int, checkResolution bool, checkFileSize bool) ([]torznab.Result, error) {
series := db1.GetMediaDetails(seriesId)
if series == nil {
return nil, fmt.Errorf("no tv series of id %v", seriesId)
}
log.Debugf("check tv series %s, season %d, episode %v", series.NameEn, seasonNum, episodes)
// Search under both names and merge; dedup happens after filtering.
res := searchWithTorznab(db1, series.NameEn)
resCn := searchWithTorznab(db1, series.NameCn)
res = append(res, resCn...)
var filtered []torznab.Result
for _, r := range res {
//log.Infof("torrent resource: %+v", r)
meta := metadata.ParseTv(r.Name)
if meta == nil { //cannot parse name
continue
}
if !isNumberedSeries(series) && meta.Season != seasonNum { //do not check season on series that only rely on episode number
continue
}
if isNumberedSeries(series) && len(episodes) == 0 {
//numbered series have no meaningful season packs; should not want season
continue
}
if len(episodes) > 0 && !slices.Contains(episodes, meta.Episode) { //not season pack, but episode number not wanted
continue
} else if len(episodes) == 0 && !meta.IsSeasonPack { //want season pack, but not season pack
continue
}
if checkResolution && meta.Resolution != series.Resolution.String() {
continue
}
// Either parsed name must be acceptable against its series counterpart.
if !utils.IsNameAcceptable(meta.NameEn, series.NameEn) && !utils.IsNameAcceptable(meta.NameCn, series.NameCn) {
continue
}
if checkFileSize && series.Limiter != nil {
if series.Limiter.SizeMin > 0 && r.Size < series.Limiter.SizeMin {
//min size not satisfied
continue
}
if series.Limiter.SizeMax > 0 && r.Size > series.Limiter.SizeMax {
//max size not satisfied
continue
}
}
filtered = append(filtered, r)
}
if len(filtered) == 0 {
return nil, errors.New("no resource found")
}
filtered = dedup(filtered)
return filtered, nil
}
func isNumberedSeries(detail *db.MediaDetails) bool {
@@ -34,55 +91,7 @@ func isNumberedSeries(detail *db.MediaDetails) bool {
return hasSeason2 && !season2HasEpisode1 //only one 1st episode
}
func SearchEpisode(db1 *db.Client, seriesId, seasonNum, episodeNum int, checkResolution bool) ([]torznab.Result, error) {
series := db1.GetMediaDetails(seriesId)
if series == nil {
return nil, fmt.Errorf("no tv series of id %v", seriesId)
}
res := searchWithTorznab(db1, series.NameEn)
resCn := searchWithTorznab(db1, series.NameCn)
res = append(res, resCn...)
var filtered []torznab.Result
for _, r := range res {
//log.Infof("torrent resource: %+v", r)
meta := metadata.ParseTv(r.Name)
if meta == nil { //cannot parse name
continue
}
if !isNumberedSeries(series) { //do not check season on series that only rely on episode number
if meta.Season != seasonNum {
continue
}
}
if isNumberedSeries(series) && episodeNum == -1 {
//should not want season
continue
}
if episodeNum != -1 && meta.Episode != episodeNum { //not season pack, episode number equals
continue
}else if episodeNum == -1 && !meta.IsSeasonPack { //want season pack, but not season pack
continue
}
if checkResolution && meta.Resolution != series.Resolution.String() {
continue
}
if !utils.IsNameAcceptable(meta.NameEn, series.NameEn) && !utils.IsNameAcceptable(meta.NameCn, series.NameCn) {
continue
}
filtered = append(filtered, r)
}
if len(filtered) == 0 {
return nil, errors.New("no resource found")
}
return filtered, nil
}
func SearchMovie(db1 *db.Client, movieId int, checkResolution bool) ([]torznab.Result, error) {
func SearchMovie(db1 *db.Client, movieId int, checkResolution bool, checkFileSize bool) ([]torznab.Result, error) {
movieDetail := db1.GetMediaDetails(movieId)
if movieDetail == nil {
return nil, errors.New("no media found of id")
@@ -105,6 +114,18 @@ func SearchMovie(db1 *db.Client, movieId int, checkResolution bool) ([]torznab.R
if checkResolution && meta.Resolution != movieDetail.Resolution.String() {
continue
}
if checkFileSize && movieDetail.Limiter != nil {
if movieDetail.Limiter.SizeMin > 0 && r.Size < movieDetail.Limiter.SizeMin {
//min size not satified
continue
}
if movieDetail.Limiter.SizeMax > 0 && r.Size > movieDetail.Limiter.SizeMax {
//max size not satified
continue
}
}
ss := strings.Split(movieDetail.AirDate, "-")[0]
year, _ := strconv.Atoi(ss)
if meta.Year != year && meta.Year != year-1 && meta.Year != year+1 { //year not match
@@ -117,6 +138,7 @@ func SearchMovie(db1 *db.Client, movieId int, checkResolution bool) ([]torznab.R
if len(filtered) == 0 {
return nil, errors.New("no resource found")
}
filtered = dedup(filtered)
return filtered, nil
@@ -130,11 +152,14 @@ func searchWithTorznab(db *db.Client, q string) []torznab.Result {
var wg sync.WaitGroup
for _, tor := range allTorznab {
if tor.Disabled {
continue
}
wg.Add(1)
go func() {
log.Debugf("search torznab %v with %v", tor.Name, q)
defer wg.Done()
resp, err := torznab.Search(tor.URL, tor.ApiKey, q)
resp, err := torznab.Search(tor, tor.ApiKey, q)
if err != nil {
log.Errorf("search %s error: %v", tor.Name, err)
return
@@ -152,11 +177,53 @@ func searchWithTorznab(db *db.Client, q string) []torznab.Result {
res = append(res, result...)
}
sort.Slice(res, func(i, j int) bool {
//res = dedup(res)
sort.SliceStable(res, func(i, j int) bool { //先按做种人数排序
var s1 = res[i]
var s2 = res[j]
return s1.Seeders > s2.Seeders
})
sort.SliceStable(res, func(i, j int) bool { //再按优先级排序,优先级高的种子排前面
var s1 = res[i]
var s2 = res[j]
return s1.Priority > s2.Priority
})
//pt资源中同一indexer内部优先下载free的资源
sort.SliceStable(res, func(i, j int) bool {
var s1 = res[i]
var s2 = res[j]
if s1.IndexerId == s2.IndexerId && s1.IsPrivate {
return s1.DownloadVolumeFactor < s2.DownloadVolumeFactor
}
return false
})
//同一indexer内部如果下载消耗一样则优先下载上传奖励较多的
sort.SliceStable(res, func(i, j int) bool {
var s1 = res[i]
var s2 = res[j]
if s1.IndexerId == s2.IndexerId && s1.IsPrivate && s1.DownloadVolumeFactor == s2.DownloadVolumeFactor {
return s1.UploadVolumeFactor > s2.UploadVolumeFactor
}
return false
})
return res
}
// dedup removes duplicate search results, keeping the first occurrence and
// preserving input order. Two results are considered identical when name,
// source, seeder count and peer count all match.
func dedup(list []torznab.Result) []torznab.Result {
	res := make([]torznab.Result, 0, len(list))
	// Pre-size for the common case of few or no duplicates.
	seen := make(map[string]bool, len(list))
	for _, r := range list {
		// Separate the fields so adjacent values cannot collide across field
		// boundaries (e.g. name "a"+source "b1" vs name "ab"+source "1").
		key := fmt.Sprintf("%s|%s|%d|%d", r.Name, r.Source, r.Seeders, r.Peers)
		if seen[key] {
			continue
		}
		seen[key] = true
		res = append(res, r)
	}
	return res
}

View File

@@ -1,62 +0,0 @@
package server
import (
"bytes"
"fmt"
"path/filepath"
"polaris/db"
"polaris/ent/media"
"polaris/log"
"github.com/pkg/errors"
)
func (s *Server) writePlexmatch(seriesId int, episodeId int, targetDir, name string) error {
if !s.plexmatchEnabled() {
return nil
}
series, err := s.db.GetMedia(seriesId)
if err != nil {
return err
}
if series.MediaType != media.MediaTypeTv {
return nil
}
st, err := s.getStorage(series.StorageID, media.MediaTypeTv)
if err != nil {
return errors.Wrap(err, "get storage")
}
//series plexmatch file
_, err = st.ReadFile(filepath.Join(series.TargetDir, ".plexmatch"))
if err != nil {
//create new
log.Warnf(".plexmatch file not found, create new one: %s", series.NameEn)
if err := st.WriteFile(filepath.Join(series.TargetDir, ".plexmatch"),
[]byte(fmt.Sprintf("tmdbid: %d\n",series.TmdbID))); err != nil {
return errors.Wrap(err, "series plexmatch")
}
}
//season plexmatch file
ep, err := s.db.GetEpisodeByID(episodeId)
if err != nil {
return errors.Wrap(err, "query episode")
}
buff := bytes.Buffer{}
seasonPlex := filepath.Join(targetDir, ".plexmatch")
data, err := st.ReadFile(seasonPlex)
if err != nil {
log.Infof("read season plexmatch: %v", err)
} else {
buff.Write(data)
}
buff.WriteString(fmt.Sprintf("\nep: %d: %s\n", ep.EpisodeNumber, name))
log.Infof("write season plexmatch file content: %s", buff.String())
return st.WriteFile(seasonPlex, buff.Bytes())
}
func (s *Server) plexmatchEnabled() bool {
return s.db.GetSetting(db.SettingPlexMatchEnabled) == "true"
}

View File

@@ -2,8 +2,6 @@ package server
import (
"polaris/ent"
"polaris/log"
"polaris/pkg/notifier"
"strconv"
"github.com/gin-gonic/gin"
@@ -44,32 +42,3 @@ func (s *Server) AddNotificationClient(c *gin.Context) (interface{}, error) {
}
return nil, nil
}
func (s *Server) sendMsg(msg string) {
clients, err := s.db.GetAllNotificationClients2()
if err != nil {
log.Errorf("query notification clients: %v", err)
return
}
for _, cl := range clients {
if !cl.Enabled {
continue
}
handler, ok := notifier.Gethandler(cl.Service)
if !ok {
log.Errorf("no notification implementation of service %s", cl.Service)
continue
}
noCl, err := handler(cl.Settings)
if err != nil {
log.Errorf("handle setting for name %s error: %v", cl.Name, err)
continue
}
err = noCl.SendMsg(msg)
if err != nil {
log.Errorf("send message error: %v", err)
continue
}
log.Debugf("send message to %s success, msg is %s", cl.Name, msg)
}
}

View File

@@ -2,14 +2,9 @@ package server
import (
"fmt"
"polaris/ent"
"polaris/ent/episode"
"polaris/ent/history"
"polaris/ent/media"
"polaris/log"
"polaris/pkg/notifier/message"
"polaris/pkg/torznab"
"polaris/pkg/utils"
"polaris/server/core"
"github.com/gin-gonic/gin"
@@ -18,122 +13,17 @@ import (
func (s *Server) searchAndDownloadSeasonPackage(seriesId, seasonNum int) (*string, error) {
res, err := core.SearchSeasonPackage(s.db, seriesId, seasonNum, true)
res, err := core.SearchTvSeries(s.db, seriesId, seasonNum, nil, true, true)
if err != nil {
return nil, err
}
r1 := res[0]
log.Infof("found resource to download: %+v", r1)
return s.downloadSeasonPackage(r1, seriesId, seasonNum)
return s.core.DownloadSeasonPackage(r1, seriesId, seasonNum)
}
func (s *Server) downloadSeasonPackage(r1 torznab.Result, seriesId, seasonNum int) (*string, error) {
trc, dlClient, err := s.getDownloadClient()
if err != nil {
return nil, errors.Wrap(err, "connect transmission")
}
downloadDir := s.db.GetDownloadDir()
size := utils.AvailableSpace(downloadDir)
if size < uint64(r1.Size) {
log.Errorf("space available %v, space needed %v", size, r1.Size)
return nil, errors.New("no enough space")
}
torrent, err := trc.Download(r1.Link, s.db.GetDownloadDir())
if err != nil {
return nil, errors.Wrap(err, "downloading")
}
torrent.Start()
series := s.db.GetMediaDetails(seriesId)
if series == nil {
return nil, fmt.Errorf("no tv series of id %v", seriesId)
}
dir := fmt.Sprintf("%s/Season %02d/", series.TargetDir, seasonNum)
history, err := s.db.SaveHistoryRecord(ent.History{
MediaID: seriesId,
EpisodeID: 0,
SourceTitle: r1.Name,
TargetDir: dir,
Status: history.StatusRunning,
Size: r1.Size,
Saved: torrent.Save(),
DownloadClientID: dlClient.ID,
})
if err != nil {
return nil, errors.Wrap(err, "save record")
}
s.db.SetSeasonAllEpisodeStatus(seriesId, seasonNum, episode.StatusDownloading)
s.tasks[history.ID] = &Task{Torrent: torrent}
s.sendMsg(fmt.Sprintf(message.BeginDownload, r1.Name))
return &r1.Name, nil
}
func (s *Server) downloadEpisodeTorrent(r1 torznab.Result, seriesId, seasonNum, episodeNum int) (*string, error) {
trc, dlc, err := s.getDownloadClient()
if err != nil {
return nil, errors.Wrap(err, "connect transmission")
}
series := s.db.GetMediaDetails(seriesId)
if series == nil {
return nil, fmt.Errorf("no tv series of id %v", seriesId)
}
var ep *ent.Episode
for _, e := range series.Episodes {
if e.SeasonNumber == seasonNum && e.EpisodeNumber == episodeNum {
ep = e
}
}
if ep == nil {
return nil, errors.Errorf("no episode of season %d episode %d", seasonNum, episodeNum)
}
torrent, err := trc.Download(r1.Link, s.db.GetDownloadDir())
if err != nil {
return nil, errors.Wrap(err, "downloading")
}
torrent.Start()
dir := fmt.Sprintf("%s/Season %02d/", series.TargetDir, seasonNum)
history, err := s.db.SaveHistoryRecord(ent.History{
MediaID: ep.MediaID,
EpisodeID: ep.ID,
SourceTitle: r1.Name,
TargetDir: dir,
Status: history.StatusRunning,
Size: r1.Size,
Saved: torrent.Save(),
DownloadClientID: dlc.ID,
})
if err != nil {
return nil, errors.Wrap(err, "save record")
}
s.db.SetEpisodeStatus(ep.ID, episode.StatusDownloading)
s.tasks[history.ID] = &Task{Torrent: torrent}
s.sendMsg(fmt.Sprintf(message.BeginDownload, r1.Name))
log.Infof("success add %s to download task", r1.Name)
return &r1.Name, nil
}
func (s *Server) searchAndDownload(seriesId, seasonNum, episodeNum int) (*string, error) {
res, err := core.SearchEpisode(s.db, seriesId, seasonNum, episodeNum, true)
if err != nil {
return nil, err
}
r1 := res[0]
log.Infof("found resource to download: %+v", r1)
return s.downloadEpisodeTorrent(r1, seriesId, seasonNum, episodeNum)
}
type searchAndDownloadIn struct {
ID int `json:"id" binding:"required"`
Season int `json:"season"`
@@ -156,16 +46,16 @@ func (s *Server) SearchAvailableTorrents(c *gin.Context) (interface{}, error) {
if in.Episode == 0 {
//search season package
log.Infof("search series season package S%02d", in.Season)
res, err = core.SearchSeasonPackage(s.db, in.ID, in.Season, false)
res, err = core.SearchTvSeries(s.db, in.ID, in.Season, nil, false, false)
if err != nil {
return nil, errors.Wrap(err, "search season package")
}
} else {
log.Infof("search series episode S%02dE%02d", in.Season, in.Episode)
res, err = core.SearchEpisode(s.db, in.ID, in.Season, in.Episode, false)
res, err = core.SearchTvSeries(s.db, in.ID, in.Season, []int{in.Episode}, false, false)
if err != nil {
if err.Error() == "no resource found" {
return []TorznabSearchResult{}, nil
return []string{}, nil
}
return nil, errors.Wrap(err, "search episode")
}
@@ -173,25 +63,15 @@ func (s *Server) SearchAvailableTorrents(c *gin.Context) (interface{}, error) {
}
} else {
log.Info("search movie %d", in.ID)
res, err = core.SearchMovie(s.db, in.ID, false)
res, err = core.SearchMovie(s.db, in.ID, false, false)
if err != nil {
if err.Error() == "no resource found" {
return []TorznabSearchResult{}, nil
return []string{}, nil
}
return nil, err
}
}
var searchResults []TorznabSearchResult
for _, r := range res {
searchResults = append(searchResults, TorznabSearchResult{
Name: r.Name,
Size: r.Size,
Seeders: r.Seeders,
Peers: r.Peers,
Link: r.Link,
})
}
return searchResults, nil
return res, nil
}
func (s *Server) SearchTvAndDownload(c *gin.Context) (interface{}, error) {
@@ -211,7 +91,7 @@ func (s *Server) SearchTvAndDownload(c *gin.Context) (interface{}, error) {
name = *name1
} else {
log.Infof("season episode search")
name1, err := s.searchAndDownload(in.ID, in.Season, in.Episode)
name1, err := s.core.SearchAndDownload(in.ID, in.Season, in.Episode)
if err != nil {
return nil, errors.Wrap(err, "download")
}
@@ -223,19 +103,11 @@ func (s *Server) SearchTvAndDownload(c *gin.Context) (interface{}, error) {
}, nil
}
type TorznabSearchResult struct {
Name string `json:"name"`
Size int `json:"size"`
Link string `json:"link"`
Seeders int `json:"seeders"`
Peers int `json:"peers"`
Source string `json:"source"`
}
type downloadTorrentIn struct {
MediaID int `json:"id" binding:"required"`
Season int `json:"season"`
Episode int `json:"episode"`
TorznabSearchResult
torznab.Result
}
func (s *Server) DownloadTorrent(c *gin.Context) (interface{}, error) {
@@ -257,54 +129,17 @@ func (s *Server) DownloadTorrent(c *gin.Context) (interface{}, error) {
name = fmt.Sprintf("%v S%02d", m.OriginalName, in.Season)
}
res := torznab.Result{Name: name, Link: in.Link, Size: in.Size}
return s.downloadSeasonPackage(res, in.MediaID, in.Season)
return s.core.DownloadSeasonPackage(res, in.MediaID, in.Season)
}
name := in.Name
if name == "" {
name = fmt.Sprintf("%v S%02dE%02d", m.OriginalName, in.Season, in.Episode)
}
res := torznab.Result{Name: name, Link: in.Link, Size: in.Size}
return s.downloadEpisodeTorrent(res, in.MediaID, in.Season, in.Episode)
res := torznab.Result{Name: name, Link: in.Link, Size: in.Size, IndexerId: in.IndexerId}
return s.core.DownloadEpisodeTorrent(res, in.MediaID, in.Season, in.Episode)
} else {
//movie
trc, dlc, err := s.getDownloadClient()
if err != nil {
return nil, errors.Wrap(err, "connect transmission")
}
torrent, err := trc.Download(in.Link, s.db.GetDownloadDir())
if err != nil {
return nil, errors.Wrap(err, "downloading")
}
torrent.Start()
name := in.Name
if name == "" {
name = m.OriginalName
}
go func() {
ep, _ := s.db.GetMovieDummyEpisode(m.ID)
history, err := s.db.SaveHistoryRecord(ent.History{
MediaID: m.ID,
EpisodeID: ep.ID,
SourceTitle: name,
TargetDir: m.TargetDir,
Status: history.StatusRunning,
Size: in.Size,
Saved: torrent.Save(),
DownloadClientID: dlc.ID,
})
if err != nil {
log.Errorf("save history error: %v", err)
}
s.tasks[history.ID] = &Task{Torrent: torrent}
s.db.SetEpisodeStatus(ep.ID, episode.StatusDownloading)
}()
s.sendMsg(fmt.Sprintf(message.BeginDownload, in.Name))
log.Infof("success add %s to download task", in.Name)
return in.Name, nil
return s.core.DownloadMovie(m, in.Link, in.Name, in.Size, in.IndexerId)
}
}

View File

@@ -8,13 +8,12 @@ import (
"polaris/db"
"polaris/log"
"polaris/pkg/tmdb"
"polaris/pkg/transmission"
"polaris/server/core"
"polaris/ui"
ginzap "github.com/gin-contrib/zap"
"github.com/gin-contrib/static"
"github.com/robfig/cron"
"github.com/gin-gonic/gin"
"github.com/pkg/errors"
@@ -22,27 +21,24 @@ import (
func NewServer(db *db.Client) *Server {
r := gin.Default()
return &Server{
r: r,
db: db,
cron: cron.New(),
tasks: make(map[int]*Task),
s := &Server{
r: r,
db: db,
}
s.core = core.NewClient(db, s.language)
return s
}
type Server struct {
r *gin.Engine
db *db.Client
cron *cron.Cron
core *core.Client
language string
tasks map[int]*Task
jwtSerect string
}
func (s *Server) Serve() error {
s.scheduler()
s.reloadTasks()
s.restoreProxy()
s.core.Init()
s.jwtSerect = s.db.GetSetting(db.JwtSerectKey)
//st, _ := fs.Sub(ui.Web, "build/web")
@@ -71,6 +67,7 @@ func (s *Server) Serve() error {
setting.GET("/about", HttpHandler(s.About))
setting.POST("/parse/tv", HttpHandler(s.ParseTv))
setting.POST("/parse/movie", HttpHandler(s.ParseMovie))
setting.POST("/monitoring", HttpHandler(s.ChangeEpisodeMonitoring))
}
activity := api.Group("/activity")
{
@@ -129,9 +126,10 @@ func (s *Server) Serve() error {
func (s *Server) TMDB() (*tmdb.Client, error) {
api := s.db.GetSetting(db.SettingTmdbApiKey)
if api == "" {
return nil, errors.New("tmdb api not set")
return nil, errors.New("TMDB apiKey not set")
}
return tmdb.NewClient(api)
proxy := s.db.GetSetting(db.SettingProxy)
return tmdb.NewClient(api, proxy)
}
func (s *Server) MustTMDB() *tmdb.Client {
@@ -142,22 +140,6 @@ func (s *Server) MustTMDB() *tmdb.Client {
return t
}
func (s *Server) reloadTasks() {
runningTasks := s.db.GetRunningHistories()
if len(runningTasks) == 0 {
return
}
for _, t := range runningTasks {
log.Infof("reloading task: %d %s", t.ID, t.SourceTitle)
torrent, err := transmission.ReloadTorrent(t.Saved)
if err != nil {
log.Errorf("relaod task %s failed: %v", t.SourceTitle, err)
continue
}
s.tasks[t.ID] = &Task{Torrent: torrent}
}
}
func (s *Server) proxyPosters(c *gin.Context) {
remote, _ := url.Parse("https://image.tmdb.org")
proxy := httputil.NewSingleHostReverseProxy(remote)

View File

@@ -1,9 +1,8 @@
package server
import (
"encoding/json"
"fmt"
"net/http"
"net/url"
"polaris/db"
"polaris/ent"
"polaris/log"
@@ -54,29 +53,10 @@ func (s *Server) SetSetting(c *gin.Context) (interface{}, error) {
s.db.SetSetting(db.SettingPlexMatchEnabled, "false")
}
s.setProxy(in.Proxy)
s.db.SetSetting(db.SettingProxy, in.Proxy)
return nil, nil
}
// setProxy applies the given HTTP proxy URL to the process-wide default
// transport and persists it as a setting. An empty or unparseable value
// disables the proxy and clears the stored setting.
func (s *Server) setProxy(proxy string) {
	transport := http.DefaultTransport.(*http.Transport)
	parsed, parseErr := url.Parse(proxy)
	if proxy == "" || parseErr != nil {
		log.Warnf("proxy url not valid, disabling: %v", proxy)
		transport.Proxy = nil
		s.db.SetSetting(db.SettingProxy, "")
		return
	}
	log.Infof("set proxy to %v", proxy)
	transport.Proxy = http.ProxyURL(parsed)
	s.db.SetSetting(db.SettingProxy, proxy)
}
// restoreProxy re-applies the proxy setting persisted in the database,
// typically at server startup.
func (s *Server) restoreProxy() {
	s.setProxy(s.db.GetSetting(db.SettingProxy))
}
func (s *Server) GetSetting(c *gin.Context) (interface{}, error) {
tmdb := s.db.GetSetting(db.SettingTmdbApiKey)
downloadDir := s.db.GetSetting(db.SettingDownloadDir)
@@ -92,9 +72,13 @@ func (s *Server) GetSetting(c *gin.Context) (interface{}, error) {
}
type addTorznabIn struct {
Name string `json:"name" binding:"required"`
URL string `json:"url" binding:"required"`
ApiKey string `json:"api_key" binding:"required"`
ID int `json:"id"`
Name string `json:"name" binding:"required"`
URL string `json:"url" binding:"required"`
ApiKey string `json:"api_key" binding:"required"`
Disabled bool `json:"disabled"`
Priority int `json:"priority"`
SeedRatio float32 `json:"seed_ratio"`
}
func (s *Server) AddTorznabInfo(c *gin.Context) (interface{}, error) {
@@ -102,10 +86,31 @@ func (s *Server) AddTorznabInfo(c *gin.Context) (interface{}, error) {
if err := c.ShouldBindJSON(&in); err != nil {
return nil, errors.Wrap(err, "bind json")
}
err := s.db.SaveTorznabInfo(in.Name, db.TorznabSetting{
log.Infof("add indexer settings: %+v", in)
setting := db.TorznabSetting{
URL: in.URL,
ApiKey: in.ApiKey,
})
}
data, err := json.Marshal(setting)
if err != nil {
return nil, errors.Wrap(err, "marshal json")
}
if in.Priority > 128 {
in.Priority = 128
}
indexer := ent.Indexers{
ID: in.ID,
Name: in.Name,
Implementation: "torznab",
Settings: string(data),
Priority: in.Priority,
Disabled: in.Disabled,
SeedRatio: in.SeedRatio,
}
err = s.db.SaveIndexer(&indexer)
if err != nil {
return nil, errors.Wrap(err, "add ")
}
@@ -188,3 +193,17 @@ func (s *Server) DeleteDownloadCLient(c *gin.Context) (interface{}, error) {
s.db.DeleteDownloadCLient(id)
return "success", nil
}
// episodeMonitoringIn is the JSON request body for toggling whether a
// single episode is monitored for automatic downloading.
type episodeMonitoringIn struct {
	EpisodeID int `json:"episode_id"` // database ID of the episode row; not marked binding:"required", so a missing field decodes as 0 — NOTE(review): confirm intended
	Monitor bool `json:"monitor"` // desired monitoring state
}

// ChangeEpisodeMonitoring handles POST /setting/monitoring: it binds the
// JSON body and flips the monitored flag of one episode, returning the
// literal "success" on completion.
// NOTE(review): any result of SetEpisodeMonitoring is not checked here —
// confirm it cannot fail, or surface its error to the caller.
func (s *Server) ChangeEpisodeMonitoring(c *gin.Context) (interface{}, error) {
	var in episodeMonitoringIn
	if err := c.ShouldBindJSON(&in); err != nil {
		return nil, errors.Wrap(err, "bind")
	}
	s.db.SetEpisodeMonitoring(in.EpisodeID, in.Monitor)
	return "success", nil
}

View File

@@ -3,8 +3,7 @@ package server
import (
"fmt"
"polaris/db"
"polaris/ent/media"
storage1 "polaris/ent/storage"
"polaris/log"
"polaris/pkg/storage"
"strconv"
@@ -113,30 +112,3 @@ func (s *Server) SuggestedMovieFolderName(c *gin.Context) (interface{}, error) {
log.Infof("tv series of tmdb id %v suggestting name is %v", id, name)
return gin.H{"name": name}, nil
}
// getStorage builds a storage.Storage implementation for the storage row
// with the given id, rooted at the row's TV path, or the movie path when
// mediaType is movie. Supported implementations are local and webdav; any
// other value yields a "no storage found" error.
func (s *Server) getStorage(storageId int, mediaType media.MediaType) (storage.Storage, error) {
	// NOTE(review): GetStorage's behavior for an unknown id is not visible
	// here — if it can return nil, the field accesses below would panic.
	st := s.db.GetStorage(storageId)
	targetPath := st.TvPath
	if mediaType == media.MediaTypeMovie {
		targetPath = st.MoviePath
	}
	switch st.Implementation {
	case storage1.ImplementationLocal:
		storageImpl1, err := storage.NewLocalStorage(targetPath)
		if err != nil {
			return nil, errors.Wrap(err, "new local")
		}
		return storageImpl1, nil
	case storage1.ImplementationWebdav:
		ws := st.ToWebDavSetting()
		// ChangeFileHash is persisted as a string setting; only the exact
		// value "true" enables it.
		storageImpl1, err := storage.NewWebdavStorage(ws.URL, ws.User, ws.Password, targetPath, ws.ChangeFileHash == "true")
		if err != nil {
			return nil, errors.Wrap(err, "new webdav")
		}
		return storageImpl1, nil
	}
	// Unknown implementation falls through to an error rather than a panic.
	return nil, errors.New("no storage found")
}

View File

@@ -10,6 +10,7 @@ import (
"polaris/ent"
"polaris/ent/episode"
"polaris/ent/media"
"polaris/ent/schema"
"polaris/log"
"strconv"
"time"
@@ -43,7 +44,11 @@ func (s *Server) SearchMedia(c *gin.Context) (interface{}, error) {
return nil, errors.Wrap(err, "bind query")
}
log.Infof("search media with keyword: %v", q.Query)
r, err := s.MustTMDB().SearchMedia(q.Query, s.language, q.Page)
tmdb, err := s.TMDB()
if err != nil {
return nil, err
}
r, err := tmdb.SearchMedia(q.Query, s.language, q.Page)
if err != nil {
return nil, errors.Wrap(err, "search tv")
}
@@ -62,6 +67,8 @@ type addWatchlistIn struct {
Resolution string `json:"resolution" binding:"required"`
Folder string `json:"folder" binding:"required"`
DownloadHistoryEpisodes bool `json:"download_history_episodes"` //for tv
SizeMin int `json:"size_min"`
SizeMax int `json:"size_max"`
}
func (s *Server) AddTv2Watchlist(c *gin.Context) (interface{}, error) {
@@ -98,12 +105,30 @@ func (s *Server) AddTv2Watchlist(c *gin.Context) (interface{}, error) {
continue
}
for _, ep := range se.Episodes {
shouldMonitor := false
//如果设置下载往期剧集则监控所有剧集。如果没有则监控未上映的剧集考虑时差等问题留24h余量
if in.DownloadHistoryEpisodes {
shouldMonitor = true
} else {
t, err := time.Parse("2006-01-02", ep.AirDate)
if err != nil {
log.Error("air date not known, will monitor: %v", ep.AirDate)
shouldMonitor = true
} else {
if time.Since(t) < 24*time.Hour { //monitor episode air 24h before now
shouldMonitor = true
}
}
}
epid, err := s.db.SaveEposideDetail(&ent.Episode{
SeasonNumber: seasonId,
EpisodeNumber: ep.EpisodeNumber,
Title: ep.Name,
Overview: ep.Overview,
AirDate: ep.AirDate,
Monitored: shouldMonitor,
})
if err != nil {
log.Errorf("save episode info error: %v", err)
@@ -112,7 +137,7 @@ func (s *Server) AddTv2Watchlist(c *gin.Context) (interface{}, error) {
epIds = append(epIds, epid)
}
}
r, err := s.db.AddMediaWatchlist(&ent.Media{
m := &ent.Media{
TmdbID: int(detail.ID),
MediaType: media.MediaTypeTv,
NameCn: nameCn,
@@ -124,7 +149,10 @@ func (s *Server) AddTv2Watchlist(c *gin.Context) (interface{}, error) {
StorageID: in.StorageID,
TargetDir: in.Folder,
DownloadHistoryEpisodes: in.DownloadHistoryEpisodes,
}, epIds)
Limiter: &schema.MediaLimiter{SizeMin: in.SizeMin, SizeMax: in.SizeMax},
}
r, err := s.db.AddMediaWatchlist(m, epIds)
if err != nil {
return nil, errors.Wrap(err, "add to list")
}
@@ -135,7 +163,7 @@ func (s *Server) AddTv2Watchlist(c *gin.Context) (interface{}, error) {
if err := s.downloadBackdrop(detail.BackdropPath, r.ID); err != nil {
log.Errorf("download poster error: %v", err)
}
if err := s.checkDownloadedSeriesFiles(r); err != nil {
if err := s.core.CheckDownloadedSeriesFiles(r); err != nil {
log.Errorf("check downloaded files error: %v", err)
}
@@ -150,6 +178,7 @@ func (s *Server) AddMovie2Watchlist(c *gin.Context) (interface{}, error) {
if err := c.ShouldBindJSON(&in); err != nil {
return nil, errors.Wrap(err, "bind query")
}
log.Infof("add movie watchlist input: %+v", in)
detailCn, err := s.MustTMDB().GetMovieDetails(in.TmdbID, db.LanguageCN)
if err != nil {
return nil, errors.Wrap(err, "get movie detail")
@@ -172,6 +201,7 @@ func (s *Server) AddMovie2Watchlist(c *gin.Context) (interface{}, error) {
Title: "dummy episode for movies",
Overview: "dummy episode for movies",
AirDate: detail.ReleaseDate,
Monitored: true,
})
if err != nil {
return nil, errors.Wrap(err, "add dummy episode")
@@ -189,6 +219,7 @@ func (s *Server) AddMovie2Watchlist(c *gin.Context) (interface{}, error) {
Resolution: media.Resolution(in.Resolution),
StorageID: in.StorageID,
TargetDir: in.Folder,
Limiter: &schema.MediaLimiter{SizeMin: in.SizeMin, SizeMax: in.SizeMax},
}, []int{epid})
if err != nil {
return nil, errors.Wrap(err, "add to list")
@@ -245,8 +276,8 @@ func (s *Server) downloadImage(url string, mediaID int, name string) error {
type MediaWithStatus struct {
*ent.Media
MonitoredNum int `json:"monitored_num"`
DownloadedNum int `json:"downloaded_num"`
MonitoredNum int `json:"monitored_num"`
DownloadedNum int `json:"downloaded_num"`
}
//missing: episode aired missing
@@ -259,7 +290,7 @@ func (s *Server) GetTvWatchlist(c *gin.Context) (interface{}, error) {
res := make([]MediaWithStatus, len(list))
for i, item := range list {
var ms = MediaWithStatus{
Media: item,
Media: item,
MonitoredNum: 0,
DownloadedNum: 0,
}
@@ -267,30 +298,12 @@ func (s *Server) GetTvWatchlist(c *gin.Context) (interface{}, error) {
details := s.db.GetMediaDetails(item.ID)
for _, ep := range details.Episodes {
monitored := false
if ep.SeasonNumber == 0 {
continue
}
if item.DownloadHistoryEpisodes {
monitored = true
} else {
t, err := time.Parse("2006-01-02", ep.AirDate)
if err != nil { //airdate not exist, maybe airdate not set yet
monitored = true
} else {
if item.CreatedAt.Sub(t) > 24*time.Hour { //剧集在加入watchlist之前不去下载
continue
}
monitored = true
}
}
if monitored {
if ep.Monitored {
ms.MonitoredNum++
if ep.Status == episode.StatusDownloaded {
ms.DownloadedNum++
}
}
}
res[i] = ms
}
@@ -302,8 +315,8 @@ func (s *Server) GetMovieWatchlist(c *gin.Context) (interface{}, error) {
res := make([]MediaWithStatus, len(list))
for i, item := range list {
var ms = MediaWithStatus{
Media: item,
MonitoredNum: 1,
Media: item,
MonitoredNum: 1,
DownloadedNum: 0,
}
dummyEp, err := s.db.GetMovieDummyEpisode(item.ID)

View File

@@ -10,7 +10,7 @@ class ActivityPage extends ConsumerStatefulWidget {
static const route = "/activities";
@override
_ActivityPageState createState() => _ActivityPageState();
ConsumerState<ConsumerStatefulWidget> createState() => _ActivityPageState();
}
class _ActivityPageState extends ConsumerState<ActivityPage>

View File

@@ -6,8 +6,8 @@ import 'package:ui/activity.dart';
import 'package:ui/login_page.dart';
import 'package:ui/movie_watchlist.dart';
import 'package:ui/providers/APIs.dart';
import 'package:ui/search.dart';
import 'package:ui/settings.dart';
import 'package:ui/search_page/search.dart';
import 'package:ui/settings/settings.dart';
import 'package:ui/system_page.dart';
import 'package:ui/tv_details.dart';
import 'package:ui/welcome_page.dart';
@@ -35,7 +35,8 @@ CustomTransitionPage buildPageWithDefaultTransition<T>({
reverseTransitionDuration: Duration.zero,
key: state.pageKey,
child: child,
transitionsBuilder: (context, animation, secondaryAnimation, child) => child,
transitionsBuilder: (context, animation, secondaryAnimation, child) =>
child,
);
}
@@ -123,14 +124,22 @@ class _MyAppState extends ConsumerState<MyApp> {
child: MaterialApp.router(
title: 'Polaris 影视追踪下载',
theme: ThemeData(
fontFamily: "NotoSansSC",
colorScheme: ColorScheme.fromSeed(
seedColor: Colors.blueAccent,
brightness: Brightness.dark,
surface: Colors.black54),
useMaterial3: true,
//scaffoldBackgroundColor: Color.fromARGB(255, 26, 24, 24)
),
fontFamily: "NotoSansSC",
colorScheme: ColorScheme.fromSeed(
seedColor: Colors.blueAccent,
brightness: Brightness.dark,
surface: Colors.black54),
useMaterial3: true,
//scaffoldBackgroundColor: Color.fromARGB(255, 26, 24, 24)
tooltipTheme: TooltipThemeData(
textStyle: const TextStyle(
color: Colors.grey,
),
decoration: BoxDecoration(
color: Colors.black54,
borderRadius: BorderRadius.circular(20),
),
)),
routerConfig: router,
),
);

View File

@@ -3,9 +3,8 @@ import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:ui/providers/activity.dart';
import 'package:ui/providers/series_details.dart';
import 'package:ui/widgets/detail_card.dart';
import 'package:ui/widgets/utils.dart';
import 'package:ui/widgets/resource_list.dart';
import 'package:ui/widgets/progress_indicator.dart';
import 'package:ui/widgets/widgets.dart';
class MovieDetailsPage extends ConsumerStatefulWidget {
static const route = "/movie/:id";
@@ -53,7 +52,7 @@ class NestedTabBar extends ConsumerStatefulWidget {
const NestedTabBar({super.key, required this.id});
@override
_NestedTabBarState createState() => _NestedTabBarState();
ConsumerState<ConsumerStatefulWidget> createState() => _NestedTabBarState();
}
class _NestedTabBarState extends ConsumerState<NestedTabBar>
@@ -125,59 +124,7 @@ class _NestedTabBarState extends ConsumerState<NestedTabBar>
error: (error, trace) => Text("$error"),
loading: () => const MyProgressIndicator());
} else {
return Consumer(
builder: (context, ref, child) {
var torrents = ref.watch(mediaTorrentsDataProvider(
(mediaId: widget.id, seasonNumber: 0, episodeNumber: 0)));
return torrents.when(
data: (v) {
if (v.isEmpty) {
return const Center(
child: Text("无可用资源"),
);
}
return DataTable(
columns: const [
DataColumn(label: Text("名称")),
DataColumn(label: Text("大小")),
DataColumn(label: Text("seeders")),
DataColumn(label: Text("peers")),
DataColumn(label: Text("操作"))
],
rows: List.generate(v.length, (i) {
final torrent = v[i];
return DataRow(cells: [
DataCell(Text("${torrent.name}")),
DataCell(
Text("${torrent.size?.readableFileSize()}")),
DataCell(Text("${torrent.seeders}")),
DataCell(Text("${torrent.peers}")),
DataCell(IconButton(
icon: const Icon(Icons.download),
onPressed: () {
final f = ref
.read(mediaTorrentsDataProvider((
mediaId: widget.id,
seasonNumber: 0,
episodeNumber: 0
)).notifier)
.download(torrent)
.then((v) => showSnakeBar(
"开始下载:${torrent.name}"));
// .onError((error, trace) =>
// Utils.showSnakeBar("操作失败: $error"));
showLoadingWithFuture(f);
},
))
]);
}),
);
},
error: (error, trace) => Text("$error"),
loading: () => const MyProgressIndicator());
},
);
return ResourceList(mediaId: widget.id);
}
})
],

View File

@@ -1,7 +1,7 @@
import 'package:flutter/material.dart';
import 'package:go_router/go_router.dart';
import 'package:ui/activity.dart';
import 'package:ui/settings.dart';
import 'package:ui/settings/settings.dart';
import 'package:ui/welcome_page.dart';
class NavDrawer extends StatefulWidget {

View File

@@ -33,6 +33,7 @@ class APIs {
static final logsBaseUrl = "$_baseUrl/api/v1/logs/";
static final logFilesUrl = "$_baseUrl/api/v1/setting/logfiles";
static final aboutUrl = "$_baseUrl/api/v1/setting/about";
static final changeMonitoringUrl = "$_baseUrl/api/v1/setting/monitoring";
static final notifierAllUrl = "$_baseUrl/api/v1/notifier/all";
static final notifierDeleteUrl = "$_baseUrl/api/v1/notifier/id/";

View File

@@ -48,6 +48,19 @@ class SeriesDetailData
var name = (sp.data as Map<String, dynamic>)["name"];
return name;
}
Future<void> changeMonitoringStatus(int episodeId, bool b) async {
final dio = APIs.getDio();
var resp = await dio.post(APIs.changeMonitoringUrl, data: {
"episode_id": episodeId,
"monitor": b,
});
var sp = ServerResponse.fromJson(resp.data);
if (sp.code != 0) {
throw sp.message;
}
ref.invalidateSelf();
}
}
class SeriesDetails {
@@ -98,7 +111,7 @@ class SeriesDetails {
mediaType = json["media_type"];
storage = Storage.fromJson(json["storage"]);
targetDir = json["target_dir"];
downloadHistoryEpisodes = json["download_history_episodes"]??false;
downloadHistoryEpisodes = json["download_history_episodes"] ?? false;
if (json['episodes'] != null) {
episodes = <Episodes>[];
json['episodes'].forEach((v) {
@@ -117,6 +130,7 @@ class Episodes {
int? seasonNumber;
String? overview;
String? status;
bool? monitored;
Episodes(
{this.id,
@@ -126,6 +140,7 @@ class Episodes {
this.airDate,
this.seasonNumber,
this.status,
this.monitored,
this.overview});
Episodes.fromJson(Map<String, dynamic> json) {
@@ -137,6 +152,7 @@ class Episodes {
seasonNumber = json['season_number'];
status = json['status'];
overview = json['overview'];
monitored = json["monitored"];
}
}
@@ -195,13 +211,28 @@ class MediaTorrentResource extends AutoDisposeFamilyAsyncNotifier<
}
class TorrentResource {
TorrentResource({this.name, this.size, this.seeders, this.peers, this.link});
TorrentResource(
{this.name,
this.size,
this.seeders,
this.peers,
this.link,
this.source,
this.indexerId,
this.downloadFactor,
this.uploadFactor,
this.isPrivate});
String? name;
int? size;
int? seeders;
int? peers;
String? link;
String? source;
int? indexerId;
double? downloadFactor;
double? uploadFactor;
bool? isPrivate;
factory TorrentResource.fromJson(Map<String, dynamic> json) {
return TorrentResource(
@@ -209,13 +240,20 @@ class TorrentResource {
size: json["size"],
seeders: json["seeders"],
peers: json["peers"],
link: json["link"]);
link: json["link"],
source: json["source"],
indexerId: json["indexer_id"],
isPrivate: json["is_private"] ?? false,
downloadFactor: json["download_volume_factor"],
uploadFactor: json["upload_volume_factor"]);
}
Map<String, dynamic> toJson() {
final Map<String, dynamic> data = <String, dynamic>{};
data['name'] = name;
data['size'] = size;
data["link"] = link;
data["indexer_id"] = indexerId;
data["source"] = source;
return data;
}
}

View File

@@ -130,20 +130,30 @@ class Indexer {
String? url;
String? apiKey;
int? id;
int? priority;
double? seedRatio;
bool? disabled;
Indexer({this.name, this.url, this.apiKey});
Indexer({this.name, this.url, this.apiKey, this.id, this.priority=50, this.seedRatio=0, this.disabled});
Indexer.fromJson(Map<String, dynamic> json) {
name = json['name'];
url = json['url'];
apiKey = json['api_key'];
id = json["id"];
priority = json["priority"];
seedRatio = json["seed_ratio"]??0;
disabled = json["disabled"] ?? false;
}
Map<String, dynamic> toJson() {
final Map<String, dynamic> data = <String, dynamic>{};
data['name'] = name;
data['url'] = url;
data['api_key'] = apiKey;
data["id"] = id;
data["priority"] = priority;
data["seed_ratio"] = seedRatio;
data["disabled"] = disabled;
return data;
}
}
@@ -210,8 +220,8 @@ class DownloadClient {
this.url,
this.user,
this.password,
this.removeCompletedDownloads,
this.removeFailedDownloads});
this.removeCompletedDownloads = true,
this.removeFailedDownloads = true});
DownloadClient.fromJson(Map<String, dynamic> json) {
id = json['id'];

View File

@@ -1,6 +1,7 @@
import 'dart:async';
import 'package:dio/dio.dart';
import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:quiver/strings.dart';
import 'package:ui/providers/APIs.dart';
@@ -91,16 +92,24 @@ class SearchPageData
state = newState;
}
Future<void> submit2Watchlist(int tmdbId, int storageId, String resolution,
String mediaType, String folder, bool downloadHistoryEpisodes) async {
final dio = await APIs.getDio();
Future<void> submit2Watchlist(
int tmdbId,
int storageId,
String resolution,
String mediaType,
String folder,
bool downloadHistoryEpisodes,
RangeValues limiter) async {
final dio = APIs.getDio();
if (mediaType == "tv") {
var resp = await dio.post(APIs.watchlistTvUrl, data: {
"tmdb_id": tmdbId,
"storage_id": storageId,
"resolution": resolution,
"folder": folder,
"download_history_episodes": downloadHistoryEpisodes
"download_history_episodes": downloadHistoryEpisodes,
"size_min": (limiter.start * 1000).toInt(),
"size_max": (limiter.end * 1000).toInt(),
});
var sp = ServerResponse.fromJson(resp.data);
if (sp.code != 0) {
@@ -112,7 +121,9 @@ class SearchPageData
"tmdb_id": tmdbId,
"storage_id": storageId,
"resolution": resolution,
"folder": folder
"folder": folder,
"size_min": (limiter.start * 1000).toInt(),
"size_max": (limiter.end * 1000).toInt(),
});
var sp = ServerResponse.fromJson(resp.data);
if (sp.code != 0) {
@@ -185,8 +196,8 @@ class MediaDetail {
resolution = json["resolution"];
storageId = json["storage_id"];
airDate = json["air_date"];
monitoredNum = json["monitored_num"]??0;
downloadedNum = json["downloaded_num"]??0;
monitoredNum = json["monitored_num"] ?? 0;
downloadedNum = json["downloaded_num"] ?? 0;
}
}

View File

@@ -1,352 +0,0 @@
import 'package:flutter/material.dart';
import 'package:flutter_form_builder/flutter_form_builder.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:go_router/go_router.dart';
import 'package:ui/providers/APIs.dart';
import 'package:ui/providers/settings.dart';
import 'package:ui/providers/welcome_data.dart';
import 'package:ui/widgets/progress_indicator.dart';
import 'package:ui/widgets/utils.dart';
import 'package:ui/widgets/widgets.dart';
class SearchPage extends ConsumerStatefulWidget {
const SearchPage({super.key, this.query});
static const route = "/search";
final String? query;
@override
ConsumerState<ConsumerStatefulWidget> createState() {
return _SearchPageState();
}
}
class _SearchPageState extends ConsumerState<SearchPage> {
List<dynamic> list = List.empty();
@override
Widget build(BuildContext context) {
final q = widget.query ?? "";
var searchList = ref.watch(searchPageDataProvider(q));
List<Widget> res = searchList.when(
data: (data) {
if (data.isEmpty) {
return [
Container(
height: MediaQuery.of(context).size.height * 0.6,
alignment: Alignment.center,
child: const Text(
"啥都没有...",
style: TextStyle(fontSize: 16),
))
];
}
var cards = List<Widget>.empty(growable: true);
for (final item in data) {
cards.add(Card(
margin: const EdgeInsets.all(4),
clipBehavior: Clip.hardEdge,
child: InkWell(
//splashColor: Colors.blue.withAlpha(30),
onTap: () {
if (item.inWatchlist != true) {
_showSubmitDialog(context, item);
}
},
child: Row(
children: <Widget>[
Flexible(
child: SizedBox(
width: 150,
height: 200,
child: Image.network(
"${APIs.tmdbImgBaseUrl}${item.posterPath}",
fit: BoxFit.contain,
),
),
),
Flexible(
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Row(
children: [
Text(
"${item.name} ${item.name != item.originalName ? item.originalName : ''} (${item.firstAirDate?.year})",
style: const TextStyle(
fontSize: 14,
fontWeight: FontWeight.bold),
),
const SizedBox(
width: 10,
),
item.mediaType == "tv"
? const Chip(
avatar: Icon(Icons.live_tv),
label: Text(
"电视剧",
))
: const Chip(
avatar: Icon(Icons.movie),
label: Text("电影")),
item.inWatchlist == true
? const Chip(
label: Icon(
Icons.done,
color: Colors.green,
))
: const Text("")
],
),
const Text(""),
item.originCountry.isNotEmpty
? Text("国家:${item.originCountry[0]}")
: Container(),
Text("${item.overview}")
],
),
)
],
),
)));
}
return cards;
},
error: (err, trace) => [Text("$err")],
loading: () => [const MyProgressIndicator()]);
var f = NotificationListener(
onNotification: (ScrollNotification scrollInfo) {
if (scrollInfo is ScrollEndNotification &&
scrollInfo.metrics.axisDirection == AxisDirection.down &&
scrollInfo.metrics.pixels >= scrollInfo.metrics.maxScrollExtent) {
ref.read(searchPageDataProvider(q).notifier).queryNextPage();
}
return true;
},
child: ListView(
children: res,
));
return Column(
children: [
TextField(
autofocus: true,
controller: TextEditingController(text: q),
onSubmitted: (value) async {
context.go(
Uri(path: SearchPage.route, queryParameters: {'query': value})
.toString());
},
decoration: const InputDecoration(
labelText: "搜索",
hintText: "搜索剧集名称",
prefixIcon: Icon(Icons.search)),
),
Expanded(child: f)
],
);
}
Future<void> _showSubmitDialog(BuildContext context, SearchResult item) {
final _formKey = GlobalKey<FormBuilderState>();
return showDialog<void>(
context: context,
builder: (BuildContext context) {
return Consumer(
builder: (context, ref, _) {
int storageSelected = 0;
var storage = ref.watch(storageSettingProvider);
var name = ref.watch(suggestNameDataProvider(
(id: item.id!, mediaType: item.mediaType!)));
var pathController = TextEditingController();
return AlertDialog(
title: Text('添加: ${item.name}'),
content: SizedBox(
width: 500,
height: 200,
child: FormBuilder(
key: _formKey,
initialValue: const {
"resolution": "1080p",
"storage": null,
"folder": "",
"history_episodes": false,
},
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
FormBuilderDropdown(
name: "resolution",
decoration: const InputDecoration(labelText: "清晰度"),
items: const [
DropdownMenuItem(
value: "720p", child: Text("720p")),
DropdownMenuItem(
value: "1080p", child: Text("1080p")),
DropdownMenuItem(
value: "2160p", child: Text("2160p")),
],
),
storage.when(
data: (v) {
return StatefulBuilder(
builder: (context, setState) {
return Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
FormBuilderDropdown(
onChanged: (v) {
setState(
() {
storageSelected = v!;
},
);
},
name: "storage",
decoration: const InputDecoration(
labelText: "存储位置"),
items: v
.map((s) => DropdownMenuItem(
value: s.id,
child: Text(s.name!)))
.toList(),
),
name.when(
data: (s) {
return storageSelected == 0
? const Text("")
: () {
final storage = v
.where((e) =>
e.id == storageSelected)
.first;
final path =
item.mediaType == "tv"
? storage.tvPath
: storage.moviePath;
pathController.text = s;
return SizedBox(
//width: 300,
child: FormBuilderTextField(
name: "folder",
controller: pathController,
decoration: InputDecoration(
labelText: "存储路径",
prefix: Text(
path ?? "unknown")),
),
);
}();
},
error: (error, stackTrace) =>
Text("$error"),
loading: () => const MyProgressIndicator(
size: 20,
),
),
item.mediaType == "tv"
? SizedBox(
width: 250,
child: FormBuilderCheckbox(
name: "history_episodes",
title: const Text("是否下载往期剧集"),
),
)
: const SizedBox(),
],
);
});
},
error: (err, trace) => Text("$err"),
loading: () => const MyProgressIndicator()),
],
),
),
),
actions: <Widget>[
TextButton(
style: TextButton.styleFrom(
textStyle: Theme.of(context).textTheme.labelLarge,
),
child: const Text('取消'),
onPressed: () {
Navigator.of(context).pop();
},
),
TextButton(
style: TextButton.styleFrom(
textStyle: Theme.of(context).textTheme.labelLarge,
),
child: const Text('确定'),
onPressed: () async {
if (_formKey.currentState!.saveAndValidate()) {
final values = _formKey.currentState!.value;
//print(values);
var f = ref
.read(searchPageDataProvider(widget.query ?? "")
.notifier)
.submit2Watchlist(
item.id!,
values["storage"],
values["resolution"],
item.mediaType!,
values["folder"],
values["history_episodes"] ?? false)
.then((v) {
Navigator.of(context).pop();
showSnakeBar("添加成功:${item.name}");
});
showLoadingWithFuture(f);
}
},
),
],
);
},
);
});
}
}
class SearchBarApp extends StatefulWidget {
const SearchBarApp({
super.key,
required this.onChanged,
});
final ValueChanged<String> onChanged;
@override
State<SearchBarApp> createState() => _SearchBarAppState();
}
class _SearchBarAppState extends State<SearchBarApp> {
@override
Widget build(BuildContext context) {
return SearchAnchor(
builder: (BuildContext context, SearchController controller) {
return SearchBar(
controller: controller,
padding: const WidgetStatePropertyAll<EdgeInsets>(
EdgeInsets.symmetric(horizontal: 16.0)),
onSubmitted: (value) => {widget.onChanged(controller.text)},
leading: const Icon(Icons.search),
);
}, suggestionsBuilder: (BuildContext context, SearchController controller) {
return List<ListTile>.generate(0, (int index) {
final String item = 'item $index';
return ListTile(
title: Text(item),
onTap: () {
setState(() {
controller.closeView(item);
});
},
);
});
});
}
}

View File

@@ -0,0 +1,158 @@
import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:go_router/go_router.dart';
import 'package:ui/providers/APIs.dart';
import 'package:ui/providers/welcome_data.dart';
import 'package:ui/search_page/submit_dialog.dart';
import 'package:ui/widgets/progress_indicator.dart';
/// Search page: lets the user search media by keyword and add results to
/// the watchlist. [query] is the current keyword, taken from the URL's
/// query parameter (may be null when the page is opened without one).
class SearchPage extends ConsumerStatefulWidget {
  const SearchPage({super.key, this.query});

  static const route = "/search";

  final String? query;

  @override
  ConsumerState<ConsumerStatefulWidget> createState() {
    return _SearchPageState();
  }
}
/// State for [SearchPage]: renders the result cards, supports
/// infinite scroll (next page on scroll end), and opens the submit
/// dialog when an item not yet in the watchlist is tapped.
class _SearchPageState extends ConsumerState<SearchPage> {
  List<dynamic> list = List.empty();

  @override
  Widget build(BuildContext context) {
    final q = widget.query ?? "";
    // Watch the search results for the current query.
    var searchList = ref.watch(searchPageDataProvider(q));
    List<Widget> res = searchList.when(
        data: (data) {
          if (data.isEmpty) {
            return [
              Container(
                  height: MediaQuery.of(context).size.height * 0.6,
                  alignment: Alignment.center,
                  child: const Text(
                    "啥都没有...",
                    style: TextStyle(fontSize: 16),
                  ))
            ];
          }
          var cards = List<Widget>.empty(growable: true);
          for (final item in data) {
            cards.add(Card(
                margin: const EdgeInsets.all(4),
                clipBehavior: Clip.hardEdge,
                child: InkWell(
                  //splashColor: Colors.blue.withAlpha(30),
                  onTap: () {
                    // Only items not already in the watchlist can be added.
                    if (item.inWatchlist != true) {
                      _showSubmitDialog(context, item);
                    }
                  },
                  child: Row(
                    children: <Widget>[
                      Flexible(
                        child: SizedBox(
                          width: 150,
                          height: 200,
                          child: Image.network(
                            "${APIs.tmdbImgBaseUrl}${item.posterPath}",
                            fit: BoxFit.contain,
                          ),
                        ),
                      ),
                      Flexible(
                        child: Column(
                          crossAxisAlignment: CrossAxisAlignment.start,
                          children: [
                            Row(
                              children: [
                                Text(
                                  "${item.name} ${item.name != item.originalName ? item.originalName : ''} (${item.firstAirDate?.year})",
                                  style: const TextStyle(
                                      fontSize: 14,
                                      fontWeight: FontWeight.bold),
                                ),
                                const SizedBox(
                                  width: 10,
                                ),
                                item.mediaType == "tv"
                                    ? const Chip(
                                        avatar: Icon(Icons.live_tv),
                                        label: Text(
                                          "剧集",
                                        ))
                                    : const Chip(
                                        avatar: Icon(Icons.movie),
                                        label: Text("电影")),
                                item.inWatchlist == true
                                    ? const Chip(
                                        label: Icon(
                                        Icons.done,
                                        color: Colors.green,
                                      ))
                                    : const Text("")
                              ],
                            ),
                            const Text(""),
                            item.originCountry.isNotEmpty
                                ? Text("国家:${item.originCountry[0]}")
                                : Container(),
                            Text("${item.overview}")
                          ],
                        ),
                      )
                    ],
                  ),
                )));
          }
          return cards;
        },
        error: (err, trace) => [Text("$err")],
        loading: () => [const MyProgressIndicator()]);
    // Load the next result page once the list is scrolled to the bottom.
    var f = NotificationListener(
        onNotification: (ScrollNotification scrollInfo) {
          if (scrollInfo is ScrollEndNotification &&
              scrollInfo.metrics.axisDirection == AxisDirection.down &&
              scrollInfo.metrics.pixels >= scrollInfo.metrics.maxScrollExtent) {
            ref.read(searchPageDataProvider(q).notifier).queryNextPage();
          }
          return true;
        },
        child: ListView(
          children: res,
        ));
    return Column(
      children: [
        TextField(
          autofocus: true,
          controller: TextEditingController(text: q),
          onSubmitted: (value) async {
            // Navigating with the query in the URL keeps searches linkable.
            context.go(
                Uri(path: SearchPage.route, queryParameters: {'query': value})
                    .toString());
          },
          decoration: const InputDecoration(
              labelText: "搜索",
              hintText: "搜索剧集名称",
              prefixIcon: Icon(Icons.search)),
        ),
        Expanded(child: f)
      ],
    );
  }

  /// Opens the add-to-watchlist dialog for [item].
  Future<void> _showSubmitDialog(BuildContext context, SearchResult item) {
    return showDialog<void>(
        context: context,
        builder: (BuildContext context) {
          return SubmitSearchResult(
            item: item,
            // Fix: widget.query is nullable; the previous `widget.query!`
            // would throw when the page is opened without a query parameter.
            // Default to "" — the same key used for searchPageDataProvider
            // throughout this widget.
            query: widget.query ?? "",
          );
        });
  }
}

View File

@@ -0,0 +1,225 @@
import 'package:flutter/material.dart';
import 'package:flutter_form_builder/flutter_form_builder.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:ui/providers/settings.dart';
import 'package:ui/providers/welcome_data.dart';
import 'package:ui/widgets/progress_indicator.dart';
import 'package:ui/widgets/utils.dart';
import 'package:ui/widgets/widgets.dart';
class SubmitSearchResult extends ConsumerStatefulWidget {
final SearchResult item;
final String query;
const SubmitSearchResult(
{super.key, required this.item, required this.query});
@override
ConsumerState<ConsumerStatefulWidget> createState() {
return _SubmitSearchResultState();
}
}
/// State for [SubmitSearchResult]: renders the "add to watchlist" form
/// (resolution, storage, target folder, optional per-file size limit) and
/// submits the chosen options via [searchPageDataProvider].
class _SubmitSearchResultState extends ConsumerState<SubmitSearchResult> {
  final _formKey = GlobalKey<FormBuilderState>();

  // Whether the file-size limiter slider is shown and applied on submit.
  bool enabledSizedLimiter = false;

  // Upper bound (MB) of the size-limiter slider; adjusted adaptively in
  // onChangeEnd so the slider stays usable for both small and huge files.
  double sizeMax = 5000;

  // Owned by the state so it can be disposed. Fix: previously this controller
  // was re-created on every build() and never disposed, leaking resources.
  final _pathController = TextEditingController();

  @override
  void dispose() {
    _pathController.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    int storageSelected = 0;
    var storage = ref.watch(storageSettingProvider);
    var name = ref.watch(suggestNameDataProvider(
        (id: widget.item.id!, mediaType: widget.item.mediaType!)));
    return AlertDialog(
      title: Text('添加: ${widget.item.name}'),
      content: SizedBox(
        width: 500,
        height: 400,
        child: FormBuilder(
          key: _formKey,
          initialValue: const {
            "resolution": "1080p",
            "storage": null,
            "folder": "",
            "history_episodes": false,
            // Fix: key spelling (was "eanble_size_limier"). The key is only
            // referenced inside this widget, so the rename is self-contained.
            "enable_size_limiter": false,
            "size_limiter": RangeValues(400, 4000),
          },
          child: Column(
            crossAxisAlignment: CrossAxisAlignment.start,
            children: [
              FormBuilderDropdown(
                name: "resolution",
                decoration: const InputDecoration(labelText: "清晰度"),
                items: const [
                  DropdownMenuItem(value: "720p", child: Text("720p")),
                  DropdownMenuItem(value: "1080p", child: Text("1080p")),
                  DropdownMenuItem(value: "2160p", child: Text("2160p")),
                ],
              ),
              storage.when(
                  data: (v) {
                    return StatefulBuilder(builder: (context, setState) {
                      return Column(
                        crossAxisAlignment: CrossAxisAlignment.start,
                        children: [
                          FormBuilderDropdown(
                            onChanged: (v) {
                              setState(
                                () {
                                  storageSelected = v!;
                                },
                              );
                            },
                            name: "storage",
                            decoration:
                                const InputDecoration(labelText: "存储位置"),
                            items: v
                                .map((s) => DropdownMenuItem(
                                    value: s.id, child: Text(s.name!)))
                                .toList(),
                          ),
                          name.when(
                            data: (s) {
                              return storageSelected == 0
                                  ? const Text("")
                                  : () {
                                      final storage = v
                                          .where((e) => e.id == storageSelected)
                                          .first;
                                      final path = widget.item.mediaType == "tv"
                                          ? storage.tvPath
                                          : storage.moviePath;
                                      // Pre-fill with the suggested name.
                                      _pathController.text = s;
                                      return SizedBox(
                                        //width: 300,
                                        child: FormBuilderTextField(
                                          name: "folder",
                                          controller: _pathController,
                                          decoration: InputDecoration(
                                              labelText: "存储路径",
                                              prefix: Text(path ?? "unknown")),
                                        ),
                                      );
                                    }();
                            },
                            error: (error, stackTrace) => Text("$error"),
                            loading: () => const MyProgressIndicator(
                              size: 20,
                            ),
                          ),
                          FormBuilderSwitch(
                            name: "enable_size_limiter",
                            title: Text(widget.item.mediaType == "tv"
                                ? "是否限制每集文件大小"
                                : "是否限制电影文件大小"),
                            onChanged: (value) {
                              setState(
                                () {
                                  enabledSizedLimiter = value!;
                                },
                              );
                            },
                          ),
                          enabledSizedLimiter
                              ? FormBuilderRangeSlider(
                                  maxValueWidget: (max) =>
                                      Text("${sizeMax / 1000} GB"),
                                  minValueWidget: (min) => const Text("0"),
                                  valueWidget: (value) {
                                    // The slider's label renders as
                                    // "<start> - <end>", so splitting on " "
                                    // yields [start, "-", end].
                                    final sss = value.split(" ");
                                    return Text(
                                        "${readableSize(sss[0])} - ${readableSize(sss[2])}");
                                  },
                                  onChangeEnd: (value) {
                                    // Grow the range when the user pushes the
                                    // upper thumb near the max; shrink it back
                                    // (never below the initial 5000 MB) when
                                    // the selection sits far below the max.
                                    if (value.end > sizeMax * 0.9) {
                                      setState(
                                        () {
                                          sizeMax = sizeMax * 5;
                                        },
                                      );
                                    } else if (value.end < sizeMax * 0.2) {
                                      if (sizeMax > 5000) {
                                        setState(
                                          () {
                                            sizeMax = sizeMax / 5;
                                          },
                                        );
                                      }
                                    }
                                  },
                                  name: "size_limiter",
                                  min: 0,
                                  max: sizeMax)
                              : const SizedBox(),
                          widget.item.mediaType == "tv"
                              ? SizedBox(
                                  width: 250,
                                  child: FormBuilderCheckbox(
                                    name: "history_episodes",
                                    title: const Text("是否下载往期剧集"),
                                  ),
                                )
                              : const SizedBox(),
                        ],
                      );
                    });
                  },
                  error: (err, trace) => Text("$err"),
                  loading: () => const MyProgressIndicator()),
            ],
          ),
        ),
      ),
      actions: <Widget>[
        TextButton(
          style: TextButton.styleFrom(
            textStyle: Theme.of(context).textTheme.labelLarge,
          ),
          child: const Text('取消'),
          onPressed: () {
            Navigator.of(context).pop();
          },
        ),
        TextButton(
          style: TextButton.styleFrom(
            textStyle: Theme.of(context).textTheme.labelLarge,
          ),
          child: const Text('确定'),
          onPressed: () async {
            if (_formKey.currentState!.saveAndValidate()) {
              final values = _formKey.currentState!.value;
              // Fix: widget.query is a non-nullable String, so the original
              // `widget.query ?? ""` null-aware fallback was dead code.
              var f = ref
                  .read(searchPageDataProvider(widget.query).notifier)
                  .submit2Watchlist(
                      widget.item.id!,
                      values["storage"],
                      values["resolution"],
                      widget.item.mediaType!,
                      values["folder"],
                      values["history_episodes"] ?? false,
                      // RangeValues(-1, -1) signals "no size limit".
                      enabledSizedLimiter
                          ? values["size_limiter"]
                          : const RangeValues(-1, -1))
                  .then((v) {
                Navigator.of(context).pop();
                showSnakeBar("添加成功:${widget.item.name}");
              });
              showLoadingWithFuture(f);
            }
          },
        ),
      ],
    );
  }

  /// Formats one side of the range-slider label: values ending in "K"
  /// (thousands of MB) are shown as GB, everything else as MB.
  String readableSize(String v) {
    if (v.endsWith("K")) {
      return v.replaceAll("K", " GB");
    }
    return "$v MB";
  }
}

View File

@@ -16,7 +16,7 @@ Future<void> showSettingDialog(
title: Text(title),
content: SingleChildScrollView(
child: SizedBox(
width: 300,
width: 400,
child: body,
),
),

View File

@@ -8,7 +8,6 @@ import 'package:ui/widgets/progress_indicator.dart';
import 'package:ui/widgets/widgets.dart';
class IndexerSettings extends ConsumerStatefulWidget {
const IndexerSettings({super.key});
@override
ConsumerState<ConsumerStatefulWidget> createState() {
@@ -47,7 +46,10 @@ class _IndexerState extends ConsumerState<IndexerSettings> {
"name": indexer.name,
"url": indexer.url,
"api_key": indexer.apiKey,
"impl": "torznab"
"impl": "torznab",
"priority": indexer.priority.toString(),
"seed_ratio": indexer.seedRatio.toString(),
"disabled": indexer.disabled
},
child: Column(
children: [
@@ -76,6 +78,26 @@ class _IndexerState extends ConsumerState<IndexerSettings> {
autovalidateMode: AutovalidateMode.onUserInteraction,
validator: FormBuilderValidators.required(),
),
FormBuilderTextField(
name: "priority",
decoration: const InputDecoration(
labelText: "索引优先级",
helperText: "取值范围1-128 数值越大,优先级越高",
),
autovalidateMode: AutovalidateMode.onUserInteraction,
validator: FormBuilderValidators.positiveNumber(),
),
FormBuilderTextField(
name: "seed_ratio",
decoration: const InputDecoration(
labelText: "做种率",
helperText: "种子的做种率,达到此做种率后,种子才会被删除, 0表示不做种",
hintText: "1.0",
),
autovalidateMode: AutovalidateMode.onUserInteraction,
validator: FormBuilderValidators.numeric(),
),
FormBuilderSwitch(name: "disabled", title: const Text("禁用此索引器"))
],
),
);
@@ -87,9 +109,13 @@ class _IndexerState extends ConsumerState<IndexerSettings> {
if (_formKey.currentState!.saveAndValidate()) {
var values = _formKey.currentState!.value;
return ref.read(indexersProvider.notifier).addIndexer(Indexer(
id: indexer.id,
name: values["name"],
url: values["url"],
apiKey: values["api_key"]));
apiKey: values["api_key"],
priority: int.parse(values["priority"]),
seedRatio: double.parse(values["seed_ratio"]),
disabled: values["disabled"]));
} else {
throw "validation_error";
}

View File

@@ -2,6 +2,7 @@ import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:ui/providers/series_details.dart';
import 'package:ui/widgets/detail_card.dart';
import 'package:ui/widgets/resource_list.dart';
import 'package:ui/widgets/utils.dart';
import 'package:ui/widgets/progress_indicator.dart';
import 'package:ui/widgets/widgets.dart';
@@ -49,17 +50,43 @@ class _TvDetailsPageState extends ConsumerState<TvDetailsPage> {
child: ep.status == "downloading"
? const Tooltip(
message: "下载中",
child: Icon(Icons.downloading),
child: IconButton(onPressed: null, icon: Icon(Icons.downloading)),
)
: (ep.status == "downloaded"
? const Tooltip(
message: "已下载",
child: Icon(Icons.download_done),
child: IconButton(onPressed: null, icon: Icon(Icons.download_done)),
)
: const Tooltip(
message: "未下载",
child: Icon(Icons.warning_amber_rounded),
))),
: (ep.monitored == true
? Tooltip(
message: "监控中",
child: IconButton(
onPressed: () {
ref
.read(mediaDetailsProvider(
widget.seriesId)
.notifier)
.changeMonitoringStatus(
ep.id!, false);
},
icon: const Icon(Icons.alarm)),
)
: Opacity(
opacity: 0.7,
child: Tooltip(
message: "未监控",
child: IconButton(
onPressed: () {
ref
.read(mediaDetailsProvider(
widget.seriesId)
.notifier)
.changeMonitoringStatus(
ep.id!, true);
},
icon: const Icon(Icons.alarm_off)),
),
)))),
),
DataCell(Row(
children: [
@@ -71,7 +98,8 @@ class _TvDetailsPageState extends ConsumerState<TvDetailsPage> {
.read(mediaDetailsProvider(widget.seriesId)
.notifier)
.searchAndDownload(widget.seriesId,
ep.seasonNumber!, ep.episodeNumber!).then((v) => showSnakeBar("开始下载: $v"));
ep.seasonNumber!, ep.episodeNumber!)
.then((v) => showSnakeBar("开始下载: $v"));
showLoadingWithFuture(f);
},
icon: const Icon(Icons.download)),
@@ -79,10 +107,13 @@ class _TvDetailsPageState extends ConsumerState<TvDetailsPage> {
const SizedBox(
width: 10,
),
IconButton(
onPressed: () => showAvailableTorrents(widget.seriesId,
ep.seasonNumber ?? 0, ep.episodeNumber ?? 0),
icon: const Icon(Icons.manage_search))
Tooltip(
message: "查看可用资源",
child: IconButton(
onPressed: () => showAvailableTorrents(widget.seriesId,
ep.seasonNumber ?? 0, ep.episodeNumber ?? 0),
icon: const Icon(Icons.manage_search)),
)
],
))
]);
@@ -118,7 +149,8 @@ class _TvDetailsPageState extends ConsumerState<TvDetailsPage> {
final f = ref
.read(mediaDetailsProvider(widget.seriesId)
.notifier)
.searchAndDownload(widget.seriesId, k, 0).then((v) => showSnakeBar("开始下载: $v"));
.searchAndDownload(widget.seriesId, k, 0)
.then((v) => showSnakeBar("开始下载: $v"));
showLoadingWithFuture(f);
},
icon: const Icon(Icons.download)),
@@ -126,10 +158,13 @@ class _TvDetailsPageState extends ConsumerState<TvDetailsPage> {
const SizedBox(
width: 10,
),
IconButton(
onPressed: () =>
showAvailableTorrents(widget.seriesId, k, 0),
icon: const Icon(Icons.manage_search))
Tooltip(
message: "查看可用资源",
child: IconButton(
onPressed: () =>
showAvailableTorrents(widget.seriesId, k, 0),
icon: const Icon(Icons.manage_search)),
)
],
))
], rows: m[k]!),
@@ -157,60 +192,21 @@ class _TvDetailsPageState extends ConsumerState<TvDetailsPage> {
context: context,
barrierDismissible: true,
builder: (BuildContext context) {
return Consumer(builder: (context, ref, _) {
final torrents = ref.watch(mediaTorrentsDataProvider(
(mediaId: id, seasonNumber: season, episodeNumber: episode)));
return AlertDialog(
//title: Text("资源"),
content: SelectionArea(
child: SizedBox(
width: MediaQuery.of(context).size.width*0.7,
height: MediaQuery.of(context).size.height*0.6,
child: torrents.when(
data: (v) {
return SingleChildScrollView(
child: DataTable(
dataTextStyle:
const TextStyle(fontSize: 12, height: 0),
columns: const [
DataColumn(label: Text("名称")),
DataColumn(label: Text("大小")),
DataColumn(label: Text("seeders")),
DataColumn(label: Text("peers")),
DataColumn(label: Text("操作"))
],
rows: List.generate(v.length, (i) {
final torrent = v[i];
return DataRow(cells: [
DataCell(Text("${torrent.name}")),
DataCell(Text(
"${torrent.size?.readableFileSize()}")),
DataCell(Text("${torrent.seeders}")),
DataCell(Text("${torrent.peers}")),
DataCell(IconButton(
icon: const Icon(Icons.download),
onPressed: () async {
var f = ref
.read(mediaTorrentsDataProvider((
mediaId: id,
seasonNumber: season,
episodeNumber: episode
)).notifier)
.download(torrent).then((v) => showSnakeBar("开始下载:${torrent.name}"));
showLoadingWithFuture(f);
},
))
]);
})));
},
error: (err, trace) {
return Text("$err");
},
loading: () => const MyProgressIndicator()),
return AlertDialog(
//title: Text("资源"),
content: SelectionArea(
child: SizedBox(
width: MediaQuery.of(context).size.width * 0.7,
height: MediaQuery.of(context).size.height * 0.6,
child: SingleChildScrollView(
child: ResourceList(
mediaId: id,
seasonNum: season,
episodeNum: episode,
),
),
));
});
),
));
},
);
}

View File

@@ -70,8 +70,8 @@ class WelcomePage extends ConsumerWidget {
children: [
LinearProgressIndicator(
value: 1,
color: item.downloadedNum ==
item.monitoredNum
color: item.downloadedNum! >=
item.monitoredNum!
? Colors.green
: Colors.blue,
),

View File

@@ -68,19 +68,10 @@ class _DetailCardState extends ConsumerState<DetailCard> {
const SizedBox(
width: 30,
),
Text(
Expanded(child: Text(
"${widget.details.mediaType == "tv" ? widget.details.storage!.tvPath : widget.details.storage!.moviePath}"
"${widget.details.targetDir}"),
const SizedBox(
width: 30,
),
widget.details.mediaType == 'tv'
? (widget.details.downloadHistoryEpisodes ==
true
? const Text("下载所有剧集")
: const Text("只下载更新剧集"))
: const Text("")
],
"${widget.details.targetDir}"),)
],
),
const Divider(thickness: 1, height: 1),
Text(

View File

@@ -0,0 +1,88 @@
import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:ui/providers/series_details.dart';
import 'package:ui/widgets/progress_indicator.dart';
import 'package:ui/widgets/utils.dart';
import 'package:ui/widgets/widgets.dart';
/// A table listing the torrents available for [mediaId], optionally narrowed
/// to one season/episode. A "消耗" (cost factor) column is only added when at
/// least one result comes from a private tracker.
class ResourceList extends ConsumerWidget {
  /// Media identifier the resources belong to.
  final String mediaId;

  /// Season filter; 0 appears to mean "not season-specific" (matches the
  /// defaults used by callers).
  final int seasonNum;

  /// Episode filter; 0 appears to mean "not episode-specific".
  final int episodeNum;

  const ResourceList(
      {super.key,
      required this.mediaId,
      this.seasonNum = 0,
      this.episodeNum = 0});

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    final torrents = ref.watch(mediaTorrentsDataProvider((
      mediaId: mediaId,
      seasonNumber: seasonNum,
      episodeNumber: episodeNum
    )));
    return torrents.when(
        data: (v) {
          // Idiomatic replacement for the hand-rolled flag loop: any()
          // short-circuits on the first private-tracker hit.
          final hasPrivate = v.any((item) => item.isPrivate == true);
          final columns = [
            const DataColumn(label: Text("名称")),
            const DataColumn(label: Text("大小")),
            const DataColumn(label: Text("S/P")),
            const DataColumn(label: Text("来源")),
          ];
          if (hasPrivate) {
            columns.add(const DataColumn(label: Text("消耗")));
          }
          columns.add(const DataColumn(label: Text("下载")));
          return DataTable(
              dataTextStyle: const TextStyle(fontSize: 12),
              columns: columns,
              rows: List.generate(v.length, (i) {
                final torrent = v[i];
                // Renamed from `rows`: this list holds the cells of a single
                // row, not rows.
                final cells = [
                  DataCell(Text("${torrent.name}")),
                  DataCell(Text("${torrent.size?.readableFileSize()}")),
                  DataCell(Text("${torrent.seeders}/${torrent.peers}")),
                  DataCell(Text(torrent.source ?? "-")),
                ];
                if (hasPrivate) {
                  cells.add(DataCell(Text(torrent.isPrivate == true
                      ? "${torrent.downloadFactor}dl/${torrent.uploadFactor}up"
                      : "-")));
                }
                cells.add(DataCell(IconButton(
                  icon: const Icon(Icons.download),
                  onPressed: () async {
                    var f = ref
                        .read(mediaTorrentsDataProvider((
                          mediaId: mediaId,
                          seasonNumber: seasonNum,
                          episodeNumber: episodeNum
                        )).notifier)
                        .download(torrent)
                        .then((v) => showSnakeBar("开始下载:${torrent.name}"));
                    showLoadingWithFuture(f);
                  },
                )));
                return DataRow(cells: cells);
              }));
        },
        error: (err, trace) {
          // The backend's "no resource found" error is rendered as a friendly
          // empty state rather than a raw error message.
          return "$err".contains("no resource found")
              ? const Center(
                  child: Text("没有资源"),
                )
              : Text("$err");
        },
        loading: () => const MyProgressIndicator());
  }
}

View File

@@ -49,7 +49,7 @@ extension FileFormatter on num {
final base = base1024 ? 1024 : 1000;
if (this <= 0) return "0";
final units = ["B", "kB", "MB", "GB", "TB"];
int digitGroups = (log(this) / log(base)).round();
int digitGroups = (log(this) / log(base)).floor();
return "${NumberFormat("#,##0.#").format(this / pow(base, digitGroups))} ${units[digitGroups]}";
}
}