feat: file size limiter

Simon Ding
2024-08-03 12:31:53 +08:00
parent 16216fcc4f
commit 241e30152b
16 changed files with 280 additions and 22 deletions

View File

@@ -147,6 +147,7 @@ func (c *Client) AddMediaWatchlist(m *ent.Media, episodes []int) (*ent.Media, er
SetResolution(m.Resolution).
SetTargetDir(m.TargetDir).
SetDownloadHistoryEpisodes(m.DownloadHistoryEpisodes).
+SetLimiter(m.Limiter).
AddEpisodeIDs(episodes...).
Save(context.TODO())
return r, err

View File

@@ -3,8 +3,10 @@
package ent
import (
+"encoding/json"
"fmt"
"polaris/ent/media"
+"polaris/ent/schema"
"strings"
"time"
@@ -43,6 +45,8 @@ type Media struct {
TargetDir string `json:"target_dir,omitempty"`
// tv series only
DownloadHistoryEpisodes bool `json:"download_history_episodes,omitempty"`
+// Limiter holds the value of the "limiter" field.
+Limiter *schema.MediaLimiter `json:"limiter,omitempty"`
// Edges holds the relations/edges for other nodes in the graph.
// The values are being populated by the MediaQuery when eager-loading is set.
Edges MediaEdges `json:"edges"`
@@ -72,6 +76,8 @@ func (*Media) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
+case media.FieldLimiter:
+values[i] = new([]byte)
case media.FieldDownloadHistoryEpisodes:
values[i] = new(sql.NullBool)
case media.FieldID, media.FieldTmdbID, media.FieldStorageID:
@@ -179,6 +185,14 @@ func (m *Media) assignValues(columns []string, values []any) error {
} else if value.Valid {
m.DownloadHistoryEpisodes = value.Bool
}
+case media.FieldLimiter:
+if value, ok := values[i].(*[]byte); !ok {
+return fmt.Errorf("unexpected type %T for field limiter", values[i])
+} else if value != nil && len(*value) > 0 {
+if err := json.Unmarshal(*value, &m.Limiter); err != nil {
+return fmt.Errorf("unmarshal field limiter: %w", err)
+}
+}
default:
m.selectValues.Set(columns[i], values[i])
}
@@ -258,6 +272,9 @@ func (m *Media) String() string {
builder.WriteString(", ")
builder.WriteString("download_history_episodes=")
builder.WriteString(fmt.Sprintf("%v", m.DownloadHistoryEpisodes))
+builder.WriteString(", ")
+builder.WriteString("limiter=")
+builder.WriteString(fmt.Sprintf("%v", m.Limiter))
builder.WriteByte(')')
return builder.String()
}

View File

@@ -41,6 +41,8 @@ const (
FieldTargetDir = "target_dir"
// FieldDownloadHistoryEpisodes holds the string denoting the download_history_episodes field in the database.
FieldDownloadHistoryEpisodes = "download_history_episodes"
+// FieldLimiter holds the string denoting the limiter field in the database.
+FieldLimiter = "limiter"
// EdgeEpisodes holds the string denoting the episodes edge name in mutations.
EdgeEpisodes = "episodes"
// Table holds the table name of the media in the database.
@@ -70,6 +72,7 @@ var Columns = []string{
FieldStorageID,
FieldTargetDir,
FieldDownloadHistoryEpisodes,
+FieldLimiter,
}
// ValidColumn reports if the column name is valid (part of the table columns).

View File

@@ -775,6 +775,16 @@ func DownloadHistoryEpisodesNotNil() predicate.Media {
return predicate.Media(sql.FieldNotNull(FieldDownloadHistoryEpisodes))
}
+// LimiterIsNil applies the IsNil predicate on the "limiter" field.
+func LimiterIsNil() predicate.Media {
+return predicate.Media(sql.FieldIsNull(FieldLimiter))
+}
+// LimiterNotNil applies the NotNil predicate on the "limiter" field.
+func LimiterNotNil() predicate.Media {
+return predicate.Media(sql.FieldNotNull(FieldLimiter))
+}
// HasEpisodes applies the HasEdge predicate on the "episodes" edge.
func HasEpisodes() predicate.Media {
return predicate.Media(func(s *sql.Selector) {
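
Note (not part of this commit): a minimal sketch of how the new generated predicates could be used from application code, assuming an open *ent.Client named client; the query chain follows standard entgo conventions.

package example

import (
	"context"

	"polaris/ent"
	"polaris/ent/media"
)

// mediaWithLimiter lists every media entry that has a size limiter configured.
func mediaWithLimiter(ctx context.Context, client *ent.Client) ([]*ent.Media, error) {
	return client.Media.Query().
		Where(media.LimiterNotNil()).
		All(ctx)
}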

View File

@@ -8,6 +8,7 @@ import (
"fmt" "fmt"
"polaris/ent/episode" "polaris/ent/episode"
"polaris/ent/media" "polaris/ent/media"
"polaris/ent/schema"
"time" "time"
"entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/dialect/sql/sqlgraph"
@@ -155,6 +156,12 @@ func (mc *MediaCreate) SetNillableDownloadHistoryEpisodes(b *bool) *MediaCreate
return mc return mc
} }
// SetLimiter sets the "limiter" field.
func (mc *MediaCreate) SetLimiter(sl *schema.MediaLimiter) *MediaCreate {
mc.mutation.SetLimiter(sl)
return mc
}
// AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs. // AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
func (mc *MediaCreate) AddEpisodeIDs(ids ...int) *MediaCreate { func (mc *MediaCreate) AddEpisodeIDs(ids ...int) *MediaCreate {
mc.mutation.AddEpisodeIDs(ids...) mc.mutation.AddEpisodeIDs(ids...)
@@ -340,6 +347,10 @@ func (mc *MediaCreate) createSpec() (*Media, *sqlgraph.CreateSpec) {
_spec.SetField(media.FieldDownloadHistoryEpisodes, field.TypeBool, value) _spec.SetField(media.FieldDownloadHistoryEpisodes, field.TypeBool, value)
_node.DownloadHistoryEpisodes = value _node.DownloadHistoryEpisodes = value
} }
if value, ok := mc.mutation.Limiter(); ok {
_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
_node.Limiter = value
}
if nodes := mc.mutation.EpisodesIDs(); len(nodes) > 0 { if nodes := mc.mutation.EpisodesIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{ edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M, Rel: sqlgraph.O2M,

View File

@@ -9,6 +9,7 @@ import (
"polaris/ent/episode" "polaris/ent/episode"
"polaris/ent/media" "polaris/ent/media"
"polaris/ent/predicate" "polaris/ent/predicate"
"polaris/ent/schema"
"time" "time"
"entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql"
@@ -249,6 +250,18 @@ func (mu *MediaUpdate) ClearDownloadHistoryEpisodes() *MediaUpdate {
return mu return mu
} }
// SetLimiter sets the "limiter" field.
func (mu *MediaUpdate) SetLimiter(sl *schema.MediaLimiter) *MediaUpdate {
mu.mutation.SetLimiter(sl)
return mu
}
// ClearLimiter clears the value of the "limiter" field.
func (mu *MediaUpdate) ClearLimiter() *MediaUpdate {
mu.mutation.ClearLimiter()
return mu
}
// AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs. // AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
func (mu *MediaUpdate) AddEpisodeIDs(ids ...int) *MediaUpdate { func (mu *MediaUpdate) AddEpisodeIDs(ids ...int) *MediaUpdate {
mu.mutation.AddEpisodeIDs(ids...) mu.mutation.AddEpisodeIDs(ids...)
@@ -401,6 +414,12 @@ func (mu *MediaUpdate) sqlSave(ctx context.Context) (n int, err error) {
if mu.mutation.DownloadHistoryEpisodesCleared() { if mu.mutation.DownloadHistoryEpisodesCleared() {
_spec.ClearField(media.FieldDownloadHistoryEpisodes, field.TypeBool) _spec.ClearField(media.FieldDownloadHistoryEpisodes, field.TypeBool)
} }
if value, ok := mu.mutation.Limiter(); ok {
_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
}
if mu.mutation.LimiterCleared() {
_spec.ClearField(media.FieldLimiter, field.TypeJSON)
}
if mu.mutation.EpisodesCleared() { if mu.mutation.EpisodesCleared() {
edge := &sqlgraph.EdgeSpec{ edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M, Rel: sqlgraph.O2M,
@@ -686,6 +705,18 @@ func (muo *MediaUpdateOne) ClearDownloadHistoryEpisodes() *MediaUpdateOne {
return muo return muo
} }
// SetLimiter sets the "limiter" field.
func (muo *MediaUpdateOne) SetLimiter(sl *schema.MediaLimiter) *MediaUpdateOne {
muo.mutation.SetLimiter(sl)
return muo
}
// ClearLimiter clears the value of the "limiter" field.
func (muo *MediaUpdateOne) ClearLimiter() *MediaUpdateOne {
muo.mutation.ClearLimiter()
return muo
}
// AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs. // AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
func (muo *MediaUpdateOne) AddEpisodeIDs(ids ...int) *MediaUpdateOne { func (muo *MediaUpdateOne) AddEpisodeIDs(ids ...int) *MediaUpdateOne {
muo.mutation.AddEpisodeIDs(ids...) muo.mutation.AddEpisodeIDs(ids...)
@@ -868,6 +899,12 @@ func (muo *MediaUpdateOne) sqlSave(ctx context.Context) (_node *Media, err error
if muo.mutation.DownloadHistoryEpisodesCleared() { if muo.mutation.DownloadHistoryEpisodesCleared() {
_spec.ClearField(media.FieldDownloadHistoryEpisodes, field.TypeBool) _spec.ClearField(media.FieldDownloadHistoryEpisodes, field.TypeBool)
} }
if value, ok := muo.mutation.Limiter(); ok {
_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
}
if muo.mutation.LimiterCleared() {
_spec.ClearField(media.FieldLimiter, field.TypeJSON)
}
if muo.mutation.EpisodesCleared() { if muo.mutation.EpisodesCleared() {
edge := &sqlgraph.EdgeSpec{ edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M, Rel: sqlgraph.O2M,

View File

@@ -108,6 +108,7 @@ var (
{Name: "storage_id", Type: field.TypeInt, Nullable: true}, {Name: "storage_id", Type: field.TypeInt, Nullable: true},
{Name: "target_dir", Type: field.TypeString, Nullable: true}, {Name: "target_dir", Type: field.TypeString, Nullable: true},
{Name: "download_history_episodes", Type: field.TypeBool, Nullable: true, Default: false}, {Name: "download_history_episodes", Type: field.TypeBool, Nullable: true, Default: false},
{Name: "limiter", Type: field.TypeJSON, Nullable: true},
} }
// MediaTable holds the schema information for the "media" table. // MediaTable holds the schema information for the "media" table.
MediaTable = &schema.Table{ MediaTable = &schema.Table{

View File

@@ -13,6 +13,7 @@ import (
"polaris/ent/media" "polaris/ent/media"
"polaris/ent/notificationclient" "polaris/ent/notificationclient"
"polaris/ent/predicate" "polaris/ent/predicate"
"polaris/ent/schema"
"polaris/ent/settings" "polaris/ent/settings"
"polaris/ent/storage" "polaris/ent/storage"
"sync" "sync"
@@ -3600,6 +3601,7 @@ type MediaMutation struct {
addstorage_id *int addstorage_id *int
target_dir *string target_dir *string
download_history_episodes *bool download_history_episodes *bool
limiter **schema.MediaLimiter
clearedFields map[string]struct{} clearedFields map[string]struct{}
episodes map[int]struct{} episodes map[int]struct{}
removedepisodes map[int]struct{} removedepisodes map[int]struct{}
@@ -4268,6 +4270,55 @@ func (m *MediaMutation) ResetDownloadHistoryEpisodes() {
delete(m.clearedFields, media.FieldDownloadHistoryEpisodes) delete(m.clearedFields, media.FieldDownloadHistoryEpisodes)
} }
// SetLimiter sets the "limiter" field.
func (m *MediaMutation) SetLimiter(sl *schema.MediaLimiter) {
m.limiter = &sl
}
// Limiter returns the value of the "limiter" field in the mutation.
func (m *MediaMutation) Limiter() (r *schema.MediaLimiter, exists bool) {
v := m.limiter
if v == nil {
return
}
return *v, true
}
// OldLimiter returns the old "limiter" field's value of the Media entity.
// If the Media object wasn't provided to the builder, the object is fetched from the database.
// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
func (m *MediaMutation) OldLimiter(ctx context.Context) (v *schema.MediaLimiter, err error) {
if !m.op.Is(OpUpdateOne) {
return v, errors.New("OldLimiter is only allowed on UpdateOne operations")
}
if m.id == nil || m.oldValue == nil {
return v, errors.New("OldLimiter requires an ID field in the mutation")
}
oldValue, err := m.oldValue(ctx)
if err != nil {
return v, fmt.Errorf("querying old value for OldLimiter: %w", err)
}
return oldValue.Limiter, nil
}
// ClearLimiter clears the value of the "limiter" field.
func (m *MediaMutation) ClearLimiter() {
m.limiter = nil
m.clearedFields[media.FieldLimiter] = struct{}{}
}
// LimiterCleared returns if the "limiter" field was cleared in this mutation.
func (m *MediaMutation) LimiterCleared() bool {
_, ok := m.clearedFields[media.FieldLimiter]
return ok
}
// ResetLimiter resets all changes to the "limiter" field.
func (m *MediaMutation) ResetLimiter() {
m.limiter = nil
delete(m.clearedFields, media.FieldLimiter)
}
// AddEpisodeIDs adds the "episodes" edge to the Episode entity by ids. // AddEpisodeIDs adds the "episodes" edge to the Episode entity by ids.
func (m *MediaMutation) AddEpisodeIDs(ids ...int) { func (m *MediaMutation) AddEpisodeIDs(ids ...int) {
if m.episodes == nil { if m.episodes == nil {
@@ -4356,7 +4407,7 @@ func (m *MediaMutation) Type() string {
// order to get all numeric fields that were incremented/decremented, call // order to get all numeric fields that were incremented/decremented, call
// AddedFields(). // AddedFields().
func (m *MediaMutation) Fields() []string { func (m *MediaMutation) Fields() []string {
fields := make([]string, 0, 13) fields := make([]string, 0, 14)
if m.tmdb_id != nil { if m.tmdb_id != nil {
fields = append(fields, media.FieldTmdbID) fields = append(fields, media.FieldTmdbID)
} }
@@ -4396,6 +4447,9 @@ func (m *MediaMutation) Fields() []string {
if m.download_history_episodes != nil { if m.download_history_episodes != nil {
fields = append(fields, media.FieldDownloadHistoryEpisodes) fields = append(fields, media.FieldDownloadHistoryEpisodes)
} }
if m.limiter != nil {
fields = append(fields, media.FieldLimiter)
}
return fields return fields
} }
@@ -4430,6 +4484,8 @@ func (m *MediaMutation) Field(name string) (ent.Value, bool) {
return m.TargetDir() return m.TargetDir()
case media.FieldDownloadHistoryEpisodes: case media.FieldDownloadHistoryEpisodes:
return m.DownloadHistoryEpisodes() return m.DownloadHistoryEpisodes()
case media.FieldLimiter:
return m.Limiter()
} }
return nil, false return nil, false
} }
@@ -4465,6 +4521,8 @@ func (m *MediaMutation) OldField(ctx context.Context, name string) (ent.Value, e
return m.OldTargetDir(ctx) return m.OldTargetDir(ctx)
case media.FieldDownloadHistoryEpisodes: case media.FieldDownloadHistoryEpisodes:
return m.OldDownloadHistoryEpisodes(ctx) return m.OldDownloadHistoryEpisodes(ctx)
case media.FieldLimiter:
return m.OldLimiter(ctx)
} }
return nil, fmt.Errorf("unknown Media field %s", name) return nil, fmt.Errorf("unknown Media field %s", name)
} }
@@ -4565,6 +4623,13 @@ func (m *MediaMutation) SetField(name string, value ent.Value) error {
} }
m.SetDownloadHistoryEpisodes(v) m.SetDownloadHistoryEpisodes(v)
return nil return nil
case media.FieldLimiter:
v, ok := value.(*schema.MediaLimiter)
if !ok {
return fmt.Errorf("unexpected type %T for field %s", value, name)
}
m.SetLimiter(v)
return nil
} }
return fmt.Errorf("unknown Media field %s", name) return fmt.Errorf("unknown Media field %s", name)
} }
@@ -4634,6 +4699,9 @@ func (m *MediaMutation) ClearedFields() []string {
if m.FieldCleared(media.FieldDownloadHistoryEpisodes) { if m.FieldCleared(media.FieldDownloadHistoryEpisodes) {
fields = append(fields, media.FieldDownloadHistoryEpisodes) fields = append(fields, media.FieldDownloadHistoryEpisodes)
} }
if m.FieldCleared(media.FieldLimiter) {
fields = append(fields, media.FieldLimiter)
}
return fields return fields
} }
@@ -4660,6 +4728,9 @@ func (m *MediaMutation) ClearField(name string) error {
case media.FieldDownloadHistoryEpisodes: case media.FieldDownloadHistoryEpisodes:
m.ClearDownloadHistoryEpisodes() m.ClearDownloadHistoryEpisodes()
return nil return nil
case media.FieldLimiter:
m.ClearLimiter()
return nil
} }
return fmt.Errorf("unknown Media nullable field %s", name) return fmt.Errorf("unknown Media nullable field %s", name)
} }
@@ -4707,6 +4778,9 @@ func (m *MediaMutation) ResetField(name string) error {
case media.FieldDownloadHistoryEpisodes: case media.FieldDownloadHistoryEpisodes:
m.ResetDownloadHistoryEpisodes() m.ResetDownloadHistoryEpisodes()
return nil return nil
case media.FieldLimiter:
m.ResetLimiter()
return nil
} }
return fmt.Errorf("unknown Media field %s", name) return fmt.Errorf("unknown Media field %s", name)
} }

View File

@@ -29,6 +29,7 @@ func (Media) Fields() []ent.Field {
field.Int("storage_id").Optional(), field.Int("storage_id").Optional(),
field.String("target_dir").Optional(), field.String("target_dir").Optional(),
field.Bool("download_history_episodes").Optional().Default(false).Comment("tv series only"), field.Bool("download_history_episodes").Optional().Default(false).Comment("tv series only"),
field.JSON("limiter", &MediaLimiter{}).Optional(),
} }
} }
@@ -38,3 +39,8 @@ func (Media) Edges() []ent.Edge {
edge.To("episodes", Episode.Type), edge.To("episodes", Episode.Type),
} }
} }
type MediaLimiter struct {
SizeMin int `json:"size_min"`
SizeMax int `json:"size_max"`
}
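
Note (not part of this commit): ent stores a field.JSON column as a serialized blob, so MediaLimiter round-trips through encoding/json. A small self-contained sketch of the stored format:

package main

import (
	"encoding/json"
	"fmt"
)

type MediaLimiter struct {
	SizeMin int `json:"size_min"`
	SizeMax int `json:"size_max"`
}

func main() {
	l := MediaLimiter{SizeMin: 400, SizeMax: 4000}

	// What the database column holds: {"size_min":400,"size_max":4000}
	blob, _ := json.Marshal(l)
	fmt.Println(string(blob))

	// Mirrors what Media.assignValues does when scanning the row back out.
	var back MediaLimiter
	if err := json.Unmarshal(blob, &back); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", back)
}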

View File

@@ -112,7 +112,7 @@ func (c *Client) DownloadEpisodeTorrent(r1 torznab.Result, seriesId, seasonNum,
}
func (c *Client) SearchAndDownload(seriesId, seasonNum, episodeNum int) (*string, error) {
-res, err := SearchTvSeries(c.db, seriesId, seasonNum, []int{episodeNum}, true)
+res, err := SearchTvSeries(c.db, seriesId, seasonNum, []int{episodeNum}, true, true)
if err != nil {
return nil, err
}

View File

@@ -262,7 +262,7 @@ func (c *Client) downloadMovieSingleEpisode(ep *ent.Episode) error {
return errors.Wrap(err, "connect transmission") return errors.Wrap(err, "connect transmission")
} }
res, err := SearchMovie(c.db, ep.MediaID, true) res, err := SearchMovie(c.db, ep.MediaID, true, true)
if err != nil { if err != nil {
return errors.Wrap(err, "search movie") return errors.Wrap(err, "search movie")

View File

@@ -16,7 +16,7 @@ import (
"github.com/pkg/errors" "github.com/pkg/errors"
) )
func SearchTvSeries(db1 *db.Client, seriesId, seasonNum int, episodes []int, checkResolution bool) ([]torznab.Result, error) { func SearchTvSeries(db1 *db.Client, seriesId, seasonNum int, episodes []int, checkResolution bool, checkFileSize bool) ([]torznab.Result, error) {
series := db1.GetMediaDetails(seriesId) series := db1.GetMediaDetails(seriesId)
if series == nil { if series == nil {
return nil, fmt.Errorf("no tv series of id %v", seriesId) return nil, fmt.Errorf("no tv series of id %v", seriesId)
@@ -55,6 +55,17 @@ func SearchTvSeries(db1 *db.Client, seriesId, seasonNum int, episodes []int, che
if !utils.IsNameAcceptable(meta.NameEn, series.NameEn) && !utils.IsNameAcceptable(meta.NameCn, series.NameCn) { if !utils.IsNameAcceptable(meta.NameEn, series.NameEn) && !utils.IsNameAcceptable(meta.NameCn, series.NameCn) {
continue continue
} }
if checkFileSize && series.Limiter != nil {
if series.Limiter.SizeMin > 0 && r.Size < series.Limiter.SizeMin {
//min size not satified
continue
}
if series.Limiter.SizeMax > 0 && r.Size > series.Limiter.SizeMax {
//max size not satified
continue
}
}
filtered = append(filtered, r) filtered = append(filtered, r)
} }
if len(filtered) == 0 { if len(filtered) == 0 {
@@ -80,7 +91,7 @@ func isNumberedSeries(detail *db.MediaDetails) bool {
return hasSeason2 && !season2HasEpisode1 //only one 1st episode return hasSeason2 && !season2HasEpisode1 //only one 1st episode
} }
func SearchMovie(db1 *db.Client, movieId int, checkResolution bool) ([]torznab.Result, error) { func SearchMovie(db1 *db.Client, movieId int, checkResolution bool, checkFileSize bool) ([]torznab.Result, error) {
movieDetail := db1.GetMediaDetails(movieId) movieDetail := db1.GetMediaDetails(movieId)
if movieDetail == nil { if movieDetail == nil {
return nil, errors.New("no media found of id") return nil, errors.New("no media found of id")
@@ -103,6 +114,18 @@ func SearchMovie(db1 *db.Client, movieId int, checkResolution bool) ([]torznab.R
if checkResolution && meta.Resolution != movieDetail.Resolution.String() { if checkResolution && meta.Resolution != movieDetail.Resolution.String() {
continue continue
} }
if checkFileSize && movieDetail.Limiter != nil {
if movieDetail.Limiter.SizeMin > 0 && r.Size < movieDetail.Limiter.SizeMin {
//min size not satified
continue
}
if movieDetail.Limiter.SizeMax > 0 && r.Size > movieDetail.Limiter.SizeMax {
//max size not satified
continue
}
}
ss := strings.Split(movieDetail.AirDate, "-")[0] ss := strings.Split(movieDetail.AirDate, "-")[0]
year, _ := strconv.Atoi(ss) year, _ := strconv.Atoi(ss)
if meta.Year != year && meta.Year != year-1 && meta.Year != year+1 { //year not match if meta.Year != year && meta.Year != year-1 && meta.Year != year+1 { //year not match
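
Note (not part of this commit): both loops above apply the same size rule, so an extracted helper makes the behavior easier to see. Field names mirror schema.MediaLimiter; the size unit is assumed to match whatever torznab.Result.Size reports.

package main

import "fmt"

type MediaLimiter struct {
	SizeMin int `json:"size_min"`
	SizeMax int `json:"size_max"`
}

// withinLimit reports whether a release of the given size passes the limiter.
// A zero bound means "no limit" on that side; a nil limiter accepts everything.
func withinLimit(l *MediaLimiter, size int) bool {
	if l == nil {
		return true
	}
	if l.SizeMin > 0 && size < l.SizeMin {
		return false // min size not satisfied
	}
	if l.SizeMax > 0 && size > l.SizeMax {
		return false // max size not satisfied
	}
	return true
}

func main() {
	l := &MediaLimiter{SizeMin: 400, SizeMax: 4000}
	fmt.Println(withinLimit(l, 300), withinLimit(l, 1500), withinLimit(l, 9000)) // false true false
}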

View File

@@ -13,7 +13,7 @@ import (
func (s *Server) searchAndDownloadSeasonPackage(seriesId, seasonNum int) (*string, error) {
-res, err := core.SearchTvSeries(s.db, seriesId, seasonNum, nil, true)
+res, err := core.SearchTvSeries(s.db, seriesId, seasonNum, nil, true, true)
if err != nil {
return nil, err
}
@@ -46,13 +46,13 @@ func (s *Server) SearchAvailableTorrents(c *gin.Context) (interface{}, error) {
if in.Episode == 0 {
//search season package
log.Infof("search series season package S%02d", in.Season)
-res, err = core.SearchTvSeries(s.db, in.ID, in.Season, nil, false)
+res, err = core.SearchTvSeries(s.db, in.ID, in.Season, nil, false, false)
if err != nil {
return nil, errors.Wrap(err, "search season package")
}
} else {
log.Infof("search series episode S%02dE%02d", in.Season, in.Episode)
-res, err = core.SearchTvSeries(s.db, in.ID, in.Season, []int{in.Episode}, false)
+res, err = core.SearchTvSeries(s.db, in.ID, in.Season, []int{in.Episode}, false, false)
if err != nil {
if err.Error() == "no resource found" {
return []string{}, nil
@@ -63,7 +63,7 @@ func (s *Server) SearchAvailableTorrents(c *gin.Context) (interface{}, error) {
}
} else {
log.Info("search movie %d", in.ID)
-res, err = core.SearchMovie(s.db, in.ID, false)
+res, err = core.SearchMovie(s.db, in.ID, false, false)
if err != nil {
if err.Error() == "no resource found" {
return []string{}, nil

View File

@@ -10,6 +10,7 @@ import (
"polaris/ent" "polaris/ent"
"polaris/ent/episode" "polaris/ent/episode"
"polaris/ent/media" "polaris/ent/media"
"polaris/ent/schema"
"polaris/log" "polaris/log"
"strconv" "strconv"
"time" "time"
@@ -66,6 +67,8 @@ type addWatchlistIn struct {
Resolution string `json:"resolution" binding:"required"` Resolution string `json:"resolution" binding:"required"`
Folder string `json:"folder" binding:"required"` Folder string `json:"folder" binding:"required"`
DownloadHistoryEpisodes bool `json:"download_history_episodes"` //for tv DownloadHistoryEpisodes bool `json:"download_history_episodes"` //for tv
SizeMin int `json:"size_min"`
SizeMax int `json:"size_max"`
} }
func (s *Server) AddTv2Watchlist(c *gin.Context) (interface{}, error) { func (s *Server) AddTv2Watchlist(c *gin.Context) (interface{}, error) {
@@ -134,7 +137,7 @@ func (s *Server) AddTv2Watchlist(c *gin.Context) (interface{}, error) {
epIds = append(epIds, epid) epIds = append(epIds, epid)
} }
} }
r, err := s.db.AddMediaWatchlist(&ent.Media{ m := &ent.Media{
TmdbID: int(detail.ID), TmdbID: int(detail.ID),
MediaType: media.MediaTypeTv, MediaType: media.MediaTypeTv,
NameCn: nameCn, NameCn: nameCn,
@@ -146,7 +149,10 @@ func (s *Server) AddTv2Watchlist(c *gin.Context) (interface{}, error) {
StorageID: in.StorageID, StorageID: in.StorageID,
TargetDir: in.Folder, TargetDir: in.Folder,
DownloadHistoryEpisodes: in.DownloadHistoryEpisodes, DownloadHistoryEpisodes: in.DownloadHistoryEpisodes,
}, epIds) Limiter: &schema.MediaLimiter{SizeMin: in.SizeMin, SizeMax: in.SizeMax},
}
r, err := s.db.AddMediaWatchlist(m, epIds)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "add to list") return nil, errors.Wrap(err, "add to list")
} }
@@ -172,6 +178,7 @@ func (s *Server) AddMovie2Watchlist(c *gin.Context) (interface{}, error) {
if err := c.ShouldBindJSON(&in); err != nil { if err := c.ShouldBindJSON(&in); err != nil {
return nil, errors.Wrap(err, "bind query") return nil, errors.Wrap(err, "bind query")
} }
log.Infof("add movie watchlist input: %+v", in)
detailCn, err := s.MustTMDB().GetMovieDetails(in.TmdbID, db.LanguageCN) detailCn, err := s.MustTMDB().GetMovieDetails(in.TmdbID, db.LanguageCN)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "get movie detail") return nil, errors.Wrap(err, "get movie detail")
@@ -212,6 +219,7 @@ func (s *Server) AddMovie2Watchlist(c *gin.Context) (interface{}, error) {
Resolution: media.Resolution(in.Resolution), Resolution: media.Resolution(in.Resolution),
StorageID: in.StorageID, StorageID: in.StorageID,
TargetDir: in.Folder, TargetDir: in.Folder,
Limiter: &schema.MediaLimiter{SizeMin: in.SizeMin, SizeMax: in.SizeMax},
}, []int{epid}) }, []int{epid})
if err != nil { if err != nil {
return nil, errors.Wrap(err, "add to list") return nil, errors.Wrap(err, "add to list")

View File

@@ -1,6 +1,7 @@
import 'dart:async';
import 'package:dio/dio.dart';
+import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:quiver/strings.dart';
import 'package:ui/providers/APIs.dart';
@@ -91,16 +92,24 @@ class SearchPageData
state = newState;
}
-Future<void> submit2Watchlist(int tmdbId, int storageId, String resolution,
-String mediaType, String folder, bool downloadHistoryEpisodes) async {
-final dio = await APIs.getDio();
+Future<void> submit2Watchlist(
+int tmdbId,
+int storageId,
+String resolution,
+String mediaType,
+String folder,
+bool downloadHistoryEpisodes,
+RangeValues limiter) async {
+final dio = APIs.getDio();
if (mediaType == "tv") {
var resp = await dio.post(APIs.watchlistTvUrl, data: {
"tmdb_id": tmdbId,
"storage_id": storageId,
"resolution": resolution,
"folder": folder,
-"download_history_episodes": downloadHistoryEpisodes
+"download_history_episodes": downloadHistoryEpisodes,
+"size_min": (limiter.start * 1000).toInt(),
+"size_max": (limiter.end * 1000).toInt(),
});
var sp = ServerResponse.fromJson(resp.data);
if (sp.code != 0) {
@@ -112,7 +121,9 @@ class SearchPageData
"tmdb_id": tmdbId,
"storage_id": storageId,
"resolution": resolution,
-"folder": folder
+"folder": folder,
+"size_min": (limiter.start * 1000).toInt(),
+"size_max": (limiter.end * 1000).toInt(),
});
var sp = ServerResponse.fromJson(resp.data);
if (sp.code != 0) {

View File

@@ -150,6 +150,8 @@ class _SearchPageState extends ConsumerState<SearchPage> {
Future<void> _showSubmitDialog(BuildContext context, SearchResult item) {
final _formKey = GlobalKey<FormBuilderState>();
+bool enabledSizedLimiter = false;
+double sizeMax = 5000;
return showDialog<void>(
context: context,
@@ -166,7 +168,7 @@
title: Text('添加: ${item.name}'),
content: SizedBox(
width: 500,
-height: 200,
+height: 400,
child: FormBuilder(
key: _formKey,
initialValue: const {
@@ -174,6 +176,8 @@
"storage": null,
"folder": "",
"history_episodes": false,
+"enable_size_limiter": false,
+"size_limiter": RangeValues(400, 4000),
},
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
@@ -248,6 +252,51 @@
size: 20,
),
),
+FormBuilderSwitch(
+name: "enable_size_limiter",
+title: Text(item.mediaType == "tv"
+? "是否限制每集文件大小"
+: "是否限制电影文件大小"),
+onChanged: (value) {
+setState(
+() {
+enabledSizedLimiter = value!;
+},
+);
+},
+),
+enabledSizedLimiter
+? FormBuilderRangeSlider(
+maxValueWidget: (max) =>
+Text("${sizeMax / 1000} GB"),
+minValueWidget: (min) => Text("0"),
+valueWidget: (value) {
+final sss = value.split(" ");
+return Text(
+"${readableSize(sss[0])} - ${readableSize(sss[2])}");
+},
+onChangeEnd: (value) {
+if (value.end > sizeMax * 0.9) {
+setState(
+() {
+sizeMax = sizeMax * 5;
+},
+);
+} else if (value.end <
+sizeMax * 0.2) {
+if (sizeMax > 5000) {
+setState(
+() {
+sizeMax = sizeMax / 5;
+},
+);
+}
+}
+},
+name: "size_limiter",
+min: 0,
+max: sizeMax)
+: const SizedBox(),
item.mediaType == "tv"
? SizedBox(
width: 250,
@@ -285,7 +334,6 @@
onPressed: () async {
if (_formKey.currentState!.saveAndValidate()) {
final values = _formKey.currentState!.value;
-//print(values);
var f = ref
.read(searchPageDataProvider(widget.query ?? "")
.notifier)
@@ -295,7 +343,8 @@
values["resolution"],
item.mediaType!,
values["folder"],
-values["history_episodes"] ?? false)
+values["history_episodes"] ?? false,
+enabledSizedLimiter ? values["size_limiter"] : const RangeValues(-1, -1))
.then((v) {
Navigator.of(context).pop();
showSnakeBar("添加成功:${item.name}");
@@ -310,6 +359,13 @@
);
});
}
+String readableSize(String v) {
+if (v.endsWith("K")) {
+return v.replaceAll("K", " GB");
+}
+return "$v MB";
+}
}
class SearchBarApp extends StatefulWidget {