mirror of https://github.com/simon-ding/polaris.git
feat: file size limiter
db/db.go
@@ -147,6 +147,7 @@ func (c *Client) AddMediaWatchlist(m *ent.Media, episodes []int) (*ent.Media, er
 		SetResolution(m.Resolution).
 		SetTargetDir(m.TargetDir).
 		SetDownloadHistoryEpisodes(m.DownloadHistoryEpisodes).
+		SetLimiter(m.Limiter).
 		AddEpisodeIDs(episodes...).
 		Save(context.TODO())
 	return r, err
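The db/db.go change simply forwards the new Limiter value to the ent create builder, so whatever the API layer sets on ent.Media is stored with the watchlist entry. A minimal caller sketch, assuming the repo's *db.Client, ent.Media and schema.MediaLimiter types; the IDs and bounds are illustrative, not taken from the commit:

```go
// Sketch only: add a watchlist entry with a size limiter attached.
// The 400/4000 bounds and the episode IDs are made up for illustration.
func addWithLimiter(client *db.Client, tmdbID int, episodeIDs []int) (*ent.Media, error) {
	m := &ent.Media{
		TmdbID:  tmdbID,
		Limiter: &schema.MediaLimiter{SizeMin: 400, SizeMax: 4000},
	}
	return client.AddMediaWatchlist(m, episodeIDs)
}
```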
ent/media.go
@@ -3,8 +3,10 @@
 package ent
 
 import (
+	"encoding/json"
 	"fmt"
 	"polaris/ent/media"
+	"polaris/ent/schema"
 	"strings"
 	"time"
 
@@ -43,6 +45,8 @@ type Media struct {
 	TargetDir string `json:"target_dir,omitempty"`
 	// tv series only
 	DownloadHistoryEpisodes bool `json:"download_history_episodes,omitempty"`
+	// Limiter holds the value of the "limiter" field.
+	Limiter *schema.MediaLimiter `json:"limiter,omitempty"`
 	// Edges holds the relations/edges for other nodes in the graph.
 	// The values are being populated by the MediaQuery when eager-loading is set.
 	Edges MediaEdges `json:"edges"`
@@ -72,6 +76,8 @@ func (*Media) scanValues(columns []string) ([]any, error) {
 	values := make([]any, len(columns))
 	for i := range columns {
 		switch columns[i] {
+		case media.FieldLimiter:
+			values[i] = new([]byte)
 		case media.FieldDownloadHistoryEpisodes:
 			values[i] = new(sql.NullBool)
 		case media.FieldID, media.FieldTmdbID, media.FieldStorageID:
@@ -179,6 +185,14 @@ func (m *Media) assignValues(columns []string, values []any) error {
 			} else if value.Valid {
 				m.DownloadHistoryEpisodes = value.Bool
 			}
+		case media.FieldLimiter:
+			if value, ok := values[i].(*[]byte); !ok {
+				return fmt.Errorf("unexpected type %T for field limiter", values[i])
+			} else if value != nil && len(*value) > 0 {
+				if err := json.Unmarshal(*value, &m.Limiter); err != nil {
+					return fmt.Errorf("unmarshal field limiter: %w", err)
+				}
+			}
 		default:
 			m.selectValues.Set(columns[i], values[i])
 		}
@@ -258,6 +272,9 @@ func (m *Media) String() string {
 	builder.WriteString(", ")
 	builder.WriteString("download_history_episodes=")
 	builder.WriteString(fmt.Sprintf("%v", m.DownloadHistoryEpisodes))
+	builder.WriteString(", ")
+	builder.WriteString("limiter=")
+	builder.WriteString(fmt.Sprintf("%v", m.Limiter))
 	builder.WriteByte(')')
 	return builder.String()
 }
@@ -41,6 +41,8 @@ const (
 	FieldTargetDir = "target_dir"
 	// FieldDownloadHistoryEpisodes holds the string denoting the download_history_episodes field in the database.
 	FieldDownloadHistoryEpisodes = "download_history_episodes"
+	// FieldLimiter holds the string denoting the limiter field in the database.
+	FieldLimiter = "limiter"
 	// EdgeEpisodes holds the string denoting the episodes edge name in mutations.
 	EdgeEpisodes = "episodes"
 	// Table holds the table name of the media in the database.
@@ -70,6 +72,7 @@ var Columns = []string{
 	FieldStorageID,
 	FieldTargetDir,
 	FieldDownloadHistoryEpisodes,
+	FieldLimiter,
 }
 
 // ValidColumn reports if the column name is valid (part of the table columns).
@@ -775,6 +775,16 @@ func DownloadHistoryEpisodesNotNil() predicate.Media {
 	return predicate.Media(sql.FieldNotNull(FieldDownloadHistoryEpisodes))
 }
 
+// LimiterIsNil applies the IsNil predicate on the "limiter" field.
+func LimiterIsNil() predicate.Media {
+	return predicate.Media(sql.FieldIsNull(FieldLimiter))
+}
+
+// LimiterNotNil applies the NotNil predicate on the "limiter" field.
+func LimiterNotNil() predicate.Media {
+	return predicate.Media(sql.FieldNotNull(FieldLimiter))
+}
+
 // HasEpisodes applies the HasEdge predicate on the "episodes" edge.
 func HasEpisodes() predicate.Media {
 	return predicate.Media(func(s *sql.Selector) {
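Since the column holds opaque JSON, the generated predicates only cover presence (IsNil / NotNil) rather than comparisons on the stored values. A hedged sketch of how the new predicate might be used with the generated ent client (not part of the commit):

```go
// Sketch: query all media entries that have a size limiter configured.
func mediaWithLimiter(ctx context.Context, client *ent.Client) ([]*ent.Media, error) {
	return client.Media.Query().
		Where(media.LimiterNotNil()).
		All(ctx)
}
```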
@@ -8,6 +8,7 @@ import (
 	"fmt"
 	"polaris/ent/episode"
 	"polaris/ent/media"
+	"polaris/ent/schema"
 	"time"
 
 	"entgo.io/ent/dialect/sql/sqlgraph"
@@ -155,6 +156,12 @@ func (mc *MediaCreate) SetNillableDownloadHistoryEpisodes(b *bool) *MediaCreate
 	return mc
 }
 
+// SetLimiter sets the "limiter" field.
+func (mc *MediaCreate) SetLimiter(sl *schema.MediaLimiter) *MediaCreate {
+	mc.mutation.SetLimiter(sl)
+	return mc
+}
+
 // AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
 func (mc *MediaCreate) AddEpisodeIDs(ids ...int) *MediaCreate {
 	mc.mutation.AddEpisodeIDs(ids...)
@@ -340,6 +347,10 @@ func (mc *MediaCreate) createSpec() (*Media, *sqlgraph.CreateSpec) {
 		_spec.SetField(media.FieldDownloadHistoryEpisodes, field.TypeBool, value)
 		_node.DownloadHistoryEpisodes = value
 	}
+	if value, ok := mc.mutation.Limiter(); ok {
+		_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
+		_node.Limiter = value
+	}
 	if nodes := mc.mutation.EpisodesIDs(); len(nodes) > 0 {
 		edge := &sqlgraph.EdgeSpec{
 			Rel: sqlgraph.O2M,
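On the create path, SetLimiter records the pointer on the mutation and createSpec later writes it out as a JSON field. A hedged builder sketch (SetTmdbID is assumed from the tmdb_id field; other required setters are omitted):

```go
// Sketch: create a Media row with a limiter via the generated builder.
// SetTmdbID is an assumed setter name; the limiter bounds are illustrative.
func createWithLimiter(ctx context.Context, client *ent.Client) (*ent.Media, error) {
	return client.Media.Create().
		SetTmdbID(12345).
		SetLimiter(&schema.MediaLimiter{SizeMin: 400, SizeMax: 4000}).
		Save(ctx)
}
```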
@@ -9,6 +9,7 @@ import (
 	"polaris/ent/episode"
 	"polaris/ent/media"
 	"polaris/ent/predicate"
+	"polaris/ent/schema"
 	"time"
 
 	"entgo.io/ent/dialect/sql"
@@ -249,6 +250,18 @@ func (mu *MediaUpdate) ClearDownloadHistoryEpisodes() *MediaUpdate {
 	return mu
 }
 
+// SetLimiter sets the "limiter" field.
+func (mu *MediaUpdate) SetLimiter(sl *schema.MediaLimiter) *MediaUpdate {
+	mu.mutation.SetLimiter(sl)
+	return mu
+}
+
+// ClearLimiter clears the value of the "limiter" field.
+func (mu *MediaUpdate) ClearLimiter() *MediaUpdate {
+	mu.mutation.ClearLimiter()
+	return mu
+}
+
 // AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
 func (mu *MediaUpdate) AddEpisodeIDs(ids ...int) *MediaUpdate {
 	mu.mutation.AddEpisodeIDs(ids...)
@@ -401,6 +414,12 @@ func (mu *MediaUpdate) sqlSave(ctx context.Context) (n int, err error) {
 	if mu.mutation.DownloadHistoryEpisodesCleared() {
 		_spec.ClearField(media.FieldDownloadHistoryEpisodes, field.TypeBool)
 	}
+	if value, ok := mu.mutation.Limiter(); ok {
+		_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
+	}
+	if mu.mutation.LimiterCleared() {
+		_spec.ClearField(media.FieldLimiter, field.TypeJSON)
+	}
 	if mu.mutation.EpisodesCleared() {
 		edge := &sqlgraph.EdgeSpec{
 			Rel: sqlgraph.O2M,
@@ -686,6 +705,18 @@ func (muo *MediaUpdateOne) ClearDownloadHistoryEpisodes() *MediaUpdateOne {
 	return muo
 }
 
+// SetLimiter sets the "limiter" field.
+func (muo *MediaUpdateOne) SetLimiter(sl *schema.MediaLimiter) *MediaUpdateOne {
+	muo.mutation.SetLimiter(sl)
+	return muo
+}
+
+// ClearLimiter clears the value of the "limiter" field.
+func (muo *MediaUpdateOne) ClearLimiter() *MediaUpdateOne {
+	muo.mutation.ClearLimiter()
+	return muo
+}
+
 // AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
 func (muo *MediaUpdateOne) AddEpisodeIDs(ids ...int) *MediaUpdateOne {
 	muo.mutation.AddEpisodeIDs(ids...)
@@ -868,6 +899,12 @@ func (muo *MediaUpdateOne) sqlSave(ctx context.Context) (_node *Media, err error
 	if muo.mutation.DownloadHistoryEpisodesCleared() {
 		_spec.ClearField(media.FieldDownloadHistoryEpisodes, field.TypeBool)
 	}
+	if value, ok := muo.mutation.Limiter(); ok {
+		_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
+	}
+	if muo.mutation.LimiterCleared() {
+		_spec.ClearField(media.FieldLimiter, field.TypeJSON)
+	}
 	if muo.mutation.EpisodesCleared() {
 		edge := &sqlgraph.EdgeSpec{
 			Rel: sqlgraph.O2M,
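The update builders get the symmetric pair: SetLimiter replaces the JSON value, while ClearLimiter NULLs the column (which reads back as a nil *schema.MediaLimiter). A hedged sketch against the generated client, with illustrative bounds:

```go
// Sketch: tighten the limiter on an existing entry, then remove it entirely.
func adjustLimiter(ctx context.Context, client *ent.Client, mediaID int) error {
	if err := client.Media.UpdateOneID(mediaID).
		SetLimiter(&schema.MediaLimiter{SizeMin: 800, SizeMax: 3000}).
		Exec(ctx); err != nil {
		return err
	}
	return client.Media.UpdateOneID(mediaID).
		ClearLimiter().
		Exec(ctx)
}
```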
@@ -108,6 +108,7 @@ var (
 		{Name: "storage_id", Type: field.TypeInt, Nullable: true},
 		{Name: "target_dir", Type: field.TypeString, Nullable: true},
 		{Name: "download_history_episodes", Type: field.TypeBool, Nullable: true, Default: false},
+		{Name: "limiter", Type: field.TypeJSON, Nullable: true},
 	}
 	// MediaTable holds the schema information for the "media" table.
 	MediaTable = &schema.Table{
@@ -13,6 +13,7 @@ import (
 	"polaris/ent/media"
 	"polaris/ent/notificationclient"
 	"polaris/ent/predicate"
+	"polaris/ent/schema"
 	"polaris/ent/settings"
 	"polaris/ent/storage"
 	"sync"
@@ -3600,6 +3601,7 @@ type MediaMutation struct {
 	addstorage_id             *int
 	target_dir                *string
 	download_history_episodes *bool
+	limiter                   **schema.MediaLimiter
 	clearedFields             map[string]struct{}
 	episodes                  map[int]struct{}
 	removedepisodes           map[int]struct{}
@@ -4268,6 +4270,55 @@ func (m *MediaMutation) ResetDownloadHistoryEpisodes() {
 	delete(m.clearedFields, media.FieldDownloadHistoryEpisodes)
 }
 
+// SetLimiter sets the "limiter" field.
+func (m *MediaMutation) SetLimiter(sl *schema.MediaLimiter) {
+	m.limiter = &sl
+}
+
+// Limiter returns the value of the "limiter" field in the mutation.
+func (m *MediaMutation) Limiter() (r *schema.MediaLimiter, exists bool) {
+	v := m.limiter
+	if v == nil {
+		return
+	}
+	return *v, true
+}
+
+// OldLimiter returns the old "limiter" field's value of the Media entity.
+// If the Media object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *MediaMutation) OldLimiter(ctx context.Context) (v *schema.MediaLimiter, err error) {
+	if !m.op.Is(OpUpdateOne) {
+		return v, errors.New("OldLimiter is only allowed on UpdateOne operations")
+	}
+	if m.id == nil || m.oldValue == nil {
+		return v, errors.New("OldLimiter requires an ID field in the mutation")
+	}
+	oldValue, err := m.oldValue(ctx)
+	if err != nil {
+		return v, fmt.Errorf("querying old value for OldLimiter: %w", err)
+	}
+	return oldValue.Limiter, nil
+}
+
+// ClearLimiter clears the value of the "limiter" field.
+func (m *MediaMutation) ClearLimiter() {
+	m.limiter = nil
+	m.clearedFields[media.FieldLimiter] = struct{}{}
+}
+
+// LimiterCleared returns if the "limiter" field was cleared in this mutation.
+func (m *MediaMutation) LimiterCleared() bool {
+	_, ok := m.clearedFields[media.FieldLimiter]
+	return ok
+}
+
+// ResetLimiter resets all changes to the "limiter" field.
+func (m *MediaMutation) ResetLimiter() {
+	m.limiter = nil
+	delete(m.clearedFields, media.FieldLimiter)
+}
+
 // AddEpisodeIDs adds the "episodes" edge to the Episode entity by ids.
 func (m *MediaMutation) AddEpisodeIDs(ids ...int) {
 	if m.episodes == nil {
@@ -4356,7 +4407,7 @@ func (m *MediaMutation) Type() string {
 // order to get all numeric fields that were incremented/decremented, call
 // AddedFields().
 func (m *MediaMutation) Fields() []string {
-	fields := make([]string, 0, 13)
+	fields := make([]string, 0, 14)
 	if m.tmdb_id != nil {
 		fields = append(fields, media.FieldTmdbID)
 	}
@@ -4396,6 +4447,9 @@ func (m *MediaMutation) Fields() []string {
 	if m.download_history_episodes != nil {
 		fields = append(fields, media.FieldDownloadHistoryEpisodes)
 	}
+	if m.limiter != nil {
+		fields = append(fields, media.FieldLimiter)
+	}
 	return fields
 }
 
@@ -4430,6 +4484,8 @@ func (m *MediaMutation) Field(name string) (ent.Value, bool) {
 		return m.TargetDir()
 	case media.FieldDownloadHistoryEpisodes:
 		return m.DownloadHistoryEpisodes()
+	case media.FieldLimiter:
+		return m.Limiter()
 	}
 	return nil, false
 }
@@ -4465,6 +4521,8 @@ func (m *MediaMutation) OldField(ctx context.Context, name string) (ent.Value, e
 		return m.OldTargetDir(ctx)
 	case media.FieldDownloadHistoryEpisodes:
 		return m.OldDownloadHistoryEpisodes(ctx)
+	case media.FieldLimiter:
+		return m.OldLimiter(ctx)
 	}
 	return nil, fmt.Errorf("unknown Media field %s", name)
 }
@@ -4565,6 +4623,13 @@ func (m *MediaMutation) SetField(name string, value ent.Value) error {
 		}
 		m.SetDownloadHistoryEpisodes(v)
 		return nil
+	case media.FieldLimiter:
+		v, ok := value.(*schema.MediaLimiter)
+		if !ok {
+			return fmt.Errorf("unexpected type %T for field %s", value, name)
+		}
+		m.SetLimiter(v)
+		return nil
 	}
 	return fmt.Errorf("unknown Media field %s", name)
 }
@@ -4634,6 +4699,9 @@ func (m *MediaMutation) ClearedFields() []string {
 	if m.FieldCleared(media.FieldDownloadHistoryEpisodes) {
 		fields = append(fields, media.FieldDownloadHistoryEpisodes)
 	}
+	if m.FieldCleared(media.FieldLimiter) {
+		fields = append(fields, media.FieldLimiter)
+	}
 	return fields
 }
 
@@ -4660,6 +4728,9 @@ func (m *MediaMutation) ClearField(name string) error {
 	case media.FieldDownloadHistoryEpisodes:
 		m.ClearDownloadHistoryEpisodes()
 		return nil
+	case media.FieldLimiter:
+		m.ClearLimiter()
+		return nil
 	}
 	return fmt.Errorf("unknown Media nullable field %s", name)
 }
@@ -4707,6 +4778,9 @@ func (m *MediaMutation) ResetField(name string) error {
 	case media.FieldDownloadHistoryEpisodes:
 		m.ResetDownloadHistoryEpisodes()
 		return nil
+	case media.FieldLimiter:
+		m.ResetLimiter()
+		return nil
 	}
 	return fmt.Errorf("unknown Media field %s", name)
 }
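One detail worth noting in the mutation type: the field is held as **schema.MediaLimiter. The outer pointer distinguishes "this mutation never touched the field" from "the field was explicitly set", while the inner pointer is the value itself (which may legitimately be nil). A self-contained illustration of the same pattern with a local stand-in type:

```go
package main

import "fmt"

// Local stand-in for schema.MediaLimiter, for illustration only.
type MediaLimiter struct{ SizeMin, SizeMax int }

// mutation-style holder: a nil outer pointer means "field untouched";
// a non-nil outer pointer means "field set to the inner value".
type mediaMutation struct {
	limiter **MediaLimiter
}

func (m *mediaMutation) SetLimiter(sl *MediaLimiter) { m.limiter = &sl }

func (m *mediaMutation) Limiter() (*MediaLimiter, bool) {
	if m.limiter == nil {
		return nil, false
	}
	return *m.limiter, true
}

func main() {
	var mu mediaMutation

	if _, ok := mu.Limiter(); !ok {
		fmt.Println("limiter not set in this mutation")
	}

	mu.SetLimiter(&MediaLimiter{SizeMin: 400, SizeMax: 4000})
	if v, ok := mu.Limiter(); ok {
		fmt.Printf("limiter set: %+v\n", *v) // limiter set: {SizeMin:400 SizeMax:4000}
	}
}
```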
@@ -29,6 +29,7 @@ func (Media) Fields() []ent.Field {
 		field.Int("storage_id").Optional(),
 		field.String("target_dir").Optional(),
 		field.Bool("download_history_episodes").Optional().Default(false).Comment("tv series only"),
+		field.JSON("limiter", &MediaLimiter{}).Optional(),
 	}
 }
 
@@ -38,3 +39,8 @@ func (Media) Edges() []ent.Edge {
 		edge.To("episodes", Episode.Type),
 	}
 }
+
+type MediaLimiter struct {
+	SizeMin int `json:"size_min"`
+	SizeMax int `json:"size_max"`
+}
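MediaLimiter lives in the schema package and is stored as a nullable JSON column, so its json tags define the stored representation; the same size_min / size_max names are used by the API layer. A quick, self-contained round-trip check of that encoding:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Copy of the struct added in the ent schema, for a standalone demo.
type MediaLimiter struct {
	SizeMin int `json:"size_min"`
	SizeMax int `json:"size_max"`
}

func main() {
	// What ends up in the JSON column (and on the wire).
	b, _ := json.Marshal(MediaLimiter{SizeMin: 400, SizeMax: 4000})
	fmt.Println(string(b)) // {"size_min":400,"size_max":4000}

	// Reading it back, as the generated assignValues does for the column.
	var limiter *MediaLimiter
	_ = json.Unmarshal(b, &limiter)
	fmt.Printf("%+v\n", *limiter) // {SizeMin:400 SizeMax:4000}
}
```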
@@ -112,7 +112,7 @@ func (c *Client) DownloadEpisodeTorrent(r1 torznab.Result, seriesId, seasonNum,
 }
 
 func (c *Client) SearchAndDownload(seriesId, seasonNum, episodeNum int) (*string, error) {
 
-	res, err := SearchTvSeries(c.db, seriesId, seasonNum, []int{episodeNum}, true)
+	res, err := SearchTvSeries(c.db, seriesId, seasonNum, []int{episodeNum}, true, true)
 	if err != nil {
 		return nil, err
 	}
@@ -262,7 +262,7 @@ func (c *Client) downloadMovieSingleEpisode(ep *ent.Episode) error {
 		return errors.Wrap(err, "connect transmission")
 	}
 
-	res, err := SearchMovie(c.db, ep.MediaID, true)
+	res, err := SearchMovie(c.db, ep.MediaID, true, true)
 	if err != nil {
 		return errors.Wrap(err, "search movie")
@@ -16,7 +16,7 @@ import (
 	"github.com/pkg/errors"
 )
 
-func SearchTvSeries(db1 *db.Client, seriesId, seasonNum int, episodes []int, checkResolution bool) ([]torznab.Result, error) {
+func SearchTvSeries(db1 *db.Client, seriesId, seasonNum int, episodes []int, checkResolution bool, checkFileSize bool) ([]torznab.Result, error) {
 	series := db1.GetMediaDetails(seriesId)
 	if series == nil {
 		return nil, fmt.Errorf("no tv series of id %v", seriesId)
@@ -55,6 +55,17 @@ func SearchTvSeries(db1 *db.Client, seriesId, seasonNum int, episodes []int, che
 		if !utils.IsNameAcceptable(meta.NameEn, series.NameEn) && !utils.IsNameAcceptable(meta.NameCn, series.NameCn) {
 			continue
 		}
+
+		if checkFileSize && series.Limiter != nil {
+			if series.Limiter.SizeMin > 0 && r.Size < series.Limiter.SizeMin {
+				//min size not satisfied
+				continue
+			}
+			if series.Limiter.SizeMax > 0 && r.Size > series.Limiter.SizeMax {
+				//max size not satisfied
+				continue
+			}
+		}
 		filtered = append(filtered, r)
 	}
 	if len(filtered) == 0 {
@@ -80,7 +91,7 @@ func isNumberedSeries(detail *db.MediaDetails) bool {
 	return hasSeason2 && !season2HasEpisode1 //only one 1st episode
 }
 
-func SearchMovie(db1 *db.Client, movieId int, checkResolution bool) ([]torznab.Result, error) {
+func SearchMovie(db1 *db.Client, movieId int, checkResolution bool, checkFileSize bool) ([]torznab.Result, error) {
 	movieDetail := db1.GetMediaDetails(movieId)
 	if movieDetail == nil {
 		return nil, errors.New("no media found of id")
@@ -103,6 +114,18 @@ func SearchMovie(db1 *db.Client, movieId int, checkResolution bool) ([]torznab.R
 		if checkResolution && meta.Resolution != movieDetail.Resolution.String() {
 			continue
 		}
+
+		if checkFileSize && movieDetail.Limiter != nil {
+			if movieDetail.Limiter.SizeMin > 0 && r.Size < movieDetail.Limiter.SizeMin {
+				//min size not satisfied
+				continue
+			}
+			if movieDetail.Limiter.SizeMax > 0 && r.Size > movieDetail.Limiter.SizeMax {
+				//max size not satisfied
+				continue
+			}
+		}
+
 		ss := strings.Split(movieDetail.AirDate, "-")[0]
 		year, _ := strconv.Atoi(ss)
 		if meta.Year != year && meta.Year != year-1 && meta.Year != year+1 { //year not match
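The filter added to both search paths is the same three-way rule: a bound of zero means no limit on that side, otherwise results outside [SizeMin, SizeMax] are skipped. A self-contained restatement of that predicate (the unit of r.Size is whatever the indexer reports; this sketch only mirrors the comparison logic):

```go
package main

import "fmt"

// sizeAcceptable mirrors the check added in SearchTvSeries and SearchMovie:
// a zero bound disables that side of the limit.
func sizeAcceptable(size, sizeMin, sizeMax int) bool {
	if sizeMin > 0 && size < sizeMin {
		return false // below the minimum size
	}
	if sizeMax > 0 && size > sizeMax {
		return false // above the maximum size
	}
	return true
}

func main() {
	fmt.Println(sizeAcceptable(2500, 400, 4000)) // true: inside the range
	fmt.Println(sizeAcceptable(100, 400, 4000))  // false: below the minimum
	fmt.Println(sizeAcceptable(9000, 400, 0))    // true: no maximum configured
}
```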
@@ -13,7 +13,7 @@ import (
 
 func (s *Server) searchAndDownloadSeasonPackage(seriesId, seasonNum int) (*string, error) {
 
-	res, err := core.SearchTvSeries(s.db, seriesId, seasonNum, nil, true)
+	res, err := core.SearchTvSeries(s.db, seriesId, seasonNum, nil, true, true)
 	if err != nil {
 		return nil, err
 	}
@@ -46,13 +46,13 @@ func (s *Server) SearchAvailableTorrents(c *gin.Context) (interface{}, error) {
 	if in.Episode == 0 {
 		//search season package
 		log.Infof("search series season package S%02d", in.Season)
-		res, err = core.SearchTvSeries(s.db, in.ID, in.Season, nil, false)
+		res, err = core.SearchTvSeries(s.db, in.ID, in.Season, nil, false, false)
 		if err != nil {
 			return nil, errors.Wrap(err, "search season package")
 		}
 	} else {
 		log.Infof("search series episode S%02dE%02d", in.Season, in.Episode)
-		res, err = core.SearchTvSeries(s.db, in.ID, in.Season, []int{in.Episode}, false)
+		res, err = core.SearchTvSeries(s.db, in.ID, in.Season, []int{in.Episode}, false, false)
 		if err != nil {
 			if err.Error() == "no resource found" {
 				return []string{}, nil
@@ -63,7 +63,7 @@ func (s *Server) SearchAvailableTorrents(c *gin.Context) (interface{}, error) {
 		}
 	} else {
 		log.Info("search movie %d", in.ID)
-		res, err = core.SearchMovie(s.db, in.ID, false)
+		res, err = core.SearchMovie(s.db, in.ID, false, false)
 		if err != nil {
 			if err.Error() == "no resource found" {
 				return []string{}, nil
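The new argument is positional, which makes these call sites easy to misread: the interactive search endpoint passes false for both checks so the user sees every candidate, while the automatic download paths pass true, true to enforce the resolution and size limits. A hedged restatement with the booleans labeled (a fragment, not code from the commit):

```go
// Manual search from the UI: return everything, let the user pick.
res, err := core.SearchTvSeries(s.db, in.ID, in.Season, nil,
	false /* checkResolution */, false /* checkFileSize */)

// Automatic download path: enforce resolution and the per-media size limiter.
res, err = core.SearchTvSeries(s.db, in.ID, in.Season, []int{in.Episode},
	true /* checkResolution */, true /* checkFileSize */)
```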
@@ -10,6 +10,7 @@ import (
 	"polaris/ent"
 	"polaris/ent/episode"
 	"polaris/ent/media"
+	"polaris/ent/schema"
 	"polaris/log"
 	"strconv"
 	"time"
@@ -66,6 +67,8 @@ type addWatchlistIn struct {
 	Resolution              string `json:"resolution" binding:"required"`
 	Folder                  string `json:"folder" binding:"required"`
 	DownloadHistoryEpisodes bool   `json:"download_history_episodes"` //for tv
+	SizeMin                 int    `json:"size_min"`
+	SizeMax                 int    `json:"size_max"`
 }
 
 func (s *Server) AddTv2Watchlist(c *gin.Context) (interface{}, error) {
@@ -134,7 +137,7 @@ func (s *Server) AddTv2Watchlist(c *gin.Context) (interface{}, error) {
 			epIds = append(epIds, epid)
 		}
 	}
-	r, err := s.db.AddMediaWatchlist(&ent.Media{
+	m := &ent.Media{
 		TmdbID:    int(detail.ID),
 		MediaType: media.MediaTypeTv,
 		NameCn:    nameCn,
@@ -146,7 +149,10 @@ func (s *Server) AddTv2Watchlist(c *gin.Context) (interface{}, error) {
 		StorageID:               in.StorageID,
 		TargetDir:               in.Folder,
 		DownloadHistoryEpisodes: in.DownloadHistoryEpisodes,
-	}, epIds)
+		Limiter:                 &schema.MediaLimiter{SizeMin: in.SizeMin, SizeMax: in.SizeMax},
+	}
+
+	r, err := s.db.AddMediaWatchlist(m, epIds)
 	if err != nil {
 		return nil, errors.Wrap(err, "add to list")
 	}
@@ -172,6 +178,7 @@ func (s *Server) AddMovie2Watchlist(c *gin.Context) (interface{}, error) {
 	if err := c.ShouldBindJSON(&in); err != nil {
 		return nil, errors.Wrap(err, "bind query")
 	}
+	log.Infof("add movie watchlist input: %+v", in)
 	detailCn, err := s.MustTMDB().GetMovieDetails(in.TmdbID, db.LanguageCN)
 	if err != nil {
 		return nil, errors.Wrap(err, "get movie detail")
@@ -212,6 +219,7 @@ func (s *Server) AddMovie2Watchlist(c *gin.Context) (interface{}, error) {
 		Resolution: media.Resolution(in.Resolution),
 		StorageID:  in.StorageID,
 		TargetDir:  in.Folder,
+		Limiter:    &schema.MediaLimiter{SizeMin: in.SizeMin, SizeMax: in.SizeMax},
 	}, []int{epid})
 	if err != nil {
 		return nil, errors.Wrap(err, "add to list")
@@ -294,7 +302,7 @@ func (s *Server) GetTvWatchlist(c *gin.Context) (interface{}, error) {
 				ms.MonitoredNum++
 				if ep.Status == episode.StatusDownloaded {
 					ms.DownloadedNum++
 				}
 			}
 		}
 	}
 	res[i] = ms
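On the API side the limiter arrives as two flat fields in the existing watchlist payload and is only translated into a schema.MediaLimiter when the row is written. A self-contained sketch of the wire format, using a trimmed local mirror of addWatchlistIn (field names match the json tags shown above; the concrete values are hypothetical):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed stand-in for the server's addWatchlistIn binding struct.
type addWatchlistIn struct {
	Resolution string `json:"resolution"`
	Folder     string `json:"folder"`
	SizeMin    int    `json:"size_min"`
	SizeMax    int    `json:"size_max"`
}

func main() {
	// Example request body with the new size fields (values are hypothetical).
	body := []byte(`{"resolution":"1080p","folder":"/downloads","size_min":400,"size_max":4000}`)

	var in addWatchlistIn
	if err := json.Unmarshal(body, &in); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", in) // {Resolution:1080p Folder:/downloads SizeMin:400 SizeMax:4000}
}
```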
@@ -1,6 +1,7 @@
 import 'dart:async';
 
 import 'package:dio/dio.dart';
+import 'package:flutter/material.dart';
 import 'package:flutter_riverpod/flutter_riverpod.dart';
 import 'package:quiver/strings.dart';
 import 'package:ui/providers/APIs.dart';
@@ -91,16 +92,24 @@ class SearchPageData
     state = newState;
   }
 
-  Future<void> submit2Watchlist(int tmdbId, int storageId, String resolution,
-      String mediaType, String folder, bool downloadHistoryEpisodes) async {
-    final dio = await APIs.getDio();
+  Future<void> submit2Watchlist(
+      int tmdbId,
+      int storageId,
+      String resolution,
+      String mediaType,
+      String folder,
+      bool downloadHistoryEpisodes,
+      RangeValues limiter) async {
+    final dio = APIs.getDio();
     if (mediaType == "tv") {
       var resp = await dio.post(APIs.watchlistTvUrl, data: {
         "tmdb_id": tmdbId,
        "storage_id": storageId,
         "resolution": resolution,
         "folder": folder,
-        "download_history_episodes": downloadHistoryEpisodes
+        "download_history_episodes": downloadHistoryEpisodes,
+        "size_min": (limiter.start * 1000).toInt(),
+        "size_max": (limiter.end * 1000).toInt(),
       });
       var sp = ServerResponse.fromJson(resp.data);
       if (sp.code != 0) {
@@ -112,7 +121,9 @@ class SearchPageData
         "tmdb_id": tmdbId,
         "storage_id": storageId,
         "resolution": resolution,
-        "folder": folder
+        "folder": folder,
+        "size_min": (limiter.start * 1000).toInt(),
+        "size_max": (limiter.end * 1000).toInt(),
       });
       var sp = ServerResponse.fromJson(resp.data);
       if (sp.code != 0) {
@@ -185,8 +196,8 @@ class MediaDetail {
     resolution = json["resolution"];
     storageId = json["storage_id"];
     airDate = json["air_date"];
-    monitoredNum = json["monitored_num"]??0;
-    downloadedNum = json["downloaded_num"]??0;
+    monitoredNum = json["monitored_num"] ?? 0;
+    downloadedNum = json["downloaded_num"] ?? 0;
   }
 }
@@ -150,6 +150,8 @@ class _SearchPageState extends ConsumerState<SearchPage> {
 
   Future<void> _showSubmitDialog(BuildContext context, SearchResult item) {
     final _formKey = GlobalKey<FormBuilderState>();
+    bool enabledSizedLimiter = false;
+    double sizeMax = 5000;
 
     return showDialog<void>(
       context: context,
@@ -166,7 +168,7 @@ class _SearchPageState extends ConsumerState<SearchPage> {
             title: Text('添加: ${item.name}'),
             content: SizedBox(
               width: 500,
-              height: 200,
+              height: 400,
               child: FormBuilder(
                 key: _formKey,
                 initialValue: const {
@@ -174,6 +176,8 @@ class _SearchPageState extends ConsumerState<SearchPage> {
                   "storage": null,
                   "folder": "",
                   "history_episodes": false,
+                  "enable_size_limiter": false,
+                  "size_limiter": RangeValues(400, 4000),
                 },
                 child: Column(
                   crossAxisAlignment: CrossAxisAlignment.start,
@@ -248,6 +252,51 @@ class _SearchPageState extends ConsumerState<SearchPage> {
                         size: 20,
                       ),
                     ),
+                    FormBuilderSwitch(
+                      name: "enable_size_limiter",
+                      title: Text(item.mediaType == "tv"
+                          ? "是否限制每集文件大小"
+                          : "是否限制电影文件大小"),
+                      onChanged: (value) {
+                        setState(
+                          () {
+                            enabledSizedLimiter = value!;
+                          },
+                        );
+                      },
+                    ),
+                    enabledSizedLimiter
+                        ? FormBuilderRangeSlider(
+                            maxValueWidget: (max) =>
+                                Text("${sizeMax / 1000} GB"),
+                            minValueWidget: (min) => Text("0"),
+                            valueWidget: (value) {
+                              final sss = value.split(" ");
+                              return Text(
+                                  "${readableSize(sss[0])} - ${readableSize(sss[2])}");
+                            },
+                            onChangeEnd: (value) {
+                              if (value.end > sizeMax * 0.9) {
+                                setState(
+                                  () {
+                                    sizeMax = sizeMax * 5;
+                                  },
+                                );
+                              } else if (value.end <
+                                  sizeMax * 0.2) {
+                                if (sizeMax > 5000) {
+                                  setState(
+                                    () {
+                                      sizeMax = sizeMax / 5;
+                                    },
+                                  );
+                                }
+                              }
+                            },
+                            name: "size_limiter",
+                            min: 0,
+                            max: sizeMax)
+                        : const SizedBox(),
                     item.mediaType == "tv"
                         ? SizedBox(
                             width: 250,
@@ -285,7 +334,6 @@ class _SearchPageState extends ConsumerState<SearchPage> {
                 onPressed: () async {
                   if (_formKey.currentState!.saveAndValidate()) {
                     final values = _formKey.currentState!.value;
-                    //print(values);
                     var f = ref
                         .read(searchPageDataProvider(widget.query ?? "")
                             .notifier)
@@ -295,7 +343,8 @@ class _SearchPageState extends ConsumerState<SearchPage> {
                             values["resolution"],
                             item.mediaType!,
                             values["folder"],
-                            values["history_episodes"] ?? false)
+                            values["history_episodes"] ?? false,
+                            enabledSizedLimiter ? values["size_limiter"] : const RangeValues(-1, -1))
                         .then((v) {
                       Navigator.of(context).pop();
                       showSnakeBar("添加成功:${item.name}");
@@ -310,6 +359,13 @@ class _SearchPageState extends ConsumerState<SearchPage> {
         );
       });
   }
+
+  String readableSize(String v) {
+    if (v.endsWith("K")) {
+      return v.replaceAll("K", " GB");
+    }
+    return "$v MB";
+  }
 }
 
 class SearchBarApp extends StatefulWidget {