mirror of
https://github.com/simon-ding/polaris.git
synced 2026-02-06 23:21:00 +08:00
basic find and download ability
This commit is contained in:
139
ent/history.go
Normal file
139
ent/history.go
Normal file
@@ -0,0 +1,139 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"polaris/ent/history"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"entgo.io/ent"
|
||||
"entgo.io/ent/dialect/sql"
|
||||
)
|
||||
|
||||
// History is the model entity for the History schema.
|
||||
type History struct {
|
||||
config `json:"-"`
|
||||
// ID of the ent.
|
||||
ID int `json:"id,omitempty"`
|
||||
// SeriesID holds the value of the "series_id" field.
|
||||
SeriesID int `json:"series_id,omitempty"`
|
||||
// EpisodeID holds the value of the "episode_id" field.
|
||||
EpisodeID int `json:"episode_id,omitempty"`
|
||||
// SourceTitle holds the value of the "source_title" field.
|
||||
SourceTitle string `json:"source_title,omitempty"`
|
||||
// Date holds the value of the "date" field.
|
||||
Date time.Time `json:"date,omitempty"`
|
||||
selectValues sql.SelectValues
|
||||
}
|
||||
|
||||
// scanValues returns the types for scanning values from sql.Rows.
|
||||
func (*History) scanValues(columns []string) ([]any, error) {
|
||||
values := make([]any, len(columns))
|
||||
for i := range columns {
|
||||
switch columns[i] {
|
||||
case history.FieldID, history.FieldSeriesID, history.FieldEpisodeID:
|
||||
values[i] = new(sql.NullInt64)
|
||||
case history.FieldSourceTitle:
|
||||
values[i] = new(sql.NullString)
|
||||
case history.FieldDate:
|
||||
values[i] = new(sql.NullTime)
|
||||
default:
|
||||
values[i] = new(sql.UnknownType)
|
||||
}
|
||||
}
|
||||
return values, nil
|
||||
}
|
||||
|
||||
// assignValues assigns the values that were returned from sql.Rows (after scanning)
|
||||
// to the History fields.
|
||||
func (h *History) assignValues(columns []string, values []any) error {
|
||||
if m, n := len(values), len(columns); m < n {
|
||||
return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
|
||||
}
|
||||
for i := range columns {
|
||||
switch columns[i] {
|
||||
case history.FieldID:
|
||||
value, ok := values[i].(*sql.NullInt64)
|
||||
if !ok {
|
||||
return fmt.Errorf("unexpected type %T for field id", value)
|
||||
}
|
||||
h.ID = int(value.Int64)
|
||||
case history.FieldSeriesID:
|
||||
if value, ok := values[i].(*sql.NullInt64); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field series_id", values[i])
|
||||
} else if value.Valid {
|
||||
h.SeriesID = int(value.Int64)
|
||||
}
|
||||
case history.FieldEpisodeID:
|
||||
if value, ok := values[i].(*sql.NullInt64); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field episode_id", values[i])
|
||||
} else if value.Valid {
|
||||
h.EpisodeID = int(value.Int64)
|
||||
}
|
||||
case history.FieldSourceTitle:
|
||||
if value, ok := values[i].(*sql.NullString); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field source_title", values[i])
|
||||
} else if value.Valid {
|
||||
h.SourceTitle = value.String
|
||||
}
|
||||
case history.FieldDate:
|
||||
if value, ok := values[i].(*sql.NullTime); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field date", values[i])
|
||||
} else if value.Valid {
|
||||
h.Date = value.Time
|
||||
}
|
||||
default:
|
||||
h.selectValues.Set(columns[i], values[i])
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Value returns the ent.Value that was dynamically selected and assigned to the History.
|
||||
// This includes values selected through modifiers, order, etc.
|
||||
func (h *History) Value(name string) (ent.Value, error) {
|
||||
return h.selectValues.Get(name)
|
||||
}
|
||||
|
||||
// Update returns a builder for updating this History.
|
||||
// Note that you need to call History.Unwrap() before calling this method if this History
|
||||
// was returned from a transaction, and the transaction was committed or rolled back.
|
||||
func (h *History) Update() *HistoryUpdateOne {
|
||||
return NewHistoryClient(h.config).UpdateOne(h)
|
||||
}
|
||||
|
||||
// Unwrap unwraps the History entity that was returned from a transaction after it was closed,
|
||||
// so that all future queries will be executed through the driver which created the transaction.
|
||||
func (h *History) Unwrap() *History {
|
||||
_tx, ok := h.config.driver.(*txDriver)
|
||||
if !ok {
|
||||
panic("ent: History is not a transactional entity")
|
||||
}
|
||||
h.config.driver = _tx.drv
|
||||
return h
|
||||
}
|
||||
|
||||
// String implements the fmt.Stringer.
|
||||
func (h *History) String() string {
|
||||
var builder strings.Builder
|
||||
builder.WriteString("History(")
|
||||
builder.WriteString(fmt.Sprintf("id=%v, ", h.ID))
|
||||
builder.WriteString("series_id=")
|
||||
builder.WriteString(fmt.Sprintf("%v", h.SeriesID))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("episode_id=")
|
||||
builder.WriteString(fmt.Sprintf("%v", h.EpisodeID))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("source_title=")
|
||||
builder.WriteString(h.SourceTitle)
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("date=")
|
||||
builder.WriteString(h.Date.Format(time.ANSIC))
|
||||
builder.WriteByte(')')
|
||||
return builder.String()
|
||||
}
|
||||
|
||||
// Histories is a parsable slice of History, as returned by queries that
// load multiple rows.
type Histories []*History
|
||||
71
ent/history/history.go
Normal file
71
ent/history/history.go
Normal file
@@ -0,0 +1,71 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package history
|
||||
|
||||
import (
|
||||
"entgo.io/ent/dialect/sql"
|
||||
)
|
||||
|
||||
const (
	// Label holds the string label denoting the history type in the database.
	Label = "history"
	// FieldID holds the string denoting the id field in the database.
	FieldID = "id"
	// FieldSeriesID holds the string denoting the series_id field in the database.
	FieldSeriesID = "series_id"
	// FieldEpisodeID holds the string denoting the episode_id field in the database.
	FieldEpisodeID = "episode_id"
	// FieldSourceTitle holds the string denoting the source_title field in the database.
	FieldSourceTitle = "source_title"
	// FieldDate holds the string denoting the date field in the database.
	FieldDate = "date"
	// Table holds the table name of the history in the database.
	Table = "histories"
)

// Columns lists every SQL column of the histories table, in schema order.
var Columns = []string{
	FieldID,
	FieldSeriesID,
	FieldEpisodeID,
	FieldSourceTitle,
	FieldDate,
}

// ValidColumn reports whether the given name is one of the table's columns.
func ValidColumn(column string) bool {
	for _, c := range Columns {
		if c == column {
			return true
		}
	}
	return false
}
|
||||
|
||||
// OrderOption defines the ordering options for the History queries.
|
||||
type OrderOption func(*sql.Selector)
|
||||
|
||||
// ByID orders the results by the id field.
|
||||
func ByID(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldID, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// BySeriesID orders the results by the series_id field.
|
||||
func BySeriesID(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldSeriesID, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByEpisodeID orders the results by the episode_id field.
|
||||
func ByEpisodeID(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldEpisodeID, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// BySourceTitle orders the results by the source_title field.
|
||||
func BySourceTitle(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldSourceTitle, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByDate orders the results by the date field.
|
||||
func ByDate(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldDate, opts...).ToFunc()
|
||||
}
|
||||
275
ent/history/where.go
Normal file
275
ent/history/where.go
Normal file
@@ -0,0 +1,275 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package history
|
||||
|
||||
import (
|
||||
"polaris/ent/predicate"
|
||||
"time"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
)
|
||||
|
||||
// ID filters vertices based on their ID field.
|
||||
func ID(id int) predicate.History {
|
||||
return predicate.History(sql.FieldEQ(FieldID, id))
|
||||
}
|
||||
|
||||
// IDEQ applies the EQ predicate on the ID field.
|
||||
func IDEQ(id int) predicate.History {
|
||||
return predicate.History(sql.FieldEQ(FieldID, id))
|
||||
}
|
||||
|
||||
// IDNEQ applies the NEQ predicate on the ID field.
|
||||
func IDNEQ(id int) predicate.History {
|
||||
return predicate.History(sql.FieldNEQ(FieldID, id))
|
||||
}
|
||||
|
||||
// IDIn applies the In predicate on the ID field.
|
||||
func IDIn(ids ...int) predicate.History {
|
||||
return predicate.History(sql.FieldIn(FieldID, ids...))
|
||||
}
|
||||
|
||||
// IDNotIn applies the NotIn predicate on the ID field.
|
||||
func IDNotIn(ids ...int) predicate.History {
|
||||
return predicate.History(sql.FieldNotIn(FieldID, ids...))
|
||||
}
|
||||
|
||||
// IDGT applies the GT predicate on the ID field.
|
||||
func IDGT(id int) predicate.History {
|
||||
return predicate.History(sql.FieldGT(FieldID, id))
|
||||
}
|
||||
|
||||
// IDGTE applies the GTE predicate on the ID field.
|
||||
func IDGTE(id int) predicate.History {
|
||||
return predicate.History(sql.FieldGTE(FieldID, id))
|
||||
}
|
||||
|
||||
// IDLT applies the LT predicate on the ID field.
|
||||
func IDLT(id int) predicate.History {
|
||||
return predicate.History(sql.FieldLT(FieldID, id))
|
||||
}
|
||||
|
||||
// IDLTE applies the LTE predicate on the ID field.
|
||||
func IDLTE(id int) predicate.History {
|
||||
return predicate.History(sql.FieldLTE(FieldID, id))
|
||||
}
|
||||
|
||||
// SeriesID applies equality check predicate on the "series_id" field. It's identical to SeriesIDEQ.
|
||||
func SeriesID(v int) predicate.History {
|
||||
return predicate.History(sql.FieldEQ(FieldSeriesID, v))
|
||||
}
|
||||
|
||||
// EpisodeID applies equality check predicate on the "episode_id" field. It's identical to EpisodeIDEQ.
|
||||
func EpisodeID(v int) predicate.History {
|
||||
return predicate.History(sql.FieldEQ(FieldEpisodeID, v))
|
||||
}
|
||||
|
||||
// SourceTitle applies equality check predicate on the "source_title" field. It's identical to SourceTitleEQ.
|
||||
func SourceTitle(v string) predicate.History {
|
||||
return predicate.History(sql.FieldEQ(FieldSourceTitle, v))
|
||||
}
|
||||
|
||||
// Date applies equality check predicate on the "date" field. It's identical to DateEQ.
|
||||
func Date(v time.Time) predicate.History {
|
||||
return predicate.History(sql.FieldEQ(FieldDate, v))
|
||||
}
|
||||
|
||||
// SeriesIDEQ applies the EQ predicate on the "series_id" field.
|
||||
func SeriesIDEQ(v int) predicate.History {
|
||||
return predicate.History(sql.FieldEQ(FieldSeriesID, v))
|
||||
}
|
||||
|
||||
// SeriesIDNEQ applies the NEQ predicate on the "series_id" field.
|
||||
func SeriesIDNEQ(v int) predicate.History {
|
||||
return predicate.History(sql.FieldNEQ(FieldSeriesID, v))
|
||||
}
|
||||
|
||||
// SeriesIDIn applies the In predicate on the "series_id" field.
|
||||
func SeriesIDIn(vs ...int) predicate.History {
|
||||
return predicate.History(sql.FieldIn(FieldSeriesID, vs...))
|
||||
}
|
||||
|
||||
// SeriesIDNotIn applies the NotIn predicate on the "series_id" field.
|
||||
func SeriesIDNotIn(vs ...int) predicate.History {
|
||||
return predicate.History(sql.FieldNotIn(FieldSeriesID, vs...))
|
||||
}
|
||||
|
||||
// SeriesIDGT applies the GT predicate on the "series_id" field.
|
||||
func SeriesIDGT(v int) predicate.History {
|
||||
return predicate.History(sql.FieldGT(FieldSeriesID, v))
|
||||
}
|
||||
|
||||
// SeriesIDGTE applies the GTE predicate on the "series_id" field.
|
||||
func SeriesIDGTE(v int) predicate.History {
|
||||
return predicate.History(sql.FieldGTE(FieldSeriesID, v))
|
||||
}
|
||||
|
||||
// SeriesIDLT applies the LT predicate on the "series_id" field.
|
||||
func SeriesIDLT(v int) predicate.History {
|
||||
return predicate.History(sql.FieldLT(FieldSeriesID, v))
|
||||
}
|
||||
|
||||
// SeriesIDLTE applies the LTE predicate on the "series_id" field.
|
||||
func SeriesIDLTE(v int) predicate.History {
|
||||
return predicate.History(sql.FieldLTE(FieldSeriesID, v))
|
||||
}
|
||||
|
||||
// EpisodeIDEQ applies the EQ predicate on the "episode_id" field.
|
||||
func EpisodeIDEQ(v int) predicate.History {
|
||||
return predicate.History(sql.FieldEQ(FieldEpisodeID, v))
|
||||
}
|
||||
|
||||
// EpisodeIDNEQ applies the NEQ predicate on the "episode_id" field.
|
||||
func EpisodeIDNEQ(v int) predicate.History {
|
||||
return predicate.History(sql.FieldNEQ(FieldEpisodeID, v))
|
||||
}
|
||||
|
||||
// EpisodeIDIn applies the In predicate on the "episode_id" field.
|
||||
func EpisodeIDIn(vs ...int) predicate.History {
|
||||
return predicate.History(sql.FieldIn(FieldEpisodeID, vs...))
|
||||
}
|
||||
|
||||
// EpisodeIDNotIn applies the NotIn predicate on the "episode_id" field.
|
||||
func EpisodeIDNotIn(vs ...int) predicate.History {
|
||||
return predicate.History(sql.FieldNotIn(FieldEpisodeID, vs...))
|
||||
}
|
||||
|
||||
// EpisodeIDGT applies the GT predicate on the "episode_id" field.
|
||||
func EpisodeIDGT(v int) predicate.History {
|
||||
return predicate.History(sql.FieldGT(FieldEpisodeID, v))
|
||||
}
|
||||
|
||||
// EpisodeIDGTE applies the GTE predicate on the "episode_id" field.
|
||||
func EpisodeIDGTE(v int) predicate.History {
|
||||
return predicate.History(sql.FieldGTE(FieldEpisodeID, v))
|
||||
}
|
||||
|
||||
// EpisodeIDLT applies the LT predicate on the "episode_id" field.
|
||||
func EpisodeIDLT(v int) predicate.History {
|
||||
return predicate.History(sql.FieldLT(FieldEpisodeID, v))
|
||||
}
|
||||
|
||||
// EpisodeIDLTE applies the LTE predicate on the "episode_id" field.
|
||||
func EpisodeIDLTE(v int) predicate.History {
|
||||
return predicate.History(sql.FieldLTE(FieldEpisodeID, v))
|
||||
}
|
||||
|
||||
// SourceTitleEQ applies the EQ predicate on the "source_title" field.
|
||||
func SourceTitleEQ(v string) predicate.History {
|
||||
return predicate.History(sql.FieldEQ(FieldSourceTitle, v))
|
||||
}
|
||||
|
||||
// SourceTitleNEQ applies the NEQ predicate on the "source_title" field.
|
||||
func SourceTitleNEQ(v string) predicate.History {
|
||||
return predicate.History(sql.FieldNEQ(FieldSourceTitle, v))
|
||||
}
|
||||
|
||||
// SourceTitleIn applies the In predicate on the "source_title" field.
|
||||
func SourceTitleIn(vs ...string) predicate.History {
|
||||
return predicate.History(sql.FieldIn(FieldSourceTitle, vs...))
|
||||
}
|
||||
|
||||
// SourceTitleNotIn applies the NotIn predicate on the "source_title" field.
|
||||
func SourceTitleNotIn(vs ...string) predicate.History {
|
||||
return predicate.History(sql.FieldNotIn(FieldSourceTitle, vs...))
|
||||
}
|
||||
|
||||
// SourceTitleGT applies the GT predicate on the "source_title" field.
|
||||
func SourceTitleGT(v string) predicate.History {
|
||||
return predicate.History(sql.FieldGT(FieldSourceTitle, v))
|
||||
}
|
||||
|
||||
// SourceTitleGTE applies the GTE predicate on the "source_title" field.
|
||||
func SourceTitleGTE(v string) predicate.History {
|
||||
return predicate.History(sql.FieldGTE(FieldSourceTitle, v))
|
||||
}
|
||||
|
||||
// SourceTitleLT applies the LT predicate on the "source_title" field.
|
||||
func SourceTitleLT(v string) predicate.History {
|
||||
return predicate.History(sql.FieldLT(FieldSourceTitle, v))
|
||||
}
|
||||
|
||||
// SourceTitleLTE applies the LTE predicate on the "source_title" field.
|
||||
func SourceTitleLTE(v string) predicate.History {
|
||||
return predicate.History(sql.FieldLTE(FieldSourceTitle, v))
|
||||
}
|
||||
|
||||
// SourceTitleContains applies the Contains predicate on the "source_title" field.
|
||||
func SourceTitleContains(v string) predicate.History {
|
||||
return predicate.History(sql.FieldContains(FieldSourceTitle, v))
|
||||
}
|
||||
|
||||
// SourceTitleHasPrefix applies the HasPrefix predicate on the "source_title" field.
|
||||
func SourceTitleHasPrefix(v string) predicate.History {
|
||||
return predicate.History(sql.FieldHasPrefix(FieldSourceTitle, v))
|
||||
}
|
||||
|
||||
// SourceTitleHasSuffix applies the HasSuffix predicate on the "source_title" field.
|
||||
func SourceTitleHasSuffix(v string) predicate.History {
|
||||
return predicate.History(sql.FieldHasSuffix(FieldSourceTitle, v))
|
||||
}
|
||||
|
||||
// SourceTitleEqualFold applies the EqualFold predicate on the "source_title" field.
|
||||
func SourceTitleEqualFold(v string) predicate.History {
|
||||
return predicate.History(sql.FieldEqualFold(FieldSourceTitle, v))
|
||||
}
|
||||
|
||||
// SourceTitleContainsFold applies the ContainsFold predicate on the "source_title" field.
|
||||
func SourceTitleContainsFold(v string) predicate.History {
|
||||
return predicate.History(sql.FieldContainsFold(FieldSourceTitle, v))
|
||||
}
|
||||
|
||||
// DateEQ applies the EQ predicate on the "date" field.
|
||||
func DateEQ(v time.Time) predicate.History {
|
||||
return predicate.History(sql.FieldEQ(FieldDate, v))
|
||||
}
|
||||
|
||||
// DateNEQ applies the NEQ predicate on the "date" field.
|
||||
func DateNEQ(v time.Time) predicate.History {
|
||||
return predicate.History(sql.FieldNEQ(FieldDate, v))
|
||||
}
|
||||
|
||||
// DateIn applies the In predicate on the "date" field.
|
||||
func DateIn(vs ...time.Time) predicate.History {
|
||||
return predicate.History(sql.FieldIn(FieldDate, vs...))
|
||||
}
|
||||
|
||||
// DateNotIn applies the NotIn predicate on the "date" field.
|
||||
func DateNotIn(vs ...time.Time) predicate.History {
|
||||
return predicate.History(sql.FieldNotIn(FieldDate, vs...))
|
||||
}
|
||||
|
||||
// DateGT applies the GT predicate on the "date" field.
|
||||
func DateGT(v time.Time) predicate.History {
|
||||
return predicate.History(sql.FieldGT(FieldDate, v))
|
||||
}
|
||||
|
||||
// DateGTE applies the GTE predicate on the "date" field.
|
||||
func DateGTE(v time.Time) predicate.History {
|
||||
return predicate.History(sql.FieldGTE(FieldDate, v))
|
||||
}
|
||||
|
||||
// DateLT applies the LT predicate on the "date" field.
|
||||
func DateLT(v time.Time) predicate.History {
|
||||
return predicate.History(sql.FieldLT(FieldDate, v))
|
||||
}
|
||||
|
||||
// DateLTE applies the LTE predicate on the "date" field.
|
||||
func DateLTE(v time.Time) predicate.History {
|
||||
return predicate.History(sql.FieldLTE(FieldDate, v))
|
||||
}
|
||||
|
||||
// And groups predicates with the AND operator between them.
|
||||
func And(predicates ...predicate.History) predicate.History {
|
||||
return predicate.History(sql.AndPredicates(predicates...))
|
||||
}
|
||||
|
||||
// Or groups predicates with the OR operator between them.
|
||||
func Or(predicates ...predicate.History) predicate.History {
|
||||
return predicate.History(sql.OrPredicates(predicates...))
|
||||
}
|
||||
|
||||
// Not applies the not operator on the given predicate.
|
||||
func Not(p predicate.History) predicate.History {
|
||||
return predicate.History(sql.NotPredicates(p))
|
||||
}
|
||||
223
ent/history_create.go
Normal file
223
ent/history_create.go
Normal file
@@ -0,0 +1,223 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"polaris/ent/history"
|
||||
"time"
|
||||
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
)
|
||||
|
||||
// HistoryCreate is the builder for creating a History entity.
|
||||
type HistoryCreate struct {
|
||||
config
|
||||
mutation *HistoryMutation
|
||||
hooks []Hook
|
||||
}
|
||||
|
||||
// SetSeriesID sets the "series_id" field.
|
||||
func (hc *HistoryCreate) SetSeriesID(i int) *HistoryCreate {
|
||||
hc.mutation.SetSeriesID(i)
|
||||
return hc
|
||||
}
|
||||
|
||||
// SetEpisodeID sets the "episode_id" field.
|
||||
func (hc *HistoryCreate) SetEpisodeID(i int) *HistoryCreate {
|
||||
hc.mutation.SetEpisodeID(i)
|
||||
return hc
|
||||
}
|
||||
|
||||
// SetSourceTitle sets the "source_title" field.
|
||||
func (hc *HistoryCreate) SetSourceTitle(s string) *HistoryCreate {
|
||||
hc.mutation.SetSourceTitle(s)
|
||||
return hc
|
||||
}
|
||||
|
||||
// SetDate sets the "date" field.
|
||||
func (hc *HistoryCreate) SetDate(t time.Time) *HistoryCreate {
|
||||
hc.mutation.SetDate(t)
|
||||
return hc
|
||||
}
|
||||
|
||||
// Mutation returns the HistoryMutation object of the builder.
|
||||
func (hc *HistoryCreate) Mutation() *HistoryMutation {
|
||||
return hc.mutation
|
||||
}
|
||||
|
||||
// Save creates the History in the database.
|
||||
func (hc *HistoryCreate) Save(ctx context.Context) (*History, error) {
|
||||
return withHooks(ctx, hc.sqlSave, hc.mutation, hc.hooks)
|
||||
}
|
||||
|
||||
// SaveX calls Save and panics if Save returns an error.
|
||||
func (hc *HistoryCreate) SaveX(ctx context.Context) *History {
|
||||
v, err := hc.Save(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
// Exec executes the query.
|
||||
func (hc *HistoryCreate) Exec(ctx context.Context) error {
|
||||
_, err := hc.Save(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (hc *HistoryCreate) ExecX(ctx context.Context) {
|
||||
if err := hc.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// check runs all checks and user-defined validators on the builder.
// Every field of History is required and has no default, so each must have
// been set on the mutation before saving; the first missing one is reported
// as a *ValidationError.
func (hc *HistoryCreate) check() error {
	if _, ok := hc.mutation.SeriesID(); !ok {
		return &ValidationError{Name: "series_id", err: errors.New(`ent: missing required field "History.series_id"`)}
	}
	if _, ok := hc.mutation.EpisodeID(); !ok {
		return &ValidationError{Name: "episode_id", err: errors.New(`ent: missing required field "History.episode_id"`)}
	}
	if _, ok := hc.mutation.SourceTitle(); !ok {
		return &ValidationError{Name: "source_title", err: errors.New(`ent: missing required field "History.source_title"`)}
	}
	if _, ok := hc.mutation.Date(); !ok {
		return &ValidationError{Name: "date", err: errors.New(`ent: missing required field "History.date"`)}
	}
	return nil
}
|
||||
|
||||
func (hc *HistoryCreate) sqlSave(ctx context.Context) (*History, error) {
|
||||
if err := hc.check(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_node, _spec := hc.createSpec()
|
||||
if err := sqlgraph.CreateNode(ctx, hc.driver, _spec); err != nil {
|
||||
if sqlgraph.IsConstraintError(err) {
|
||||
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
id := _spec.ID.Value.(int64)
|
||||
_node.ID = int(id)
|
||||
hc.mutation.id = &_node.ID
|
||||
hc.mutation.done = true
|
||||
return _node, nil
|
||||
}
|
||||
|
||||
func (hc *HistoryCreate) createSpec() (*History, *sqlgraph.CreateSpec) {
|
||||
var (
|
||||
_node = &History{config: hc.config}
|
||||
_spec = sqlgraph.NewCreateSpec(history.Table, sqlgraph.NewFieldSpec(history.FieldID, field.TypeInt))
|
||||
)
|
||||
if value, ok := hc.mutation.SeriesID(); ok {
|
||||
_spec.SetField(history.FieldSeriesID, field.TypeInt, value)
|
||||
_node.SeriesID = value
|
||||
}
|
||||
if value, ok := hc.mutation.EpisodeID(); ok {
|
||||
_spec.SetField(history.FieldEpisodeID, field.TypeInt, value)
|
||||
_node.EpisodeID = value
|
||||
}
|
||||
if value, ok := hc.mutation.SourceTitle(); ok {
|
||||
_spec.SetField(history.FieldSourceTitle, field.TypeString, value)
|
||||
_node.SourceTitle = value
|
||||
}
|
||||
if value, ok := hc.mutation.Date(); ok {
|
||||
_spec.SetField(history.FieldDate, field.TypeTime, value)
|
||||
_node.Date = value
|
||||
}
|
||||
return _node, _spec
|
||||
}
|
||||
|
||||
// HistoryCreateBulk is the builder for creating many History entities in bulk.
type HistoryCreateBulk struct {
	config
	// err records a builder-construction error to be surfaced on Save.
	err error
	// builders holds one single-entity create builder per entity to insert.
	builders []*HistoryCreate
}
|
||||
|
||||
// Save creates the History entities in the database in a single batch
// INSERT. Each builder's hooks are chained so that every hook runs before
// the batch statement; the last mutator in the chain performs the actual
// sqlgraph.BatchCreate. Returned nodes carry their database-assigned IDs.
func (hcb *HistoryCreateBulk) Save(ctx context.Context) ([]*History, error) {
	if hcb.err != nil {
		return nil, hcb.err
	}
	specs := make([]*sqlgraph.CreateSpec, len(hcb.builders))
	nodes := make([]*History, len(hcb.builders))
	mutators := make([]Mutator, len(hcb.builders))
	for i := range hcb.builders {
		func(i int, root context.Context) {
			builder := hcb.builders[i]
			var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
				mutation, ok := m.(*HistoryMutation)
				if !ok {
					return nil, fmt.Errorf("unexpected mutation type %T", m)
				}
				if err := builder.check(); err != nil {
					return nil, err
				}
				builder.mutation = mutation
				var err error
				nodes[i], specs[i] = builder.createSpec()
				if i < len(mutators)-1 {
					// Not the last builder: advance the chain to the next
					// builder's mutator before the batch statement runs.
					_, err = mutators[i+1].Mutate(root, hcb.builders[i+1].mutation)
				} else {
					spec := &sqlgraph.BatchCreateSpec{Nodes: specs}
					// Invoke the actual operation on the latest mutation in the chain.
					if err = sqlgraph.BatchCreate(ctx, hcb.driver, spec); err != nil {
						if sqlgraph.IsConstraintError(err) {
							err = &ConstraintError{msg: err.Error(), wrap: err}
						}
					}
				}
				if err != nil {
					return nil, err
				}
				// Back-fill the database-assigned ID into mutation and node.
				mutation.id = &nodes[i].ID
				if specs[i].ID.Value != nil {
					id := specs[i].ID.Value.(int64)
					nodes[i].ID = int(id)
				}
				mutation.done = true
				return nodes[i], nil
			})
			// Wrap the mutator with this builder's hooks, innermost last.
			for i := len(builder.hooks) - 1; i >= 0; i-- {
				mut = builder.hooks[i](mut)
			}
			mutators[i] = mut
		}(i, ctx)
	}
	if len(mutators) > 0 {
		// Kick off the chain from the first builder's mutator.
		if _, err := mutators[0].Mutate(ctx, hcb.builders[0].mutation); err != nil {
			return nil, err
		}
	}
	return nodes, nil
}
|
||||
|
||||
// SaveX is like Save, but panics if an error occurs.
|
||||
func (hcb *HistoryCreateBulk) SaveX(ctx context.Context) []*History {
|
||||
v, err := hcb.Save(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
// Exec executes the query.
|
||||
func (hcb *HistoryCreateBulk) Exec(ctx context.Context) error {
|
||||
_, err := hcb.Save(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (hcb *HistoryCreateBulk) ExecX(ctx context.Context) {
|
||||
if err := hcb.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
88
ent/history_delete.go
Normal file
88
ent/history_delete.go
Normal file
@@ -0,0 +1,88 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"context"
|
||||
"polaris/ent/history"
|
||||
"polaris/ent/predicate"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
)
|
||||
|
||||
// HistoryDelete is the builder for deleting a History entity.
|
||||
type HistoryDelete struct {
|
||||
config
|
||||
hooks []Hook
|
||||
mutation *HistoryMutation
|
||||
}
|
||||
|
||||
// Where appends a list predicates to the HistoryDelete builder.
|
||||
func (hd *HistoryDelete) Where(ps ...predicate.History) *HistoryDelete {
|
||||
hd.mutation.Where(ps...)
|
||||
return hd
|
||||
}
|
||||
|
||||
// Exec executes the deletion query and returns how many vertices were deleted.
|
||||
func (hd *HistoryDelete) Exec(ctx context.Context) (int, error) {
|
||||
return withHooks(ctx, hd.sqlExec, hd.mutation, hd.hooks)
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (hd *HistoryDelete) ExecX(ctx context.Context) int {
|
||||
n, err := hd.Exec(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
func (hd *HistoryDelete) sqlExec(ctx context.Context) (int, error) {
|
||||
_spec := sqlgraph.NewDeleteSpec(history.Table, sqlgraph.NewFieldSpec(history.FieldID, field.TypeInt))
|
||||
if ps := hd.mutation.predicates; len(ps) > 0 {
|
||||
_spec.Predicate = func(selector *sql.Selector) {
|
||||
for i := range ps {
|
||||
ps[i](selector)
|
||||
}
|
||||
}
|
||||
}
|
||||
affected, err := sqlgraph.DeleteNodes(ctx, hd.driver, _spec)
|
||||
if err != nil && sqlgraph.IsConstraintError(err) {
|
||||
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||
}
|
||||
hd.mutation.done = true
|
||||
return affected, err
|
||||
}
|
||||
|
||||
// HistoryDeleteOne is the builder for deleting a single History entity.
|
||||
type HistoryDeleteOne struct {
|
||||
hd *HistoryDelete
|
||||
}
|
||||
|
||||
// Where appends a list predicates to the HistoryDelete builder.
|
||||
func (hdo *HistoryDeleteOne) Where(ps ...predicate.History) *HistoryDeleteOne {
|
||||
hdo.hd.mutation.Where(ps...)
|
||||
return hdo
|
||||
}
|
||||
|
||||
// Exec executes the deletion query.
|
||||
func (hdo *HistoryDeleteOne) Exec(ctx context.Context) error {
|
||||
n, err := hdo.hd.Exec(ctx)
|
||||
switch {
|
||||
case err != nil:
|
||||
return err
|
||||
case n == 0:
|
||||
return &NotFoundError{history.Label}
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (hdo *HistoryDeleteOne) ExecX(ctx context.Context) {
|
||||
if err := hdo.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
526
ent/history_query.go
Normal file
526
ent/history_query.go
Normal file
@@ -0,0 +1,526 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"math"
|
||||
"polaris/ent/history"
|
||||
"polaris/ent/predicate"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
)
|
||||
|
||||
// HistoryQuery is the builder for querying History entities.
type HistoryQuery struct {
	config
	// ctx carries per-query options (limit, offset, unique, op name).
	ctx *QueryContext
	// order accumulates the requested ordering options.
	order []history.OrderOption
	// inters are query interceptors applied around execution.
	inters []Interceptor
	// predicates are the accumulated WHERE conditions.
	predicates []predicate.History
	// intermediate query (i.e. traversal path).
	sql  *sql.Selector
	path func(context.Context) (*sql.Selector, error)
}
|
||||
|
||||
// Where adds a new predicate for the HistoryQuery builder.
|
||||
func (hq *HistoryQuery) Where(ps ...predicate.History) *HistoryQuery {
|
||||
hq.predicates = append(hq.predicates, ps...)
|
||||
return hq
|
||||
}
|
||||
|
||||
// Limit the number of records to be returned by this query.
|
||||
func (hq *HistoryQuery) Limit(limit int) *HistoryQuery {
|
||||
hq.ctx.Limit = &limit
|
||||
return hq
|
||||
}
|
||||
|
||||
// Offset to start from.
|
||||
func (hq *HistoryQuery) Offset(offset int) *HistoryQuery {
|
||||
hq.ctx.Offset = &offset
|
||||
return hq
|
||||
}
|
||||
|
||||
// Unique configures the query builder to filter duplicate records on query.
|
||||
// By default, unique is set to true, and can be disabled using this method.
|
||||
func (hq *HistoryQuery) Unique(unique bool) *HistoryQuery {
|
||||
hq.ctx.Unique = &unique
|
||||
return hq
|
||||
}
|
||||
|
||||
// Order specifies how the records should be ordered.
|
||||
func (hq *HistoryQuery) Order(o ...history.OrderOption) *HistoryQuery {
|
||||
hq.order = append(hq.order, o...)
|
||||
return hq
|
||||
}
|
||||
|
||||
// First returns the first History entity from the query.
|
||||
// Returns a *NotFoundError when no History was found.
|
||||
func (hq *HistoryQuery) First(ctx context.Context) (*History, error) {
|
||||
nodes, err := hq.Limit(1).All(setContextOp(ctx, hq.ctx, "First"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(nodes) == 0 {
|
||||
return nil, &NotFoundError{history.Label}
|
||||
}
|
||||
return nodes[0], nil
|
||||
}
|
||||
|
||||
// FirstX is like First, but panics if an error occurs.
|
||||
func (hq *HistoryQuery) FirstX(ctx context.Context) *History {
|
||||
node, err := hq.First(ctx)
|
||||
if err != nil && !IsNotFound(err) {
|
||||
panic(err)
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
// FirstID returns the first History ID from the query.
|
||||
// Returns a *NotFoundError when no History ID was found.
|
||||
func (hq *HistoryQuery) FirstID(ctx context.Context) (id int, err error) {
|
||||
var ids []int
|
||||
if ids, err = hq.Limit(1).IDs(setContextOp(ctx, hq.ctx, "FirstID")); err != nil {
|
||||
return
|
||||
}
|
||||
if len(ids) == 0 {
|
||||
err = &NotFoundError{history.Label}
|
||||
return
|
||||
}
|
||||
return ids[0], nil
|
||||
}
|
||||
|
||||
// FirstIDX is like FirstID, but panics if an error occurs.
|
||||
func (hq *HistoryQuery) FirstIDX(ctx context.Context) int {
|
||||
id, err := hq.FirstID(ctx)
|
||||
if err != nil && !IsNotFound(err) {
|
||||
panic(err)
|
||||
}
|
||||
return id
|
||||
}
|
||||
|
||||
// Only returns a single History entity found by the query, ensuring it only returns one.
|
||||
// Returns a *NotSingularError when more than one History entity is found.
|
||||
// Returns a *NotFoundError when no History entities are found.
|
||||
func (hq *HistoryQuery) Only(ctx context.Context) (*History, error) {
|
||||
nodes, err := hq.Limit(2).All(setContextOp(ctx, hq.ctx, "Only"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
switch len(nodes) {
|
||||
case 1:
|
||||
return nodes[0], nil
|
||||
case 0:
|
||||
return nil, &NotFoundError{history.Label}
|
||||
default:
|
||||
return nil, &NotSingularError{history.Label}
|
||||
}
|
||||
}
|
||||
|
||||
// OnlyX is like Only, but panics if an error occurs.
|
||||
func (hq *HistoryQuery) OnlyX(ctx context.Context) *History {
|
||||
node, err := hq.Only(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
// OnlyID is like Only, but returns the only History ID in the query.
|
||||
// Returns a *NotSingularError when more than one History ID is found.
|
||||
// Returns a *NotFoundError when no entities are found.
|
||||
func (hq *HistoryQuery) OnlyID(ctx context.Context) (id int, err error) {
|
||||
var ids []int
|
||||
if ids, err = hq.Limit(2).IDs(setContextOp(ctx, hq.ctx, "OnlyID")); err != nil {
|
||||
return
|
||||
}
|
||||
switch len(ids) {
|
||||
case 1:
|
||||
id = ids[0]
|
||||
case 0:
|
||||
err = &NotFoundError{history.Label}
|
||||
default:
|
||||
err = &NotSingularError{history.Label}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// OnlyIDX is like OnlyID, but panics if an error occurs.
|
||||
func (hq *HistoryQuery) OnlyIDX(ctx context.Context) int {
|
||||
id, err := hq.OnlyID(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return id
|
||||
}
|
||||
|
||||
// All executes the query and returns a list of Histories.
|
||||
func (hq *HistoryQuery) All(ctx context.Context) ([]*History, error) {
|
||||
ctx = setContextOp(ctx, hq.ctx, "All")
|
||||
if err := hq.prepareQuery(ctx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
qr := querierAll[[]*History, *HistoryQuery]()
|
||||
return withInterceptors[[]*History](ctx, hq, qr, hq.inters)
|
||||
}
|
||||
|
||||
// AllX is like All, but panics if an error occurs.
|
||||
func (hq *HistoryQuery) AllX(ctx context.Context) []*History {
|
||||
nodes, err := hq.All(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return nodes
|
||||
}
|
||||
|
||||
// IDs executes the query and returns a list of History IDs.
|
||||
func (hq *HistoryQuery) IDs(ctx context.Context) (ids []int, err error) {
|
||||
if hq.ctx.Unique == nil && hq.path != nil {
|
||||
hq.Unique(true)
|
||||
}
|
||||
ctx = setContextOp(ctx, hq.ctx, "IDs")
|
||||
if err = hq.Select(history.FieldID).Scan(ctx, &ids); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return ids, nil
|
||||
}
|
||||
|
||||
// IDsX is like IDs, but panics if an error occurs.
|
||||
func (hq *HistoryQuery) IDsX(ctx context.Context) []int {
|
||||
ids, err := hq.IDs(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return ids
|
||||
}
|
||||
|
||||
// Count returns the count of the given query.
|
||||
func (hq *HistoryQuery) Count(ctx context.Context) (int, error) {
|
||||
ctx = setContextOp(ctx, hq.ctx, "Count")
|
||||
if err := hq.prepareQuery(ctx); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return withInterceptors[int](ctx, hq, querierCount[*HistoryQuery](), hq.inters)
|
||||
}
|
||||
|
||||
// CountX is like Count, but panics if an error occurs.
|
||||
func (hq *HistoryQuery) CountX(ctx context.Context) int {
|
||||
count, err := hq.Count(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
// Exist returns true if the query has elements in the graph.
|
||||
func (hq *HistoryQuery) Exist(ctx context.Context) (bool, error) {
|
||||
ctx = setContextOp(ctx, hq.ctx, "Exist")
|
||||
switch _, err := hq.FirstID(ctx); {
|
||||
case IsNotFound(err):
|
||||
return false, nil
|
||||
case err != nil:
|
||||
return false, fmt.Errorf("ent: check existence: %w", err)
|
||||
default:
|
||||
return true, nil
|
||||
}
|
||||
}
|
||||
|
||||
// ExistX is like Exist, but panics if an error occurs.
|
||||
func (hq *HistoryQuery) ExistX(ctx context.Context) bool {
|
||||
exist, err := hq.Exist(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return exist
|
||||
}
|
||||
|
||||
// Clone returns a duplicate of the HistoryQuery builder, including all associated steps. It can be
|
||||
// used to prepare common query builders and use them differently after the clone is made.
|
||||
func (hq *HistoryQuery) Clone() *HistoryQuery {
|
||||
if hq == nil {
|
||||
return nil
|
||||
}
|
||||
return &HistoryQuery{
|
||||
config: hq.config,
|
||||
ctx: hq.ctx.Clone(),
|
||||
order: append([]history.OrderOption{}, hq.order...),
|
||||
inters: append([]Interceptor{}, hq.inters...),
|
||||
predicates: append([]predicate.History{}, hq.predicates...),
|
||||
// clone intermediate query.
|
||||
sql: hq.sql.Clone(),
|
||||
path: hq.path,
|
||||
}
|
||||
}
|
||||
|
||||
// GroupBy is used to group vertices by one or more fields/columns.
|
||||
// It is often used with aggregate functions, like: count, max, mean, min, sum.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// var v []struct {
|
||||
// SeriesID int `json:"series_id,omitempty"`
|
||||
// Count int `json:"count,omitempty"`
|
||||
// }
|
||||
//
|
||||
// client.History.Query().
|
||||
// GroupBy(history.FieldSeriesID).
|
||||
// Aggregate(ent.Count()).
|
||||
// Scan(ctx, &v)
|
||||
func (hq *HistoryQuery) GroupBy(field string, fields ...string) *HistoryGroupBy {
|
||||
hq.ctx.Fields = append([]string{field}, fields...)
|
||||
grbuild := &HistoryGroupBy{build: hq}
|
||||
grbuild.flds = &hq.ctx.Fields
|
||||
grbuild.label = history.Label
|
||||
grbuild.scan = grbuild.Scan
|
||||
return grbuild
|
||||
}
|
||||
|
||||
// Select allows the selection one or more fields/columns for the given query,
|
||||
// instead of selecting all fields in the entity.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// var v []struct {
|
||||
// SeriesID int `json:"series_id,omitempty"`
|
||||
// }
|
||||
//
|
||||
// client.History.Query().
|
||||
// Select(history.FieldSeriesID).
|
||||
// Scan(ctx, &v)
|
||||
func (hq *HistoryQuery) Select(fields ...string) *HistorySelect {
|
||||
hq.ctx.Fields = append(hq.ctx.Fields, fields...)
|
||||
sbuild := &HistorySelect{HistoryQuery: hq}
|
||||
sbuild.label = history.Label
|
||||
sbuild.flds, sbuild.scan = &hq.ctx.Fields, sbuild.Scan
|
||||
return sbuild
|
||||
}
|
||||
|
||||
// Aggregate returns a HistorySelect configured with the given aggregations.
|
||||
func (hq *HistoryQuery) Aggregate(fns ...AggregateFunc) *HistorySelect {
|
||||
return hq.Select().Aggregate(fns...)
|
||||
}
|
||||
|
||||
// prepareQuery validates the builder before execution: it runs any
// registered Traverser interceptors, checks that every selected field is a
// valid History column, and materializes a deferred traversal path (if any)
// into hq.sql.
func (hq *HistoryQuery) prepareQuery(ctx context.Context) error {
	for _, inter := range hq.inters {
		if inter == nil {
			return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)")
		}
		// Only Traverser interceptors participate at prepare time; plain
		// query interceptors run later, around the actual querier.
		if trv, ok := inter.(Traverser); ok {
			if err := trv.Traverse(ctx, hq); err != nil {
				return err
			}
		}
	}
	for _, f := range hq.ctx.Fields {
		if !history.ValidColumn(f) {
			return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
		}
	}
	if hq.path != nil {
		// Resolve the lazily-recorded traversal into a concrete selector.
		prev, err := hq.path(ctx)
		if err != nil {
			return err
		}
		hq.sql = prev
	}
	return nil
}
|
||||
|
||||
// sqlAll executes the query and scans all matching rows into History
// entities. Optional queryHook functions may adjust the query spec just
// before execution.
func (hq *HistoryQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*History, error) {
	var (
		nodes = []*History{}
		_spec = hq.querySpec()
	)
	// ScanValues tells sqlgraph which Go types each column scans into; the
	// nil receiver is fine because (*History).scanValues never touches it.
	_spec.ScanValues = func(columns []string) ([]any, error) {
		return (*History).scanValues(nil, columns)
	}
	// Assign is invoked once per row: allocate the node and append it
	// before assigning, so it carries the client config for later use.
	_spec.Assign = func(columns []string, values []any) error {
		node := &History{config: hq.config}
		nodes = append(nodes, node)
		return node.assignValues(columns, values)
	}
	for i := range hooks {
		hooks[i](ctx, _spec)
	}
	if err := sqlgraph.QueryNodes(ctx, hq.driver, _spec); err != nil {
		return nil, err
	}
	// Generated early-return slot for eager-loading; History has no edges,
	// so both branches return the same result.
	if len(nodes) == 0 {
		return nodes, nil
	}
	return nodes, nil
}
|
||||
|
||||
func (hq *HistoryQuery) sqlCount(ctx context.Context) (int, error) {
|
||||
_spec := hq.querySpec()
|
||||
_spec.Node.Columns = hq.ctx.Fields
|
||||
if len(hq.ctx.Fields) > 0 {
|
||||
_spec.Unique = hq.ctx.Unique != nil && *hq.ctx.Unique
|
||||
}
|
||||
return sqlgraph.CountNodes(ctx, hq.driver, _spec)
|
||||
}
|
||||
|
||||
// querySpec translates the builder's accumulated state (predicates,
// ordering, limit/offset, selected fields) into a sqlgraph.QuerySpec ready
// for execution.
func (hq *HistoryQuery) querySpec() *sqlgraph.QuerySpec {
	_spec := sqlgraph.NewQuerySpec(history.Table, history.Columns, sqlgraph.NewFieldSpec(history.FieldID, field.TypeInt))
	_spec.From = hq.sql
	if unique := hq.ctx.Unique; unique != nil {
		_spec.Unique = *unique
	} else if hq.path != nil {
		// Graph traversals can yield duplicate rows; deduplicate by default
		// unless the caller decided explicitly via Unique().
		_spec.Unique = true
	}
	if fields := hq.ctx.Fields; len(fields) > 0 {
		// The ID column is always selected so nodes can be identified;
		// skip it in the loop to avoid selecting it twice.
		_spec.Node.Columns = make([]string, 0, len(fields))
		_spec.Node.Columns = append(_spec.Node.Columns, history.FieldID)
		for i := range fields {
			if fields[i] != history.FieldID {
				_spec.Node.Columns = append(_spec.Node.Columns, fields[i])
			}
		}
	}
	if ps := hq.predicates; len(ps) > 0 {
		_spec.Predicate = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	if limit := hq.ctx.Limit; limit != nil {
		_spec.Limit = *limit
	}
	if offset := hq.ctx.Offset; offset != nil {
		_spec.Offset = *offset
	}
	if ps := hq.order; len(ps) > 0 {
		_spec.Order = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	return _spec
}
|
||||
|
||||
// sqlQuery builds the raw SQL selector for this query without executing it.
// It is used by the GroupBy/Select scan paths and for rendering the query.
func (hq *HistoryQuery) sqlQuery(ctx context.Context) *sql.Selector {
	builder := sql.Dialect(hq.driver.Dialect())
	t1 := builder.Table(history.Table)
	columns := hq.ctx.Fields
	if len(columns) == 0 {
		columns = history.Columns
	}
	selector := builder.Select(t1.Columns(columns...)...).From(t1)
	if hq.sql != nil {
		// An intermediate traversal selector already exists; reuse it and
		// only narrow the projected columns.
		selector = hq.sql
		selector.Select(selector.Columns(columns...)...)
	}
	if hq.ctx.Unique != nil && *hq.ctx.Unique {
		selector.Distinct()
	}
	for _, p := range hq.predicates {
		p(selector)
	}
	for _, p := range hq.order {
		p(selector)
	}
	if offset := hq.ctx.Offset; offset != nil {
		// limit is mandatory for offset clause. We start
		// with default value, and override it below if needed.
		selector.Offset(*offset).Limit(math.MaxInt32)
	}
	if limit := hq.ctx.Limit; limit != nil {
		selector.Limit(*limit)
	}
	return selector
}
|
||||
|
||||
// HistoryGroupBy is the group-by builder for History entities.
|
||||
type HistoryGroupBy struct {
|
||||
selector
|
||||
build *HistoryQuery
|
||||
}
|
||||
|
||||
// Aggregate adds the given aggregation functions to the group-by query.
|
||||
func (hgb *HistoryGroupBy) Aggregate(fns ...AggregateFunc) *HistoryGroupBy {
|
||||
hgb.fns = append(hgb.fns, fns...)
|
||||
return hgb
|
||||
}
|
||||
|
||||
// Scan applies the selector query and scans the result into the given value.
|
||||
func (hgb *HistoryGroupBy) Scan(ctx context.Context, v any) error {
|
||||
ctx = setContextOp(ctx, hgb.build.ctx, "GroupBy")
|
||||
if err := hgb.build.prepareQuery(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
return scanWithInterceptors[*HistoryQuery, *HistoryGroupBy](ctx, hgb.build, hgb, hgb.build.inters, v)
|
||||
}
|
||||
|
||||
func (hgb *HistoryGroupBy) sqlScan(ctx context.Context, root *HistoryQuery, v any) error {
|
||||
selector := root.sqlQuery(ctx).Select()
|
||||
aggregation := make([]string, 0, len(hgb.fns))
|
||||
for _, fn := range hgb.fns {
|
||||
aggregation = append(aggregation, fn(selector))
|
||||
}
|
||||
if len(selector.SelectedColumns()) == 0 {
|
||||
columns := make([]string, 0, len(*hgb.flds)+len(hgb.fns))
|
||||
for _, f := range *hgb.flds {
|
||||
columns = append(columns, selector.C(f))
|
||||
}
|
||||
columns = append(columns, aggregation...)
|
||||
selector.Select(columns...)
|
||||
}
|
||||
selector.GroupBy(selector.Columns(*hgb.flds...)...)
|
||||
if err := selector.Err(); err != nil {
|
||||
return err
|
||||
}
|
||||
rows := &sql.Rows{}
|
||||
query, args := selector.Query()
|
||||
if err := hgb.build.driver.Query(ctx, query, args, rows); err != nil {
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
return sql.ScanSlice(rows, v)
|
||||
}
|
||||
|
||||
// HistorySelect is the builder for selecting fields of History entities.
|
||||
type HistorySelect struct {
|
||||
*HistoryQuery
|
||||
selector
|
||||
}
|
||||
|
||||
// Aggregate adds the given aggregation functions to the selector query.
|
||||
func (hs *HistorySelect) Aggregate(fns ...AggregateFunc) *HistorySelect {
|
||||
hs.fns = append(hs.fns, fns...)
|
||||
return hs
|
||||
}
|
||||
|
||||
// Scan applies the selector query and scans the result into the given value.
|
||||
func (hs *HistorySelect) Scan(ctx context.Context, v any) error {
|
||||
ctx = setContextOp(ctx, hs.ctx, "Select")
|
||||
if err := hs.prepareQuery(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
return scanWithInterceptors[*HistoryQuery, *HistorySelect](ctx, hs.HistoryQuery, hs, hs.inters, v)
|
||||
}
|
||||
|
||||
func (hs *HistorySelect) sqlScan(ctx context.Context, root *HistoryQuery, v any) error {
|
||||
selector := root.sqlQuery(ctx)
|
||||
aggregation := make([]string, 0, len(hs.fns))
|
||||
for _, fn := range hs.fns {
|
||||
aggregation = append(aggregation, fn(selector))
|
||||
}
|
||||
switch n := len(*hs.selector.flds); {
|
||||
case n == 0 && len(aggregation) > 0:
|
||||
selector.Select(aggregation...)
|
||||
case n != 0 && len(aggregation) > 0:
|
||||
selector.AppendSelect(aggregation...)
|
||||
}
|
||||
rows := &sql.Rows{}
|
||||
query, args := selector.Query()
|
||||
if err := hs.driver.Query(ctx, query, args, rows); err != nil {
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
return sql.ScanSlice(rows, v)
|
||||
}
|
||||
352
ent/history_update.go
Normal file
352
ent/history_update.go
Normal file
@@ -0,0 +1,352 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"polaris/ent/history"
|
||||
"polaris/ent/predicate"
|
||||
"time"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
)
|
||||
|
||||
// HistoryUpdate is the builder for updating History entities.
|
||||
type HistoryUpdate struct {
|
||||
config
|
||||
hooks []Hook
|
||||
mutation *HistoryMutation
|
||||
}
|
||||
|
||||
// Where appends a list predicates to the HistoryUpdate builder.
|
||||
func (hu *HistoryUpdate) Where(ps ...predicate.History) *HistoryUpdate {
|
||||
hu.mutation.Where(ps...)
|
||||
return hu
|
||||
}
|
||||
|
||||
// SetSeriesID sets the "series_id" field.
|
||||
func (hu *HistoryUpdate) SetSeriesID(i int) *HistoryUpdate {
|
||||
hu.mutation.ResetSeriesID()
|
||||
hu.mutation.SetSeriesID(i)
|
||||
return hu
|
||||
}
|
||||
|
||||
// SetNillableSeriesID sets the "series_id" field if the given value is not nil.
|
||||
func (hu *HistoryUpdate) SetNillableSeriesID(i *int) *HistoryUpdate {
|
||||
if i != nil {
|
||||
hu.SetSeriesID(*i)
|
||||
}
|
||||
return hu
|
||||
}
|
||||
|
||||
// AddSeriesID adds i to the "series_id" field.
|
||||
func (hu *HistoryUpdate) AddSeriesID(i int) *HistoryUpdate {
|
||||
hu.mutation.AddSeriesID(i)
|
||||
return hu
|
||||
}
|
||||
|
||||
// SetEpisodeID sets the "episode_id" field.
|
||||
func (hu *HistoryUpdate) SetEpisodeID(i int) *HistoryUpdate {
|
||||
hu.mutation.ResetEpisodeID()
|
||||
hu.mutation.SetEpisodeID(i)
|
||||
return hu
|
||||
}
|
||||
|
||||
// SetNillableEpisodeID sets the "episode_id" field if the given value is not nil.
|
||||
func (hu *HistoryUpdate) SetNillableEpisodeID(i *int) *HistoryUpdate {
|
||||
if i != nil {
|
||||
hu.SetEpisodeID(*i)
|
||||
}
|
||||
return hu
|
||||
}
|
||||
|
||||
// AddEpisodeID adds i to the "episode_id" field.
|
||||
func (hu *HistoryUpdate) AddEpisodeID(i int) *HistoryUpdate {
|
||||
hu.mutation.AddEpisodeID(i)
|
||||
return hu
|
||||
}
|
||||
|
||||
// SetSourceTitle sets the "source_title" field.
|
||||
func (hu *HistoryUpdate) SetSourceTitle(s string) *HistoryUpdate {
|
||||
hu.mutation.SetSourceTitle(s)
|
||||
return hu
|
||||
}
|
||||
|
||||
// SetNillableSourceTitle sets the "source_title" field if the given value is not nil.
|
||||
func (hu *HistoryUpdate) SetNillableSourceTitle(s *string) *HistoryUpdate {
|
||||
if s != nil {
|
||||
hu.SetSourceTitle(*s)
|
||||
}
|
||||
return hu
|
||||
}
|
||||
|
||||
// SetDate sets the "date" field.
|
||||
func (hu *HistoryUpdate) SetDate(t time.Time) *HistoryUpdate {
|
||||
hu.mutation.SetDate(t)
|
||||
return hu
|
||||
}
|
||||
|
||||
// SetNillableDate sets the "date" field if the given value is not nil.
|
||||
func (hu *HistoryUpdate) SetNillableDate(t *time.Time) *HistoryUpdate {
|
||||
if t != nil {
|
||||
hu.SetDate(*t)
|
||||
}
|
||||
return hu
|
||||
}
|
||||
|
||||
// Mutation returns the HistoryMutation object of the builder.
|
||||
func (hu *HistoryUpdate) Mutation() *HistoryMutation {
|
||||
return hu.mutation
|
||||
}
|
||||
|
||||
// Save executes the query and returns the number of nodes affected by the update operation.
|
||||
func (hu *HistoryUpdate) Save(ctx context.Context) (int, error) {
|
||||
return withHooks(ctx, hu.sqlSave, hu.mutation, hu.hooks)
|
||||
}
|
||||
|
||||
// SaveX is like Save, but panics if an error occurs.
|
||||
func (hu *HistoryUpdate) SaveX(ctx context.Context) int {
|
||||
affected, err := hu.Save(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return affected
|
||||
}
|
||||
|
||||
// Exec executes the query.
|
||||
func (hu *HistoryUpdate) Exec(ctx context.Context) error {
|
||||
_, err := hu.Save(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (hu *HistoryUpdate) ExecX(ctx context.Context) {
|
||||
if err := hu.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// sqlSave builds an UpdateSpec from the recorded mutation and applies it to
// every matching row, returning the number of affected rows. sqlgraph
// errors are translated into this package's NotFoundError/ConstraintError.
func (hu *HistoryUpdate) sqlSave(ctx context.Context) (n int, err error) {
	_spec := sqlgraph.NewUpdateSpec(history.Table, history.Columns, sqlgraph.NewFieldSpec(history.FieldID, field.TypeInt))
	if ps := hu.mutation.predicates; len(ps) > 0 {
		_spec.Predicate = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	// SetField overwrites a column; AddField emits an arithmetic
	// "column = column + value" update recorded by the Add* builder methods.
	if value, ok := hu.mutation.SeriesID(); ok {
		_spec.SetField(history.FieldSeriesID, field.TypeInt, value)
	}
	if value, ok := hu.mutation.AddedSeriesID(); ok {
		_spec.AddField(history.FieldSeriesID, field.TypeInt, value)
	}
	if value, ok := hu.mutation.EpisodeID(); ok {
		_spec.SetField(history.FieldEpisodeID, field.TypeInt, value)
	}
	if value, ok := hu.mutation.AddedEpisodeID(); ok {
		_spec.AddField(history.FieldEpisodeID, field.TypeInt, value)
	}
	if value, ok := hu.mutation.SourceTitle(); ok {
		_spec.SetField(history.FieldSourceTitle, field.TypeString, value)
	}
	if value, ok := hu.mutation.Date(); ok {
		_spec.SetField(history.FieldDate, field.TypeTime, value)
	}
	if n, err = sqlgraph.UpdateNodes(ctx, hu.driver, _spec); err != nil {
		if _, ok := err.(*sqlgraph.NotFoundError); ok {
			err = &NotFoundError{history.Label}
		} else if sqlgraph.IsConstraintError(err) {
			err = &ConstraintError{msg: err.Error(), wrap: err}
		}
		return 0, err
	}
	// Mark the mutation consumed so hooks do not re-run it.
	hu.mutation.done = true
	return n, nil
}
|
||||
|
||||
// HistoryUpdateOne is the builder for updating a single History entity.
|
||||
type HistoryUpdateOne struct {
|
||||
config
|
||||
fields []string
|
||||
hooks []Hook
|
||||
mutation *HistoryMutation
|
||||
}
|
||||
|
||||
// SetSeriesID sets the "series_id" field.
|
||||
func (huo *HistoryUpdateOne) SetSeriesID(i int) *HistoryUpdateOne {
|
||||
huo.mutation.ResetSeriesID()
|
||||
huo.mutation.SetSeriesID(i)
|
||||
return huo
|
||||
}
|
||||
|
||||
// SetNillableSeriesID sets the "series_id" field if the given value is not nil.
|
||||
func (huo *HistoryUpdateOne) SetNillableSeriesID(i *int) *HistoryUpdateOne {
|
||||
if i != nil {
|
||||
huo.SetSeriesID(*i)
|
||||
}
|
||||
return huo
|
||||
}
|
||||
|
||||
// AddSeriesID adds i to the "series_id" field.
|
||||
func (huo *HistoryUpdateOne) AddSeriesID(i int) *HistoryUpdateOne {
|
||||
huo.mutation.AddSeriesID(i)
|
||||
return huo
|
||||
}
|
||||
|
||||
// SetEpisodeID sets the "episode_id" field.
|
||||
func (huo *HistoryUpdateOne) SetEpisodeID(i int) *HistoryUpdateOne {
|
||||
huo.mutation.ResetEpisodeID()
|
||||
huo.mutation.SetEpisodeID(i)
|
||||
return huo
|
||||
}
|
||||
|
||||
// SetNillableEpisodeID sets the "episode_id" field if the given value is not nil.
|
||||
func (huo *HistoryUpdateOne) SetNillableEpisodeID(i *int) *HistoryUpdateOne {
|
||||
if i != nil {
|
||||
huo.SetEpisodeID(*i)
|
||||
}
|
||||
return huo
|
||||
}
|
||||
|
||||
// AddEpisodeID adds i to the "episode_id" field.
|
||||
func (huo *HistoryUpdateOne) AddEpisodeID(i int) *HistoryUpdateOne {
|
||||
huo.mutation.AddEpisodeID(i)
|
||||
return huo
|
||||
}
|
||||
|
||||
// SetSourceTitle sets the "source_title" field.
|
||||
func (huo *HistoryUpdateOne) SetSourceTitle(s string) *HistoryUpdateOne {
|
||||
huo.mutation.SetSourceTitle(s)
|
||||
return huo
|
||||
}
|
||||
|
||||
// SetNillableSourceTitle sets the "source_title" field if the given value is not nil.
|
||||
func (huo *HistoryUpdateOne) SetNillableSourceTitle(s *string) *HistoryUpdateOne {
|
||||
if s != nil {
|
||||
huo.SetSourceTitle(*s)
|
||||
}
|
||||
return huo
|
||||
}
|
||||
|
||||
// SetDate sets the "date" field.
|
||||
func (huo *HistoryUpdateOne) SetDate(t time.Time) *HistoryUpdateOne {
|
||||
huo.mutation.SetDate(t)
|
||||
return huo
|
||||
}
|
||||
|
||||
// SetNillableDate sets the "date" field if the given value is not nil.
|
||||
func (huo *HistoryUpdateOne) SetNillableDate(t *time.Time) *HistoryUpdateOne {
|
||||
if t != nil {
|
||||
huo.SetDate(*t)
|
||||
}
|
||||
return huo
|
||||
}
|
||||
|
||||
// Mutation returns the HistoryMutation object of the builder.
|
||||
func (huo *HistoryUpdateOne) Mutation() *HistoryMutation {
|
||||
return huo.mutation
|
||||
}
|
||||
|
||||
// Where appends a list predicates to the HistoryUpdate builder.
|
||||
func (huo *HistoryUpdateOne) Where(ps ...predicate.History) *HistoryUpdateOne {
|
||||
huo.mutation.Where(ps...)
|
||||
return huo
|
||||
}
|
||||
|
||||
// Select allows selecting one or more fields (columns) of the returned entity.
|
||||
// The default is selecting all fields defined in the entity schema.
|
||||
func (huo *HistoryUpdateOne) Select(field string, fields ...string) *HistoryUpdateOne {
|
||||
huo.fields = append([]string{field}, fields...)
|
||||
return huo
|
||||
}
|
||||
|
||||
// Save executes the query and returns the updated History entity.
|
||||
func (huo *HistoryUpdateOne) Save(ctx context.Context) (*History, error) {
|
||||
return withHooks(ctx, huo.sqlSave, huo.mutation, huo.hooks)
|
||||
}
|
||||
|
||||
// SaveX is like Save, but panics if an error occurs.
|
||||
func (huo *HistoryUpdateOne) SaveX(ctx context.Context) *History {
|
||||
node, err := huo.Save(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
// Exec executes the query on the entity.
|
||||
func (huo *HistoryUpdateOne) Exec(ctx context.Context) error {
|
||||
_, err := huo.Save(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (huo *HistoryUpdateOne) ExecX(ctx context.Context) {
|
||||
if err := huo.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// sqlSave updates a single History row identified by the mutation's ID and
// returns the updated entity. An explicit ID is required; without one the
// update is rejected with a ValidationError before touching the database.
func (huo *HistoryUpdateOne) sqlSave(ctx context.Context) (_node *History, err error) {
	_spec := sqlgraph.NewUpdateSpec(history.Table, history.Columns, sqlgraph.NewFieldSpec(history.FieldID, field.TypeInt))
	id, ok := huo.mutation.ID()
	if !ok {
		return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "History.id" for update`)}
	}
	_spec.Node.ID.Value = id
	if fields := huo.fields; len(fields) > 0 {
		// Caller used Select(...): return only the requested columns, but
		// always include the ID so the entity can be identified.
		_spec.Node.Columns = make([]string, 0, len(fields))
		_spec.Node.Columns = append(_spec.Node.Columns, history.FieldID)
		for _, f := range fields {
			if !history.ValidColumn(f) {
				return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
			}
			if f != history.FieldID {
				_spec.Node.Columns = append(_spec.Node.Columns, f)
			}
		}
	}
	if ps := huo.mutation.predicates; len(ps) > 0 {
		_spec.Predicate = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	// SetField overwrites a column; AddField emits an arithmetic
	// "column = column + value" update recorded by the Add* builder methods.
	if value, ok := huo.mutation.SeriesID(); ok {
		_spec.SetField(history.FieldSeriesID, field.TypeInt, value)
	}
	if value, ok := huo.mutation.AddedSeriesID(); ok {
		_spec.AddField(history.FieldSeriesID, field.TypeInt, value)
	}
	if value, ok := huo.mutation.EpisodeID(); ok {
		_spec.SetField(history.FieldEpisodeID, field.TypeInt, value)
	}
	if value, ok := huo.mutation.AddedEpisodeID(); ok {
		_spec.AddField(history.FieldEpisodeID, field.TypeInt, value)
	}
	if value, ok := huo.mutation.SourceTitle(); ok {
		_spec.SetField(history.FieldSourceTitle, field.TypeString, value)
	}
	if value, ok := huo.mutation.Date(); ok {
		_spec.SetField(history.FieldDate, field.TypeTime, value)
	}
	// The updated row is scanned back into _node (RETURNING-style update).
	_node = &History{config: huo.config}
	_spec.Assign = _node.assignValues
	_spec.ScanValues = _node.scanValues
	if err = sqlgraph.UpdateNode(ctx, huo.driver, _spec); err != nil {
		if _, ok := err.(*sqlgraph.NotFoundError); ok {
			err = &NotFoundError{history.Label}
		} else if sqlgraph.IsConstraintError(err) {
			err = &ConstraintError{msg: err.Error(), wrap: err}
		}
		return nil, err
	}
	// Mark the mutation consumed so hooks do not re-run it.
	huo.mutation.done = true
	return _node, nil
}
|
||||
26
ent/schema/history.go
Normal file
26
ent/schema/history.go
Normal file
@@ -0,0 +1,26 @@
|
||||
package schema
|
||||
|
||||
import (
|
||||
"entgo.io/ent"
|
||||
"entgo.io/ent/schema/field"
|
||||
)
|
||||
|
||||
// History holds the schema definition for the History entity.
|
||||
type History struct {
|
||||
ent.Schema
|
||||
}
|
||||
|
||||
// Fields of the History.
//
// A History row records a download event: which series/episode it was for,
// the release (source) title that was grabbed, and when it happened.
func (History) Fields() []ent.Field {
	return []ent.Field{
		// NOTE(review): series_id/episode_id look like references to the
		// Series/Episode entities — consider modeling them as edges so ent
		// enforces referential integrity; confirm with the schema owner.
		field.Int("series_id"),
		field.Int("episode_id"),
		field.String("source_title"),
		field.Time("date"),
	}
}
|
||||
|
||||
// Edges of the History.
// History declares no graph edges; the series_id/episode_id fields above
// are stored as plain integers.
func (History) Edges() []ent.Edge {
	return nil
}
|
||||
25
pkg/downloader/torrent.go
Normal file
25
pkg/downloader/torrent.go
Normal file
@@ -0,0 +1,25 @@
|
||||
package downloader
|
||||
|
||||
import (
|
||||
"github.com/anacrolix/torrent"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
|
||||
|
||||
func DownloadByMagnet(magnet string, dir string) (*torrent.Torrent, error) {
|
||||
c, err := torrent.NewClient(nil)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "new torrent")
|
||||
}
|
||||
defer c.Close()
|
||||
t, err := c.AddMagnet(magnet)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "add torrent")
|
||||
}
|
||||
|
||||
<-t.GotInfo()
|
||||
t.DownloadAll()
|
||||
c.WaitAll()
|
||||
return t, nil
|
||||
}
|
||||
206
pkg/jackett/category.go
Normal file
206
pkg/jackett/category.go
Normal file
@@ -0,0 +1,206 @@
|
||||
package jackett
|
||||
|
||||
import "fmt"
|
||||
|
||||
// Category pairs a Newznab/Torznab category ID with its human-readable name.
type Category struct {
	ID   int
	Name string
}

// String implements fmt.Stringer, rendering the category as "Name[ID]".
func (c Category) String() string {
	return fmt.Sprintf("%v[%v]", c.Name, c.ID)
}
|
||||
|
||||
const (
|
||||
CustomCategoryOffset = 100000
|
||||
)
|
||||
|
||||
// Categories from the Newznab spec
|
||||
// https://github.com/nZEDb/nZEDb/blob/0.x/docs/newznab_api_specification.txt#L627
|
||||
var (
|
||||
CategoryOther = Category{0, "Other"}
|
||||
CategoryOther_Misc = Category{10, "Other/Misc"}
|
||||
CategoryOther_Hashed = Category{20, "Other/Hashed"}
|
||||
CategoryConsole = Category{1000, "Console"}
|
||||
CategoryConsole_NDS = Category{1010, "Console/NDS"}
|
||||
CategoryConsole_PSP = Category{1020, "Console/PSP"}
|
||||
CategoryConsole_Wii = Category{1030, "Console/Wii"}
|
||||
CategoryConsole_XBOX = Category{1040, "Console/Xbox"}
|
||||
CategoryConsole_XBOX360 = Category{1050, "Console/Xbox360"}
|
||||
CategoryConsole_WiiwareVC = Category{1060, "Console/Wiiware/V"}
|
||||
CategoryConsole_XBOX360DLC = Category{1070, "Console/Xbox360"}
|
||||
CategoryConsole_PS3 = Category{1080, "Console/PS3"}
|
||||
CategoryConsole_Other = Category{1999, "Console/Other"}
|
||||
CategoryConsole_3DS = Category{1110, "Console/3DS"}
|
||||
CategoryConsole_PSVita = Category{1120, "Console/PS Vita"}
|
||||
CategoryConsole_WiiU = Category{1130, "Console/WiiU"}
|
||||
CategoryConsole_XBOXOne = Category{1140, "Console/XboxOne"}
|
||||
CategoryConsole_PS4 = Category{1180, "Console/PS4"}
|
||||
CategoryMovies = Category{2000, "Movies"}
|
||||
CategoryMovies_Foreign = Category{2010, "Movies/Foreign"}
|
||||
CategoryMovies_Other = Category{2020, "Movies/Other"}
|
||||
CategoryMovies_SD = Category{2030, "Movies/SD"}
|
||||
CategoryMovies_HD = Category{2040, "Movies/HD"}
|
||||
CategoryMovies_3D = Category{2050, "Movies/3D"}
|
||||
CategoryMovies_BluRay = Category{2060, "Movies/BluRay"}
|
||||
CategoryMovies_DVD = Category{2070, "Movies/DVD"}
|
||||
CategoryMovies_WEBDL = Category{2080, "Movies/WEBDL"}
|
||||
CategoryAudio = Category{3000, "Audio"}
|
||||
CategoryAudio_MP3 = Category{3010, "Audio/MP3"}
|
||||
CategoryAudio_Video = Category{3020, "Audio/Video"}
|
||||
CategoryAudio_Audiobook = Category{3030, "Audio/Audiobook"}
|
||||
CategoryAudio_Lossless = Category{3040, "Audio/Lossless"}
|
||||
CategoryAudio_Other = Category{3999, "Audio/Other"}
|
||||
CategoryAudio_Foreign = Category{3060, "Audio/Foreign"}
|
||||
CategoryPC = Category{4000, "PC"}
|
||||
CategoryPC_0day = Category{4010, "PC/0day"}
|
||||
CategoryPC_ISO = Category{4020, "PC/ISO"}
|
||||
CategoryPC_Mac = Category{4030, "PC/Mac"}
|
||||
CategoryPC_PhoneOther = Category{4040, "PC/Phone-Other"}
|
||||
CategoryPC_Games = Category{4050, "PC/Games"}
|
||||
CategoryPC_PhoneIOS = Category{4060, "PC/Phone-IOS"}
|
||||
CategoryPC_PhoneAndroid = Category{4070, "PC/Phone-Android"}
|
||||
CategoryTV = Category{5000, "TV"}
|
||||
CategoryTV_WEBDL = Category{5010, "TV/WEB-DL"}
|
||||
CategoryTV_FOREIGN = Category{5020, "TV/Foreign"}
|
||||
CategoryTV_SD = Category{5030, "TV/SD"}
|
||||
CategoryTV_HD = Category{5040, "TV/HD"}
|
||||
CategoryTV_Other = Category{5999, "TV/Other"}
|
||||
CategoryTV_Sport = Category{5060, "TV/Sport"}
|
||||
CategoryTV_Anime = Category{5070, "TV/Anime"}
|
||||
CategoryTV_Documentary = Category{5080, "TV/Documentary"}
|
||||
CategoryXXX = Category{6000, "XXX"}
|
||||
CategoryXXX_DVD = Category{6010, "XXX/DVD"}
|
||||
CategoryXXX_WMV = Category{6020, "XXX/WMV"}
|
||||
CategoryXXX_XviD = Category{6030, "XXX/XviD"}
|
||||
CategoryXXX_x264 = Category{6040, "XXX/x264"}
|
||||
CategoryXXX_Other = Category{6999, "XXX/Other"}
|
||||
CategoryXXX_Imageset = Category{6060, "XXX/Imageset"}
|
||||
CategoryXXX_Packs = Category{6070, "XXX/Packs"}
|
||||
CategoryBooks = Category{7000, "Books"}
|
||||
CategoryBooks_Magazines = Category{7010, "Books/Magazines"}
|
||||
CategoryBooks_Ebook = Category{7020, "Books/Ebook"}
|
||||
CategoryBooks_Comics = Category{7030, "Books/Comics"}
|
||||
CategoryBooks_Technical = Category{7040, "Books/Technical"}
|
||||
CategoryBooks_Foreign = Category{7060, "Books/Foreign"}
|
||||
CategoryBooks_Unknown = Category{7999, "Books/Unknown"}
|
||||
)
|
||||
|
||||
var AllCategories = Categories{
|
||||
CategoryOther,
|
||||
CategoryOther_Misc,
|
||||
CategoryOther_Hashed,
|
||||
CategoryConsole,
|
||||
CategoryConsole_NDS,
|
||||
CategoryConsole_PSP,
|
||||
CategoryConsole_Wii,
|
||||
CategoryConsole_XBOX,
|
||||
CategoryConsole_XBOX360,
|
||||
CategoryConsole_WiiwareVC,
|
||||
CategoryConsole_XBOX360DLC,
|
||||
CategoryConsole_PS3,
|
||||
CategoryConsole_Other,
|
||||
CategoryConsole_3DS,
|
||||
CategoryConsole_PSVita,
|
||||
CategoryConsole_WiiU,
|
||||
CategoryConsole_XBOXOne,
|
||||
CategoryConsole_PS4,
|
||||
CategoryMovies,
|
||||
CategoryMovies_Foreign,
|
||||
CategoryMovies_Other,
|
||||
CategoryMovies_SD,
|
||||
CategoryMovies_HD,
|
||||
CategoryMovies_3D,
|
||||
CategoryMovies_BluRay,
|
||||
CategoryMovies_DVD,
|
||||
CategoryMovies_WEBDL,
|
||||
CategoryAudio,
|
||||
CategoryAudio_MP3,
|
||||
CategoryAudio_Video,
|
||||
CategoryAudio_Audiobook,
|
||||
CategoryAudio_Lossless,
|
||||
CategoryAudio_Other,
|
||||
CategoryAudio_Foreign,
|
||||
CategoryPC,
|
||||
CategoryPC_0day,
|
||||
CategoryPC_ISO,
|
||||
CategoryPC_Mac,
|
||||
CategoryPC_PhoneOther,
|
||||
CategoryPC_Games,
|
||||
CategoryPC_PhoneIOS,
|
||||
CategoryPC_PhoneAndroid,
|
||||
CategoryTV,
|
||||
CategoryTV_WEBDL,
|
||||
CategoryTV_FOREIGN,
|
||||
CategoryTV_SD,
|
||||
CategoryTV_HD,
|
||||
CategoryTV_Other,
|
||||
CategoryTV_Sport,
|
||||
CategoryTV_Anime,
|
||||
CategoryTV_Documentary,
|
||||
CategoryXXX,
|
||||
CategoryXXX_DVD,
|
||||
CategoryXXX_WMV,
|
||||
CategoryXXX_XviD,
|
||||
CategoryXXX_x264,
|
||||
CategoryXXX_Other,
|
||||
CategoryXXX_Imageset,
|
||||
CategoryXXX_Packs,
|
||||
CategoryBooks,
|
||||
CategoryBooks_Magazines,
|
||||
CategoryBooks_Ebook,
|
||||
CategoryBooks_Comics,
|
||||
CategoryBooks_Technical,
|
||||
CategoryBooks_Foreign,
|
||||
CategoryBooks_Unknown,
|
||||
}
|
||||
|
||||
func ParentCategory(c Category) Category {
|
||||
switch {
|
||||
case c.ID < 1000:
|
||||
return CategoryOther
|
||||
case c.ID < 2000:
|
||||
return CategoryConsole
|
||||
case c.ID < 3000:
|
||||
return CategoryMovies
|
||||
case c.ID < 4000:
|
||||
return CategoryAudio
|
||||
case c.ID < 5000:
|
||||
return CategoryPC
|
||||
case c.ID < 6000:
|
||||
return CategoryTV
|
||||
case c.ID < 7000:
|
||||
return CategoryXXX
|
||||
case c.ID < 8000:
|
||||
return CategoryBooks
|
||||
}
|
||||
return CategoryOther
|
||||
}
|
||||
|
||||
type Categories []Category
|
||||
|
||||
func (slice Categories) Subset(ids ...int) Categories {
|
||||
cats := Categories{}
|
||||
|
||||
for _, cat := range AllCategories {
|
||||
for _, id := range ids {
|
||||
if cat.ID == id {
|
||||
cats = append(cats, cat)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return cats
|
||||
}
|
||||
|
||||
func (slice Categories) Len() int {
|
||||
return len(slice)
|
||||
}
|
||||
|
||||
func (slice Categories) Less(i, j int) bool {
|
||||
return slice[i].ID < slice[j].ID
|
||||
}
|
||||
|
||||
func (slice Categories) Swap(i, j int) {
|
||||
slice[i], slice[j] = slice[j], slice[i]
|
||||
}
|
||||
168
pkg/jackett/jackett.go
Normal file
168
pkg/jackett/jackett.go
Normal file
@@ -0,0 +1,168 @@
|
||||
package jackett
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
|
||||
"golang.org/x/net/context"
|
||||
)
|
||||
|
||||
// Package-level fallbacks consumed by NewJackett when the caller leaves
// the corresponding Settings field empty.
var (
	apiURL string
	apiKey string
)

// Settings configures a Jackett client. Any zero field is filled in by
// NewJackett (see its documentation).
type Settings struct {
	ApiURL string
	ApiKey string
	Client *http.Client
}
|
||||
|
||||
type FetchRequest struct {
|
||||
Query string
|
||||
Trackers []string
|
||||
Categories []uint
|
||||
}
|
||||
|
||||
type FetchResponse struct {
|
||||
Results []Result
|
||||
Indexers []Indexer
|
||||
}
|
||||
|
||||
// jackettTime wraps time.Time to tolerate the slightly non-standard
// timestamp formats Jackett emits.
type jackettTime struct {
	time.Time
}

// UnmarshalJSON accepts full RFC3339 timestamps as well as Jackett's
// zone-less "2006-01-02T15:04:05" form (assumed UTC — TODO confirm against
// a live Jackett instance). The zero date "0001-01-01T00:00:00" maps to
// the zero time.
func (jt *jackettTime) UnmarshalJSON(b []byte) error {
	s := strings.Trim(string(b), `"`)

	switch {
	case s == "0001-01-01T00:00:00":
		// Zero date: leave jt.Time at its zero value.
		return nil
	case len(s) == 19:
		// Zone-less form; append "Z" so it parses as RFC3339.
		parsed, err := time.Parse(time.RFC3339, s+"Z")
		jt.Time = parsed
		return err
	default:
		parsed, err := time.Parse(time.RFC3339, s)
		jt.Time = parsed
		return err
	}
}
|
||||
|
||||
type Result struct {
|
||||
BannerUrl string
|
||||
BlackholeLink string
|
||||
Category []uint
|
||||
CategoryDesc string
|
||||
Comments string
|
||||
Description string
|
||||
DownloadVolumeFactor float32
|
||||
Files uint
|
||||
FirstSeen jackettTime
|
||||
Gain float32
|
||||
Grabs uint
|
||||
Guid string
|
||||
Imdb uint
|
||||
InfoHash string
|
||||
Link string
|
||||
MagnetUri string
|
||||
MinimumRatio float32
|
||||
MinimumSeedTime uint
|
||||
Peers uint
|
||||
PublishDate jackettTime
|
||||
RageID uint
|
||||
Seeders uint
|
||||
Size uint
|
||||
TMDb uint
|
||||
TVDBId uint
|
||||
Title string
|
||||
Tracker string
|
||||
TrackerId string
|
||||
UploadVolumeFactor float32
|
||||
}
|
||||
|
||||
type Indexer struct {
|
||||
Error string
|
||||
ID string
|
||||
Name string
|
||||
Results uint
|
||||
Status uint
|
||||
}
|
||||
|
||||
type Jackett struct {
|
||||
settings *Settings
|
||||
}
|
||||
|
||||
func NewJackett(s *Settings) *Jackett {
|
||||
if s.ApiURL == "" && apiURL != "" {
|
||||
s.ApiURL = apiURL
|
||||
}
|
||||
if s.ApiKey == "" && apiKey != "" {
|
||||
s.ApiKey = apiKey
|
||||
}
|
||||
if s.Client == nil {
|
||||
s.Client = http.DefaultClient
|
||||
}
|
||||
return &Jackett{settings: s}
|
||||
}
|
||||
|
||||
func (j *Jackett) generateFetchURL(fr *FetchRequest) (string, error) {
|
||||
u, err := url.Parse(j.settings.ApiURL)
|
||||
if err != nil {
|
||||
return "", errors.Wrapf(err, "failed to parse apiURL %q", j.settings.ApiURL)
|
||||
}
|
||||
u.Path = "/api/v2.0/indexers/all/results"
|
||||
q := u.Query()
|
||||
q.Set("apikey", j.settings.ApiKey)
|
||||
for _, t := range fr.Trackers {
|
||||
q.Add("Tracker[]", t)
|
||||
}
|
||||
for _, c := range fr.Categories {
|
||||
q.Add("Category[]", fmt.Sprintf("%v", c))
|
||||
}
|
||||
if fr.Query != "" {
|
||||
q.Add("Query", fr.Query)
|
||||
}
|
||||
u.RawQuery = q.Encode()
|
||||
return u.String(), nil
|
||||
}
|
||||
|
||||
func (j *Jackett) Fetch(ctx context.Context, fr *FetchRequest) (*FetchResponse, error) {
|
||||
u, err := j.generateFetchURL(fr)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to generate fetch url")
|
||||
}
|
||||
req, err := http.NewRequestWithContext(ctx, "GET", u, nil)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to make fetch request")
|
||||
}
|
||||
res, err := j.settings.Client.Do(req)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to invoke fetch request")
|
||||
}
|
||||
defer res.Body.Close()
|
||||
data, err := ioutil.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to read fetch data")
|
||||
}
|
||||
var fres FetchResponse
|
||||
err = json.Unmarshal(data, &fres)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to unmarshal fetch data with url=%v and data=%v", u, string(data))
|
||||
}
|
||||
return &fres, nil
|
||||
}
|
||||
|
||||
func SearchTvWithJacket(q string, url, apiKey string) (*FetchResponse, error) {
|
||||
j := NewJackett(&Settings{
|
||||
ApiURL: url,
|
||||
ApiKey: apiKey,
|
||||
})
|
||||
resp, err := j.Fetch(context.TODO(), &FetchRequest{
|
||||
//Categories: []uint{uint(CategoryTV.ID)},
|
||||
Query: q,
|
||||
})
|
||||
return resp, err
|
||||
}
|
||||
134
pkg/torznab/torznab.go
Normal file
134
pkg/torznab/torznab.go
Normal file
@@ -0,0 +1,134 @@
|
||||
package torznab
|
||||
|
||||
import (
|
||||
"encoding/xml"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"polaris/log"
|
||||
"strconv"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
type Response struct {
|
||||
XMLName xml.Name `xml:"rss"`
|
||||
Text string `xml:",chardata"`
|
||||
Version string `xml:"version,attr"`
|
||||
Atom string `xml:"atom,attr"`
|
||||
Torznab string `xml:"torznab,attr"`
|
||||
Channel struct {
|
||||
Text string `xml:",chardata"`
|
||||
Link struct {
|
||||
Text string `xml:",chardata"`
|
||||
Href string `xml:"href,attr"`
|
||||
Rel string `xml:"rel,attr"`
|
||||
Type string `xml:"type,attr"`
|
||||
} `xml:"link"`
|
||||
Title string `xml:"title"`
|
||||
Description string `xml:"description"`
|
||||
Language string `xml:"language"`
|
||||
Category string `xml:"category"`
|
||||
Item []Item `xml:"item"`
|
||||
} `xml:"channel"`
|
||||
}
|
||||
|
||||
// Item is a single release entry in a torznab RSS channel.
type Item struct {
	Text           string `xml:",chardata"`
	Title          string `xml:"title"`
	Guid           string `xml:"guid"`
	Jackettindexer struct {
		Text string `xml:",chardata"`
		ID   string `xml:"id,attr"`
	} `xml:"jackettindexer"`
	Type        string   `xml:"type"`
	Comments    string   `xml:"comments"`
	PubDate     string   `xml:"pubDate"`
	Size        string   `xml:"size"`
	Description string   `xml:"description"`
	Link        string   `xml:"link"`
	Category    []string `xml:"category"`
	Enclosure   struct {
		Text   string `xml:",chardata"`
		URL    string `xml:"url,attr"`
		Length string `xml:"length,attr"`
		Type   string `xml:"type,attr"`
	} `xml:"enclosure"`
	// Attr carries the torznab extension attributes (seeders, peers, ...).
	Attr []struct {
		Text  string `xml:",chardata"`
		Name  string `xml:"name,attr"`
		Value string `xml:"value,attr"`
	} `xml:"attr"`
}

// GetAttr returns the value of the torznab attribute named key, or the
// empty string when the attribute is absent.
func (i *Item) GetAttr(key string) string {
	for _, attr := range i.Attr {
		if attr.Name == key {
			return attr.Value
		}
	}
	return ""
}
|
||||
func (r *Response) ToResults() []Result {
|
||||
var res []Result
|
||||
for _, item := range r.Channel.Item {
|
||||
r := Result{
|
||||
Name: item.Title,
|
||||
Magnet: item.Link,
|
||||
Size: mustAtoI(item.Size),
|
||||
Seeders: mustAtoI(item.GetAttr("seeders")),
|
||||
Peers: mustAtoI(item.GetAttr("peers")),
|
||||
Category: mustAtoI(item.GetAttr("category")),
|
||||
Source: r.Channel.Title,
|
||||
}
|
||||
res = append(res, r)
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
func mustAtoI(key string) int {
|
||||
i, err := strconv.Atoi(key)
|
||||
if err != nil {
|
||||
log.Errorf("must atoi error: %v", err)
|
||||
panic(err)
|
||||
}
|
||||
return i
|
||||
}
|
||||
func Search(torznabUrl, api, keyWord string) ([]Result, error) {
|
||||
req, err := http.NewRequest(http.MethodGet, torznabUrl, nil)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "new request")
|
||||
}
|
||||
var q = url.Values{}
|
||||
q.Add("apikey", api)
|
||||
q.Add("t", "search")
|
||||
q.Add("q", keyWord)
|
||||
req.URL.RawQuery = q.Encode()
|
||||
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "do http")
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
data, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "read http body")
|
||||
}
|
||||
var res Response
|
||||
err = xml.Unmarshal(data, &res)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "json unmarshal")
|
||||
}
|
||||
return res.ToResults(), nil
|
||||
}
|
||||
|
||||
// Result is a normalized torznab search hit.
type Result struct {
	Name     string // release title
	Magnet   string // download link (magnet or torrent URL from <link>)
	Size     int    // size in bytes, as reported by the feed
	Seeders  int
	Peers    int
	Category int
	Source   string // channel title of the indexer that produced the hit
}
|
||||
71
server/resources.go
Normal file
71
server/resources.go
Normal file
@@ -0,0 +1,71 @@
|
||||
package server
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"polaris/db"
|
||||
"polaris/log"
|
||||
"polaris/pkg/downloader"
|
||||
"polaris/pkg/torznab"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
func (s *Server) searchTvWithTorznab(name string, season, episode int) []torznab.Result {
|
||||
q := fmt.Sprintf("%s S%02dE%02d", name, season, episode)
|
||||
|
||||
var res []torznab.Result
|
||||
allTorznab := s.db.GetAllTorznabInfo()
|
||||
for name, setting := range allTorznab {
|
||||
resp, err := torznab.Search(setting.URL, setting.ApiKey, q)
|
||||
if err != nil {
|
||||
log.Errorf("search %s error: %v", name, err)
|
||||
continue
|
||||
}
|
||||
res = append(res, resp...)
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
// addTorznabIn is the JSON request body for AddTorznabInfo.
type addTorznabIn struct {
	Name   string `json:"name"`    // display name of the indexer
	URL    string `json:"url"`     // torznab endpoint URL
	ApiKey string `json:"api_key"` // indexer API key
}
|
||||
|
||||
func (s *Server) AddTorznabInfo(c *gin.Context) (interface{}, error) {
|
||||
var in addTorznabIn
|
||||
if err := c.ShouldBindJSON(&in); err != nil {
|
||||
return nil, errors.Wrap(err, "bind json")
|
||||
}
|
||||
err := s.db.SaveTorznabInfo(in.Name, db.TorznabSetting{
|
||||
URL: in.URL,
|
||||
ApiKey: in.ApiKey,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "add ")
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// searchAndDownloadIn is the JSON request body for SearchAndDownload.
type searchAndDownloadIn struct {
	Title   string `json:"title"`   // show title to search for
	Season  int    `json:"season"`  // season number (1-based)
	Episode int    `json:"episode"` // episode number (1-based)
}
|
||||
|
||||
func (s *Server) SearchAndDownload(c *gin.Context) (interface{}, error) {
|
||||
var in searchAndDownloadIn
|
||||
if err := c.ShouldBindJSON(&in); err != nil {
|
||||
return nil, errors.Wrap(err, "bind json")
|
||||
}
|
||||
res := s.searchTvWithTorznab(in.Title, in.Season, in.Episode)
|
||||
r1 := res[0]
|
||||
t, err := downloader.DownloadByMagnet(r1.Magnet, "~")
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "download torrent")
|
||||
}
|
||||
s.tasks[r1.Name] = t
|
||||
log.Errorf("success add %s to download task", r1.Name)
|
||||
return nil, nil
|
||||
}
|
||||
Reference in New Issue
Block a user