Compare commits

...

164 Commits

Author SHA1 Message Date
Simon Ding
2683c5dbf2 fix: tv name parse 2024-09-27 21:58:34 +08:00
Simon Ding
b717885270 feat: change status color 2024-09-27 15:19:45 +08:00
Simon Ding
4e457e99b9 fix: context 2024-09-27 15:03:00 +08:00
Simon Ding
ecfe31ea45 fix: tv date not exists and folder name suggestion 2024-09-26 09:45:36 +08:00
Simon Ding
89104785d7 fix: monitor lastest season 2024-09-23 23:12:06 +08:00
Simon Ding
f4ccc69b50 feat: trim white space 2024-09-23 22:44:15 +08:00
Simon Ding
40c6e2df5c fix: panic when torrent not exist 2024-09-21 21:47:26 +08:00
Simon Ding
37dfb0fe94 feat: add check 2024-09-21 21:23:47 +08:00
Simon Ding
9968f9f225 feat: add ability to change folder naming convention 2024-09-20 20:03:05 +08:00
Simon Ding
f5c977224b fix 2024-09-20 14:27:49 +08:00
Simon Ding
8af3ffccd3 fix: double downloading status problem 2024-09-20 11:40:22 +08:00
Simon Ding
c535dfd714 feat: do not use lowercase 2024-09-20 11:11:27 +08:00
Simon Ding
f696b78260 feat: default monitor lastest season all episodes 2024-09-20 11:03:01 +08:00
Simon Ding
4f3e3e399d feat: transmission use hash instead of id 2024-09-20 10:45:10 +08:00
Simon Ding
e4e3c9a851 fix: suggestted name when name_cn is in english 2024-09-07 13:45:27 +08:00
Simon Ding
bf608f933d feat: ui to manaul trigger cron jobs 2024-09-07 13:18:12 +08:00
Simon Ding
5923fc73e1 fix: new import list display 2024-09-04 16:51:49 +08:00
Simon Ding
c2d9ccfd4c chore: update ui details 2024-09-04 16:20:33 +08:00
Simon Ding
5d4429bf7c chore: update watchlist 2024-09-04 15:55:15 +08:00
Simon Ding
e4c111ac2a fix: cron jobs 2024-09-04 13:18:14 +08:00
Simon Ding
3e5e20e933 fix: naming 2024-09-03 10:04:56 +08:00
Simon Ding
ba1be8f279 feat: cron trigger api and fix import lists 2024-09-03 09:46:23 +08:00
Simon Ding
361556228b feat: add import list & calendar 2024-09-02 23:47:19 +08:00
Simon Ding
ca414a73ff fix: logout 2024-09-02 14:07:47 +08:00
Simon Ding
32b595e116 fix: change token name and same site lax rule 2024-09-02 13:59:36 +08:00
Simon Ding
b12bbd2ad9 fix: http auth 2024-09-02 13:45:49 +08:00
Simon Ding
60110f4ca6 feat: add db extra fields 2024-08-24 21:03:08 +08:00
Simon Ding
b7ca02429c chore: jav name suggest 2024-08-19 17:45:34 +08:00
Simon Ding
ff63084014 feat: better jav search support 2024-08-19 17:39:37 +08:00
Simon Ding
821d6859ff feat: option to enable adult content 2024-08-19 16:39:34 +08:00
Simon Ding
10e6e99990 update readme 2024-08-16 11:49:23 +08:00
Simon Ding
23a5997814 fix: episode status 2024-08-13 11:27:54 +08:00
Simon Ding
b487c81865 feat: cache errored request 2024-08-13 11:05:46 +08:00
Simon Ding
32914344d1 fix: remove seeding torrent status not right 2024-08-13 10:57:04 +08:00
Simon Ding
644c9ed228 feat: marshal indented 2024-08-13 10:41:40 +08:00
Simon Ding
d3ad80380f feat: nfo support frontend 2024-08-13 10:17:46 +08:00
Simon Ding
19c6308a81 feat: nfo support backend 2024-08-13 10:05:38 +08:00
Simon Ding
7017f32fe3 feat: nfo support backend 2024-08-13 10:02:34 +08:00
Simon Ding
02a23f13f9 fix: mkdir 2024-08-13 00:11:27 +08:00
Simon Ding
cc211a89a4 fix: dir not exist 2024-08-12 23:06:51 +08:00
Simon Ding
4800e6c79d chore: update 2024-08-12 22:57:21 +08:00
Simon Ding
b5f0b28c61 fix 2024-08-12 22:44:36 +08:00
Simon Ding
081338df24 feat: code refactor and support season pack write .plexmatch file 2024-08-12 22:19:57 +08:00
Simon Ding
9632ca45b3 fix: match logic 2024-08-12 20:59:31 +08:00
Simon Ding
b948bff497 chore: if imdbid match no need check name 2024-08-12 19:02:43 +08:00
Simon Ding
29383cf75c feat: should match imdbid if present 2024-08-12 18:47:38 +08:00
Simon Ding
57ec0b9eb9 fix: download series all 2024-08-12 18:13:34 +08:00
Simon Ding
0cce4ffee0 add button to appbar 2024-08-12 14:53:23 +08:00
Simon Ding
5c01c45068 feat: fliter audios 2024-08-12 14:23:20 +08:00
Simon Ding
712bf84c90 feat: add app icon 2024-08-12 14:23:09 +08:00
Simon Ding
fdb63a8459 fix 2024-08-12 10:28:28 +08:00
Simon Ding
990d9dab08 update image 2024-08-12 10:27:20 +08:00
Simon Ding
da863588e4 feat: show snakebar 2024-08-12 10:23:28 +08:00
Simon Ding
09ff67fef7 feat: download per media feature 2024-08-12 10:16:36 +08:00
Simon Ding
3c37948798 ui: change icon 2024-08-11 23:42:43 +08:00
Simon Ding
6fd39d818c feat: better seeding status 2024-08-11 23:09:34 +08:00
Simon Ding
a0e211c328 fix: activity 2024-08-11 22:44:52 +08:00
Simon Ding
27d8b1672a feat: show seed as active 2024-08-11 22:40:38 +08:00
Simon Ding
349e394e8e chore: updates 2024-08-11 22:06:54 +08:00
Simon Ding
620f085ca5 add log 2024-08-11 20:54:12 +08:00
Simon Ding
5b70badb50 feat: add seed ratio display 2024-08-11 20:41:26 +08:00
Simon Ding
5c6ac2c430 fix: movie target dir 2024-08-11 20:19:51 +08:00
Simon Ding
365cfddf8f change icon 2024-08-11 19:20:23 +08:00
Simon Ding
6c26812b92 feat: filter resources that is qiangban 2024-08-11 19:18:27 +08:00
Simon Ding
0057a75a95 feat: find season pack first 2024-08-11 18:06:50 +08:00
Simon Ding
f110f257d4 code refactor and add season pack size limit 2024-08-11 17:40:01 +08:00
Simon Ding
93e8e78591 ui: improve external link display 2024-08-10 17:55:56 +08:00
Simon Ding
9ff12cd86b fix: movie year match 2024-08-10 17:06:33 +08:00
Simon Ding
fd2f4b140f refactor: download api 2024-08-10 16:46:49 +08:00
Simon Ding
4607af6982 feat: search original name 2024-08-10 16:46:19 +08:00
Simon Ding
984bebcfe0 ui: fine tune 2024-08-10 15:23:08 +08:00
Simon Ding
d31abd59ad chore: ui improvement 2024-08-10 15:05:18 +08:00
Simon Ding
e0ad71291c fix width 2024-08-10 11:23:17 +08:00
Simon Ding
8ecc9393cf fix: width 126->120 2024-08-10 11:16:17 +08:00
Simon Ding
b62e0e9bfd feat: small screen 2024-08-10 11:06:29 +08:00
Simon Ding
1391f55f44 feat: small screen 2024-08-10 10:52:48 +08:00
Simon Ding
0c709ee517 feat: detail card fit small screen 2024-08-10 10:45:18 +08:00
Simon Ding
806d821388 feat: better support for small screen 2024-08-10 10:35:57 +08:00
Simon Ding
829043bf28 fix: naming suggestion 2024-08-09 20:40:38 +08:00
Simon Ding
66ab418054 feat: remove name extras characters 2024-08-09 19:47:58 +08:00
Simon Ding
5fe40cc64b feat: add size to activity 2024-08-09 19:00:40 +08:00
Simon Ding
8f6f26f00e refactor: activity list 2024-08-08 19:23:23 +08:00
Simon Ding
ee0bee2b06 fix: formatting 2024-08-08 14:20:20 +08:00
Simon Ding
1bb16a8a66 feat: imdbid support 2024-08-08 14:10:26 +08:00
Simon Ding
d746032114 fix: result ordering 2024-08-08 13:40:07 +08:00
Simon Ding
b34e39889c feat: ui improvement 2024-08-08 10:56:03 +08:00
Simon Ding
64e98647a8 update go.mod 2024-08-08 00:56:49 +08:00
Simon Ding
f91c91e0b1 chore: main page ui update 2024-08-07 23:36:41 +08:00
Simon Ding
f1aaa06d05 chore: update new flutter version 2024-08-07 23:13:36 +08:00
Simon Ding
e8a38aa6f8 chore: ui update 2024-08-07 22:55:24 +08:00
Simon Ding
7e88533ea2 chore: update storage display 2024-08-07 14:12:47 +08:00
Simon Ding
05698f4047 fix: size limiter 2024-08-07 13:37:39 +08:00
Simon Ding
1daad0c236 fix size limiter 2024-08-07 13:27:41 +08:00
Simon Ding
86c8163f9c feat: default select first storage 2024-08-07 13:22:55 +08:00
Simon Ding
78ab8cc8e6 feat: add size display 2024-08-07 13:06:37 +08:00
Simon Ding
1390277b43 feat: second confirmation on deletion 2024-08-07 12:48:15 +08:00
Simon Ding
1aa3dca2c6 update 2024-08-07 11:20:21 +08:00
Simon Ding
f48b3c657e feat: change cache implementation 2024-08-07 11:07:10 +08:00
Simon Ding
d8d570f1b2 feat: change db 2024-08-07 10:46:30 +08:00
Simon Ding
bd385d4f85 feat: add simple cache, due to jackett poor performance 2024-08-07 10:42:12 +08:00
Simon Ding
466596345d feat: edit media details 2024-08-06 23:00:56 +08:00
Simon Ding
8ab33f3d54 ui refactor 2024-08-04 10:54:47 +08:00
Simon Ding
4d3b26135c fix: ui alignment 2024-08-04 10:22:47 +08:00
Simon Ding
56d5cdb2bf refactor ui resource list 2024-08-03 23:05:17 +08:00
Simon Ding
6f80da779b fix: download client id 2024-08-03 17:12:20 +08:00
Simon Ding
5fef156052 fix: indexid 2024-08-03 17:10:49 +08:00
Simon Ding
eab3a6ca2b try fix seed ratio 2024-08-03 16:47:58 +08:00
Simon Ding
ffa5c37c4c fix: name matching 2024-08-03 15:03:47 +08:00
Simon Ding
241e30152b feat: file size limiter 2024-08-03 12:31:53 +08:00
Simon Ding
16216fcc4f feat: change single episode monitoring status 2024-08-03 10:46:52 +08:00
Simon Ding
578b6a9d78 feat: proxy only affects tmdb 2024-08-03 09:54:23 +08:00
Simon Ding
f4da80c845 add testcases & add parse condition 2024-08-02 21:04:34 +08:00
Simon Ding
5a9acd3e6e ui: improve tv display 2024-08-02 19:10:53 +08:00
Simon Ding
8bfa8f84b9 fix monitor 2024-08-02 14:56:14 +08:00
Simon Ding
5b0b2ce5b0 fix: status 2024-08-02 14:39:01 +08:00
Simon Ding
b24c1a1501 ui: add monitored field 2024-08-02 14:06:44 +08:00
Simon Ding
aa320c6dcb fix: monitored 2024-08-02 13:06:38 +08:00
Simon Ding
5132714247 feat: change method to monitor episodes 2024-08-02 12:52:54 +08:00
Simon Ding
3aeecac4fb updates 2024-08-02 12:19:53 +08:00
Simon Ding
7f8c613a65 fix number formats 2024-08-02 11:14:21 +08:00
Simon Ding
c787d71fbd code refactor 2024-08-02 10:08:26 +08:00
Simon Ding
c28e16805e fix: empty list 2024-08-01 20:34:23 +08:00
Simon Ding
fc3d3878bc feat: disable indexer 2024-08-01 20:12:42 +08:00
Simon Ding
e26e86a63f feat: implement seed ratio check logic 2024-08-01 19:52:40 +08:00
Simon Ding
408ff163ef feat: improve support for pt 2024-08-01 19:12:14 +08:00
Simon Ding
35d299b60c feat: improve indexer setting 2024-08-01 17:36:40 +08:00
Simon Ding
6e002b1198 fix: add defaults 2024-08-01 13:05:05 +08:00
Simon Ding
7508a264a6 chore: ci update 2024-08-01 09:42:37 +08:00
Simon Ding
0022c9dad5 fix: umask 2024-08-01 09:39:38 +08:00
Simon Ding
654d8b50b4 chore: add more screenshot 2024-08-01 09:23:13 +08:00
Simon Ding
97ede5d9c9 feat: add badges 2024-08-01 00:01:55 +08:00
Simon Ding
4803567818 fix: remove attestations 2024-07-31 23:43:07 +08:00
Simon Ding
4e0014cb3f fix: sha256 tags in repo 2024-07-31 23:41:58 +08:00
Simon Ding
c256d46d5c test ci 2024-07-31 23:11:55 +08:00
Simon Ding
b765f16ea6 chore: updates 2024-07-31 20:59:40 +08:00
Simon Ding
9350e376f4 fix 2024-07-31 17:50:37 +08:00
Simon Ding
06f935871a chore: improve ui & fix 2024-07-31 17:43:42 +08:00
Simon Ding
001b850d8f feat: add imdbid to .plexmatch file 2024-07-31 17:01:50 +08:00
Simon Ding
1340305f2d feat: change progress display 2024-07-31 16:55:01 +08:00
Simon Ding
b337e40fcc feat: change name suggestting 2024-07-31 16:23:22 +08:00
Simon Ding
e94386e455 chore: better text display 2024-07-31 15:45:49 +08:00
Simon Ding
2b4fb99c89 chore: improve ui 2024-07-31 15:40:24 +08:00
Simon Ding
faa603d5df code refactor & improve search page 2024-07-31 15:26:34 +08:00
Simon Ding
9ba59a7d5a refactor code 2024-07-31 14:41:57 +08:00
Simon Ding
0ea1c040a2 chore: optimize log 2024-07-30 22:56:17 +08:00
Simon Ding
eba646f5db chore: delete not exist tasks 2024-07-30 22:11:41 +08:00
Simon Ding
ebcc0c32da fix: panic 2024-07-30 22:10:18 +08:00
Simon Ding
769f217506 feat: add option to control whether to deleted task 2024-07-30 21:55:54 +08:00
Simon Ding
3525d1bb83 feat: try hard link first 2024-07-30 21:15:35 +08:00
Simon Ding
2c3fd89f2a fix: plexmatch 2024-07-30 20:56:44 +08:00
Simon Ding
19ab8c65de chore: fixes 2024-07-30 20:37:42 +08:00
Simon Ding
979218f615 feat: season plexmatch file 2024-07-30 20:01:41 +08:00
Simon Ding
d4dd2da335 set umask to 0011 2024-07-30 19:22:17 +08:00
Simon Ding
000717fcd9 fix plexmatch 2024-07-30 17:22:28 +08:00
Simon Ding
300f9a478b chore: update readme 2024-07-30 16:35:59 +08:00
Simon Ding
88a554b186 fix: local storage dir 2024-07-30 16:12:10 +08:00
Simon Ding
6ef4bedebe feat: support generate .plexmatch 2024-07-30 15:51:54 +08:00
Simon Ding
233970ef39 feat: display loading animation 2024-07-30 14:02:24 +08:00
Simon Ding
e2bba8ec71 fix: target dir 2024-07-30 11:49:42 +08:00
Simon Ding
b7aeb9c3c6 fix: create file permission 2024-07-30 11:27:06 +08:00
Simon Ding
4a93d51fdc fix: chinese naming 2024-07-30 11:16:51 +08:00
Simon Ding
f158b74be6 fix: movie suggested naming 2024-07-30 11:11:07 +08:00
Simon Ding
2c8c715540 feat: movie also requires suggested dir 2024-07-30 10:50:40 +08:00
Simon Ding
ba532d406a feat: default to html render 2024-07-30 10:08:21 +08:00
131 changed files with 10866 additions and 2410 deletions

View File

@@ -17,11 +17,6 @@ jobs:
build-and-push-image:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
attestations: write
id-token: write
steps:
- uses: actions/checkout@v4
@@ -51,9 +46,3 @@ jobs:
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- name: Generate artifact attestation
uses: actions/attest-build-provenance@v1
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true

View File

@@ -61,4 +61,4 @@ jobs:
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
push-to-registry: false

View File

@@ -3,7 +3,7 @@ WORKDIR /app
COPY ./ui/pubspec.yaml ./ui/pubspec.lock ./
RUN flutter pub get
COPY ./ui/ ./
RUN flutter build web --no-web-resources-cdn
RUN flutter build web --no-web-resources-cdn --web-renderer html
# The build stage uses golang as the base image
FROM golang:1.22 as builder

View File

@@ -1,23 +1,52 @@
# polaris
# Polaris
![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/simon-ding/polaris/go.yml)
![GitHub Release](https://img.shields.io/github/v/release/simon-ding/polaris)
![GitHub Repo stars](https://img.shields.io/github/stars/simon-ding/polaris)
![GitHub go.mod Go version](https://img.shields.io/github/go-mod/go-version/simon-ding/polaris)
Polaris is a tracker for TV series and movies. Once configured, it downloads the matching resources as soon as an episode or movie is released. Both local storage and WebDAV are supported.
![main_page](./doc/assets/main_page.png)
![detail_page](./doc/assets/detail_page.png)
![anime](./doc/assets/anime_match.png)
Discussion group: https://t.me/+8R2nzrlSs2JhMDgx
## Features
## Quick Start
To use this program, see the [Quick Start](https://simonding.gitbook.io/polaris/quick_start) guide
## Features
- [x] Automatic tracking and downloading of TV series
- [x] Automatic tracking and downloading of movies
- [x] WebDAV storage support; pair it with [alist](https://github.com/alist-org/alist) or Alibaba Cloud for more possibilities
- [x] Event push notifications; Pushover and Bark are supported so far, with more being added
- [x] Backend proxy support
- [x] User authentication
- [x] Plex scraping support
- [x] NFO scraping file support
- [x] BT/PT support
- [x] and more...
## Usage
## Todos
To use this program, see the [Quick Start](./doc/quick_start.md) guide
- [ ] qBittorrent client support
- [ ] More notification clients
- [ ] Third-party watchlist import
- [ ] Mobile client
## How it works
This program does not provide any video resources itself; everything comes from the BT/PT sites connected through jackett/prowlarr.
1. The program calls the jackett/prowlarr API to search for resources and matches them to the corresponding episodes
2. Matched resources are sent to the download client
3. Once a download finishes, the files are moved into their destination path
## Compared with sonarr/radarr
* Better Chinese-language support
* Good support for anime and Japanese dramas; paired with Chinese trackers it can match most resources
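
A hedged Go sketch of the three steps described in the README's "How it works" section: search via jackett/prowlarr, hand the match to a download client, then organize the finished files. The names below (Resource, Indexer, Downloader, track, the fake implementations) are illustrative assumptions for this sketch, not the actual Polaris APIs.

```go
package main

import (
	"fmt"
	"log"
)

// Illustrative types only; the real Polaris code has its own indexer,
// downloader and storage abstractions.
type Resource struct {
	Title string
	Link  string
}

type Indexer interface {
	// Search would issue a torznab query against jackett/prowlarr.
	Search(keyword string) ([]Resource, error)
}

type Downloader interface {
	// Download hands the resource to a client such as transmission.
	Download(r Resource) (localPath string, err error)
}

// track mirrors the three README steps: search, download, organize.
func track(idx Indexer, dl Downloader, keyword, targetDir string) error {
	resources, err := idx.Search(keyword) // step 1: search and match
	if err != nil {
		return fmt.Errorf("search %q: %w", keyword, err)
	}
	if len(resources) == 0 {
		return fmt.Errorf("no resource matched %q", keyword)
	}
	best := resources[0] // real matching also scores resolution, season/episode, imdb id, ...
	src, err := dl.Download(best) // step 2: send to the download client
	if err != nil {
		return fmt.Errorf("download %q: %w", best.Title, err)
	}
	log.Printf("step 3: move %s into %s", src, targetDir) // step 3: organize into the target dir
	return nil
}

// Stub implementations so the sketch compiles and runs on its own.
type fakeIndexer struct{}

func (fakeIndexer) Search(string) ([]Resource, error) {
	return []Resource{{Title: "Some.Show.S01E01.1080p", Link: "magnet:?xt=..."}}, nil
}

type fakeDownloader struct{}

func (fakeDownloader) Download(r Resource) (string, error) { return "/downloads/" + r.Title, nil }

func main() {
	err := track(fakeIndexer{}, fakeDownloader{}, "Some Show S01E01", "/data/tv/Some Show/Season 01")
	if err != nil {
		log.Fatal(err)
	}
}
```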

View File

@@ -4,10 +4,14 @@ import (
"polaris/db"
"polaris/log"
"polaris/server"
"syscall"
)
func main() {
log.Infof("------------------- Starting Polaris ---------------------")
syscall.Umask(0) //max permission 0777
dbClient, err := db.Open()
if err != nil {
log.Panicf("init db error: %v", err)

View File

@@ -3,26 +3,33 @@ package db
var Version = "undefined"
const (
SettingTmdbApiKey = "tmdb_api_key"
SettingLanguage = "language"
SettingJacketUrl = "jacket_url"
SettingJacketApiKey = "jacket_api_key"
SettingDownloadDir = "download_dir"
SettingLogLevel = "log_level"
SettingProxy = "proxy"
SettingTmdbApiKey = "tmdb_api_key"
SettingLanguage = "language"
SettingJacketUrl = "jacket_url"
SettingJacketApiKey = "jacket_api_key"
SettingDownloadDir = "download_dir"
SettingLogLevel = "log_level"
SettingProxy = "proxy"
SettingPlexMatchEnabled = "plexmatch_enabled"
SettingNfoSupportEnabled = "nfo_support_enabled"
SettingAllowQiangban = "filter_qiangban"
SettingEnableTmdbAdultContent = "tmdb_adult_content"
SetttingSizeLimiter = "size_limiter"
SettingTvNamingFormat = "tv_naming_format"
SettingMovieNamingFormat = "movie_naming_format"
)
const (
SettingAuthEnabled = "auth_enbled"
SettingUsername = "auth_username"
SettingPassword = "auth_password"
SettingUsername = "auth_username"
SettingPassword = "auth_password"
)
const (
IndexerTorznabImpl = "torznab"
DataPath = "./data"
ImgPath = DataPath + "/img"
LogPath = DataPath + "/logs"
DataPath = "./data"
ImgPath = DataPath + "/img"
LogPath = DataPath + "/logs"
)
const (
@@ -30,18 +37,26 @@ const (
LanguageCN = "zh-CN"
)
type ResolutionType string
const DefaultNamingFormat = "{{.NameCN}} {{.NameEN}} {{if .Year}} ({{.Year}}) {{end}}"
const (
Any ResolutionType = "any"
R720p ResolutionType = "720p"
R1080p ResolutionType = "1080p"
R4k ResolutionType = "4k"
)
func (r ResolutionType) String() string {
return string(r)
type NamingInfo struct {
NameCN string
NameEN string
Year string
TmdbID int
}
type ResolutionType string
const JwtSerectKey = "jwt_secrect_key"
const JwtSerectKey = "jwt_secrect_key"
type SizeLimiter struct {
R720p Limiter `json:"720p"`
R1080p Limiter `json:"1080p"`
R2160p Limiter `json:"2160p"`
}
type Limiter struct {
Max int `json:"max"`
Min int `json:"min"`
}
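
DefaultNamingFormat together with NamingInfo reads like a Go text/template that gets executed to build folder names (compare the commit "feat: add ability to change folder naming convention"). A minimal sketch of such rendering; the renderName helper is a hypothetical name for illustration, not the actual Polaris function.

```go
package main

import (
	"fmt"
	"strings"
	"text/template"
)

// Mirrors db.NamingInfo from the diff above.
type NamingInfo struct {
	NameCN string
	NameEN string
	Year   string
	TmdbID int
}

// Same value as db.DefaultNamingFormat in the diff above.
const defaultNamingFormat = "{{.NameCN}} {{.NameEN}} {{if .Year}} ({{.Year}}) {{end}}"

// renderName executes the naming template against a NamingInfo
// and collapses the leftover whitespace.
func renderName(format string, info NamingInfo) (string, error) {
	tmpl, err := template.New("naming").Parse(format)
	if err != nil {
		return "", err
	}
	var sb strings.Builder
	if err := tmpl.Execute(&sb, info); err != nil {
		return "", err
	}
	return strings.Join(strings.Fields(sb.String()), " "), nil
}

func main() {
	name, err := renderName(defaultNamingFormat, NamingInfo{
		NameCN: "三体", NameEN: "Three-Body", Year: "2023", TmdbID: 123456,
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(name) // 三体 Three-Body (2023)
}
```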

db/db.go (198 changed lines)
View File

@@ -9,8 +9,10 @@ import (
"polaris/ent/downloadclients"
"polaris/ent/episode"
"polaris/ent/history"
"polaris/ent/importlist"
"polaris/ent/indexers"
"polaris/ent/media"
"polaris/ent/schema"
"polaris/ent/settings"
"polaris/ent/storage"
"polaris/log"
@@ -87,18 +89,16 @@ func (c *Client) generateDefaultLocalStorage() error {
return c.AddStorage(&StorageInfo{
Name: "local",
Implementation: "local",
TvPath: "/data/tv/",
MoviePath: "/data/movies/",
Default: true,
Settings: map[string]string{
"tv_path": "/data/tv/",
"movie_path": "/data/movies/",
},
})
}
func (c *Client) GetSetting(key string) string {
v, err := c.ent.Settings.Query().Where(settings.Key(key)).Only(context.TODO())
if err != nil {
log.Warnf("get setting by key: %s error: %v", key, err)
log.Debugf("get setting by key: %s error: %v", key, err)
return ""
}
return v.Value
@@ -119,7 +119,7 @@ func (c *Client) GetLanguage() string {
lang := c.GetSetting(SettingLanguage)
log.Infof("get application language: %s", lang)
if lang == "" {
return "zh-CN"
return LanguageCN
}
return lang
}
@@ -139,6 +139,7 @@ func (c *Client) AddMediaWatchlist(m *ent.Media, episodes []int) (*ent.Media, er
}
r, err := c.ent.Media.Create().
SetTmdbID(m.TmdbID).
SetImdbID(m.ImdbID).
SetStorageID(m.StorageID).
SetOverview(m.Overview).
SetNameCn(m.NameCn).
@@ -148,6 +149,9 @@ func (c *Client) AddMediaWatchlist(m *ent.Media, episodes []int) (*ent.Media, er
SetAirDate(m.AirDate).
SetResolution(m.Resolution).
SetTargetDir(m.TargetDir).
SetDownloadHistoryEpisodes(m.DownloadHistoryEpisodes).
SetLimiter(m.Limiter).
SetExtras(m.Extras).
AddEpisodeIDs(episodes...).
Save(context.TODO())
return r, err
@@ -167,6 +171,9 @@ func (c *Client) GetEpisode(seriesId, seasonNum, episodeNum int) (*ent.Episode,
return c.ent.Episode.Query().Where(episode.MediaID(seriesId), episode.SeasonNumber(seasonNum),
episode.EpisodeNumber(episodeNum)).First(context.TODO())
}
func (c *Client) GetEpisodeByID(epID int) (*ent.Episode, error) {
return c.ent.Episode.Query().Where(episode.ID(epID)).First(context.TODO())
}
func (c *Client) UpdateEpiode(episodeId int, name, overview string) error {
return c.ent.Episode.Update().Where(episode.ID(episodeId)).SetTitle(name).SetOverview(overview).Exec(context.TODO())
@@ -220,6 +227,7 @@ func (c *Client) SaveEposideDetail(d *ent.Episode) (int, error) {
SetSeasonNumber(d.SeasonNumber).
SetEpisodeNumber(d.EpisodeNumber).
SetOverview(d.Overview).
SetMonitored(d.Monitored).
SetTitle(d.Title).Save(context.TODO())
if err != nil {
return 0, errors.Wrap(err, "save episode")
@@ -245,19 +253,22 @@ type TorznabSetting struct {
ApiKey string `json:"api_key"`
}
func (c *Client) SaveTorznabInfo(name string, setting TorznabSetting) error {
data, err := json.Marshal(setting)
if err != nil {
return errors.Wrap(err, "marshal json")
func (c *Client) SaveIndexer(in *ent.Indexers) error {
if in.ID != 0 {
//update setting
return c.ent.Indexers.Update().Where(indexers.ID(in.ID)).SetName(in.Name).SetImplementation(in.Implementation).
SetPriority(in.Priority).SetSettings(in.Settings).SetSeedRatio(in.SeedRatio).SetDisabled(in.Disabled).Exec(context.Background())
}
count := c.ent.Indexers.Query().Where(indexers.Name(name)).CountX(context.TODO())
//create new one
count := c.ent.Indexers.Query().Where(indexers.Name(in.Name)).CountX(context.TODO())
if count > 0 {
c.ent.Indexers.Update().Where(indexers.Name(name)).SetSettings(string(data)).Save(context.TODO())
return err
return fmt.Errorf("name already esxits: %v", in.Name)
}
_, err = c.ent.Indexers.Create().
SetName(name).SetImplementation(IndexerTorznabImpl).SetPriority(1).SetSettings(string(data)).Save(context.TODO())
_, err := c.ent.Indexers.Create().
SetName(in.Name).SetImplementation(in.Implementation).SetPriority(in.Priority).SetSettings(in.Settings).SetSeedRatio(in.SeedRatio).
SetDisabled(in.Disabled).Save(context.TODO())
if err != nil {
return errors.Wrap(err, "save db")
}
@@ -269,9 +280,22 @@ func (c *Client) DeleteTorznab(id int) {
c.ent.Indexers.Delete().Where(indexers.ID(id)).Exec(context.TODO())
}
func (c *Client) GetIndexer(id int) (*TorznabInfo, error) {
res, err := c.ent.Indexers.Query().Where(indexers.ID(id)).First(context.TODO())
if err != nil {
return nil, err
}
var ss TorznabSetting
err = json.Unmarshal([]byte(res.Settings), &ss)
if err != nil {
return nil, fmt.Errorf("unmarshal torznab %s error: %v", res.Name, err)
}
return &TorznabInfo{Indexers: res, TorznabSetting: ss}, nil
}
type TorznabInfo struct {
ID int `json:"id"`
Name string `json:"name"`
*ent.Indexers
TorznabSetting
}
@@ -287,8 +311,7 @@ func (c *Client) GetAllTorznabInfo() []*TorznabInfo {
continue
}
l = append(l, &TorznabInfo{
ID: r.ID,
Name: r.Name,
Indexers: r,
TorznabSetting: ss,
})
}
@@ -335,6 +358,8 @@ type StorageInfo struct {
Name string `json:"name" binding:"required"`
Implementation string `json:"implementation" binding:"required"`
Settings map[string]string `json:"settings" binding:"required"`
TvPath string `json:"tv_path" binding:"required"`
MoviePath string `json:"movie_path" binding:"required"`
Default bool `json:"default"`
}
@@ -344,34 +369,28 @@ func (s *StorageInfo) ToWebDavSetting() WebdavSetting {
}
return WebdavSetting{
URL: s.Settings["url"],
TvPath: s.Settings["tv_path"],
MoviePath: s.Settings["movie_path"],
User: s.Settings["user"],
Password: s.Settings["password"],
ChangeFileHash: s.Settings["change_file_hash"],
}
}
type LocalDirSetting struct {
TvPath string `json:"tv_path"`
MoviePath string `json:"movie_path"`
}
type WebdavSetting struct {
URL string `json:"url"`
TvPath string `json:"tv_path"`
MoviePath string `json:"movie_path"`
User string `json:"user"`
Password string `json:"password"`
ChangeFileHash string `json:"change_file_hash"`
}
func (c *Client) AddStorage(st *StorageInfo) error {
if !strings.HasSuffix(st.Settings["tv_path"], "/") {
st.Settings["tv_path"] += "/"
if !strings.HasSuffix(st.TvPath, "/") {
st.TvPath += "/"
}
if !strings.HasSuffix(st.Settings["movie_path"], "/") {
st.Settings["movie_path"] += "/"
if !strings.HasSuffix(st.MoviePath, "/") {
st.MoviePath += "/"
}
if st.Settings == nil {
st.Settings = map[string]string{}
}
data, err := json.Marshal(st.Settings)
@@ -383,7 +402,7 @@ func (c *Client) AddStorage(st *StorageInfo) error {
if count > 0 {
//storage already exist, edit exist one
return c.ent.Storage.Update().Where(storage.Name(st.Name)).
SetImplementation(storage.Implementation(st.Implementation)).
SetImplementation(storage.Implementation(st.Implementation)).SetTvPath(st.TvPath).SetMoviePath(st.MoviePath).
SetSettings(string(data)).Exec(context.TODO())
}
countAll := c.ent.Storage.Query().Where(storage.Deleted(false)).CountX(context.TODO())
@@ -392,7 +411,7 @@ func (c *Client) AddStorage(st *StorageInfo) error {
st.Default = true
}
_, err = c.ent.Storage.Create().SetName(st.Name).
SetImplementation(storage.Implementation(st.Implementation)).
SetImplementation(storage.Implementation(st.Implementation)).SetTvPath(st.TvPath).SetMoviePath(st.MoviePath).
SetSettings(string(data)).SetDefault(st.Default).Save(context.TODO())
if err != nil {
return err
@@ -413,15 +432,6 @@ type Storage struct {
ent.Storage
}
func (s *Storage) ToLocalSetting() LocalDirSetting {
if s.Implementation != storage.ImplementationLocal {
panic("not local storage")
}
var localSetting LocalDirSetting
json.Unmarshal([]byte(s.Settings), &localSetting)
return localSetting
}
func (s *Storage) ToWebDavSetting() WebdavSetting {
if s.Implementation != storage.ImplementationWebdav {
panic("not webdav storage")
@@ -431,12 +441,6 @@ func (s *Storage) ToWebDavSetting() WebdavSetting {
return webdavSetting
}
func (s *Storage) GetPath() (tvPath string, moviePath string) {
var m map[string]string
json.Unmarshal([]byte(s.Settings), &m)
return m["tv_path"], m["movie_path"]
}
func (c *Client) GetStorage(id int) *Storage {
r, err := c.ent.Storage.Query().Where(storage.ID(id)).First(context.TODO())
if err != nil {
@@ -471,7 +475,8 @@ func (c *Client) SetDefaultStorageByName(name string) error {
func (c *Client) SaveHistoryRecord(h ent.History) (*ent.History, error) {
return c.ent.History.Create().SetMediaID(h.MediaID).SetEpisodeID(h.EpisodeID).SetDate(time.Now()).
SetStatus(h.Status).SetTargetDir(h.TargetDir).SetSourceTitle(h.SourceTitle).SetSaved(h.Saved).Save(context.TODO())
SetStatus(h.Status).SetTargetDir(h.TargetDir).SetSourceTitle(h.SourceTitle).SetIndexerID(h.IndexerID).
SetDownloadClientID(h.DownloadClientID).SetSize(h.Size).SetSaved(h.Saved).Save(context.TODO())
}
func (c *Client) SetHistoryStatus(id int, status history.Status) error {
@@ -488,7 +493,7 @@ func (c *Client) GetHistories() ent.Histories {
func (c *Client) GetRunningHistories() ent.Histories {
h, err := c.ent.History.Query().Where(history.Or(history.StatusEQ(history.StatusRunning),
history.StatusEQ(history.StatusUploading))).All(context.TODO())
history.StatusEQ(history.StatusUploading), history.StatusEQ(history.StatusSeeding))).All(context.TODO())
if err != nil {
return nil
}
@@ -525,6 +530,16 @@ func (c *Client) SetEpisodeStatus(id int, status episode.Status) error {
return c.ent.Episode.Update().Where(episode.ID(id)).SetStatus(status).Exec(context.TODO())
}
func (c *Client) IsEpisodeDownloadingOrDownloaded(id int) bool {
his := c.ent.History.Query().Where(history.EpisodeID(id)).AllX(context.Background())
for _, h := range his {
if h.Status != history.StatusFail {
return true
}
}
return false
}
func (c *Client) SetSeasonAllEpisodeStatus(mediaID, seasonNum int, status episode.Status) error {
return c.ent.Episode.Update().Where(episode.MediaID(mediaID), episode.SeasonNumber(seasonNum)).SetStatus(status).Exec(context.TODO())
}
@@ -547,4 +562,85 @@ func (c *Client) GetMovieDummyEpisode(movieId int) (*ent.Episode, error) {
return nil, errors.Wrap(err, "query episode")
}
return ep, nil
}
func (c *Client) GetDownloadClient(id int) (*ent.DownloadClients, error) {
return c.ent.DownloadClients.Query().Where(downloadclients.ID(id)).First(context.Background())
}
func (c *Client) SetEpisodeMonitoring(id int, b bool) error {
return c.ent.Episode.Update().Where(episode.ID(id)).SetMonitored(b).Exec(context.Background())
}
type EditMediaData struct {
ID int `json:"id"`
Resolution media.Resolution `json:"resolution"`
TargetDir string `json:"target_dir"`
Limiter schema.MediaLimiter `json:"limiter"`
}
func (c *Client) EditMediaMetadata(in EditMediaData) error {
return c.ent.Media.Update().Where(media.ID(in.ID)).SetResolution(in.Resolution).SetTargetDir(in.TargetDir).SetLimiter(in.Limiter).
Exec(context.Background())
}
func (c *Client) UpdateEpisodeTargetFile(id int, filename string) error {
return c.ent.Episode.Update().Where(episode.ID(id)).SetTargetFile(filename).Exec(context.Background())
}
func (c *Client) GetSeasonEpisodes(mediaId, seasonNum int) ([]*ent.Episode, error) {
return c.ent.Episode.Query().Where(episode.MediaID(mediaId), episode.SeasonNumber(seasonNum)).All(context.Background())
}
func (c *Client) GetAllImportLists() ([]*ent.ImportList, error) {
return c.ent.ImportList.Query().All(context.Background())
}
func (c *Client) AddImportlist(il *ent.ImportList) error {
count, err := c.ent.ImportList.Query().Where(importlist.Name(il.Name)).Count(context.Background())
if err != nil {
return err
}
if count > 0 {
//edit exist record
return c.ent.ImportList.Update().Where(importlist.Name(il.Name)).
SetURL(il.URL).SetQulity(il.Qulity).SetType(il.Type).SetStorageID(il.StorageID).Exec(context.Background())
}
return c.ent.ImportList.Create().SetName(il.Name).SetURL(il.URL).SetQulity(il.Qulity).SetStorageID(il.StorageID).
SetType(il.Type).Exec(context.Background())
}
func (c *Client) DeleteImportlist(id int) error {
return c.ent.ImportList.DeleteOneID(id).Exec(context.TODO())
}
func (c *Client) GetSizeLimiter() (*SizeLimiter, error) {
v := c.GetSetting(SetttingSizeLimiter)
var limiter SizeLimiter
err := json.Unmarshal([]byte(v), &limiter)
return &limiter, err
}
func (c *Client) SetSizeLimiter(limiter *SizeLimiter) error {
data, err := json.Marshal(limiter)
if err != nil {
return err
}
return c.SetSetting(SetttingSizeLimiter, string(data))
}
func (c *Client) GetTvNamingFormat() string {
s := c.GetSetting(SettingTvNamingFormat)
if s == "" {
return DefaultNamingFormat
}
return s
}
func (c *Client) GetMovingNamingFormat() string {
s := c.GetSetting(SettingMovieNamingFormat)
if s == "" {
return DefaultNamingFormat
}
return s
}
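
GetSizeLimiter and SetSizeLimiter above persist the limiter as a JSON blob in the settings table under the size_limiter key. A standalone sketch of that round-trip with an in-memory map standing in for the database; the map and the example values are assumptions for illustration.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirror of db.Limiter and db.SizeLimiter from the diff above.
type Limiter struct {
	Max int `json:"max"`
	Min int `json:"min"`
}

type SizeLimiter struct {
	R720p  Limiter `json:"720p"`
	R1080p Limiter `json:"1080p"`
	R2160p Limiter `json:"2160p"`
}

func main() {
	// Stand-in for the settings table: key -> string value.
	settings := map[string]string{}

	// SetSizeLimiter: marshal and store under the size_limiter key.
	in := SizeLimiter{
		R1080p: Limiter{Min: 500, Max: 8000},
		R2160p: Limiter{Min: 2000, Max: 30000},
	}
	data, err := json.Marshal(in)
	if err != nil {
		panic(err)
	}
	settings["size_limiter"] = string(data)

	// GetSizeLimiter: read the raw value back and unmarshal it.
	var out SizeLimiter
	if err := json.Unmarshal([]byte(settings["size_limiter"]), &out); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", out)
}
```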

doc/assets/anime_match.png (new file)
View File

Binary image not shown (774 KiB).

View File

Binary image not shown (changed: 2.1 MiB before, 2.6 MiB after).

View File

@@ -14,6 +14,7 @@ import (
"polaris/ent/downloadclients"
"polaris/ent/episode"
"polaris/ent/history"
"polaris/ent/importlist"
"polaris/ent/indexers"
"polaris/ent/media"
"polaris/ent/notificationclient"
@@ -37,6 +38,8 @@ type Client struct {
Episode *EpisodeClient
// History is the client for interacting with the History builders.
History *HistoryClient
// ImportList is the client for interacting with the ImportList builders.
ImportList *ImportListClient
// Indexers is the client for interacting with the Indexers builders.
Indexers *IndexersClient
// Media is the client for interacting with the Media builders.
@@ -61,6 +64,7 @@ func (c *Client) init() {
c.DownloadClients = NewDownloadClientsClient(c.config)
c.Episode = NewEpisodeClient(c.config)
c.History = NewHistoryClient(c.config)
c.ImportList = NewImportListClient(c.config)
c.Indexers = NewIndexersClient(c.config)
c.Media = NewMediaClient(c.config)
c.NotificationClient = NewNotificationClientClient(c.config)
@@ -161,6 +165,7 @@ func (c *Client) Tx(ctx context.Context) (*Tx, error) {
DownloadClients: NewDownloadClientsClient(cfg),
Episode: NewEpisodeClient(cfg),
History: NewHistoryClient(cfg),
ImportList: NewImportListClient(cfg),
Indexers: NewIndexersClient(cfg),
Media: NewMediaClient(cfg),
NotificationClient: NewNotificationClientClient(cfg),
@@ -188,6 +193,7 @@ func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error)
DownloadClients: NewDownloadClientsClient(cfg),
Episode: NewEpisodeClient(cfg),
History: NewHistoryClient(cfg),
ImportList: NewImportListClient(cfg),
Indexers: NewIndexersClient(cfg),
Media: NewMediaClient(cfg),
NotificationClient: NewNotificationClientClient(cfg),
@@ -222,7 +228,7 @@ func (c *Client) Close() error {
// In order to add hooks to a specific client, call: `client.Node.Use(...)`.
func (c *Client) Use(hooks ...Hook) {
for _, n := range []interface{ Use(...Hook) }{
c.DownloadClients, c.Episode, c.History, c.Indexers, c.Media,
c.DownloadClients, c.Episode, c.History, c.ImportList, c.Indexers, c.Media,
c.NotificationClient, c.Settings, c.Storage,
} {
n.Use(hooks...)
@@ -233,7 +239,7 @@ func (c *Client) Use(hooks ...Hook) {
// In order to add interceptors to a specific client, call: `client.Node.Intercept(...)`.
func (c *Client) Intercept(interceptors ...Interceptor) {
for _, n := range []interface{ Intercept(...Interceptor) }{
c.DownloadClients, c.Episode, c.History, c.Indexers, c.Media,
c.DownloadClients, c.Episode, c.History, c.ImportList, c.Indexers, c.Media,
c.NotificationClient, c.Settings, c.Storage,
} {
n.Intercept(interceptors...)
@@ -249,6 +255,8 @@ func (c *Client) Mutate(ctx context.Context, m Mutation) (Value, error) {
return c.Episode.mutate(ctx, m)
case *HistoryMutation:
return c.History.mutate(ctx, m)
case *ImportListMutation:
return c.ImportList.mutate(ctx, m)
case *IndexersMutation:
return c.Indexers.mutate(ctx, m)
case *MediaMutation:
@@ -679,6 +687,139 @@ func (c *HistoryClient) mutate(ctx context.Context, m *HistoryMutation) (Value,
}
}
// ImportListClient is a client for the ImportList schema.
type ImportListClient struct {
config
}
// NewImportListClient returns a client for the ImportList from the given config.
func NewImportListClient(c config) *ImportListClient {
return &ImportListClient{config: c}
}
// Use adds a list of mutation hooks to the hooks stack.
// A call to `Use(f, g, h)` equals to `importlist.Hooks(f(g(h())))`.
func (c *ImportListClient) Use(hooks ...Hook) {
c.hooks.ImportList = append(c.hooks.ImportList, hooks...)
}
// Intercept adds a list of query interceptors to the interceptors stack.
// A call to `Intercept(f, g, h)` equals to `importlist.Intercept(f(g(h())))`.
func (c *ImportListClient) Intercept(interceptors ...Interceptor) {
c.inters.ImportList = append(c.inters.ImportList, interceptors...)
}
// Create returns a builder for creating a ImportList entity.
func (c *ImportListClient) Create() *ImportListCreate {
mutation := newImportListMutation(c.config, OpCreate)
return &ImportListCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// CreateBulk returns a builder for creating a bulk of ImportList entities.
func (c *ImportListClient) CreateBulk(builders ...*ImportListCreate) *ImportListCreateBulk {
return &ImportListCreateBulk{config: c.config, builders: builders}
}
// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *ImportListClient) MapCreateBulk(slice any, setFunc func(*ImportListCreate, int)) *ImportListCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &ImportListCreateBulk{err: fmt.Errorf("calling to ImportListClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*ImportListCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &ImportListCreateBulk{config: c.config, builders: builders}
}
// Update returns an update builder for ImportList.
func (c *ImportListClient) Update() *ImportListUpdate {
mutation := newImportListMutation(c.config, OpUpdate)
return &ImportListUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// UpdateOne returns an update builder for the given entity.
func (c *ImportListClient) UpdateOne(il *ImportList) *ImportListUpdateOne {
mutation := newImportListMutation(c.config, OpUpdateOne, withImportList(il))
return &ImportListUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// UpdateOneID returns an update builder for the given id.
func (c *ImportListClient) UpdateOneID(id int) *ImportListUpdateOne {
mutation := newImportListMutation(c.config, OpUpdateOne, withImportListID(id))
return &ImportListUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// Delete returns a delete builder for ImportList.
func (c *ImportListClient) Delete() *ImportListDelete {
mutation := newImportListMutation(c.config, OpDelete)
return &ImportListDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// DeleteOne returns a builder for deleting the given entity.
func (c *ImportListClient) DeleteOne(il *ImportList) *ImportListDeleteOne {
return c.DeleteOneID(il.ID)
}
// DeleteOneID returns a builder for deleting the given entity by its id.
func (c *ImportListClient) DeleteOneID(id int) *ImportListDeleteOne {
builder := c.Delete().Where(importlist.ID(id))
builder.mutation.id = &id
builder.mutation.op = OpDeleteOne
return &ImportListDeleteOne{builder}
}
// Query returns a query builder for ImportList.
func (c *ImportListClient) Query() *ImportListQuery {
return &ImportListQuery{
config: c.config,
ctx: &QueryContext{Type: TypeImportList},
inters: c.Interceptors(),
}
}
// Get returns a ImportList entity by its id.
func (c *ImportListClient) Get(ctx context.Context, id int) (*ImportList, error) {
return c.Query().Where(importlist.ID(id)).Only(ctx)
}
// GetX is like Get, but panics if an error occurs.
func (c *ImportListClient) GetX(ctx context.Context, id int) *ImportList {
obj, err := c.Get(ctx, id)
if err != nil {
panic(err)
}
return obj
}
// Hooks returns the client hooks.
func (c *ImportListClient) Hooks() []Hook {
return c.hooks.ImportList
}
// Interceptors returns the client interceptors.
func (c *ImportListClient) Interceptors() []Interceptor {
return c.inters.ImportList
}
func (c *ImportListClient) mutate(ctx context.Context, m *ImportListMutation) (Value, error) {
switch m.Op() {
case OpCreate:
return (&ImportListCreate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
case OpUpdate:
return (&ImportListUpdate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
case OpUpdateOne:
return (&ImportListUpdateOne{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
case OpDelete, OpDeleteOne:
return (&ImportListDelete{config: c.config, hooks: c.Hooks(), mutation: m}).Exec(ctx)
default:
return nil, fmt.Errorf("ent: unknown ImportList mutation op: %q", m.Op())
}
}
// IndexersClient is a client for the Indexers schema.
type IndexersClient struct {
config
@@ -1363,11 +1504,11 @@ func (c *StorageClient) mutate(ctx context.Context, m *StorageMutation) (Value,
// hooks and interceptors per client, for fast access.
type (
hooks struct {
DownloadClients, Episode, History, Indexers, Media, NotificationClient,
Settings, Storage []ent.Hook
DownloadClients, Episode, History, ImportList, Indexers, Media,
NotificationClient, Settings, Storage []ent.Hook
}
inters struct {
DownloadClients, Episode, History, Indexers, Media, NotificationClient,
Settings, Storage []ent.Interceptor
DownloadClients, Episode, History, ImportList, Indexers, Media,
NotificationClient, Settings, Storage []ent.Interceptor
}
)

View File

@@ -9,6 +9,7 @@ import (
"polaris/ent/downloadclients"
"polaris/ent/episode"
"polaris/ent/history"
"polaris/ent/importlist"
"polaris/ent/indexers"
"polaris/ent/media"
"polaris/ent/notificationclient"
@@ -83,6 +84,7 @@ func checkColumn(table, column string) error {
downloadclients.Table: downloadclients.ValidColumn,
episode.Table: episode.ValidColumn,
history.Table: history.ValidColumn,
importlist.Table: importlist.ValidColumn,
indexers.Table: indexers.ValidColumn,
media.Table: media.ValidColumn,
notificationclient.Table: notificationclient.ValidColumn,

View File

@@ -31,6 +31,10 @@ type Episode struct {
AirDate string `json:"air_date,omitempty"`
// Status holds the value of the "status" field.
Status episode.Status `json:"status,omitempty"`
// Monitored holds the value of the "monitored" field.
Monitored bool `json:"monitored"`
// TargetFile holds the value of the "target_file" field.
TargetFile string `json:"target_file,omitempty"`
// Edges holds the relations/edges for other nodes in the graph.
// The values are being populated by the EpisodeQuery when eager-loading is set.
Edges EpisodeEdges `json:"edges"`
@@ -62,9 +66,11 @@ func (*Episode) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case episode.FieldMonitored:
values[i] = new(sql.NullBool)
case episode.FieldID, episode.FieldMediaID, episode.FieldSeasonNumber, episode.FieldEpisodeNumber:
values[i] = new(sql.NullInt64)
case episode.FieldTitle, episode.FieldOverview, episode.FieldAirDate, episode.FieldStatus:
case episode.FieldTitle, episode.FieldOverview, episode.FieldAirDate, episode.FieldStatus, episode.FieldTargetFile:
values[i] = new(sql.NullString)
default:
values[i] = new(sql.UnknownType)
@@ -129,6 +135,18 @@ func (e *Episode) assignValues(columns []string, values []any) error {
} else if value.Valid {
e.Status = episode.Status(value.String)
}
case episode.FieldMonitored:
if value, ok := values[i].(*sql.NullBool); !ok {
return fmt.Errorf("unexpected type %T for field monitored", values[i])
} else if value.Valid {
e.Monitored = value.Bool
}
case episode.FieldTargetFile:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field target_file", values[i])
} else if value.Valid {
e.TargetFile = value.String
}
default:
e.selectValues.Set(columns[i], values[i])
}
@@ -190,6 +208,12 @@ func (e *Episode) String() string {
builder.WriteString(", ")
builder.WriteString("status=")
builder.WriteString(fmt.Sprintf("%v", e.Status))
builder.WriteString(", ")
builder.WriteString("monitored=")
builder.WriteString(fmt.Sprintf("%v", e.Monitored))
builder.WriteString(", ")
builder.WriteString("target_file=")
builder.WriteString(e.TargetFile)
builder.WriteByte(')')
return builder.String()
}

View File

@@ -28,6 +28,10 @@ const (
FieldAirDate = "air_date"
// FieldStatus holds the string denoting the status field in the database.
FieldStatus = "status"
// FieldMonitored holds the string denoting the monitored field in the database.
FieldMonitored = "monitored"
// FieldTargetFile holds the string denoting the target_file field in the database.
FieldTargetFile = "target_file"
// EdgeMedia holds the string denoting the media edge name in mutations.
EdgeMedia = "media"
// Table holds the table name of the episode in the database.
@@ -51,6 +55,8 @@ var Columns = []string{
FieldOverview,
FieldAirDate,
FieldStatus,
FieldMonitored,
FieldTargetFile,
}
// ValidColumn reports if the column name is valid (part of the table columns).
@@ -63,6 +69,11 @@ func ValidColumn(column string) bool {
return false
}
var (
// DefaultMonitored holds the default value on creation for the "monitored" field.
DefaultMonitored bool
)
// Status defines the type for the "status" enum field.
type Status string
@@ -133,6 +144,16 @@ func ByStatus(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldStatus, opts...).ToFunc()
}
// ByMonitored orders the results by the monitored field.
func ByMonitored(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldMonitored, opts...).ToFunc()
}
// ByTargetFile orders the results by the target_file field.
func ByTargetFile(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldTargetFile, opts...).ToFunc()
}
// ByMediaField orders the results by media field.
func ByMediaField(field string, opts ...sql.OrderTermOption) OrderOption {
return func(s *sql.Selector) {

View File

@@ -84,6 +84,16 @@ func AirDate(v string) predicate.Episode {
return predicate.Episode(sql.FieldEQ(FieldAirDate, v))
}
// Monitored applies equality check predicate on the "monitored" field. It's identical to MonitoredEQ.
func Monitored(v bool) predicate.Episode {
return predicate.Episode(sql.FieldEQ(FieldMonitored, v))
}
// TargetFile applies equality check predicate on the "target_file" field. It's identical to TargetFileEQ.
func TargetFile(v string) predicate.Episode {
return predicate.Episode(sql.FieldEQ(FieldTargetFile, v))
}
// MediaIDEQ applies the EQ predicate on the "media_id" field.
func MediaIDEQ(v int) predicate.Episode {
return predicate.Episode(sql.FieldEQ(FieldMediaID, v))
@@ -409,6 +419,91 @@ func StatusNotIn(vs ...Status) predicate.Episode {
return predicate.Episode(sql.FieldNotIn(FieldStatus, vs...))
}
// MonitoredEQ applies the EQ predicate on the "monitored" field.
func MonitoredEQ(v bool) predicate.Episode {
return predicate.Episode(sql.FieldEQ(FieldMonitored, v))
}
// MonitoredNEQ applies the NEQ predicate on the "monitored" field.
func MonitoredNEQ(v bool) predicate.Episode {
return predicate.Episode(sql.FieldNEQ(FieldMonitored, v))
}
// TargetFileEQ applies the EQ predicate on the "target_file" field.
func TargetFileEQ(v string) predicate.Episode {
return predicate.Episode(sql.FieldEQ(FieldTargetFile, v))
}
// TargetFileNEQ applies the NEQ predicate on the "target_file" field.
func TargetFileNEQ(v string) predicate.Episode {
return predicate.Episode(sql.FieldNEQ(FieldTargetFile, v))
}
// TargetFileIn applies the In predicate on the "target_file" field.
func TargetFileIn(vs ...string) predicate.Episode {
return predicate.Episode(sql.FieldIn(FieldTargetFile, vs...))
}
// TargetFileNotIn applies the NotIn predicate on the "target_file" field.
func TargetFileNotIn(vs ...string) predicate.Episode {
return predicate.Episode(sql.FieldNotIn(FieldTargetFile, vs...))
}
// TargetFileGT applies the GT predicate on the "target_file" field.
func TargetFileGT(v string) predicate.Episode {
return predicate.Episode(sql.FieldGT(FieldTargetFile, v))
}
// TargetFileGTE applies the GTE predicate on the "target_file" field.
func TargetFileGTE(v string) predicate.Episode {
return predicate.Episode(sql.FieldGTE(FieldTargetFile, v))
}
// TargetFileLT applies the LT predicate on the "target_file" field.
func TargetFileLT(v string) predicate.Episode {
return predicate.Episode(sql.FieldLT(FieldTargetFile, v))
}
// TargetFileLTE applies the LTE predicate on the "target_file" field.
func TargetFileLTE(v string) predicate.Episode {
return predicate.Episode(sql.FieldLTE(FieldTargetFile, v))
}
// TargetFileContains applies the Contains predicate on the "target_file" field.
func TargetFileContains(v string) predicate.Episode {
return predicate.Episode(sql.FieldContains(FieldTargetFile, v))
}
// TargetFileHasPrefix applies the HasPrefix predicate on the "target_file" field.
func TargetFileHasPrefix(v string) predicate.Episode {
return predicate.Episode(sql.FieldHasPrefix(FieldTargetFile, v))
}
// TargetFileHasSuffix applies the HasSuffix predicate on the "target_file" field.
func TargetFileHasSuffix(v string) predicate.Episode {
return predicate.Episode(sql.FieldHasSuffix(FieldTargetFile, v))
}
// TargetFileIsNil applies the IsNil predicate on the "target_file" field.
func TargetFileIsNil() predicate.Episode {
return predicate.Episode(sql.FieldIsNull(FieldTargetFile))
}
// TargetFileNotNil applies the NotNil predicate on the "target_file" field.
func TargetFileNotNil() predicate.Episode {
return predicate.Episode(sql.FieldNotNull(FieldTargetFile))
}
// TargetFileEqualFold applies the EqualFold predicate on the "target_file" field.
func TargetFileEqualFold(v string) predicate.Episode {
return predicate.Episode(sql.FieldEqualFold(FieldTargetFile, v))
}
// TargetFileContainsFold applies the ContainsFold predicate on the "target_file" field.
func TargetFileContainsFold(v string) predicate.Episode {
return predicate.Episode(sql.FieldContainsFold(FieldTargetFile, v))
}
// HasMedia applies the HasEdge predicate on the "media" edge.
func HasMedia() predicate.Episode {
return predicate.Episode(func(s *sql.Selector) {

View File

@@ -78,6 +78,34 @@ func (ec *EpisodeCreate) SetNillableStatus(e *episode.Status) *EpisodeCreate {
return ec
}
// SetMonitored sets the "monitored" field.
func (ec *EpisodeCreate) SetMonitored(b bool) *EpisodeCreate {
ec.mutation.SetMonitored(b)
return ec
}
// SetNillableMonitored sets the "monitored" field if the given value is not nil.
func (ec *EpisodeCreate) SetNillableMonitored(b *bool) *EpisodeCreate {
if b != nil {
ec.SetMonitored(*b)
}
return ec
}
// SetTargetFile sets the "target_file" field.
func (ec *EpisodeCreate) SetTargetFile(s string) *EpisodeCreate {
ec.mutation.SetTargetFile(s)
return ec
}
// SetNillableTargetFile sets the "target_file" field if the given value is not nil.
func (ec *EpisodeCreate) SetNillableTargetFile(s *string) *EpisodeCreate {
if s != nil {
ec.SetTargetFile(*s)
}
return ec
}
// SetMedia sets the "media" edge to the Media entity.
func (ec *EpisodeCreate) SetMedia(m *Media) *EpisodeCreate {
return ec.SetMediaID(m.ID)
@@ -122,6 +150,10 @@ func (ec *EpisodeCreate) defaults() {
v := episode.DefaultStatus
ec.mutation.SetStatus(v)
}
if _, ok := ec.mutation.Monitored(); !ok {
v := episode.DefaultMonitored
ec.mutation.SetMonitored(v)
}
}
// check runs all checks and user-defined validators on the builder.
@@ -149,6 +181,9 @@ func (ec *EpisodeCreate) check() error {
return &ValidationError{Name: "status", err: fmt.Errorf(`ent: validator failed for field "Episode.status": %w`, err)}
}
}
if _, ok := ec.mutation.Monitored(); !ok {
return &ValidationError{Name: "monitored", err: errors.New(`ent: missing required field "Episode.monitored"`)}
}
return nil
}
@@ -199,6 +234,14 @@ func (ec *EpisodeCreate) createSpec() (*Episode, *sqlgraph.CreateSpec) {
_spec.SetField(episode.FieldStatus, field.TypeEnum, value)
_node.Status = value
}
if value, ok := ec.mutation.Monitored(); ok {
_spec.SetField(episode.FieldMonitored, field.TypeBool, value)
_node.Monitored = value
}
if value, ok := ec.mutation.TargetFile(); ok {
_spec.SetField(episode.FieldTargetFile, field.TypeString, value)
_node.TargetFile = value
}
if nodes := ec.mutation.MediaIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,

View File

@@ -146,6 +146,40 @@ func (eu *EpisodeUpdate) SetNillableStatus(e *episode.Status) *EpisodeUpdate {
return eu
}
// SetMonitored sets the "monitored" field.
func (eu *EpisodeUpdate) SetMonitored(b bool) *EpisodeUpdate {
eu.mutation.SetMonitored(b)
return eu
}
// SetNillableMonitored sets the "monitored" field if the given value is not nil.
func (eu *EpisodeUpdate) SetNillableMonitored(b *bool) *EpisodeUpdate {
if b != nil {
eu.SetMonitored(*b)
}
return eu
}
// SetTargetFile sets the "target_file" field.
func (eu *EpisodeUpdate) SetTargetFile(s string) *EpisodeUpdate {
eu.mutation.SetTargetFile(s)
return eu
}
// SetNillableTargetFile sets the "target_file" field if the given value is not nil.
func (eu *EpisodeUpdate) SetNillableTargetFile(s *string) *EpisodeUpdate {
if s != nil {
eu.SetTargetFile(*s)
}
return eu
}
// ClearTargetFile clears the value of the "target_file" field.
func (eu *EpisodeUpdate) ClearTargetFile() *EpisodeUpdate {
eu.mutation.ClearTargetFile()
return eu
}
// SetMedia sets the "media" edge to the Media entity.
func (eu *EpisodeUpdate) SetMedia(m *Media) *EpisodeUpdate {
return eu.SetMediaID(m.ID)
@@ -235,6 +269,15 @@ func (eu *EpisodeUpdate) sqlSave(ctx context.Context) (n int, err error) {
if value, ok := eu.mutation.Status(); ok {
_spec.SetField(episode.FieldStatus, field.TypeEnum, value)
}
if value, ok := eu.mutation.Monitored(); ok {
_spec.SetField(episode.FieldMonitored, field.TypeBool, value)
}
if value, ok := eu.mutation.TargetFile(); ok {
_spec.SetField(episode.FieldTargetFile, field.TypeString, value)
}
if eu.mutation.TargetFileCleared() {
_spec.ClearField(episode.FieldTargetFile, field.TypeString)
}
if eu.mutation.MediaCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
@@ -402,6 +445,40 @@ func (euo *EpisodeUpdateOne) SetNillableStatus(e *episode.Status) *EpisodeUpdate
return euo
}
// SetMonitored sets the "monitored" field.
func (euo *EpisodeUpdateOne) SetMonitored(b bool) *EpisodeUpdateOne {
euo.mutation.SetMonitored(b)
return euo
}
// SetNillableMonitored sets the "monitored" field if the given value is not nil.
func (euo *EpisodeUpdateOne) SetNillableMonitored(b *bool) *EpisodeUpdateOne {
if b != nil {
euo.SetMonitored(*b)
}
return euo
}
// SetTargetFile sets the "target_file" field.
func (euo *EpisodeUpdateOne) SetTargetFile(s string) *EpisodeUpdateOne {
euo.mutation.SetTargetFile(s)
return euo
}
// SetNillableTargetFile sets the "target_file" field if the given value is not nil.
func (euo *EpisodeUpdateOne) SetNillableTargetFile(s *string) *EpisodeUpdateOne {
if s != nil {
euo.SetTargetFile(*s)
}
return euo
}
// ClearTargetFile clears the value of the "target_file" field.
func (euo *EpisodeUpdateOne) ClearTargetFile() *EpisodeUpdateOne {
euo.mutation.ClearTargetFile()
return euo
}
// SetMedia sets the "media" edge to the Media entity.
func (euo *EpisodeUpdateOne) SetMedia(m *Media) *EpisodeUpdateOne {
return euo.SetMediaID(m.ID)
@@ -521,6 +598,15 @@ func (euo *EpisodeUpdateOne) sqlSave(ctx context.Context) (_node *Episode, err e
if value, ok := euo.mutation.Status(); ok {
_spec.SetField(episode.FieldStatus, field.TypeEnum, value)
}
if value, ok := euo.mutation.Monitored(); ok {
_spec.SetField(episode.FieldMonitored, field.TypeBool, value)
}
if value, ok := euo.mutation.TargetFile(); ok {
_spec.SetField(episode.FieldTargetFile, field.TypeString, value)
}
if euo.mutation.TargetFileCleared() {
_spec.ClearField(episode.FieldTargetFile, field.TypeString)
}
if euo.mutation.MediaCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,

View File

@@ -29,6 +29,10 @@ type History struct {
TargetDir string `json:"target_dir,omitempty"`
// Size holds the value of the "size" field.
Size int `json:"size,omitempty"`
// DownloadClientID holds the value of the "download_client_id" field.
DownloadClientID int `json:"download_client_id,omitempty"`
// IndexerID holds the value of the "indexer_id" field.
IndexerID int `json:"indexer_id,omitempty"`
// Status holds the value of the "status" field.
Status history.Status `json:"status,omitempty"`
// Saved holds the value of the "saved" field.
@@ -41,7 +45,7 @@ func (*History) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case history.FieldID, history.FieldMediaID, history.FieldEpisodeID, history.FieldSize:
case history.FieldID, history.FieldMediaID, history.FieldEpisodeID, history.FieldSize, history.FieldDownloadClientID, history.FieldIndexerID:
values[i] = new(sql.NullInt64)
case history.FieldSourceTitle, history.FieldTargetDir, history.FieldStatus, history.FieldSaved:
values[i] = new(sql.NullString)
@@ -104,6 +108,18 @@ func (h *History) assignValues(columns []string, values []any) error {
} else if value.Valid {
h.Size = int(value.Int64)
}
case history.FieldDownloadClientID:
if value, ok := values[i].(*sql.NullInt64); !ok {
return fmt.Errorf("unexpected type %T for field download_client_id", values[i])
} else if value.Valid {
h.DownloadClientID = int(value.Int64)
}
case history.FieldIndexerID:
if value, ok := values[i].(*sql.NullInt64); !ok {
return fmt.Errorf("unexpected type %T for field indexer_id", values[i])
} else if value.Valid {
h.IndexerID = int(value.Int64)
}
case history.FieldStatus:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field status", values[i])
@@ -170,6 +186,12 @@ func (h *History) String() string {
builder.WriteString("size=")
builder.WriteString(fmt.Sprintf("%v", h.Size))
builder.WriteString(", ")
builder.WriteString("download_client_id=")
builder.WriteString(fmt.Sprintf("%v", h.DownloadClientID))
builder.WriteString(", ")
builder.WriteString("indexer_id=")
builder.WriteString(fmt.Sprintf("%v", h.IndexerID))
builder.WriteString(", ")
builder.WriteString("status=")
builder.WriteString(fmt.Sprintf("%v", h.Status))
builder.WriteString(", ")

View File

@@ -25,6 +25,10 @@ const (
FieldTargetDir = "target_dir"
// FieldSize holds the string denoting the size field in the database.
FieldSize = "size"
// FieldDownloadClientID holds the string denoting the download_client_id field in the database.
FieldDownloadClientID = "download_client_id"
// FieldIndexerID holds the string denoting the indexer_id field in the database.
FieldIndexerID = "indexer_id"
// FieldStatus holds the string denoting the status field in the database.
FieldStatus = "status"
// FieldSaved holds the string denoting the saved field in the database.
@@ -42,6 +46,8 @@ var Columns = []string{
FieldDate,
FieldTargetDir,
FieldSize,
FieldDownloadClientID,
FieldIndexerID,
FieldStatus,
FieldSaved,
}
@@ -70,6 +76,7 @@ const (
StatusSuccess Status = "success"
StatusFail Status = "fail"
StatusUploading Status = "uploading"
StatusSeeding Status = "seeding"
)
func (s Status) String() string {
@@ -79,7 +86,7 @@ func (s Status) String() string {
// StatusValidator is a validator for the "status" field enum values. It is called by the builders before save.
func StatusValidator(s Status) error {
switch s {
case StatusRunning, StatusSuccess, StatusFail, StatusUploading:
case StatusRunning, StatusSuccess, StatusFail, StatusUploading, StatusSeeding:
return nil
default:
return fmt.Errorf("history: invalid enum value for status field: %q", s)
@@ -124,6 +131,16 @@ func BySize(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldSize, opts...).ToFunc()
}
// ByDownloadClientID orders the results by the download_client_id field.
func ByDownloadClientID(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldDownloadClientID, opts...).ToFunc()
}
// ByIndexerID orders the results by the indexer_id field.
func ByIndexerID(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldIndexerID, opts...).ToFunc()
}
// ByStatus orders the results by the status field.
func ByStatus(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldStatus, opts...).ToFunc()

View File

@@ -84,6 +84,16 @@ func Size(v int) predicate.History {
return predicate.History(sql.FieldEQ(FieldSize, v))
}
// DownloadClientID applies equality check predicate on the "download_client_id" field. It's identical to DownloadClientIDEQ.
func DownloadClientID(v int) predicate.History {
return predicate.History(sql.FieldEQ(FieldDownloadClientID, v))
}
// IndexerID applies equality check predicate on the "indexer_id" field. It's identical to IndexerIDEQ.
func IndexerID(v int) predicate.History {
return predicate.History(sql.FieldEQ(FieldIndexerID, v))
}
// Saved applies equality check predicate on the "saved" field. It's identical to SavedEQ.
func Saved(v string) predicate.History {
return predicate.History(sql.FieldEQ(FieldSaved, v))
@@ -389,6 +399,106 @@ func SizeLTE(v int) predicate.History {
return predicate.History(sql.FieldLTE(FieldSize, v))
}
// DownloadClientIDEQ applies the EQ predicate on the "download_client_id" field.
func DownloadClientIDEQ(v int) predicate.History {
return predicate.History(sql.FieldEQ(FieldDownloadClientID, v))
}
// DownloadClientIDNEQ applies the NEQ predicate on the "download_client_id" field.
func DownloadClientIDNEQ(v int) predicate.History {
return predicate.History(sql.FieldNEQ(FieldDownloadClientID, v))
}
// DownloadClientIDIn applies the In predicate on the "download_client_id" field.
func DownloadClientIDIn(vs ...int) predicate.History {
return predicate.History(sql.FieldIn(FieldDownloadClientID, vs...))
}
// DownloadClientIDNotIn applies the NotIn predicate on the "download_client_id" field.
func DownloadClientIDNotIn(vs ...int) predicate.History {
return predicate.History(sql.FieldNotIn(FieldDownloadClientID, vs...))
}
// DownloadClientIDGT applies the GT predicate on the "download_client_id" field.
func DownloadClientIDGT(v int) predicate.History {
return predicate.History(sql.FieldGT(FieldDownloadClientID, v))
}
// DownloadClientIDGTE applies the GTE predicate on the "download_client_id" field.
func DownloadClientIDGTE(v int) predicate.History {
return predicate.History(sql.FieldGTE(FieldDownloadClientID, v))
}
// DownloadClientIDLT applies the LT predicate on the "download_client_id" field.
func DownloadClientIDLT(v int) predicate.History {
return predicate.History(sql.FieldLT(FieldDownloadClientID, v))
}
// DownloadClientIDLTE applies the LTE predicate on the "download_client_id" field.
func DownloadClientIDLTE(v int) predicate.History {
return predicate.History(sql.FieldLTE(FieldDownloadClientID, v))
}
// DownloadClientIDIsNil applies the IsNil predicate on the "download_client_id" field.
func DownloadClientIDIsNil() predicate.History {
return predicate.History(sql.FieldIsNull(FieldDownloadClientID))
}
// DownloadClientIDNotNil applies the NotNil predicate on the "download_client_id" field.
func DownloadClientIDNotNil() predicate.History {
return predicate.History(sql.FieldNotNull(FieldDownloadClientID))
}
// IndexerIDEQ applies the EQ predicate on the "indexer_id" field.
func IndexerIDEQ(v int) predicate.History {
return predicate.History(sql.FieldEQ(FieldIndexerID, v))
}
// IndexerIDNEQ applies the NEQ predicate on the "indexer_id" field.
func IndexerIDNEQ(v int) predicate.History {
return predicate.History(sql.FieldNEQ(FieldIndexerID, v))
}
// IndexerIDIn applies the In predicate on the "indexer_id" field.
func IndexerIDIn(vs ...int) predicate.History {
return predicate.History(sql.FieldIn(FieldIndexerID, vs...))
}
// IndexerIDNotIn applies the NotIn predicate on the "indexer_id" field.
func IndexerIDNotIn(vs ...int) predicate.History {
return predicate.History(sql.FieldNotIn(FieldIndexerID, vs...))
}
// IndexerIDGT applies the GT predicate on the "indexer_id" field.
func IndexerIDGT(v int) predicate.History {
return predicate.History(sql.FieldGT(FieldIndexerID, v))
}
// IndexerIDGTE applies the GTE predicate on the "indexer_id" field.
func IndexerIDGTE(v int) predicate.History {
return predicate.History(sql.FieldGTE(FieldIndexerID, v))
}
// IndexerIDLT applies the LT predicate on the "indexer_id" field.
func IndexerIDLT(v int) predicate.History {
return predicate.History(sql.FieldLT(FieldIndexerID, v))
}
// IndexerIDLTE applies the LTE predicate on the "indexer_id" field.
func IndexerIDLTE(v int) predicate.History {
return predicate.History(sql.FieldLTE(FieldIndexerID, v))
}
// IndexerIDIsNil applies the IsNil predicate on the "indexer_id" field.
func IndexerIDIsNil() predicate.History {
return predicate.History(sql.FieldIsNull(FieldIndexerID))
}
// IndexerIDNotNil applies the NotNil predicate on the "indexer_id" field.
func IndexerIDNotNil() predicate.History {
return predicate.History(sql.FieldNotNull(FieldIndexerID))
}
// StatusEQ applies the EQ predicate on the "status" field.
func StatusEQ(v Status) predicate.History {
return predicate.History(sql.FieldEQ(FieldStatus, v))
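Together, the new predicates let callers filter history rows by the download client and indexer that produced them, and by the new seeding status. A minimal usage sketch, assuming a generated polaris/ent client is already constructed and that context, polaris/ent and polaris/ent/history are imported; the helper name and its arguments are hypothetical, not part of this change:

// listSeedingForClient returns history rows that belong to the given download
// client, have no indexer recorded, and are still seeding.
func listSeedingForClient(ctx context.Context, db *ent.Client, clientID int) ([]*ent.History, error) {
	return db.History.Query().
		Where(
			history.DownloadClientIDEQ(clientID),
			history.IndexerIDIsNil(),
			history.StatusEQ(history.StatusSeeding),
		).
		All(ctx)
}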

View File

@@ -72,6 +72,34 @@ func (hc *HistoryCreate) SetNillableSize(i *int) *HistoryCreate {
return hc
}
// SetDownloadClientID sets the "download_client_id" field.
func (hc *HistoryCreate) SetDownloadClientID(i int) *HistoryCreate {
hc.mutation.SetDownloadClientID(i)
return hc
}
// SetNillableDownloadClientID sets the "download_client_id" field if the given value is not nil.
func (hc *HistoryCreate) SetNillableDownloadClientID(i *int) *HistoryCreate {
if i != nil {
hc.SetDownloadClientID(*i)
}
return hc
}
// SetIndexerID sets the "indexer_id" field.
func (hc *HistoryCreate) SetIndexerID(i int) *HistoryCreate {
hc.mutation.SetIndexerID(i)
return hc
}
// SetNillableIndexerID sets the "indexer_id" field if the given value is not nil.
func (hc *HistoryCreate) SetNillableIndexerID(i *int) *HistoryCreate {
if i != nil {
hc.SetIndexerID(*i)
}
return hc
}
// SetStatus sets the "status" field.
func (hc *HistoryCreate) SetStatus(h history.Status) *HistoryCreate {
hc.mutation.SetStatus(h)
@@ -208,6 +236,14 @@ func (hc *HistoryCreate) createSpec() (*History, *sqlgraph.CreateSpec) {
_spec.SetField(history.FieldSize, field.TypeInt, value)
_node.Size = value
}
if value, ok := hc.mutation.DownloadClientID(); ok {
_spec.SetField(history.FieldDownloadClientID, field.TypeInt, value)
_node.DownloadClientID = value
}
if value, ok := hc.mutation.IndexerID(); ok {
_spec.SetField(history.FieldIndexerID, field.TypeInt, value)
_node.IndexerID = value
}
if value, ok := hc.mutation.Status(); ok {
_spec.SetField(history.FieldStatus, field.TypeEnum, value)
_node.Status = value
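On the create path the new columns stay optional: the SetNillable* setters only write a value when the pointer is non-nil, so rows recorded before a download client or indexer is known keep NULL. A hedged sketch (helper name hypothetical; other required History fields are omitted for brevity):

// recordGrab stores a new history row; clientID and indexerID may be nil.
func recordGrab(ctx context.Context, db *ent.Client, title string, clientID, indexerID *int) (*ent.History, error) {
	return db.History.Create().
		SetSourceTitle(title).
		SetNillableDownloadClientID(clientID).
		SetNillableIndexerID(indexerID).
		SetStatus(history.StatusRunning).
		Save(ctx)
}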

View File

@@ -139,6 +139,60 @@ func (hu *HistoryUpdate) AddSize(i int) *HistoryUpdate {
return hu
}
// SetDownloadClientID sets the "download_client_id" field.
func (hu *HistoryUpdate) SetDownloadClientID(i int) *HistoryUpdate {
hu.mutation.ResetDownloadClientID()
hu.mutation.SetDownloadClientID(i)
return hu
}
// SetNillableDownloadClientID sets the "download_client_id" field if the given value is not nil.
func (hu *HistoryUpdate) SetNillableDownloadClientID(i *int) *HistoryUpdate {
if i != nil {
hu.SetDownloadClientID(*i)
}
return hu
}
// AddDownloadClientID adds i to the "download_client_id" field.
func (hu *HistoryUpdate) AddDownloadClientID(i int) *HistoryUpdate {
hu.mutation.AddDownloadClientID(i)
return hu
}
// ClearDownloadClientID clears the value of the "download_client_id" field.
func (hu *HistoryUpdate) ClearDownloadClientID() *HistoryUpdate {
hu.mutation.ClearDownloadClientID()
return hu
}
// SetIndexerID sets the "indexer_id" field.
func (hu *HistoryUpdate) SetIndexerID(i int) *HistoryUpdate {
hu.mutation.ResetIndexerID()
hu.mutation.SetIndexerID(i)
return hu
}
// SetNillableIndexerID sets the "indexer_id" field if the given value is not nil.
func (hu *HistoryUpdate) SetNillableIndexerID(i *int) *HistoryUpdate {
if i != nil {
hu.SetIndexerID(*i)
}
return hu
}
// AddIndexerID adds i to the "indexer_id" field.
func (hu *HistoryUpdate) AddIndexerID(i int) *HistoryUpdate {
hu.mutation.AddIndexerID(i)
return hu
}
// ClearIndexerID clears the value of the "indexer_id" field.
func (hu *HistoryUpdate) ClearIndexerID() *HistoryUpdate {
hu.mutation.ClearIndexerID()
return hu
}
// SetStatus sets the "status" field.
func (hu *HistoryUpdate) SetStatus(h history.Status) *HistoryUpdate {
hu.mutation.SetStatus(h)
@@ -257,6 +311,24 @@ func (hu *HistoryUpdate) sqlSave(ctx context.Context) (n int, err error) {
if value, ok := hu.mutation.AddedSize(); ok {
_spec.AddField(history.FieldSize, field.TypeInt, value)
}
if value, ok := hu.mutation.DownloadClientID(); ok {
_spec.SetField(history.FieldDownloadClientID, field.TypeInt, value)
}
if value, ok := hu.mutation.AddedDownloadClientID(); ok {
_spec.AddField(history.FieldDownloadClientID, field.TypeInt, value)
}
if hu.mutation.DownloadClientIDCleared() {
_spec.ClearField(history.FieldDownloadClientID, field.TypeInt)
}
if value, ok := hu.mutation.IndexerID(); ok {
_spec.SetField(history.FieldIndexerID, field.TypeInt, value)
}
if value, ok := hu.mutation.AddedIndexerID(); ok {
_spec.AddField(history.FieldIndexerID, field.TypeInt, value)
}
if hu.mutation.IndexerIDCleared() {
_spec.ClearField(history.FieldIndexerID, field.TypeInt)
}
if value, ok := hu.mutation.Status(); ok {
_spec.SetField(history.FieldStatus, field.TypeEnum, value)
}
@@ -397,6 +469,60 @@ func (huo *HistoryUpdateOne) AddSize(i int) *HistoryUpdateOne {
return huo
}
// SetDownloadClientID sets the "download_client_id" field.
func (huo *HistoryUpdateOne) SetDownloadClientID(i int) *HistoryUpdateOne {
huo.mutation.ResetDownloadClientID()
huo.mutation.SetDownloadClientID(i)
return huo
}
// SetNillableDownloadClientID sets the "download_client_id" field if the given value is not nil.
func (huo *HistoryUpdateOne) SetNillableDownloadClientID(i *int) *HistoryUpdateOne {
if i != nil {
huo.SetDownloadClientID(*i)
}
return huo
}
// AddDownloadClientID adds i to the "download_client_id" field.
func (huo *HistoryUpdateOne) AddDownloadClientID(i int) *HistoryUpdateOne {
huo.mutation.AddDownloadClientID(i)
return huo
}
// ClearDownloadClientID clears the value of the "download_client_id" field.
func (huo *HistoryUpdateOne) ClearDownloadClientID() *HistoryUpdateOne {
huo.mutation.ClearDownloadClientID()
return huo
}
// SetIndexerID sets the "indexer_id" field.
func (huo *HistoryUpdateOne) SetIndexerID(i int) *HistoryUpdateOne {
huo.mutation.ResetIndexerID()
huo.mutation.SetIndexerID(i)
return huo
}
// SetNillableIndexerID sets the "indexer_id" field if the given value is not nil.
func (huo *HistoryUpdateOne) SetNillableIndexerID(i *int) *HistoryUpdateOne {
if i != nil {
huo.SetIndexerID(*i)
}
return huo
}
// AddIndexerID adds i to the "indexer_id" field.
func (huo *HistoryUpdateOne) AddIndexerID(i int) *HistoryUpdateOne {
huo.mutation.AddIndexerID(i)
return huo
}
// ClearIndexerID clears the value of the "indexer_id" field.
func (huo *HistoryUpdateOne) ClearIndexerID() *HistoryUpdateOne {
huo.mutation.ClearIndexerID()
return huo
}
// SetStatus sets the "status" field.
func (huo *HistoryUpdateOne) SetStatus(h history.Status) *HistoryUpdateOne {
huo.mutation.SetStatus(h)
@@ -545,6 +671,24 @@ func (huo *HistoryUpdateOne) sqlSave(ctx context.Context) (_node *History, err e
if value, ok := huo.mutation.AddedSize(); ok {
_spec.AddField(history.FieldSize, field.TypeInt, value)
}
if value, ok := huo.mutation.DownloadClientID(); ok {
_spec.SetField(history.FieldDownloadClientID, field.TypeInt, value)
}
if value, ok := huo.mutation.AddedDownloadClientID(); ok {
_spec.AddField(history.FieldDownloadClientID, field.TypeInt, value)
}
if huo.mutation.DownloadClientIDCleared() {
_spec.ClearField(history.FieldDownloadClientID, field.TypeInt)
}
if value, ok := huo.mutation.IndexerID(); ok {
_spec.SetField(history.FieldIndexerID, field.TypeInt, value)
}
if value, ok := huo.mutation.AddedIndexerID(); ok {
_spec.AddField(history.FieldIndexerID, field.TypeInt, value)
}
if huo.mutation.IndexerIDCleared() {
_spec.ClearField(history.FieldIndexerID, field.TypeInt)
}
if value, ok := huo.mutation.Status(); ok {
_spec.SetField(history.FieldStatus, field.TypeEnum, value)
}
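The update builders mirror the create path and additionally expose Clear* methods that write the columns back to NULL. A sketch of how a single row might be detached from its client and marked with the new seeding status (the helper is an assumed example, not part of the diff):

// markSeeding clears the client/indexer association of one history row and
// flips its status to the new "seeding" value.
func markSeeding(ctx context.Context, db *ent.Client, historyID int) error {
	return db.History.UpdateOneID(historyID).
		ClearDownloadClientID().
		ClearIndexerID().
		SetStatus(history.StatusSeeding).
		Exec(ctx)
}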

View File

@@ -44,6 +44,18 @@ func (f HistoryFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, err
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.HistoryMutation", m)
}
// The ImportListFunc type is an adapter that allows the use of an ordinary
// function as an ImportList mutator.
type ImportListFunc func(context.Context, *ent.ImportListMutation) (ent.Value, error)
// Mutate calls f(ctx, m).
func (f ImportListFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
if mv, ok := m.(*ent.ImportListMutation); ok {
return f(ctx, mv)
}
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.ImportListMutation", m)
}
// The IndexersFunc type is an adapter that allows the use of an ordinary
// function as an Indexers mutator.
type IndexersFunc func(context.Context, *ent.IndexersMutation) (ent.Value, error)
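The adapter lets a plain function run as a mutation hook for the new entity. A sketch of registering a logging hook for ImportList creations, assuming the standard generated helpers hook.On and ent.OpCreate, a *ent.Client named client, and the context/log imports; none of this is part of the diff itself:

client.ImportList.Use(
	hook.On(
		func(next ent.Mutator) ent.Mutator {
			return hook.ImportListFunc(func(ctx context.Context, m *ent.ImportListMutation) (ent.Value, error) {
				if name, ok := m.Name(); ok {
					log.Printf("creating import list %q", name) // hypothetical logging
				}
				return next.Mutate(ctx, m)
			})
		},
		ent.OpCreate,
	),
)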

164
ent/importlist.go Normal file
View File

@@ -0,0 +1,164 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"encoding/json"
"fmt"
"polaris/ent/importlist"
"polaris/ent/schema"
"strings"
"entgo.io/ent"
"entgo.io/ent/dialect/sql"
)
// ImportList is the model entity for the ImportList schema.
type ImportList struct {
config `json:"-"`
// ID of the ent.
ID int `json:"id,omitempty"`
// Name holds the value of the "name" field.
Name string `json:"name,omitempty"`
// Type holds the value of the "type" field.
Type importlist.Type `json:"type,omitempty"`
// URL holds the value of the "url" field.
URL string `json:"url,omitempty"`
// Qulity holds the value of the "qulity" field.
Qulity string `json:"qulity,omitempty"`
// StorageID holds the value of the "storage_id" field.
StorageID int `json:"storage_id,omitempty"`
// Settings holds the value of the "settings" field.
Settings schema.ImportListSettings `json:"settings,omitempty"`
selectValues sql.SelectValues
}
// scanValues returns the types for scanning values from sql.Rows.
func (*ImportList) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case importlist.FieldSettings:
values[i] = new([]byte)
case importlist.FieldID, importlist.FieldStorageID:
values[i] = new(sql.NullInt64)
case importlist.FieldName, importlist.FieldType, importlist.FieldURL, importlist.FieldQulity:
values[i] = new(sql.NullString)
default:
values[i] = new(sql.UnknownType)
}
}
return values, nil
}
// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the ImportList fields.
func (il *ImportList) assignValues(columns []string, values []any) error {
if m, n := len(values), len(columns); m < n {
return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
}
for i := range columns {
switch columns[i] {
case importlist.FieldID:
value, ok := values[i].(*sql.NullInt64)
if !ok {
return fmt.Errorf("unexpected type %T for field id", value)
}
il.ID = int(value.Int64)
case importlist.FieldName:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field name", values[i])
} else if value.Valid {
il.Name = value.String
}
case importlist.FieldType:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field type", values[i])
} else if value.Valid {
il.Type = importlist.Type(value.String)
}
case importlist.FieldURL:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field url", values[i])
} else if value.Valid {
il.URL = value.String
}
case importlist.FieldQulity:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field qulity", values[i])
} else if value.Valid {
il.Qulity = value.String
}
case importlist.FieldStorageID:
if value, ok := values[i].(*sql.NullInt64); !ok {
return fmt.Errorf("unexpected type %T for field storage_id", values[i])
} else if value.Valid {
il.StorageID = int(value.Int64)
}
case importlist.FieldSettings:
if value, ok := values[i].(*[]byte); !ok {
return fmt.Errorf("unexpected type %T for field settings", values[i])
} else if value != nil && len(*value) > 0 {
if err := json.Unmarshal(*value, &il.Settings); err != nil {
return fmt.Errorf("unmarshal field settings: %w", err)
}
}
default:
il.selectValues.Set(columns[i], values[i])
}
}
return nil
}
// Value returns the ent.Value that was dynamically selected and assigned to the ImportList.
// This includes values selected through modifiers, order, etc.
func (il *ImportList) Value(name string) (ent.Value, error) {
return il.selectValues.Get(name)
}
// Update returns a builder for updating this ImportList.
// Note that you need to call ImportList.Unwrap() before calling this method if this ImportList
// was returned from a transaction, and the transaction was committed or rolled back.
func (il *ImportList) Update() *ImportListUpdateOne {
return NewImportListClient(il.config).UpdateOne(il)
}
// Unwrap unwraps the ImportList entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
func (il *ImportList) Unwrap() *ImportList {
_tx, ok := il.config.driver.(*txDriver)
if !ok {
panic("ent: ImportList is not a transactional entity")
}
il.config.driver = _tx.drv
return il
}
// String implements the fmt.Stringer.
func (il *ImportList) String() string {
var builder strings.Builder
builder.WriteString("ImportList(")
builder.WriteString(fmt.Sprintf("id=%v, ", il.ID))
builder.WriteString("name=")
builder.WriteString(il.Name)
builder.WriteString(", ")
builder.WriteString("type=")
builder.WriteString(fmt.Sprintf("%v", il.Type))
builder.WriteString(", ")
builder.WriteString("url=")
builder.WriteString(il.URL)
builder.WriteString(", ")
builder.WriteString("qulity=")
builder.WriteString(il.Qulity)
builder.WriteString(", ")
builder.WriteString("storage_id=")
builder.WriteString(fmt.Sprintf("%v", il.StorageID))
builder.WriteString(", ")
builder.WriteString("settings=")
builder.WriteString(fmt.Sprintf("%v", il.Settings))
builder.WriteByte(')')
return builder.String()
}
// ImportLists is a parsable slice of ImportList.
type ImportLists []*ImportList

View File

@@ -0,0 +1,107 @@
// Code generated by ent, DO NOT EDIT.
package importlist
import (
"fmt"
"entgo.io/ent/dialect/sql"
)
const (
// Label holds the string label denoting the importlist type in the database.
Label = "import_list"
// FieldID holds the string denoting the id field in the database.
FieldID = "id"
// FieldName holds the string denoting the name field in the database.
FieldName = "name"
// FieldType holds the string denoting the type field in the database.
FieldType = "type"
// FieldURL holds the string denoting the url field in the database.
FieldURL = "url"
// FieldQulity holds the string denoting the qulity field in the database.
FieldQulity = "qulity"
// FieldStorageID holds the string denoting the storage_id field in the database.
FieldStorageID = "storage_id"
// FieldSettings holds the string denoting the settings field in the database.
FieldSettings = "settings"
// Table holds the table name of the importlist in the database.
Table = "import_lists"
)
// Columns holds all SQL columns for importlist fields.
var Columns = []string{
FieldID,
FieldName,
FieldType,
FieldURL,
FieldQulity,
FieldStorageID,
FieldSettings,
}
// ValidColumn reports if the column name is valid (part of the table columns).
func ValidColumn(column string) bool {
for i := range Columns {
if column == Columns[i] {
return true
}
}
return false
}
// Type defines the type for the "type" enum field.
type Type string
// Type values.
const (
TypePlex Type = "plex"
TypeDoulist Type = "doulist"
)
func (_type Type) String() string {
return string(_type)
}
// TypeValidator is a validator for the "type" field enum values. It is called by the builders before save.
func TypeValidator(_type Type) error {
switch _type {
case TypePlex, TypeDoulist:
return nil
default:
return fmt.Errorf("importlist: invalid enum value for type field: %q", _type)
}
}
// OrderOption defines the ordering options for the ImportList queries.
type OrderOption func(*sql.Selector)
// ByID orders the results by the id field.
func ByID(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldID, opts...).ToFunc()
}
// ByName orders the results by the name field.
func ByName(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldName, opts...).ToFunc()
}
// ByType orders the results by the type field.
func ByType(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldType, opts...).ToFunc()
}
// ByURL orders the results by the url field.
func ByURL(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldURL, opts...).ToFunc()
}
// ByQulity orders the results by the qulity field.
func ByQulity(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldQulity, opts...).ToFunc()
}
// ByStorageID orders the results by the storage_id field.
func ByStorageID(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldStorageID, opts...).ToFunc()
}

364
ent/importlist/where.go Normal file
View File

@@ -0,0 +1,364 @@
// Code generated by ent, DO NOT EDIT.
package importlist
import (
"polaris/ent/predicate"
"entgo.io/ent/dialect/sql"
)
// ID filters vertices based on their ID field.
func ID(id int) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldID, id))
}
// IDEQ applies the EQ predicate on the ID field.
func IDEQ(id int) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldID, id))
}
// IDNEQ applies the NEQ predicate on the ID field.
func IDNEQ(id int) predicate.ImportList {
return predicate.ImportList(sql.FieldNEQ(FieldID, id))
}
// IDIn applies the In predicate on the ID field.
func IDIn(ids ...int) predicate.ImportList {
return predicate.ImportList(sql.FieldIn(FieldID, ids...))
}
// IDNotIn applies the NotIn predicate on the ID field.
func IDNotIn(ids ...int) predicate.ImportList {
return predicate.ImportList(sql.FieldNotIn(FieldID, ids...))
}
// IDGT applies the GT predicate on the ID field.
func IDGT(id int) predicate.ImportList {
return predicate.ImportList(sql.FieldGT(FieldID, id))
}
// IDGTE applies the GTE predicate on the ID field.
func IDGTE(id int) predicate.ImportList {
return predicate.ImportList(sql.FieldGTE(FieldID, id))
}
// IDLT applies the LT predicate on the ID field.
func IDLT(id int) predicate.ImportList {
return predicate.ImportList(sql.FieldLT(FieldID, id))
}
// IDLTE applies the LTE predicate on the ID field.
func IDLTE(id int) predicate.ImportList {
return predicate.ImportList(sql.FieldLTE(FieldID, id))
}
// Name applies equality check predicate on the "name" field. It's identical to NameEQ.
func Name(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldName, v))
}
// URL applies equality check predicate on the "url" field. It's identical to URLEQ.
func URL(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldURL, v))
}
// Qulity applies equality check predicate on the "qulity" field. It's identical to QulityEQ.
func Qulity(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldQulity, v))
}
// StorageID applies equality check predicate on the "storage_id" field. It's identical to StorageIDEQ.
func StorageID(v int) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldStorageID, v))
}
// NameEQ applies the EQ predicate on the "name" field.
func NameEQ(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldName, v))
}
// NameNEQ applies the NEQ predicate on the "name" field.
func NameNEQ(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldNEQ(FieldName, v))
}
// NameIn applies the In predicate on the "name" field.
func NameIn(vs ...string) predicate.ImportList {
return predicate.ImportList(sql.FieldIn(FieldName, vs...))
}
// NameNotIn applies the NotIn predicate on the "name" field.
func NameNotIn(vs ...string) predicate.ImportList {
return predicate.ImportList(sql.FieldNotIn(FieldName, vs...))
}
// NameGT applies the GT predicate on the "name" field.
func NameGT(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldGT(FieldName, v))
}
// NameGTE applies the GTE predicate on the "name" field.
func NameGTE(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldGTE(FieldName, v))
}
// NameLT applies the LT predicate on the "name" field.
func NameLT(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldLT(FieldName, v))
}
// NameLTE applies the LTE predicate on the "name" field.
func NameLTE(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldLTE(FieldName, v))
}
// NameContains applies the Contains predicate on the "name" field.
func NameContains(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldContains(FieldName, v))
}
// NameHasPrefix applies the HasPrefix predicate on the "name" field.
func NameHasPrefix(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldHasPrefix(FieldName, v))
}
// NameHasSuffix applies the HasSuffix predicate on the "name" field.
func NameHasSuffix(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldHasSuffix(FieldName, v))
}
// NameEqualFold applies the EqualFold predicate on the "name" field.
func NameEqualFold(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEqualFold(FieldName, v))
}
// NameContainsFold applies the ContainsFold predicate on the "name" field.
func NameContainsFold(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldContainsFold(FieldName, v))
}
// TypeEQ applies the EQ predicate on the "type" field.
func TypeEQ(v Type) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldType, v))
}
// TypeNEQ applies the NEQ predicate on the "type" field.
func TypeNEQ(v Type) predicate.ImportList {
return predicate.ImportList(sql.FieldNEQ(FieldType, v))
}
// TypeIn applies the In predicate on the "type" field.
func TypeIn(vs ...Type) predicate.ImportList {
return predicate.ImportList(sql.FieldIn(FieldType, vs...))
}
// TypeNotIn applies the NotIn predicate on the "type" field.
func TypeNotIn(vs ...Type) predicate.ImportList {
return predicate.ImportList(sql.FieldNotIn(FieldType, vs...))
}
// URLEQ applies the EQ predicate on the "url" field.
func URLEQ(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldURL, v))
}
// URLNEQ applies the NEQ predicate on the "url" field.
func URLNEQ(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldNEQ(FieldURL, v))
}
// URLIn applies the In predicate on the "url" field.
func URLIn(vs ...string) predicate.ImportList {
return predicate.ImportList(sql.FieldIn(FieldURL, vs...))
}
// URLNotIn applies the NotIn predicate on the "url" field.
func URLNotIn(vs ...string) predicate.ImportList {
return predicate.ImportList(sql.FieldNotIn(FieldURL, vs...))
}
// URLGT applies the GT predicate on the "url" field.
func URLGT(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldGT(FieldURL, v))
}
// URLGTE applies the GTE predicate on the "url" field.
func URLGTE(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldGTE(FieldURL, v))
}
// URLLT applies the LT predicate on the "url" field.
func URLLT(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldLT(FieldURL, v))
}
// URLLTE applies the LTE predicate on the "url" field.
func URLLTE(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldLTE(FieldURL, v))
}
// URLContains applies the Contains predicate on the "url" field.
func URLContains(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldContains(FieldURL, v))
}
// URLHasPrefix applies the HasPrefix predicate on the "url" field.
func URLHasPrefix(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldHasPrefix(FieldURL, v))
}
// URLHasSuffix applies the HasSuffix predicate on the "url" field.
func URLHasSuffix(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldHasSuffix(FieldURL, v))
}
// URLIsNil applies the IsNil predicate on the "url" field.
func URLIsNil() predicate.ImportList {
return predicate.ImportList(sql.FieldIsNull(FieldURL))
}
// URLNotNil applies the NotNil predicate on the "url" field.
func URLNotNil() predicate.ImportList {
return predicate.ImportList(sql.FieldNotNull(FieldURL))
}
// URLEqualFold applies the EqualFold predicate on the "url" field.
func URLEqualFold(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEqualFold(FieldURL, v))
}
// URLContainsFold applies the ContainsFold predicate on the "url" field.
func URLContainsFold(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldContainsFold(FieldURL, v))
}
// QulityEQ applies the EQ predicate on the "qulity" field.
func QulityEQ(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldQulity, v))
}
// QulityNEQ applies the NEQ predicate on the "qulity" field.
func QulityNEQ(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldNEQ(FieldQulity, v))
}
// QulityIn applies the In predicate on the "qulity" field.
func QulityIn(vs ...string) predicate.ImportList {
return predicate.ImportList(sql.FieldIn(FieldQulity, vs...))
}
// QulityNotIn applies the NotIn predicate on the "qulity" field.
func QulityNotIn(vs ...string) predicate.ImportList {
return predicate.ImportList(sql.FieldNotIn(FieldQulity, vs...))
}
// QulityGT applies the GT predicate on the "qulity" field.
func QulityGT(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldGT(FieldQulity, v))
}
// QulityGTE applies the GTE predicate on the "qulity" field.
func QulityGTE(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldGTE(FieldQulity, v))
}
// QulityLT applies the LT predicate on the "qulity" field.
func QulityLT(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldLT(FieldQulity, v))
}
// QulityLTE applies the LTE predicate on the "qulity" field.
func QulityLTE(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldLTE(FieldQulity, v))
}
// QulityContains applies the Contains predicate on the "qulity" field.
func QulityContains(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldContains(FieldQulity, v))
}
// QulityHasPrefix applies the HasPrefix predicate on the "qulity" field.
func QulityHasPrefix(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldHasPrefix(FieldQulity, v))
}
// QulityHasSuffix applies the HasSuffix predicate on the "qulity" field.
func QulityHasSuffix(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldHasSuffix(FieldQulity, v))
}
// QulityEqualFold applies the EqualFold predicate on the "qulity" field.
func QulityEqualFold(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEqualFold(FieldQulity, v))
}
// QulityContainsFold applies the ContainsFold predicate on the "qulity" field.
func QulityContainsFold(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldContainsFold(FieldQulity, v))
}
// StorageIDEQ applies the EQ predicate on the "storage_id" field.
func StorageIDEQ(v int) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldStorageID, v))
}
// StorageIDNEQ applies the NEQ predicate on the "storage_id" field.
func StorageIDNEQ(v int) predicate.ImportList {
return predicate.ImportList(sql.FieldNEQ(FieldStorageID, v))
}
// StorageIDIn applies the In predicate on the "storage_id" field.
func StorageIDIn(vs ...int) predicate.ImportList {
return predicate.ImportList(sql.FieldIn(FieldStorageID, vs...))
}
// StorageIDNotIn applies the NotIn predicate on the "storage_id" field.
func StorageIDNotIn(vs ...int) predicate.ImportList {
return predicate.ImportList(sql.FieldNotIn(FieldStorageID, vs...))
}
// StorageIDGT applies the GT predicate on the "storage_id" field.
func StorageIDGT(v int) predicate.ImportList {
return predicate.ImportList(sql.FieldGT(FieldStorageID, v))
}
// StorageIDGTE applies the GTE predicate on the "storage_id" field.
func StorageIDGTE(v int) predicate.ImportList {
return predicate.ImportList(sql.FieldGTE(FieldStorageID, v))
}
// StorageIDLT applies the LT predicate on the "storage_id" field.
func StorageIDLT(v int) predicate.ImportList {
return predicate.ImportList(sql.FieldLT(FieldStorageID, v))
}
// StorageIDLTE applies the LTE predicate on the "storage_id" field.
func StorageIDLTE(v int) predicate.ImportList {
return predicate.ImportList(sql.FieldLTE(FieldStorageID, v))
}
// SettingsIsNil applies the IsNil predicate on the "settings" field.
func SettingsIsNil() predicate.ImportList {
return predicate.ImportList(sql.FieldIsNull(FieldSettings))
}
// SettingsNotNil applies the NotNil predicate on the "settings" field.
func SettingsNotNil() predicate.ImportList {
return predicate.ImportList(sql.FieldNotNull(FieldSettings))
}
// And groups predicates with the AND operator between them.
func And(predicates ...predicate.ImportList) predicate.ImportList {
return predicate.ImportList(sql.AndPredicates(predicates...))
}
// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.ImportList) predicate.ImportList {
return predicate.ImportList(sql.OrPredicates(predicates...))
}
// Not applies the not operator on the given predicate.
func Not(p predicate.ImportList) predicate.ImportList {
return predicate.ImportList(sql.NotPredicates(p))
}

264
ent/importlist_create.go Normal file
View File

@@ -0,0 +1,264 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"context"
"errors"
"fmt"
"polaris/ent/importlist"
"polaris/ent/schema"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
)
// ImportListCreate is the builder for creating an ImportList entity.
type ImportListCreate struct {
config
mutation *ImportListMutation
hooks []Hook
}
// SetName sets the "name" field.
func (ilc *ImportListCreate) SetName(s string) *ImportListCreate {
ilc.mutation.SetName(s)
return ilc
}
// SetType sets the "type" field.
func (ilc *ImportListCreate) SetType(i importlist.Type) *ImportListCreate {
ilc.mutation.SetType(i)
return ilc
}
// SetURL sets the "url" field.
func (ilc *ImportListCreate) SetURL(s string) *ImportListCreate {
ilc.mutation.SetURL(s)
return ilc
}
// SetNillableURL sets the "url" field if the given value is not nil.
func (ilc *ImportListCreate) SetNillableURL(s *string) *ImportListCreate {
if s != nil {
ilc.SetURL(*s)
}
return ilc
}
// SetQulity sets the "qulity" field.
func (ilc *ImportListCreate) SetQulity(s string) *ImportListCreate {
ilc.mutation.SetQulity(s)
return ilc
}
// SetStorageID sets the "storage_id" field.
func (ilc *ImportListCreate) SetStorageID(i int) *ImportListCreate {
ilc.mutation.SetStorageID(i)
return ilc
}
// SetSettings sets the "settings" field.
func (ilc *ImportListCreate) SetSettings(sls schema.ImportListSettings) *ImportListCreate {
ilc.mutation.SetSettings(sls)
return ilc
}
// SetNillableSettings sets the "settings" field if the given value is not nil.
func (ilc *ImportListCreate) SetNillableSettings(sls *schema.ImportListSettings) *ImportListCreate {
if sls != nil {
ilc.SetSettings(*sls)
}
return ilc
}
// Mutation returns the ImportListMutation object of the builder.
func (ilc *ImportListCreate) Mutation() *ImportListMutation {
return ilc.mutation
}
// Save creates the ImportList in the database.
func (ilc *ImportListCreate) Save(ctx context.Context) (*ImportList, error) {
return withHooks(ctx, ilc.sqlSave, ilc.mutation, ilc.hooks)
}
// SaveX calls Save and panics if Save returns an error.
func (ilc *ImportListCreate) SaveX(ctx context.Context) *ImportList {
v, err := ilc.Save(ctx)
if err != nil {
panic(err)
}
return v
}
// Exec executes the query.
func (ilc *ImportListCreate) Exec(ctx context.Context) error {
_, err := ilc.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (ilc *ImportListCreate) ExecX(ctx context.Context) {
if err := ilc.Exec(ctx); err != nil {
panic(err)
}
}
// check runs all checks and user-defined validators on the builder.
func (ilc *ImportListCreate) check() error {
if _, ok := ilc.mutation.Name(); !ok {
return &ValidationError{Name: "name", err: errors.New(`ent: missing required field "ImportList.name"`)}
}
if _, ok := ilc.mutation.GetType(); !ok {
return &ValidationError{Name: "type", err: errors.New(`ent: missing required field "ImportList.type"`)}
}
if v, ok := ilc.mutation.GetType(); ok {
if err := importlist.TypeValidator(v); err != nil {
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "ImportList.type": %w`, err)}
}
}
if _, ok := ilc.mutation.Qulity(); !ok {
return &ValidationError{Name: "qulity", err: errors.New(`ent: missing required field "ImportList.qulity"`)}
}
if _, ok := ilc.mutation.StorageID(); !ok {
return &ValidationError{Name: "storage_id", err: errors.New(`ent: missing required field "ImportList.storage_id"`)}
}
return nil
}
func (ilc *ImportListCreate) sqlSave(ctx context.Context) (*ImportList, error) {
if err := ilc.check(); err != nil {
return nil, err
}
_node, _spec := ilc.createSpec()
if err := sqlgraph.CreateNode(ctx, ilc.driver, _spec); err != nil {
if sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
return nil, err
}
id := _spec.ID.Value.(int64)
_node.ID = int(id)
ilc.mutation.id = &_node.ID
ilc.mutation.done = true
return _node, nil
}
func (ilc *ImportListCreate) createSpec() (*ImportList, *sqlgraph.CreateSpec) {
var (
_node = &ImportList{config: ilc.config}
_spec = sqlgraph.NewCreateSpec(importlist.Table, sqlgraph.NewFieldSpec(importlist.FieldID, field.TypeInt))
)
if value, ok := ilc.mutation.Name(); ok {
_spec.SetField(importlist.FieldName, field.TypeString, value)
_node.Name = value
}
if value, ok := ilc.mutation.GetType(); ok {
_spec.SetField(importlist.FieldType, field.TypeEnum, value)
_node.Type = value
}
if value, ok := ilc.mutation.URL(); ok {
_spec.SetField(importlist.FieldURL, field.TypeString, value)
_node.URL = value
}
if value, ok := ilc.mutation.Qulity(); ok {
_spec.SetField(importlist.FieldQulity, field.TypeString, value)
_node.Qulity = value
}
if value, ok := ilc.mutation.StorageID(); ok {
_spec.SetField(importlist.FieldStorageID, field.TypeInt, value)
_node.StorageID = value
}
if value, ok := ilc.mutation.Settings(); ok {
_spec.SetField(importlist.FieldSettings, field.TypeJSON, value)
_node.Settings = value
}
return _node, _spec
}
// ImportListCreateBulk is the builder for creating many ImportList entities in bulk.
type ImportListCreateBulk struct {
config
err error
builders []*ImportListCreate
}
// Save creates the ImportList entities in the database.
func (ilcb *ImportListCreateBulk) Save(ctx context.Context) ([]*ImportList, error) {
if ilcb.err != nil {
return nil, ilcb.err
}
specs := make([]*sqlgraph.CreateSpec, len(ilcb.builders))
nodes := make([]*ImportList, len(ilcb.builders))
mutators := make([]Mutator, len(ilcb.builders))
for i := range ilcb.builders {
func(i int, root context.Context) {
builder := ilcb.builders[i]
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
mutation, ok := m.(*ImportListMutation)
if !ok {
return nil, fmt.Errorf("unexpected mutation type %T", m)
}
if err := builder.check(); err != nil {
return nil, err
}
builder.mutation = mutation
var err error
nodes[i], specs[i] = builder.createSpec()
if i < len(mutators)-1 {
_, err = mutators[i+1].Mutate(root, ilcb.builders[i+1].mutation)
} else {
spec := &sqlgraph.BatchCreateSpec{Nodes: specs}
// Invoke the actual operation on the latest mutation in the chain.
if err = sqlgraph.BatchCreate(ctx, ilcb.driver, spec); err != nil {
if sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
}
}
if err != nil {
return nil, err
}
mutation.id = &nodes[i].ID
if specs[i].ID.Value != nil {
id := specs[i].ID.Value.(int64)
nodes[i].ID = int(id)
}
mutation.done = true
return nodes[i], nil
})
for i := len(builder.hooks) - 1; i >= 0; i-- {
mut = builder.hooks[i](mut)
}
mutators[i] = mut
}(i, ctx)
}
if len(mutators) > 0 {
if _, err := mutators[0].Mutate(ctx, ilcb.builders[0].mutation); err != nil {
return nil, err
}
}
return nodes, nil
}
// SaveX is like Save, but panics if an error occurs.
func (ilcb *ImportListCreateBulk) SaveX(ctx context.Context) []*ImportList {
v, err := ilcb.Save(ctx)
if err != nil {
panic(err)
}
return v
}
// Exec executes the query.
func (ilcb *ImportListCreateBulk) Exec(ctx context.Context) error {
_, err := ilcb.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (ilcb *ImportListCreateBulk) ExecX(ctx context.Context) {
if err := ilcb.Exec(ctx); err != nil {
panic(err)
}
}
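The required fields mirror the checks in check(): name, type, qulity and storage_id must be set, while url and settings are optional. A hedged creation sketch; the helper name and all values below are illustrative only:

// addDoulist stores a hypothetical Douban import list.
func addDoulist(ctx context.Context, db *ent.Client) (*ent.ImportList, error) {
	return db.ImportList.Create().
		SetName("douban-watchlist").
		SetType(importlist.TypeDoulist).
		SetURL("https://example.com/doulist/123"). // optional
		SetQulity("1080p").
		SetStorageID(1).
		Save(ctx)
}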

88
ent/importlist_delete.go Normal file
View File

@@ -0,0 +1,88 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"context"
"polaris/ent/importlist"
"polaris/ent/predicate"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
)
// ImportListDelete is the builder for deleting an ImportList entity.
type ImportListDelete struct {
config
hooks []Hook
mutation *ImportListMutation
}
// Where appends a list of predicates to the ImportListDelete builder.
func (ild *ImportListDelete) Where(ps ...predicate.ImportList) *ImportListDelete {
ild.mutation.Where(ps...)
return ild
}
// Exec executes the deletion query and returns how many vertices were deleted.
func (ild *ImportListDelete) Exec(ctx context.Context) (int, error) {
return withHooks(ctx, ild.sqlExec, ild.mutation, ild.hooks)
}
// ExecX is like Exec, but panics if an error occurs.
func (ild *ImportListDelete) ExecX(ctx context.Context) int {
n, err := ild.Exec(ctx)
if err != nil {
panic(err)
}
return n
}
func (ild *ImportListDelete) sqlExec(ctx context.Context) (int, error) {
_spec := sqlgraph.NewDeleteSpec(importlist.Table, sqlgraph.NewFieldSpec(importlist.FieldID, field.TypeInt))
if ps := ild.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
affected, err := sqlgraph.DeleteNodes(ctx, ild.driver, _spec)
if err != nil && sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
ild.mutation.done = true
return affected, err
}
// ImportListDeleteOne is the builder for deleting a single ImportList entity.
type ImportListDeleteOne struct {
ild *ImportListDelete
}
// Where appends a list of predicates to the ImportListDelete builder.
func (ildo *ImportListDeleteOne) Where(ps ...predicate.ImportList) *ImportListDeleteOne {
ildo.ild.mutation.Where(ps...)
return ildo
}
// Exec executes the deletion query.
func (ildo *ImportListDeleteOne) Exec(ctx context.Context) error {
n, err := ildo.ild.Exec(ctx)
switch {
case err != nil:
return err
case n == 0:
return &NotFoundError{importlist.Label}
default:
return nil
}
}
// ExecX is like Exec, but panics if an error occurs.
func (ildo *ImportListDeleteOne) ExecX(ctx context.Context) {
if err := ildo.Exec(ctx); err != nil {
panic(err)
}
}

526
ent/importlist_query.go Normal file
View File

@@ -0,0 +1,526 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"context"
"fmt"
"math"
"polaris/ent/importlist"
"polaris/ent/predicate"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
)
// ImportListQuery is the builder for querying ImportList entities.
type ImportListQuery struct {
config
ctx *QueryContext
order []importlist.OrderOption
inters []Interceptor
predicates []predicate.ImportList
// intermediate query (i.e. traversal path).
sql *sql.Selector
path func(context.Context) (*sql.Selector, error)
}
// Where adds a new predicate for the ImportListQuery builder.
func (ilq *ImportListQuery) Where(ps ...predicate.ImportList) *ImportListQuery {
ilq.predicates = append(ilq.predicates, ps...)
return ilq
}
// Limit the number of records to be returned by this query.
func (ilq *ImportListQuery) Limit(limit int) *ImportListQuery {
ilq.ctx.Limit = &limit
return ilq
}
// Offset to start from.
func (ilq *ImportListQuery) Offset(offset int) *ImportListQuery {
ilq.ctx.Offset = &offset
return ilq
}
// Unique configures the query builder to filter duplicate records on query.
// By default, unique is set to true, and can be disabled using this method.
func (ilq *ImportListQuery) Unique(unique bool) *ImportListQuery {
ilq.ctx.Unique = &unique
return ilq
}
// Order specifies how the records should be ordered.
func (ilq *ImportListQuery) Order(o ...importlist.OrderOption) *ImportListQuery {
ilq.order = append(ilq.order, o...)
return ilq
}
// First returns the first ImportList entity from the query.
// Returns a *NotFoundError when no ImportList was found.
func (ilq *ImportListQuery) First(ctx context.Context) (*ImportList, error) {
nodes, err := ilq.Limit(1).All(setContextOp(ctx, ilq.ctx, "First"))
if err != nil {
return nil, err
}
if len(nodes) == 0 {
return nil, &NotFoundError{importlist.Label}
}
return nodes[0], nil
}
// FirstX is like First, but panics if an error occurs.
func (ilq *ImportListQuery) FirstX(ctx context.Context) *ImportList {
node, err := ilq.First(ctx)
if err != nil && !IsNotFound(err) {
panic(err)
}
return node
}
// FirstID returns the first ImportList ID from the query.
// Returns a *NotFoundError when no ImportList ID was found.
func (ilq *ImportListQuery) FirstID(ctx context.Context) (id int, err error) {
var ids []int
if ids, err = ilq.Limit(1).IDs(setContextOp(ctx, ilq.ctx, "FirstID")); err != nil {
return
}
if len(ids) == 0 {
err = &NotFoundError{importlist.Label}
return
}
return ids[0], nil
}
// FirstIDX is like FirstID, but panics if an error occurs.
func (ilq *ImportListQuery) FirstIDX(ctx context.Context) int {
id, err := ilq.FirstID(ctx)
if err != nil && !IsNotFound(err) {
panic(err)
}
return id
}
// Only returns a single ImportList entity found by the query, ensuring it only returns one.
// Returns a *NotSingularError when more than one ImportList entity is found.
// Returns a *NotFoundError when no ImportList entities are found.
func (ilq *ImportListQuery) Only(ctx context.Context) (*ImportList, error) {
nodes, err := ilq.Limit(2).All(setContextOp(ctx, ilq.ctx, "Only"))
if err != nil {
return nil, err
}
switch len(nodes) {
case 1:
return nodes[0], nil
case 0:
return nil, &NotFoundError{importlist.Label}
default:
return nil, &NotSingularError{importlist.Label}
}
}
// OnlyX is like Only, but panics if an error occurs.
func (ilq *ImportListQuery) OnlyX(ctx context.Context) *ImportList {
node, err := ilq.Only(ctx)
if err != nil {
panic(err)
}
return node
}
// OnlyID is like Only, but returns the only ImportList ID in the query.
// Returns a *NotSingularError when more than one ImportList ID is found.
// Returns a *NotFoundError when no entities are found.
func (ilq *ImportListQuery) OnlyID(ctx context.Context) (id int, err error) {
var ids []int
if ids, err = ilq.Limit(2).IDs(setContextOp(ctx, ilq.ctx, "OnlyID")); err != nil {
return
}
switch len(ids) {
case 1:
id = ids[0]
case 0:
err = &NotFoundError{importlist.Label}
default:
err = &NotSingularError{importlist.Label}
}
return
}
// OnlyIDX is like OnlyID, but panics if an error occurs.
func (ilq *ImportListQuery) OnlyIDX(ctx context.Context) int {
id, err := ilq.OnlyID(ctx)
if err != nil {
panic(err)
}
return id
}
// All executes the query and returns a list of ImportLists.
func (ilq *ImportListQuery) All(ctx context.Context) ([]*ImportList, error) {
ctx = setContextOp(ctx, ilq.ctx, "All")
if err := ilq.prepareQuery(ctx); err != nil {
return nil, err
}
qr := querierAll[[]*ImportList, *ImportListQuery]()
return withInterceptors[[]*ImportList](ctx, ilq, qr, ilq.inters)
}
// AllX is like All, but panics if an error occurs.
func (ilq *ImportListQuery) AllX(ctx context.Context) []*ImportList {
nodes, err := ilq.All(ctx)
if err != nil {
panic(err)
}
return nodes
}
// IDs executes the query and returns a list of ImportList IDs.
func (ilq *ImportListQuery) IDs(ctx context.Context) (ids []int, err error) {
if ilq.ctx.Unique == nil && ilq.path != nil {
ilq.Unique(true)
}
ctx = setContextOp(ctx, ilq.ctx, "IDs")
if err = ilq.Select(importlist.FieldID).Scan(ctx, &ids); err != nil {
return nil, err
}
return ids, nil
}
// IDsX is like IDs, but panics if an error occurs.
func (ilq *ImportListQuery) IDsX(ctx context.Context) []int {
ids, err := ilq.IDs(ctx)
if err != nil {
panic(err)
}
return ids
}
// Count returns the count of the given query.
func (ilq *ImportListQuery) Count(ctx context.Context) (int, error) {
ctx = setContextOp(ctx, ilq.ctx, "Count")
if err := ilq.prepareQuery(ctx); err != nil {
return 0, err
}
return withInterceptors[int](ctx, ilq, querierCount[*ImportListQuery](), ilq.inters)
}
// CountX is like Count, but panics if an error occurs.
func (ilq *ImportListQuery) CountX(ctx context.Context) int {
count, err := ilq.Count(ctx)
if err != nil {
panic(err)
}
return count
}
// Exist returns true if the query has elements in the graph.
func (ilq *ImportListQuery) Exist(ctx context.Context) (bool, error) {
ctx = setContextOp(ctx, ilq.ctx, "Exist")
switch _, err := ilq.FirstID(ctx); {
case IsNotFound(err):
return false, nil
case err != nil:
return false, fmt.Errorf("ent: check existence: %w", err)
default:
return true, nil
}
}
// ExistX is like Exist, but panics if an error occurs.
func (ilq *ImportListQuery) ExistX(ctx context.Context) bool {
exist, err := ilq.Exist(ctx)
if err != nil {
panic(err)
}
return exist
}
// Clone returns a duplicate of the ImportListQuery builder, including all associated steps. It can be
// used to prepare common query builders and use them differently after the clone is made.
func (ilq *ImportListQuery) Clone() *ImportListQuery {
if ilq == nil {
return nil
}
return &ImportListQuery{
config: ilq.config,
ctx: ilq.ctx.Clone(),
order: append([]importlist.OrderOption{}, ilq.order...),
inters: append([]Interceptor{}, ilq.inters...),
predicates: append([]predicate.ImportList{}, ilq.predicates...),
// clone intermediate query.
sql: ilq.sql.Clone(),
path: ilq.path,
}
}
// GroupBy is used to group vertices by one or more fields/columns.
// It is often used with aggregate functions, like: count, max, mean, min, sum.
//
// Example:
//
// var v []struct {
// Name string `json:"name,omitempty"`
// Count int `json:"count,omitempty"`
// }
//
// client.ImportList.Query().
// GroupBy(importlist.FieldName).
// Aggregate(ent.Count()).
// Scan(ctx, &v)
func (ilq *ImportListQuery) GroupBy(field string, fields ...string) *ImportListGroupBy {
ilq.ctx.Fields = append([]string{field}, fields...)
grbuild := &ImportListGroupBy{build: ilq}
grbuild.flds = &ilq.ctx.Fields
grbuild.label = importlist.Label
grbuild.scan = grbuild.Scan
return grbuild
}
// Select allows the selection of one or more fields/columns for the given query,
// instead of selecting all fields in the entity.
//
// Example:
//
// var v []struct {
// Name string `json:"name,omitempty"`
// }
//
// client.ImportList.Query().
// Select(importlist.FieldName).
// Scan(ctx, &v)
func (ilq *ImportListQuery) Select(fields ...string) *ImportListSelect {
ilq.ctx.Fields = append(ilq.ctx.Fields, fields...)
sbuild := &ImportListSelect{ImportListQuery: ilq}
sbuild.label = importlist.Label
sbuild.flds, sbuild.scan = &ilq.ctx.Fields, sbuild.Scan
return sbuild
}
// Aggregate returns an ImportListSelect configured with the given aggregations.
func (ilq *ImportListQuery) Aggregate(fns ...AggregateFunc) *ImportListSelect {
return ilq.Select().Aggregate(fns...)
}
func (ilq *ImportListQuery) prepareQuery(ctx context.Context) error {
for _, inter := range ilq.inters {
if inter == nil {
return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)")
}
if trv, ok := inter.(Traverser); ok {
if err := trv.Traverse(ctx, ilq); err != nil {
return err
}
}
}
for _, f := range ilq.ctx.Fields {
if !importlist.ValidColumn(f) {
return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
}
}
if ilq.path != nil {
prev, err := ilq.path(ctx)
if err != nil {
return err
}
ilq.sql = prev
}
return nil
}
func (ilq *ImportListQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*ImportList, error) {
var (
nodes = []*ImportList{}
_spec = ilq.querySpec()
)
_spec.ScanValues = func(columns []string) ([]any, error) {
return (*ImportList).scanValues(nil, columns)
}
_spec.Assign = func(columns []string, values []any) error {
node := &ImportList{config: ilq.config}
nodes = append(nodes, node)
return node.assignValues(columns, values)
}
for i := range hooks {
hooks[i](ctx, _spec)
}
if err := sqlgraph.QueryNodes(ctx, ilq.driver, _spec); err != nil {
return nil, err
}
if len(nodes) == 0 {
return nodes, nil
}
return nodes, nil
}
func (ilq *ImportListQuery) sqlCount(ctx context.Context) (int, error) {
_spec := ilq.querySpec()
_spec.Node.Columns = ilq.ctx.Fields
if len(ilq.ctx.Fields) > 0 {
_spec.Unique = ilq.ctx.Unique != nil && *ilq.ctx.Unique
}
return sqlgraph.CountNodes(ctx, ilq.driver, _spec)
}
func (ilq *ImportListQuery) querySpec() *sqlgraph.QuerySpec {
_spec := sqlgraph.NewQuerySpec(importlist.Table, importlist.Columns, sqlgraph.NewFieldSpec(importlist.FieldID, field.TypeInt))
_spec.From = ilq.sql
if unique := ilq.ctx.Unique; unique != nil {
_spec.Unique = *unique
} else if ilq.path != nil {
_spec.Unique = true
}
if fields := ilq.ctx.Fields; len(fields) > 0 {
_spec.Node.Columns = make([]string, 0, len(fields))
_spec.Node.Columns = append(_spec.Node.Columns, importlist.FieldID)
for i := range fields {
if fields[i] != importlist.FieldID {
_spec.Node.Columns = append(_spec.Node.Columns, fields[i])
}
}
}
if ps := ilq.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
if limit := ilq.ctx.Limit; limit != nil {
_spec.Limit = *limit
}
if offset := ilq.ctx.Offset; offset != nil {
_spec.Offset = *offset
}
if ps := ilq.order; len(ps) > 0 {
_spec.Order = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
return _spec
}
func (ilq *ImportListQuery) sqlQuery(ctx context.Context) *sql.Selector {
builder := sql.Dialect(ilq.driver.Dialect())
t1 := builder.Table(importlist.Table)
columns := ilq.ctx.Fields
if len(columns) == 0 {
columns = importlist.Columns
}
selector := builder.Select(t1.Columns(columns...)...).From(t1)
if ilq.sql != nil {
selector = ilq.sql
selector.Select(selector.Columns(columns...)...)
}
if ilq.ctx.Unique != nil && *ilq.ctx.Unique {
selector.Distinct()
}
for _, p := range ilq.predicates {
p(selector)
}
for _, p := range ilq.order {
p(selector)
}
if offset := ilq.ctx.Offset; offset != nil {
// limit is mandatory for offset clause. We start
// with default value, and override it below if needed.
selector.Offset(*offset).Limit(math.MaxInt32)
}
if limit := ilq.ctx.Limit; limit != nil {
selector.Limit(*limit)
}
return selector
}
// ImportListGroupBy is the group-by builder for ImportList entities.
type ImportListGroupBy struct {
selector
build *ImportListQuery
}
// Aggregate adds the given aggregation functions to the group-by query.
func (ilgb *ImportListGroupBy) Aggregate(fns ...AggregateFunc) *ImportListGroupBy {
ilgb.fns = append(ilgb.fns, fns...)
return ilgb
}
// Scan applies the selector query and scans the result into the given value.
func (ilgb *ImportListGroupBy) Scan(ctx context.Context, v any) error {
ctx = setContextOp(ctx, ilgb.build.ctx, "GroupBy")
if err := ilgb.build.prepareQuery(ctx); err != nil {
return err
}
return scanWithInterceptors[*ImportListQuery, *ImportListGroupBy](ctx, ilgb.build, ilgb, ilgb.build.inters, v)
}
func (ilgb *ImportListGroupBy) sqlScan(ctx context.Context, root *ImportListQuery, v any) error {
selector := root.sqlQuery(ctx).Select()
aggregation := make([]string, 0, len(ilgb.fns))
for _, fn := range ilgb.fns {
aggregation = append(aggregation, fn(selector))
}
if len(selector.SelectedColumns()) == 0 {
columns := make([]string, 0, len(*ilgb.flds)+len(ilgb.fns))
for _, f := range *ilgb.flds {
columns = append(columns, selector.C(f))
}
columns = append(columns, aggregation...)
selector.Select(columns...)
}
selector.GroupBy(selector.Columns(*ilgb.flds...)...)
if err := selector.Err(); err != nil {
return err
}
rows := &sql.Rows{}
query, args := selector.Query()
if err := ilgb.build.driver.Query(ctx, query, args, rows); err != nil {
return err
}
defer rows.Close()
return sql.ScanSlice(rows, v)
}
// ImportListSelect is the builder for selecting fields of ImportList entities.
type ImportListSelect struct {
*ImportListQuery
selector
}
// Aggregate adds the given aggregation functions to the selector query.
func (ils *ImportListSelect) Aggregate(fns ...AggregateFunc) *ImportListSelect {
ils.fns = append(ils.fns, fns...)
return ils
}
// Scan applies the selector query and scans the result into the given value.
func (ils *ImportListSelect) Scan(ctx context.Context, v any) error {
ctx = setContextOp(ctx, ils.ctx, "Select")
if err := ils.prepareQuery(ctx); err != nil {
return err
}
return scanWithInterceptors[*ImportListQuery, *ImportListSelect](ctx, ils.ImportListQuery, ils, ils.inters, v)
}
func (ils *ImportListSelect) sqlScan(ctx context.Context, root *ImportListQuery, v any) error {
selector := root.sqlQuery(ctx)
aggregation := make([]string, 0, len(ils.fns))
for _, fn := range ils.fns {
aggregation = append(aggregation, fn(selector))
}
switch n := len(*ils.selector.flds); {
case n == 0 && len(aggregation) > 0:
selector.Select(aggregation...)
case n != 0 && len(aggregation) > 0:
selector.AppendSelect(aggregation...)
}
rows := &sql.Rows{}
query, args := selector.Query()
if err := ils.driver.Query(ctx, query, args, rows); err != nil {
return err
}
defer rows.Close()
return sql.ScanSlice(rows, v)
}
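
A minimal usage sketch of the two builders above (not part of the commits shown), assuming an already-opened *ent.Client and the generated importlist package; the json:"count" tag follows ent's default aggregation column name:

package example // hypothetical package, not part of the repository

import (
	"context"

	"polaris/ent"
	"polaris/ent/importlist"
)

// countImportListsByType drives the GroupBy/Aggregate/Scan pipeline shown
// above and tallies import lists per "type" enum value.
func countImportListsByType(ctx context.Context, client *ent.Client) (map[string]int, error) {
	var rows []struct {
		Type  string `json:"type"`
		Count int    `json:"count"`
	}
	if err := client.ImportList.Query().
		GroupBy(importlist.FieldType).
		Aggregate(ent.Count()).
		Scan(ctx, &rows); err != nil {
		return nil, err
	}
	out := make(map[string]int, len(rows))
	for _, r := range rows {
		out[r.Type] = r.Count
	}
	return out, nil
}

ImportListSelect covers plain projections the same way, e.g. client.ImportList.Query().Select(importlist.FieldName).Strings(ctx), assuming the standard generated Strings helper.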

ent/importlist_update.go (new file, +462 lines)

@@ -0,0 +1,462 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"context"
"errors"
"fmt"
"polaris/ent/importlist"
"polaris/ent/predicate"
"polaris/ent/schema"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
)
// ImportListUpdate is the builder for updating ImportList entities.
type ImportListUpdate struct {
config
hooks []Hook
mutation *ImportListMutation
}
// Where appends a list of predicates to the ImportListUpdate builder.
func (ilu *ImportListUpdate) Where(ps ...predicate.ImportList) *ImportListUpdate {
ilu.mutation.Where(ps...)
return ilu
}
// SetName sets the "name" field.
func (ilu *ImportListUpdate) SetName(s string) *ImportListUpdate {
ilu.mutation.SetName(s)
return ilu
}
// SetNillableName sets the "name" field if the given value is not nil.
func (ilu *ImportListUpdate) SetNillableName(s *string) *ImportListUpdate {
if s != nil {
ilu.SetName(*s)
}
return ilu
}
// SetType sets the "type" field.
func (ilu *ImportListUpdate) SetType(i importlist.Type) *ImportListUpdate {
ilu.mutation.SetType(i)
return ilu
}
// SetNillableType sets the "type" field if the given value is not nil.
func (ilu *ImportListUpdate) SetNillableType(i *importlist.Type) *ImportListUpdate {
if i != nil {
ilu.SetType(*i)
}
return ilu
}
// SetURL sets the "url" field.
func (ilu *ImportListUpdate) SetURL(s string) *ImportListUpdate {
ilu.mutation.SetURL(s)
return ilu
}
// SetNillableURL sets the "url" field if the given value is not nil.
func (ilu *ImportListUpdate) SetNillableURL(s *string) *ImportListUpdate {
if s != nil {
ilu.SetURL(*s)
}
return ilu
}
// ClearURL clears the value of the "url" field.
func (ilu *ImportListUpdate) ClearURL() *ImportListUpdate {
ilu.mutation.ClearURL()
return ilu
}
// SetQulity sets the "qulity" field.
func (ilu *ImportListUpdate) SetQulity(s string) *ImportListUpdate {
ilu.mutation.SetQulity(s)
return ilu
}
// SetNillableQulity sets the "qulity" field if the given value is not nil.
func (ilu *ImportListUpdate) SetNillableQulity(s *string) *ImportListUpdate {
if s != nil {
ilu.SetQulity(*s)
}
return ilu
}
// SetStorageID sets the "storage_id" field.
func (ilu *ImportListUpdate) SetStorageID(i int) *ImportListUpdate {
ilu.mutation.ResetStorageID()
ilu.mutation.SetStorageID(i)
return ilu
}
// SetNillableStorageID sets the "storage_id" field if the given value is not nil.
func (ilu *ImportListUpdate) SetNillableStorageID(i *int) *ImportListUpdate {
if i != nil {
ilu.SetStorageID(*i)
}
return ilu
}
// AddStorageID adds i to the "storage_id" field.
func (ilu *ImportListUpdate) AddStorageID(i int) *ImportListUpdate {
ilu.mutation.AddStorageID(i)
return ilu
}
// SetSettings sets the "settings" field.
func (ilu *ImportListUpdate) SetSettings(sls schema.ImportListSettings) *ImportListUpdate {
ilu.mutation.SetSettings(sls)
return ilu
}
// SetNillableSettings sets the "settings" field if the given value is not nil.
func (ilu *ImportListUpdate) SetNillableSettings(sls *schema.ImportListSettings) *ImportListUpdate {
if sls != nil {
ilu.SetSettings(*sls)
}
return ilu
}
// ClearSettings clears the value of the "settings" field.
func (ilu *ImportListUpdate) ClearSettings() *ImportListUpdate {
ilu.mutation.ClearSettings()
return ilu
}
// Mutation returns the ImportListMutation object of the builder.
func (ilu *ImportListUpdate) Mutation() *ImportListMutation {
return ilu.mutation
}
// Save executes the query and returns the number of nodes affected by the update operation.
func (ilu *ImportListUpdate) Save(ctx context.Context) (int, error) {
return withHooks(ctx, ilu.sqlSave, ilu.mutation, ilu.hooks)
}
// SaveX is like Save, but panics if an error occurs.
func (ilu *ImportListUpdate) SaveX(ctx context.Context) int {
affected, err := ilu.Save(ctx)
if err != nil {
panic(err)
}
return affected
}
// Exec executes the query.
func (ilu *ImportListUpdate) Exec(ctx context.Context) error {
_, err := ilu.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (ilu *ImportListUpdate) ExecX(ctx context.Context) {
if err := ilu.Exec(ctx); err != nil {
panic(err)
}
}
// check runs all checks and user-defined validators on the builder.
func (ilu *ImportListUpdate) check() error {
if v, ok := ilu.mutation.GetType(); ok {
if err := importlist.TypeValidator(v); err != nil {
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "ImportList.type": %w`, err)}
}
}
return nil
}
func (ilu *ImportListUpdate) sqlSave(ctx context.Context) (n int, err error) {
if err := ilu.check(); err != nil {
return n, err
}
_spec := sqlgraph.NewUpdateSpec(importlist.Table, importlist.Columns, sqlgraph.NewFieldSpec(importlist.FieldID, field.TypeInt))
if ps := ilu.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
if value, ok := ilu.mutation.Name(); ok {
_spec.SetField(importlist.FieldName, field.TypeString, value)
}
if value, ok := ilu.mutation.GetType(); ok {
_spec.SetField(importlist.FieldType, field.TypeEnum, value)
}
if value, ok := ilu.mutation.URL(); ok {
_spec.SetField(importlist.FieldURL, field.TypeString, value)
}
if ilu.mutation.URLCleared() {
_spec.ClearField(importlist.FieldURL, field.TypeString)
}
if value, ok := ilu.mutation.Qulity(); ok {
_spec.SetField(importlist.FieldQulity, field.TypeString, value)
}
if value, ok := ilu.mutation.StorageID(); ok {
_spec.SetField(importlist.FieldStorageID, field.TypeInt, value)
}
if value, ok := ilu.mutation.AddedStorageID(); ok {
_spec.AddField(importlist.FieldStorageID, field.TypeInt, value)
}
if value, ok := ilu.mutation.Settings(); ok {
_spec.SetField(importlist.FieldSettings, field.TypeJSON, value)
}
if ilu.mutation.SettingsCleared() {
_spec.ClearField(importlist.FieldSettings, field.TypeJSON)
}
if n, err = sqlgraph.UpdateNodes(ctx, ilu.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{importlist.Label}
} else if sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
return 0, err
}
ilu.mutation.done = true
return n, nil
}
// ImportListUpdateOne is the builder for updating a single ImportList entity.
type ImportListUpdateOne struct {
config
fields []string
hooks []Hook
mutation *ImportListMutation
}
// SetName sets the "name" field.
func (iluo *ImportListUpdateOne) SetName(s string) *ImportListUpdateOne {
iluo.mutation.SetName(s)
return iluo
}
// SetNillableName sets the "name" field if the given value is not nil.
func (iluo *ImportListUpdateOne) SetNillableName(s *string) *ImportListUpdateOne {
if s != nil {
iluo.SetName(*s)
}
return iluo
}
// SetType sets the "type" field.
func (iluo *ImportListUpdateOne) SetType(i importlist.Type) *ImportListUpdateOne {
iluo.mutation.SetType(i)
return iluo
}
// SetNillableType sets the "type" field if the given value is not nil.
func (iluo *ImportListUpdateOne) SetNillableType(i *importlist.Type) *ImportListUpdateOne {
if i != nil {
iluo.SetType(*i)
}
return iluo
}
// SetURL sets the "url" field.
func (iluo *ImportListUpdateOne) SetURL(s string) *ImportListUpdateOne {
iluo.mutation.SetURL(s)
return iluo
}
// SetNillableURL sets the "url" field if the given value is not nil.
func (iluo *ImportListUpdateOne) SetNillableURL(s *string) *ImportListUpdateOne {
if s != nil {
iluo.SetURL(*s)
}
return iluo
}
// ClearURL clears the value of the "url" field.
func (iluo *ImportListUpdateOne) ClearURL() *ImportListUpdateOne {
iluo.mutation.ClearURL()
return iluo
}
// SetQulity sets the "qulity" field.
func (iluo *ImportListUpdateOne) SetQulity(s string) *ImportListUpdateOne {
iluo.mutation.SetQulity(s)
return iluo
}
// SetNillableQulity sets the "qulity" field if the given value is not nil.
func (iluo *ImportListUpdateOne) SetNillableQulity(s *string) *ImportListUpdateOne {
if s != nil {
iluo.SetQulity(*s)
}
return iluo
}
// SetStorageID sets the "storage_id" field.
func (iluo *ImportListUpdateOne) SetStorageID(i int) *ImportListUpdateOne {
iluo.mutation.ResetStorageID()
iluo.mutation.SetStorageID(i)
return iluo
}
// SetNillableStorageID sets the "storage_id" field if the given value is not nil.
func (iluo *ImportListUpdateOne) SetNillableStorageID(i *int) *ImportListUpdateOne {
if i != nil {
iluo.SetStorageID(*i)
}
return iluo
}
// AddStorageID adds i to the "storage_id" field.
func (iluo *ImportListUpdateOne) AddStorageID(i int) *ImportListUpdateOne {
iluo.mutation.AddStorageID(i)
return iluo
}
// SetSettings sets the "settings" field.
func (iluo *ImportListUpdateOne) SetSettings(sls schema.ImportListSettings) *ImportListUpdateOne {
iluo.mutation.SetSettings(sls)
return iluo
}
// SetNillableSettings sets the "settings" field if the given value is not nil.
func (iluo *ImportListUpdateOne) SetNillableSettings(sls *schema.ImportListSettings) *ImportListUpdateOne {
if sls != nil {
iluo.SetSettings(*sls)
}
return iluo
}
// ClearSettings clears the value of the "settings" field.
func (iluo *ImportListUpdateOne) ClearSettings() *ImportListUpdateOne {
iluo.mutation.ClearSettings()
return iluo
}
// Mutation returns the ImportListMutation object of the builder.
func (iluo *ImportListUpdateOne) Mutation() *ImportListMutation {
return iluo.mutation
}
// Where appends a list of predicates to the ImportListUpdateOne builder.
func (iluo *ImportListUpdateOne) Where(ps ...predicate.ImportList) *ImportListUpdateOne {
iluo.mutation.Where(ps...)
return iluo
}
// Select allows selecting one or more fields (columns) of the returned entity.
// The default is selecting all fields defined in the entity schema.
func (iluo *ImportListUpdateOne) Select(field string, fields ...string) *ImportListUpdateOne {
iluo.fields = append([]string{field}, fields...)
return iluo
}
// Save executes the query and returns the updated ImportList entity.
func (iluo *ImportListUpdateOne) Save(ctx context.Context) (*ImportList, error) {
return withHooks(ctx, iluo.sqlSave, iluo.mutation, iluo.hooks)
}
// SaveX is like Save, but panics if an error occurs.
func (iluo *ImportListUpdateOne) SaveX(ctx context.Context) *ImportList {
node, err := iluo.Save(ctx)
if err != nil {
panic(err)
}
return node
}
// Exec executes the query on the entity.
func (iluo *ImportListUpdateOne) Exec(ctx context.Context) error {
_, err := iluo.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (iluo *ImportListUpdateOne) ExecX(ctx context.Context) {
if err := iluo.Exec(ctx); err != nil {
panic(err)
}
}
// check runs all checks and user-defined validators on the builder.
func (iluo *ImportListUpdateOne) check() error {
if v, ok := iluo.mutation.GetType(); ok {
if err := importlist.TypeValidator(v); err != nil {
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "ImportList.type": %w`, err)}
}
}
return nil
}
func (iluo *ImportListUpdateOne) sqlSave(ctx context.Context) (_node *ImportList, err error) {
if err := iluo.check(); err != nil {
return _node, err
}
_spec := sqlgraph.NewUpdateSpec(importlist.Table, importlist.Columns, sqlgraph.NewFieldSpec(importlist.FieldID, field.TypeInt))
id, ok := iluo.mutation.ID()
if !ok {
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "ImportList.id" for update`)}
}
_spec.Node.ID.Value = id
if fields := iluo.fields; len(fields) > 0 {
_spec.Node.Columns = make([]string, 0, len(fields))
_spec.Node.Columns = append(_spec.Node.Columns, importlist.FieldID)
for _, f := range fields {
if !importlist.ValidColumn(f) {
return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
}
if f != importlist.FieldID {
_spec.Node.Columns = append(_spec.Node.Columns, f)
}
}
}
if ps := iluo.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
if value, ok := iluo.mutation.Name(); ok {
_spec.SetField(importlist.FieldName, field.TypeString, value)
}
if value, ok := iluo.mutation.GetType(); ok {
_spec.SetField(importlist.FieldType, field.TypeEnum, value)
}
if value, ok := iluo.mutation.URL(); ok {
_spec.SetField(importlist.FieldURL, field.TypeString, value)
}
if iluo.mutation.URLCleared() {
_spec.ClearField(importlist.FieldURL, field.TypeString)
}
if value, ok := iluo.mutation.Qulity(); ok {
_spec.SetField(importlist.FieldQulity, field.TypeString, value)
}
if value, ok := iluo.mutation.StorageID(); ok {
_spec.SetField(importlist.FieldStorageID, field.TypeInt, value)
}
if value, ok := iluo.mutation.AddedStorageID(); ok {
_spec.AddField(importlist.FieldStorageID, field.TypeInt, value)
}
if value, ok := iluo.mutation.Settings(); ok {
_spec.SetField(importlist.FieldSettings, field.TypeJSON, value)
}
if iluo.mutation.SettingsCleared() {
_spec.ClearField(importlist.FieldSettings, field.TypeJSON)
}
_node = &ImportList{config: iluo.config}
_spec.Assign = _node.assignValues
_spec.ScanValues = _node.scanValues
if err = sqlgraph.UpdateNode(ctx, iluo.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{importlist.Label}
} else if sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
return nil, err
}
iluo.mutation.done = true
return _node, nil
}
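
A sketch of how the update builders above are typically driven (not from the commits); UpdateOneID and the importlist.StorageID predicate are the usual generated helpers and are assumed here:

package example // hypothetical package, not part of the repository

import (
	"context"

	"polaris/ent"
	"polaris/ent/importlist"
)

// renameImportList goes through the single-row ImportListUpdateOne builder.
func renameImportList(ctx context.Context, client *ent.Client, id int, name string) error {
	return client.ImportList.UpdateOneID(id).
		SetName(name).
		Exec(ctx)
}

// repointImportLists uses the bulk ImportListUpdate builder: every list that
// still points at oldStorage is moved to newStorage in a single statement.
func repointImportLists(ctx context.Context, client *ent.Client, oldStorage, newStorage int) (int, error) {
	return client.ImportList.Update().
		Where(importlist.StorageID(oldStorage)).
		SetStorageID(newStorage).
		Save(ctx)
}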


@@ -25,7 +25,11 @@ type Indexers struct {
// EnableRss holds the value of the "enable_rss" field.
EnableRss bool `json:"enable_rss,omitempty"`
// Priority holds the value of the "priority" field.
Priority int `json:"priority,omitempty"`
Priority int `json:"priority,omitempty"`
// minimal seed ratio required before removing torrent
SeedRatio float32 `json:"seed_ratio,omitempty"`
// Disabled holds the value of the "disabled" field.
Disabled bool `json:"disabled,omitempty"`
selectValues sql.SelectValues
}
@@ -34,8 +38,10 @@ func (*Indexers) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case indexers.FieldEnableRss:
case indexers.FieldEnableRss, indexers.FieldDisabled:
values[i] = new(sql.NullBool)
case indexers.FieldSeedRatio:
values[i] = new(sql.NullFloat64)
case indexers.FieldID, indexers.FieldPriority:
values[i] = new(sql.NullInt64)
case indexers.FieldName, indexers.FieldImplementation, indexers.FieldSettings:
@@ -91,6 +97,18 @@ func (i *Indexers) assignValues(columns []string, values []any) error {
} else if value.Valid {
i.Priority = int(value.Int64)
}
case indexers.FieldSeedRatio:
if value, ok := values[j].(*sql.NullFloat64); !ok {
return fmt.Errorf("unexpected type %T for field seed_ratio", values[j])
} else if value.Valid {
i.SeedRatio = float32(value.Float64)
}
case indexers.FieldDisabled:
if value, ok := values[j].(*sql.NullBool); !ok {
return fmt.Errorf("unexpected type %T for field disabled", values[j])
} else if value.Valid {
i.Disabled = value.Bool
}
default:
i.selectValues.Set(columns[j], values[j])
}
@@ -141,6 +159,12 @@ func (i *Indexers) String() string {
builder.WriteString(", ")
builder.WriteString("priority=")
builder.WriteString(fmt.Sprintf("%v", i.Priority))
builder.WriteString(", ")
builder.WriteString("seed_ratio=")
builder.WriteString(fmt.Sprintf("%v", i.SeedRatio))
builder.WriteString(", ")
builder.WriteString("disabled=")
builder.WriteString(fmt.Sprintf("%v", i.Disabled))
builder.WriteByte(')')
return builder.String()
}


@@ -21,6 +21,10 @@ const (
FieldEnableRss = "enable_rss"
// FieldPriority holds the string denoting the priority field in the database.
FieldPriority = "priority"
// FieldSeedRatio holds the string denoting the seed_ratio field in the database.
FieldSeedRatio = "seed_ratio"
// FieldDisabled holds the string denoting the disabled field in the database.
FieldDisabled = "disabled"
// Table holds the table name of the indexers in the database.
Table = "indexers"
)
@@ -33,6 +37,8 @@ var Columns = []string{
FieldSettings,
FieldEnableRss,
FieldPriority,
FieldSeedRatio,
FieldDisabled,
}
// ValidColumn reports if the column name is valid (part of the table columns).
@@ -48,6 +54,12 @@ func ValidColumn(column string) bool {
var (
// DefaultEnableRss holds the default value on creation for the "enable_rss" field.
DefaultEnableRss bool
// DefaultPriority holds the default value on creation for the "priority" field.
DefaultPriority int
// DefaultSeedRatio holds the default value on creation for the "seed_ratio" field.
DefaultSeedRatio float32
// DefaultDisabled holds the default value on creation for the "disabled" field.
DefaultDisabled bool
)
// OrderOption defines the ordering options for the Indexers queries.
@@ -82,3 +94,13 @@ func ByEnableRss(opts ...sql.OrderTermOption) OrderOption {
func ByPriority(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldPriority, opts...).ToFunc()
}
// BySeedRatio orders the results by the seed_ratio field.
func BySeedRatio(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldSeedRatio, opts...).ToFunc()
}
// ByDisabled orders the results by the disabled field.
func ByDisabled(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldDisabled, opts...).ToFunc()
}
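
A sketch of a query that combines the new ordering options above with the SeedRatio/Disabled predicates added in the next hunk (assumed: an open *ent.Client and the generated Or combinator):

package example // hypothetical package, not part of the repository

import (
	"context"

	"entgo.io/ent/dialect/sql"

	"polaris/ent"
	"polaris/ent/indexers"
)

// activeIndexers returns indexers that are not disabled (NULL is treated as
// "not disabled") and whose seed_ratio requirement is at least minRatio,
// highest priority first.
func activeIndexers(ctx context.Context, client *ent.Client, minRatio float32) ([]*ent.Indexers, error) {
	return client.Indexers.Query().
		Where(
			indexers.Or(indexers.DisabledIsNil(), indexers.Disabled(false)),
			indexers.SeedRatioGTE(minRatio),
		).
		Order(indexers.ByPriority(sql.OrderDesc())).
		All(ctx)
}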


@@ -78,6 +78,16 @@ func Priority(v int) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldPriority, v))
}
// SeedRatio applies equality check predicate on the "seed_ratio" field. It's identical to SeedRatioEQ.
func SeedRatio(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldSeedRatio, v))
}
// Disabled applies equality check predicate on the "disabled" field. It's identical to DisabledEQ.
func Disabled(v bool) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldDisabled, v))
}
// NameEQ applies the EQ predicate on the "name" field.
func NameEQ(v string) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldName, v))
@@ -323,6 +333,76 @@ func PriorityLTE(v int) predicate.Indexers {
return predicate.Indexers(sql.FieldLTE(FieldPriority, v))
}
// SeedRatioEQ applies the EQ predicate on the "seed_ratio" field.
func SeedRatioEQ(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldSeedRatio, v))
}
// SeedRatioNEQ applies the NEQ predicate on the "seed_ratio" field.
func SeedRatioNEQ(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldNEQ(FieldSeedRatio, v))
}
// SeedRatioIn applies the In predicate on the "seed_ratio" field.
func SeedRatioIn(vs ...float32) predicate.Indexers {
return predicate.Indexers(sql.FieldIn(FieldSeedRatio, vs...))
}
// SeedRatioNotIn applies the NotIn predicate on the "seed_ratio" field.
func SeedRatioNotIn(vs ...float32) predicate.Indexers {
return predicate.Indexers(sql.FieldNotIn(FieldSeedRatio, vs...))
}
// SeedRatioGT applies the GT predicate on the "seed_ratio" field.
func SeedRatioGT(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldGT(FieldSeedRatio, v))
}
// SeedRatioGTE applies the GTE predicate on the "seed_ratio" field.
func SeedRatioGTE(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldGTE(FieldSeedRatio, v))
}
// SeedRatioLT applies the LT predicate on the "seed_ratio" field.
func SeedRatioLT(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldLT(FieldSeedRatio, v))
}
// SeedRatioLTE applies the LTE predicate on the "seed_ratio" field.
func SeedRatioLTE(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldLTE(FieldSeedRatio, v))
}
// SeedRatioIsNil applies the IsNil predicate on the "seed_ratio" field.
func SeedRatioIsNil() predicate.Indexers {
return predicate.Indexers(sql.FieldIsNull(FieldSeedRatio))
}
// SeedRatioNotNil applies the NotNil predicate on the "seed_ratio" field.
func SeedRatioNotNil() predicate.Indexers {
return predicate.Indexers(sql.FieldNotNull(FieldSeedRatio))
}
// DisabledEQ applies the EQ predicate on the "disabled" field.
func DisabledEQ(v bool) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldDisabled, v))
}
// DisabledNEQ applies the NEQ predicate on the "disabled" field.
func DisabledNEQ(v bool) predicate.Indexers {
return predicate.Indexers(sql.FieldNEQ(FieldDisabled, v))
}
// DisabledIsNil applies the IsNil predicate on the "disabled" field.
func DisabledIsNil() predicate.Indexers {
return predicate.Indexers(sql.FieldIsNull(FieldDisabled))
}
// DisabledNotNil applies the NotNil predicate on the "disabled" field.
func DisabledNotNil() predicate.Indexers {
return predicate.Indexers(sql.FieldNotNull(FieldDisabled))
}
// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Indexers) predicate.Indexers {
return predicate.Indexers(sql.AndPredicates(predicates...))


@@ -57,6 +57,42 @@ func (ic *IndexersCreate) SetPriority(i int) *IndexersCreate {
return ic
}
// SetNillablePriority sets the "priority" field if the given value is not nil.
func (ic *IndexersCreate) SetNillablePriority(i *int) *IndexersCreate {
if i != nil {
ic.SetPriority(*i)
}
return ic
}
// SetSeedRatio sets the "seed_ratio" field.
func (ic *IndexersCreate) SetSeedRatio(f float32) *IndexersCreate {
ic.mutation.SetSeedRatio(f)
return ic
}
// SetNillableSeedRatio sets the "seed_ratio" field if the given value is not nil.
func (ic *IndexersCreate) SetNillableSeedRatio(f *float32) *IndexersCreate {
if f != nil {
ic.SetSeedRatio(*f)
}
return ic
}
// SetDisabled sets the "disabled" field.
func (ic *IndexersCreate) SetDisabled(b bool) *IndexersCreate {
ic.mutation.SetDisabled(b)
return ic
}
// SetNillableDisabled sets the "disabled" field if the given value is not nil.
func (ic *IndexersCreate) SetNillableDisabled(b *bool) *IndexersCreate {
if b != nil {
ic.SetDisabled(*b)
}
return ic
}
// Mutation returns the IndexersMutation object of the builder.
func (ic *IndexersCreate) Mutation() *IndexersMutation {
return ic.mutation
@@ -96,6 +132,18 @@ func (ic *IndexersCreate) defaults() {
v := indexers.DefaultEnableRss
ic.mutation.SetEnableRss(v)
}
if _, ok := ic.mutation.Priority(); !ok {
v := indexers.DefaultPriority
ic.mutation.SetPriority(v)
}
if _, ok := ic.mutation.SeedRatio(); !ok {
v := indexers.DefaultSeedRatio
ic.mutation.SetSeedRatio(v)
}
if _, ok := ic.mutation.Disabled(); !ok {
v := indexers.DefaultDisabled
ic.mutation.SetDisabled(v)
}
}
// check runs all checks and user-defined validators on the builder.
@@ -161,6 +209,14 @@ func (ic *IndexersCreate) createSpec() (*Indexers, *sqlgraph.CreateSpec) {
_spec.SetField(indexers.FieldPriority, field.TypeInt, value)
_node.Priority = value
}
if value, ok := ic.mutation.SeedRatio(); ok {
_spec.SetField(indexers.FieldSeedRatio, field.TypeFloat32, value)
_node.SeedRatio = value
}
if value, ok := ic.mutation.Disabled(); ok {
_spec.SetField(indexers.FieldDisabled, field.TypeBool, value)
_node.Disabled = value
}
return _node, _spec
}
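
A sketch of creating and disabling an indexer against the builders above (assumed: an open *ent.Client; "torznab" is only an example implementation value):

package example // hypothetical package, not part of the repository

import (
	"context"

	"polaris/ent"
	"polaris/ent/indexers"
)

// addIndexer leaves priority, seed_ratio and disabled unset so the defaults()
// hook above fills in 50, 0 and false.
func addIndexer(ctx context.Context, client *ent.Client, name, settings string) (*ent.Indexers, error) {
	return client.Indexers.Create().
		SetName(name).
		SetImplementation("torznab"). // example value only, not taken from the diff
		SetSettings(settings).
		Save(ctx)
}

// disableIndexer flips the new "disabled" flag via the bulk IndexersUpdate
// builder from the hunks below.
func disableIndexer(ctx context.Context, client *ent.Client, name string) (int, error) {
	return client.Indexers.Update().
		Where(indexers.NameEQ(name)).
		SetDisabled(true).
		Save(ctx)
}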


@@ -104,6 +104,53 @@ func (iu *IndexersUpdate) AddPriority(i int) *IndexersUpdate {
return iu
}
// SetSeedRatio sets the "seed_ratio" field.
func (iu *IndexersUpdate) SetSeedRatio(f float32) *IndexersUpdate {
iu.mutation.ResetSeedRatio()
iu.mutation.SetSeedRatio(f)
return iu
}
// SetNillableSeedRatio sets the "seed_ratio" field if the given value is not nil.
func (iu *IndexersUpdate) SetNillableSeedRatio(f *float32) *IndexersUpdate {
if f != nil {
iu.SetSeedRatio(*f)
}
return iu
}
// AddSeedRatio adds f to the "seed_ratio" field.
func (iu *IndexersUpdate) AddSeedRatio(f float32) *IndexersUpdate {
iu.mutation.AddSeedRatio(f)
return iu
}
// ClearSeedRatio clears the value of the "seed_ratio" field.
func (iu *IndexersUpdate) ClearSeedRatio() *IndexersUpdate {
iu.mutation.ClearSeedRatio()
return iu
}
// SetDisabled sets the "disabled" field.
func (iu *IndexersUpdate) SetDisabled(b bool) *IndexersUpdate {
iu.mutation.SetDisabled(b)
return iu
}
// SetNillableDisabled sets the "disabled" field if the given value is not nil.
func (iu *IndexersUpdate) SetNillableDisabled(b *bool) *IndexersUpdate {
if b != nil {
iu.SetDisabled(*b)
}
return iu
}
// ClearDisabled clears the value of the "disabled" field.
func (iu *IndexersUpdate) ClearDisabled() *IndexersUpdate {
iu.mutation.ClearDisabled()
return iu
}
// Mutation returns the IndexersMutation object of the builder.
func (iu *IndexersUpdate) Mutation() *IndexersMutation {
return iu.mutation
@@ -163,6 +210,21 @@ func (iu *IndexersUpdate) sqlSave(ctx context.Context) (n int, err error) {
if value, ok := iu.mutation.AddedPriority(); ok {
_spec.AddField(indexers.FieldPriority, field.TypeInt, value)
}
if value, ok := iu.mutation.SeedRatio(); ok {
_spec.SetField(indexers.FieldSeedRatio, field.TypeFloat32, value)
}
if value, ok := iu.mutation.AddedSeedRatio(); ok {
_spec.AddField(indexers.FieldSeedRatio, field.TypeFloat32, value)
}
if iu.mutation.SeedRatioCleared() {
_spec.ClearField(indexers.FieldSeedRatio, field.TypeFloat32)
}
if value, ok := iu.mutation.Disabled(); ok {
_spec.SetField(indexers.FieldDisabled, field.TypeBool, value)
}
if iu.mutation.DisabledCleared() {
_spec.ClearField(indexers.FieldDisabled, field.TypeBool)
}
if n, err = sqlgraph.UpdateNodes(ctx, iu.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{indexers.Label}
@@ -260,6 +322,53 @@ func (iuo *IndexersUpdateOne) AddPriority(i int) *IndexersUpdateOne {
return iuo
}
// SetSeedRatio sets the "seed_ratio" field.
func (iuo *IndexersUpdateOne) SetSeedRatio(f float32) *IndexersUpdateOne {
iuo.mutation.ResetSeedRatio()
iuo.mutation.SetSeedRatio(f)
return iuo
}
// SetNillableSeedRatio sets the "seed_ratio" field if the given value is not nil.
func (iuo *IndexersUpdateOne) SetNillableSeedRatio(f *float32) *IndexersUpdateOne {
if f != nil {
iuo.SetSeedRatio(*f)
}
return iuo
}
// AddSeedRatio adds f to the "seed_ratio" field.
func (iuo *IndexersUpdateOne) AddSeedRatio(f float32) *IndexersUpdateOne {
iuo.mutation.AddSeedRatio(f)
return iuo
}
// ClearSeedRatio clears the value of the "seed_ratio" field.
func (iuo *IndexersUpdateOne) ClearSeedRatio() *IndexersUpdateOne {
iuo.mutation.ClearSeedRatio()
return iuo
}
// SetDisabled sets the "disabled" field.
func (iuo *IndexersUpdateOne) SetDisabled(b bool) *IndexersUpdateOne {
iuo.mutation.SetDisabled(b)
return iuo
}
// SetNillableDisabled sets the "disabled" field if the given value is not nil.
func (iuo *IndexersUpdateOne) SetNillableDisabled(b *bool) *IndexersUpdateOne {
if b != nil {
iuo.SetDisabled(*b)
}
return iuo
}
// ClearDisabled clears the value of the "disabled" field.
func (iuo *IndexersUpdateOne) ClearDisabled() *IndexersUpdateOne {
iuo.mutation.ClearDisabled()
return iuo
}
// Mutation returns the IndexersMutation object of the builder.
func (iuo *IndexersUpdateOne) Mutation() *IndexersMutation {
return iuo.mutation
@@ -349,6 +458,21 @@ func (iuo *IndexersUpdateOne) sqlSave(ctx context.Context) (_node *Indexers, err
if value, ok := iuo.mutation.AddedPriority(); ok {
_spec.AddField(indexers.FieldPriority, field.TypeInt, value)
}
if value, ok := iuo.mutation.SeedRatio(); ok {
_spec.SetField(indexers.FieldSeedRatio, field.TypeFloat32, value)
}
if value, ok := iuo.mutation.AddedSeedRatio(); ok {
_spec.AddField(indexers.FieldSeedRatio, field.TypeFloat32, value)
}
if iuo.mutation.SeedRatioCleared() {
_spec.ClearField(indexers.FieldSeedRatio, field.TypeFloat32)
}
if value, ok := iuo.mutation.Disabled(); ok {
_spec.SetField(indexers.FieldDisabled, field.TypeBool, value)
}
if iuo.mutation.DisabledCleared() {
_spec.ClearField(indexers.FieldDisabled, field.TypeBool)
}
_node = &Indexers{config: iuo.config}
_spec.Assign = _node.assignValues
_spec.ScanValues = _node.scanValues


@@ -3,8 +3,10 @@
package ent
import (
"encoding/json"
"fmt"
"polaris/ent/media"
"polaris/ent/schema"
"strings"
"time"
@@ -43,6 +45,10 @@ type Media struct {
TargetDir string `json:"target_dir,omitempty"`
// tv series only
DownloadHistoryEpisodes bool `json:"download_history_episodes,omitempty"`
// Limiter holds the value of the "limiter" field.
Limiter schema.MediaLimiter `json:"limiter,omitempty"`
// Extras holds the value of the "extras" field.
Extras schema.MediaExtras `json:"extras,omitempty"`
// Edges holds the relations/edges for other nodes in the graph.
// The values are being populated by the MediaQuery when eager-loading is set.
Edges MediaEdges `json:"edges"`
@@ -72,6 +78,8 @@ func (*Media) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case media.FieldLimiter, media.FieldExtras:
values[i] = new([]byte)
case media.FieldDownloadHistoryEpisodes:
values[i] = new(sql.NullBool)
case media.FieldID, media.FieldTmdbID, media.FieldStorageID:
@@ -179,6 +187,22 @@ func (m *Media) assignValues(columns []string, values []any) error {
} else if value.Valid {
m.DownloadHistoryEpisodes = value.Bool
}
case media.FieldLimiter:
if value, ok := values[i].(*[]byte); !ok {
return fmt.Errorf("unexpected type %T for field limiter", values[i])
} else if value != nil && len(*value) > 0 {
if err := json.Unmarshal(*value, &m.Limiter); err != nil {
return fmt.Errorf("unmarshal field limiter: %w", err)
}
}
case media.FieldExtras:
if value, ok := values[i].(*[]byte); !ok {
return fmt.Errorf("unexpected type %T for field extras", values[i])
} else if value != nil && len(*value) > 0 {
if err := json.Unmarshal(*value, &m.Extras); err != nil {
return fmt.Errorf("unmarshal field extras: %w", err)
}
}
default:
m.selectValues.Set(columns[i], values[i])
}
@@ -258,6 +282,12 @@ func (m *Media) String() string {
builder.WriteString(", ")
builder.WriteString("download_history_episodes=")
builder.WriteString(fmt.Sprintf("%v", m.DownloadHistoryEpisodes))
builder.WriteString(", ")
builder.WriteString("limiter=")
builder.WriteString(fmt.Sprintf("%v", m.Limiter))
builder.WriteString(", ")
builder.WriteString("extras=")
builder.WriteString(fmt.Sprintf("%v", m.Extras))
builder.WriteByte(')')
return builder.String()
}
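
A sketch of using the new JSON-backed limiter/extras columns from application code, relying on the setters and predicates added in the media hunks below (assumed: an open *ent.Client and the usual UpdateOneID helper); MediaLimiter sizes are bytes, per the schema later in this diff:

package example // hypothetical package, not part of the repository

import (
	"context"

	"polaris/ent"
	"polaris/ent/media"
	"polaris/ent/schema"
)

// capMediaSize stores a per-media size window (bytes) in the new JSON
// "limiter" column via the MediaUpdateOne setter added further below.
func capMediaSize(ctx context.Context, client *ent.Client, id, minBytes, maxBytes int) error {
	return client.Media.UpdateOneID(id).
		SetLimiter(schema.MediaLimiter{SizeMin: minBytes, SizeMax: maxBytes}).
		Exec(ctx)
}

// mediaWithExtras lists rows whose "extras" JSON is populated; assignValues
// above unmarshals the column straight into m.Extras.
func mediaWithExtras(ctx context.Context, client *ent.Client) ([]*ent.Media, error) {
	return client.Media.Query().
		Where(media.ExtrasNotNil()).
		All(ctx)
}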


@@ -41,6 +41,10 @@ const (
FieldTargetDir = "target_dir"
// FieldDownloadHistoryEpisodes holds the string denoting the download_history_episodes field in the database.
FieldDownloadHistoryEpisodes = "download_history_episodes"
// FieldLimiter holds the string denoting the limiter field in the database.
FieldLimiter = "limiter"
// FieldExtras holds the string denoting the extras field in the database.
FieldExtras = "extras"
// EdgeEpisodes holds the string denoting the episodes edge name in mutations.
EdgeEpisodes = "episodes"
// Table holds the table name of the media in the database.
@@ -70,6 +74,8 @@ var Columns = []string{
FieldStorageID,
FieldTargetDir,
FieldDownloadHistoryEpisodes,
FieldLimiter,
FieldExtras,
}
// ValidColumn reports if the column name is valid (part of the table columns).
@@ -124,7 +130,7 @@ const DefaultResolution = Resolution1080p
const (
Resolution720p Resolution = "720p"
Resolution1080p Resolution = "1080p"
Resolution4k Resolution = "4k"
Resolution2160p Resolution = "2160p"
)
func (r Resolution) String() string {
@@ -134,7 +140,7 @@ func (r Resolution) String() string {
// ResolutionValidator is a validator for the "resolution" field enum values. It is called by the builders before save.
func ResolutionValidator(r Resolution) error {
switch r {
case Resolution720p, Resolution1080p, Resolution4k:
case Resolution720p, Resolution1080p, Resolution2160p:
return nil
default:
return fmt.Errorf("media: invalid enum value for resolution field: %q", r)


@@ -775,6 +775,26 @@ func DownloadHistoryEpisodesNotNil() predicate.Media {
return predicate.Media(sql.FieldNotNull(FieldDownloadHistoryEpisodes))
}
// LimiterIsNil applies the IsNil predicate on the "limiter" field.
func LimiterIsNil() predicate.Media {
return predicate.Media(sql.FieldIsNull(FieldLimiter))
}
// LimiterNotNil applies the NotNil predicate on the "limiter" field.
func LimiterNotNil() predicate.Media {
return predicate.Media(sql.FieldNotNull(FieldLimiter))
}
// ExtrasIsNil applies the IsNil predicate on the "extras" field.
func ExtrasIsNil() predicate.Media {
return predicate.Media(sql.FieldIsNull(FieldExtras))
}
// ExtrasNotNil applies the NotNil predicate on the "extras" field.
func ExtrasNotNil() predicate.Media {
return predicate.Media(sql.FieldNotNull(FieldExtras))
}
// HasEpisodes applies the HasEdge predicate on the "episodes" edge.
func HasEpisodes() predicate.Media {
return predicate.Media(func(s *sql.Selector) {


@@ -8,6 +8,7 @@ import (
"fmt"
"polaris/ent/episode"
"polaris/ent/media"
"polaris/ent/schema"
"time"
"entgo.io/ent/dialect/sql/sqlgraph"
@@ -155,6 +156,34 @@ func (mc *MediaCreate) SetNillableDownloadHistoryEpisodes(b *bool) *MediaCreate
return mc
}
// SetLimiter sets the "limiter" field.
func (mc *MediaCreate) SetLimiter(sl schema.MediaLimiter) *MediaCreate {
mc.mutation.SetLimiter(sl)
return mc
}
// SetNillableLimiter sets the "limiter" field if the given value is not nil.
func (mc *MediaCreate) SetNillableLimiter(sl *schema.MediaLimiter) *MediaCreate {
if sl != nil {
mc.SetLimiter(*sl)
}
return mc
}
// SetExtras sets the "extras" field.
func (mc *MediaCreate) SetExtras(se schema.MediaExtras) *MediaCreate {
mc.mutation.SetExtras(se)
return mc
}
// SetNillableExtras sets the "extras" field if the given value is not nil.
func (mc *MediaCreate) SetNillableExtras(se *schema.MediaExtras) *MediaCreate {
if se != nil {
mc.SetExtras(*se)
}
return mc
}
// AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
func (mc *MediaCreate) AddEpisodeIDs(ids ...int) *MediaCreate {
mc.mutation.AddEpisodeIDs(ids...)
@@ -340,6 +369,14 @@ func (mc *MediaCreate) createSpec() (*Media, *sqlgraph.CreateSpec) {
_spec.SetField(media.FieldDownloadHistoryEpisodes, field.TypeBool, value)
_node.DownloadHistoryEpisodes = value
}
if value, ok := mc.mutation.Limiter(); ok {
_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
_node.Limiter = value
}
if value, ok := mc.mutation.Extras(); ok {
_spec.SetField(media.FieldExtras, field.TypeJSON, value)
_node.Extras = value
}
if nodes := mc.mutation.EpisodesIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M,


@@ -9,6 +9,7 @@ import (
"polaris/ent/episode"
"polaris/ent/media"
"polaris/ent/predicate"
"polaris/ent/schema"
"time"
"entgo.io/ent/dialect/sql"
@@ -249,6 +250,46 @@ func (mu *MediaUpdate) ClearDownloadHistoryEpisodes() *MediaUpdate {
return mu
}
// SetLimiter sets the "limiter" field.
func (mu *MediaUpdate) SetLimiter(sl schema.MediaLimiter) *MediaUpdate {
mu.mutation.SetLimiter(sl)
return mu
}
// SetNillableLimiter sets the "limiter" field if the given value is not nil.
func (mu *MediaUpdate) SetNillableLimiter(sl *schema.MediaLimiter) *MediaUpdate {
if sl != nil {
mu.SetLimiter(*sl)
}
return mu
}
// ClearLimiter clears the value of the "limiter" field.
func (mu *MediaUpdate) ClearLimiter() *MediaUpdate {
mu.mutation.ClearLimiter()
return mu
}
// SetExtras sets the "extras" field.
func (mu *MediaUpdate) SetExtras(se schema.MediaExtras) *MediaUpdate {
mu.mutation.SetExtras(se)
return mu
}
// SetNillableExtras sets the "extras" field if the given value is not nil.
func (mu *MediaUpdate) SetNillableExtras(se *schema.MediaExtras) *MediaUpdate {
if se != nil {
mu.SetExtras(*se)
}
return mu
}
// ClearExtras clears the value of the "extras" field.
func (mu *MediaUpdate) ClearExtras() *MediaUpdate {
mu.mutation.ClearExtras()
return mu
}
// AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
func (mu *MediaUpdate) AddEpisodeIDs(ids ...int) *MediaUpdate {
mu.mutation.AddEpisodeIDs(ids...)
@@ -401,6 +442,18 @@ func (mu *MediaUpdate) sqlSave(ctx context.Context) (n int, err error) {
if mu.mutation.DownloadHistoryEpisodesCleared() {
_spec.ClearField(media.FieldDownloadHistoryEpisodes, field.TypeBool)
}
if value, ok := mu.mutation.Limiter(); ok {
_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
}
if mu.mutation.LimiterCleared() {
_spec.ClearField(media.FieldLimiter, field.TypeJSON)
}
if value, ok := mu.mutation.Extras(); ok {
_spec.SetField(media.FieldExtras, field.TypeJSON, value)
}
if mu.mutation.ExtrasCleared() {
_spec.ClearField(media.FieldExtras, field.TypeJSON)
}
if mu.mutation.EpisodesCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M,
@@ -686,6 +739,46 @@ func (muo *MediaUpdateOne) ClearDownloadHistoryEpisodes() *MediaUpdateOne {
return muo
}
// SetLimiter sets the "limiter" field.
func (muo *MediaUpdateOne) SetLimiter(sl schema.MediaLimiter) *MediaUpdateOne {
muo.mutation.SetLimiter(sl)
return muo
}
// SetNillableLimiter sets the "limiter" field if the given value is not nil.
func (muo *MediaUpdateOne) SetNillableLimiter(sl *schema.MediaLimiter) *MediaUpdateOne {
if sl != nil {
muo.SetLimiter(*sl)
}
return muo
}
// ClearLimiter clears the value of the "limiter" field.
func (muo *MediaUpdateOne) ClearLimiter() *MediaUpdateOne {
muo.mutation.ClearLimiter()
return muo
}
// SetExtras sets the "extras" field.
func (muo *MediaUpdateOne) SetExtras(se schema.MediaExtras) *MediaUpdateOne {
muo.mutation.SetExtras(se)
return muo
}
// SetNillableExtras sets the "extras" field if the given value is not nil.
func (muo *MediaUpdateOne) SetNillableExtras(se *schema.MediaExtras) *MediaUpdateOne {
if se != nil {
muo.SetExtras(*se)
}
return muo
}
// ClearExtras clears the value of the "extras" field.
func (muo *MediaUpdateOne) ClearExtras() *MediaUpdateOne {
muo.mutation.ClearExtras()
return muo
}
// AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
func (muo *MediaUpdateOne) AddEpisodeIDs(ids ...int) *MediaUpdateOne {
muo.mutation.AddEpisodeIDs(ids...)
@@ -868,6 +961,18 @@ func (muo *MediaUpdateOne) sqlSave(ctx context.Context) (_node *Media, err error
if muo.mutation.DownloadHistoryEpisodesCleared() {
_spec.ClearField(media.FieldDownloadHistoryEpisodes, field.TypeBool)
}
if value, ok := muo.mutation.Limiter(); ok {
_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
}
if muo.mutation.LimiterCleared() {
_spec.ClearField(media.FieldLimiter, field.TypeJSON)
}
if value, ok := muo.mutation.Extras(); ok {
_spec.SetField(media.FieldExtras, field.TypeJSON, value)
}
if muo.mutation.ExtrasCleared() {
_spec.ClearField(media.FieldExtras, field.TypeJSON)
}
if muo.mutation.EpisodesCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M,


@@ -38,6 +38,8 @@ var (
{Name: "overview", Type: field.TypeString},
{Name: "air_date", Type: field.TypeString},
{Name: "status", Type: field.TypeEnum, Enums: []string{"missing", "downloading", "downloaded"}, Default: "missing"},
{Name: "monitored", Type: field.TypeBool, Default: false},
{Name: "target_file", Type: field.TypeString, Nullable: true},
{Name: "media_id", Type: field.TypeInt, Nullable: true},
}
// EpisodesTable holds the schema information for the "episodes" table.
@@ -48,7 +50,7 @@ var (
ForeignKeys: []*schema.ForeignKey{
{
Symbol: "episodes_media_episodes",
Columns: []*schema.Column{EpisodesColumns[7]},
Columns: []*schema.Column{EpisodesColumns[9]},
RefColumns: []*schema.Column{MediaColumns[0]},
OnDelete: schema.SetNull,
},
@@ -63,7 +65,9 @@ var (
{Name: "date", Type: field.TypeTime},
{Name: "target_dir", Type: field.TypeString},
{Name: "size", Type: field.TypeInt, Default: 0},
{Name: "status", Type: field.TypeEnum, Enums: []string{"running", "success", "fail", "uploading"}},
{Name: "download_client_id", Type: field.TypeInt, Nullable: true},
{Name: "indexer_id", Type: field.TypeInt, Nullable: true},
{Name: "status", Type: field.TypeEnum, Enums: []string{"running", "success", "fail", "uploading", "seeding"}},
{Name: "saved", Type: field.TypeString, Nullable: true},
}
// HistoriesTable holds the schema information for the "histories" table.
@@ -72,6 +76,22 @@ var (
Columns: HistoriesColumns,
PrimaryKey: []*schema.Column{HistoriesColumns[0]},
}
// ImportListsColumns holds the columns for the "import_lists" table.
ImportListsColumns = []*schema.Column{
{Name: "id", Type: field.TypeInt, Increment: true},
{Name: "name", Type: field.TypeString},
{Name: "type", Type: field.TypeEnum, Enums: []string{"plex", "doulist"}},
{Name: "url", Type: field.TypeString, Nullable: true},
{Name: "qulity", Type: field.TypeString},
{Name: "storage_id", Type: field.TypeInt},
{Name: "settings", Type: field.TypeJSON, Nullable: true},
}
// ImportListsTable holds the schema information for the "import_lists" table.
ImportListsTable = &schema.Table{
Name: "import_lists",
Columns: ImportListsColumns,
PrimaryKey: []*schema.Column{ImportListsColumns[0]},
}
// IndexersColumns holds the columns for the "indexers" table.
IndexersColumns = []*schema.Column{
{Name: "id", Type: field.TypeInt, Increment: true},
@@ -79,7 +99,9 @@ var (
{Name: "implementation", Type: field.TypeString},
{Name: "settings", Type: field.TypeString},
{Name: "enable_rss", Type: field.TypeBool, Default: true},
{Name: "priority", Type: field.TypeInt},
{Name: "priority", Type: field.TypeInt, Default: 50},
{Name: "seed_ratio", Type: field.TypeFloat32, Nullable: true, Default: 0},
{Name: "disabled", Type: field.TypeBool, Nullable: true, Default: false},
}
// IndexersTable holds the schema information for the "indexers" table.
IndexersTable = &schema.Table{
@@ -99,10 +121,12 @@ var (
{Name: "overview", Type: field.TypeString},
{Name: "created_at", Type: field.TypeTime},
{Name: "air_date", Type: field.TypeString, Default: ""},
{Name: "resolution", Type: field.TypeEnum, Enums: []string{"720p", "1080p", "4k"}, Default: "1080p"},
{Name: "resolution", Type: field.TypeEnum, Enums: []string{"720p", "1080p", "2160p"}, Default: "1080p"},
{Name: "storage_id", Type: field.TypeInt, Nullable: true},
{Name: "target_dir", Type: field.TypeString, Nullable: true},
{Name: "download_history_episodes", Type: field.TypeBool, Nullable: true, Default: false},
{Name: "limiter", Type: field.TypeJSON, Nullable: true},
{Name: "extras", Type: field.TypeJSON, Nullable: true},
}
// MediaTable holds the schema information for the "media" table.
MediaTable = &schema.Table{
@@ -141,6 +165,8 @@ var (
{Name: "id", Type: field.TypeInt, Increment: true},
{Name: "name", Type: field.TypeString, Unique: true},
{Name: "implementation", Type: field.TypeEnum, Enums: []string{"webdav", "local"}},
{Name: "tv_path", Type: field.TypeString, Nullable: true},
{Name: "movie_path", Type: field.TypeString, Nullable: true},
{Name: "settings", Type: field.TypeString, Nullable: true},
{Name: "deleted", Type: field.TypeBool, Default: false},
{Name: "default", Type: field.TypeBool, Default: false},
@@ -156,6 +182,7 @@ var (
DownloadClientsTable,
EpisodesTable,
HistoriesTable,
ImportListsTable,
IndexersTable,
MediaTable,
NotificationClientsTable,


File diff suppressed because it is too large.


@@ -15,6 +15,9 @@ type Episode func(*sql.Selector)
// History is the predicate function for history builders.
type History func(*sql.Selector)
// ImportList is the predicate function for importlist builders.
type ImportList func(*sql.Selector)
// Indexers is the predicate function for indexers builders.
type Indexers func(*sql.Selector)


@@ -4,6 +4,7 @@ package ent
import (
"polaris/ent/downloadclients"
"polaris/ent/episode"
"polaris/ent/history"
"polaris/ent/indexers"
"polaris/ent/media"
@@ -49,6 +50,10 @@ func init() {
downloadclients.DefaultTags = downloadclientsDescTags.Default.(string)
episodeFields := schema.Episode{}.Fields()
_ = episodeFields
// episodeDescMonitored is the schema descriptor for monitored field.
episodeDescMonitored := episodeFields[7].Descriptor()
// episode.DefaultMonitored holds the default value on creation for the monitored field.
episode.DefaultMonitored = episodeDescMonitored.Default.(bool)
historyFields := schema.History{}.Fields()
_ = historyFields
// historyDescSize is the schema descriptor for size field.
@@ -61,6 +66,18 @@ func init() {
indexersDescEnableRss := indexersFields[3].Descriptor()
// indexers.DefaultEnableRss holds the default value on creation for the enable_rss field.
indexers.DefaultEnableRss = indexersDescEnableRss.Default.(bool)
// indexersDescPriority is the schema descriptor for priority field.
indexersDescPriority := indexersFields[4].Descriptor()
// indexers.DefaultPriority holds the default value on creation for the priority field.
indexers.DefaultPriority = indexersDescPriority.Default.(int)
// indexersDescSeedRatio is the schema descriptor for seed_ratio field.
indexersDescSeedRatio := indexersFields[5].Descriptor()
// indexers.DefaultSeedRatio holds the default value on creation for the seed_ratio field.
indexers.DefaultSeedRatio = indexersDescSeedRatio.Default.(float32)
// indexersDescDisabled is the schema descriptor for disabled field.
indexersDescDisabled := indexersFields[6].Descriptor()
// indexers.DefaultDisabled holds the default value on creation for the disabled field.
indexers.DefaultDisabled = indexersDescDisabled.Default.(bool)
mediaFields := schema.Media{}.Fields()
_ = mediaFields
// mediaDescCreatedAt is the schema descriptor for created_at field.
@@ -84,11 +101,11 @@ func init() {
storageFields := schema.Storage{}.Fields()
_ = storageFields
// storageDescDeleted is the schema descriptor for deleted field.
storageDescDeleted := storageFields[3].Descriptor()
storageDescDeleted := storageFields[5].Descriptor()
// storage.DefaultDeleted holds the default value on creation for the deleted field.
storage.DefaultDeleted = storageDescDeleted.Default.(bool)
// storageDescDefault is the schema descriptor for default field.
storageDescDefault := storageFields[4].Descriptor()
storageDescDefault := storageFields[6].Descriptor()
// storage.DefaultDefault holds the default value on creation for the default field.
storage.DefaultDefault = storageDescDefault.Default.(bool)
}


@@ -21,16 +21,18 @@ func (Episode) Fields() []ent.Field {
field.String("overview"),
field.String("air_date"),
field.Enum("status").Values("missing", "downloading", "downloaded").Default("missing"),
field.Bool("monitored").Default(false).StructTag("json:\"monitored\""), //whether this episode is monitored
field.String("target_file").Optional(),
}
}
// Edges of the Episode.
func (Episode) Edges() []ent.Edge {
return []ent.Edge{
edge.From("media", Media.Type).
Ref("episodes").
Unique().
edge.From("media", Media.Type).
Ref("episodes").
Unique().
Field("media_id"),
}
}
}
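
A sketch of flipping the new monitored flag in bulk; the EpisodeUpdate builder, the SetMonitored setter and the episode.MediaID/SeasonNumber predicates are assumed to follow ent's usual generation (season_number is defined elsewhere in the schema, not in this hunk):

package example // hypothetical package, not part of the repository

import (
	"context"

	"polaris/ent"
	"polaris/ent/episode"
)

// unmonitorSeason clears the new "monitored" flag for one season of a series.
// episode.MediaID and episode.SeasonNumber are assumed generated predicates.
func unmonitorSeason(ctx context.Context, client *ent.Client, mediaID, season int) (int, error) {
	return client.Episode.Update().
		Where(
			episode.MediaID(mediaID),
			episode.SeasonNumber(season),
		).
		SetMonitored(false).
		Save(ctx)
}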


@@ -19,7 +19,9 @@ func (History) Fields() []ent.Field {
field.Time("date"),
field.String("target_dir"),
field.Int("size").Default(0),
field.Enum("status").Values("running", "success", "fail", "uploading"),
field.Int("download_client_id").Optional(),
field.Int("indexer_id").Optional(),
field.Enum("status").Values("running", "success", "fail", "uploading", "seeding"),
field.String("saved").Optional(),
}
}
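
A sketch of moving history rows into the new "seeding" status added above; history.StatusSeeding and IDIn follow ent's usual constant and predicate naming and are assumed here:

package example // hypothetical package, not part of the repository

import (
	"context"

	"polaris/ent"
	"polaris/ent/history"
)

// markSeeding moves the given history rows to the new "seeding" status via
// the generated bulk HistoryUpdate builder.
func markSeeding(ctx context.Context, client *ent.Client, ids ...int) (int, error) {
	return client.History.Update().
		Where(history.IDIn(ids...)).
		SetStatus(history.StatusSeeding).
		Save(ctx)
}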

ent/schema/importlist.go (new file, +32 lines)

@@ -0,0 +1,32 @@
package schema
import (
"entgo.io/ent"
"entgo.io/ent/schema/field"
)
// ImportList holds the schema definition for the ImportList entity.
type ImportList struct {
ent.Schema
}
// Fields of the ImportList.
func (ImportList) Fields() []ent.Field {
return []ent.Field{
field.String("name"),
field.Enum("type").Values("plex", "doulist"),
field.String("url").Optional(),
field.String("qulity"),
field.Int("storage_id"),
field.JSON("settings", ImportListSettings{}).Optional(),
}
}
// Edges of the ImportList.
func (ImportList) Edges() []ent.Edge {
return nil
}
type ImportListSettings struct {
//Url string `json:"url"`
}
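
A sketch of what the schema above looks like at the call site: name, type, qulity and storage_id are required, url and settings optional (assumed: an open *ent.Client and the generated ImportListCreate builder; all literal values are examples only):

package example // hypothetical package, not part of the repository

import (
	"context"

	"polaris/ent"
	"polaris/ent/importlist"
)

// addPlexWatchlist sets the four required fields from the schema above.
func addPlexWatchlist(ctx context.Context, client *ent.Client, url string, storageID int) (*ent.ImportList, error) {
	return client.ImportList.Create().
		SetName("plex-watchlist").
		SetType(importlist.Type("plex")). // "plex" or "doulist" per the enum above
		SetQulity("1080p").               // setter keeps the schema's field spelling
		SetStorageID(storageID).
		SetURL(url).
		Save(ctx)
}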


@@ -17,7 +17,9 @@ func (Indexers) Fields() []ent.Field {
field.String("implementation"),
field.String("settings"),
field.Bool("enable_rss").Default(true),
field.Int("priority"),
field.Int("priority").Default(50),
field.Float32("seed_ratio").Optional().Default(0).Comment("minimal seed ratio required before removing torrent"),
field.Bool("disabled").Optional().Default(false),
}
}


@@ -25,10 +25,12 @@ func (Media) Fields() []ent.Field {
field.String("overview"),
field.Time("created_at").Default(time.Now()),
field.String("air_date").Default(""),
field.Enum("resolution").Values("720p", "1080p", "4k").Default("1080p"),
field.Enum("resolution").Values("720p", "1080p", "2160p").Default("1080p"),
field.Int("storage_id").Optional(),
field.String("target_dir").Optional(),
field.Bool("download_history_episodes").Optional().Default(false).Comment("tv series only"),
field.JSON("limiter", MediaLimiter{}).Optional(),
field.JSON("extras", MediaExtras{}).Optional(),
}
}
@@ -38,3 +40,23 @@ func (Media) Edges() []ent.Edge {
edge.To("episodes", Episode.Type),
}
}
type MediaLimiter struct {
SizeMin int `json:"size_min"` //in B
SizeMax int `json:"size_max"` //in B
}
type MediaExtras struct {
IsAdultMovie bool `json:"is_adult_movie"`
JavId string `json:"javid"`
//OriginCountry []string `json:"origin_country"`
OriginalLanguage string `json:"original_language"`
Genres []struct {
ID int64 `json:"id"`
Name string `json:"name"`
} `json:"genres"`
}
func (m *MediaExtras) IsJav() bool {
return m.IsAdultMovie && m.JavId != ""
}
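
A sketch tying the helper types above to the generated setters: build a MediaExtras, check IsJav, and persist it (assumed: an open *ent.Client and the usual UpdateOneID helper):

package example // hypothetical package, not part of the repository

import (
	"context"

	"polaris/ent"
	"polaris/ent/schema"
)

// tagAsJav stores adult-content metadata in the new "extras" JSON column.
// IsJav reports true only when both IsAdultMovie and JavId are set, which is
// exactly the pair written here.
func tagAsJav(ctx context.Context, client *ent.Client, mediaID int, javID string) error {
	extras := schema.MediaExtras{
		IsAdultMovie: true,
		JavId:        javID,
	}
	_ = extras.IsJav() // true for the values above
	return client.Media.UpdateOneID(mediaID).
		SetExtras(extras).
		Exec(ctx)
}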


@@ -15,6 +15,8 @@ func (Storage) Fields() []ent.Field {
return []ent.Field{
field.String("name").Unique(),
field.Enum("implementation").Values("webdav", "local"),
field.String("tv_path").Optional(),
field.String("movie_path").Optional(),
field.String("settings").Optional(),
field.Bool("deleted").Default(false),
field.Bool("default").Default(false),


@@ -20,6 +20,10 @@ type Storage struct {
Name string `json:"name,omitempty"`
// Implementation holds the value of the "implementation" field.
Implementation storage.Implementation `json:"implementation,omitempty"`
// TvPath holds the value of the "tv_path" field.
TvPath string `json:"tv_path,omitempty"`
// MoviePath holds the value of the "movie_path" field.
MoviePath string `json:"movie_path,omitempty"`
// Settings holds the value of the "settings" field.
Settings string `json:"settings,omitempty"`
// Deleted holds the value of the "deleted" field.
@@ -38,7 +42,7 @@ func (*Storage) scanValues(columns []string) ([]any, error) {
values[i] = new(sql.NullBool)
case storage.FieldID:
values[i] = new(sql.NullInt64)
case storage.FieldName, storage.FieldImplementation, storage.FieldSettings:
case storage.FieldName, storage.FieldImplementation, storage.FieldTvPath, storage.FieldMoviePath, storage.FieldSettings:
values[i] = new(sql.NullString)
default:
values[i] = new(sql.UnknownType)
@@ -73,6 +77,18 @@ func (s *Storage) assignValues(columns []string, values []any) error {
} else if value.Valid {
s.Implementation = storage.Implementation(value.String)
}
case storage.FieldTvPath:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field tv_path", values[i])
} else if value.Valid {
s.TvPath = value.String
}
case storage.FieldMoviePath:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field movie_path", values[i])
} else if value.Valid {
s.MoviePath = value.String
}
case storage.FieldSettings:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field settings", values[i])
@@ -133,6 +149,12 @@ func (s *Storage) String() string {
builder.WriteString("implementation=")
builder.WriteString(fmt.Sprintf("%v", s.Implementation))
builder.WriteString(", ")
builder.WriteString("tv_path=")
builder.WriteString(s.TvPath)
builder.WriteString(", ")
builder.WriteString("movie_path=")
builder.WriteString(s.MoviePath)
builder.WriteString(", ")
builder.WriteString("settings=")
builder.WriteString(s.Settings)
builder.WriteString(", ")


@@ -17,6 +17,10 @@ const (
FieldName = "name"
// FieldImplementation holds the string denoting the implementation field in the database.
FieldImplementation = "implementation"
// FieldTvPath holds the string denoting the tv_path field in the database.
FieldTvPath = "tv_path"
// FieldMoviePath holds the string denoting the movie_path field in the database.
FieldMoviePath = "movie_path"
// FieldSettings holds the string denoting the settings field in the database.
FieldSettings = "settings"
// FieldDeleted holds the string denoting the deleted field in the database.
@@ -32,6 +36,8 @@ var Columns = []string{
FieldID,
FieldName,
FieldImplementation,
FieldTvPath,
FieldMoviePath,
FieldSettings,
FieldDeleted,
FieldDefault,
@@ -95,6 +101,16 @@ func ByImplementation(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldImplementation, opts...).ToFunc()
}
// ByTvPath orders the results by the tv_path field.
func ByTvPath(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldTvPath, opts...).ToFunc()
}
// ByMoviePath orders the results by the movie_path field.
func ByMoviePath(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldMoviePath, opts...).ToFunc()
}
// BySettings orders the results by the settings field.
func BySettings(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldSettings, opts...).ToFunc()


@@ -58,6 +58,16 @@ func Name(v string) predicate.Storage {
return predicate.Storage(sql.FieldEQ(FieldName, v))
}
// TvPath applies equality check predicate on the "tv_path" field. It's identical to TvPathEQ.
func TvPath(v string) predicate.Storage {
return predicate.Storage(sql.FieldEQ(FieldTvPath, v))
}
// MoviePath applies equality check predicate on the "movie_path" field. It's identical to MoviePathEQ.
func MoviePath(v string) predicate.Storage {
return predicate.Storage(sql.FieldEQ(FieldMoviePath, v))
}
// Settings applies equality check predicate on the "settings" field. It's identical to SettingsEQ.
func Settings(v string) predicate.Storage {
return predicate.Storage(sql.FieldEQ(FieldSettings, v))
@@ -158,6 +168,156 @@ func ImplementationNotIn(vs ...Implementation) predicate.Storage {
return predicate.Storage(sql.FieldNotIn(FieldImplementation, vs...))
}
// TvPathEQ applies the EQ predicate on the "tv_path" field.
func TvPathEQ(v string) predicate.Storage {
return predicate.Storage(sql.FieldEQ(FieldTvPath, v))
}
// TvPathNEQ applies the NEQ predicate on the "tv_path" field.
func TvPathNEQ(v string) predicate.Storage {
return predicate.Storage(sql.FieldNEQ(FieldTvPath, v))
}
// TvPathIn applies the In predicate on the "tv_path" field.
func TvPathIn(vs ...string) predicate.Storage {
return predicate.Storage(sql.FieldIn(FieldTvPath, vs...))
}
// TvPathNotIn applies the NotIn predicate on the "tv_path" field.
func TvPathNotIn(vs ...string) predicate.Storage {
return predicate.Storage(sql.FieldNotIn(FieldTvPath, vs...))
}
// TvPathGT applies the GT predicate on the "tv_path" field.
func TvPathGT(v string) predicate.Storage {
return predicate.Storage(sql.FieldGT(FieldTvPath, v))
}
// TvPathGTE applies the GTE predicate on the "tv_path" field.
func TvPathGTE(v string) predicate.Storage {
return predicate.Storage(sql.FieldGTE(FieldTvPath, v))
}
// TvPathLT applies the LT predicate on the "tv_path" field.
func TvPathLT(v string) predicate.Storage {
return predicate.Storage(sql.FieldLT(FieldTvPath, v))
}
// TvPathLTE applies the LTE predicate on the "tv_path" field.
func TvPathLTE(v string) predicate.Storage {
return predicate.Storage(sql.FieldLTE(FieldTvPath, v))
}
// TvPathContains applies the Contains predicate on the "tv_path" field.
func TvPathContains(v string) predicate.Storage {
return predicate.Storage(sql.FieldContains(FieldTvPath, v))
}
// TvPathHasPrefix applies the HasPrefix predicate on the "tv_path" field.
func TvPathHasPrefix(v string) predicate.Storage {
return predicate.Storage(sql.FieldHasPrefix(FieldTvPath, v))
}
// TvPathHasSuffix applies the HasSuffix predicate on the "tv_path" field.
func TvPathHasSuffix(v string) predicate.Storage {
return predicate.Storage(sql.FieldHasSuffix(FieldTvPath, v))
}
// TvPathIsNil applies the IsNil predicate on the "tv_path" field.
func TvPathIsNil() predicate.Storage {
return predicate.Storage(sql.FieldIsNull(FieldTvPath))
}
// TvPathNotNil applies the NotNil predicate on the "tv_path" field.
func TvPathNotNil() predicate.Storage {
return predicate.Storage(sql.FieldNotNull(FieldTvPath))
}
// TvPathEqualFold applies the EqualFold predicate on the "tv_path" field.
func TvPathEqualFold(v string) predicate.Storage {
return predicate.Storage(sql.FieldEqualFold(FieldTvPath, v))
}
// TvPathContainsFold applies the ContainsFold predicate on the "tv_path" field.
func TvPathContainsFold(v string) predicate.Storage {
return predicate.Storage(sql.FieldContainsFold(FieldTvPath, v))
}
// MoviePathEQ applies the EQ predicate on the "movie_path" field.
func MoviePathEQ(v string) predicate.Storage {
return predicate.Storage(sql.FieldEQ(FieldMoviePath, v))
}
// MoviePathNEQ applies the NEQ predicate on the "movie_path" field.
func MoviePathNEQ(v string) predicate.Storage {
return predicate.Storage(sql.FieldNEQ(FieldMoviePath, v))
}
// MoviePathIn applies the In predicate on the "movie_path" field.
func MoviePathIn(vs ...string) predicate.Storage {
return predicate.Storage(sql.FieldIn(FieldMoviePath, vs...))
}
// MoviePathNotIn applies the NotIn predicate on the "movie_path" field.
func MoviePathNotIn(vs ...string) predicate.Storage {
return predicate.Storage(sql.FieldNotIn(FieldMoviePath, vs...))
}
// MoviePathGT applies the GT predicate on the "movie_path" field.
func MoviePathGT(v string) predicate.Storage {
return predicate.Storage(sql.FieldGT(FieldMoviePath, v))
}
// MoviePathGTE applies the GTE predicate on the "movie_path" field.
func MoviePathGTE(v string) predicate.Storage {
return predicate.Storage(sql.FieldGTE(FieldMoviePath, v))
}
// MoviePathLT applies the LT predicate on the "movie_path" field.
func MoviePathLT(v string) predicate.Storage {
return predicate.Storage(sql.FieldLT(FieldMoviePath, v))
}
// MoviePathLTE applies the LTE predicate on the "movie_path" field.
func MoviePathLTE(v string) predicate.Storage {
return predicate.Storage(sql.FieldLTE(FieldMoviePath, v))
}
// MoviePathContains applies the Contains predicate on the "movie_path" field.
func MoviePathContains(v string) predicate.Storage {
return predicate.Storage(sql.FieldContains(FieldMoviePath, v))
}
// MoviePathHasPrefix applies the HasPrefix predicate on the "movie_path" field.
func MoviePathHasPrefix(v string) predicate.Storage {
return predicate.Storage(sql.FieldHasPrefix(FieldMoviePath, v))
}
// MoviePathHasSuffix applies the HasSuffix predicate on the "movie_path" field.
func MoviePathHasSuffix(v string) predicate.Storage {
return predicate.Storage(sql.FieldHasSuffix(FieldMoviePath, v))
}
// MoviePathIsNil applies the IsNil predicate on the "movie_path" field.
func MoviePathIsNil() predicate.Storage {
return predicate.Storage(sql.FieldIsNull(FieldMoviePath))
}
// MoviePathNotNil applies the NotNil predicate on the "movie_path" field.
func MoviePathNotNil() predicate.Storage {
return predicate.Storage(sql.FieldNotNull(FieldMoviePath))
}
// MoviePathEqualFold applies the EqualFold predicate on the "movie_path" field.
func MoviePathEqualFold(v string) predicate.Storage {
return predicate.Storage(sql.FieldEqualFold(FieldMoviePath, v))
}
// MoviePathContainsFold applies the ContainsFold predicate on the "movie_path" field.
func MoviePathContainsFold(v string) predicate.Storage {
return predicate.Storage(sql.FieldContainsFold(FieldMoviePath, v))
}
// SettingsEQ applies the EQ predicate on the "settings" field.
func SettingsEQ(v string) predicate.Storage {
return predicate.Storage(sql.FieldEQ(FieldSettings, v))
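As a usage sketch (not part of the changeset above), the generated tv_path/movie_path predicates compose in a query roughly like this; the *ent.Client value, context, and path prefix are assumptions:

package example

import (
    "context"

    "polaris/ent"
    "polaris/ent/storage"
)

// listTvStorages finds storage rows whose tv_path is set and starts with the given prefix.
func listTvStorages(ctx context.Context, client *ent.Client) ([]*ent.Storage, error) {
    return client.Storage.Query().
        Where(
            storage.TvPathNotNil(),
            storage.TvPathHasPrefix("/downloads/tv"),
        ).
        All(ctx)
}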


@@ -31,6 +31,34 @@ func (sc *StorageCreate) SetImplementation(s storage.Implementation) *StorageCre
return sc
}
// SetTvPath sets the "tv_path" field.
func (sc *StorageCreate) SetTvPath(s string) *StorageCreate {
sc.mutation.SetTvPath(s)
return sc
}
// SetNillableTvPath sets the "tv_path" field if the given value is not nil.
func (sc *StorageCreate) SetNillableTvPath(s *string) *StorageCreate {
if s != nil {
sc.SetTvPath(*s)
}
return sc
}
// SetMoviePath sets the "movie_path" field.
func (sc *StorageCreate) SetMoviePath(s string) *StorageCreate {
sc.mutation.SetMoviePath(s)
return sc
}
// SetNillableMoviePath sets the "movie_path" field if the given value is not nil.
func (sc *StorageCreate) SetNillableMoviePath(s *string) *StorageCreate {
if s != nil {
sc.SetMoviePath(*s)
}
return sc
}
// SetSettings sets the "settings" field.
func (sc *StorageCreate) SetSettings(s string) *StorageCreate {
sc.mutation.SetSettings(s)
@@ -171,6 +199,14 @@ func (sc *StorageCreate) createSpec() (*Storage, *sqlgraph.CreateSpec) {
_spec.SetField(storage.FieldImplementation, field.TypeEnum, value)
_node.Implementation = value
}
if value, ok := sc.mutation.TvPath(); ok {
_spec.SetField(storage.FieldTvPath, field.TypeString, value)
_node.TvPath = value
}
if value, ok := sc.mutation.MoviePath(); ok {
_spec.SetField(storage.FieldMoviePath, field.TypeString, value)
_node.MoviePath = value
}
if value, ok := sc.mutation.Settings(); ok {
_spec.SetField(storage.FieldSettings, field.TypeString, value)
_node.Settings = value
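As a usage sketch (not part of the changeset above), the new nillable setters let callers pass optional paths straight through — a nil pointer simply leaves the column unset; Save follows the usual ent builder convention and the settings value is a placeholder:

package example

import (
    "context"

    "polaris/ent"
    "polaris/ent/storage"
)

// createStorage leaves tv_path and movie_path unset when the pointers are nil.
func createStorage(ctx context.Context, client *ent.Client, impl storage.Implementation,
    tvPath, moviePath *string) (*ent.Storage, error) {
    return client.Storage.Create().
        SetImplementation(impl).
        SetNillableTvPath(tvPath).
        SetNillableMoviePath(moviePath).
        SetSettings("{}").
        Save(ctx)
}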


@@ -55,6 +55,46 @@ func (su *StorageUpdate) SetNillableImplementation(s *storage.Implementation) *S
return su
}
// SetTvPath sets the "tv_path" field.
func (su *StorageUpdate) SetTvPath(s string) *StorageUpdate {
su.mutation.SetTvPath(s)
return su
}
// SetNillableTvPath sets the "tv_path" field if the given value is not nil.
func (su *StorageUpdate) SetNillableTvPath(s *string) *StorageUpdate {
if s != nil {
su.SetTvPath(*s)
}
return su
}
// ClearTvPath clears the value of the "tv_path" field.
func (su *StorageUpdate) ClearTvPath() *StorageUpdate {
su.mutation.ClearTvPath()
return su
}
// SetMoviePath sets the "movie_path" field.
func (su *StorageUpdate) SetMoviePath(s string) *StorageUpdate {
su.mutation.SetMoviePath(s)
return su
}
// SetNillableMoviePath sets the "movie_path" field if the given value is not nil.
func (su *StorageUpdate) SetNillableMoviePath(s *string) *StorageUpdate {
if s != nil {
su.SetMoviePath(*s)
}
return su
}
// ClearMoviePath clears the value of the "movie_path" field.
func (su *StorageUpdate) ClearMoviePath() *StorageUpdate {
su.mutation.ClearMoviePath()
return su
}
// SetSettings sets the "settings" field.
func (su *StorageUpdate) SetSettings(s string) *StorageUpdate {
su.mutation.SetSettings(s)
@@ -163,6 +203,18 @@ func (su *StorageUpdate) sqlSave(ctx context.Context) (n int, err error) {
if value, ok := su.mutation.Implementation(); ok {
_spec.SetField(storage.FieldImplementation, field.TypeEnum, value)
}
if value, ok := su.mutation.TvPath(); ok {
_spec.SetField(storage.FieldTvPath, field.TypeString, value)
}
if su.mutation.TvPathCleared() {
_spec.ClearField(storage.FieldTvPath, field.TypeString)
}
if value, ok := su.mutation.MoviePath(); ok {
_spec.SetField(storage.FieldMoviePath, field.TypeString, value)
}
if su.mutation.MoviePathCleared() {
_spec.ClearField(storage.FieldMoviePath, field.TypeString)
}
if value, ok := su.mutation.Settings(); ok {
_spec.SetField(storage.FieldSettings, field.TypeString, value)
}
@@ -223,6 +275,46 @@ func (suo *StorageUpdateOne) SetNillableImplementation(s *storage.Implementation
return suo
}
// SetTvPath sets the "tv_path" field.
func (suo *StorageUpdateOne) SetTvPath(s string) *StorageUpdateOne {
suo.mutation.SetTvPath(s)
return suo
}
// SetNillableTvPath sets the "tv_path" field if the given value is not nil.
func (suo *StorageUpdateOne) SetNillableTvPath(s *string) *StorageUpdateOne {
if s != nil {
suo.SetTvPath(*s)
}
return suo
}
// ClearTvPath clears the value of the "tv_path" field.
func (suo *StorageUpdateOne) ClearTvPath() *StorageUpdateOne {
suo.mutation.ClearTvPath()
return suo
}
// SetMoviePath sets the "movie_path" field.
func (suo *StorageUpdateOne) SetMoviePath(s string) *StorageUpdateOne {
suo.mutation.SetMoviePath(s)
return suo
}
// SetNillableMoviePath sets the "movie_path" field if the given value is not nil.
func (suo *StorageUpdateOne) SetNillableMoviePath(s *string) *StorageUpdateOne {
if s != nil {
suo.SetMoviePath(*s)
}
return suo
}
// ClearMoviePath clears the value of the "movie_path" field.
func (suo *StorageUpdateOne) ClearMoviePath() *StorageUpdateOne {
suo.mutation.ClearMoviePath()
return suo
}
// SetSettings sets the "settings" field.
func (suo *StorageUpdateOne) SetSettings(s string) *StorageUpdateOne {
suo.mutation.SetSettings(s)
@@ -361,6 +453,18 @@ func (suo *StorageUpdateOne) sqlSave(ctx context.Context) (_node *Storage, err e
if value, ok := suo.mutation.Implementation(); ok {
_spec.SetField(storage.FieldImplementation, field.TypeEnum, value)
}
if value, ok := suo.mutation.TvPath(); ok {
_spec.SetField(storage.FieldTvPath, field.TypeString, value)
}
if suo.mutation.TvPathCleared() {
_spec.ClearField(storage.FieldTvPath, field.TypeString)
}
if value, ok := suo.mutation.MoviePath(); ok {
_spec.SetField(storage.FieldMoviePath, field.TypeString, value)
}
if suo.mutation.MoviePathCleared() {
_spec.ClearField(storage.FieldMoviePath, field.TypeString)
}
if value, ok := suo.mutation.Settings(); ok {
_spec.SetField(storage.FieldSettings, field.TypeString, value)
}
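As a usage sketch (not part of the changeset above), the update builders can now set one path and clear the other in a single statement; UpdateOneID/Exec follow the usual ent conventions and the id type is assumed to be int:

package example

import (
    "context"

    "polaris/ent"
)

// retargetStorage points a storage row at a new TV folder and clears its movie folder.
func retargetStorage(ctx context.Context, client *ent.Client, id int, tvPath string) error {
    return client.Storage.UpdateOneID(id).
        SetTvPath(tvPath).
        ClearMoviePath().
        Exec(ctx)
}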


@@ -18,6 +18,8 @@ type Tx struct {
Episode *EpisodeClient
// History is the client for interacting with the History builders.
History *HistoryClient
// ImportList is the client for interacting with the ImportList builders.
ImportList *ImportListClient
// Indexers is the client for interacting with the Indexers builders.
Indexers *IndexersClient
// Media is the client for interacting with the Media builders.
@@ -162,6 +164,7 @@ func (tx *Tx) init() {
tx.DownloadClients = NewDownloadClientsClient(tx.config)
tx.Episode = NewEpisodeClient(tx.config)
tx.History = NewHistoryClient(tx.config)
tx.ImportList = NewImportListClient(tx.config)
tx.Indexers = NewIndexersClient(tx.config)
tx.Media = NewMediaClient(tx.config)
tx.NotificationClient = NewNotificationClientClient(tx.config)

go.mod

@@ -12,20 +12,21 @@ require (
)
require (
github.com/adrg/strutil v0.3.1
github.com/gin-contrib/zap v1.1.3
github.com/nikoksr/notify v1.0.0
github.com/stretchr/testify v1.9.0
)
require (
github.com/BurntSushi/toml v1.4.0 // indirect
github.com/PuerkitoBio/goquery v1.9.2 // indirect
github.com/andybalholm/cascadia v1.3.2 // indirect
github.com/blinkbean/dingtalk v1.1.3 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/go-telegram-bot-api/telegram-bot-api v4.6.4+incompatible // indirect
github.com/gregdel/pushover v1.3.1 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/stretchr/objx v0.5.2 // indirect
github.com/stretchr/testify v1.9.0 // indirect
github.com/technoweenie/multipartstreamer v1.0.1 // indirect
golang.org/x/sync v0.7.0 // indirect
gopkg.in/natefinch/lumberjack.v2 v2.2.1 // indirect

go.sum

@@ -6,10 +6,12 @@ github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0
github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60=
github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
github.com/adrg/strutil v0.3.1 h1:OLvSS7CSJO8lBii4YmBt8jiK9QOtB9CzCzwl4Ic/Fz4=
github.com/adrg/strutil v0.3.1/go.mod h1:8h90y18QLrs11IBffcGX3NW/GFBXCMcNg4M7H6MspPA=
github.com/PuerkitoBio/goquery v1.9.2 h1:4/wZksC3KgkQw7SQgkKotmKljk0M6V8TUvA8Wb4yPeE=
github.com/PuerkitoBio/goquery v1.9.2/go.mod h1:GHPCaP0ODyyxqcNoFGYlAprUFH81NuRPd0GX3Zu2Mvk=
github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8=
github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw=
github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo=
github.com/blinkbean/dingtalk v1.1.3 h1:MbidFZYom7DTFHD/YIs+eaI7kRy52kmWE/sy0xjo6E4=
@@ -80,6 +82,8 @@ github.com/hekmon/cunits/v2 v2.1.0 h1:k6wIjc4PlacNOHwKEMBgWV2/c8jyD4eRMs5mR1BBhI
github.com/hekmon/cunits/v2 v2.1.0/go.mod h1:9r1TycXYXaTmEWlAIfFV8JT+Xo59U96yUJAYHxzii2M=
github.com/hekmon/transmissionrpc/v3 v3.0.0 h1:0Fb11qE0IBh4V4GlOwHNYpqpjcYDp5GouolwrpmcUDQ=
github.com/hekmon/transmissionrpc/v3 v3.0.0/go.mod h1:38SlNhFzinVUuY87wGj3acOmRxeYZAZfrj6Re7UgCDg=
github.com/jordan-wright/email v4.0.1-0.20210109023952-943e75fe5223+incompatible h1:jdpOPRN1zP63Td1hDQbZW73xKmzDvZHzVdNYxhnTMDA=
github.com/jordan-wright/email v4.0.1-0.20210109023952-943e75fe5223+incompatible/go.mod h1:1c7szIrayyPPB/987hsnvNzLushdWf4o/79s3P08L8A=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
@@ -172,6 +176,7 @@ github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65E
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4=
github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/zclconf/go-cty v1.8.0 h1:s4AvqaeQzJIu3ndv4gVIhplVD0krU+bgrcLSVUnaWuA=
github.com/zclconf/go-cty v1.8.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
@@ -184,31 +189,59 @@ golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUu
golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc=
golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.25.0 h1:ypSNr+bnYL2YhwoMt2zPxHFmbAN1KZs/njMG3hxUp30=
golang.org/x/crypto v0.25.0/go.mod h1:T+wALwcMOSE0kXgUAnPAHqTLW+XHgcELELW8VaDgm/M=
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8=
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.19.0 h1:fEdghXQSo20giMthA7cd28ZC+jts4amQ3YMXiP5oMQ8=
golang.org/x/mod v0.19.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
golang.org/x/net v0.27.0 h1:5K3Njcw06/l2y9vpGCSdcxWOYHOUk3dVNGDXN+FvAys=
golang.org/x/net v0.27.0/go.mod h1:dDi0PyhWNoiUOrAS8uXv/vnScO4wnHQO4mj9fn/RytE=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI=
golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4=
golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.23.0 h1:SGsXPZ+2l4JsgaCKkx+FQ9YZ5XEtA1GZYuoDjenLjvg=
golang.org/x/tools v0.23.0/go.mod h1:pnu6ufv6vQkll6szChhK3C3L/ruaIv5eBeztNG8wtsI=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=


@@ -26,13 +26,14 @@ func init() {
MaxSize: 50, // megabytes
MaxBackups: 3,
MaxAge: 30, // days
Compress: true,
})
}
consoleEncoder := zapcore.NewConsoleEncoder(zap.NewDevelopmentEncoderConfig())
logger := zap.New(zapcore.NewCore(consoleEncoder, w, atom), zap.AddCallerSkip(1))
logger := zap.New(zapcore.NewCore(consoleEncoder, w, atom), zap.AddCallerSkip(1), zap.AddCaller())
sugar = logger.Sugar()

pkg/cache/cache.go

@@ -0,0 +1,52 @@
package cache
import (
"polaris/log"
"polaris/pkg/utils"
"time"
)
func NewCache[T comparable, S any](timeout time.Duration) *Cache[T, S] {
c := &Cache[T, S]{
m: utils.Map[T, inner[S]]{},
timeout: timeout,
}
return c
}
type Cache[T comparable, S any] struct {
m utils.Map[T, inner[S]]
timeout time.Duration
}
type inner[S any] struct {
t time.Time
s S
}
func (c *Cache[T, S]) Set(key T, value S) {
c.m.Store(key, inner[S]{t: time.Now(), s: value})
}
func (c *Cache[T, S]) Get(key T) (S, bool) {
c.m.Range(func(key T, value inner[S]) bool {
if time.Since(value.t) > c.timeout {
log.Debugf("delete old cache: %v", key)
c.m.Delete(key)
}
return true
})
v, ok := c.m.Load(key)
if !ok {
return getZero[S](), ok
}
return v.s, ok
}
func getZero[T any]() T {
var result T
return result
}
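As a usage sketch (not part of the changeset above), the new generic cache keys any comparable type to any value type and drops entries older than the timeout on lookup; the key, value, and timeout here are arbitrary examples:

package example

import (
    "fmt"
    "time"

    "polaris/pkg/cache"
)

func cacheExample() {
    c := cache.NewCache[string, int](30 * time.Minute)
    c.Set("answer", 42)

    // Get sweeps expired entries before the lookup, so stale values are never returned.
    if v, ok := c.Get("answer"); ok {
        fmt.Println("cached:", v)
    }
}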


@@ -1,13 +1,14 @@
package pkg
type Torrent interface {
Name() string
Progress() int
Name() (string, error)
Progress() (int, error)
Stop() error
Start() error
Remove() error
Save() string
Exists() bool
SeedRatio() (float64, error)
}
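As a usage sketch (not part of the changeset above), callers now have to handle the errors returned by Name, Progress, and the new SeedRatio; the import path of the pkg package and the logging helpers are assumptions based on the rest of the repository:

package example

import (
    "polaris/log"
    "polaris/pkg"
)

// reportTorrent logs basic status for a torrent via the updated interface.
func reportTorrent(t pkg.Torrent) {
    name, err := t.Name()
    if err != nil {
        log.Warnf("get torrent name: %v", err)
        return
    }
    progress, err := t.Progress()
    if err != nil {
        log.Warnf("get torrent progress: %v", err)
        return
    }
    ratio, err := t.SeedRatio()
    if err != nil {
        log.Warnf("get seed ratio: %v", err)
        return
    }
    log.Infof("%s: %d%% complete, seed ratio %.2f", name, progress, ratio)
}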


@@ -0,0 +1,100 @@
package douban
import (
"fmt"
"net/http"
"polaris/log"
"polaris/pkg/importlist"
"strconv"
"strings"
"github.com/PuerkitoBio/goquery"
)
const ua = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36"
func ParseDoulist(doulistUrl string) (*importlist.Response, error) {
if !strings.Contains(doulistUrl, "doulist") {
return nil, fmt.Errorf("not doulist")
}
req, err := http.NewRequest("GET", doulistUrl, nil)
if err != nil {
return nil, err
}
req.Header.Set("User-Agent", ua)
res, err := http.DefaultClient.Do(req)
if err != nil {
return nil, err
}
defer res.Body.Close()
if res.StatusCode != 200 {
return nil, fmt.Errorf("status code error: %d %s", res.StatusCode, res.Status)
}
doc, err := goquery.NewDocumentFromReader(res.Body)
if err != nil {
return nil, err
}
doc.Find("div[class=doulist-item]").Each(func(i int, selection *goquery.Selection) {
titleDiv := selection.Find("div[class=title]")
link := titleDiv.Find("div>a")
href, ok := link.Attr("href")
if !ok {
return
}
abstract := selection.Find("div[class=abstract]")
lines := strings.Split(abstract.Text(), "\n")
year := 0
for _, l := range lines {
if strings.Contains(l, "年份") {
ppp := strings.Split(l, ":")
if len(ppp) < 2 {
continue
} else {
n := ppp[1]
n1, err := strconv.Atoi(strings.TrimSpace(n))
if err != nil {
log.Errorf("convert year number %s to int error: %v", n, err)
continue
}
year = n1
}
}
}
item := importlist.Item{
Title: strings.TrimSpace(link.Text()),
Year: year,
}
_ = item
println(link.Text(), href)
})
return nil, nil
}
func parseDetailPage(url string) (string, error) {
req, err := http.NewRequest("GET", url, nil)
if err != nil {
return "", err
}
req.Header.Set("User-Agent", ua)
res, err := http.DefaultClient.Do(req)
if err != nil {
return "", err
}
defer res.Body.Close()
if res.StatusCode != 200 {
return "", fmt.Errorf("status code error: %d %s", res.StatusCode, res.Status)
}
doc, err := goquery.NewDocumentFromReader(res.Body)
if err != nil {
return "", err
}
_ = doc
return "", nil
}


@@ -0,0 +1,11 @@
package douban
import (
"polaris/log"
"testing"
)
func TestParseDoulist(t *testing.T) {
r, err := ParseDoulist("https://www.douban.com/doulist/166422/")
log.Info(r, err)
}


@@ -0,0 +1,13 @@
package importlist
type Item struct {
Title string
Year int
ImdbID string
TvdbID string
TmdbID string
}
type Response struct {
Items []Item
}


@@ -0,0 +1,96 @@
package plexwatchlist
import (
"encoding/xml"
"io"
"net/http"
"polaris/pkg/importlist"
"strings"
"github.com/pkg/errors"
)
type Response struct {
XMLName xml.Name `xml:"rss"`
Text string `xml:",chardata"`
Atom string `xml:"atom,attr"`
Media string `xml:"media,attr"`
Version string `xml:"version,attr"`
Channel struct {
Text string `xml:",chardata"`
Title string `xml:"title"`
Link struct {
Text string `xml:",chardata"`
Href string `xml:"href,attr"`
Rel string `xml:"rel,attr"`
Type string `xml:"type,attr"`
} `xml:"link"`
Description string `xml:"description"`
Category string `xml:"category"`
Item []struct {
Text string `xml:",chardata"`
Title string `xml:"title"`
PubDate string `xml:"pubDate"`
Link string `xml:"link"`
Description string `xml:"description"`
Category string `xml:"category"`
Credit []struct {
Text string `xml:",chardata"`
Role string `xml:"role,attr"`
} `xml:"credit"`
Thumbnail struct {
Text string `xml:",chardata"`
URL string `xml:"url,attr"`
} `xml:"thumbnail"`
Keywords string `xml:"keywords"`
Rating struct {
Text string `xml:",chardata"`
Scheme string `xml:"scheme,attr"`
} `xml:"rating"`
Guid struct {
Text string `xml:",chardata"`
IsPermaLink string `xml:"isPermaLink,attr"`
} `xml:"guid"`
} `xml:"item"`
} `xml:"channel"`
}
func (r *Response) convert() *importlist.Response {
res := &importlist.Response{}
for _, im := range r.Channel.Item {
item := importlist.Item{
Title: im.Title,
}
id := strings.ToLower(im.Guid.Text)
if strings.HasPrefix(id, "tvdb") {
tvdbid := strings.TrimPrefix(id, "tvdb://")
item.TvdbID = tvdbid
} else if strings.HasPrefix(id, "imdb") {
imdbid := strings.TrimPrefix(id, "imdb://")
item.ImdbID = imdbid
} else if strings.HasPrefix(id, "tmdb") {
tmdbid := strings.TrimPrefix(id, "tmdb://")
item.TmdbID = tmdbid
}
res.Items = append(res.Items, item)
}
return res
}
func ParsePlexWatchlist(url string) (*importlist.Response, error) {
resp, err := http.Get(url)
if err != nil {
return nil, errors.Wrap(err, "http get")
}
defer resp.Body.Close()
data, err := io.ReadAll(resp.Body)
if err != nil {
return nil, errors.Wrap(err, "read data")
}
var rrr Response
err = xml.Unmarshal(data, &rrr)
if err != nil {
return nil, errors.Wrap(err, "xml")
}
return rrr.convert(), nil
}
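As a usage sketch (not part of the changeset above), the parser maps each Plex RSS item's guid (tvdb://, imdb://, tmdb://) onto the importlist.Item ID fields; the import path of the plexwatchlist package and the URL are assumptions:

package example

import (
    "polaris/log"
    "polaris/pkg/importlist/plexwatchlist"
)

// printWatchlist fetches a Plex RSS watchlist and logs the external IDs found.
func printWatchlist(url string) error {
    resp, err := plexwatchlist.ParsePlexWatchlist(url)
    if err != nil {
        return err
    }
    for _, item := range resp.Items {
        log.Infof("title=%s imdb=%s tvdb=%s tmdb=%s", item.Title, item.ImdbID, item.TvdbID, item.TmdbID)
    }
    return nil
}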

pkg/metadata/doc.go

@@ -0,0 +1,23 @@
package metadata
/*
tv name examples
Cowboy Cartel S01E02 XviD-AFG [eztv]
The.Bold.and.the.Beautiful.S37E219.XviD-AFG
As Husband As Wife 2024 S01E05-E06 1080p WEB-DL HEVC DDP 2Audios-QHstudIo
Twinkle Love 2024 S04 Complete 2160p WEB-DL HEVC AAC-QHstudIo
[HorribleSubs] One Punch Man S2 - 08 [720p].mkv
[Breeze] One Punch Man S01 S02 [1080p BD AV1][dual audio]
[HYSUB]ONE PUNCH MAN[S1+S2][BDrip][GB_MP4][1920X1080]
Cowboy Cartel S01E04 Photo Finish 1080p ATVP WEB-DL DDP5 1 Atmos H 264-FLUX [eztv]
Limitless Wrestling 2021 01 06 The Road Season 2 Episode 12 XviD-AFG [eztv]
[千夏字幕组][小市民系列_Shoushimin Series][第03话][1080p_HEVC][简繁内封][招募新人]
[OPFans楓雪動漫][ONE PIECE 海賊王][第1113話][周日版][1080p][MP4][簡體]
[桜都字幕组] 亦叶亦花 / Nanare Hananare [04][1080p][简体内嵌]
[ANi] 戰國妖狐 千魔混沌篇 - 16 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]
[ANi] 這是妳與我的最後戰場,或是開創世界的聖戰 第二季 - 04 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]
[桜都字幕组][一拳超人 第2季/One Punch Man 2nd Season][01-12 END][BIG5][720P]
一拳超人第二季One.Punch.Man.Season2.2160p.10Bit.HEVC.AAC.CHS&JPN
*/


@@ -8,10 +8,10 @@ import (
)
type MovieMetadata struct {
NameEn string
NameCN string
Name string
Year int
Resolution string
IsQingban bool
}
func ParseMovie(name string) *MovieMetadata {
@@ -29,16 +29,44 @@ func ParseMovie(name string) *MovieMetadata {
panic(fmt.Sprintf("convert %s error: %v", y, err))
}
meta.Year = n
}
if yearIndex != -1 {
meta.NameEn = name[:yearIndex]
} else {
meta.NameEn = name
yearRe := regexp.MustCompile(`\d{4}`)
yearMatches := yearRe.FindAllString(name, -1)
if len(yearMatches) > 0 {
n, err := strconv.Atoi(yearMatches[0])
if err != nil {
panic(fmt.Sprintf("convert %s error: %v", yearMatches[0], err))
}
meta.Year = n
}
}
if yearIndex != -1 {
meta.Name = name[:yearIndex]
} else {
meta.Name = name
}
resRe := regexp.MustCompile(`\d{3,4}p`)
resMatches := resRe.FindAllString(name, -1)
if len(resMatches) > 0 {
meta.Resolution = resMatches[0]
}
meta.IsQingban = isQiangban(name)
return meta
}
// https://en.wikipedia.org/wiki/Pirated_movie_release_types
func isQiangban(name string) bool {
qiangbanFilter := []string{"CAMRip","CAM-Rip", "CAM", "HDCAM", "TS","TSRip", "HDTS", "TELESYNC", "PDVD", "PreDVDRip", "TC", "HDTC", "TELECINE", "WP", "WORKPRINT"}
re := regexp.MustCompile(`\W`)
name = re.ReplaceAllString(strings.ToLower(name), " ")
fields := strings.Fields(name)
for _, q := range qiangbanFilter {
for _, f := range fields {
if strings.EqualFold(q, f) {
return true
}
}
}
return false
}
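As a usage sketch (not part of the changeset above), the reworked parser now exposes a single Name plus the cam-rip check; the release name below is made up:

package example

import (
    "fmt"

    "polaris/pkg/metadata"
)

func parseMovieExample() {
    meta := metadata.ParseMovie("Some.Movie.2023.1080p.HDCAM.x264-GROUP")
    // Name keeps everything before the year, Resolution is the first NNNNp token,
    // and IsQingban flags cam/telesync style releases such as HDCAM.
    fmt.Printf("name=%q year=%d res=%s cam=%v\n", meta.Name, meta.Year, meta.Resolution, meta.IsQingban)
}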


@@ -2,6 +2,7 @@ package metadata
import (
"fmt"
"polaris/log"
"polaris/pkg/utils"
"regexp"
"strconv"
@@ -29,20 +30,11 @@ func ParseTv(name string) *Metadata {
func parseEnglishName(name string) *Metadata {
re := regexp.MustCompile(`[^\p{L}\w\s]`)
name = re.ReplaceAllString(strings.ToLower(name), " ")
splits := strings.Split(strings.TrimSpace(name), " ")
var newSplits []string
for _, p := range splits {
p = strings.TrimSpace(p)
if p == "" {
continue
}
newSplits = append(newSplits, p)
}
newSplits := strings.Split(strings.TrimSpace(name), " ")
seasonRe := regexp.MustCompile(`^s\d{1,2}`)
resRe := regexp.MustCompile(`^\d{3,4}p`)
episodeRe := regexp.MustCompile(`e\d{1,2}`)
episodeRe := regexp.MustCompile(`e\d{1,3}`)
var seasonIndex = -1
var episodeIndex = -1
@@ -58,7 +50,7 @@ func parseEnglishName(name string) *Metadata {
} else if resRe.MatchString(p) {
resIndex = i
}
if episodeRe.MatchString(p) {
if i >= seasonIndex && episodeRe.MatchString(p) {
episodeIndex = i
}
}
@@ -137,7 +129,7 @@ func parseEnglishName(name string) *Metadata {
//resolution exists
meta.Resolution = newSplits[resIndex]
}
if meta.Episode == -1 || strings.Contains(name, "complete") {
if meta.Episode == -1 {
meta.Episode = -1
meta.IsSeasonPack = true
}
@@ -154,9 +146,11 @@ func parseEnglishName(name string) *Metadata {
}
func parseChineseName(name string) *Metadata {
var meta = &Metadata{
Season: 1,
var meta = parseEnglishName(name)
if meta.Season != -1 && (meta.Episode != -1 || meta.IsSeasonPack) {
return meta
}
meta = &Metadata{Season: 1}
//season pack
packRe := regexp.MustCompile(`(\d{1,2}-\d{1,2})|(全集)`)
if packRe.MatchString(name) {
@@ -177,7 +171,7 @@ func parseChineseName(name string) *Metadata {
}
//episode number
re1 := regexp.MustCompile(`\[\d{1,2}\]`)
re1 := regexp.MustCompile(`\[\d{1,3}\]`)
episodeMatches1 := re1.FindAllString(name, -1)
if len(episodeMatches1) > 0 { //[11] [1080p]
epNum := strings.TrimRight(strings.TrimLeft(episodeMatches1[0], "["), "]")
@@ -235,12 +229,17 @@ func parseChineseName(name string) *Metadata {
}
meta.Season = n
} else {
seasonRe1 := regexp.MustCompile(`第.{1}季`)
seasonRe1 := regexp.MustCompile(`第.{1,2}季`)
seasonMatches := seasonRe1.FindAllString(name, -1)
if len(seasonMatches) > 0 {
se := []rune(seasonMatches[0])
seNum := se[1]
meta.Season = chinese2Num[string(seNum)]
m1 := []rune(seasonMatches[0])
seNum := m1[1 : len(m1)-1]
n, err := strconv.Atoi(string(seNum))
if err != nil {
log.Warnf("parse season number %v error: %v, try to parse using chinese", seNum, err)
n = chinese2Num[string(seNum)]
}
meta.Season = n
}
}
@@ -252,24 +251,29 @@ func parseChineseName(name string) *Metadata {
}
//tv name
title := name
fields := strings.FieldsFunc(title, func(r rune) bool {
fields := strings.FieldsFunc(name, func(r rune) bool {
return r == '[' || r == ']' || r == '【' || r == '】'
})
title = ""
titleCn := ""
title := ""
for _, p := range fields { //pick the longest matching field, which is most likely the title
if len([]rune(p)) > len([]rune(title)) {
if utils.ContainsChineseChar(p) && len([]rune(p)) > len([]rune(titleCn)) { //longest field containing Chinese characters
titleCn = p
}
if len([]rune(p)) > len([]rune(title)) { //longest field overall
title = p
}
}
re := regexp.MustCompile(`[^\p{L}\w\s]`)
title = re.ReplaceAllString(strings.TrimSpace(strings.ToLower(title)), "")
title = re.ReplaceAllString(strings.TrimSpace(strings.ToLower(title)), "") //strip punctuation
titleCn = re.ReplaceAllString(strings.TrimSpace(strings.ToLower(titleCn)), "")
meta.NameCn = title
meta.NameCn = titleCn
cnRe := regexp.MustCompile(`\p{Han}.*\p{Han}`)
cnmatches := cnRe.FindAllString(title, -1)
cnmatches := cnRe.FindAllString(titleCn, -1)
//the longest run of Chinese characters inside titleCn
if len(cnmatches) > 0 {
for _, t := range cnmatches {
if len([]rune(t)) > len([]rune(meta.NameCn)) {
@@ -278,12 +282,13 @@ func parseChineseName(name string) *Metadata {
}
}
//match the longest Latin (ASCII) substring in title
enRe := regexp.MustCompile(`[[:ascii:]]*`)
enM := enRe.FindAllString(title, -1)
if len(enM) > 0 {
for _, t := range enM {
if len(t) > len(meta.NameEn) {
meta.NameEn = strings.ToLower(t)
meta.NameEn = strings.TrimSpace(strings.ToLower(t))
}
}
}

pkg/metadata/tv_test.go

@@ -0,0 +1,147 @@
package metadata
import (
"polaris/log"
"testing"
"github.com/stretchr/testify/assert"
)
func Test_ParseTV1(t *testing.T) {
s1 := "Twinkle Love 2024 S04 Complete 2160p WEB-DL HEVC AAC-QHstudIo"
m := ParseTv(s1)
log.Infof("results: %+v", m)
assert.Equal(t, m.Season, 4)
assert.Equal(t, m.IsSeasonPack, true)
assert.Equal(t, m.Resolution, "2160p")
}
func Test_ParseTV2(t *testing.T) {
s1 := "Cowboy Cartel S01E04 Photo Finish 1080p ATVP WEB-DL DDP5 1 Atmos H 264-FLUX [eztv] "
m := ParseTv(s1)
log.Infof("results: %+v", m)
assert.Equal(t, m.Season, 1)
assert.Equal(t, m.Episode, 4)
assert.Equal(t, m.IsSeasonPack, false)
assert.Equal(t, m.Resolution, "1080p")
}
func Test_ParseTV3(t *testing.T) {
s1 := "The.Bold.and.the.Beautiful.S37E219.XviD-AFG "
m := ParseTv(s1)
log.Infof("results: %+v", m)
assert.Equal(t, m.Season, 37)
assert.Equal(t, m.Episode, 219)
assert.Equal(t, m.IsSeasonPack, false)
//assert.Equal(t, m.Resolution, "1080p")
}
func Test_ParseTV4(t *testing.T) {
s1 := "Limitless Wrestling 2021 01 06 The Road Season 2 Episode 12 XviD-AFG [eztv] "
m := ParseTv(s1)
log.Infof("results: %+v", m)
assert.Equal(t, m.Season, 2)
//assert.Equal(t, m.Episode, 219)
assert.Equal(t, m.IsSeasonPack, true)
//assert.Equal(t, m.Resolution, "1080p")
}
func Test_ParseTV5(t *testing.T) {
s1 := "[Breeze] One Punch Man S01 S02 [1080p BD AV1][dual audio]"
m := ParseTv(s1)
log.Infof("results: %+v", m)
assert.Equal(t, m.Season, 1)
//assert.Equal(t, m.Episode, 219)
assert.Equal(t, m.IsSeasonPack, true)
//assert.Equal(t, m.Resolution, "1080p")
}
func Test_ParseTV6(t *testing.T) {
s1 := "[千夏字幕组][小市民系列_Shoushimin Series][第03话][1080p_HEVC][简繁内封][招募新人]"
m := ParseTv(s1)
log.Infof("results: %+v", m)
assert.Equal(t, m.Season, 1)
assert.Equal(t, m.Episode, 3)
assert.Equal(t, m.IsSeasonPack, false)
assert.Equal(t, m.Resolution, "1080p")
}
func Test_ParseTV7(t *testing.T) {
s1 := " [OPFans楓雪動漫][ONE PIECE 海賊王][第1113話][周日版][1080p][MP4][簡體]"
m := ParseTv(s1)
log.Infof("results: %+v", m)
assert.Equal(t, m.Season, 1)
assert.Equal(t, m.Episode, 1113)
assert.Equal(t, m.IsSeasonPack, false)
assert.Equal(t, m.Resolution, "1080p")
}
func Test_ParseTV8(t *testing.T) {
s1 := "[桜都字幕组] 亦叶亦花 / Nanare Hananare [04][1080p][简体内嵌] "
m := ParseTv(s1)
log.Infof("results: %+v", m)
assert.Equal(t, m.Season, 1)
assert.Equal(t, m.Episode, 4)
assert.Equal(t, m.IsSeasonPack, false)
assert.Equal(t, m.Resolution, "1080p")
}
func Test_ParseTV9(t *testing.T) {
s1 := "[ANi] 戰國妖狐 千魔混沌篇 - 16 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"
m := ParseTv(s1)
log.Infof("results: %+v", m)
assert.Equal(t, m.Season, 1)
assert.Equal(t, m.Episode, 16)
assert.Equal(t, m.IsSeasonPack, false)
assert.Equal(t, m.Resolution, "1080p")
}
func Test_ParseTV10(t *testing.T) {
s1 := " [桜都字幕组][一拳超人 第2季/One Punch Man 2nd Season][01-12 END][BIG5][720P]"
m := ParseTv(s1)
log.Infof("results: %+v", m)
assert.Equal(t, 2, m.Season)
//assert.Equal(t, 01, m.Episode)
assert.Equal(t, true, m.IsSeasonPack)
assert.Equal(t, "720p", m.Resolution)
}
func Test_ParseTV11(t *testing.T) {
s1 := " [ANi] 這是妳與我的最後戰場,或是開創世界的聖戰 第二季 - 04 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4] "
m := ParseTv(s1)
log.Infof("results: %+v", m)
assert.Equal(t, 2, m.Season)
assert.Equal(t, 4, m.Episode)
assert.Equal(t, false, m.IsSeasonPack)
assert.Equal(t, "1080p", m.Resolution)
}
func Test_ParseTV12(t *testing.T) {
s1 := " 牛仔Cowboy Cartel S02E04 Photo Finish 1080p ATVP WEB-DL DDP5 1 Atmos H 264-FLUX [eztv] "
m := ParseTv(s1)
log.Infof("results: %+v", m)
assert.Equal(t, 2, m.Season)
assert.Equal(t, 4, m.Episode)
assert.Equal(t, false, m.IsSeasonPack)
assert.Equal(t, "1080p", m.Resolution)
}
func Test_ParseTV13(t *testing.T) {
s1 := "House of Dragon 2024 1080p S02E08 Leaked HQCAM NOT COMPLETE English Audio x264 ESub BOTHD"
m := ParseTv(s1)
log.Infof("results: %+v", m)
assert.Equal(t, 2, m.Season)
assert.Equal(t, 8, m.Episode)
assert.Equal(t, false, m.IsSeasonPack)
assert.Equal(t, "1080p", m.Resolution)
}
func Test_ParseTV14(t *testing.T) {
s1 := "[GM-Team][国漫][斗破苍穹 第5季][Fights Break Sphere ][2022][113][HEVC][GB][4K]"
m := ParseTv(s1)
log.Infof("results: %+v", m)
assert.Equal(t, 5, m.Season)
assert.Equal(t, 113, m.Episode)
assert.Equal(t, false, m.IsSeasonPack)
//assert.Equal(t, "720p", m.Resolution)
}


@@ -13,7 +13,10 @@ import (
type Storage interface {
Move(src, dest string) error
Copy(src, dest string) error
ReadDir(dir string) ([]fs.FileInfo, error)
ReadFile(string) ([]byte, error)
WriteFile(string, []byte) error
}
func NewLocalStorage(dir string) (*LocalStorage, error) {
@@ -26,10 +29,20 @@ type LocalStorage struct {
dir string
}
func (l *LocalStorage) Move(src, dest string) error {
targetDir := filepath.Join(l.dir, dest)
os.MkdirAll(filepath.Dir(targetDir), os.ModePerm)
err := filepath.Walk(src, func(path string, info fs.FileInfo, err error) error {
func (l *LocalStorage) Copy(src, destDir string) error {
os.MkdirAll(filepath.Join(l.dir, destDir), os.ModePerm)
targetBase := filepath.Join(l.dir, destDir, filepath.Base(src)) //single-file case: append the file name, behaves like "move filename ./dir/"
info, err := os.Stat(src)
if err != nil {
return errors.Wrap(err, "read source dir")
}
if info.IsDir() { //directory case: move only its contents, not the directory itself, behaves like "move dirname/* target_dir/"
targetBase = filepath.Join(l.dir, destDir)
}
log.Debugf("local storage target base dir is: %v", targetBase)
err = filepath.Walk(src, func(path string, info fs.FileInfo, err error) error {
if err != nil {
return err
}
@@ -37,24 +50,28 @@ func (l *LocalStorage) Move(src, dest string) error {
if err != nil {
return errors.Wrapf(err, "relation between %s and %s", src, path)
}
destName := filepath.Join(targetDir, rel)
destName := filepath.Join(targetBase, rel)
if info.IsDir() {
os.Mkdir(destName, os.ModePerm)
} else { //is file
if writer, err := os.Create(destName); err != nil {
return errors.Wrapf(err, "create file %s", destName)
} else {
defer writer.Close()
if f, err := os.OpenFile(path, os.O_RDONLY, os.ModePerm); err != nil {
return errors.Wrapf(err, "read file %v", path)
} else { //open success
defer f.Close()
_, err := io.Copy(writer, f)
if err != nil {
return errors.Wrap(err, "transmitting data error")
if err := os.Link(path, destName); err != nil {
log.Warnf("hard link file error: %v, will try copy file, source: %s, dest: %s", err, path, destName)
if writer, err := os.OpenFile(destName, os.O_RDWR|os.O_CREATE|os.O_TRUNC, os.ModePerm); err != nil {
return errors.Wrapf(err, "create file %s", destName)
} else {
defer writer.Close()
if f, err := os.OpenFile(path, os.O_RDONLY, os.ModePerm); err != nil {
return errors.Wrapf(err, "read file %v", path)
} else { //open success
defer f.Close()
_, err := io.Copy(writer, f)
if err != nil {
return errors.Wrap(err, "transmitting data error")
}
}
}
}
}
log.Infof("file copy complete: %v", destName)
@@ -63,10 +80,26 @@ func (l *LocalStorage) Move(src, dest string) error {
if err != nil {
return errors.Wrap(err, "move file error")
}
return os.RemoveAll(src)
return nil
}
func (l *LocalStorage) Move(src, destDir string) error {
if err := l.Copy(src, destDir); err != nil {
return err
}
return os.RemoveAll(src)
}
func (l *LocalStorage) ReadDir(dir string) ([]fs.FileInfo, error) {
return ioutil.ReadDir(filepath.Join(l.dir, dir))
}
func (l *LocalStorage) ReadFile(name string) ([]byte, error) {
return os.ReadFile(filepath.Join(l.dir, name))
}
func (l *LocalStorage) WriteFile(name string, data []byte) error {
path := filepath.Join(l.dir, name)
os.MkdirAll(filepath.Dir(path), os.ModePerm)
return os.WriteFile(path, data, os.ModePerm)
}


@@ -14,8 +14,8 @@ import (
)
type WebdavStorage struct {
fs *gowebdav.Client
dir string
fs *gowebdav.Client
dir string
changeMediaHash bool
}
@@ -25,18 +25,24 @@ func NewWebdavStorage(url, user, password, path string, changeMediaHash bool) (*
return nil, errors.Wrap(err, "connect webdav")
}
return &WebdavStorage{
fs: c,
fs: c,
dir: path,
}, nil
}
func (w *WebdavStorage) Move(local, remote string) error {
remoteBase := filepath.Join(w.dir,remote)
func (w *WebdavStorage) Copy(local, remoteDir string) error {
remoteBase := filepath.Join(w.dir, remoteDir, filepath.Base(local))
info, err := os.Stat(local)
if err != nil {
return errors.Wrap(err, "read source dir")
}
if info.IsDir() { //directory case: move only its contents, not the directory itself, behaves like "move dirname/* target_dir/"
remoteBase = filepath.Join(w.dir, remoteDir)
}
//log.Infof("remove all content in %s", remoteBase)
//w.fs.RemoveAll(remoteBase)
err := filepath.Walk(local, func(path string, info fs.FileInfo, err error) error {
err = filepath.Walk(local, func(path string, info fs.FileInfo, err error) error {
if err != nil {
return errors.Wrapf(err, "read file %v", path)
}
@@ -73,7 +79,7 @@ func (w *WebdavStorage) Move(local, remote string) error {
r.Header.Set("Content-Type", mtype.String())
r.ContentLength = info.Size()
}
if err := w.fs.WriteStream(remoteName, f, 0666, callback); err != nil {
return errors.Wrap(err, "transmitting data error")
}
@@ -85,9 +91,24 @@ func (w *WebdavStorage) Move(local, remote string) error {
if err != nil {
return errors.Wrap(err, "move file error")
}
return nil
}
func (w *WebdavStorage) Move(local, remoteDir string) error {
if err := w.Copy(local, remoteDir); err != nil {
return err
}
return os.RemoveAll(local)
}
func (w *WebdavStorage) ReadDir(dir string) ([]fs.FileInfo, error) {
return w.fs.ReadDir(filepath.Join(w.dir, dir))
}
func (w *WebdavStorage) ReadFile(name string) ([]byte, error) {
return w.fs.Read(filepath.Join(w.dir, name))
}
func (w *WebdavStorage) WriteFile(name string, data []byte) error {
return w.fs.Write(filepath.Join(w.dir, name), data, os.ModePerm)
}


@@ -1,33 +1,56 @@
package tmdb
import (
"net/http"
"net/url"
"polaris/log"
"strconv"
"strings"
"time"
tmdb "github.com/cyruzin/golang-tmdb"
"github.com/pkg/errors"
)
type Client struct {
apiKey string
tmdbClient *tmdb.Client
apiKey string
tmdbClient *tmdb.Client
enableAdultContent bool
}
func NewClient(apiKey string) (*Client, error) {
func NewClient(apiKey, proxyUrl string, enableAdultContent bool) (*Client, error) {
tmdbClient, err := tmdb.Init(apiKey)
if err != nil {
return nil, errors.Wrap(err, "new tmdb client")
}
if proxyUrl != "" {
//set proxy
u, err := url.Parse(proxyUrl)
if err != nil {
log.Errorf("parse proxy %v error, skip: %v", proxyUrl, err)
} else {
tmdbClient.SetClientConfig(http.Client{
Timeout: time.Second * 10,
Transport: &http.Transport{
Proxy: http.ProxyURL(u),
MaxIdleConns: 10,
IdleConnTimeout: 15 * time.Second,
},
})
}
}
return &Client{
apiKey: apiKey,
tmdbClient: tmdbClient,
apiKey: apiKey,
tmdbClient: tmdbClient,
enableAdultContent: enableAdultContent,
}, nil
}
func (c *Client) GetTvDetails(id int, language string) (*tmdb.TVDetails, error) {
d, err := c.tmdbClient.GetTVDetails(id, withLangOption(language))
d, err := c.tmdbClient.GetTVDetails(id, withExternalIDs(withLangOption(language)))
if err != nil {
return nil, errors.Wrap(err, "get tv detail")
}
@@ -36,8 +59,8 @@ func (c *Client) GetTvDetails(id int, language string) (*tmdb.TVDetails, error)
if !episodeNameUseful(d.LastEpisodeToAir.Name) {
log.Debug("should fetch english version")
var detailEN *tmdb.TVDetails
if language == "zh-CN" {
detailEN, err = c.tmdbClient.GetTVDetails(id, withLangOption("en-US"))
if language == "zh-CN" || language == "" {
detailEN, err = c.tmdbClient.GetTVDetails(id, withExternalIDs(withLangOption("en-US")))
if err != nil {
return d, nil
}
@@ -50,12 +73,11 @@ func (c *Client) GetTvDetails(id int, language string) (*tmdb.TVDetails, error)
}
}
return d, err
}
func (c *Client) GetMovieDetails(id int, language string) (*tmdb.MovieDetails, error) {
return c.tmdbClient.GetMovieDetails(id, withLangOption(language))
return c.tmdbClient.GetMovieDetails(id, withExternalIDs(withLangOption(language)))
}
func (c *Client) SearchTvShow(query string, lang string) (*tmdb.SearchTVShows, error) {
@@ -94,6 +116,9 @@ func (c *Client) SearchMedia(query string, lang string, page int) (*SearchResult
}
options := withLangOption(lang)
options["page"] = strconv.Itoa(page)
if c.enableAdultContent {
options["include_adult"] = "true"
}
res, err := c.tmdbClient.GetSearchMulti(query, options)
if err != nil {
return nil, errors.Wrap(err, "query imdb")
@@ -142,7 +167,7 @@ func (c *Client) GetEposideDetail(id, seasonNumber, eposideNumber int, language
}
if !episodeNameUseful(d.Name) {
var detailEN *tmdb.TVEpisodeDetails
if language == "zh-CN" {
if language == "zh-CN" || language == "" {
detailEN, err = c.tmdbClient.GetTVEpisodeDetails(id, seasonNumber, eposideNumber, withLangOption("en-US"))
if err != nil {
return d, nil
@@ -152,7 +177,7 @@ func (c *Client) GetEposideDetail(id, seasonNumber, eposideNumber int, language
return d, err
}
d.Name = detailEN.Name
d.Overview = detailEN.Overview
d.Overview = detailEN.Overview
}
return d, err
@@ -164,7 +189,7 @@ func (c *Client) GetSeasonDetails(id, seasonNumber int, language string) (*tmdb.
return nil, err
}
var detailEN *tmdb.TVSeasonDetails
if language == "zh-CN" {
if language == "zh-CN" || language == "" {
detailEN, err = c.tmdbClient.GetTVSeasonDetails(id, seasonNumber, withLangOption("en-US"))
if err != nil {
return detailCN, nil
@@ -172,7 +197,7 @@ func (c *Client) GetSeasonDetails(id, seasonNumber int, language string) (*tmdb.
}
for i, ep := range detailCN.Episodes {
if !episodeNameUseful(ep.Name) && episodeNameUseful(detailEN.Episodes[i].Name){
if !episodeNameUseful(ep.Name) && episodeNameUseful(detailEN.Episodes[i].Name) {
detailCN.Episodes[i].Name = detailEN.Episodes[i].Name
detailCN.Episodes[i].Overview = detailEN.Episodes[i].Overview
}
@@ -184,6 +209,22 @@ func (c *Client) GetTVAlternativeTitles(id int, language string) (*tmdb.TVAltern
return c.tmdbClient.GetTVAlternativeTitles(id, withLangOption(language))
}
func (c *Client) GetMovieAlternativeTitles(id int, language string) (*tmdb.MovieAlternativeTitles, error) {
return c.tmdbClient.GetMovieAlternativeTitles(id, withLangOption(language))
}
func (c *Client) GetByImdbId(imdbId string, lang string) (*tmdb.FindByID, error) {
m := withLangOption(lang)
m["external_source"] = "imdb_id"
return c.tmdbClient.GetFindByID(imdbId, m)
}
func (c *Client) GetByTvdbId(imdbId string, lang string) (*tmdb.FindByID, error) {
m := withLangOption(lang)
m["external_source"] = "tvdb_id"
return c.tmdbClient.GetFindByID(imdbId, m)
}
func wrapLanguage(lang string) string {
if lang == "" {
lang = "zh-CN"
@@ -191,6 +232,11 @@ func wrapLanguage(lang string) string {
return lang
}
func withExternalIDs(m map[string]string) map[string]string {
m["append_to_response"] = "external_ids"
return m
}
func withLangOption(language string) map[string]string {
language = wrapLanguage(language)
return map[string]string{
@@ -198,7 +244,6 @@ func withLangOption(language string) map[string]string {
}
}
func episodeNameUseful(name string) bool {
return !strings.HasSuffix(name, "集") && !strings.HasPrefix(strings.ToLower(name), "episode")
}
}
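As a usage sketch (not part of the changeset above), the wrapper client is now constructed with a proxy URL and an adult-content switch; the API key and proxy address are placeholders:

package example

import (
    "polaris/pkg/tmdb"
)

// newTmdbClient builds the wrapper with an HTTP proxy and adult content enabled.
// An empty proxy URL keeps the default transport.
func newTmdbClient() (*tmdb.Client, error) {
    return tmdb.NewClient("your-tmdb-api-key", "http://127.0.0.1:7890", true)
}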

pkg/torznab/cache.go

@@ -0,0 +1,8 @@
package torznab
import (
"polaris/pkg/cache"
"time"
)
var cc = cache.NewCache[string, *Response](time.Minute * 30)


@@ -3,10 +3,13 @@ package torznab
import (
"context"
"encoding/xml"
"fmt"
"io"
"net/http"
"net/url"
"polaris/db"
"polaris/log"
"slices"
"strconv"
"time"
@@ -71,17 +74,26 @@ func (i *Item) GetAttr(key string) string {
}
return ""
}
func (r *Response) ToResults() []Result {
func (r *Response) ToResults(indexer *db.TorznabInfo) []Result {
var res []Result
for _, item := range r.Channel.Item {
if slices.Contains(item.Category, "3000") { //exclude audio files
continue
}
r := Result{
Name: item.Title,
Link: item.Link,
Size: mustAtoI(item.Size),
Seeders: mustAtoI(item.GetAttr("seeders")),
Peers: mustAtoI(item.GetAttr("peers")),
Category: mustAtoI(item.GetAttr("category")),
Source: r.Channel.Title,
Name: item.Title,
Link: item.Link,
Size: mustAtoI(item.Size),
Seeders: mustAtoI(item.GetAttr("seeders")),
Peers: mustAtoI(item.GetAttr("peers")),
Category: mustAtoI(item.GetAttr("category")),
ImdbId: item.GetAttr("imdbid"),
DownloadVolumeFactor: tryParseFloat(item.GetAttr("downloadvolumefactor")),
UploadVolumeFactor: tryParseFloat(item.GetAttr("uploadvolumefactor")),
Source: indexer.Name,
IndexerId: indexer.ID,
Priority: indexer.Priority,
IsPrivate: item.Type == "private",
}
res = append(res, r)
}
@@ -96,20 +108,45 @@ func mustAtoI(key string) int {
}
return i
}
func Search(torznabUrl, api, keyWord string) ([]Result, error) {
func tryParseFloat(s string) float32 {
r, err := strconv.ParseFloat(s, 32)
if err != nil {
log.Warnf("parse float error: %v", err)
return 0
}
return float32(r)
}
func Search(indexer *db.TorznabInfo, keyWord string) ([]Result, error) {
ctx, cancel := context.WithTimeout(context.TODO(), 10*time.Second)
defer cancel()
req, err := http.NewRequestWithContext(ctx, http.MethodGet, torznabUrl, nil)
req, err := http.NewRequestWithContext(ctx, http.MethodGet, indexer.URL, nil)
if err != nil {
return nil, errors.Wrap(err, "new request")
}
var q = url.Values{}
q.Add("apikey", api)
q.Add("apikey", indexer.ApiKey)
q.Add("t", "search")
q.Add("q", keyWord)
req.URL.RawQuery = q.Encode()
key := fmt.Sprintf("%s: %s", indexer.Name, keyWord)
cacheRes, ok := cc.Get(key)
if !ok {
res, err := doRequest(req)
if err != nil {
cc.Set(key, &Response{})
return nil, errors.Wrap(err, "do http request")
}
cacheRes = res
cc.Set(key, cacheRes)
}
return cacheRes.ToResults(indexer), nil
}
func doRequest(req *http.Request) (*Response, error) {
resp, err := http.DefaultClient.Do(req)
if err != nil {
return nil, errors.Wrap(err, "do http")
@@ -122,17 +159,23 @@ func Search(torznabUrl, api, keyWord string) ([]Result, error) {
var res Response
err = xml.Unmarshal(data, &res)
if err != nil {
return nil, errors.Wrap(err, "json unmarshal")
return nil, errors.Wrapf(err, "xml unmarshal data: %v", string(data))
}
return res.ToResults(), nil
return &res, nil
}
type Result struct {
Name string
Link string
Size int
Seeders int
Peers int
Category int
Source string
Name string `json:"name"`
Link string `json:"link"`
Size int `json:"size"`
Seeders int `json:"seeders"`
Peers int `json:"peers"`
Category int `json:"category"`
Source string `json:"source"`
DownloadVolumeFactor float32 `json:"download_volume_factor"`
UploadVolumeFactor float32 `json:"upload_volume_factor"`
IndexerId int `json:"indexer_id"`
Priority int `json:"priority"`
IsPrivate bool `json:"is_private"`
ImdbId string `json:"imdb_id"`
}
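As a usage sketch (not part of the changeset above), Search now takes the whole indexer record instead of a raw URL and API key, and the results carry indexer metadata and volume factors; how the *db.TorznabInfo is obtained is left out here:

package example

import (
    "polaris/db"
    "polaris/log"
    "polaris/pkg/torznab"
)

// searchIndexer runs one keyword search and logs a few of the new result fields.
func searchIndexer(indexer *db.TorznabInfo, keyword string) {
    results, err := torznab.Search(indexer, keyword)
    if err != nil {
        log.Warnf("torznab search error: %v", err)
        return
    }
    for _, r := range results {
        log.Infof("%s seeders=%d free=%v private=%v", r.Name, r.Seeders, r.DownloadVolumeFactor == 0, r.IsPrivate)
    }
}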


@@ -53,7 +53,7 @@ func (c *Client) GetAll() ([]*Torrent, error) {
var torrents []*Torrent
for _, t := range all {
torrents = append(torrents, &Torrent{
ID: *t.ID,
Hash: *t.HashString,
c: c.c,
Config: c.cfg,
})
@@ -86,12 +86,12 @@ func (c *Client) Download(link, dir string) (*Torrent, error) {
DownloadDir: &dir,
})
log.Infof("get torrent info: %+v", t)
if t.ID == nil {
if t.HashString == nil {
return nil, fmt.Errorf("download torrent error: %v", link)
}
return &Torrent{
ID: *t.ID,
Hash: *t.HashString,
c: c.c,
Config: c.cfg,
}, err
@@ -100,7 +100,7 @@ func (c *Client) Download(link, dir string) (*Torrent, error) {
type Torrent struct {
//t *transmissionrpc.Torrent
c *transmissionrpc.Client
ID int64 `json:"id"`
Hash string `json:"hash"`
Config
}
@@ -113,61 +113,92 @@ func (t *Torrent) reloadClient() error {
return nil
}
func (t *Torrent) getTorrent() transmissionrpc.Torrent {
r, err := t.c.TorrentGetAllFor(context.TODO(), []int64{t.ID})
func (t *Torrent) getTorrent() (transmissionrpc.Torrent, error) {
r, err := t.c.TorrentGetAllForHashes(context.TODO(), []string{t.Hash})
if err != nil {
log.Errorf("get torrent info for error: %v", err)
}
return r[0]
if len(r) == 0 {
return transmissionrpc.Torrent{}, fmt.Errorf("no torrent")
}
return r[0], nil
}
func (t *Torrent) Exists() bool {
r, err := t.c.TorrentGetAllFor(context.TODO(), []int64{t.ID})
r, err := t.c.TorrentGetAllForHashes(context.TODO(), []string{t.Hash})
if err != nil {
log.Errorf("get torrent info for error: %v", err)
}
return len(r) > 0
}
func (t *Torrent) Name() string {
return *t.getTorrent().Name
func (t *Torrent) Name() (string, error) {
tt, err := t.getTorrent()
if err != nil {
return "", err
}
return *tt.Name, nil
}
func (t *Torrent) Progress() int {
if t.getTorrent().IsFinished != nil && *t.getTorrent().IsFinished {
return 100
func (t *Torrent) Progress() (int, error) {
tt, err := t.getTorrent()
if err != nil {
return 0, err
}
if t.getTorrent().PercentComplete != nil && *t.getTorrent().PercentComplete >= 1 {
return 100
if tt.IsFinished != nil && *tt.IsFinished {
return 100, nil
}
if tt.PercentComplete != nil && *tt.PercentComplete >= 1 {
return 100, nil
}
if t.getTorrent().PercentComplete != nil {
p := int(*t.getTorrent().PercentComplete * 100)
if tt.PercentComplete != nil {
p := int(*tt.PercentComplete * 100)
if p == 100 {
p = 99
}
return p
return p, nil
}
return 0
return 0, nil
}
func (t *Torrent) Stop() error {
return t.c.TorrentStopIDs(context.TODO(), []int64{t.ID})
return t.c.TorrentStopHashes(context.TODO(), []string{t.Hash})
}
func (t *Torrent) SeedRatio() (float64, error) {
tt, err := t.getTorrent()
if err != nil {
return 0, err
}
if tt.UploadRatio == nil {
return 0, nil
}
return *tt.UploadRatio, nil
}
func (t *Torrent) Start() error {
return t.c.TorrentStartIDs(context.TODO(), []int64{t.ID})
return t.c.TorrentStartHashes(context.TODO(), []string{t.Hash})
}
func (t *Torrent) Remove() error {
tt, err := t.getTorrent()
if err != nil {
return errors.Wrap(err, "get torrent")
}
return t.c.TorrentRemove(context.TODO(), transmissionrpc.TorrentRemovePayload{
IDs: []int64{t.ID},
IDs: []int64{*tt.ID},
DeleteLocalData: true,
})
}
func (t *Torrent) Size() int {
return int(t.getTorrent().TotalSize.Byte())
func (t *Torrent) Size() (int, error) {
tt, err := t.getTorrent()
if err != nil {
return 0, errors.Wrap(err, "get torrent")
}
return int(tt.TotalSize.Byte()), nil
}
func (t *Torrent) Save() string {


@@ -1,14 +1,13 @@
package utils
import (
"encoding/json"
"os"
"regexp"
"strconv"
"strings"
"unicode"
"github.com/adrg/strutil"
"github.com/adrg/strutil/metrics"
"github.com/pkg/errors"
"golang.org/x/crypto/bcrypt"
"golang.org/x/exp/rand"
@@ -65,7 +64,7 @@ func IsNameAcceptable(name1, name2 string) bool {
if strings.Contains(name1, name2) || strings.Contains(name2, name1) {
return true
}
return strutil.Similarity(name1, name2, metrics.NewHamming()) > 0.4
return false
}
func FindSeasonEpisodeNum(name string) (se int, ep int, err error) {
@@ -112,21 +111,6 @@ func FindSeasonPackageInfo(name string) (se int, err error) {
return se, err
}
func IsSeasonPackageName(name string) bool {
seRe := regexp.MustCompile(`S\d+`)
epRe := regexp.MustCompile(`E\d+`)
nameUpper := strings.ToUpper(name)
matchEp := epRe.FindAllString(nameUpper, -1)
if len(matchEp) != 0 {
return false //episode number should not exist
}
matchSe := seRe.FindAllString(nameUpper, -1)
if len(matchSe) == 0 {
return false //no season num
}
return true
}
func ContainsIgnoreCase(s, substr string) bool {
return strings.Contains(strings.ToLower(s), strings.ToLower(substr))
}
@@ -164,3 +148,38 @@ func ChangeFileHash(name string) error {
}
return nil
}
func TrimFields(v interface{}) error {
bytes, err := json.Marshal(v)
if err != nil {
return err
}
var mapSI map[string]interface{}
if err := json.Unmarshal(bytes, &mapSI); err != nil {
return err
}
mapSI = trimMapStringInterface(mapSI).(map[string]interface{})
bytes2, err := json.Marshal(mapSI)
if err != nil {
return err
}
if err := json.Unmarshal(bytes2, v); err != nil {
return err
}
return nil
}
func trimMapStringInterface(data interface{}) interface{} {
if values, valid := data.([]interface{}); valid {
for i := range values {
data.([]interface{})[i] = trimMapStringInterface(values[i])
}
} else if values, valid := data.(map[string]interface{}); valid {
for k, v := range values {
data.(map[string]interface{})[k] = trimMapStringInterface(v)
}
} else if value, valid := data.(string); valid {
data = strings.TrimSpace(value)
}
return data
}
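As a usage sketch (not part of the changeset above), TrimFields round-trips a value through JSON and trims every string leaf, so it only affects exported, JSON-visible fields and needs a pointer; the form type is made up:

package example

import (
    "fmt"

    "polaris/pkg/utils"
)

type loginForm struct {
    User string `json:"user"`
    Pass string `json:"pass"`
}

func trimExample() {
    f := loginForm{User: "  admin ", Pass: " secret \n"}
    if err := utils.TrimFields(&f); err != nil {
        fmt.Println("trim error:", err)
        return
    }
    fmt.Printf("%q %q\n", f.User, f.Pass) // "admin" "secret"
}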


@@ -15,31 +15,51 @@ import (
type Activity struct {
*ent.History
Progress int `json:"progress"`
Progress int `json:"progress"`
SeedRatio float64 `json:"seed_ratio"`
}
func (s *Server) GetAllActivities(c *gin.Context) (interface{}, error) {
q := c.Query("status")
his := s.db.GetHistories()
var activities = make([]Activity, 0, len(his))
for _, h := range his {
if q == "active" && (h.Status != history.StatusRunning && h.Status != history.StatusUploading) {
continue //active downloads
} else if q == "archive" && (h.Status == history.StatusRunning || h.Status == history.StatusUploading) {
continue //archived downloads
}
a := Activity{
History: h,
}
for id, task := range s.tasks {
if h.ID == id && task.Exists() {
a.Progress = task.Progress()
var activities = make([]Activity, 0)
if q == "active" {
his := s.db.GetRunningHistories()
for _, h := range his {
a := Activity{
History: h,
}
for id, task := range s.core.GetTasks() {
if h.ID == id && task.Exists() {
p, err := task.Progress()
if err != nil {
log.Warnf("get task progress error: %v", err)
} else {
a.Progress = p
}
r, err := task.SeedRatio()
if err != nil {
log.Warnf("get task seed ratio error: %v", err)
} else {
a.SeedRatio = r
}
}
}
activities = append(activities, a)
}
activities = append(activities, a)
}
} else {
his := s.db.GetHistories()
for _, h := range his {
if h.Status == history.StatusRunning || h.Status == history.StatusUploading || h.Status == history.StatusSeeding {
continue //archived downloads
}
a := Activity{
History: h,
}
activities = append(activities, a)
}
}
return activities, nil
}
@@ -54,15 +74,19 @@ func (s *Server) RemoveActivity(c *gin.Context) (interface{}, error) {
log.Errorf("no record of id: %d", id)
return nil, nil
}
torrent := s.tasks[his.ID]
if torrent != nil {
if err := torrent.Remove(); err != nil {
return nil, errors.Wrap(err, "remove torrent")
}
delete(s.tasks, his.ID)
if err := s.core.RemoveTaskAndTorrent(his.ID); err != nil {
return nil, errors.Wrap(err, "remove torrent")
}
err = s.db.DeleteHistory(id)
if err != nil {
return nil, errors.Wrap(err, "db")
}
if his.EpisodeID != 0 {
s.db.SetEpisodeStatus(his.EpisodeID, episode.StatusMissing)
if !s.db.IsEpisodeDownloadingOrDownloaded(his.EpisodeID) {
s.db.SetEpisodeStatus(his.EpisodeID, episode.StatusMissing)
}
} else {
seasonNum, err := utils.SeasonId(his.TargetDir)
@@ -70,14 +94,11 @@ func (s *Server) RemoveActivity(c *gin.Context) (interface{}, error) {
log.Errorf("no season id: %v", his.TargetDir)
seasonNum = -1
}
s.db.SetSeasonAllEpisodeStatus(his.MediaID, seasonNum, episode.StatusMissing)
if his.Status == history.StatusRunning || his.Status == history.StatusUploading {
s.db.SetSeasonAllEpisodeStatus(his.MediaID, seasonNum, episode.StatusMissing)
}
}
err = s.db.DeleteHistory(id)
if err != nil {
return nil, errors.Wrap(err, "db")
}
log.Infof("history record successful deleted: %v", his.SourceTitle)
return nil, nil
}
@@ -96,13 +117,13 @@ func (s *Server) GetMediaDownloadHistory(c *gin.Context) (interface{}, error) {
type TorrentInfo struct {
Name string `json:"name"`
ID int64 `json:"id"`
ID string `json:"id"`
SeedRatio float32 `json:"seed_ratio"`
Progress int `json:"progress"`
}
func (s *Server) GetAllTorrents(c *gin.Context) (interface{}, error) {
trc, err := s.getDownloadClient()
trc, _, err := s.getDownloadClient()
if err != nil {
return nil, errors.Wrap(err, "connect transmission")
}
@@ -115,10 +136,12 @@ func (s *Server) GetAllTorrents(c *gin.Context) (interface{}, error) {
if !t.Exists() {
continue
}
name, _ := t.Name()
p, _ := t.Progress()
infos = append(infos, TorrentInfo{
Name: t.Name(),
ID: t.ID,
Progress: t.Progress(),
Name: name,
ID: t.Hash,
Progress: p,
})
}
return infos, nil


@@ -22,7 +22,7 @@ func (s *Server) authModdleware(c *gin.Context) {
c.Next()
return
}
token, err := c.Cookie("token")
token, err := c.Cookie("polaris_token")
if err != nil {
log.Errorf("token error: %v", err)
c.AbortWithStatus(http.StatusForbidden)
@@ -90,11 +90,9 @@ func (s *Server) Login(c *gin.Context) (interface{}, error) {
if err != nil {
return nil, errors.Wrap(err, "sign")
}
c.SetSameSite(http.SameSiteNoneMode)
c.SetCookie("token", sig, 0, "/", "", true, false)
return gin.H{
"token": sig,
}, nil
c.SetSameSite(http.SameSiteLaxMode)
c.SetCookie("polaris_token", sig, 0, "/", "", false, false)
return "success", nil
}
func (s *Server) Logout(c *gin.Context) (interface{}, error) {
@@ -102,8 +100,8 @@ func (s *Server) Logout(c *gin.Context) (interface{}, error) {
return nil, errors.New( "auth is not enabled")
}
c.SetSameSite(http.SameSiteNoneMode)
c.SetCookie("token", "", -1, "/", "", true, false)
c.SetSameSite(http.SameSiteLaxMode)
c.SetCookie("polaris_token", "", -1, "/", "", false, false)
return nil, nil
}


@@ -19,7 +19,7 @@ func HttpHandler(f func(*gin.Context) (interface{}, error)) gin.HandlerFunc {
})
return
}
log.Debug("url %v return: %+v", ctx.Request.URL, r)
log.Debugf("url %v return: %+v", ctx.Request.URL, r)
ctx.JSON(200, Response{
Code: 0,

server/core/client.go Normal file

@@ -0,0 +1,108 @@
package core
import (
"polaris/db"
"polaris/ent"
"polaris/log"
"polaris/pkg/tmdb"
"polaris/pkg/transmission"
"polaris/pkg/utils"
"github.com/pkg/errors"
"github.com/robfig/cron"
)
func NewClient(db *db.Client, language string) *Client {
return &Client{
db: db,
cron: cron.New(),
tasks: make(map[int]*Task, 0),
language: language,
}
}
type scheduler struct {
cron string
f func() error
}
type Client struct {
db *db.Client
cron *cron.Cron
tasks map[int]*Task
language string
schedulers utils.Map[string, scheduler]
}
func (c *Client) registerCronJob(name string, cron string, f func() error) {
c.schedulers.Store(name, scheduler{
cron: cron,
f: f,
})
}
func (c *Client) Init() {
c.reloadTasks()
c.addSysCron()
}
func (c *Client) reloadTasks() {
allTasks := c.db.GetRunningHistories()
for _, t := range allTasks {
torrent, err := transmission.ReloadTorrent(t.Saved)
if err != nil {
log.Errorf("relaod task %s failed: %v", t.SourceTitle, err)
continue
}
if !torrent.Exists() { // only reload torrents that still exist in the download client; they may still be seeding
continue
}
log.Infof("reloading task: %d %s", t.ID, t.SourceTitle)
c.tasks[t.ID] = &Task{Torrent: torrent}
}
}
func (c *Client) getDownloadClient() (*transmission.Client, *ent.DownloadClients, error) {
tr := c.db.GetTransmission()
trc, err := transmission.NewClient(transmission.Config{
URL: tr.URL,
User: tr.User,
Password: tr.Password,
})
if err != nil {
return nil, nil, errors.Wrap(err, "connect transmission")
}
return trc, tr, nil
}
func (c *Client) TMDB() (*tmdb.Client, error) {
api := c.db.GetSetting(db.SettingTmdbApiKey)
if api == "" {
return nil, errors.New("TMDB apiKey not set")
}
proxy := c.db.GetSetting(db.SettingProxy)
adult := c.db.GetSetting(db.SettingEnableTmdbAdultContent)
return tmdb.NewClient(api, proxy, adult == "true")
}
func (c *Client) MustTMDB() *tmdb.Client {
t, err := c.TMDB()
if err != nil {
log.Panicf("get tmdb: %v", err)
}
return t
}
func (c *Client) RemoveTaskAndTorrent(id int) error {
torrent := c.tasks[id]
if torrent != nil {
if err := torrent.Remove(); err != nil {
return errors.Wrap(err, "remove torrent")
}
delete(c.tasks, id)
}
return nil
}
func (c *Client) GetTasks() map[int]*Task {
return c.tasks
}
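
For context, the schedulers field is a utils.Map[string, scheduler]; judging from the Store/Load/Range calls used elsewhere, it looks like a typed wrapper around sync.Map. A minimal sketch of such a wrapper, offered as an assumption rather than the actual polaris/pkg/utils implementation:

package utils

import "sync"

// Map is a minimal generic wrapper around sync.Map with typed keys and values.
type Map[K comparable, V any] struct {
	m sync.Map
}

func (m *Map[K, V]) Store(k K, v V) { m.m.Store(k, v) }

func (m *Map[K, V]) Load(k K) (V, bool) {
	v, ok := m.m.Load(k)
	if !ok {
		var zero V
		return zero, false
	}
	return v.(V), true
}

// Range calls f for each stored entry until f returns false.
func (m *Map[K, V]) Range(f func(K, V) bool) {
	m.m.Range(func(k, v any) bool {
		return f(k.(K), v.(V))
	})
}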

server/core/importlist.go Normal file

@@ -0,0 +1,457 @@
package core
import (
"bytes"
"fmt"
"html/template"
"io"
"net/http"
"os"
"path/filepath"
"polaris/db"
"polaris/ent"
"polaris/ent/importlist"
"polaris/ent/media"
"polaris/ent/schema"
"polaris/log"
"polaris/pkg/importlist/plexwatchlist"
"polaris/pkg/utils"
"regexp"
"strings"
tmdb "github.com/cyruzin/golang-tmdb"
"github.com/pkg/errors"
)
func (c *Client) periodicallyUpdateImportlist() error {
log.Infof("begin check import list")
lists, err := c.db.GetAllImportLists()
if err != nil {
return errors.Wrap(err, "get from db")
}
for _, l := range lists {
log.Infof("check import list content for %v", l.Name)
if l.Type == importlist.TypePlex {
res, err := plexwatchlist.ParsePlexWatchlist(l.URL)
if err != nil {
log.Errorf("parse plex watchlist: %v", err)
continue
}
for _, item := range res.Items {
var tmdbRes *tmdb.FindByID
if item.ImdbID != "" {
tmdbRes1, err := c.MustTMDB().GetByImdbId(item.ImdbID, c.language)
if err != nil {
log.Errorf("get by imdb id error: %v", err)
continue
}
tmdbRes = tmdbRes1
} else if item.TvdbID != "" {
tmdbRes1, err := c.MustTMDB().GetByTvdbId(item.TvdbID, c.language)
if err != nil {
log.Errorf("get by imdb id error: %v", err)
continue
}
tmdbRes = tmdbRes1
}
if tmdbRes == nil {
log.Errorf("can not find media for : %+v", item)
continue
}
if len(tmdbRes.MovieResults) > 0 {
d := tmdbRes.MovieResults[0]
name, err := c.SuggestedMovieFolderName(int(d.ID))
if err != nil {
log.Errorf("suggesting name error: %v", err)
continue
}
_, err = c.AddMovie2Watchlist(AddWatchlistIn{
TmdbID: int(d.ID),
StorageID: l.StorageID,
Resolution: l.Qulity,
Folder: name,
})
if err != nil {
log.Errorf("[update_import_lists] add movie to watchlist error: %v", err)
} else {
log.Infof("[update_import_lists] add movie to watchlist success")
}
} else if len(tmdbRes.TvResults) > 0 {
d := tmdbRes.TvResults[0]
name, err := c.SuggestedSeriesFolderName(int(d.ID))
if err != nil {
log.Errorf("suggesting name error: %v", err)
continue
}
_, err = c.AddTv2Watchlist(AddWatchlistIn{
TmdbID: int(d.ID),
StorageID: l.StorageID,
Resolution: l.Qulity,
Folder: name,
})
if err != nil {
log.Errorf("[update_import_lists] add tv to watchlist error: %v", err)
} else {
log.Infof("[update_import_lists] add tv to watchlist success")
}
}
}
}
}
return nil
}
type AddWatchlistIn struct {
TmdbID int `json:"tmdb_id" binding:"required"`
StorageID int `json:"storage_id" `
Resolution string `json:"resolution" binding:"required"`
Folder string `json:"folder" binding:"required"`
DownloadHistoryEpisodes bool `json:"download_history_episodes"` //for tv
SizeMin int `json:"size_min"`
SizeMax int `json:"size_max"`
}
func (c *Client) AddTv2Watchlist(in AddWatchlistIn) (interface{}, error) {
log.Debugf("add tv watchlist input %+v", in)
if in.Folder == "" {
return nil, errors.New("folder should be provided")
}
detailCn, err := c.MustTMDB().GetTvDetails(in.TmdbID, db.LanguageCN)
if err != nil {
return nil, errors.Wrap(err, "get tv detail")
}
var nameCn = detailCn.Name
detailEn, _ := c.MustTMDB().GetTvDetails(in.TmdbID, db.LanguageEN)
var nameEn = detailEn.Name
var detail *tmdb.TVDetails
if c.language == "" || c.language == db.LanguageCN {
detail = detailCn
} else {
detail = detailEn
}
log.Infof("find detail for tv id %d: %+v", in.TmdbID, detail)
lastSeason := 0
for _, season := range detail.Seasons {
if season.SeasonNumber > lastSeason && season.EpisodeCount > 0 { // only treat a newer season as the latest once it already has episode info
lastSeason = season.SeasonNumber
}
}
log.Debugf("latest season is %v", lastSeason)
var epIds []int
for _, season := range detail.Seasons {
seasonId := season.SeasonNumber
se, err := c.MustTMDB().GetSeasonDetails(int(detail.ID), seasonId, c.language)
if err != nil {
log.Errorf("get season detail (%s) error: %v", detail.Name, err)
continue
}
shouldMonitor := seasonId >= lastSeason // monitor the latest season
for _, ep := range se.Episodes {
// // if download_history_episodes is set, monitor every episode; otherwise only monitor episodes that have not aired yet (keep a 24h margin for time-zone differences)
// if in.DownloadHistoryEpisodes {
// shouldMonitor = true
// } else {
// t, err := time.Parse("2006-01-02", ep.AirDate)
// if err != nil {
// log.Error("air date not known, will monitor: %v", ep.AirDate)
// shouldMonitor = true
// } else {
// if time.Since(t) < 24*time.Hour { //monitor episode air 24h before now
// shouldMonitor = true
// }
// }
// }
ep := ent.Episode{
SeasonNumber: seasonId,
EpisodeNumber: ep.EpisodeNumber,
Title: ep.Name,
Overview: ep.Overview,
AirDate: ep.AirDate,
Monitored: shouldMonitor,
}
epid, err := c.db.SaveEposideDetail(&ep)
if err != nil {
log.Errorf("save episode info error: %v", err)
continue
}
log.Debugf("success save episode %+v", ep)
epIds = append(epIds, epid)
}
}
m := &ent.Media{
TmdbID: int(detail.ID),
ImdbID: detail.IMDbID,
MediaType: media.MediaTypeTv,
NameCn: nameCn,
NameEn: nameEn,
OriginalName: detail.OriginalName,
Overview: detail.Overview,
AirDate: detail.FirstAirDate,
Resolution: media.Resolution(in.Resolution),
StorageID: in.StorageID,
TargetDir: in.Folder,
DownloadHistoryEpisodes: in.DownloadHistoryEpisodes,
Limiter: schema.MediaLimiter{SizeMin: in.SizeMin, SizeMax: in.SizeMax},
Extras: schema.MediaExtras{
OriginalLanguage: detail.OriginalLanguage,
Genres: detail.Genres,
},
}
r, err := c.db.AddMediaWatchlist(m, epIds)
if err != nil {
return nil, errors.Wrap(err, "add to list")
}
go func() {
if err := c.downloadPoster(detail.PosterPath, r.ID); err != nil {
log.Errorf("download poster error: %v", err)
}
if err := c.downloadBackdrop(detail.BackdropPath, r.ID); err != nil {
log.Errorf("download poster error: %v", err)
}
if err := c.CheckDownloadedSeriesFiles(r); err != nil {
log.Errorf("check downloaded files error: %v", err)
}
}()
log.Infof("add tv %s to watchlist success", detail.Name)
return nil, nil
}
func (c *Client) AddMovie2Watchlist(in AddWatchlistIn) (interface{}, error) {
log.Infof("add movie watchlist input: %+v", in)
detailCn, err := c.MustTMDB().GetMovieDetails(in.TmdbID, db.LanguageCN)
if err != nil {
return nil, errors.Wrap(err, "get movie detail")
}
var nameCn = detailCn.Title
detailEn, _ := c.MustTMDB().GetMovieDetails(in.TmdbID, db.LanguageEN)
var nameEn = detailEn.Title
var detail *tmdb.MovieDetails
if c.language == "" || c.language == db.LanguageCN {
detail = detailCn
} else {
detail = detailEn
}
log.Infof("find detail for movie id %d: %v", in.TmdbID, detail)
epid, err := c.db.SaveEposideDetail(&ent.Episode{
SeasonNumber: 1,
EpisodeNumber: 1,
Title: "dummy episode for movies",
Overview: "dummy episode for movies",
AirDate: detail.ReleaseDate,
Monitored: true,
})
if err != nil {
return nil, errors.Wrap(err, "add dummy episode")
}
log.Infof("added dummy episode for movie: %v", nameEn)
movie := ent.Media{
TmdbID: int(detail.ID),
ImdbID: detail.IMDbID,
MediaType: media.MediaTypeMovie,
NameCn: nameCn,
NameEn: nameEn,
OriginalName: detail.OriginalTitle,
Overview: detail.Overview,
AirDate: detail.ReleaseDate,
Resolution: media.Resolution(in.Resolution),
StorageID: in.StorageID,
TargetDir: in.Folder,
Limiter: schema.MediaLimiter{SizeMin: in.SizeMin, SizeMax: in.SizeMax},
}
extras := schema.MediaExtras{
IsAdultMovie: detail.Adult,
OriginalLanguage: detail.OriginalLanguage,
Genres: detail.Genres,
}
if IsJav(detail) {
javid := c.GetJavid(in.TmdbID)
extras.JavId = javid
}
movie.Extras = extras
r, err := c.db.AddMediaWatchlist(&movie, []int{epid})
if err != nil {
return nil, errors.Wrap(err, "add to list")
}
go func() {
if err := c.downloadPoster(detail.PosterPath, r.ID); err != nil {
log.Errorf("download poster error: %v", err)
}
if err := c.downloadBackdrop(detail.BackdropPath, r.ID); err != nil {
log.Errorf("download backdrop error: %v", err)
}
}()
log.Infof("add movie %s to watchlist success", detail.Title)
return nil, nil
}
func IsJav(detail *tmdb.MovieDetails) bool {
if detail.Adult && len(detail.ProductionCountries) > 0 && strings.ToUpper(detail.ProductionCountries[0].Iso3166_1) == "JP" {
return true
}
return false
}
func (c *Client) GetJavid(id int) string {
alters, err := c.MustTMDB().GetMovieAlternativeTitles(id, c.language)
if err != nil {
return ""
}
for _, t := range alters.Titles {
if t.Iso3166_1 == "JP" && t.Type == "" {
return t.Title
}
}
return ""
}
func (c *Client) downloadBackdrop(path string, mediaID int) error {
url := "https://image.tmdb.org/t/p/original" + path
return c.downloadImage(url, mediaID, "backdrop.jpg")
}
func (c *Client) downloadPoster(path string, mediaID int) error {
var url = "https://image.tmdb.org/t/p/original" + path
return c.downloadImage(url, mediaID, "poster.jpg")
}
func (c *Client) downloadImage(url string, mediaID int, name string) error {
log.Infof("try to download image: %v", url)
var resp, err = http.Get(url)
if err != nil {
return errors.Wrap(err, "http get")
}
defer resp.Body.Close()
targetDir := fmt.Sprintf("%v/%d", db.ImgPath, mediaID)
os.MkdirAll(targetDir, 0777)
//ext := filepath.Ext(path)
targetFile := filepath.Join(targetDir, name)
f, err := os.Create(targetFile)
if err != nil {
return errors.Wrap(err, "new file")
}
defer f.Close()
_, err = io.Copy(f, resp.Body)
if err != nil {
return errors.Wrap(err, "copy http response")
}
log.Infof("image successfully downlaoded: %v", targetFile)
return nil
}
func (c *Client) SuggestedMovieFolderName(tmdbId int) (string, error) {
d1, err := c.MustTMDB().GetMovieDetails(tmdbId, c.language)
if err != nil {
return "", errors.Wrap(err, "get movie details")
}
name := d1.Title
if IsJav(d1) {
javid := c.GetJavid(tmdbId)
if javid != "" {
return javid, nil
}
}
info := db.NamingInfo{TmdbID: tmdbId}
if utils.IsASCII(name) {
info.NameEN = stripExtraCharacters(name)
} else {
info.NameCN = stripExtraCharacters(name)
en, err := c.MustTMDB().GetMovieDetails(tmdbId, db.LanguageEN)
if err != nil {
log.Errorf("get en tv detail error: %v", err)
} else {
info.NameEN = stripExtraCharacters(en.Title)
}
}
year := strings.Split(d1.ReleaseDate, "-")[0]
info.Year = year
movieNamingFormat := c.db.GetMovingNamingFormat()
tmpl, err := template.New("test").Parse(movieNamingFormat)
if err != nil {
return "", errors.Wrap(err, "naming format")
}
buff := &bytes.Buffer{}
err = tmpl.Execute(buff, info)
if err != nil {
return "", errors.Wrap(err, "tmpl exec")
}
res := strings.TrimSpace(buff.String())
log.Infof("tv series of tmdb id %v suggestting name is %v", tmdbId, res)
return res, nil
}
func (c *Client) SuggestedSeriesFolderName(tmdbId int) (string, error) {
d, err := c.MustTMDB().GetTvDetails(tmdbId, c.language)
if err != nil {
return "", errors.Wrap(err, "get tv details")
}
name := d.Name
info := db.NamingInfo{TmdbID: tmdbId}
if utils.IsASCII(name) {
info.NameEN = stripExtraCharacters(name)
} else {
info.NameCN = stripExtraCharacters(name)
en, err := c.MustTMDB().GetTvDetails(tmdbId, db.LanguageEN)
if err != nil {
log.Errorf("get en tv detail error: %v", err)
} else {
if en.Name != name { //sometimes en name is in chinese
info.NameEN = stripExtraCharacters(en.Name)
}
}
}
year := strings.Split(d.FirstAirDate, "-")[0]
info.Year = year
tvNamingFormat := c.db.GetTvNamingFormat()
tmpl, err := template.New("test").Parse(tvNamingFormat)
if err != nil {
return "", errors.Wrap(err, "naming format")
}
buff := &bytes.Buffer{}
err = tmpl.Execute(buff, info)
if err != nil {
return "", errors.Wrap(err, "tmpl exec")
}
res := strings.TrimSpace(buff.String())
log.Infof("tv series of tmdb id %v suggestting name is %v", tmdbId, res)
return res, nil
}
func stripExtraCharacters(s string) string {
re := regexp.MustCompile(`[^\p{L}\w\s]`)
s = re.ReplaceAllString(s, " ")
return strings.Join(strings.Fields(s), " ")
}
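
Both suggestion helpers render the user-configurable naming format as a Go template against a db.NamingInfo value (TmdbID, NameCN, NameEN, Year). A standalone sketch of that rendering step, with a made-up format string and values; the code above uses html/template, but for simple strings like these plain text/template behaves the same:

package main

import (
	"bytes"
	"fmt"
	"strings"
	"text/template"
)

// NamingInfo mirrors the fields used above; the real type lives in polaris/db.
type NamingInfo struct {
	TmdbID int
	NameCN string
	NameEN string
	Year   string
}

func main() {
	// Hypothetical naming format, for illustration only.
	format := "{{.NameCN}} {{.NameEN}} ({{.Year}}) {tmdbid-{{.TmdbID}}}"
	info := NamingInfo{TmdbID: 12345, NameCN: "示例剧集", NameEN: "Example Show", Year: "2024"}

	tmpl, err := template.New("naming").Parse(format)
	if err != nil {
		panic(err)
	}
	buff := &bytes.Buffer{}
	if err := tmpl.Execute(buff, info); err != nil {
		panic(err)
	}
	fmt.Println(strings.TrimSpace(buff.String()))
	// Output: 示例剧集 Example Show (2024) {tmdbid-12345}
}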

server/core/integration.go Normal file

@@ -0,0 +1,316 @@
package core
import (
"bytes"
"encoding/xml"
"fmt"
"github.com/pkg/errors"
"os"
"path/filepath"
"polaris/db"
"polaris/ent/media"
storage1 "polaris/ent/storage"
"polaris/log"
"polaris/pkg/metadata"
"polaris/pkg/notifier"
"polaris/pkg/storage"
"polaris/pkg/utils"
"slices"
"strconv"
"strings"
)
func (c *Client) writeNfoFile(historyId int) error {
if !c.nfoSupportEnabled() {
return nil
}
his := c.db.GetHistory(historyId)
md, err := c.db.GetMedia(his.MediaID)
if err != nil {
return err
}
if md.MediaType == media.MediaTypeTv { //tvshow.nfo
st, err := c.getStorage(md.StorageID, media.MediaTypeTv)
if err != nil {
return errors.Wrap(err, "get storage")
}
nfoPath := filepath.Join(md.TargetDir, "tvshow.nfo")
_, err = st.ReadFile(nfoPath)
if err != nil {
log.Infof("tvshow.nfo file missing, create new one, tv series name: %s", md.NameEn)
show := Tvshow{
Title: md.NameCn,
Originaltitle: md.OriginalName,
Showtitle: md.NameCn,
Plot: md.Overview,
ID: strconv.Itoa(md.TmdbID),
Uniqueid: []UniqueId{
{
Text: strconv.Itoa(md.TmdbID),
Type: "tmdb",
Default: "true",
},
{
Text: md.ImdbID,
Type: "imdb",
},
},
}
data, err := xml.MarshalIndent(&show, " ", " ")
if err != nil {
return errors.Wrap(err, "xml marshal")
}
return st.WriteFile(nfoPath, data)
}
} else if md.MediaType == media.MediaTypeMovie { //movie.nfo
st, err := c.getStorage(md.StorageID, media.MediaTypeMovie)
if err != nil {
return errors.Wrap(err, "get storage")
}
nfoPath := filepath.Join(md.TargetDir, "movie.nfo")
_, err = st.ReadFile(nfoPath)
if err != nil {
log.Infof("movie.nfo file missing, create new one, tv series name: %s", md.NameEn)
nfoData := Movie{
Title: md.NameCn,
Originaltitle: md.OriginalName,
Sorttitle: md.NameCn,
Plot: md.Overview,
ID: strconv.Itoa(md.TmdbID),
Uniqueid: []UniqueId{
{
Text: strconv.Itoa(md.TmdbID),
Type: "tmdb",
Default: "true",
},
{
Text: md.ImdbID,
Type: "imdb",
},
},
}
data, err := xml.MarshalIndent(&nfoData, " ", " ")
if err != nil {
return errors.Wrap(err, "xml marshal")
}
return st.WriteFile(nfoPath, data)
}
}
return nil
}
func (c *Client) writePlexmatch(historyId int) error {
if !c.plexmatchEnabled() {
return nil
}
his := c.db.GetHistory(historyId)
series, err := c.db.GetMedia(his.MediaID)
if err != nil {
return err
}
if series.MediaType != media.MediaTypeTv { //.plexmatch only support tv series
return nil
}
st, err := c.getStorage(series.StorageID, media.MediaTypeTv)
if err != nil {
return errors.Wrap(err, "get storage")
}
//series plexmatch file
_, err = st.ReadFile(filepath.Join(series.TargetDir, ".plexmatch"))
if err != nil {
//create new
buff := bytes.Buffer{}
if series.ImdbID != "" {
buff.WriteString(fmt.Sprintf("imdbid: %s\n", series.ImdbID))
}
buff.WriteString(fmt.Sprintf("tmdbid: %d\n", series.TmdbID))
log.Warnf(".plexmatch file not found, create new one: %s", series.NameEn)
if err := st.WriteFile(filepath.Join(series.TargetDir, ".plexmatch"), buff.Bytes()); err != nil {
return errors.Wrap(err, "series plexmatch")
}
}
buff := bytes.Buffer{}
seasonPlex := filepath.Join(his.TargetDir, ".plexmatch")
data, err := st.ReadFile(seasonPlex)
if err != nil {
log.Infof("read season plexmatch: %v", err)
} else {
buff.Write(data)
}
if his.EpisodeID > 0 {
//single episode download
ep, err := c.db.GetEpisodeByID(his.EpisodeID)
if err != nil {
return errors.Wrap(err, "query episode")
}
if strings.Contains(buff.String(), ep.TargetFile) {
log.Debugf("already write plex episode line: %v", ep.TargetFile)
return nil
}
buff.WriteString(fmt.Sprintf("\nep: %d: %s\n", ep.EpisodeNumber, ep.TargetFile))
} else {
seasonNum, err := utils.SeasonId(his.TargetDir)
if err != nil {
return errors.Wrap(err, "no season id")
}
allEpisodes, err := c.db.GetSeasonEpisodes(his.MediaID, seasonNum)
if err != nil {
return errors.Wrap(err, "query season episode")
}
for _, ep := range allEpisodes {
if ep.TargetFile == "" {
log.Errorf("no episode file of episode %d, season %d", ep.EpisodeNumber, ep.SeasonNumber)
//TODO update db
continue
}
if strings.Contains(buff.String(), ep.TargetFile) {
log.Debugf("already write plex episode line: %v", ep.TargetFile)
continue
}
buff.WriteString(fmt.Sprintf("\nep: %d: %s\n", ep.EpisodeNumber, ep.TargetFile))
}
}
log.Infof("write season plexmatch file content: %s", buff.String())
return st.WriteFile(seasonPlex, buff.Bytes())
}
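
Putting the writes above together, the generated .plexmatch files would look roughly like the following (IDs and file names are made up): the series-level file in series.TargetDir carries the IDs, and the season-level file in his.TargetDir lists one ep line per episode file.

series-level .plexmatch:
imdbid: tt1234567
tmdbid: 12345

season-level .plexmatch:
ep: 1: Example.Show.S01E01.1080p.mkv
ep: 2: Example.Show.S01E02.1080p.mkv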
func (c *Client) plexmatchEnabled() bool {
return c.db.GetSetting(db.SettingPlexMatchEnabled) == "true"
}
func (c *Client) nfoSupportEnabled() bool {
return c.db.GetSetting(db.SettingNfoSupportEnabled) == "true"
}
func (c *Client) getStorage(storageId int, mediaType media.MediaType) (storage.Storage, error) {
st := c.db.GetStorage(storageId)
targetPath := st.TvPath
if mediaType == media.MediaTypeMovie {
targetPath = st.MoviePath
}
switch st.Implementation {
case storage1.ImplementationLocal:
storageImpl1, err := storage.NewLocalStorage(targetPath)
if err != nil {
return nil, errors.Wrap(err, "new local")
}
return storageImpl1, nil
case storage1.ImplementationWebdav:
ws := st.ToWebDavSetting()
storageImpl1, err := storage.NewWebdavStorage(ws.URL, ws.User, ws.Password, targetPath, ws.ChangeFileHash == "true")
if err != nil {
return nil, errors.Wrap(err, "new webdav")
}
return storageImpl1, nil
}
return nil, errors.New("no storage found")
}
func (c *Client) sendMsg(msg string) {
clients, err := c.db.GetAllNotificationClients2()
if err != nil {
log.Errorf("query notification clients: %v", err)
return
}
for _, cl := range clients {
if !cl.Enabled {
continue
}
handler, ok := notifier.Gethandler(cl.Service)
if !ok {
log.Errorf("no notification implementation of service %s", cl.Service)
continue
}
noCl, err := handler(cl.Settings)
if err != nil {
log.Errorf("handle setting for name %s error: %v", cl.Name, err)
continue
}
err = noCl.SendMsg(msg)
if err != nil {
log.Errorf("send message error: %v", err)
continue
}
log.Debugf("send message to %s success, msg is %s", cl.Name, msg)
}
}
func (c *Client) findEpisodeFilesPreMoving(historyId int) error {
his := c.db.GetHistory(historyId)
isSingleEpisode := his.EpisodeID > 0
downloadDir := c.db.GetDownloadDir()
task := c.tasks[historyId]
name, err := task.Name()
if err != nil {
return err
}
target := filepath.Join(downloadDir, name)
fi, err := os.Stat(target)
if err != nil {
return errors.Wrapf(err, "read dir %v", target)
}
if isSingleEpisode {
if fi.IsDir() {
//download single episode in dir
//TODO
} else {
//is file
if err := c.db.UpdateEpisodeTargetFile(his.EpisodeID, fi.Name()); err != nil {
log.Errorf("writing downloaded file name to db error: %v", err)
}
}
} else {
if !fi.IsDir() {
return fmt.Errorf("not season pack downloaded")
}
seasonNum, err := utils.SeasonId(his.TargetDir)
if err != nil {
return errors.Wrap(err, "no season id")
}
files, err := os.ReadDir(target)
if err != nil {
return err
}
for _, f := range files {
if f.IsDir() { //want media file
continue
}
excludedExt := []string{".txt", ".srt", ".ass", ".sub"}
ext := filepath.Ext(f.Name())
if slices.Contains(excludedExt, strings.ToLower(ext)) {
continue
}
meta := metadata.ParseTv(f.Name())
if meta.Episode > 0 {
//episode exists
ep, err := c.db.GetEpisode(his.MediaID, seasonNum, meta.Episode)
if err != nil {
return err
}
if err := c.db.UpdateEpisodeTargetFile(ep.ID, f.Name()); err != nil {
return errors.Wrap(err, "update episode file")
}
}
}
}
return nil
}

server/core/nfo.go Normal file

@@ -0,0 +1,253 @@
package core
import "encoding/xml"
type Tvshow struct {
XMLName xml.Name `xml:"tvshow"`
Text string `xml:",chardata"`
Title string `xml:"title"`
Originaltitle string `xml:"originaltitle"`
Showtitle string `xml:"showtitle"`
Ratings struct {
Text string `xml:",chardata"`
Rating []struct {
Text string `xml:",chardata"`
Name string `xml:"name,attr"`
Max string `xml:"max,attr"`
Default string `xml:"default,attr"`
Value string `xml:"value"`
Votes string `xml:"votes"`
} `xml:"rating"`
} `xml:"ratings"`
Userrating string `xml:"userrating"`
Top250 string `xml:"top250"`
Season string `xml:"season"`
Episode string `xml:"episode"`
Displayseason string `xml:"displayseason"`
Displayepisode string `xml:"displayepisode"`
Outline string `xml:"outline"`
Plot string `xml:"plot"`
Tagline string `xml:"tagline"`
Runtime string `xml:"runtime"`
Thumb []struct {
Text string `xml:",chardata"`
Spoof string `xml:"spoof,attr"`
Cache string `xml:"cache,attr"`
Aspect string `xml:"aspect,attr"`
Preview string `xml:"preview,attr"`
Season string `xml:"season,attr"`
Type string `xml:"type,attr"`
} `xml:"thumb"`
Fanart struct {
Text string `xml:",chardata"`
Thumb []struct {
Text string `xml:",chardata"`
Colors string `xml:"colors,attr"`
Preview string `xml:"preview,attr"`
} `xml:"thumb"`
} `xml:"fanart"`
Mpaa string `xml:"mpaa"`
Playcount string `xml:"playcount"`
Lastplayed string `xml:"lastplayed"`
ID string `xml:"id"`
Uniqueid []UniqueId `xml:"uniqueid"`
Genre string `xml:"genre"`
Premiered string `xml:"premiered"`
Year string `xml:"year"`
Status string `xml:"status"`
Code string `xml:"code"`
Aired string `xml:"aired"`
Studio string `xml:"studio"`
Trailer string `xml:"trailer"`
Actor []struct {
Text string `xml:",chardata"`
Name string `xml:"name"`
Role string `xml:"role"`
Order string `xml:"order"`
Thumb string `xml:"thumb"`
} `xml:"actor"`
Namedseason []struct {
Text string `xml:",chardata"`
Number string `xml:"number,attr"`
} `xml:"namedseason"`
Resume struct {
Text string `xml:",chardata"`
Position string `xml:"position"`
Total string `xml:"total"`
} `xml:"resume"`
Dateadded string `xml:"dateadded"`
}
type UniqueId struct {
Text string `xml:",chardata"`
Type string `xml:"type,attr"`
Default string `xml:"default,attr"`
}
type Episodedetails struct {
XMLName xml.Name `xml:"episodedetails"`
Text string `xml:",chardata"`
Title string `xml:"title"`
Showtitle string `xml:"showtitle"`
Ratings struct {
Text string `xml:",chardata"`
Rating []struct {
Text string `xml:",chardata"`
Name string `xml:"name,attr"`
Max string `xml:"max,attr"`
Default string `xml:"default,attr"`
Value string `xml:"value"`
Votes string `xml:"votes"`
} `xml:"rating"`
} `xml:"ratings"`
Userrating string `xml:"userrating"`
Top250 string `xml:"top250"`
Season string `xml:"season"`
Episode string `xml:"episode"`
Displayseason string `xml:"displayseason"`
Displayepisode string `xml:"displayepisode"`
Outline string `xml:"outline"`
Plot string `xml:"plot"`
Tagline string `xml:"tagline"`
Runtime string `xml:"runtime"`
Thumb []struct {
Text string `xml:",chardata"`
Spoof string `xml:"spoof,attr"`
Cache string `xml:"cache,attr"`
Aspect string `xml:"aspect,attr"`
Preview string `xml:"preview,attr"`
} `xml:"thumb"`
Mpaa string `xml:"mpaa"`
Playcount string `xml:"playcount"`
Lastplayed string `xml:"lastplayed"`
ID string `xml:"id"`
Uniqueid []struct {
Text string `xml:",chardata"`
Type string `xml:"type,attr"`
Default string `xml:"default,attr"`
} `xml:"uniqueid"`
Genre string `xml:"genre"`
Credits []string `xml:"credits"`
Director string `xml:"director"`
Premiered string `xml:"premiered"`
Year string `xml:"year"`
Status string `xml:"status"`
Code string `xml:"code"`
Aired string `xml:"aired"`
Studio string `xml:"studio"`
Trailer string `xml:"trailer"`
Actor []struct {
Text string `xml:",chardata"`
Name string `xml:"name"`
Role string `xml:"role"`
Order string `xml:"order"`
Thumb string `xml:"thumb"`
} `xml:"actor"`
Resume struct {
Text string `xml:",chardata"`
Position string `xml:"position"`
Total string `xml:"total"`
} `xml:"resume"`
Dateadded string `xml:"dateadded"`
}
type Movie struct {
XMLName xml.Name `xml:"movie"`
Text string `xml:",chardata"`
Title string `xml:"title"`
Originaltitle string `xml:"originaltitle"`
Sorttitle string `xml:"sorttitle"`
Ratings struct {
Text string `xml:",chardata"`
Rating []struct {
Text string `xml:",chardata"`
Name string `xml:"name,attr"`
Max string `xml:"max,attr"`
Default string `xml:"default,attr"`
Value string `xml:"value"`
Votes string `xml:"votes"`
} `xml:"rating"`
} `xml:"ratings"`
Userrating string `xml:"userrating"`
Top250 string `xml:"top250"`
Outline string `xml:"outline"`
Plot string `xml:"plot"`
Tagline string `xml:"tagline"`
Runtime string `xml:"runtime"`
Thumb []struct {
Text string `xml:",chardata"`
Spoof string `xml:"spoof,attr"`
Cache string `xml:"cache,attr"`
Aspect string `xml:"aspect,attr"`
Preview string `xml:"preview,attr"`
} `xml:"thumb"`
Fanart struct {
Text string `xml:",chardata"`
Thumb struct {
Text string `xml:",chardata"`
Colors string `xml:"colors,attr"`
Preview string `xml:"preview,attr"`
} `xml:"thumb"`
} `xml:"fanart"`
Mpaa string `xml:"mpaa"`
Playcount string `xml:"playcount"`
Lastplayed string `xml:"lastplayed"`
ID string `xml:"id"`
Uniqueid []UniqueId `xml:"uniqueid"`
Genre string `xml:"genre"`
Country []string `xml:"country"`
Set struct {
Text string `xml:",chardata"`
Name string `xml:"name"`
Overview string `xml:"overview"`
} `xml:"set"`
Tag []string `xml:"tag"`
Videoassettitle string `xml:"videoassettitle"`
Videoassetid string `xml:"videoassetid"`
Videoassettype string `xml:"videoassettype"`
Hasvideoversions string `xml:"hasvideoversions"`
Hasvideoextras string `xml:"hasvideoextras"`
Isdefaultvideoversion string `xml:"isdefaultvideoversion"`
Credits []string `xml:"credits"`
Director string `xml:"director"`
Premiered string `xml:"premiered"`
Year string `xml:"year"`
Status string `xml:"status"`
Code string `xml:"code"`
Aired string `xml:"aired"`
Studio string `xml:"studio"`
Trailer string `xml:"trailer"`
Fileinfo struct {
Text string `xml:",chardata"`
Streamdetails struct {
Text string `xml:",chardata"`
Video struct {
Text string `xml:",chardata"`
Codec string `xml:"codec"`
Aspect string `xml:"aspect"`
Width string `xml:"width"`
Height string `xml:"height"`
Durationinseconds string `xml:"durationinseconds"`
Stereomode string `xml:"stereomode"`
Hdrtype string `xml:"hdrtype"`
} `xml:"video"`
Audio struct {
Text string `xml:",chardata"`
Codec string `xml:"codec"`
Language string `xml:"language"`
Channels string `xml:"channels"`
} `xml:"audio"`
Subtitle struct {
Text string `xml:",chardata"`
Language string `xml:"language"`
} `xml:"subtitle"`
} `xml:"streamdetails"`
} `xml:"fileinfo"`
Actor []struct {
Text string `xml:",chardata"`
Name string `xml:"name"`
Role string `xml:"role"`
Order string `xml:"order"`
Thumb string `xml:"thumb"`
} `xml:"actor"`
}
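
For reference, the Tvshow value populated in writeNfoFile marshals to XML roughly like the abridged snippet below (values are made up); since none of the fields use omitempty, the real output also contains an empty element or attribute for every unset field.

<tvshow>
  <title>示例剧集</title>
  <originaltitle>Example Show</originaltitle>
  <showtitle>示例剧集</showtitle>
  <plot>A made-up overview.</plot>
  <id>12345</id>
  <uniqueid type="tmdb" default="true">12345</uniqueid>
  <uniqueid type="imdb" default="">tt1234567</uniqueid>
</tvshow>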

server/core/resources.go Normal file

@@ -0,0 +1,148 @@
package core
import (
"fmt"
"polaris/ent"
"polaris/ent/episode"
"polaris/ent/history"
"polaris/log"
"polaris/pkg/notifier/message"
"polaris/pkg/torznab"
"polaris/pkg/utils"
"github.com/pkg/errors"
)
func (c *Client) DownloadEpisodeTorrent(r1 torznab.Result, seriesId, seasonNum, episodeNum int) (*string, error) {
trc, dlc, err := c.getDownloadClient()
if err != nil {
return nil, errors.Wrap(err, "connect transmission")
}
series := c.db.GetMediaDetails(seriesId)
if series == nil {
return nil, fmt.Errorf("no tv series of id %v", seriesId)
}
//check space available
downloadDir := c.db.GetDownloadDir()
size := utils.AvailableSpace(downloadDir)
if size < uint64(r1.Size) {
log.Errorf("space available %v, space needed %v", size, r1.Size)
return nil, errors.New("no enough space")
}
var ep *ent.Episode
if episodeNum > 0 {
for _, e := range series.Episodes {
if e.SeasonNumber == seasonNum && e.EpisodeNumber == episodeNum {
ep = e
}
}
if ep == nil {
return nil, errors.Errorf("no episode of season %d episode %d", seasonNum, episodeNum)
}
} else { //season package download
ep = &ent.Episode{}
}
torrent, err := trc.Download(r1.Link, downloadDir)
if err != nil {
return nil, errors.Wrap(err, "downloading")
}
torrent.Start()
dir := fmt.Sprintf("%s/Season %02d/", series.TargetDir, seasonNum)
history, err := c.db.SaveHistoryRecord(ent.History{
MediaID: seriesId,
EpisodeID: ep.ID,
SourceTitle: r1.Name,
TargetDir: dir,
Status: history.StatusRunning,
Size: r1.Size,
Saved: torrent.Save(),
DownloadClientID: dlc.ID,
IndexerID: r1.IndexerId,
})
if err != nil {
return nil, errors.Wrap(err, "save record")
}
if episodeNum > 0 {
if ep.Status == episode.StatusMissing {
c.db.SetEpisodeStatus(ep.ID, episode.StatusDownloading)
}
} else {
c.db.SetSeasonAllEpisodeStatus(seriesId, seasonNum, episode.StatusDownloading)
}
c.tasks[history.ID] = &Task{Torrent: torrent}
c.sendMsg(fmt.Sprintf(message.BeginDownload, r1.Name))
log.Infof("success add %s to download task", r1.Name)
return &r1.Name, nil
}
func (c *Client) SearchAndDownload(seriesId, seasonNum, episodeNum int) (*string, error) {
var episodes []int
if episodeNum > 0 {
episodes = append(episodes, episodeNum)
}
res, err := SearchTvSeries(c.db, &SearchParam{
MediaId: seriesId,
SeasonNum: seasonNum,
Episodes: episodes,
CheckFileSize: true,
CheckResolution: true,
})
if err != nil {
return nil, err
}
r1 := res[0]
log.Infof("found resource to download: %+v", r1)
return c.DownloadEpisodeTorrent(r1, seriesId, seasonNum, episodeNum)
}
func (c *Client) DownloadMovie(m *ent.Media, link, name string, size int, indexerID int) (*string, error) {
trc, dlc, err := c.getDownloadClient()
if err != nil {
return nil, errors.Wrap(err, "connect transmission")
}
torrent, err := trc.Download(link, c.db.GetDownloadDir())
if err != nil {
return nil, errors.Wrap(err, "downloading")
}
torrent.Start()
if name == "" {
name = m.OriginalName
}
go func() {
ep, _ := c.db.GetMovieDummyEpisode(m.ID)
history, err := c.db.SaveHistoryRecord(ent.History{
MediaID: m.ID,
EpisodeID: ep.ID,
SourceTitle: name,
TargetDir: m.TargetDir,
Status: history.StatusRunning,
Size: size,
Saved: torrent.Save(),
DownloadClientID: dlc.ID,
IndexerID: indexerID,
})
if err != nil {
log.Errorf("save history error: %v", err)
}
c.tasks[history.ID] = &Task{Torrent: torrent}
if ep.Status == episode.StatusMissing {
c.db.SetEpisodeStatus(ep.ID, episode.StatusDownloading)
}
}()
c.sendMsg(fmt.Sprintf(message.BeginDownload, name))
log.Infof("success add %s to download task", name)
return &name, nil
}

server/core/scheduler.go Normal file

@@ -0,0 +1,454 @@
package core
import (
"fmt"
"path/filepath"
"polaris/db"
"polaris/ent"
"polaris/ent/episode"
"polaris/ent/history"
"polaris/ent/media"
"polaris/log"
"polaris/pkg"
"polaris/pkg/notifier/message"
"polaris/pkg/utils"
"github.com/pkg/errors"
)
func (c *Client) addSysCron() {
c.registerCronJob("check_running_tasks", "@every 1m", c.checkTasks)
c.registerCronJob("check_available_medias_to_download", "0 0 * * * *", func() error {
c.downloadAllTvSeries()
c.downloadAllMovies()
return nil
})
c.registerCronJob("check_series_new_release", "0 0 */12 * * *", c.checkAllSeriesNewSeason)
c.registerCronJob("update_import_lists", "0 30 * * * *", c.periodicallyUpdateImportlist)
c.schedulers.Range(func(key string, value scheduler) bool {
log.Debugf("add cron job: %v", key)
c.mustAddCron(value.cron, func() {
if err := value.f(); err != nil {
log.Errorf("exexuting cron job %s error: %v", key, err)
}
})
return true
})
c.cron.Start()
}
func (c *Client) mustAddCron(spec string, cmd func()) {
if err := c.cron.AddFunc(spec, cmd); err != nil {
log.Errorf("add func error: %v", err)
panic(err)
}
}
func (c *Client) TriggerCronJob(name string) error {
job, ok := c.schedulers.Load(name)
if !ok {
return fmt.Errorf("job name not exists: %s", name)
}
return job.f()
}
func (c *Client) checkTasks() error {
log.Debug("begin check tasks...")
for id, t := range c.tasks {
r := c.db.GetHistory(id)
if !t.Exists() {
log.Infof("task no longer exists: %v", id)
delete(c.tasks, id)
continue
}
name, err := t.Name()
if err != nil {
return errors.Wrap(err, "get name")
}
progress, err := t.Progress()
if err != nil {
return errors.Wrap(err, "get progress")
}
log.Infof("task (%s) percentage done: %d%%", name, progress)
if progress == 100 {
if r.Status == history.StatusSeeding {
//task already success, check seed ratio
torrent := c.tasks[id]
ratio, ok := c.isSeedRatioLimitReached(r.IndexerID, torrent)
if ok {
log.Infof("torrent file seed ratio reached, remove: %v, current seed ratio: %v", name, ratio)
torrent.Remove()
delete(c.tasks, id)
} else {
log.Infof("torrent file still sedding: %v, current seed ratio: %v", name, ratio)
}
continue
}
log.Infof("task is done: %v", name)
c.sendMsg(fmt.Sprintf(message.DownloadComplete, name))
go c.postTaskProcessing(id)
}
}
return nil
}
func (c *Client) postTaskProcessing(id int) {
if err := c.findEpisodeFilesPreMoving(id); err != nil {
log.Errorf("finding all episode file error: %v", err)
} else {
if err := c.writePlexmatch(id); err != nil {
log.Errorf("write plexmatch file error: %v", err)
}
if err := c.writeNfoFile(id); err != nil {
log.Errorf("write nfo file error: %v", err)
}
}
if err := c.moveCompletedTask(id); err != nil {
log.Infof("post tasks for id %v fail: %v", id, err)
}
}
func (c *Client) moveCompletedTask(id int) (err1 error) {
torrent := c.tasks[id]
r := c.db.GetHistory(id)
if r.Status == history.StatusUploading {
log.Infof("task %d is already uploading, skip", id)
return nil
}
c.db.SetHistoryStatus(r.ID, history.StatusUploading)
seasonNum, err := utils.SeasonId(r.TargetDir)
if err != nil {
log.Errorf("no season id: %v", r.TargetDir)
seasonNum = -1
}
downloadclient, err := c.db.GetDownloadClient(r.DownloadClientID)
if err != nil {
log.Errorf("get task download client error: %v, use default one", err)
downloadclient = &ent.DownloadClients{RemoveCompletedDownloads: true, RemoveFailedDownloads: true}
}
torrentName, err := torrent.Name()
if err != nil {
return err
}
defer func() {
if err1 != nil {
c.db.SetHistoryStatus(r.ID, history.StatusFail)
if r.EpisodeID != 0 {
if !c.db.IsEpisodeDownloadingOrDownloaded(r.EpisodeID) {
c.db.SetEpisodeStatus(r.EpisodeID, episode.StatusMissing)
}
} else {
c.db.SetSeasonAllEpisodeStatus(r.MediaID, seasonNum, episode.StatusMissing)
}
c.sendMsg(fmt.Sprintf(message.ProcessingFailed, err1))
if downloadclient.RemoveFailedDownloads {
log.Debugf("task failed, remove failed torrent and files related")
delete(c.tasks, r.ID)
torrent.Remove()
}
}
}()
series := c.db.GetMediaDetails(r.MediaID)
if series == nil {
return nil
}
st := c.db.GetStorage(series.StorageID)
log.Infof("move task files to target dir: %v", r.TargetDir)
stImpl, err := c.getStorage(st.ID, series.MediaType)
if err != nil {
return err
}
// if the torrent saved a directory, expand it and move only the files inside (like "move dir/* dir2/"); if it is a single file, move the file itself (like "move file dir/")
if err := stImpl.Copy(filepath.Join(c.db.GetDownloadDir(), torrentName), r.TargetDir); err != nil {
return errors.Wrap(err, "move file")
}
c.db.SetHistoryStatus(r.ID, history.StatusSeeding)
if r.EpisodeID != 0 {
c.db.SetEpisodeStatus(r.EpisodeID, episode.StatusDownloaded)
} else {
c.db.SetSeasonAllEpisodeStatus(r.MediaID, seasonNum, episode.StatusDownloaded)
}
c.sendMsg(fmt.Sprintf(message.ProcessingComplete, torrentName))
// decide whether the local files should be removed
r1, ok := c.isSeedRatioLimitReached(r.IndexerID, torrent)
if downloadclient.RemoveCompletedDownloads && ok {
log.Debugf("download complete,remove torrent and files related, torrent: %v, seed ratio: %v", torrentName, r1)
c.db.SetHistoryStatus(r.ID, history.StatusSuccess)
delete(c.tasks, r.ID)
torrent.Remove()
}
log.Infof("move downloaded files to target dir success, file: %v, target dir: %v", torrentName, r.TargetDir)
return nil
}
func (c *Client) CheckDownloadedSeriesFiles(m *ent.Media) error {
if m.MediaType != media.MediaTypeTv {
return nil
}
log.Infof("check files in directory: %s", m.TargetDir)
var storageImpl, err = c.getStorage(m.StorageID, media.MediaTypeTv)
if err != nil {
return err
}
files, err := storageImpl.ReadDir(m.TargetDir)
if err != nil {
return errors.Wrapf(err, "read dir %s", m.TargetDir)
}
for _, in := range files {
if !in.IsDir() { //season dir, ignore file
continue
}
dir := filepath.Join(m.TargetDir, in.Name())
epFiles, err := storageImpl.ReadDir(dir)
if err != nil {
log.Errorf("read dir %s error: %v", dir, err)
continue
}
for _, ep := range epFiles {
log.Infof("found file: %v", ep.Name())
seNum, epNum, err := utils.FindSeasonEpisodeNum(ep.Name())
if err != nil {
log.Errorf("find season episode num error: %v", err)
continue
}
log.Infof("found match, season num %d, episode num %d", seNum, epNum)
ep, err := c.db.GetEpisode(m.ID, seNum, epNum)
if err != nil {
log.Error("update episode: %v", err)
continue
}
err = c.db.SetEpisodeStatus(ep.ID, episode.StatusDownloaded)
if err != nil {
log.Error("update episode: %v", err)
continue
}
}
}
return nil
}
type Task struct {
//Processing bool
pkg.Torrent
}
func (c *Client) DownloadSeriesAllEpisodes(id int) []string {
tvDetail := c.db.GetMediaDetails(id)
m := make(map[int][]*ent.Episode)
for _, ep := range tvDetail.Episodes {
m[ep.SeasonNumber] = append(m[ep.SeasonNumber], ep)
}
var allNames []string
for seasonNum, epsides := range m {
if seasonNum == 0 {
continue
}
wantedSeasonPack := true
for _, ep := range epsides {
if !ep.Monitored {
wantedSeasonPack = false
}
if ep.Status != episode.StatusMissing {
wantedSeasonPack = false
}
}
if wantedSeasonPack {
name, err := c.SearchAndDownload(id, seasonNum, -1)
if err == nil {
allNames = append(allNames, *name)
log.Infof("begin download torrent resource: %v", name)
} else {
log.Warnf("finding season pack error: %v", err)
wantedSeasonPack = false
}
}
if !wantedSeasonPack {
for _, ep := range epsides {
if !ep.Monitored {
continue
}
if ep.Status != episode.StatusMissing {
continue
}
name, err := c.SearchAndDownload(id, ep.SeasonNumber, ep.EpisodeNumber)
if err != nil {
log.Warnf("finding resoruces of season %d episode %d error: %v", ep.SeasonNumber, ep.EpisodeNumber, err)
continue
} else {
allNames = append(allNames, *name)
log.Infof("begin download torrent resource: %v", name)
}
}
}
}
return allNames
}
func (c *Client) downloadAllTvSeries() {
log.Infof("begin check all tv series resources")
allSeries := c.db.GetMediaWatchlist(media.MediaTypeTv)
for _, series := range allSeries {
c.DownloadSeriesAllEpisodes(series.ID)
}
}
func (c *Client) downloadAllMovies() {
log.Infof("begin check all movie resources")
allSeries := c.db.GetMediaWatchlist(media.MediaTypeMovie)
for _, series := range allSeries {
if _, err := c.DownloadMovieByID(series.ID); err != nil {
log.Errorf("download movie error: %v", err)
}
}
}
func (c *Client) DownloadMovieByID(id int) (string, error) {
detail := c.db.GetMediaDetails(id)
if len(detail.Episodes) == 0 {
return "", fmt.Errorf("no related dummy episode: %v", detail.NameEn)
}
ep := detail.Episodes[0]
if ep.Status != episode.StatusMissing {
return "", nil
}
if name, err := c.downloadMovieSingleEpisode(ep, detail.TargetDir); err != nil {
return "", errors.Wrap(err, "download movie")
} else {
return name, nil
}
}
func (c *Client) downloadMovieSingleEpisode(ep *ent.Episode, targetDir string) (string, error) {
trc, dlc, err := c.getDownloadClient()
if err != nil {
return "", errors.Wrap(err, "connect transmission")
}
qiangban := c.db.GetSetting(db.SettingAllowQiangban)
allowQiangban := false
if qiangban == "true" {
allowQiangban = true
}
res, err := SearchMovie(c.db, &SearchParam{
MediaId: ep.MediaID,
CheckFileSize: true,
CheckResolution: true,
FilterQiangban: !allowQiangban,
})
if err != nil {
return "", errors.Wrap(err, "search movie")
}
r1 := res[0]
log.Infof("begin download torrent resource: %v", r1.Name)
torrent, err := trc.Download(r1.Link, c.db.GetDownloadDir())
if err != nil {
return "", errors.Wrap(err, "downloading")
}
torrent.Start()
history, err := c.db.SaveHistoryRecord(ent.History{
MediaID: ep.MediaID,
EpisodeID: ep.ID,
SourceTitle: r1.Name,
TargetDir: targetDir,
Status: history.StatusRunning,
Size: r1.Size,
Saved: torrent.Save(),
DownloadClientID: dlc.ID,
IndexerID: r1.IndexerId,
})
if err != nil {
log.Errorf("save history error: %v", err)
}
c.tasks[history.ID] = &Task{Torrent: torrent}
c.db.SetEpisodeStatus(ep.ID, episode.StatusDownloading)
return r1.Name, nil
}
func (c *Client) checkAllSeriesNewSeason() error {
log.Infof("begin checking series all new season")
allSeries := c.db.GetMediaWatchlist(media.MediaTypeTv)
for _, series := range allSeries {
err := c.checkSeiesNewSeason(series)
if err != nil {
log.Errorf("check series new season error: series name %v, error: %v", series.NameEn, err)
}
}
return nil
}
func (c *Client) checkSeiesNewSeason(media *ent.Media) error {
d, err := c.MustTMDB().GetTvDetails(media.TmdbID, c.language)
if err != nil {
return errors.Wrap(err, "tmdb")
}
lastsSason := d.NumberOfSeasons
seasonDetail, err := c.MustTMDB().GetSeasonDetails(media.TmdbID, lastsSason, c.language)
if err != nil {
return errors.Wrap(err, "tmdb season")
}
for _, ep := range seasonDetail.Episodes {
epDb, err := c.db.GetEpisode(media.ID, ep.SeasonNumber, ep.EpisodeNumber)
if err != nil {
if ent.IsNotFound(err) {
log.Infof("add new episode: %+v", ep)
episode := &ent.Episode{
MediaID: media.ID,
SeasonNumber: ep.SeasonNumber,
EpisodeNumber: ep.EpisodeNumber,
Title: ep.Name,
Overview: ep.Overview,
AirDate: ep.AirDate,
Status: episode.StatusMissing,
Monitored: true,
}
c.db.SaveEposideDetail2(episode)
}
} else { //update episode
if ep.Name != epDb.Title || ep.Overview != epDb.Overview || ep.AirDate != epDb.AirDate {
log.Infof("update new episode: %+v", ep)
c.db.UpdateEpiode2(epDb.ID, ep.Name, ep.Overview, ep.AirDate)
}
}
}
return nil
}
func (c *Client) isSeedRatioLimitReached(indexId int, t pkg.Torrent) (float64, bool) {
indexer, err := c.db.GetIndexer(indexId)
if err != nil {
return 0, true
}
currentRatio, err := t.SeedRatio()
if err != nil {
log.Warnf("get current seed ratio error: %v", err)
return currentRatio, indexer.SeedRatio == 0
}
return currentRatio, currentRatio >= float64(indexer.SeedRatio)
}
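
A note on the cron specs registered in addSysCron: the project imports github.com/robfig/cron (v1), whose default parser accepts six fields with a leading seconds field, so "0 30 * * * *" fires at second 0 of minute 30 every hour and "0 0 */12 * * *" every 12 hours, while "@every 1m" is a descriptor that runs once a minute. A minimal standalone sketch of the same registration pattern (job bodies are placeholders):

package main

import (
	"log"

	"github.com/robfig/cron"
)

func main() {
	c := cron.New()
	// Six-field spec (seconds first): second 0, minute 30, every hour.
	if err := c.AddFunc("0 30 * * * *", func() { log.Println("update import lists") }); err != nil {
		log.Fatal(err)
	}
	// Descriptor form: run once a minute.
	if err := c.AddFunc("@every 1m", func() { log.Println("check running tasks") }); err != nil {
		log.Fatal(err)
	}
	c.Start()
	select {} // block so the cron goroutine keeps running
}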


@@ -3,10 +3,12 @@ package core
import (
"fmt"
"polaris/db"
"polaris/ent/media"
"polaris/log"
"polaris/pkg/metadata"
"polaris/pkg/torznab"
"polaris/pkg/utils"
"regexp"
"slices"
"sort"
"strconv"
"strings"
@@ -15,11 +17,128 @@ import (
"github.com/pkg/errors"
)
func SearchSeasonPackage(db1 *db.Client, seriesId, seasonNum int, checkResolution bool) ([]torznab.Result, error) {
return SearchEpisode(db1, seriesId, seasonNum, -1, checkResolution)
type SearchParam struct {
MediaId int
SeasonNum int //for tv
Episodes []int //for tv
CheckResolution bool
CheckFileSize bool
FilterQiangban bool //for movie, whether to filter out cam-rip (枪版) releases
}
func isNumberedSeries(detail *db.MediaDetails) bool {
func SearchTvSeries(db1 *db.Client, param *SearchParam) ([]torznab.Result, error) {
series := db1.GetMediaDetails(param.MediaId)
if series == nil {
return nil, fmt.Errorf("no tv series of id %v", param.MediaId)
}
log.Debugf("check tv series %s, season %d, episode %v", series.NameEn, param.SeasonNum, param.Episodes)
res := searchWithTorznab(db1, series.NameEn, series.NameCn, series.OriginalName)
var filtered []torznab.Result
for _, r := range res {
//log.Infof("torrent resource: %+v", r)
meta := metadata.ParseTv(r.Name)
if meta == nil { //cannot parse name
continue
}
if isImdbidNotMatch(series.ImdbID, r.ImdbId) { //has imdb id and not match
continue
}
if !imdbIDMatchExact(series.ImdbID, r.ImdbId) { //imdb id not exact match, check file name
if !torrentNameOk(series, r.Name) {
continue
}
}
if !isNoSeasonSeries(series) && meta.Season != param.SeasonNum { //do not check season on series that only rely on episode number
continue
}
if isNoSeasonSeries(series) && len(param.Episodes) == 0 {
//should not want season
continue
}
if len(param.Episodes) > 0 && !slices.Contains(param.Episodes, meta.Episode) { //not season pack, but episode number not equal
continue
} else if len(param.Episodes) == 0 && !meta.IsSeasonPack { //want season pack, but not season pack
continue
}
if param.CheckResolution && meta.Resolution != series.Resolution.String() {
continue
}
if !torrentSizeOk(series, r.Size, param) {
continue
}
filtered = append(filtered, r)
}
if len(filtered) == 0 {
return nil, errors.New("no resource found")
}
filtered = dedup(filtered)
return filtered, nil
}
// if either imdb id is missing, it is considered a match
func isImdbidNotMatch(id1, id2 string) bool {
if id1 == "" || id2 == "" {
return false
}
id1 = strings.TrimPrefix(id1, "tt")
id2 = strings.TrimPrefix(id2, "tt")
return id1 != id2
}
// if either imdb id is missing, it is not considered an exact match
func imdbIDMatchExact(id1, id2 string) bool {
if id1 == "" || id2 == "" {
return false
}
id1 = strings.TrimPrefix(id1, "tt")
id2 = strings.TrimPrefix(id2, "tt")
return id1 == id2
}
func torrentSizeOk(detail *db.MediaDetails, torrentSize int, param *SearchParam) bool {
if param.CheckFileSize {
multiplier := 1 // size multiplier: normally 1, but for a season pack it is the number of episodes in that season
if detail.MediaType == media.MediaTypeTv && len(param.Episodes) == 0 { //tv season pack
multiplier = seasonEpisodeCount(detail, param.SeasonNum)
}
if detail.Limiter.SizeMin > 0 { //min size
sizeMin := detail.Limiter.SizeMin * multiplier
if torrentSize < sizeMin { // smaller than the minimum required size
return false
}
}
if detail.Limiter.SizeMax > 0 { //max size
sizeMax := detail.Limiter.SizeMax * multiplier
if torrentSize > sizeMax { //larger than max size wanted
return false
}
}
}
return true
}
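
As a worked example (sizes here are illustrative; the field uses whatever unit the indexer reports): with Limiter.SizeMin = 2 GB, Limiter.SizeMax = 10 GB and a season of 10 tracked episodes, a season-pack torrent is only accepted between 20 GB and 100 GB, while for a single-episode or movie search the multiplier stays 1 and the 2–10 GB window applies directly.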
func seasonEpisodeCount(detail *db.MediaDetails, seasonNum int) int {
count := 0
for _, ep := range detail.Episodes {
if ep.SeasonNumber == seasonNum {
count++
}
}
return count
}
func isNoSeasonSeries(detail *db.MediaDetails) bool {
hasSeason2 := false
season2HasEpisode1 := false
for _, ep := range detail.Episodes {
@@ -34,64 +153,17 @@ func isNumberedSeries(detail *db.MediaDetails) bool {
return hasSeason2 && !season2HasEpisode1 //only one 1st episode
}
func SearchEpisode(db1 *db.Client, seriesId, seasonNum, episodeNum int, checkResolution bool) ([]torznab.Result, error) {
series := db1.GetMediaDetails(seriesId)
if series == nil {
return nil, fmt.Errorf("no tv series of id %v", seriesId)
}
res := searchWithTorznab(db1, series.NameEn)
resCn := searchWithTorznab(db1, series.NameCn)
res = append(res, resCn...)
var filtered []torznab.Result
for _, r := range res {
//log.Infof("torrent resource: %+v", r)
meta := metadata.ParseTv(r.Name)
if meta == nil { //cannot parse name
continue
}
if !isNumberedSeries(series) { //do not check season on series that only rely on episode number
if meta.Season != seasonNum {
continue
}
}
if isNumberedSeries(series) && episodeNum == -1 {
//should not want season
continue
}
if episodeNum != -1 && meta.Episode != episodeNum { //not season pack, episode number equals
continue
} else if episodeNum == -1 && !meta.IsSeasonPack { //want season pack, but not season pack
continue
}
if checkResolution && meta.Resolution != series.Resolution.String() {
continue
}
if !utils.IsNameAcceptable(meta.NameEn, series.NameEn) && !utils.IsNameAcceptable(meta.NameCn, series.NameCn) {
continue
}
filtered = append(filtered, r)
}
if len(filtered) == 0 {
return nil, errors.New("no resource found")
}
return filtered, nil
}
func SearchMovie(db1 *db.Client, movieId int, checkResolution bool) ([]torznab.Result, error) {
movieDetail := db1.GetMediaDetails(movieId)
func SearchMovie(db1 *db.Client, param *SearchParam) ([]torznab.Result, error) {
movieDetail := db1.GetMediaDetails(param.MediaId)
if movieDetail == nil {
return nil, errors.New("no media found of id")
}
res := searchWithTorznab(db1, movieDetail.NameEn)
res1 := searchWithTorznab(db1, movieDetail.NameCn)
res = append(res, res1...)
res := searchWithTorznab(db1, movieDetail.NameEn, movieDetail.NameCn, movieDetail.OriginalName)
if movieDetail.Extras.IsJav() {
res1 := searchWithTorznab(db1, movieDetail.Extras.JavId)
res = append(res, res1...)
}
if len(res) == 0 {
return nil, fmt.Errorf("no resource found")
@@ -99,15 +171,33 @@ func SearchMovie(db1 *db.Client, movieId int, checkResolution bool) ([]torznab.R
var filtered []torznab.Result
for _, r := range res {
meta := metadata.ParseMovie(r.Name)
if !utils.IsNameAcceptable(meta.NameEn, movieDetail.NameEn) {
if isImdbidNotMatch(movieDetail.ImdbID, r.ImdbId) { //imdb id not match
continue
}
if checkResolution && meta.Resolution != movieDetail.Resolution.String() {
if !imdbIDMatchExact(movieDetail.ImdbID, r.ImdbId) {
if !torrentNameOk(movieDetail, r.Name) {
continue
}
if !movieDetail.Extras.IsJav() {
ss := strings.Split(movieDetail.AirDate, "-")[0]
year, _ := strconv.Atoi(ss)
if meta.Year != year && meta.Year != year-1 && meta.Year != year+1 { //year not match
continue
}
}
}
if param.CheckResolution && meta.Resolution != movieDetail.Resolution.String() {
continue
}
ss := strings.Split(movieDetail.AirDate, "-")[0]
year, _ := strconv.Atoi(ss)
if meta.Year != year && meta.Year != year-1 && meta.Year != year+1 { //year not match
if param.FilterQiangban && meta.IsQingban { // filter out cam-rip releases
continue
}
if !torrentSizeOk(movieDetail, r.Size, param) {
continue
}
@@ -117,12 +207,13 @@ func SearchMovie(db1 *db.Client, movieId int, checkResolution bool) ([]torznab.R
if len(filtered) == 0 {
return nil, errors.New("no resource found")
}
filtered = dedup(filtered)
return filtered, nil
}
func searchWithTorznab(db *db.Client, q string) []torznab.Result {
func searchWithTorznab(db *db.Client, queries ...string) []torznab.Result {
var res []torznab.Result
allTorznab := db.GetAllTorznabInfo()
@@ -130,18 +221,24 @@ func searchWithTorznab(db *db.Client, q string) []torznab.Result {
var wg sync.WaitGroup
for _, tor := range allTorznab {
wg.Add(1)
go func() {
log.Debugf("search torznab %v with %v", tor.Name, q)
defer wg.Done()
resp, err := torznab.Search(tor.URL, tor.ApiKey, q)
if err != nil {
log.Errorf("search %s error: %v", tor.Name, err)
return
}
resChan <- resp
if tor.Disabled {
continue
}
for _, q := range queries {
wg.Add(1)
}()
go func() {
log.Debugf("search torznab %v with %v", tor.Name, queries)
defer wg.Done()
resp, err := torznab.Search(tor, q)
if err != nil {
log.Warnf("search %s with query %s error: %v", tor.Name, q, err)
return
}
resChan <- resp
}()
}
}
go func() {
wg.Wait()
@@ -152,11 +249,70 @@ func searchWithTorznab(db *db.Client, q string) []torznab.Result {
res = append(res, result...)
}
sort.Slice(res, func(i, j int) bool {
res = dedup(res)
sort.SliceStable(res, func(i, j int) bool { // sort by seeder count first
var s1 = res[i]
var s2 = res[j]
return s1.Seeders > s2.Seeders
})
sort.SliceStable(res, func(i, j int) bool { // then sort by priority; higher-priority torrents come first
var s1 = res[i]
var s2 = res[j]
return s1.Priority > s2.Priority
})
// for private-tracker resources, prefer free-leech torrents within the same indexer
sort.SliceStable(res, func(i, j int) bool {
var s1 = res[i]
var s2 = res[j]
if s1.IndexerId == s2.IndexerId && s1.IsPrivate {
return s1.DownloadVolumeFactor < s2.DownloadVolumeFactor
}
return false
})
// within the same indexer, if the download cost is the same, prefer the torrent with the higher upload bonus
sort.SliceStable(res, func(i, j int) bool {
var s1 = res[i]
var s2 = res[j]
if s1.IndexerId == s2.IndexerId && s1.IsPrivate && s1.DownloadVolumeFactor == s2.DownloadVolumeFactor {
return s1.UploadVolumeFactor > s2.UploadVolumeFactor
}
return false
})
return res
}
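
The chained sort.SliceStable calls above rely on stability: each later pass only reorders elements its comparator distinguishes, so the last pass becomes the dominant criterion and earlier passes act as tie-breakers. A small standalone illustration with just seeders and priority:

package main

import (
	"fmt"
	"sort"
)

type result struct {
	Name     string
	Seeders  int
	Priority int
}

func main() {
	res := []result{
		{"a", 50, 0},
		{"b", 10, 25},
		{"c", 80, 25},
		{"d", 5, 0},
	}
	// Pass 1: most seeders first.
	sort.SliceStable(res, func(i, j int) bool { return res[i].Seeders > res[j].Seeders })
	// Pass 2: highest priority first; because the sort is stable, items with
	// equal priority keep the seeder ordering from pass 1.
	sort.SliceStable(res, func(i, j int) bool { return res[i].Priority > res[j].Priority })
	fmt.Println(res) // [{c 80 25} {b 10 25} {a 50 0} {d 5 0}]
}

Applied to the code above, that makes indexer priority the primary key with seeder count as the tie-breaker, while the last two passes nudge private-tracker results from the same indexer toward free-leech, higher-upload-bonus torrents.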
func dedup(list []torznab.Result) []torznab.Result {
var res = make([]torznab.Result, 0, len(list))
seen := make(map[string]bool, 0)
for _, r := range list {
key := fmt.Sprintf("%s%s%d%d", r.Name, r.Source, r.Seeders, r.Peers)
if seen[key] {
continue
}
seen[key] = true
res = append(res, r)
}
return res
}
func torrentNameOk(detail *db.MediaDetails, torrentName string) bool {
if detail.Extras.IsJav() && isNameAcceptable(torrentName, detail.Extras.JavId) {
return true
}
return isNameAcceptable(torrentName, detail.NameCn) || isNameAcceptable(torrentName, detail.NameEn) ||
isNameAcceptable(torrentName, detail.OriginalName)
}
func isNameAcceptable(torrentName, wantedName string) bool {
re := regexp.MustCompile(`[^\p{L}\w\s]`)
torrentName = re.ReplaceAllString(strings.ToLower(torrentName), " ")
wantedName = re.ReplaceAllString(strings.ToLower(wantedName), " ")
torrentName = strings.Join(strings.Fields(torrentName), " ")
wantedName = strings.Join(strings.Fields(wantedName), " ")
return strings.Contains(torrentName, wantedName)
}
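
As a quick illustration of the normalization in isNameAcceptable: both names are lowercased, every character that is not a letter, word character or whitespace is replaced by a space (so CJK titles survive), and runs of whitespace are collapsed before the substring check. A standalone sketch (the release name is made up):

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func normalize(s string) string {
	re := regexp.MustCompile(`[^\p{L}\w\s]`)
	s = re.ReplaceAllString(strings.ToLower(s), " ")
	return strings.Join(strings.Fields(s), " ")
}

func main() {
	torrent := normalize("Example.Show.2024.S01E02.2160p.WEB-DL[中文字幕]")
	wanted := normalize("Example Show")
	fmt.Println(torrent)                           // example show 2024 s01e02 2160p web dl 中文字幕
	fmt.Println(strings.Contains(torrent, wanted)) // true
}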

server/importlist.go Normal file

@@ -0,0 +1,63 @@
package server
import (
"fmt"
"polaris/ent"
"polaris/ent/importlist"
"polaris/pkg/utils"
"github.com/gin-gonic/gin"
"github.com/pkg/errors"
)
func (s *Server) getAllImportLists(c *gin.Context) (interface{}, error) {
lists, err := s.db.GetAllImportLists()
return lists, err
}
type addImportlistIn struct {
Name string `json:"name" binding:"required"`
Url string `json:"url"`
Type string `json:"type"`
Qulity string `json:"qulity"`
StorageId int `json:"storage_id"`
}
func (s *Server) addImportlist(c *gin.Context) (interface{}, error) {
var in addImportlistIn
if err := c.ShouldBindJSON(&in); err != nil {
return nil, errors.Wrap(err, "json")
}
utils.TrimFields(&in)
st := s.db.GetStorage(in.StorageId)
if st == nil {
return nil, fmt.Errorf("storage id not exist: %v", in.StorageId)
}
err := s.db.AddImportlist(&ent.ImportList{
Name: in.Name,
URL: in.Url,
Type: importlist.Type(in.Type),
Qulity: in.Qulity,
StorageID: in.StorageId,
})
if err != nil {
return nil, err
}
return "success", nil
}
type deleteImportlistIn struct {
ID int `json:"id"`
}
func (s *Server) deleteImportList(c *gin.Context) (interface{}, error) {
var in deleteImportlistIn
if err := c.ShouldBindJSON(&in); err != nil {
return nil, errors.Wrap(err, "json")
}
s.db.DeleteImportlist(in.ID)
return "sucess", nil
}
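For reference, a client would call this handler with a JSON body matching addImportlistIn. A hedged sketch of such a request (the /api/v1/importlist/add path and the :8080 port come from routes shown later in this diff; the host, list type, and other values are placeholders, authentication is ignored, and the qulity key is spelled as in the handler's struct tag):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	body, _ := json.Marshal(map[string]any{
		"name":       "my-list",                  // required
		"url":        "https://example.com/list", // placeholder
		"type":       "trakt",                    // placeholder; valid values are defined by ent's importlist.Type
		"qulity":     "1080p",                    // key spelled as in the handler's struct tag
		"storage_id": 1,                          // must reference an existing storage
	})
	resp, err := http.Post("http://127.0.0.1:8080/api/v1/importlist/add",
		"application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}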


@@ -2,8 +2,7 @@ package server
import (
"polaris/ent"
"polaris/log"
"polaris/pkg/notifier"
"polaris/pkg/utils"
"strconv"
"github.com/gin-gonic/gin"
@@ -37,6 +36,7 @@ func (s *Server) AddNotificationClient(c *gin.Context) (interface{}, error) {
if err := c.ShouldBindJSON(&in); err != nil {
return nil, errors.Wrap(err, "json")
}
utils.TrimFields(&in)
err := s.db.AddNotificationClient(in.Name, in.Service, in.Settings, in.Enabled)
if err != nil {
@@ -44,32 +44,3 @@ func (s *Server) AddNotificationClient(c *gin.Context) (interface{}, error) {
}
return nil, nil
}
func (s *Server) sendMsg(msg string) {
clients, err := s.db.GetAllNotificationClients2()
if err != nil {
log.Errorf("query notification clients: %v", err)
return
}
for _, cl := range clients {
if !cl.Enabled {
continue
}
handler, ok := notifier.Gethandler(cl.Service)
if !ok {
log.Errorf("no notification implementation of service %s", cl.Service)
continue
}
noCl, err := handler(cl.Settings)
if err != nil {
log.Errorf("handle setting for name %s error: %v", cl.Name, err)
continue
}
err = noCl.SendMsg(msg)
if err != nil {
log.Errorf("send message error: %v", err)
continue
}
log.Debugf("send message to %s success, msg is %s", cl.Name, msg)
}
}


@@ -2,15 +2,12 @@ package server
import (
"fmt"
"polaris/ent"
"polaris/ent/episode"
"polaris/ent/history"
"polaris/db"
"polaris/ent/media"
"polaris/log"
"polaris/pkg/notifier/message"
"polaris/pkg/torznab"
"polaris/pkg/utils"
"polaris/server/core"
"strconv"
"github.com/gin-gonic/gin"
"github.com/pkg/errors"
@@ -18,120 +15,23 @@ import (
func (s *Server) searchAndDownloadSeasonPackage(seriesId, seasonNum int) (*string, error) {
res, err := core.SearchSeasonPackage(s.db, seriesId, seasonNum, true)
res, err := core.SearchTvSeries(s.db, &core.SearchParam{
MediaId: seriesId,
SeasonNum: seasonNum,
Episodes: nil,
CheckResolution: true,
CheckFileSize: true,
})
if err != nil {
return nil, err
}
r1 := res[0]
log.Infof("found resource to download: %+v", r1)
return s.downloadSeasonPackage(r1, seriesId, seasonNum)
return s.core.DownloadEpisodeTorrent(r1, seriesId, seasonNum, -1)
}
func (s *Server) downloadSeasonPackage(r1 torznab.Result, seriesId, seasonNum int) (*string, error) {
trc, err := s.getDownloadClient()
if err != nil {
return nil, errors.Wrap(err, "connect transmission")
}
downloadDir := s.db.GetDownloadDir()
size := utils.AvailableSpace(downloadDir)
if size < uint64(r1.Size) {
log.Errorf("space available %v, space needed %v", size, r1.Size)
return nil, errors.New("no enough space")
}
torrent, err := trc.Download(r1.Link, s.db.GetDownloadDir())
if err != nil {
return nil, errors.Wrap(err, "downloading")
}
torrent.Start()
series := s.db.GetMediaDetails(seriesId)
if series == nil {
return nil, fmt.Errorf("no tv series of id %v", seriesId)
}
dir := fmt.Sprintf("%s/Season %02d/", series.TargetDir, seasonNum)
history, err := s.db.SaveHistoryRecord(ent.History{
MediaID: seriesId,
EpisodeID: 0,
SourceTitle: r1.Name,
TargetDir: dir,
Status: history.StatusRunning,
Size: r1.Size,
Saved: torrent.Save(),
})
if err != nil {
return nil, errors.Wrap(err, "save record")
}
s.db.SetSeasonAllEpisodeStatus(seriesId, seasonNum, episode.StatusDownloading)
s.tasks[history.ID] = &Task{Torrent: torrent}
s.sendMsg(fmt.Sprintf(message.BeginDownload, r1.Name))
return &r1.Name, nil
}
func (s *Server) downloadEpisodeTorrent(r1 torznab.Result, seriesId, seasonNum, episodeNum int) (*string, error) {
trc, err := s.getDownloadClient()
if err != nil {
return nil, errors.Wrap(err, "connect transmission")
}
series := s.db.GetMediaDetails(seriesId)
if series == nil {
return nil, fmt.Errorf("no tv series of id %v", seriesId)
}
var ep *ent.Episode
for _, e := range series.Episodes {
if e.SeasonNumber == seasonNum && e.EpisodeNumber == episodeNum {
ep = e
}
}
if ep == nil {
return nil, errors.Errorf("no episode of season %d episode %d", seasonNum, episodeNum)
}
torrent, err := trc.Download(r1.Link, s.db.GetDownloadDir())
if err != nil {
return nil, errors.Wrap(err, "downloading")
}
torrent.Start()
dir := fmt.Sprintf("%s/Season %02d/", series.TargetDir, seasonNum)
history, err := s.db.SaveHistoryRecord(ent.History{
MediaID: ep.MediaID,
EpisodeID: ep.ID,
SourceTitle: r1.Name,
TargetDir: dir,
Status: history.StatusRunning,
Size: r1.Size,
Saved: torrent.Save(),
})
if err != nil {
return nil, errors.Wrap(err, "save record")
}
s.db.SetEpisodeStatus(ep.ID, episode.StatusDownloading)
s.tasks[history.ID] = &Task{Torrent: torrent}
s.sendMsg(fmt.Sprintf(message.BeginDownload, r1.Name))
log.Infof("success add %s to download task", r1.Name)
return &r1.Name, nil
}
func (s *Server) searchAndDownload(seriesId, seasonNum, episodeNum int) (*string, error) {
res, err := core.SearchEpisode(s.db, seriesId, seasonNum, episodeNum, true)
if err != nil {
return nil, err
}
r1 := res[0]
log.Infof("found resource to download: %+v", r1)
return s.downloadEpisodeTorrent(r1, seriesId, seasonNum, episodeNum)
}
type searchAndDownloadIn struct {
ID int `json:"id" binding:"required"`
Season int `json:"season"`
@@ -154,16 +54,24 @@ func (s *Server) SearchAvailableTorrents(c *gin.Context) (interface{}, error) {
if in.Episode == 0 {
//search season package
log.Infof("search series season package S%02d", in.Season)
res, err = core.SearchSeasonPackage(s.db, in.ID, in.Season, false)
res, err = core.SearchTvSeries(s.db, &core.SearchParam{
MediaId: in.ID,
SeasonNum: in.Season,
Episodes: nil,
})
if err != nil {
return nil, errors.Wrap(err, "search season package")
}
} else {
log.Infof("search series episode S%02dE%02d", in.Season, in.Episode)
res, err = core.SearchEpisode(s.db, in.ID, in.Season, in.Episode, false)
res, err = core.SearchTvSeries(s.db, &core.SearchParam{
MediaId: in.ID,
SeasonNum: in.Season,
Episodes: []int{in.Episode},
})
if err != nil {
if err.Error() == "no resource found" {
return []TorznabSearchResult{}, nil
return []string{}, nil
}
return nil, errors.Wrap(err, "search episode")
}
@@ -171,25 +79,24 @@ func (s *Server) SearchAvailableTorrents(c *gin.Context) (interface{}, error) {
}
} else {
log.Info("search movie %d", in.ID)
res, err = core.SearchMovie(s.db, in.ID, false)
qiangban := s.db.GetSetting(db.SettingAllowQiangban)
allowQiangban := false
if qiangban == "true" {
allowQiangban = true
}
res, err = core.SearchMovie(s.db, &core.SearchParam{
MediaId: in.ID,
FilterQiangban: !allowQiangban,
})
if err != nil {
if err.Error() == "no resource found" {
return []TorznabSearchResult{}, nil
return []string{}, nil
}
return nil, err
}
}
var searchResults []TorznabSearchResult
for _, r := range res {
searchResults = append(searchResults, TorznabSearchResult{
Name: r.Name,
Size: r.Size,
Seeders: r.Seeders,
Peers: r.Peers,
Link: r.Link,
})
}
return searchResults, nil
return res, nil
}
func (s *Server) SearchTvAndDownload(c *gin.Context) (interface{}, error) {
@@ -209,7 +116,7 @@ func (s *Server) SearchTvAndDownload(c *gin.Context) (interface{}, error) {
name = *name1
} else {
log.Infof("season episode search")
name1, err := s.searchAndDownload(in.ID, in.Season, in.Episode)
name1, err := s.core.SearchAndDownload(in.ID, in.Season, in.Episode)
if err != nil {
return nil, errors.Wrap(err, "download")
}
@@ -221,19 +128,11 @@ func (s *Server) SearchTvAndDownload(c *gin.Context) (interface{}, error) {
}, nil
}
type TorznabSearchResult struct {
Name string `json:"name"`
Size int `json:"size"`
Link string `json:"link"`
Seeders int `json:"seeders"`
Peers int `json:"peers"`
Source string `json:"source"`
}
type downloadTorrentIn struct {
MediaID int `json:"id" binding:"required"`
Season int `json:"season"`
Episode int `json:"episode"`
TorznabSearchResult
torznab.Result
}
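Because downloadTorrentIn now embeds torznab.Result without a field name, encoding/json promotes the embedded struct's exported fields, so the request body stays flat: name, link, size and the other torznab fields sit next to id, season and episode. A small sketch of that behavior with a stand-in Result type (the field set and tags are illustrative, not the project's exact definition):

package main

import (
	"encoding/json"
	"fmt"
)

// Result is a stand-in for torznab.Result with an illustrative subset of fields.
type Result struct {
	Name string `json:"name"`
	Link string `json:"link"`
	Size int    `json:"size"`
}

type downloadTorrentIn struct {
	MediaID int `json:"id"`
	Season  int `json:"season"`
	Episode int `json:"episode"`
	Result      // embedded: its fields are promoted into the flat JSON object
}

func main() {
	var in downloadTorrentIn
	payload := []byte(`{"id":7,"season":1,"episode":2,"name":"Some.Show.S01E02","link":"magnet:?xt=abc","size":123}`)
	if err := json.Unmarshal(payload, &in); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", in) // {MediaID:7 Season:1 Episode:2 Result:{Name:Some.Show.S01E02 Link:magnet:?xt=abc Size:123}}
}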
func (s *Server) DownloadTorrent(c *gin.Context) (interface{}, error) {
@@ -255,51 +154,35 @@ func (s *Server) DownloadTorrent(c *gin.Context) (interface{}, error) {
name = fmt.Sprintf("%v S%02d", m.OriginalName, in.Season)
}
res := torznab.Result{Name: name, Link: in.Link, Size: in.Size}
return s.downloadSeasonPackage(res, in.MediaID, in.Season)
return s.core.DownloadEpisodeTorrent(res, in.MediaID, in.Season, -1)
}
name := in.Name
if name == "" {
name = fmt.Sprintf("%v S%02dE%02d", m.OriginalName, in.Season, in.Episode)
}
res := torznab.Result{Name: name, Link: in.Link, Size: in.Size}
return s.downloadEpisodeTorrent(res, in.MediaID, in.Season, in.Episode)
res := torznab.Result{Name: name, Link: in.Link, Size: in.Size, IndexerId: in.IndexerId}
return s.core.DownloadEpisodeTorrent(res, in.MediaID, in.Season, in.Episode)
} else {
//movie
return s.core.DownloadMovie(m, in.Link, in.Name, in.Size, in.IndexerId)
}
trc, err := s.getDownloadClient()
if err != nil {
return nil, errors.Wrap(err, "connect transmission")
}
torrent, err := trc.Download(in.Link, s.db.GetDownloadDir())
if err != nil {
return nil, errors.Wrap(err, "downloading")
}
torrent.Start()
name := in.Name
if name == "" {
name = m.OriginalName
}
go func() {
ep, _ := s.db.GetMovieDummyEpisode(m.ID)
history, err := s.db.SaveHistoryRecord(ent.History{
MediaID: m.ID,
EpisodeID: ep.ID,
SourceTitle: name,
TargetDir: "./",
Status: history.StatusRunning,
Size: in.Size,
Saved: torrent.Save(),
})
if err != nil {
log.Errorf("save history error: %v", err)
}
s.tasks[history.ID] = &Task{Torrent: torrent}
s.db.SetEpisodeStatus(ep.ID, episode.StatusDownloading)
}()
s.sendMsg(fmt.Sprintf(message.BeginDownload, in.Name))
log.Infof("success add %s to download task", in.Name)
return in.Name, nil
}
func (s *Server) DownloadAll(c *gin.Context) (interface{}, error) {
ids := c.Param("id")
id, err := strconv.Atoi(ids)
if err != nil {
return nil, errors.Wrap(err, "convert")
}
m, err := s.db.GetMedia(id)
if err != nil {
return nil, errors.Wrap(err, "get media")
}
if m.MediaType == media.MediaTypeTv {
return s.core.DownloadSeriesAllEpisodes(m.ID), nil
}
name, err := s.core.DownloadMovieByID(m.ID)
return []string{name}, err
}


@@ -1,359 +0,0 @@
package server
import (
"fmt"
"path/filepath"
"polaris/ent"
"polaris/ent/episode"
"polaris/ent/history"
"polaris/ent/media"
storage1 "polaris/ent/storage"
"polaris/log"
"polaris/pkg"
"polaris/pkg/notifier/message"
"polaris/pkg/storage"
"polaris/pkg/utils"
"polaris/server/core"
"time"
"github.com/pkg/errors"
)
func (s *Server) scheduler() {
s.mustAddCron("@every 1m", s.checkTasks)
s.mustAddCron("0 0 * * * *", func() {
s.downloadTvSeries()
s.downloadMovie()
})
s.mustAddCron("0 0 */12 * * *", s.checkAllSeriesNewSeason)
s.cron.Start()
}
func (s *Server) mustAddCron(spec string, cmd func()) {
if err := s.cron.AddFunc(spec, cmd); err != nil {
log.Errorf("add func error: %v", err)
panic(err)
}
}
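The scheduler deleted from this file (its duties apparently move into server/core elsewhere in this change) drives everything off robfig/cron. With the classic v1 import used here, specs have six fields with seconds first, and @every shorthands are accepted. A minimal sketch, assuming that v1 API:

package main

import (
	"fmt"
	"time"

	"github.com/robfig/cron" // v1: six-field specs, seconds first
)

func main() {
	c := cron.New()
	// every hour on the hour: sec min hour day-of-month month day-of-week
	if err := c.AddFunc("0 0 * * * *", func() { fmt.Println("hourly check", time.Now()) }); err != nil {
		panic(err)
	}
	// interval shorthand, as used for the task checker
	if err := c.AddFunc("@every 1m", func() { fmt.Println("task check") }); err != nil {
		panic(err)
	}
	c.Start()
	time.Sleep(2 * time.Minute) // keep the process alive; the real server blocks in gin's Run
}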
func (s *Server) checkTasks() {
log.Debug("begin check tasks...")
for id, t := range s.tasks {
if !t.Exists() {
log.Infof("task no longer exists: %v", id)
continue
}
log.Infof("task (%s) percentage done: %d%%", t.Name(), t.Progress())
if t.Progress() == 100 {
log.Infof("task is done: %v", t.Name())
s.sendMsg(fmt.Sprintf(message.DownloadComplete, t.Name()))
go func() {
if err := s.moveCompletedTask(id); err != nil {
log.Infof("post tasks for id %v fail: %v", id, err)
}
}()
}
}
}
func (s *Server) moveCompletedTask(id int) (err1 error) {
torrent := s.tasks[id]
r := s.db.GetHistory(id)
if r.Status == history.StatusUploading {
log.Infof("task %d is already uploading, skip", id)
return nil
}
s.db.SetHistoryStatus(r.ID, history.StatusUploading)
defer func() {
seasonNum, err := utils.SeasonId(r.TargetDir)
if err != nil {
log.Errorf("no season id: %v", r.TargetDir)
seasonNum = -1
}
if err1 != nil {
s.db.SetHistoryStatus(r.ID, history.StatusFail)
if r.EpisodeID != 0 {
s.db.SetEpisodeStatus(r.EpisodeID, episode.StatusMissing)
} else {
s.db.SetSeasonAllEpisodeStatus(r.MediaID, seasonNum, episode.StatusMissing)
}
s.sendMsg(fmt.Sprintf(message.ProcessingFailed, err))
} else {
delete(s.tasks, r.ID)
s.db.SetHistoryStatus(r.ID, history.StatusSuccess)
if r.EpisodeID != 0 {
s.db.SetEpisodeStatus(r.EpisodeID, episode.StatusDownloaded)
} else {
s.db.SetSeasonAllEpisodeStatus(r.MediaID, seasonNum, episode.StatusDownloaded)
}
s.sendMsg(fmt.Sprintf(message.ProcessingComplete, torrent.Name()))
torrent.Remove()
}
}()
series := s.db.GetMediaDetails(r.MediaID)
if series == nil {
return nil
}
st := s.db.GetStorage(series.StorageID)
log.Infof("move task files to target dir: %v", r.TargetDir)
var stImpl storage.Storage
if st.Implementation == storage1.ImplementationWebdav {
ws := st.ToWebDavSetting()
targetPath := ws.TvPath
if series.MediaType == media.MediaTypeMovie {
targetPath = ws.MoviePath
}
storageImpl, err := storage.NewWebdavStorage(ws.URL, ws.User, ws.Password, targetPath, ws.ChangeFileHash == "true")
if err != nil {
return errors.Wrap(err, "new webdav")
}
stImpl = storageImpl
} else if st.Implementation == storage1.ImplementationLocal {
ls := st.ToLocalSetting()
targetPath := ls.TvPath
if series.MediaType == media.MediaTypeMovie {
targetPath = ls.MoviePath
}
storageImpl, err := storage.NewLocalStorage(targetPath)
if err != nil {
return errors.Wrap(err, "new storage")
}
stImpl = storageImpl
}
if r.EpisodeID == 0 {
//season package download
if err := stImpl.Move(filepath.Join(s.db.GetDownloadDir(), torrent.Name()), r.TargetDir); err != nil {
return errors.Wrap(err, "move file")
}
} else {
if err := stImpl.Move(filepath.Join(s.db.GetDownloadDir(), torrent.Name()), filepath.Join(r.TargetDir, torrent.Name())); err != nil {
return errors.Wrap(err, "move file")
}
}
log.Infof("move downloaded files to target dir success, file: %v, target dir: %v", torrent.Name(), r.TargetDir)
return nil
}
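moveCompletedTask only touches the storage backend through two operations, Move and ReadDir (the latter used below in checkDownloadedSeriesFiles), with webdav and local implementations selected per storage record. The interface itself is not part of this diff; a hypothetical reconstruction based only on the calls visible here:

// Hypothetical sketch of polaris/pkg/storage's Storage interface, inferred from
// the two methods this diff calls; the real definition may differ.
package storage

import "io/fs"

type Storage interface {
	// Move relocates a completed download (file or season directory) into the
	// library path managed by the backend.
	Move(src, dst string) error
	// ReadDir lists the entries under dir on the backend, used to detect
	// already-downloaded episodes.
	ReadDir(dir string) ([]fs.DirEntry, error)
}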
func (s *Server) checkDownloadedSeriesFiles(m *ent.Media) error {
if m.MediaType != media.MediaTypeTv {
return nil
}
log.Infof("check files in directory: %s", m.TargetDir)
st := s.db.GetStorage(m.StorageID)
var storageImpl storage.Storage
switch st.Implementation {
case storage1.ImplementationLocal:
ls := st.ToLocalSetting()
targetPath := ls.TvPath
storageImpl1, err := storage.NewLocalStorage(targetPath)
if err != nil {
return errors.Wrap(err, "new local")
}
storageImpl = storageImpl1
case storage1.ImplementationWebdav:
ws := st.ToWebDavSetting()
targetPath := ws.TvPath
storageImpl1, err := storage.NewWebdavStorage(ws.URL, ws.User, ws.Password, targetPath, ws.ChangeFileHash == "true")
if err != nil {
return errors.Wrap(err, "new webdav")
}
storageImpl = storageImpl1
}
files, err := storageImpl.ReadDir(m.TargetDir)
if err != nil {
return errors.Wrapf(err, "read dir %s", m.TargetDir)
}
for _, in := range files {
if !in.IsDir() { //season dir, ignore file
continue
}
dir := filepath.Join(m.TargetDir, in.Name())
epFiles, err := storageImpl.ReadDir(dir)
if err != nil {
log.Errorf("read dir %s error: %v", dir, err)
continue
}
for _, ep := range epFiles {
log.Infof("found file: %v", ep.Name())
seNum, epNum, err := utils.FindSeasonEpisodeNum(ep.Name())
if err != nil {
log.Errorf("find season episode num error: %v", err)
continue
}
log.Infof("found match, season num %d, episode num %d", seNum, epNum)
ep, err := s.db.GetEpisode(m.ID, seNum, epNum)
if err != nil {
log.Error("update episode: %v", err)
continue
}
err = s.db.SetEpisodeStatus(ep.ID, episode.StatusDownloaded)
if err != nil {
log.Error("update episode: %v", err)
continue
}
}
}
return nil
}
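checkDownloadedSeriesFiles walks each season directory and recovers season/episode numbers from file names via utils.FindSeasonEpisodeNum. That helper isn't shown in this diff; a simple illustrative equivalent based on the usual SxxEyy convention:

package main

import (
	"fmt"
	"regexp"
	"strconv"
)

// Illustrative only; not the project's utils.FindSeasonEpisodeNum.
var seasonEpisodeRe = regexp.MustCompile(`(?i)\bS(\d{1,2})E(\d{1,3})\b`)

func findSeasonEpisodeNum(name string) (season, episode int, err error) {
	m := seasonEpisodeRe.FindStringSubmatch(name)
	if m == nil {
		return 0, 0, fmt.Errorf("no SxxEyy pattern in %q", name)
	}
	season, _ = strconv.Atoi(m[1])
	episode, _ = strconv.Atoi(m[2])
	return season, episode, nil
}

func main() {
	fmt.Println(findSeasonEpisodeNum("Some.Show.S02E05.1080p.WEB-DL.mkv")) // 2 5 <nil>
	fmt.Println(findSeasonEpisodeNum("behind-the-scenes.mkv"))             // 0 0 and an error
}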
type Task struct {
//Processing bool
pkg.Torrent
}
func (s *Server) downloadTvSeries() {
log.Infof("begin check all tv series resources")
allSeries := s.db.GetMediaWatchlist(media.MediaTypeTv)
for _, series := range allSeries {
tvDetail := s.db.GetMediaDetails(series.ID)
for _, ep := range tvDetail.Episodes {
if !series.DownloadHistoryEpisodes { // configured not to download already-aired episodes; only download future ones
t, err := time.Parse("2006-01-02", ep.AirDate)
if err != nil {
log.Error("air date not known, skip: %v", ep.Title)
continue
}
if series.CreatedAt.Sub(t) > 24*time.Hour { // skip episodes that aired before the series was added to the watchlist
continue
}
}
if ep.Status != episode.StatusMissing { // already downloaded or downloading; skip
continue
}
name, err := s.searchAndDownload(series.ID, ep.SeasonNumber, ep.EpisodeNumber)
if err != nil {
log.Infof("cannot find resource to download for %s: %v", ep.Title, err)
} else {
log.Infof("begin download torrent resource: %v", name)
}
}
}
}
func (s *Server) downloadMovie() {
log.Infof("begin check all movie resources")
allSeries := s.db.GetMediaWatchlist(media.MediaTypeMovie)
for _, series := range allSeries {
detail := s.db.GetMediaDetails(series.ID)
if len(detail.Episodes) == 0 {
log.Errorf("no related dummy episode: %v", detail.NameEn)
continue
}
ep := detail.Episodes[0]
if ep.Status == episode.StatusDownloaded {
continue
}
if err := s.downloadMovieSingleEpisode(ep); err != nil {
log.Errorf("download movie error: %v", err)
}
}
}
func (s *Server) downloadMovieSingleEpisode(ep *ent.Episode) error {
trc, err := s.getDownloadClient()
if err != nil {
return errors.Wrap(err, "connect transmission")
}
res, err := core.SearchMovie(s.db, ep.MediaID, true)
if err != nil {
return errors.Wrap(err, "search movie")
}
r1 := res[0]
log.Infof("begin download torrent resource: %v", r1.Name)
torrent, err := trc.Download(r1.Link, s.db.GetDownloadDir())
if err != nil {
return errors.Wrap(err, "downloading")
}
torrent.Start()
history, err := s.db.SaveHistoryRecord(ent.History{
MediaID: ep.MediaID,
EpisodeID: ep.ID,
SourceTitle: r1.Name,
TargetDir: "./",
Status: history.StatusRunning,
Size: r1.Size,
Saved: torrent.Save(),
})
if err != nil {
log.Errorf("save history error: %v", err)
}
s.tasks[history.ID] = &Task{Torrent: torrent}
s.db.SetEpisodeStatus(ep.ID, episode.StatusDownloading)
return nil
}
func (s *Server) checkAllSeriesNewSeason() {
log.Infof("begin checking series all new season")
allSeries := s.db.GetMediaWatchlist(media.MediaTypeTv)
for _, series := range allSeries {
err := s.checkSeiesNewSeason(series)
if err != nil {
log.Errorf("check series new season error: series name %v, error: %v", series.NameEn, err)
}
}
}
func (s *Server) checkSeiesNewSeason(media *ent.Media) error {
d, err := s.MustTMDB().GetTvDetails(media.TmdbID, s.language)
if err != nil {
return errors.Wrap(err, "tmdb")
}
lastsSason := d.NumberOfSeasons
seasonDetail, err := s.MustTMDB().GetSeasonDetails(media.TmdbID, lastsSason, s.language)
if err != nil {
return errors.Wrap(err, "tmdb season")
}
for _, ep := range seasonDetail.Episodes {
epDb, err := s.db.GetEpisode(media.ID, ep.SeasonNumber, ep.EpisodeNumber)
if err != nil {
if ent.IsNotFound(err) {
log.Infof("add new episode: %+v", ep)
episode := &ent.Episode{
MediaID: media.ID,
SeasonNumber: ep.SeasonNumber,
EpisodeNumber: ep.EpisodeNumber,
Title: ep.Name,
Overview: ep.Overview,
AirDate: ep.AirDate,
Status: episode.StatusMissing,
}
s.db.SaveEposideDetail2(episode)
}
} else { //update episode
if ep.Name != epDb.Title || ep.Overview != epDb.Overview || ep.AirDate != epDb.AirDate {
log.Infof("update new episode: %+v", ep)
s.db.UpdateEpiode2(epDb.ID, ep.Name, ep.Overview, ep.AirDate)
}
}
}
return nil
}


@@ -8,14 +8,12 @@ import (
"polaris/db"
"polaris/log"
"polaris/pkg/tmdb"
"polaris/pkg/transmission"
"polaris/server/core"
"polaris/ui"
"time"
ginzap "github.com/gin-contrib/zap"
"github.com/gin-contrib/static"
"github.com/robfig/cron"
"github.com/gin-gonic/gin"
"github.com/pkg/errors"
@@ -23,32 +21,30 @@ import (
func NewServer(db *db.Client) *Server {
r := gin.Default()
return &Server{
r: r,
db: db,
cron: cron.New(),
tasks: make(map[int]*Task),
s := &Server{
r: r,
db: db,
language: db.GetLanguage(),
}
s.core = core.NewClient(db, s.language)
return s
}
type Server struct {
r *gin.Engine
db *db.Client
cron *cron.Cron
core *core.Client
language string
tasks map[int]*Task
jwtSerect string
}
func (s *Server) Serve() error {
s.scheduler()
s.reloadTasks()
s.restoreProxy()
s.core.Init()
s.jwtSerect = s.db.GetSetting(db.JwtSerectKey)
//st, _ := fs.Sub(ui.Web, "build/web")
s.r.Use(static.Serve("/", static.EmbedFolder(ui.Web, "build/web")))
s.r.Use(ginzap.Ginzap(log.Logger().Desugar(), time.RFC3339, false))
//s.r.Use(ginzap.Ginzap(log.Logger().Desugar(), time.RFC3339, false))
s.r.Use(ginzap.RecoveryWithZap(log.Logger().Desugar(), true))
log.SetLogLevel(s.db.GetSetting(db.SettingLogLevel)) //restore log level
@@ -72,6 +68,8 @@ func (s *Server) Serve() error {
setting.GET("/about", HttpHandler(s.About))
setting.POST("/parse/tv", HttpHandler(s.ParseTv))
setting.POST("/parse/movie", HttpHandler(s.ParseMovie))
setting.POST("/monitoring", HttpHandler(s.ChangeEpisodeMonitoring))
setting.POST("/cron/trigger", HttpHandler(s.TriggerCronJob))
}
activity := api.Group("/activity")
{
@@ -84,6 +82,7 @@ func (s *Server) Serve() error {
tv := api.Group("/media")
{
tv.GET("/search", HttpHandler(s.SearchMedia))
tv.POST("/edit", HttpHandler(s.EditMediaMetadata))
tv.POST("/tv/watchlist", HttpHandler(s.AddTv2Watchlist))
tv.GET("/tv/watchlist", HttpHandler(s.GetTvWatchlist))
tv.POST("/torrents", HttpHandler(s.SearchAvailableTorrents))
@@ -92,8 +91,9 @@ func (s *Server) Serve() error {
tv.GET("/movie/watchlist", HttpHandler(s.GetMovieWatchlist))
tv.GET("/record/:id", HttpHandler(s.GetMediaDetails))
tv.DELETE("/record/:id", HttpHandler(s.DeleteFromWatchlist))
tv.GET("/resolutions", HttpHandler(s.GetAvailableResolutions))
tv.GET("/suggest/:tmdb_id", HttpHandler(s.SuggestedSeriesFolderName))
tv.GET("/suggest/tv/:tmdb_id", HttpHandler(s.SuggestedSeriesFolderName))
tv.GET("/suggest/movie/:tmdb_id", HttpHandler(s.SuggestedMovieFolderName))
tv.GET("/downloadall/:id", HttpHandler(s.DownloadAll))
}
indexer := api.Group("/indexer")
{
@@ -122,17 +122,24 @@ func (s *Server) Serve() error {
notifier.DELETE("/id/:id", HttpHandler(s.DeleteNotificationClient))
notifier.POST("/add", HttpHandler(s.AddNotificationClient))
}
importlist := api.Group("/importlist")
{
importlist.GET("/", HttpHandler(s.getAllImportLists))
importlist.POST("/add", HttpHandler(s.addImportlist))
importlist.DELETE("/delete", HttpHandler(s.deleteImportList))
}
s.language = s.db.GetLanguage()
return s.r.Run(":8080")
}
func (s *Server) TMDB() (*tmdb.Client, error) {
api := s.db.GetSetting(db.SettingTmdbApiKey)
if api == "" {
return nil, errors.New("tmdb api not set")
return nil, errors.New("TMDB apiKey not set")
}
return tmdb.NewClient(api)
proxy := s.db.GetSetting(db.SettingProxy)
adult := s.db.GetSetting(db.SettingEnableTmdbAdultContent)
return tmdb.NewClient(api, proxy, adult == "true")
}
func (s *Server) MustTMDB() *tmdb.Client {
@@ -143,22 +150,6 @@ func (s *Server) MustTMDB() *tmdb.Client {
return t
}
func (s *Server) reloadTasks() {
runningTasks := s.db.GetRunningHistories()
if len(runningTasks) == 0 {
return
}
for _, t := range runningTasks {
log.Infof("reloading task: %d %s", t.ID, t.SourceTitle)
torrent, err := transmission.ReloadTorrent(t.Saved)
if err != nil {
log.Errorf("relaod task %s failed: %v", t.SourceTitle, err)
continue
}
s.tasks[t.ID] = &Task{Torrent: torrent}
}
}
func (s *Server) proxyPosters(c *gin.Context) {
remote, _ := url.Parse("https://image.tmdb.org")
proxy := httputil.NewSingleHostReverseProxy(remote)


@@ -1,12 +1,14 @@
package server
import (
"encoding/json"
"fmt"
"net/http"
"net/url"
"html/template"
"polaris/db"
"polaris/ent"
"polaris/log"
"polaris/pkg/transmission"
"polaris/pkg/utils"
"strconv"
"github.com/gin-gonic/gin"
@@ -14,10 +16,16 @@ import (
)
type GeneralSettings struct {
TmdbApiKey string `json:"tmdb_api_key"`
DownloadDir string `json:"download_dir"`
LogLevel string `json:"log_level"`
Proxy string `json:"proxy"`
TmdbApiKey string `json:"tmdb_api_key"`
DownloadDir string `json:"download_dir"`
LogLevel string `json:"log_level"`
Proxy string `json:"proxy"`
EnablePlexmatch bool `json:"enable_plexmatch"`
EnableNfo bool `json:"enable_nfo"`
AllowQiangban bool `json:"allow_qiangban"`
EnableAdultContent bool `json:"enable_adult_content"`
TvNamingFormat string `json:"tv_naming_format"`
MovieNamingFormat string `json:"movie_naming_format"`
}
func (s *Server) SetSetting(c *gin.Context) (interface{}, error) {
@@ -25,6 +33,8 @@ func (s *Server) SetSetting(c *gin.Context) (interface{}, error) {
if err := c.ShouldBindJSON(&in); err != nil {
return nil, errors.Wrap(err, "bind json")
}
utils.TrimFields(&in)
log.Infof("set setting input: %+v", in)
if in.TmdbApiKey != "" {
if err := s.db.SetSetting(db.SettingTmdbApiKey, in.TmdbApiKey); err != nil {
@@ -42,48 +52,87 @@ func (s *Server) SetSetting(c *gin.Context) (interface{}, error) {
if err := s.db.SetSetting(db.SettingLogLevel, in.LogLevel); err != nil {
return nil, errors.Wrap(err, "save log level")
}
}
if in.TvNamingFormat != "" {
if _, err := template.New("test").Parse(in.TvNamingFormat); err != nil {
return nil, errors.Wrap(err, "tv format")
}
s.setProxy(in.Proxy)
return nil, nil
}
func (s *Server) setProxy(proxy string) {
proxyUrl, err := url.Parse(proxy)
tp := http.DefaultTransport.(*http.Transport)
if proxy == "" || err != nil {
log.Warnf("proxy url not valid, disabling: %v", proxy)
tp.Proxy = nil
s.db.SetSetting(db.SettingProxy, "")
s.db.SetSetting(db.SettingTvNamingFormat, in.TvNamingFormat)
} else {
log.Infof("set proxy to %v", proxy)
tp.Proxy = http.ProxyURL(proxyUrl)
s.db.SetSetting(db.SettingProxy, proxy)
s.db.SetSetting(db.SettingTvNamingFormat, "")
}
}
func (s *Server) restoreProxy() {
p := s.db.GetSetting(db.SettingProxy)
s.setProxy(p)
if in.MovieNamingFormat != "" {
if _, err := template.New("test").Parse(in.MovieNamingFormat); err != nil {
return nil, errors.Wrap(err, "movie format")
}
s.db.SetSetting(db.SettingMovieNamingFormat, in.MovieNamingFormat)
} else {
s.db.SetSetting(db.SettingMovieNamingFormat, "")
}
plexmatchEnabled := s.db.GetSetting(db.SettingPlexMatchEnabled)
if in.EnablePlexmatch && plexmatchEnabled != "true" {
s.db.SetSetting(db.SettingPlexMatchEnabled, "true")
} else if !in.EnablePlexmatch && plexmatchEnabled != "false" {
s.db.SetSetting(db.SettingPlexMatchEnabled, "false")
}
s.db.SetSetting(db.SettingProxy, in.Proxy)
if in.AllowQiangban {
s.db.SetSetting(db.SettingAllowQiangban, "true")
} else {
s.db.SetSetting(db.SettingAllowQiangban, "false")
}
if in.EnableNfo {
s.db.SetSetting(db.SettingNfoSupportEnabled, "true")
} else {
s.db.SetSetting(db.SettingNfoSupportEnabled, "false")
}
if in.EnableAdultContent {
s.db.SetSetting(db.SettingEnableTmdbAdultContent, "true")
} else {
s.db.SetSetting(db.SettingEnableTmdbAdultContent, "false")
}
return nil, nil
}
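The naming formats are validated by parsing them with text/template before being stored, so a malformed format is rejected at save time rather than at rename time. A standalone sketch of that check; the field names in the sample format (.Title, .Year) are purely illustrative, since the variables Polaris actually exposes to the template are not part of this diff:

package main

import (
	"fmt"
	"strings"
	"text/template"
)

func validateFormat(format string) error {
	_, err := template.New("test").Parse(format)
	return err
}

func main() {
	fmt.Println(validateFormat("{{.Title}} ({{.Year}})")) // <nil>
	fmt.Println(validateFormat("{{.Title"))               // parse error: unclosed action

	// once accepted, the format can be rendered like this
	t := template.Must(template.New("name").Parse("{{.Title}} ({{.Year}})"))
	var b strings.Builder
	_ = t.Execute(&b, map[string]string{"Title": "Example Movie", "Year": "2010"})
	fmt.Println(b.String()) // Example Movie (2010)
}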
func (s *Server) GetSetting(c *gin.Context) (interface{}, error) {
tmdb := s.db.GetSetting(db.SettingTmdbApiKey)
downloadDir := s.db.GetSetting(db.SettingDownloadDir)
logLevel := s.db.GetSetting(db.SettingLogLevel)
plexmatchEnabled := s.db.GetSetting(db.SettingPlexMatchEnabled)
allowQiangban := s.db.GetSetting(db.SettingAllowQiangban)
enableNfo := s.db.GetSetting(db.SettingNfoSupportEnabled)
enableAdult := s.db.GetSetting(db.SettingEnableTmdbAdultContent)
tvFormat := s.db.GetTvNamingFormat()
movieFormat := s.db.GetMovingNamingFormat()
return &GeneralSettings{
TmdbApiKey: tmdb,
DownloadDir: downloadDir,
LogLevel: logLevel,
Proxy: s.db.GetSetting(db.SettingProxy),
TmdbApiKey: tmdb,
DownloadDir: downloadDir,
LogLevel: logLevel,
Proxy: s.db.GetSetting(db.SettingProxy),
EnablePlexmatch: plexmatchEnabled == "true",
AllowQiangban: allowQiangban == "true",
EnableNfo: enableNfo == "true",
EnableAdultContent: enableAdult == "true",
TvNamingFormat: tvFormat,
MovieNamingFormat: movieFormat,
}, nil
}
type addTorznabIn struct {
Name string `json:"name" binding:"required"`
URL string `json:"url" binding:"required"`
ApiKey string `json:"api_key" binding:"required"`
ID int `json:"id"`
Name string `json:"name" binding:"required"`
URL string `json:"url" binding:"required"`
ApiKey string `json:"api_key" binding:"required"`
Disabled bool `json:"disabled"`
Priority int `json:"priority"`
SeedRatio float32 `json:"seed_ratio"`
}
func (s *Server) AddTorznabInfo(c *gin.Context) (interface{}, error) {
@@ -91,10 +140,32 @@ func (s *Server) AddTorznabInfo(c *gin.Context) (interface{}, error) {
if err := c.ShouldBindJSON(&in); err != nil {
return nil, errors.Wrap(err, "bind json")
}
err := s.db.SaveTorznabInfo(in.Name, db.TorznabSetting{
utils.TrimFields(&in)
log.Infof("add indexer settings: %+v", in)
setting := db.TorznabSetting{
URL: in.URL,
ApiKey: in.ApiKey,
})
}
data, err := json.Marshal(setting)
if err != nil {
return nil, errors.Wrap(err, "marshal json")
}
if in.Priority > 128 {
in.Priority = 128
}
indexer := ent.Indexers{
ID: in.ID,
Name: in.Name,
Implementation: "torznab",
Settings: string(data),
Priority: in.Priority,
Disabled: in.Disabled,
SeedRatio: in.SeedRatio,
}
err = s.db.SaveIndexer(&indexer)
if err != nil {
return nil, errors.Wrap(err, "add ")
}
@@ -119,7 +190,7 @@ func (s *Server) GetAllIndexers(c *gin.Context) (interface{}, error) {
return indexers, nil
}
func (s *Server) getDownloadClient() (*transmission.Client, error) {
func (s *Server) getDownloadClient() (*transmission.Client, *ent.DownloadClients, error) {
tr := s.db.GetTransmission()
trc, err := transmission.NewClient(transmission.Config{
URL: tr.URL,
@@ -127,9 +198,9 @@ func (s *Server) getDownloadClient() (*transmission.Client, error) {
Password: tr.Password,
})
if err != nil {
return nil, errors.Wrap(err, "connect transmission")
return nil, nil, errors.Wrap(err, "connect transmission")
}
return trc, nil
return trc, tr, nil
}
type downloadClientIn struct {
@@ -145,6 +216,7 @@ func (s *Server) AddDownloadClient(c *gin.Context) (interface{}, error) {
if err := c.ShouldBindJSON(&in); err != nil {
return nil, errors.Wrap(err, "bind json")
}
utils.TrimFields(&in)
//test connection
_, err := transmission.NewClient(transmission.Config{
URL: in.URL,
@@ -177,3 +249,46 @@ func (s *Server) DeleteDownloadCLient(c *gin.Context) (interface{}, error) {
s.db.DeleteDownloadCLient(id)
return "success", nil
}
type episodeMonitoringIn struct {
EpisodeID int `json:"episode_id"`
Monitor bool `json:"monitor"`
}
func (s *Server) ChangeEpisodeMonitoring(c *gin.Context) (interface{}, error) {
var in episodeMonitoringIn
if err := c.ShouldBindJSON(&in); err != nil {
return nil, errors.Wrap(err, "bind")
}
s.db.SetEpisodeMonitoring(in.EpisodeID, in.Monitor)
return "success", nil
}
func (s *Server) EditMediaMetadata(c *gin.Context) (interface{}, error) {
var in db.EditMediaData
if err := c.ShouldBindJSON(&in); err != nil {
return nil, errors.Wrap(err, "bind")
}
err := s.db.EditMediaMetadata(in)
if err != nil {
return nil, errors.Wrap(err, "save db")
}
return "success", nil
}
type triggerCronJobIn struct {
JobName string `json:"job_name"`
}
func (s *Server) TriggerCronJob(c *gin.Context) (interface{}, error) {
var in triggerCronJobIn
if err := c.ShouldBindJSON(&in); err != nil {
return nil, errors.Wrap(err, "bind")
}
err := s.core.TriggerCronJob(in.JobName)
if err != nil {
return nil, err
}
return "success", nil
}


@@ -3,11 +3,11 @@ package server
import (
"fmt"
"polaris/db"
"polaris/log"
"polaris/pkg/storage"
"polaris/pkg/utils"
"strconv"
"strings"
"github.com/gin-gonic/gin"
"github.com/pkg/errors"
@@ -23,11 +23,12 @@ func (s *Server) AddStorage(c *gin.Context) (interface{}, error) {
if err := c.ShouldBindJSON(&in); err != nil {
return nil, errors.Wrap(err, "bind json")
}
utils.TrimFields(&in)
if in.Implementation == "webdav" {
//test webdav
wd := in.ToWebDavSetting()
st, err := storage.NewWebdavStorage(wd.URL, wd.User, wd.Password, wd.TvPath, false)
st, err := storage.NewWebdavStorage(wd.URL, wd.User, wd.Password, in.TvPath, false)
if err != nil {
return nil, errors.Wrap(err, "new webdav")
}
@@ -60,30 +61,22 @@ func (s *Server) SuggestedSeriesFolderName(c *gin.Context) (interface{}, error)
if err != nil {
return nil, fmt.Errorf("id is not int: %v", ids)
}
var name, originalName, year string
d, err := s.MustTMDB().GetTvDetails(id, s.language)
name, err := s.core.SuggestedSeriesFolderName(id)
if err != nil {
d1, err := s.MustTMDB().GetMovieDetails(id, s.language)
if err != nil {
return nil, errors.Wrap(err, "get movie details")
}
name = d1.Title
originalName = d1.OriginalTitle
year = strings.Split(d1.ReleaseDate, "-")[0]
} else {
name = d.Name
originalName = d.OriginalName
year = strings.Split(d.FirstAirDate, "-")[0]
return nil, err
}
return gin.H{"name": name}, nil
}
func (s *Server) SuggestedMovieFolderName(c *gin.Context) (interface{}, error) {
ids := c.Param("tmdb_id")
id, err := strconv.Atoi(ids)
if err != nil {
return nil, fmt.Errorf("id is not int: %v", ids)
}
name, err := s.core.SuggestedMovieFolderName(id)
if err != nil {
return nil, err
}
name = fmt.Sprintf("%s %s", name, originalName)
if !utils.ContainsChineseChar(name) {
name = originalName
}
if year != "" {
name = fmt.Sprintf("%s (%s)", name, year)
}
log.Infof("tv series of tmdb id %v suggestting name is %v", id, name)
return gin.H{"name": name}, nil
}


@@ -1,9 +1,6 @@
package server
import (
"fmt"
"io"
"net/http"
"os"
"path/filepath"
"polaris/db"
@@ -11,10 +8,9 @@ import (
"polaris/ent/episode"
"polaris/ent/media"
"polaris/log"
"polaris/server/core"
"strconv"
"time"
tmdb "github.com/cyruzin/golang-tmdb"
"github.com/gin-gonic/gin"
"github.com/pkg/errors"
)
@@ -43,7 +39,11 @@ func (s *Server) SearchMedia(c *gin.Context) (interface{}, error) {
return nil, errors.Wrap(err, "bind query")
}
log.Infof("search media with keyword: %v", q.Query)
r, err := s.MustTMDB().SearchMedia(q.Query, s.language, q.Page)
tmdb, err := s.TMDB()
if err != nil {
return nil, err
}
r, err := tmdb.SearchMedia(q.Query, s.language, q.Page)
if err != nil {
return nil, errors.Wrap(err, "search tv")
}
@@ -60,191 +60,32 @@ type addWatchlistIn struct {
TmdbID int `json:"tmdb_id" binding:"required"`
StorageID int `json:"storage_id" `
Resolution string `json:"resolution" binding:"required"`
Folder string `json:"folder"`
Folder string `json:"folder" binding:"required"`
DownloadHistoryEpisodes bool `json:"download_history_episodes"` //for tv
SizeMin int `json:"size_min"`
SizeMax int `json:"size_max"`
}
func (s *Server) AddTv2Watchlist(c *gin.Context) (interface{}, error) {
var in addWatchlistIn
var in core.AddWatchlistIn
if err := c.ShouldBindJSON(&in); err != nil {
return nil, errors.Wrap(err, "bind query")
}
if in.Folder == "" {
return nil, errors.New("folder should be provided")
}
detailCn, err := s.MustTMDB().GetTvDetails(in.TmdbID, db.LanguageCN)
if err != nil {
return nil, errors.Wrap(err, "get tv detail")
}
var nameCn = detailCn.Name
detailEn, _ := s.MustTMDB().GetTvDetails(in.TmdbID, db.LanguageEN)
var nameEn = detailEn.Name
var detail *tmdb.TVDetails
if s.language == "" || s.language == db.LanguageCN {
detail = detailCn
} else {
detail = detailEn
}
log.Infof("find detail for tv id %d: %v", in.TmdbID, detail)
var epIds []int
for _, season := range detail.Seasons {
seasonId := season.SeasonNumber
se, err := s.MustTMDB().GetSeasonDetails(int(detail.ID), seasonId, s.language)
if err != nil {
log.Errorf("get season detail (%s) error: %v", detail.Name, err)
continue
}
for _, ep := range se.Episodes {
epid, err := s.db.SaveEposideDetail(&ent.Episode{
SeasonNumber: seasonId,
EpisodeNumber: ep.EpisodeNumber,
Title: ep.Name,
Overview: ep.Overview,
AirDate: ep.AirDate,
})
if err != nil {
log.Errorf("save episode info error: %v", err)
continue
}
epIds = append(epIds, epid)
}
}
r, err := s.db.AddMediaWatchlist(&ent.Media{
TmdbID: int(detail.ID),
MediaType: media.MediaTypeTv,
NameCn: nameCn,
NameEn: nameEn,
OriginalName: detail.OriginalName,
Overview: detail.Overview,
AirDate: detail.FirstAirDate,
Resolution: media.Resolution(in.Resolution),
StorageID: in.StorageID,
TargetDir: in.Folder,
DownloadHistoryEpisodes: in.DownloadHistoryEpisodes,
}, epIds)
if err != nil {
return nil, errors.Wrap(err, "add to list")
}
go func() {
if err := s.downloadPoster(detail.PosterPath, r.ID); err != nil {
log.Errorf("download poster error: %v", err)
}
if err := s.downloadBackdrop(detail.BackdropPath, r.ID); err != nil {
log.Errorf("download poster error: %v", err)
}
if err := s.checkDownloadedSeriesFiles(r); err != nil {
log.Errorf("check downloaded files error: %v", err)
}
}()
log.Infof("add tv %s to watchlist success", detail.Name)
return nil, nil
return s.core.AddTv2Watchlist(in)
}
func (s *Server) AddMovie2Watchlist(c *gin.Context) (interface{}, error) {
var in addWatchlistIn
var in core.AddWatchlistIn
if err := c.ShouldBindJSON(&in); err != nil {
return nil, errors.Wrap(err, "bind query")
}
detailCn, err := s.MustTMDB().GetMovieDetails(in.TmdbID, db.LanguageCN)
if err != nil {
return nil, errors.Wrap(err, "get movie detail")
}
var nameCn = detailCn.Title
detailEn, _ := s.MustTMDB().GetMovieDetails(in.TmdbID, db.LanguageEN)
var nameEn = detailEn.Title
var detail *tmdb.MovieDetails
if s.language == "" || s.language == db.LanguageCN {
detail = detailCn
} else {
detail = detailEn
}
log.Infof("find detail for movie id %d: %v", in.TmdbID, detail)
epid, err := s.db.SaveEposideDetail(&ent.Episode{
SeasonNumber: 1,
EpisodeNumber: 1,
Title: "dummy episode for movies",
Overview: "dummy episode for movies",
AirDate: detail.ReleaseDate,
})
if err != nil {
return nil, errors.Wrap(err, "add dummy episode")
}
log.Infof("added dummy episode for movie: %v", nameEn)
r, err := s.db.AddMediaWatchlist(&ent.Media{
TmdbID: int(detail.ID),
MediaType: media.MediaTypeMovie,
NameCn: nameCn,
NameEn: nameEn,
OriginalName: detail.OriginalTitle,
Overview: detail.Overview,
AirDate: detail.ReleaseDate,
Resolution: media.Resolution(in.Resolution),
StorageID: in.StorageID,
TargetDir: "./",
}, []int{epid})
if err != nil {
return nil, errors.Wrap(err, "add to list")
}
go func() {
if err := s.downloadPoster(detail.PosterPath, r.ID); err != nil {
log.Errorf("download poster error: %v", err)
}
if err := s.downloadBackdrop(detail.BackdropPath, r.ID); err != nil {
log.Errorf("download backdrop error: %v", err)
}
}()
log.Infof("add movie %s to watchlist success", detail.Title)
return nil, nil
}
func (s *Server) downloadBackdrop(path string, mediaID int) error {
url := "https://image.tmdb.org/t/p/original" + path
return s.downloadImage(url, mediaID, "backdrop.jpg")
}
func (s *Server) downloadPoster(path string, mediaID int) error {
var url = "https://image.tmdb.org/t/p/original" + path
return s.downloadImage(url, mediaID, "poster.jpg")
}
func (s *Server) downloadImage(url string, mediaID int, name string) error {
log.Infof("try to download image: %v", url)
var resp, err = http.Get(url)
if err != nil {
return errors.Wrap(err, "http get")
}
targetDir := fmt.Sprintf("%v/%d", db.ImgPath, mediaID)
os.MkdirAll(targetDir, 0777)
//ext := filepath.Ext(path)
targetFile := filepath.Join(targetDir, name)
f, err := os.Create(targetFile)
if err != nil {
return errors.Wrap(err, "new file")
}
defer f.Close()
_, err = io.Copy(f, resp.Body)
if err != nil {
return errors.Wrap(err, "copy http response")
}
log.Infof("image successfully downlaoded: %v", targetFile)
return nil
return s.core.AddMovie2Watchlist(in)
}
type MediaWithStatus struct {
*ent.Media
Status string `json:"status"`
MonitoredNum int `json:"monitored_num"`
DownloadedNum int `json:"downloaded_num"`
}
//missing: episode aired missing
@@ -257,24 +98,18 @@ func (s *Server) GetTvWatchlist(c *gin.Context) (interface{}, error) {
res := make([]MediaWithStatus, len(list))
for i, item := range list {
var ms = MediaWithStatus{
Media: item,
Status: "downloaded",
Media: item,
MonitoredNum: 0,
DownloadedNum: 0,
}
details := s.db.GetMediaDetails(item.ID)
for _, ep := range details.Episodes {
if ep.SeasonNumber == 0 {
continue
}
t, err := time.Parse("2006-01-02", ep.AirDate)
if err != nil { //airdate not exist
ms.Status = "monitoring"
} else {
if item.CreatedAt.Sub(t) > 24*time.Hour { // skip episodes that aired before the series was added to the watchlist
continue
}
if ep.Status == episode.StatusMissing {
ms.Status = "monitoring"
if ep.Monitored {
ms.MonitoredNum++
if ep.Status == episode.StatusDownloaded {
ms.DownloadedNum++
}
}
}
@@ -288,15 +123,16 @@ func (s *Server) GetMovieWatchlist(c *gin.Context) (interface{}, error) {
res := make([]MediaWithStatus, len(list))
for i, item := range list {
var ms = MediaWithStatus{
Media: item,
Status: "monitoring",
Media: item,
MonitoredNum: 1,
DownloadedNum: 0,
}
dummyEp, err := s.db.GetMovieDummyEpisode(item.ID)
if err != nil {
log.Errorf("get dummy episode: %v", err)
} else {
if dummyEp.Status != episode.StatusMissing {
ms.Status = "downloaded"
if dummyEp.Status == episode.StatusDownloaded {
ms.DownloadedNum++
}
}
res[i] = ms
@@ -304,6 +140,11 @@ func (s *Server) GetMovieWatchlist(c *gin.Context) (interface{}, error) {
return res, nil
}
type MediaDetails struct {
*db.MediaDetails
Storage *ent.Storage `json:"storage"`
}
func (s *Server) GetMediaDetails(c *gin.Context) (interface{}, error) {
ids := c.Param("id")
id, err := strconv.Atoi(ids)
@@ -311,15 +152,8 @@ func (s *Server) GetMediaDetails(c *gin.Context) (interface{}, error) {
return nil, errors.Wrap(err, "convert")
}
detail := s.db.GetMediaDetails(id)
return detail, nil
}
func (s *Server) GetAvailableResolutions(c *gin.Context) (interface{}, error) {
return []db.ResolutionType{
db.R720p,
db.R1080p,
db.R4k,
}, nil
st := s.db.GetStorage(detail.StorageID)
return MediaDetails{MediaDetails: detail, Storage: &st.Storage}, nil
}
func (s *Server) DeleteFromWatchlist(c *gin.Context) (interface{}, error) {


@@ -4,7 +4,7 @@
# This file should be version controlled and should not be manually edited.
version:
revision: "761747bfc538b5af34aa0d3fac380f1bc331ec49"
revision: "80c2e84975bbd28ecf5f8d4bd4ca5a2490bfc819"
channel: "stable"
project_type: app
@@ -13,26 +13,11 @@ project_type: app
migration:
platforms:
- platform: root
create_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
base_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
- platform: android
create_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
base_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
create_revision: 80c2e84975bbd28ecf5f8d4bd4ca5a2490bfc819
base_revision: 80c2e84975bbd28ecf5f8d4bd4ca5a2490bfc819
- platform: ios
create_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
base_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
- platform: linux
create_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
base_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
- platform: macos
create_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
base_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
- platform: web
create_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
base_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
- platform: windows
create_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
base_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
create_revision: 80c2e84975bbd28ecf5f8d4bd4ca5a2490bfc819
base_revision: 80c2e84975bbd28ecf5f8d4bd4ca5a2490bfc819
# User provided section


@@ -1,16 +1,17 @@
import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:percent_indicator/circular_percent_indicator.dart';
import 'package:ui/providers/activity.dart';
import 'package:ui/widgets/utils.dart';
import 'package:ui/widgets/progress_indicator.dart';
import 'package:ui/widgets/utils.dart';
import 'package:ui/widgets/widgets.dart';
import 'package:timeago/timeago.dart' as timeago;
class ActivityPage extends ConsumerStatefulWidget {
const ActivityPage({super.key});
static const route = "/activities";
@override
_ActivityPageState createState() => _ActivityPageState();
ConsumerState<ConsumerStatefulWidget> createState() => _ActivityPageState();
}
class _ActivityPageState extends ConsumerState<ActivityPage>
@@ -53,28 +54,99 @@ class _ActivityPageState extends ConsumerState<ActivityPage>
],
),
Builder(builder: (context) {
var activitiesWatcher = ref.watch(activitiesDataProvider("active"));
AsyncValue<List<Activity>>? activitiesWatcher;
if (selectedTab == 1) {
activitiesWatcher = ref.watch(activitiesDataProvider("archive"));
} else if (selectedTab == 0) {
activitiesWatcher = ref.watch(activitiesDataProvider("active"));
}
return activitiesWatcher.when(
return activitiesWatcher!.when(
data: (activities) {
return SingleChildScrollView(
child: PaginatedDataTable(
rowsPerPage: 10,
columns: const [
DataColumn(label: Text("#"), numeric: true),
DataColumn(label: Text("名称")),
DataColumn(label: Text("开始时间")),
DataColumn(label: Text("状态")),
DataColumn(label: Text("操作"))
],
source: ActivityDataSource(
activities: activities,
onDelete: selectedTab == 0 ? onDelete() : null),
),
);
return Flexible(
child: ListView.builder(
itemCount: activities.length,
itemBuilder: (context, index) {
final ac = activities[index];
return Column(
children: [
ListTile(
dense: true,
leading: () {
if (ac.status == "uploading") {
return const SizedBox(
width: 20,
height: 20,
child: Tooltip(
message: "正在上传到指定存储",
child: CircularProgressIndicator(),
));
} else if (ac.status == "fail") {
return const Tooltip(
message: "下载失败",
child: Icon(
Icons.close,
color: Colors.red,
));
} else if (ac.status == "seeding") {
//seeding
return Tooltip(
message: "做种中",
child: Icon(
Icons.keyboard_double_arrow_up,
color: Theme.of(context)
.colorScheme
.inversePrimary,
),
);
} else if (ac.status == "success") {
return const Tooltip(
message: "下载成功",
child: Icon(
Icons.check,
color: Colors.green,
),
);
}
double p = ac.progress == null
? 0
: ac.progress!.toDouble() / 100;
return Tooltip(
message: "${ac.progress}%",
child: CircularProgressIndicator(
backgroundColor: Colors.black26,
value: p,
),
);
}(),
title: Text((ac.sourceTitle ?? "")),
subtitle: Opacity(
opacity: 0.7,
child: Wrap(
spacing: 10,
children: [
Text("开始时间:${timeago.format(ac.date!)}"),
Text("大小:${(ac.size ?? 0).readableFileSize()}"),
ac.seedRatio > 0
? Text("分享率:${ac.seedRatio}")
: SizedBox()
],
),
),
trailing: selectedTab == 0
? IconButton(
tooltip: "删除任务",
onPressed: () => onDelete()(ac.id!.toString()),
icon: const Icon(Icons.delete))
: const Text("-"),
),
Divider(),
],
);
},
));
},
error: (err, trace) => Text("$err"),
loading: () => const MyProgressIndicator());
@@ -83,81 +155,12 @@ class _ActivityPageState extends ConsumerState<ActivityPage>
);
}
Function(int) onDelete() {
Function(String) onDelete() {
return (id) {
ref
final f = ref
.read(activitiesDataProvider("active").notifier)
.deleteActivity(id)
.then((v) => Utils.showSnakeBar("删除成功"))
.onError((error, trace) => Utils.showSnakeBar("删除失败:$error"));
.deleteActivity(id);
showLoadingWithFuture(f);
};
}
}
class ActivityDataSource extends DataTableSource {
List<Activity> activities;
Function(int)? onDelete;
ActivityDataSource({required this.activities, this.onDelete});
@override
int get rowCount => activities.length;
@override
DataRow? getRow(int index) {
final activity = activities[index];
return DataRow(cells: [
DataCell(Text("${activity.id}")),
DataCell(Text("${activity.sourceTitle}")),
DataCell(Text("${activity.date!.toLocal()}")),
DataCell(() {
if (activity.status == "uploading") {
return const SizedBox(
width: 20,
height: 20,
child: Tooltip(
message: "正在上传到指定存储",
child: CircularProgressIndicator(),
));
} else if (activity.status == "fail") {
return const Tooltip(
message: "下载失败",
child: Icon(
Icons.close,
color: Colors.red,
));
} else if (activity.status == "success") {
return const Tooltip(
message: "下载成功",
child: Icon(
Icons.check,
color: Colors.green,
),
);
}
double p =
activity.progress == null ? 0 : activity.progress!.toDouble() / 100;
return CircularPercentIndicator(
radius: 15.0,
lineWidth: 5.0,
percent: p,
center: Text("${p * 100}"),
progressColor: Colors.green,
);
}()),
onDelete != null
? DataCell(Tooltip(
message: "删除任务",
child: IconButton(
onPressed: () => onDelete!(activity.id!),
icon: const Icon(Icons.delete))))
: const DataCell(Text("-"))
]);
}
@override
bool get isRowCountApproximate => false;
@override
int get selectedRowCount => 0;
}

ui/lib/calendar.dart (new file, 60 lines)

@@ -0,0 +1,60 @@
import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:table_calendar/table_calendar.dart';
class Calendar extends ConsumerStatefulWidget {
@override
ConsumerState<ConsumerStatefulWidget> createState() {
return _CalendarSate();
}
}
class _CalendarSate extends ConsumerState<Calendar> {
DateTime? _selectedDay;
DateTime _focusedDay = DateTime.now();
CalendarFormat _calendarFormat = CalendarFormat.month;
@override
Widget build(BuildContext context) {
return TableCalendar(
locale: "zh_CN",
firstDay: DateTime.utc(2010, 10, 16),
lastDay: DateTime.utc(2030, 3, 14),
focusedDay: _focusedDay,
selectedDayPredicate: (day) {
return isSameDay(_selectedDay, day);
},
onDaySelected: (selectedDay, focusedDay) {
setState(() {
_selectedDay = selectedDay;
_focusedDay = focusedDay; // update `_focusedDay` here as well
});
},
calendarFormat: _calendarFormat,
onFormatChanged: (format) {
setState(() {
_calendarFormat = format;
});
},
//locale: "zh_CN",
//eventLoader: (day) {},
);
}
}
showCalendar(BuildContext context) {
return showDialog<void>(
context: context,
barrierDismissible: true,
builder: (BuildContext context) {
return AlertDialog(
//title: Text("资源"),
content: SizedBox(
width: MediaQuery.of(context).size.width * 0.7,
height: MediaQuery.of(context).size.height * 0.6,
child: Calendar()),
);
},
);
}


@@ -2,18 +2,21 @@ import 'package:flutter/material.dart';
import 'package:flutter_adaptive_scaffold/flutter_adaptive_scaffold.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:go_router/go_router.dart';
import 'package:intl/date_symbol_data_local.dart';
import 'package:ui/activity.dart';
import 'package:ui/calendar.dart';
import 'package:ui/login_page.dart';
import 'package:ui/movie_watchlist.dart';
import 'package:ui/providers/APIs.dart';
import 'package:ui/search.dart';
import 'package:ui/settings.dart';
import 'package:ui/search_page/search.dart';
import 'package:ui/settings/settings.dart';
import 'package:ui/system_page.dart';
import 'package:ui/tv_details.dart';
import 'package:ui/welcome_page.dart';
import 'package:ui/widgets/utils.dart';
void main() {
runApp(const MyApp());
initializeDateFormatting().then((_) => runApp(MyApp()));
}
class MyApp extends ConsumerStatefulWidget {
@@ -35,7 +38,8 @@ CustomTransitionPage buildPageWithDefaultTransition<T>({
reverseTransitionDuration: Duration.zero,
key: state.pageKey,
child: child,
transitionsBuilder: (context, animation, secondaryAnimation, child) => child,
transitionsBuilder: (context, animation, secondaryAnimation, child) =>
child,
);
}
@@ -48,7 +52,9 @@ class _MyAppState extends ConsumerState<MyApp> {
builder: (BuildContext context, GoRouterState state, Widget child) {
return SelectionArea(
child: MainSkeleton(
body: Padding(padding: const EdgeInsets.all(20), child: child),
body: Padding(
padding: EdgeInsets.all(isSmallScreen(context) ? 5 : 20),
child: child),
),
);
},
@@ -121,16 +127,24 @@ class _MyAppState extends ConsumerState<MyApp> {
return ProviderScope(
child: MaterialApp.router(
title: 'Polaris 影视追踪',
title: 'Polaris 影视追踪下载',
theme: ThemeData(
fontFamily: "NotoSansSC",
colorScheme: ColorScheme.fromSeed(
seedColor: Colors.blueAccent,
brightness: Brightness.dark,
surface: Colors.black54),
useMaterial3: true,
//scaffoldBackgroundColor: Color.fromARGB(255, 26, 24, 24)
),
fontFamily: "NotoSansSC",
colorScheme: ColorScheme.fromSeed(
seedColor: Colors.blueAccent,
brightness: Brightness.dark,
surface: Colors.black87),
useMaterial3: true,
//scaffoldBackgroundColor: Color.fromARGB(255, 26, 24, 24)
tooltipTheme: TooltipThemeData(
textStyle: const TextStyle(
color: Colors.grey,
),
decoration: BoxDecoration(
color: Colors.black54,
borderRadius: BorderRadius.circular(20),
),
)),
routerConfig: router,
),
);
@@ -174,16 +188,20 @@ class _MainSkeletonState extends State<MainSkeleton> {
backgroundColor: Theme.of(context).colorScheme.inversePrimary,
// Here we take the value from the MyHomePage object that was created by
// the App.build method, and use it to set our appbar title.
title: const Row(
children: [
Text("Polaris"),
],
title: TextButton(
onPressed: () => context.go(WelcomePage.routeTv),
child: Text(
"Polaris",
overflow: TextOverflow.clip,
style: TextStyle(fontSize: 28),
),
),
actions: [
SearchAnchor(
builder: (BuildContext context, SearchController controller) {
return Container(
constraints: const BoxConstraints(maxWidth: 300, maxHeight: 40),
constraints: const BoxConstraints(maxWidth: 250, maxHeight: 40),
child: Opacity(
opacity: 0.8,
child: SearchBar(
@@ -203,6 +221,9 @@ class _MainSkeletonState extends State<MainSkeleton> {
(BuildContext context, SearchController controller) {
return [Text("dadada")];
}),
IconButton(
onPressed: () => showCalendar(context),
icon: Icon(Icons.calendar_month)),
MenuAnchor(
menuChildren: [
MenuItemButton(
@@ -248,27 +269,32 @@ class _MainSkeletonState extends State<MainSkeleton> {
},
destinations: const <NavigationDestination>[
NavigationDestination(
icon: Icon(Icons.live_tv),
icon: Icon(Icons.live_tv_outlined),
selectedIcon: Icon(Icons.live_tv),
label: '剧集',
),
NavigationDestination(
icon: Icon(Icons.movie),
icon: Icon(Icons.movie_outlined),
selectedIcon: Icon(Icons.movie),
label: '电影',
),
NavigationDestination(
icon: Icon(Icons.download),
icon: Icon(Icons.download_outlined),
selectedIcon: Icon(Icons.download),
label: '活动',
),
NavigationDestination(
icon: Icon(Icons.settings),
icon: Icon(Icons.settings_outlined),
selectedIcon: Icon(Icons.settings),
label: '设置',
),
NavigationDestination(
icon: Icon(Icons.computer_rounded),
icon: Icon(Icons.computer_outlined),
selectedIcon: Icon(Icons.computer),
label: '系统',
),
],
body: (context) => widget.body,
body: (context) => SafeArea(child: widget.body),
// Define a default secondaryBody.
// Override the default secondaryBody during the smallBreakpoint to be
// empty. Must use AdaptiveScaffold.emptyBuilder to ensure it is properly


@@ -1,12 +1,9 @@
import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:go_router/go_router.dart';
import 'package:ui/providers/APIs.dart';
import 'package:ui/providers/activity.dart';
import 'package:ui/providers/series_details.dart';
import 'package:ui/providers/settings.dart';
import 'package:ui/widgets/utils.dart';
import 'package:ui/welcome_page.dart';
import 'package:ui/widgets/detail_card.dart';
import 'package:ui/widgets/resource_list.dart';
import 'package:ui/widgets/progress_indicator.dart';
class MovieDetailsPage extends ConsumerStatefulWidget {
@@ -30,105 +27,12 @@ class _MovieDetailsPageState extends ConsumerState<MovieDetailsPage> {
@override
Widget build(BuildContext context) {
var seriesDetails = ref.watch(mediaDetailsProvider(widget.id));
var storage = ref.watch(storageSettingProvider);
return seriesDetails.when(
data: (details) {
return ListView(
children: [
Card(
margin: const EdgeInsets.all(4),
clipBehavior: Clip.hardEdge,
child: Container(
decoration: BoxDecoration(
image: DecorationImage(
fit: BoxFit.cover,
opacity: 0.5,
image: NetworkImage(
"${APIs.imagesUrl}/${details.id}/backdrop.jpg",
))),
child: Padding(
padding: const EdgeInsets.all(10),
child: Row(
children: <Widget>[
Flexible(
flex: 1,
child: Padding(
padding: const EdgeInsets.all(10),
child: Image.network(
"${APIs.imagesUrl}/${details.id}/poster.jpg",
fit: BoxFit.contain,
),
),
),
Expanded(
flex: 6,
child: Row(
children: [
Expanded(
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Row(
children: [
Text("${details.resolution}"),
const SizedBox(
width: 30,
),
storage.when(
data: (value) {
for (final s in value) {
if (s.id == details.storageId) {
return Text(
"${s.name}(${s.implementation})");
}
}
return const Text("未知存储");
},
error: (error, stackTrace) =>
Text("$error"),
loading: () =>
const MyProgressIndicator()),
],
),
const Divider(thickness: 1, height: 1),
Text(
"${details.name} (${details.airDate!.split("-")[0]})",
style: const TextStyle(
fontSize: 20,
fontWeight: FontWeight.bold),
),
const Text(""),
Text(
details.overview!,
),
],
)),
Column(
children: [
IconButton(
onPressed: () {
ref
.read(mediaDetailsProvider(
widget.id)
.notifier)
.delete()
.then((v) => context
.go(WelcomePage.routeMoivie))
.onError((error, trace) =>
Utils.showSnakeBar(
"删除失败:$error"));
},
icon: const Icon(Icons.delete))
],
)
],
),
),
],
),
)),
),
DetailCard(details: details),
NestedTabBar(
id: widget.id,
)
@@ -148,7 +52,7 @@ class NestedTabBar extends ConsumerStatefulWidget {
const NestedTabBar({super.key, required this.id});
@override
_NestedTabBarState createState() => _NestedTabBarState();
ConsumerState<ConsumerStatefulWidget> createState() => _NestedTabBarState();
}
class _NestedTabBarState extends ConsumerState<NestedTabBar>
@@ -157,7 +61,7 @@ class _NestedTabBarState extends ConsumerState<NestedTabBar>
@override
void initState() {
super.initState();
_nestedTabController = new TabController(length: 2, vsync: this);
_nestedTabController = TabController(length: 2, vsync: this);
}
@override
@@ -220,58 +124,7 @@ class _NestedTabBarState extends ConsumerState<NestedTabBar>
error: (error, trace) => Text("$error"),
loading: () => const MyProgressIndicator());
} else {
return Consumer(
builder: (context, ref, child) {
var torrents = ref.watch(mediaTorrentsDataProvider(
(mediaId: widget.id, seasonNumber: 0, episodeNumber: 0)));
return torrents.when(
data: (v) {
if (v.isEmpty) {
return const Center(
child: Text("无可用资源"),
);
}
return DataTable(
columns: const [
DataColumn(label: Text("名称")),
DataColumn(label: Text("大小")),
DataColumn(label: Text("seeders")),
DataColumn(label: Text("peers")),
DataColumn(label: Text("操作"))
],
rows: List.generate(v.length, (i) {
final torrent = v[i];
return DataRow(cells: [
DataCell(Text("${torrent.name}")),
DataCell(
Text("${torrent.size?.readableFileSize()}")),
DataCell(Text("${torrent.seeders}")),
DataCell(Text("${torrent.peers}")),
DataCell(IconButton(
icon: const Icon(Icons.download),
onPressed: () {
ref
.read(mediaTorrentsDataProvider((
mediaId: widget.id,
seasonNumber: 0,
episodeNumber: 0
)).notifier)
.download(torrent)
.then((v) => Utils.showSnakeBar(
"开始下载:${torrent.name}"))
.onError((error, trace) =>
Utils.showSnakeBar("操作失败: $error"));
},
))
]);
}),
);
},
error: (error, trace) => Text("$error"),
loading: () => const MyProgressIndicator());
},
);
return ResourceList(mediaId: widget.id);
}
})
],
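
With this refactor the details page no longer builds the header card or the torrent table inline; it composes the extracted DetailCard and ResourceList widgets instead. A minimal sketch of the resulting build method, assembled from the provider and widget names shown in the hunks above (the actual file may differ in detail):

// Sketch only: the page delegates presentation to extracted widgets.
@override
Widget build(BuildContext context) {
  var seriesDetails = ref.watch(mediaDetailsProvider(widget.id));
  return seriesDetails.when(
    data: (details) => ListView(
      children: [
        DetailCard(details: details), // header card extracted from the old inline Card
        NestedTabBar(id: widget.id),  // its resources tab now returns ResourceList(mediaId: widget.id)
      ],
    ),
    error: (error, trace) => Text("$error"),
    loading: () => const MyProgressIndicator(),
  );
}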


@@ -1,7 +1,7 @@
import 'package:flutter/material.dart';
import 'package:go_router/go_router.dart';
import 'package:ui/activity.dart';
import 'package:ui/settings.dart';
import 'package:ui/settings/settings.dart';
import 'package:ui/welcome_page.dart';
class NavDrawer extends StatefulWidget {


@@ -7,6 +7,8 @@ import 'package:ui/providers/server_response.dart';
class APIs {
static final _baseUrl = baseUrl();
static final searchUrl = "$_baseUrl/api/v1/media/search";
static final editMediaUrl = "$_baseUrl/api/v1/media/edit";
static final downloadAllUrl = "$_baseUrl/api/v1/media/downloadall/";
static final settingsUrl = "$_baseUrl/api/v1/setting/do";
static final settingsGeneralUrl = "$_baseUrl/api/v1/setting/general";
static final watchlistTvUrl = "$_baseUrl/api/v1/media/tv/watchlist";
@@ -14,7 +16,8 @@ class APIs {
static final availableTorrentsUrl = "$_baseUrl/api/v1/media/torrents/";
static final downloadTorrentUrl = "$_baseUrl/api/v1/media/torrents/download";
static final seriesDetailUrl = "$_baseUrl/api/v1/media/record/";
static final suggestedTvName = "$_baseUrl/api/v1/media/suggest/";
static final suggestedTvName = "$_baseUrl/api/v1/media/suggest/tv/";
static final suggestedMovieName = "$_baseUrl/api/v1/media/suggest/movie/";
static final searchAndDownloadUrl = "$_baseUrl/api/v1/indexer/download";
static final allIndexersUrl = "$_baseUrl/api/v1/indexer/";
static final addIndexerUrl = "$_baseUrl/api/v1/indexer/add";
@@ -32,6 +35,10 @@ class APIs {
static final logsBaseUrl = "$_baseUrl/api/v1/logs/";
static final logFilesUrl = "$_baseUrl/api/v1/setting/logfiles";
static final aboutUrl = "$_baseUrl/api/v1/setting/about";
static final changeMonitoringUrl = "$_baseUrl/api/v1/setting/monitoring";
static final addImportlistUrl = "$_baseUrl/api/v1/importlist/add";
static final deleteImportlistUrl = "$_baseUrl/api/v1/importlist/delete";
static final getAllImportlists = "$_baseUrl/api/v1/importlist/";
static final notifierAllUrl = "$_baseUrl/api/v1/notifier/all";
static final notifierDeleteUrl = "$_baseUrl/api/v1/notifier/id/";
@@ -39,6 +46,8 @@ class APIs {
static final tmdbImgBaseUrl = "$_baseUrl/api/v1/posters";
static final cronJobUrl = "$_baseUrl/api/v1/setting/cron/trigger";
static const tmdbApiKey = "tmdb_api_key";
static const downloadDirKey = "download_dir";
@@ -93,4 +102,14 @@ class APIs {
context.go('/login');
}
}
static Future<void> triggerCronJob(String name) async {
var resp = await getDio().post(APIs.cronJobUrl, data: {"job_name": name});
var sp = ServerResponse.fromJson(resp.data);
if (sp.code != 0) {
throw sp.message;
}
}
}
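
The new triggerCronJob helper posts {"job_name": name} to cronJobUrl and throws the server's message when the response code is non-zero, so callers can chain .then/.onError directly. A minimal usage sketch for wiring a manual-trigger button; the job name "update_import_lists" is a placeholder, not taken from this diff (real job names are defined on the server):

// Sketch only: trigger a cron job from the UI and surface the result.
IconButton(
  icon: const Icon(Icons.play_arrow),
  onPressed: () {
    APIs.triggerCronJob("update_import_lists") // placeholder job name
        .then((v) => Utils.showSnakeBar("已触发任务"))
        .onError((error, trace) => Utils.showSnakeBar("触发失败: $error"));
  },
)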

Some files were not shown because too many files have changed in this diff.