Compare commits
275 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 2683c5dbf2 | |
| | b717885270 | |
| | 4e457e99b9 | |
| | ecfe31ea45 | |
| | 89104785d7 | |
| | f4ccc69b50 | |
| | 40c6e2df5c | |
| | 37dfb0fe94 | |
| | 9968f9f225 | |
| | f5c977224b | |
| | 8af3ffccd3 | |
| | c535dfd714 | |
| | f696b78260 | |
| | 4f3e3e399d | |
| | e4e3c9a851 | |
| | bf608f933d | |
| | 5923fc73e1 | |
| | c2d9ccfd4c | |
| | 5d4429bf7c | |
| | e4c111ac2a | |
| | 3e5e20e933 | |
| | ba1be8f279 | |
| | 361556228b | |
| | ca414a73ff | |
| | 32b595e116 | |
| | b12bbd2ad9 | |
| | 60110f4ca6 | |
| | b7ca02429c | |
| | ff63084014 | |
| | 821d6859ff | |
| | 10e6e99990 | |
| | 23a5997814 | |
| | b487c81865 | |
| | 32914344d1 | |
| | 644c9ed228 | |
| | d3ad80380f | |
| | 19c6308a81 | |
| | 7017f32fe3 | |
| | 02a23f13f9 | |
| | cc211a89a4 | |
| | 4800e6c79d | |
| | b5f0b28c61 | |
| | 081338df24 | |
| | 9632ca45b3 | |
| | b948bff497 | |
| | 29383cf75c | |
| | 57ec0b9eb9 | |
| | 0cce4ffee0 | |
| | 5c01c45068 | |
| | 712bf84c90 | |
| | fdb63a8459 | |
| | 990d9dab08 | |
| | da863588e4 | |
| | 09ff67fef7 | |
| | 3c37948798 | |
| | 6fd39d818c | |
| | a0e211c328 | |
| | 27d8b1672a | |
| | 349e394e8e | |
| | 620f085ca5 | |
| | 5b70badb50 | |
| | 5c6ac2c430 | |
| | 365cfddf8f | |
| | 6c26812b92 | |
| | 0057a75a95 | |
| | f110f257d4 | |
| | 93e8e78591 | |
| | 9ff12cd86b | |
| | fd2f4b140f | |
| | 4607af6982 | |
| | 984bebcfe0 | |
| | d31abd59ad | |
| | e0ad71291c | |
| | 8ecc9393cf | |
| | b62e0e9bfd | |
| | 1391f55f44 | |
| | 0c709ee517 | |
| | 806d821388 | |
| | 829043bf28 | |
| | 66ab418054 | |
| | 5fe40cc64b | |
| | 8f6f26f00e | |
| | ee0bee2b06 | |
| | 1bb16a8a66 | |
| | d746032114 | |
| | b34e39889c | |
| | 64e98647a8 | |
| | f91c91e0b1 | |
| | f1aaa06d05 | |
| | e8a38aa6f8 | |
| | 7e88533ea2 | |
| | 05698f4047 | |
| | 1daad0c236 | |
| | 86c8163f9c | |
| | 78ab8cc8e6 | |
| | 1390277b43 | |
| | 1aa3dca2c6 | |
| | f48b3c657e | |
| | d8d570f1b2 | |
| | bd385d4f85 | |
| | 466596345d | |
| | 8ab33f3d54 | |
| | 4d3b26135c | |
| | 56d5cdb2bf | |
| | 6f80da779b | |
| | 5fef156052 | |
| | eab3a6ca2b | |
| | ffa5c37c4c | |
| | 241e30152b | |
| | 16216fcc4f | |
| | 578b6a9d78 | |
| | f4da80c845 | |
| | 5a9acd3e6e | |
| | 8bfa8f84b9 | |
| | 5b0b2ce5b0 | |
| | b24c1a1501 | |
| | aa320c6dcb | |
| | 5132714247 | |
| | 3aeecac4fb | |
| | 7f8c613a65 | |
| | c787d71fbd | |
| | c28e16805e | |
| | fc3d3878bc | |
| | e26e86a63f | |
| | 408ff163ef | |
| | 35d299b60c | |
| | 6e002b1198 | |
| | 7508a264a6 | |
| | 0022c9dad5 | |
| | 654d8b50b4 | |
| | 97ede5d9c9 | |
| | 4803567818 | |
| | 4e0014cb3f | |
| | c256d46d5c | |
| | b765f16ea6 | |
| | 9350e376f4 | |
| | 06f935871a | |
| | 001b850d8f | |
| | 1340305f2d | |
| | b337e40fcc | |
| | e94386e455 | |
| | 2b4fb99c89 | |
| | faa603d5df | |
| | 9ba59a7d5a | |
| | 0ea1c040a2 | |
| | eba646f5db | |
| | ebcc0c32da | |
| | 769f217506 | |
| | 3525d1bb83 | |
| | 2c3fd89f2a | |
| | 19ab8c65de | |
| | 979218f615 | |
| | d4dd2da335 | |
| | 000717fcd9 | |
| | 300f9a478b | |
| | 88a554b186 | |
| | 6ef4bedebe | |
| | 233970ef39 | |
| | e2bba8ec71 | |
| | b7aeb9c3c6 | |
| | 4a93d51fdc | |
| | f158b74be6 | |
| | 2c8c715540 | |
| | ba532d406a | |
| | cff093ca98 | |
| | 8f677b29a7 | |
| | e08c126af2 | |
| | eed72c5eb9 | |
| | fcff47041a | |
| | f9d4f851eb | |
| | 262baf769f | |
| | 6ef2e5b347 | |
| | e0e11b70bb | |
| | 3de2f89107 | |
| | b024b5f6dc | |
| | 961d762f35 | |
| | 7f025a6246 | |
| | fc86a441f4 | |
| | 34fa05e7dd | |
| | 9c3757a1bf | |
| | e63a899df5 | |
| | 3a4e303d9d | |
| | ef9e4487c6 | |
| | 02f6cfb5b7 | |
| | e73ae86801 | |
| | b19938f2df | |
| | bb3c4551af | |
| | eae35ce862 | |
| | feecc9f983 | |
| | 5175e651ee | |
| | f065abfbf9 | |
| | cd4d600f5e | |
| | 741a4024fd | |
| | 0433cc7b0a | |
| | accc02c74c | |
| | 87b6c99f1f | |
| | b2a092c64e | |
| | 51fc5c3c74 | |
| | 5e6a17f86c | |
| | 2fedfd6c76 | |
| | 61bc9b72bd | |
| | a997726a5f | |
| | 7a2c67af04 | |
| | 3698170d0b | |
| | 6c38db5248 | |
| | b597edab8a | |
| | 2e3b67dfce | |
| | 1dd61ccbca | |
| | f5f8434832 | |
| | 2cb6a15c0b | |
| | 317f5655b8 | |
| | 00506df5a1 | |
| | 57de442eb9 | |
| | 690ce272c2 | |
| | 6a9f63fff6 | |
| | 7b9b619de6 | |
| | 8bc9076d90 | |
| | 891be34504 | |
| | 04df9adfdf | |
| | 3c47eba618 | |
| | e85bd231c9 | |
| | 58e65b21fb | |
| | 520933085d | |
| | 5cc88986d2 | |
| | d63a923589 | |
| | bca68befb1 | |
| | 1be44bff9e | |
| | 3998270cbd | |
| | 73e76c2185 | |
| | c72a460509 | |
| | 912293d8e8 | |
| | 7f2e84ad52 | |
| | e52ad612c1 | |
| | 45a212fec5 | |
| | 39bfda4cda | |
| | 24a4d3152d | |
| | 6c6670a8c0 | |
| | 63fc4f277b | |
| | 45d2a4fb79 | |
| | 5e337871c9 | |
| | 803dcfeacd | |
| | c26e61bbee | |
| | e334acba32 | |
| | 1359df599b | |
| | 16ca00d19c | |
| | f4b8d03cfc | |
| | 8811b89889 | |
| | daff2cfcfc | |
| | 79ec63bfdb | |
| | bd0ada5897 | |
| | a7dfa2d0f0 | |
| | 33f0a5b53f | |
| | 1878d6b679 | |
| | 627f838ab9 | |
| | 215511fab0 | |
| | 730db5c94a | |
| | 55f5ce329c | |
| | 5b2d86d301 | |
| | 95708a4c0c | |
| | c41b3026df | |
| | fa84f881a4 | |
| | 90ac4cddff | |
| | 2c5e4d0530 | |
| | fb638dff8b | |
| | 11f7b51eb5 | |
| | d2439480c8 | |
| | 6826422c2b | |
| | 8d2ce9752b | |
| | 7e5feaf998 | |
| | e0bdd88706 | |
| | 74d5bf54b9 | |
| | 03a3bf6d90 | |
| | ee23b75390 | |
| | 6e9b88b09b | |
| | 93525ae883 | |
.github/workflows/go.yml (vendored, 15 changed lines)

@@ -1,4 +1,4 @@
name: Create and publish a Docker image
name: build docker image

on:
workflow_dispatch:
@@ -17,14 +17,11 @@ jobs:

build-and-push-image:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
attestations: write
id-token: write

steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Login to image repository
uses: docker/login-action@v2
@@ -49,9 +46,3 @@ jobs:
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}

- name: Generate artifact attestation
uses: actions/attest-build-provenance@v1
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
.github/workflows/release.yml (vendored, 8 changed lines)

@@ -1,4 +1,4 @@
name: Create and publish a Docker image
name: release docker image

on:
workflow_dispatch:
@@ -12,7 +12,7 @@ env:

jobs:

build-and-push-image:
build-and-release-image:
runs-on: ubuntu-latest
permissions:
contents: read
@@ -22,6 +22,8 @@ jobs:

steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Set Up QEMU
uses: docker/setup-qemu-action@v3
@@ -59,4 +61,4 @@ jobs:
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
push-to-registry: false
Dockerfile (13 changed lines)

@@ -3,7 +3,7 @@ WORKDIR /app
COPY ./ui/pubspec.yaml ./ui/pubspec.lock ./
RUN flutter pub get
COPY ./ui/ ./
RUN flutter build web
RUN flutter build web --no-web-resources-cdn --web-renderer html

# 打包依赖阶段使用golang作为基础镜像
FROM golang:1.22 as builder
@@ -22,9 +22,10 @@ COPY . .

COPY --from=flutter /app/build/web ./ui/build/web/
# 指定OS等,并go build
RUN CGO_ENABLED=1 go build -o polaris ./cmd/
RUN CGO_ENABLED=1 go build -o polaris -ldflags="-X polaris/db.Version=$(git describe --tags --long)" ./cmd/

FROM debian:12
FROM debian:stable-slim
ENV TZ="Asia/Shanghai" GIN_MODE=release

WORKDIR /app
RUN apt-get update && apt-get -y install ca-certificates
@@ -32,4 +33,8 @@ RUN apt-get update && apt-get -y install ca-certificates
# 将上一个阶段publish文件夹下的所有文件复制进来
COPY --from=builder /app/polaris .

EXPOSE 8080
EXPOSE 8080

#USER 1000:1000

ENTRYPOINT ["./polaris"]
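The new build flag above bakes the `git describe` output into the binary through the Go linker's `-X` option; it pairs with the `var Version = "undefined"` default added in db/const.go further down. A minimal, self-contained sketch of the mechanism (it uses `main.version` instead of `polaris/db.Version` so it compiles on its own):

```go
// main.go: sketch of version injection via -ldflags.
// Build: go build -ldflags="-X main.version=$(git describe --tags --long)" .
package main

import "fmt"

// version stays "undefined" for a plain `go build`; the linker overwrites it
// when the -X flag above is supplied, so a release needs no source change.
var version = "undefined"

func main() {
	fmt.Println("polaris version:", version)
}
```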
README.md (92 changed lines)

@@ -1,70 +1,62 @@
# polaris
# Polaris





Polaris 是一个电视剧和电影的追踪软件。配置好了之后,当剧集或者电影播出后,会第一时间下载对应的资源。支持本地存储或者webdav。






## 功能
交流群: https://t.me/+8R2nzrlSs2JhMDgx

## 快速开始

使用此程序参考 [【快速开始】](https://simonding.gitbook.io/polaris/quick_start)

## Features

- [x] 电视剧自动追踪下载
- [x] 电影自动追踪下载
- [x] webdav 存储支持,配合 [alist](https://github.com/alist-org/alist) 或阿里云等实现更多功能
- [x] 事件通知推送,目前支持 Pushover和 Bark,还在扩充中
- [x] 后台代理支持
- [x] 用户认证
- [x] plex 刮削支持
- [x] NFO 刮削文件支持
- [x] BT/PT 支持
- [x] and more...

## Todos

- [ ] qbittorrent客户端支持
- [ ] 更多通知客户端支持
- [ ] 第三方watchlist导入支持
- [ ] 手机客户端

## 原理

本程序不提供任何视频相关资源,所有的资源都通过 jackett/prowlarr 所对接的BT/PT站点提供。

1. 此程序通过调用 jackett/prowlarr API搜索相关资源,然后匹配上对应的剧集
2. 把搜索到的资源送到下载器下载
3. 下载完成后归入对应的路径

## 对比 sonarr/radarr
* 更好的中文支持
* 对于动漫、日剧的良好支持,配合国内站点基本能匹配上对应资源
* 支持 webdav 后端存储,可以配合 alist 或者阿里云来实现下载后实时传到云上的功能。这样外出就可以不依靠家里的宽带来看电影了,或者实现个轻 NAS 功能,下载功能放在本地,数据放在云盘
* golang 实现后端,相比于 .NET 更节省资源
* 一个程序同时实现了电影、电视剧功能,不需要装两个程序
* 当然 sonarr/radarr 也是非常优秀的开源项目,目前 Polaris 功能还没有 sonarr/radarr 丰富

## 快速开始

最简单部署 Polaris 的方式是使用 docker compose

```yaml
polaris:
  image: ghcr.io/simon-ding/polaris:latest
  restart: always
  volumes:
    - ./config/polaris:/app/data #程序配置文件路径
    - /downloads:/downloads #下载路径,需要和下载客户端配置一致
    - /data:/data #数据存储路径
  ports:
    - 8080:8080
```

拉起之后访问 http://< ip >:8080 的形式访问

## 配置

要正确使用此程序,需要配置好以下设置:

### TMDB设置
因为此程序需要使用到 TMDB 的数据,使用此程序首先要申请一个 TMDB 的 Api Key

### 索引器

索引器是资源提供者,目前支持 torznab 协议,意味着 polarr 或者 jackett 都可以支持。请自行部署相关程序。

推荐使用 linuxserver 的镜像:https://docs.linuxserver.io/images/docker-jackett/

### 下载器

资源由谁下载,目前可支持 tansmission,需要配置好对应下载器

### 存储设置

程序默认所有剧集和电影存储在 /data 路径下,如果想修改路径或者webdav存储,需要在存储配置下修改

## 开始使用

配置完了这些,开始享受使用此程序吧!可以搜索几部自己想看的电影或者电视机,加入想看列表。当剧集有更新或者电影有资源是就会自动下载对应资源了。

-------------

## 请我喝杯咖啡

<img src="assets/wechat.JPG" width=40% height=40%>
<img src="./doc/assets/wechat.JPG" width=40% height=40%>

Two binary image assets removed (2.1 MiB and 2.7 MiB).
@@ -4,9 +4,14 @@ import (
"polaris/db"
"polaris/log"
"polaris/server"
"syscall"
)

func main() {
log.Infof("------------------- Starting Polaris ---------------------")

syscall.Umask(0) //max permission 0777

dbClient, err := db.Open()
if err != nil {
log.Panicf("init db error: %v", err)
db/const.go (60 changed lines)

@@ -1,23 +1,35 @@
package db

var Version = "undefined"

const (
SettingTmdbApiKey = "tmdb_api_key"
SettingLanguage = "language"
SettingJacketUrl = "jacket_url"
SettingJacketApiKey = "jacket_api_key"
SettingDownloadDir = "download_dir"
SettingTmdbApiKey = "tmdb_api_key"
SettingLanguage = "language"
SettingJacketUrl = "jacket_url"
SettingJacketApiKey = "jacket_api_key"
SettingDownloadDir = "download_dir"
SettingLogLevel = "log_level"
SettingProxy = "proxy"
SettingPlexMatchEnabled = "plexmatch_enabled"
SettingNfoSupportEnabled = "nfo_support_enabled"
SettingAllowQiangban = "filter_qiangban"
SettingEnableTmdbAdultContent = "tmdb_adult_content"
SetttingSizeLimiter = "size_limiter"
SettingTvNamingFormat = "tv_naming_format"
SettingMovieNamingFormat = "movie_naming_format"
)

const (
SettingAuthEnabled = "auth_enbled"
SettingUsername = "auth_username"
SettingPassword = "auth_password"
SettingUsername = "auth_username"
SettingPassword = "auth_password"
)

const (
IndexerTorznabImpl = "torznab"
DataPath = "./data"
ImgPath = DataPath + "/img"
DataPath = "./data"
ImgPath = DataPath + "/img"
LogPath = DataPath + "/logs"
)

const (
@@ -25,18 +37,26 @@ const (
LanguageCN = "zh-CN"
)

type ResolutionType string
const DefaultNamingFormat = "{{.NameCN}} {{.NameEN}} {{if .Year}} ({{.Year}}) {{end}}"

const (
Any ResolutionType = "any"
R720p ResolutionType = "720p"
R1080p ResolutionType = "1080p"
R4k ResolutionType = "4k"
)

func (r ResolutionType) String() string {
return string(r)
type NamingInfo struct {
NameCN string
NameEN string
Year string
TmdbID int
}

type ResolutionType string

const JwtSerectKey = "jwt_secrect_key"
const JwtSerectKey = "jwt_secrect_key"

type SizeLimiter struct {
R720p Limiter `json:"720p"`
R1080p Limiter `json:"1080p"`
R2160p Limiter `json:"2160p"`
}

type Limiter struct {
Max int `json:"max"`
Min int `json:"min"`
}
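The new `DefaultNamingFormat` constant and `NamingInfo` struct above are plain Go text/template material. A hedged sketch of how such a format string renders; the wrapper program and the sample values are illustrative only, not taken from the repository:

```go
// naming_format.go: render DefaultNamingFormat with text/template.
package main

import (
	"os"
	"text/template"
)

// NamingInfo mirrors the struct added in db/const.go.
type NamingInfo struct {
	NameCN string
	NameEN string
	Year   string
	TmdbID int
}

func main() {
	const format = "{{.NameCN}} {{.NameEN}} {{if .Year}} ({{.Year}}) {{end}}"
	tmpl := template.Must(template.New("naming").Parse(format))
	// Prints: "流浪地球 The Wandering Earth  (2019) "
	_ = tmpl.Execute(os.Stdout, NamingInfo{NameCN: "流浪地球", NameEN: "The Wandering Earth", Year: "2019"})
}
```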
db/db.go (272 changed lines)

@@ -9,8 +9,10 @@ import (
"polaris/ent/downloadclients"
"polaris/ent/episode"
"polaris/ent/history"
"polaris/ent/importlist"
"polaris/ent/indexers"
"polaris/ent/media"
"polaris/ent/schema"
"polaris/ent/settings"
"polaris/ent/storage"
"polaris/log"
@@ -42,6 +44,7 @@ func Open() (*Client, error) {
c := &Client{
ent: client,
}
c.init()

return c, nil
}
@@ -55,7 +58,16 @@ func (c *Client) init() {
downloadDir := c.GetSetting(SettingDownloadDir)
if downloadDir == "" {
log.Infof("set default download dir")
c.SetSetting(downloadDir, "/downloads")
c.SetSetting(SettingDownloadDir, "/downloads")
}
logLevel := c.GetSetting(SettingLogLevel)
if logLevel == "" {
log.Infof("set default log level")
c.SetSetting(SettingLogLevel, "info")
}
if tr := c.GetTransmission(); tr == nil {
log.Warnf("no download client, set default download client")
c.SaveTransmission("transmission", "http://transmission:9091", "", "")
}
}

@@ -77,18 +89,16 @@ func (c *Client) generateDefaultLocalStorage() error {
return c.AddStorage(&StorageInfo{
Name: "local",
Implementation: "local",
TvPath: "/data/tv/",
MoviePath: "/data/movies/",
Default: true,
Settings: map[string]string{
"tv_path": "/data/tv/",
"movie_path": "/data/movies/",
},
})
}

func (c *Client) GetSetting(key string) string {
v, err := c.ent.Settings.Query().Where(settings.Key(key)).Only(context.TODO())
if err != nil {
log.Errorf("get setting by key: %s error: %v", key, err)
log.Debugf("get setting by key: %s error: %v", key, err)
return ""
}
return v.Value
@@ -109,7 +119,7 @@ func (c *Client) GetLanguage() string {
lang := c.GetSetting(SettingLanguage)
log.Infof("get application language: %s", lang)
if lang == "" {
return "zh-CN"
return LanguageCN
}
return lang
}
@@ -129,6 +139,7 @@ func (c *Client) AddMediaWatchlist(m *ent.Media, episodes []int) (*ent.Media, er
}
r, err := c.ent.Media.Create().
SetTmdbID(m.TmdbID).
SetImdbID(m.ImdbID).
SetStorageID(m.StorageID).
SetOverview(m.Overview).
SetNameCn(m.NameCn).
@@ -138,6 +149,9 @@ func (c *Client) AddMediaWatchlist(m *ent.Media, episodes []int) (*ent.Media, er
SetAirDate(m.AirDate).
SetResolution(m.Resolution).
SetTargetDir(m.TargetDir).
SetDownloadHistoryEpisodes(m.DownloadHistoryEpisodes).
SetLimiter(m.Limiter).
SetExtras(m.Extras).
AddEpisodeIDs(episodes...).
Save(context.TODO())
return r, err
@@ -157,11 +171,18 @@ func (c *Client) GetEpisode(seriesId, seasonNum, episodeNum int) (*ent.Episode,
return c.ent.Episode.Query().Where(episode.MediaID(seriesId), episode.SeasonNumber(seasonNum),
episode.EpisodeNumber(episodeNum)).First(context.TODO())
}
func (c *Client) GetEpisodeByID(epID int) (*ent.Episode, error) {
return c.ent.Episode.Query().Where(episode.ID(epID)).First(context.TODO())
}

func (c *Client) UpdateEpiode(episodeId int, name, overview string) error {
return c.ent.Episode.Update().Where(episode.ID(episodeId)).SetTitle(name).SetOverview(overview).Exec(context.TODO())
}

func (c *Client) UpdateEpiode2(episodeId int, name, overview, airdate string) error {
return c.ent.Episode.Update().Where(episode.ID(episodeId)).SetTitle(name).SetOverview(overview).SetAirDate(airdate).Exec(context.TODO())
}

type MediaDetails struct {
*ent.Media
Episodes []*ent.Episode `json:"episodes"`
@@ -187,6 +208,10 @@ func (c *Client) GetMediaDetails(id int) *MediaDetails {
return md
}

func (c *Client) GetMedia(id int) (*ent.Media, error) {
return c.ent.Media.Query().Where(media.ID(id)).First(context.TODO())
}

func (c *Client) DeleteMedia(id int) error {
_, err := c.ent.Episode.Delete().Where(episode.MediaID(id)).Exec(context.TODO())
if err != nil {
@@ -202,6 +227,22 @@ func (c *Client) SaveEposideDetail(d *ent.Episode) (int, error) {
SetSeasonNumber(d.SeasonNumber).
SetEpisodeNumber(d.EpisodeNumber).
SetOverview(d.Overview).
SetMonitored(d.Monitored).
SetTitle(d.Title).Save(context.TODO())
if err != nil {
return 0, errors.Wrap(err, "save episode")
}
return ep.ID, nil
}

func (c *Client) SaveEposideDetail2(d *ent.Episode) (int, error) {
ep, err := c.ent.Episode.Create().
SetAirDate(d.AirDate).
SetSeasonNumber(d.SeasonNumber).
SetEpisodeNumber(d.EpisodeNumber).
SetMediaID(d.MediaID).
SetStatus(d.Status).
SetOverview(d.Overview).
SetTitle(d.Title).Save(context.TODO())

return ep.ID, err
@@ -212,19 +253,22 @@ type TorznabSetting struct {
ApiKey string `json:"api_key"`
}

func (c *Client) SaveTorznabInfo(name string, setting TorznabSetting) error {
data, err := json.Marshal(setting)
if err != nil {
return errors.Wrap(err, "marshal json")
func (c *Client) SaveIndexer(in *ent.Indexers) error {

if in.ID != 0 {
//update setting
return c.ent.Indexers.Update().Where(indexers.ID(in.ID)).SetName(in.Name).SetImplementation(in.Implementation).
SetPriority(in.Priority).SetSettings(in.Settings).SetSeedRatio(in.SeedRatio).SetDisabled(in.Disabled).Exec(context.Background())
}
count := c.ent.Indexers.Query().Where(indexers.Name(name)).CountX(context.TODO())
//create new one
count := c.ent.Indexers.Query().Where(indexers.Name(in.Name)).CountX(context.TODO())
if count > 0 {
c.ent.Indexers.Update().Where(indexers.Name(name)).SetSettings(string(data)).Save(context.TODO())
return err
return fmt.Errorf("name already esxits: %v", in.Name)
}

_, err = c.ent.Indexers.Create().
SetName(name).SetImplementation(IndexerTorznabImpl).SetPriority(1).SetSettings(string(data)).Save(context.TODO())
_, err := c.ent.Indexers.Create().
SetName(in.Name).SetImplementation(in.Implementation).SetPriority(in.Priority).SetSettings(in.Settings).SetSeedRatio(in.SeedRatio).
SetDisabled(in.Disabled).Save(context.TODO())
if err != nil {
return errors.Wrap(err, "save db")
}
@@ -236,9 +280,22 @@ func (c *Client) DeleteTorznab(id int) {
c.ent.Indexers.Delete().Where(indexers.ID(id)).Exec(context.TODO())
}

func (c *Client) GetIndexer(id int) (*TorznabInfo, error) {
res, err := c.ent.Indexers.Query().Where(indexers.ID(id)).First(context.TODO())
if err != nil {
return nil, err
}
var ss TorznabSetting
err = json.Unmarshal([]byte(res.Settings), &ss)
if err != nil {

return nil, fmt.Errorf("unmarshal torznab %s error: %v", res.Name, err)
}
return &TorznabInfo{Indexers: res, TorznabSetting: ss}, nil
}

type TorznabInfo struct {
ID int `json:"id"`
Name string `json:"name"`
*ent.Indexers
TorznabSetting
}

@@ -254,8 +311,7 @@ func (c *Client) GetAllTorznabInfo() []*TorznabInfo {
continue
}
l = append(l, &TorznabInfo{
ID: r.ID,
Name: r.Name,
Indexers: r,
TorznabSetting: ss,
})
}
@@ -299,31 +355,42 @@ func (c *Client) DeleteDownloadCLient(id int) {

// Storage is the model entity for the Storage schema.
type StorageInfo struct {
Name string `json:"name"`
Implementation string `json:"implementation"`
Settings map[string]string `json:"settings"`
Name string `json:"name" binding:"required"`
Implementation string `json:"implementation" binding:"required"`
Settings map[string]string `json:"settings" binding:"required"`
TvPath string `json:"tv_path" binding:"required"`
MoviePath string `json:"movie_path" binding:"required"`
Default bool `json:"default"`
}

type LocalDirSetting struct {
TvPath string `json:"tv_path"`
MoviePath string `json:"movie_path"`
func (s *StorageInfo) ToWebDavSetting() WebdavSetting {
if s.Implementation != storage.ImplementationWebdav.String() {
panic("not webdav storage")
}
return WebdavSetting{
URL: s.Settings["url"],
User: s.Settings["user"],
Password: s.Settings["password"],
ChangeFileHash: s.Settings["change_file_hash"],
}
}

type WebdavSetting struct {
URL string `json:"url"`
TvPath string `json:"tv_path"`
MoviePath string `json:"movie_path"`
User string `json:"user"`
Password string `json:"password"`
URL string `json:"url"`
User string `json:"user"`
Password string `json:"password"`
ChangeFileHash string `json:"change_file_hash"`
}

func (c *Client) AddStorage(st *StorageInfo) error {
if !strings.HasSuffix(st.Settings["tv_path"], "/") {
st.Settings["tv_path"] += "/"
if !strings.HasSuffix(st.TvPath, "/") {
st.TvPath += "/"
}
if !strings.HasSuffix(st.Settings["movie_path"], "/") {
st.Settings["movie_path"] += "/"
if !strings.HasSuffix(st.MoviePath, "/") {
st.MoviePath += "/"
}
if st.Settings == nil {
st.Settings = map[string]string{}
}

data, err := json.Marshal(st.Settings)
@@ -335,7 +402,7 @@ func (c *Client) AddStorage(st *StorageInfo) error {
if count > 0 {
//storage already exist, edit exist one
return c.ent.Storage.Update().Where(storage.Name(st.Name)).
SetImplementation(storage.Implementation(st.Implementation)).
SetImplementation(storage.Implementation(st.Implementation)).SetTvPath(st.TvPath).SetMoviePath(st.MoviePath).
SetSettings(string(data)).Exec(context.TODO())
}
countAll := c.ent.Storage.Query().Where(storage.Deleted(false)).CountX(context.TODO())
@@ -344,7 +411,7 @@ func (c *Client) AddStorage(st *StorageInfo) error {
st.Default = true
}
_, err = c.ent.Storage.Create().SetName(st.Name).
SetImplementation(storage.Implementation(st.Implementation)).
SetImplementation(storage.Implementation(st.Implementation)).SetTvPath(st.TvPath).SetMoviePath(st.MoviePath).
SetSettings(string(data)).SetDefault(st.Default).Save(context.TODO())
if err != nil {
return err
@@ -365,15 +432,6 @@ type Storage struct {
ent.Storage
}

func (s *Storage) ToLocalSetting() LocalDirSetting {
if s.Implementation != storage.ImplementationLocal {
panic("not local storage")
}
var localSetting LocalDirSetting
json.Unmarshal([]byte(s.Settings), &localSetting)
return localSetting
}

func (s *Storage) ToWebDavSetting() WebdavSetting {
if s.Implementation != storage.ImplementationWebdav {
panic("not webdav storage")
@@ -383,12 +441,6 @@ func (s *Storage) ToWebDavSetting() WebdavSetting {
return webdavSetting
}

func (s *Storage) GetPath() (tvPath string, moviePath string) {
var m map[string]string
json.Unmarshal([]byte(s.Settings), &m)
return m["tv_path"], m["movie_path"]
}

func (c *Client) GetStorage(id int) *Storage {
r, err := c.ent.Storage.Query().Where(storage.ID(id)).First(context.TODO())
if err != nil {
@@ -423,7 +475,8 @@ func (c *Client) SetDefaultStorageByName(name string) error {

func (c *Client) SaveHistoryRecord(h ent.History) (*ent.History, error) {
return c.ent.History.Create().SetMediaID(h.MediaID).SetEpisodeID(h.EpisodeID).SetDate(time.Now()).
SetStatus(h.Status).SetTargetDir(h.TargetDir).SetSourceTitle(h.SourceTitle).SetSaved(h.Saved).Save(context.TODO())
SetStatus(h.Status).SetTargetDir(h.TargetDir).SetSourceTitle(h.SourceTitle).SetIndexerID(h.IndexerID).
SetDownloadClientID(h.DownloadClientID).SetSize(h.Size).SetSaved(h.Saved).Save(context.TODO())
}

func (c *Client) SetHistoryStatus(id int, status history.Status) error {
@@ -440,7 +493,7 @@ func (c *Client) GetHistories() ent.Histories {

func (c *Client) GetRunningHistories() ent.Histories {
h, err := c.ent.History.Query().Where(history.Or(history.StatusEQ(history.StatusRunning),
history.StatusEQ(history.StatusUploading))).All(context.TODO())
history.StatusEQ(history.StatusUploading), history.StatusEQ(history.StatusSeeding))).All(context.TODO())
if err != nil {
return nil
}
@@ -464,19 +517,29 @@ func (c *Client) GetDownloadDir() string {
return r.Value
}

func (c *Client) UpdateEpisodeFile(mediaID int, seasonNum, episodeNum int, file string) error {
func (c *Client) UpdateEpisodeStatus(mediaID int, seasonNum, episodeNum int) error {
ep, err := c.ent.Episode.Query().Where(episode.MediaID(mediaID)).Where(episode.EpisodeNumber(episodeNum)).
Where(episode.SeasonNumber(seasonNum)).First(context.TODO())
if err != nil {
return errors.Wrap(err, "finding episode")
}
return ep.Update().SetFileInStorage(file).SetStatus(episode.StatusDownloaded).Exec(context.TODO())
return ep.Update().SetStatus(episode.StatusDownloaded).Exec(context.TODO())
}

func (c *Client) SetEpisodeStatus(id int, status episode.Status) error {
return c.ent.Episode.Update().Where(episode.ID(id)).SetStatus(status).Exec(context.TODO())
}

func (c *Client) IsEpisodeDownloadingOrDownloaded(id int) bool {
his := c.ent.History.Query().Where(history.EpisodeID(id)).AllX(context.Background())
for _, h := range his {
if h.Status != history.StatusFail {
return true
}
}
return false
}

func (c *Client) SetSeasonAllEpisodeStatus(mediaID, seasonNum int, status episode.Status) error {
return c.ent.Episode.Update().Where(episode.MediaID(mediaID), episode.SeasonNumber(seasonNum)).SetStatus(status).Exec(context.TODO())
}
@@ -484,3 +547,100 @@ func (c *Client) SetSeasonAllEpisodeStatus(mediaID, seasonNum int, status episod
func (c *Client) TmdbIdInWatchlist(tmdb_id int) bool {
return c.ent.Media.Query().Where(media.TmdbID(tmdb_id)).CountX(context.TODO()) > 0
}

func (c *Client) GetDownloadHistory(mediaID int) ([]*ent.History, error) {
return c.ent.History.Query().Where(history.MediaID(mediaID)).All(context.TODO())
}

func (c *Client) GetMovieDummyEpisode(movieId int) (*ent.Episode, error) {
_, err := c.ent.Media.Query().Where(media.ID(movieId), media.MediaTypeEQ(media.MediaTypeMovie)).First(context.TODO())
if err != nil {
return nil, errors.Wrap(err, "get movie")
}
ep, err := c.ent.Episode.Query().Where(episode.MediaID(movieId)).First(context.TODO())
if err != nil {
return nil, errors.Wrap(err, "query episode")
}
return ep, nil
}

func (c *Client) GetDownloadClient(id int) (*ent.DownloadClients, error) {
return c.ent.DownloadClients.Query().Where(downloadclients.ID(id)).First(context.Background())
}

func (c *Client) SetEpisodeMonitoring(id int, b bool) error {
return c.ent.Episode.Update().Where(episode.ID(id)).SetMonitored(b).Exec(context.Background())
}

type EditMediaData struct {
ID int `json:"id"`
Resolution media.Resolution `json:"resolution"`
TargetDir string `json:"target_dir"`
Limiter schema.MediaLimiter `json:"limiter"`
}

func (c *Client) EditMediaMetadata(in EditMediaData) error {
return c.ent.Media.Update().Where(media.ID(in.ID)).SetResolution(in.Resolution).SetTargetDir(in.TargetDir).SetLimiter(in.Limiter).
Exec(context.Background())
}

func (c *Client) UpdateEpisodeTargetFile(id int, filename string) error {
return c.ent.Episode.Update().Where(episode.ID(id)).SetTargetFile(filename).Exec(context.Background())
}

func (c *Client) GetSeasonEpisodes(mediaId, seasonNum int) ([]*ent.Episode, error) {
return c.ent.Episode.Query().Where(episode.MediaID(mediaId), episode.SeasonNumber(seasonNum)).All(context.Background())
}

func (c *Client) GetAllImportLists() ([]*ent.ImportList, error) {
return c.ent.ImportList.Query().All(context.Background())
}

func (c *Client) AddImportlist(il *ent.ImportList) error {
count, err := c.ent.ImportList.Query().Where(importlist.Name(il.Name)).Count(context.Background())
if err != nil {
return err
}
if count > 0 {
//edit exist record
return c.ent.ImportList.Update().Where(importlist.Name(il.Name)).
SetURL(il.URL).SetQulity(il.Qulity).SetType(il.Type).SetStorageID(il.StorageID).Exec(context.Background())
}
return c.ent.ImportList.Create().SetName(il.Name).SetURL(il.URL).SetQulity(il.Qulity).SetStorageID(il.StorageID).
SetType(il.Type).Exec(context.Background())
}

func (c *Client) DeleteImportlist(id int) error {
return c.ent.ImportList.DeleteOneID(id).Exec(context.TODO())
}

func (c *Client) GetSizeLimiter() (*SizeLimiter, error) {
v := c.GetSetting(SetttingSizeLimiter)
var limiter SizeLimiter
err := json.Unmarshal([]byte(v), &limiter)
return &limiter, err
}

func (c *Client) SetSizeLimiter(limiter *SizeLimiter) error {
data, err := json.Marshal(limiter)
if err != nil {
return err
}
return c.SetSetting(SetttingSizeLimiter, string(data))
}

func (c *Client) GetTvNamingFormat() string {
s := c.GetSetting(SettingTvNamingFormat)
if s == "" {
return DefaultNamingFormat
}
return s
}

func (c *Client) GetMovingNamingFormat() string {
s := c.GetSetting(SettingMovieNamingFormat)
if s == "" {
return DefaultNamingFormat
}
return s
}
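The `GetSizeLimiter`/`SetSizeLimiter` helpers above persist the limiter as JSON under the `size_limiter` setting key. A small sketch of that round trip, with the types copied from db/const.go and made-up size values:

```go
// size_limiter_example.go: what SetSizeLimiter stores as the setting value.
package main

import (
	"encoding/json"
	"fmt"
)

// Limiter and SizeLimiter mirror the types added in db/const.go.
type Limiter struct {
	Max int `json:"max"`
	Min int `json:"min"`
}

type SizeLimiter struct {
	R720p  Limiter `json:"720p"`
	R1080p Limiter `json:"1080p"`
	R2160p Limiter `json:"2160p"`
}

func main() {
	// The numbers are arbitrary example values, not project defaults.
	l := SizeLimiter{
		R720p:  Limiter{Min: 200, Max: 2000},
		R1080p: Limiter{Min: 500, Max: 5000},
		R2160p: Limiter{Min: 2000, Max: 20000},
	}
	data, _ := json.Marshal(l)
	fmt.Println(string(data))
	// {"720p":{"max":2000,"min":200},"1080p":{"max":5000,"min":500},"2160p":{"max":20000,"min":2000}}
}
```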
db/notification.go (new file, 97 lines)

@@ -0,0 +1,97 @@
package db

import (
"context"
"encoding/json"
"polaris/ent"
"polaris/ent/notificationclient"
"polaris/pkg/notifier"
"strings"

"github.com/pkg/errors"
)

func (c *Client) GetAllNotificationClients2() ([]*ent.NotificationClient, error) {
return c.ent.NotificationClient.Query().All(context.TODO())
}

func (c *Client) GetAllNotificationClients() ([]*NotificationClient, error) {
all, err := c.ent.NotificationClient.Query().All(context.TODO())
if err != nil {
return nil, errors.Wrap(err, "query db")
}
var all1 []*NotificationClient
for _, item := range all {
cl, err := toNotificationClient(item)
if err != nil {
return nil, errors.Wrap(err, "convert")
}
all1 = append(all1, cl)
}
return all1, nil
}

func (c *Client) AddNotificationClient(name, service string, setting string, enabled bool) error {
// data, err := json.Marshal(setting)
// if err != nil {
// return errors.Wrap(err, "json")
// }
service = strings.ToLower(service)
count, err := c.ent.NotificationClient.Query().Where(notificationclient.Name(name)).Count(context.Background())
if err == nil && count > 0 {
//update exist one
return c.ent.NotificationClient.Update().Where(notificationclient.Name(name)).SetService(service).
SetSettings(setting).SetEnabled(enabled).Exec(context.Background())
}

return c.ent.NotificationClient.Create().SetName(name).SetService(service).
SetSettings(setting).SetEnabled(enabled).Exec(context.Background())
}

func (c *Client) DeleteNotificationClient(id int) error {
_, err := c.ent.NotificationClient.Delete().Where(notificationclient.ID(id)).Exec(context.Background())
return err
}

func (c *Client) GetNotificationClient(id int) (*NotificationClient, error) {
noti, err := c.ent.NotificationClient.Query().Where(notificationclient.ID(id)).First(context.Background())
if err != nil {
return nil, errors.Wrap(err, "query")
}

return toNotificationClient(noti)
}

func toNotificationClient(cl *ent.NotificationClient) (*NotificationClient, error) {
var settings interface{}
switch cl.Service {
case "pushover":
settings = notifier.PushoverConfig{}
case "dingtalk":
settings = notifier.DingTalkConfig{}
case "telegram":
settings = notifier.TelegramConfig{}
case "bark":
settings = notifier.BarkConfig{}
}
err := json.Unmarshal([]byte(cl.Settings), &settings)
if err != nil {
return nil, errors.Wrap(err, "json")
}
return &NotificationClient{
ID: cl.ID,
Name: cl.Name,
Service: cl.Service,
Enabled: cl.Enabled,
Settings: settings,
}, nil

}

type NotificationClient struct {
ID int `json:"id"`
Name string `json:"name"`
Service string `json:"service"`
Enabled bool `json:"enabled"`
Settings interface{} `json:"settings"`
}
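Notification settings are stored as a JSON string and decoded into a service-specific config in `toNotificationClient`. A hedged sketch of that round trip; `ExampleBarkConfig` and its fields are hypothetical stand-ins, since the real `notifier.BarkConfig` fields are not shown in this diff:

```go
// notification_settings.go: store-as-string / decode-to-typed-config round trip.
package main

import (
	"encoding/json"
	"fmt"
)

// ExampleBarkConfig is illustrative only; the actual field names live in polaris/pkg/notifier.
type ExampleBarkConfig struct {
	ServerURL string `json:"server_url"` // hypothetical field
	DeviceKey string `json:"device_key"` // hypothetical field
}

func main() {
	// What AddNotificationClient would receive as the `setting` string.
	raw := `{"server_url":"https://bark.example.com","device_key":"abc123"}`

	// What toNotificationClient does per service: pick a typed config, then unmarshal.
	var cfg ExampleBarkConfig
	if err := json.Unmarshal([]byte(raw), &cfg); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", cfg)
}
```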
New binary files (doc screenshots):
- doc/assets/add_indexer.png (92 KiB)
- doc/assets/add_series.png (804 KiB)
- doc/assets/anime_match.png (774 KiB)
- doc/assets/copy_feed.png (106 KiB)
- doc/assets/detail_page.png (2.6 MiB)
- doc/assets/downloader.png (60 KiB)
- doc/assets/jackett_api_key.png (115 KiB)
- doc/assets/local_storage.png (63 KiB)
- doc/assets/main_page.png (3.4 MiB)
- doc/assets/polaris_add_indexer.png (39 KiB)
- doc/assets/search_add.png (80 KiB)
- doc/assets/search_series.png (1.2 MiB)
- doc/assets/webdav_storage.png (80 KiB)

One image moved unchanged (111 KiB before and after).
doc/configuration.md (new file, 54 lines)

@@ -0,0 +1,54 @@
# 配置

要正确使用此程序,需要配置好以下设置:

### TMDB设置
1. 因为此程序需要使用到 TMDB 的数据,使用此程序首先要申请一个 TMDB 的 Api Key. 申请教程请 google [tmdb api key申请](https://www.google.com/search?q=tmdb+api+key%E7%94%B3%E8%AF%B7)

2. 拿到 TMDB Api Key之后,请填到 *设置 -> 常规设置 -> TMDB Api Key里*

### 索引器

索引器是资源提供者,目前支持 torznab 协议,意味着 polarr 或者 jackett 都可以支持。请自行部署相关程序,或者使用的 docker compose 配置一起拉起

推荐使用 linuxserver 的镜像:https://docs.linuxserver.io/images/docker-jackett/

#### 索引器配置

索引器配置这里以 jackett 为例。使用默认 docker compose 配置拉起后以 http://< ip >:9117 可访问 jackett 的主页。

1. 打开 jackett 主页后,点击页面上面的 Add indexer,会出现 BT/PT 站点列表,选择你需要的站点点击+号添加。如果是PT,请自行配置好相关配置





2. 添加后主页即会显示相应的BT/PT站点,点击 *Copy Torznab Feed* 即得到了我们需要的地址



3. 回到我们的主程序 Polaris 当中,点击 *设置 -> 索引器设置* -> 点击+号增加新的索引器,输入一个名称,拷贝我们第2步得到的地址到地址栏



4. 选相框中我们可以看到,还需要一个 API Key,我们回到 Jackett 中,在页面右上角,复制我们需要的 API Key:


5. 恭喜!你已经成功完成了索引器配置。如需要更多的站点,请重复相同的操作完成配置

### 下载器

资源下载器,目前可支持 tansmission,请配置好对应配置



### 存储设置

默认配置了名为 local 的本地存储,如果你不知道怎么配置。请使用默认配置



类型里选择 webdav 可以使用 webdav 存储,配合 alist/clouddrive 等,可以实现存储到云盘里的功能。

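Step 2's Torznab feed URL plus step 4's API key are everything an indexer client needs to run a search. A hedged sketch of such a query; the host, indexer id and key are placeholders, and the query parameters follow the common Torznab convention rather than anything confirmed by this repository:

```go
// torznab_search.go: issue a Torznab search against a Jackett-style feed URL.
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	// Placeholder feed URL: copy the real one from Jackett's "Copy Torznab Feed" button.
	feed := "http://jackett:9117/api/v2.0/indexers/example/results/torznab/"

	q := url.Values{}
	q.Set("apikey", "YOUR_JACKETT_API_KEY") // step 4: copied from the Jackett page
	q.Set("t", "search")                    // Torznab search function
	q.Set("q", "some show name")

	resp, err := http.Get(feed + "?" + q.Encode())
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body)) // RSS/XML list of matching releases
}
```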
doc/quick_start.md (new file, 69 lines)

@@ -0,0 +1,69 @@
## 快速开始

最简单部署 Polaris 的方式是使用 docker compose,Polaris要完整运行另外需要一个索引客户端和一个下载客户端。索引客户端支持 polarr 或 jackett,下载客户端目前只支持 transmission。

下面是一个示例 docker-compose 配置,为了简单起见,一起拉起了 transmission 和 jackett,你也可选择单独安装

**注意:** transmission 的下载路径映射要和 polaris 保持一致,如果您不知道怎么做,请保持默认设置。

```yaml
services:
  polaris:
    image: ghcr.io/simon-ding/polaris:latest
    restart: always
    volumes:
      - ./config/polaris:/app/data #程序配置文件路径
      - /downloads:/downloads #下载路径,需要和下载客户端配置一致
      - /data:/data #媒体数据存储路径,也可以启动自己配置webdav存储
    ports:
      - 8080:8080
  transmission: #下载客户端,也可以不安装使用已有的
    image: lscr.io/linuxserver/transmission:latest
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Asia/Shanghai
    volumes:
      - ./config/transmission:/config
      - /downloads:/downloads #此路径要与polaris下载路径保持一致
    ports:
      - 9091:9091
      - 51413:51413
      - 51413:51413/udp
  jackett: #索引客户端,也可以不安装使用已有的
    image: lscr.io/linuxserver/jackett:latest
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Asia/Shanghai
    volumes:
      - ./config/jackett:/config
    ports:
      - 9117:9117
    restart: unless-stopped
```

拉起之后访问 http://< ip >:8080 的形式访问



## 配置

详细配置请看 [配置篇](./configuration.md)

## 开始使用

1. 完成配置之后,我们就可以在右上角的搜索按钮里输入我们想看的电影、电视剧。


2. 找到对应电影电视剧后,点击加入想看列表


3. 当电影有资源、或者电视剧有更新时,程序就会自动下载对应资源到指定的存储。对于剧集,您也可以进入剧集的详细页面,点击搜索按钮来自己搜索对应集的资源。

到此,您已经基本掌握了此程序的使用方式,请尽情体验吧!
333
ent/client.go
@@ -14,8 +14,10 @@ import (
|
||||
"polaris/ent/downloadclients"
|
||||
"polaris/ent/episode"
|
||||
"polaris/ent/history"
|
||||
"polaris/ent/importlist"
|
||||
"polaris/ent/indexers"
|
||||
"polaris/ent/media"
|
||||
"polaris/ent/notificationclient"
|
||||
"polaris/ent/settings"
|
||||
"polaris/ent/storage"
|
||||
|
||||
@@ -36,10 +38,14 @@ type Client struct {
|
||||
Episode *EpisodeClient
|
||||
// History is the client for interacting with the History builders.
|
||||
History *HistoryClient
|
||||
// ImportList is the client for interacting with the ImportList builders.
|
||||
ImportList *ImportListClient
|
||||
// Indexers is the client for interacting with the Indexers builders.
|
||||
Indexers *IndexersClient
|
||||
// Media is the client for interacting with the Media builders.
|
||||
Media *MediaClient
|
||||
// NotificationClient is the client for interacting with the NotificationClient builders.
|
||||
NotificationClient *NotificationClientClient
|
||||
// Settings is the client for interacting with the Settings builders.
|
||||
Settings *SettingsClient
|
||||
// Storage is the client for interacting with the Storage builders.
|
||||
@@ -58,8 +64,10 @@ func (c *Client) init() {
|
||||
c.DownloadClients = NewDownloadClientsClient(c.config)
|
||||
c.Episode = NewEpisodeClient(c.config)
|
||||
c.History = NewHistoryClient(c.config)
|
||||
c.ImportList = NewImportListClient(c.config)
|
||||
c.Indexers = NewIndexersClient(c.config)
|
||||
c.Media = NewMediaClient(c.config)
|
||||
c.NotificationClient = NewNotificationClientClient(c.config)
|
||||
c.Settings = NewSettingsClient(c.config)
|
||||
c.Storage = NewStorageClient(c.config)
|
||||
}
|
||||
@@ -152,15 +160,17 @@ func (c *Client) Tx(ctx context.Context) (*Tx, error) {
|
||||
cfg := c.config
|
||||
cfg.driver = tx
|
||||
return &Tx{
|
||||
ctx: ctx,
|
||||
config: cfg,
|
||||
DownloadClients: NewDownloadClientsClient(cfg),
|
||||
Episode: NewEpisodeClient(cfg),
|
||||
History: NewHistoryClient(cfg),
|
||||
Indexers: NewIndexersClient(cfg),
|
||||
Media: NewMediaClient(cfg),
|
||||
Settings: NewSettingsClient(cfg),
|
||||
Storage: NewStorageClient(cfg),
|
||||
ctx: ctx,
|
||||
config: cfg,
|
||||
DownloadClients: NewDownloadClientsClient(cfg),
|
||||
Episode: NewEpisodeClient(cfg),
|
||||
History: NewHistoryClient(cfg),
|
||||
ImportList: NewImportListClient(cfg),
|
||||
Indexers: NewIndexersClient(cfg),
|
||||
Media: NewMediaClient(cfg),
|
||||
NotificationClient: NewNotificationClientClient(cfg),
|
||||
Settings: NewSettingsClient(cfg),
|
||||
Storage: NewStorageClient(cfg),
|
||||
}, nil
|
||||
}
|
||||
|
||||
@@ -178,15 +188,17 @@ func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error)
|
||||
cfg := c.config
|
||||
cfg.driver = &txDriver{tx: tx, drv: c.driver}
|
||||
return &Tx{
|
||||
ctx: ctx,
|
||||
config: cfg,
|
||||
DownloadClients: NewDownloadClientsClient(cfg),
|
||||
Episode: NewEpisodeClient(cfg),
|
||||
History: NewHistoryClient(cfg),
|
||||
Indexers: NewIndexersClient(cfg),
|
||||
Media: NewMediaClient(cfg),
|
||||
Settings: NewSettingsClient(cfg),
|
||||
Storage: NewStorageClient(cfg),
|
||||
ctx: ctx,
|
||||
config: cfg,
|
||||
DownloadClients: NewDownloadClientsClient(cfg),
|
||||
Episode: NewEpisodeClient(cfg),
|
||||
History: NewHistoryClient(cfg),
|
||||
ImportList: NewImportListClient(cfg),
|
||||
Indexers: NewIndexersClient(cfg),
|
||||
Media: NewMediaClient(cfg),
|
||||
NotificationClient: NewNotificationClientClient(cfg),
|
||||
Settings: NewSettingsClient(cfg),
|
||||
Storage: NewStorageClient(cfg),
|
||||
}, nil
|
||||
}
|
||||
|
||||
@@ -216,8 +228,8 @@ func (c *Client) Close() error {
|
||||
// In order to add hooks to a specific client, call: `client.Node.Use(...)`.
|
||||
func (c *Client) Use(hooks ...Hook) {
|
||||
for _, n := range []interface{ Use(...Hook) }{
|
||||
c.DownloadClients, c.Episode, c.History, c.Indexers, c.Media, c.Settings,
|
||||
c.Storage,
|
||||
c.DownloadClients, c.Episode, c.History, c.ImportList, c.Indexers, c.Media,
|
||||
c.NotificationClient, c.Settings, c.Storage,
|
||||
} {
|
||||
n.Use(hooks...)
|
||||
}
|
||||
@@ -227,8 +239,8 @@ func (c *Client) Use(hooks ...Hook) {
|
||||
// In order to add interceptors to a specific client, call: `client.Node.Intercept(...)`.
|
||||
func (c *Client) Intercept(interceptors ...Interceptor) {
|
||||
for _, n := range []interface{ Intercept(...Interceptor) }{
|
||||
c.DownloadClients, c.Episode, c.History, c.Indexers, c.Media, c.Settings,
|
||||
c.Storage,
|
||||
c.DownloadClients, c.Episode, c.History, c.ImportList, c.Indexers, c.Media,
|
||||
c.NotificationClient, c.Settings, c.Storage,
|
||||
} {
|
||||
n.Intercept(interceptors...)
|
||||
}
|
||||
@@ -243,10 +255,14 @@ func (c *Client) Mutate(ctx context.Context, m Mutation) (Value, error) {
|
||||
return c.Episode.mutate(ctx, m)
|
||||
case *HistoryMutation:
|
||||
return c.History.mutate(ctx, m)
|
||||
case *ImportListMutation:
|
||||
return c.ImportList.mutate(ctx, m)
|
||||
case *IndexersMutation:
|
||||
return c.Indexers.mutate(ctx, m)
|
||||
case *MediaMutation:
|
||||
return c.Media.mutate(ctx, m)
|
||||
case *NotificationClientMutation:
|
||||
return c.NotificationClient.mutate(ctx, m)
|
||||
case *SettingsMutation:
|
||||
return c.Settings.mutate(ctx, m)
|
||||
case *StorageMutation:
|
||||
@@ -671,6 +687,139 @@ func (c *HistoryClient) mutate(ctx context.Context, m *HistoryMutation) (Value,
|
||||
}
|
||||
}
|
||||
|
||||
// ImportListClient is a client for the ImportList schema.
|
||||
type ImportListClient struct {
|
||||
config
|
||||
}
|
||||
|
||||
// NewImportListClient returns a client for the ImportList from the given config.
|
||||
func NewImportListClient(c config) *ImportListClient {
|
||||
return &ImportListClient{config: c}
|
||||
}
|
||||
|
||||
// Use adds a list of mutation hooks to the hooks stack.
|
||||
// A call to `Use(f, g, h)` equals to `importlist.Hooks(f(g(h())))`.
|
||||
func (c *ImportListClient) Use(hooks ...Hook) {
|
||||
c.hooks.ImportList = append(c.hooks.ImportList, hooks...)
|
||||
}
|
||||
|
||||
// Intercept adds a list of query interceptors to the interceptors stack.
|
||||
// A call to `Intercept(f, g, h)` equals to `importlist.Intercept(f(g(h())))`.
|
||||
func (c *ImportListClient) Intercept(interceptors ...Interceptor) {
|
||||
c.inters.ImportList = append(c.inters.ImportList, interceptors...)
|
||||
}
|
||||
|
||||
// Create returns a builder for creating a ImportList entity.
|
||||
func (c *ImportListClient) Create() *ImportListCreate {
|
||||
mutation := newImportListMutation(c.config, OpCreate)
|
||||
return &ImportListCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||
}
|
||||
|
||||
// CreateBulk returns a builder for creating a bulk of ImportList entities.
|
||||
func (c *ImportListClient) CreateBulk(builders ...*ImportListCreate) *ImportListCreateBulk {
|
||||
return &ImportListCreateBulk{config: c.config, builders: builders}
|
||||
}
|
||||
|
||||
// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
|
||||
// a builder and applies setFunc on it.
|
||||
func (c *ImportListClient) MapCreateBulk(slice any, setFunc func(*ImportListCreate, int)) *ImportListCreateBulk {
|
||||
rv := reflect.ValueOf(slice)
|
||||
if rv.Kind() != reflect.Slice {
|
||||
return &ImportListCreateBulk{err: fmt.Errorf("calling to ImportListClient.MapCreateBulk with wrong type %T, need slice", slice)}
|
||||
}
|
||||
builders := make([]*ImportListCreate, rv.Len())
|
||||
for i := 0; i < rv.Len(); i++ {
|
||||
builders[i] = c.Create()
|
||||
setFunc(builders[i], i)
|
||||
}
|
||||
return &ImportListCreateBulk{config: c.config, builders: builders}
|
||||
}
|
||||
|
||||
// Update returns an update builder for ImportList.
|
||||
func (c *ImportListClient) Update() *ImportListUpdate {
|
||||
mutation := newImportListMutation(c.config, OpUpdate)
|
||||
return &ImportListUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||
}
|
||||
|
||||
// UpdateOne returns an update builder for the given entity.
|
||||
func (c *ImportListClient) UpdateOne(il *ImportList) *ImportListUpdateOne {
|
||||
mutation := newImportListMutation(c.config, OpUpdateOne, withImportList(il))
|
||||
return &ImportListUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||
}
|
||||
|
||||
// UpdateOneID returns an update builder for the given id.
|
||||
func (c *ImportListClient) UpdateOneID(id int) *ImportListUpdateOne {
|
||||
mutation := newImportListMutation(c.config, OpUpdateOne, withImportListID(id))
|
||||
return &ImportListUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||
}
|
||||
|
||||
// Delete returns a delete builder for ImportList.
|
||||
func (c *ImportListClient) Delete() *ImportListDelete {
|
||||
mutation := newImportListMutation(c.config, OpDelete)
|
||||
return &ImportListDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||
}
|
||||
|
||||
// DeleteOne returns a builder for deleting the given entity.
|
||||
func (c *ImportListClient) DeleteOne(il *ImportList) *ImportListDeleteOne {
|
||||
return c.DeleteOneID(il.ID)
|
||||
}
|
||||
|
||||
// DeleteOneID returns a builder for deleting the given entity by its id.
|
||||
func (c *ImportListClient) DeleteOneID(id int) *ImportListDeleteOne {
|
||||
builder := c.Delete().Where(importlist.ID(id))
|
||||
builder.mutation.id = &id
|
||||
builder.mutation.op = OpDeleteOne
|
||||
return &ImportListDeleteOne{builder}
|
||||
}
|
||||
|
||||
// Query returns a query builder for ImportList.
|
||||
func (c *ImportListClient) Query() *ImportListQuery {
|
||||
return &ImportListQuery{
|
||||
config: c.config,
|
||||
ctx: &QueryContext{Type: TypeImportList},
|
||||
inters: c.Interceptors(),
|
||||
}
|
||||
}
|
||||
|
||||
// Get returns a ImportList entity by its id.
|
||||
func (c *ImportListClient) Get(ctx context.Context, id int) (*ImportList, error) {
|
||||
return c.Query().Where(importlist.ID(id)).Only(ctx)
|
||||
}
|
||||
|
||||
// GetX is like Get, but panics if an error occurs.
|
||||
func (c *ImportListClient) GetX(ctx context.Context, id int) *ImportList {
|
||||
obj, err := c.Get(ctx, id)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return obj
|
||||
}
|
||||
|
||||
// Hooks returns the client hooks.
|
||||
func (c *ImportListClient) Hooks() []Hook {
|
||||
return c.hooks.ImportList
|
||||
}
|
||||
|
||||
// Interceptors returns the client interceptors.
|
||||
func (c *ImportListClient) Interceptors() []Interceptor {
|
||||
return c.inters.ImportList
|
||||
}
|
||||
|
||||
func (c *ImportListClient) mutate(ctx context.Context, m *ImportListMutation) (Value, error) {
|
||||
switch m.Op() {
|
||||
case OpCreate:
|
||||
return (&ImportListCreate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||
case OpUpdate:
|
||||
return (&ImportListUpdate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||
case OpUpdateOne:
|
||||
return (&ImportListUpdateOne{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||
case OpDelete, OpDeleteOne:
|
||||
return (&ImportListDelete{config: c.config, hooks: c.Hooks(), mutation: m}).Exec(ctx)
|
||||
default:
|
||||
return nil, fmt.Errorf("ent: unknown ImportList mutation op: %q", m.Op())
|
||||
}
|
||||
}
|
||||
|
||||
// IndexersClient is a client for the Indexers schema.
|
||||
type IndexersClient struct {
|
||||
config
|
||||
@@ -953,6 +1102,139 @@ func (c *MediaClient) mutate(ctx context.Context, m *MediaMutation) (Value, erro
|
||||
}
|
||||
}
|
||||
|
||||
// NotificationClientClient is a client for the NotificationClient schema.
|
||||
type NotificationClientClient struct {
|
||||
config
|
||||
}
|
||||
|
||||
// NewNotificationClientClient returns a client for the NotificationClient from the given config.
|
||||
func NewNotificationClientClient(c config) *NotificationClientClient {
|
||||
return &NotificationClientClient{config: c}
|
||||
}
|
||||
|
||||
// Use adds a list of mutation hooks to the hooks stack.
|
||||
// A call to `Use(f, g, h)` equals to `notificationclient.Hooks(f(g(h())))`.
|
||||
func (c *NotificationClientClient) Use(hooks ...Hook) {
|
||||
c.hooks.NotificationClient = append(c.hooks.NotificationClient, hooks...)
|
||||
}
|
||||
|
||||
// Intercept adds a list of query interceptors to the interceptors stack.
|
||||
// A call to `Intercept(f, g, h)` equals to `notificationclient.Intercept(f(g(h())))`.
|
||||
func (c *NotificationClientClient) Intercept(interceptors ...Interceptor) {
|
||||
c.inters.NotificationClient = append(c.inters.NotificationClient, interceptors...)
|
||||
}
|
||||
|
||||
// Create returns a builder for creating a NotificationClient entity.
|
||||
func (c *NotificationClientClient) Create() *NotificationClientCreate {
|
||||
mutation := newNotificationClientMutation(c.config, OpCreate)
|
||||
return &NotificationClientCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||
}
|
||||
|
||||
// CreateBulk returns a builder for creating a bulk of NotificationClient entities.
|
||||
func (c *NotificationClientClient) CreateBulk(builders ...*NotificationClientCreate) *NotificationClientCreateBulk {
|
||||
return &NotificationClientCreateBulk{config: c.config, builders: builders}
|
||||
}
|
||||
|
||||
// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
|
||||
// a builder and applies setFunc on it.
|
||||
func (c *NotificationClientClient) MapCreateBulk(slice any, setFunc func(*NotificationClientCreate, int)) *NotificationClientCreateBulk {
|
||||
rv := reflect.ValueOf(slice)
|
||||
if rv.Kind() != reflect.Slice {
|
||||
return &NotificationClientCreateBulk{err: fmt.Errorf("calling to NotificationClientClient.MapCreateBulk with wrong type %T, need slice", slice)}
|
||||
}
|
||||
builders := make([]*NotificationClientCreate, rv.Len())
|
||||
for i := 0; i < rv.Len(); i++ {
|
||||
builders[i] = c.Create()
|
||||
setFunc(builders[i], i)
|
||||
}
|
||||
return &NotificationClientCreateBulk{config: c.config, builders: builders}
|
||||
}
|
||||
|
||||
// Update returns an update builder for NotificationClient.
|
||||
func (c *NotificationClientClient) Update() *NotificationClientUpdate {
|
||||
mutation := newNotificationClientMutation(c.config, OpUpdate)
|
||||
return &NotificationClientUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||
}
|
||||
|
||||
// UpdateOne returns an update builder for the given entity.
|
||||
func (c *NotificationClientClient) UpdateOne(nc *NotificationClient) *NotificationClientUpdateOne {
|
||||
mutation := newNotificationClientMutation(c.config, OpUpdateOne, withNotificationClient(nc))
|
||||
return &NotificationClientUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||
}
|
||||
|
||||
// UpdateOneID returns an update builder for the given id.
|
||||
func (c *NotificationClientClient) UpdateOneID(id int) *NotificationClientUpdateOne {
|
||||
mutation := newNotificationClientMutation(c.config, OpUpdateOne, withNotificationClientID(id))
|
||||
return &NotificationClientUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||
}
|
||||
|
||||
// Delete returns a delete builder for NotificationClient.
|
||||
func (c *NotificationClientClient) Delete() *NotificationClientDelete {
|
||||
mutation := newNotificationClientMutation(c.config, OpDelete)
|
||||
return &NotificationClientDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||
}
|
||||
|
||||
// DeleteOne returns a builder for deleting the given entity.
|
||||
func (c *NotificationClientClient) DeleteOne(nc *NotificationClient) *NotificationClientDeleteOne {
|
||||
return c.DeleteOneID(nc.ID)
|
||||
}
|
||||
|
||||
// DeleteOneID returns a builder for deleting the given entity by its id.
|
||||
func (c *NotificationClientClient) DeleteOneID(id int) *NotificationClientDeleteOne {
|
||||
builder := c.Delete().Where(notificationclient.ID(id))
|
||||
builder.mutation.id = &id
|
||||
builder.mutation.op = OpDeleteOne
|
||||
return &NotificationClientDeleteOne{builder}
|
||||
}
|
||||
|
||||
// Query returns a query builder for NotificationClient.
|
||||
func (c *NotificationClientClient) Query() *NotificationClientQuery {
|
||||
return &NotificationClientQuery{
|
||||
config: c.config,
|
||||
ctx: &QueryContext{Type: TypeNotificationClient},
|
||||
inters: c.Interceptors(),
|
||||
}
|
||||
}
|
||||
|
||||
// Get returns a NotificationClient entity by its id.
|
||||
func (c *NotificationClientClient) Get(ctx context.Context, id int) (*NotificationClient, error) {
|
||||
return c.Query().Where(notificationclient.ID(id)).Only(ctx)
|
||||
}
|
||||
|
||||
// GetX is like Get, but panics if an error occurs.
|
||||
func (c *NotificationClientClient) GetX(ctx context.Context, id int) *NotificationClient {
|
||||
obj, err := c.Get(ctx, id)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return obj
|
||||
}
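Get returns (entity, error) while GetX panics on any error, so GetX only suits code paths where a missing row is a programming error. A hedged sketch of the non-panicking path, assuming an *ent.Client and the fmt and context imports:

func loadNotificationClientSketch(ctx context.Context, client *ent.Client, id int) (*ent.NotificationClient, error) {
	nc, err := client.NotificationClient.Get(ctx, id)
	if err != nil {
		return nil, fmt.Errorf("load notification client %d: %w", id, err)
	}
	return nc, nil
}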
|
||||
|
||||
// Hooks returns the client hooks.
|
||||
func (c *NotificationClientClient) Hooks() []Hook {
|
||||
return c.hooks.NotificationClient
|
||||
}
|
||||
|
||||
// Interceptors returns the client interceptors.
|
||||
func (c *NotificationClientClient) Interceptors() []Interceptor {
|
||||
return c.inters.NotificationClient
|
||||
}
|
||||
|
||||
func (c *NotificationClientClient) mutate(ctx context.Context, m *NotificationClientMutation) (Value, error) {
|
||||
switch m.Op() {
|
||||
case OpCreate:
|
||||
return (&NotificationClientCreate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||
case OpUpdate:
|
||||
return (&NotificationClientUpdate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||
case OpUpdateOne:
|
||||
return (&NotificationClientUpdateOne{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||
case OpDelete, OpDeleteOne:
|
||||
return (&NotificationClientDelete{config: c.config, hooks: c.Hooks(), mutation: m}).Exec(ctx)
|
||||
default:
|
||||
return nil, fmt.Errorf("ent: unknown NotificationClient mutation op: %q", m.Op())
|
||||
}
|
||||
}
|
||||
|
||||
// SettingsClient is a client for the Settings schema.
|
||||
type SettingsClient struct {
|
||||
config
@@ -1222,10 +1504,11 @@ func (c *StorageClient) mutate(ctx context.Context, m *StorageMutation) (Value,
 // hooks and interceptors per client, for fast access.
 type (
 	hooks struct {
-		DownloadClients, Episode, History, Indexers, Media, Settings, Storage []ent.Hook
+		DownloadClients, Episode, History, ImportList, Indexers, Media,
+		NotificationClient, Settings, Storage []ent.Hook
 	}
 	inters struct {
-		DownloadClients, Episode, History, Indexers, Media, Settings,
-		Storage []ent.Interceptor
+		DownloadClients, Episode, History, ImportList, Indexers, Media,
+		NotificationClient, Settings, Storage []ent.Interceptor
 	}
 )
18
ent/ent.go
@@ -9,8 +9,10 @@ import (
|
||||
"polaris/ent/downloadclients"
|
||||
"polaris/ent/episode"
|
||||
"polaris/ent/history"
|
||||
"polaris/ent/importlist"
|
||||
"polaris/ent/indexers"
|
||||
"polaris/ent/media"
|
||||
"polaris/ent/notificationclient"
|
||||
"polaris/ent/settings"
|
||||
"polaris/ent/storage"
|
||||
"reflect"
|
||||
@@ -79,13 +81,15 @@ var (
 func checkColumn(table, column string) error {
 	initCheck.Do(func() {
 		columnCheck = sql.NewColumnCheck(map[string]func(string) bool{
-			downloadclients.Table: downloadclients.ValidColumn,
-			episode.Table:         episode.ValidColumn,
-			history.Table:         history.ValidColumn,
-			indexers.Table:        indexers.ValidColumn,
-			media.Table:           media.ValidColumn,
-			settings.Table:        settings.ValidColumn,
-			storage.Table:         storage.ValidColumn,
+			downloadclients.Table:    downloadclients.ValidColumn,
+			episode.Table:            episode.ValidColumn,
+			history.Table:            history.ValidColumn,
+			importlist.Table:         importlist.ValidColumn,
+			indexers.Table:           indexers.ValidColumn,
+			media.Table:              media.ValidColumn,
+			notificationclient.Table: notificationclient.ValidColumn,
+			settings.Table:           settings.ValidColumn,
+			storage.Table:            storage.ValidColumn,
 		})
 	})
 	return columnCheck(table, column)
@@ -31,8 +31,10 @@ type Episode struct {
|
||||
AirDate string `json:"air_date,omitempty"`
|
||||
// Status holds the value of the "status" field.
|
||||
Status episode.Status `json:"status,omitempty"`
|
||||
// FileInStorage holds the value of the "file_in_storage" field.
|
||||
FileInStorage string `json:"file_in_storage,omitempty"`
|
||||
// Monitored holds the value of the "monitored" field.
|
||||
Monitored bool `json:"monitored"`
|
||||
// TargetFile holds the value of the "target_file" field.
|
||||
TargetFile string `json:"target_file,omitempty"`
|
||||
// Edges holds the relations/edges for other nodes in the graph.
|
||||
// The values are being populated by the EpisodeQuery when eager-loading is set.
|
||||
Edges EpisodeEdges `json:"edges"`
|
||||
@@ -64,9 +66,11 @@ func (*Episode) scanValues(columns []string) ([]any, error) {
|
||||
values := make([]any, len(columns))
|
||||
for i := range columns {
|
||||
switch columns[i] {
|
||||
case episode.FieldMonitored:
|
||||
values[i] = new(sql.NullBool)
|
||||
case episode.FieldID, episode.FieldMediaID, episode.FieldSeasonNumber, episode.FieldEpisodeNumber:
|
||||
values[i] = new(sql.NullInt64)
|
||||
case episode.FieldTitle, episode.FieldOverview, episode.FieldAirDate, episode.FieldStatus, episode.FieldFileInStorage:
|
||||
case episode.FieldTitle, episode.FieldOverview, episode.FieldAirDate, episode.FieldStatus, episode.FieldTargetFile:
|
||||
values[i] = new(sql.NullString)
|
||||
default:
|
||||
values[i] = new(sql.UnknownType)
|
||||
@@ -131,11 +135,17 @@ func (e *Episode) assignValues(columns []string, values []any) error {
|
||||
} else if value.Valid {
|
||||
e.Status = episode.Status(value.String)
|
||||
}
|
||||
case episode.FieldFileInStorage:
|
||||
if value, ok := values[i].(*sql.NullString); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field file_in_storage", values[i])
|
||||
case episode.FieldMonitored:
|
||||
if value, ok := values[i].(*sql.NullBool); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field monitored", values[i])
|
||||
} else if value.Valid {
|
||||
e.FileInStorage = value.String
|
||||
e.Monitored = value.Bool
|
||||
}
|
||||
case episode.FieldTargetFile:
|
||||
if value, ok := values[i].(*sql.NullString); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field target_file", values[i])
|
||||
} else if value.Valid {
|
||||
e.TargetFile = value.String
|
||||
}
|
||||
default:
|
||||
e.selectValues.Set(columns[i], values[i])
|
||||
@@ -199,8 +209,11 @@ func (e *Episode) String() string {
|
||||
builder.WriteString("status=")
|
||||
builder.WriteString(fmt.Sprintf("%v", e.Status))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("file_in_storage=")
|
||||
builder.WriteString(e.FileInStorage)
|
||||
builder.WriteString("monitored=")
|
||||
builder.WriteString(fmt.Sprintf("%v", e.Monitored))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("target_file=")
|
||||
builder.WriteString(e.TargetFile)
|
||||
builder.WriteByte(')')
|
||||
return builder.String()
|
||||
}
|
||||
|
||||
@@ -28,8 +28,10 @@ const (
|
||||
FieldAirDate = "air_date"
|
||||
// FieldStatus holds the string denoting the status field in the database.
|
||||
FieldStatus = "status"
|
||||
// FieldFileInStorage holds the string denoting the file_in_storage field in the database.
|
||||
FieldFileInStorage = "file_in_storage"
|
||||
// FieldMonitored holds the string denoting the monitored field in the database.
|
||||
FieldMonitored = "monitored"
|
||||
// FieldTargetFile holds the string denoting the target_file field in the database.
|
||||
FieldTargetFile = "target_file"
|
||||
// EdgeMedia holds the string denoting the media edge name in mutations.
|
||||
EdgeMedia = "media"
|
||||
// Table holds the table name of the episode in the database.
|
||||
@@ -53,7 +55,8 @@ var Columns = []string{
|
||||
FieldOverview,
|
||||
FieldAirDate,
|
||||
FieldStatus,
|
||||
FieldFileInStorage,
|
||||
FieldMonitored,
|
||||
FieldTargetFile,
|
||||
}
|
||||
|
||||
// ValidColumn reports if the column name is valid (part of the table columns).
|
||||
@@ -66,6 +69,11 @@ func ValidColumn(column string) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
var (
|
||||
// DefaultMonitored holds the default value on creation for the "monitored" field.
|
||||
DefaultMonitored bool
|
||||
)
|
||||
|
||||
// Status defines the type for the "status" enum field.
|
||||
type Status string
|
||||
|
||||
@@ -136,9 +144,14 @@ func ByStatus(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldStatus, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByFileInStorage orders the results by the file_in_storage field.
|
||||
func ByFileInStorage(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldFileInStorage, opts...).ToFunc()
|
||||
// ByMonitored orders the results by the monitored field.
|
||||
func ByMonitored(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldMonitored, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByTargetFile orders the results by the target_file field.
|
||||
func ByTargetFile(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldTargetFile, opts...).ToFunc()
|
||||
}
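A hedged query sketch using the new order helpers above: monitored episodes first, then by target_file. The *ent.Client, the generated Query().Order signature, and the imports ("polaris/ent", "polaris/ent/episode", "entgo.io/ent/dialect/sql", context) are assumptions based on the standard generated layout.

func listEpisodesByPrioritySketch(ctx context.Context, client *ent.Client) ([]*ent.Episode, error) {
	return client.Episode.Query().
		Order(episode.ByMonitored(sql.OrderDesc()), episode.ByTargetFile()).
		All(ctx)
}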
|
||||
|
||||
// ByMediaField orders the results by media field.
|
||||
|
||||
@@ -84,9 +84,14 @@ func AirDate(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldEQ(FieldAirDate, v))
|
||||
}
|
||||
|
||||
// FileInStorage applies equality check predicate on the "file_in_storage" field. It's identical to FileInStorageEQ.
|
||||
func FileInStorage(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldEQ(FieldFileInStorage, v))
|
||||
// Monitored applies equality check predicate on the "monitored" field. It's identical to MonitoredEQ.
|
||||
func Monitored(v bool) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldEQ(FieldMonitored, v))
|
||||
}
|
||||
|
||||
// TargetFile applies equality check predicate on the "target_file" field. It's identical to TargetFileEQ.
|
||||
func TargetFile(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldEQ(FieldTargetFile, v))
|
||||
}
|
||||
|
||||
// MediaIDEQ applies the EQ predicate on the "media_id" field.
|
||||
@@ -414,79 +419,89 @@ func StatusNotIn(vs ...Status) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldNotIn(FieldStatus, vs...))
|
||||
}
|
||||
|
||||
// FileInStorageEQ applies the EQ predicate on the "file_in_storage" field.
|
||||
func FileInStorageEQ(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldEQ(FieldFileInStorage, v))
|
||||
// MonitoredEQ applies the EQ predicate on the "monitored" field.
|
||||
func MonitoredEQ(v bool) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldEQ(FieldMonitored, v))
|
||||
}
|
||||
|
||||
// FileInStorageNEQ applies the NEQ predicate on the "file_in_storage" field.
|
||||
func FileInStorageNEQ(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldNEQ(FieldFileInStorage, v))
|
||||
// MonitoredNEQ applies the NEQ predicate on the "monitored" field.
|
||||
func MonitoredNEQ(v bool) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldNEQ(FieldMonitored, v))
|
||||
}
|
||||
|
||||
// FileInStorageIn applies the In predicate on the "file_in_storage" field.
|
||||
func FileInStorageIn(vs ...string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldIn(FieldFileInStorage, vs...))
|
||||
// TargetFileEQ applies the EQ predicate on the "target_file" field.
|
||||
func TargetFileEQ(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldEQ(FieldTargetFile, v))
|
||||
}
|
||||
|
||||
// FileInStorageNotIn applies the NotIn predicate on the "file_in_storage" field.
|
||||
func FileInStorageNotIn(vs ...string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldNotIn(FieldFileInStorage, vs...))
|
||||
// TargetFileNEQ applies the NEQ predicate on the "target_file" field.
|
||||
func TargetFileNEQ(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldNEQ(FieldTargetFile, v))
|
||||
}
|
||||
|
||||
// FileInStorageGT applies the GT predicate on the "file_in_storage" field.
|
||||
func FileInStorageGT(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldGT(FieldFileInStorage, v))
|
||||
// TargetFileIn applies the In predicate on the "target_file" field.
|
||||
func TargetFileIn(vs ...string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldIn(FieldTargetFile, vs...))
|
||||
}
|
||||
|
||||
// FileInStorageGTE applies the GTE predicate on the "file_in_storage" field.
|
||||
func FileInStorageGTE(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldGTE(FieldFileInStorage, v))
|
||||
// TargetFileNotIn applies the NotIn predicate on the "target_file" field.
|
||||
func TargetFileNotIn(vs ...string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldNotIn(FieldTargetFile, vs...))
|
||||
}
|
||||
|
||||
// FileInStorageLT applies the LT predicate on the "file_in_storage" field.
|
||||
func FileInStorageLT(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldLT(FieldFileInStorage, v))
|
||||
// TargetFileGT applies the GT predicate on the "target_file" field.
|
||||
func TargetFileGT(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldGT(FieldTargetFile, v))
|
||||
}
|
||||
|
||||
// FileInStorageLTE applies the LTE predicate on the "file_in_storage" field.
|
||||
func FileInStorageLTE(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldLTE(FieldFileInStorage, v))
|
||||
// TargetFileGTE applies the GTE predicate on the "target_file" field.
|
||||
func TargetFileGTE(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldGTE(FieldTargetFile, v))
|
||||
}
|
||||
|
||||
// FileInStorageContains applies the Contains predicate on the "file_in_storage" field.
|
||||
func FileInStorageContains(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldContains(FieldFileInStorage, v))
|
||||
// TargetFileLT applies the LT predicate on the "target_file" field.
|
||||
func TargetFileLT(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldLT(FieldTargetFile, v))
|
||||
}
|
||||
|
||||
// FileInStorageHasPrefix applies the HasPrefix predicate on the "file_in_storage" field.
|
||||
func FileInStorageHasPrefix(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldHasPrefix(FieldFileInStorage, v))
|
||||
// TargetFileLTE applies the LTE predicate on the "target_file" field.
|
||||
func TargetFileLTE(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldLTE(FieldTargetFile, v))
|
||||
}
|
||||
|
||||
// FileInStorageHasSuffix applies the HasSuffix predicate on the "file_in_storage" field.
|
||||
func FileInStorageHasSuffix(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldHasSuffix(FieldFileInStorage, v))
|
||||
// TargetFileContains applies the Contains predicate on the "target_file" field.
|
||||
func TargetFileContains(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldContains(FieldTargetFile, v))
|
||||
}
|
||||
|
||||
// FileInStorageIsNil applies the IsNil predicate on the "file_in_storage" field.
|
||||
func FileInStorageIsNil() predicate.Episode {
|
||||
return predicate.Episode(sql.FieldIsNull(FieldFileInStorage))
|
||||
// TargetFileHasPrefix applies the HasPrefix predicate on the "target_file" field.
|
||||
func TargetFileHasPrefix(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldHasPrefix(FieldTargetFile, v))
|
||||
}
|
||||
|
||||
// FileInStorageNotNil applies the NotNil predicate on the "file_in_storage" field.
|
||||
func FileInStorageNotNil() predicate.Episode {
|
||||
return predicate.Episode(sql.FieldNotNull(FieldFileInStorage))
|
||||
// TargetFileHasSuffix applies the HasSuffix predicate on the "target_file" field.
|
||||
func TargetFileHasSuffix(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldHasSuffix(FieldTargetFile, v))
|
||||
}
|
||||
|
||||
// FileInStorageEqualFold applies the EqualFold predicate on the "file_in_storage" field.
|
||||
func FileInStorageEqualFold(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldEqualFold(FieldFileInStorage, v))
|
||||
// TargetFileIsNil applies the IsNil predicate on the "target_file" field.
|
||||
func TargetFileIsNil() predicate.Episode {
|
||||
return predicate.Episode(sql.FieldIsNull(FieldTargetFile))
|
||||
}
|
||||
|
||||
// FileInStorageContainsFold applies the ContainsFold predicate on the "file_in_storage" field.
|
||||
func FileInStorageContainsFold(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldContainsFold(FieldFileInStorage, v))
|
||||
// TargetFileNotNil applies the NotNil predicate on the "target_file" field.
|
||||
func TargetFileNotNil() predicate.Episode {
|
||||
return predicate.Episode(sql.FieldNotNull(FieldTargetFile))
|
||||
}
|
||||
|
||||
// TargetFileEqualFold applies the EqualFold predicate on the "target_file" field.
|
||||
func TargetFileEqualFold(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldEqualFold(FieldTargetFile, v))
|
||||
}
|
||||
|
||||
// TargetFileContainsFold applies the ContainsFold predicate on the "target_file" field.
|
||||
func TargetFileContainsFold(v string) predicate.Episode {
|
||||
return predicate.Episode(sql.FieldContainsFold(FieldTargetFile, v))
|
||||
}
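A hedged example of combining the new predicates: find monitored episodes whose target_file has not been set yet. Monitored and TargetFileIsNil are the generated predicates shown above; the client wiring and imports are assumed.

func pendingEpisodesSketch(ctx context.Context, client *ent.Client) ([]*ent.Episode, error) {
	return client.Episode.Query().
		Where(episode.Monitored(true), episode.TargetFileIsNil()).
		All(ctx)
}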
|
||||
|
||||
// HasMedia applies the HasEdge predicate on the "media" edge.
|
||||
|
||||
@@ -78,16 +78,30 @@ func (ec *EpisodeCreate) SetNillableStatus(e *episode.Status) *EpisodeCreate {
|
||||
return ec
|
||||
}
|
||||
|
||||
// SetFileInStorage sets the "file_in_storage" field.
|
||||
func (ec *EpisodeCreate) SetFileInStorage(s string) *EpisodeCreate {
|
||||
ec.mutation.SetFileInStorage(s)
|
||||
// SetMonitored sets the "monitored" field.
|
||||
func (ec *EpisodeCreate) SetMonitored(b bool) *EpisodeCreate {
|
||||
ec.mutation.SetMonitored(b)
|
||||
return ec
|
||||
}
|
||||
|
||||
// SetNillableFileInStorage sets the "file_in_storage" field if the given value is not nil.
|
||||
func (ec *EpisodeCreate) SetNillableFileInStorage(s *string) *EpisodeCreate {
|
||||
// SetNillableMonitored sets the "monitored" field if the given value is not nil.
|
||||
func (ec *EpisodeCreate) SetNillableMonitored(b *bool) *EpisodeCreate {
|
||||
if b != nil {
|
||||
ec.SetMonitored(*b)
|
||||
}
|
||||
return ec
|
||||
}
|
||||
|
||||
// SetTargetFile sets the "target_file" field.
|
||||
func (ec *EpisodeCreate) SetTargetFile(s string) *EpisodeCreate {
|
||||
ec.mutation.SetTargetFile(s)
|
||||
return ec
|
||||
}
|
||||
|
||||
// SetNillableTargetFile sets the "target_file" field if the given value is not nil.
|
||||
func (ec *EpisodeCreate) SetNillableTargetFile(s *string) *EpisodeCreate {
|
||||
if s != nil {
|
||||
ec.SetFileInStorage(*s)
|
||||
ec.SetTargetFile(*s)
|
||||
}
|
||||
return ec
|
||||
}
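A hedged creation sketch using the new setters. SetMediaID and SetTitle are assumed pre-existing setters for fields visible elsewhere in this diff; client, ctx, and imports are likewise assumed.

func createEpisodeSketch(ctx context.Context, client *ent.Client) (*ent.Episode, error) {
	return client.Episode.Create().
		SetMediaID(1).               // assumed pre-existing setter
		SetTitle("Pilot").           // assumed pre-existing setter
		SetMonitored(true).          // new bool field; falls back to episode.DefaultMonitored when omitted
		SetTargetFile("S01E01.mkv"). // new optional string field
		Save(ctx)
}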
|
||||
@@ -136,6 +150,10 @@ func (ec *EpisodeCreate) defaults() {
|
||||
v := episode.DefaultStatus
|
||||
ec.mutation.SetStatus(v)
|
||||
}
|
||||
if _, ok := ec.mutation.Monitored(); !ok {
|
||||
v := episode.DefaultMonitored
|
||||
ec.mutation.SetMonitored(v)
|
||||
}
|
||||
}
|
||||
|
||||
// check runs all checks and user-defined validators on the builder.
|
||||
@@ -163,6 +181,9 @@ func (ec *EpisodeCreate) check() error {
|
||||
return &ValidationError{Name: "status", err: fmt.Errorf(`ent: validator failed for field "Episode.status": %w`, err)}
|
||||
}
|
||||
}
|
||||
if _, ok := ec.mutation.Monitored(); !ok {
|
||||
return &ValidationError{Name: "monitored", err: errors.New(`ent: missing required field "Episode.monitored"`)}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -213,9 +234,13 @@ func (ec *EpisodeCreate) createSpec() (*Episode, *sqlgraph.CreateSpec) {
|
||||
_spec.SetField(episode.FieldStatus, field.TypeEnum, value)
|
||||
_node.Status = value
|
||||
}
|
||||
if value, ok := ec.mutation.FileInStorage(); ok {
|
||||
_spec.SetField(episode.FieldFileInStorage, field.TypeString, value)
|
||||
_node.FileInStorage = value
|
||||
if value, ok := ec.mutation.Monitored(); ok {
|
||||
_spec.SetField(episode.FieldMonitored, field.TypeBool, value)
|
||||
_node.Monitored = value
|
||||
}
|
||||
if value, ok := ec.mutation.TargetFile(); ok {
|
||||
_spec.SetField(episode.FieldTargetFile, field.TypeString, value)
|
||||
_node.TargetFile = value
|
||||
}
|
||||
if nodes := ec.mutation.MediaIDs(); len(nodes) > 0 {
|
||||
edge := &sqlgraph.EdgeSpec{
|
||||
|
||||
@@ -146,23 +146,37 @@ func (eu *EpisodeUpdate) SetNillableStatus(e *episode.Status) *EpisodeUpdate {
|
||||
return eu
|
||||
}
|
||||
|
||||
// SetFileInStorage sets the "file_in_storage" field.
|
||||
func (eu *EpisodeUpdate) SetFileInStorage(s string) *EpisodeUpdate {
|
||||
eu.mutation.SetFileInStorage(s)
|
||||
// SetMonitored sets the "monitored" field.
|
||||
func (eu *EpisodeUpdate) SetMonitored(b bool) *EpisodeUpdate {
|
||||
eu.mutation.SetMonitored(b)
|
||||
return eu
|
||||
}
|
||||
|
||||
// SetNillableFileInStorage sets the "file_in_storage" field if the given value is not nil.
|
||||
func (eu *EpisodeUpdate) SetNillableFileInStorage(s *string) *EpisodeUpdate {
|
||||
if s != nil {
|
||||
eu.SetFileInStorage(*s)
|
||||
// SetNillableMonitored sets the "monitored" field if the given value is not nil.
|
||||
func (eu *EpisodeUpdate) SetNillableMonitored(b *bool) *EpisodeUpdate {
|
||||
if b != nil {
|
||||
eu.SetMonitored(*b)
|
||||
}
|
||||
return eu
|
||||
}
|
||||
|
||||
// ClearFileInStorage clears the value of the "file_in_storage" field.
|
||||
func (eu *EpisodeUpdate) ClearFileInStorage() *EpisodeUpdate {
|
||||
eu.mutation.ClearFileInStorage()
|
||||
// SetTargetFile sets the "target_file" field.
|
||||
func (eu *EpisodeUpdate) SetTargetFile(s string) *EpisodeUpdate {
|
||||
eu.mutation.SetTargetFile(s)
|
||||
return eu
|
||||
}
|
||||
|
||||
// SetNillableTargetFile sets the "target_file" field if the given value is not nil.
|
||||
func (eu *EpisodeUpdate) SetNillableTargetFile(s *string) *EpisodeUpdate {
|
||||
if s != nil {
|
||||
eu.SetTargetFile(*s)
|
||||
}
|
||||
return eu
|
||||
}
|
||||
|
||||
// ClearTargetFile clears the value of the "target_file" field.
|
||||
func (eu *EpisodeUpdate) ClearTargetFile() *EpisodeUpdate {
|
||||
eu.mutation.ClearTargetFile()
|
||||
return eu
|
||||
}
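A hedged bulk-update sketch: the new Set/Clear helpers compose with the predicate package as usual. The Where support on EpisodeUpdate and the MediaID/SeasonNumber predicates are assumed from the standard generated builder; only the helpers added in this diff (SetMonitored, ClearTargetFile) are shown above.

func unmonitorSeasonSketch(ctx context.Context, client *ent.Client, mediaID, season int) (int, error) {
	return client.Episode.Update().
		Where(episode.MediaID(mediaID), episode.SeasonNumber(season)). // assumed pre-existing predicates
		SetMonitored(false).
		ClearTargetFile().
		Save(ctx) // returns the number of updated rows
}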
|
||||
|
||||
@@ -255,11 +269,14 @@ func (eu *EpisodeUpdate) sqlSave(ctx context.Context) (n int, err error) {
|
||||
if value, ok := eu.mutation.Status(); ok {
|
||||
_spec.SetField(episode.FieldStatus, field.TypeEnum, value)
|
||||
}
|
||||
if value, ok := eu.mutation.FileInStorage(); ok {
|
||||
_spec.SetField(episode.FieldFileInStorage, field.TypeString, value)
|
||||
if value, ok := eu.mutation.Monitored(); ok {
|
||||
_spec.SetField(episode.FieldMonitored, field.TypeBool, value)
|
||||
}
|
||||
if eu.mutation.FileInStorageCleared() {
|
||||
_spec.ClearField(episode.FieldFileInStorage, field.TypeString)
|
||||
if value, ok := eu.mutation.TargetFile(); ok {
|
||||
_spec.SetField(episode.FieldTargetFile, field.TypeString, value)
|
||||
}
|
||||
if eu.mutation.TargetFileCleared() {
|
||||
_spec.ClearField(episode.FieldTargetFile, field.TypeString)
|
||||
}
|
||||
if eu.mutation.MediaCleared() {
|
||||
edge := &sqlgraph.EdgeSpec{
|
||||
@@ -428,23 +445,37 @@ func (euo *EpisodeUpdateOne) SetNillableStatus(e *episode.Status) *EpisodeUpdate
|
||||
return euo
|
||||
}
|
||||
|
||||
// SetFileInStorage sets the "file_in_storage" field.
|
||||
func (euo *EpisodeUpdateOne) SetFileInStorage(s string) *EpisodeUpdateOne {
|
||||
euo.mutation.SetFileInStorage(s)
|
||||
// SetMonitored sets the "monitored" field.
|
||||
func (euo *EpisodeUpdateOne) SetMonitored(b bool) *EpisodeUpdateOne {
|
||||
euo.mutation.SetMonitored(b)
|
||||
return euo
|
||||
}
|
||||
|
||||
// SetNillableFileInStorage sets the "file_in_storage" field if the given value is not nil.
|
||||
func (euo *EpisodeUpdateOne) SetNillableFileInStorage(s *string) *EpisodeUpdateOne {
|
||||
if s != nil {
|
||||
euo.SetFileInStorage(*s)
|
||||
// SetNillableMonitored sets the "monitored" field if the given value is not nil.
|
||||
func (euo *EpisodeUpdateOne) SetNillableMonitored(b *bool) *EpisodeUpdateOne {
|
||||
if b != nil {
|
||||
euo.SetMonitored(*b)
|
||||
}
|
||||
return euo
|
||||
}
|
||||
|
||||
// ClearFileInStorage clears the value of the "file_in_storage" field.
|
||||
func (euo *EpisodeUpdateOne) ClearFileInStorage() *EpisodeUpdateOne {
|
||||
euo.mutation.ClearFileInStorage()
|
||||
// SetTargetFile sets the "target_file" field.
|
||||
func (euo *EpisodeUpdateOne) SetTargetFile(s string) *EpisodeUpdateOne {
|
||||
euo.mutation.SetTargetFile(s)
|
||||
return euo
|
||||
}
|
||||
|
||||
// SetNillableTargetFile sets the "target_file" field if the given value is not nil.
|
||||
func (euo *EpisodeUpdateOne) SetNillableTargetFile(s *string) *EpisodeUpdateOne {
|
||||
if s != nil {
|
||||
euo.SetTargetFile(*s)
|
||||
}
|
||||
return euo
|
||||
}
|
||||
|
||||
// ClearTargetFile clears the value of the "target_file" field.
|
||||
func (euo *EpisodeUpdateOne) ClearTargetFile() *EpisodeUpdateOne {
|
||||
euo.mutation.ClearTargetFile()
|
||||
return euo
|
||||
}
|
||||
|
||||
@@ -567,11 +598,14 @@ func (euo *EpisodeUpdateOne) sqlSave(ctx context.Context) (_node *Episode, err e
|
||||
if value, ok := euo.mutation.Status(); ok {
|
||||
_spec.SetField(episode.FieldStatus, field.TypeEnum, value)
|
||||
}
|
||||
if value, ok := euo.mutation.FileInStorage(); ok {
|
||||
_spec.SetField(episode.FieldFileInStorage, field.TypeString, value)
|
||||
if value, ok := euo.mutation.Monitored(); ok {
|
||||
_spec.SetField(episode.FieldMonitored, field.TypeBool, value)
|
||||
}
|
||||
if euo.mutation.FileInStorageCleared() {
|
||||
_spec.ClearField(episode.FieldFileInStorage, field.TypeString)
|
||||
if value, ok := euo.mutation.TargetFile(); ok {
|
||||
_spec.SetField(episode.FieldTargetFile, field.TypeString, value)
|
||||
}
|
||||
if euo.mutation.TargetFileCleared() {
|
||||
_spec.ClearField(episode.FieldTargetFile, field.TypeString)
|
||||
}
|
||||
if euo.mutation.MediaCleared() {
|
||||
edge := &sqlgraph.EdgeSpec{
|
||||
|
||||
@@ -29,6 +29,10 @@ type History struct {
|
||||
TargetDir string `json:"target_dir,omitempty"`
|
||||
// Size holds the value of the "size" field.
|
||||
Size int `json:"size,omitempty"`
|
||||
// DownloadClientID holds the value of the "download_client_id" field.
|
||||
DownloadClientID int `json:"download_client_id,omitempty"`
|
||||
// IndexerID holds the value of the "indexer_id" field.
|
||||
IndexerID int `json:"indexer_id,omitempty"`
|
||||
// Status holds the value of the "status" field.
|
||||
Status history.Status `json:"status,omitempty"`
|
||||
// Saved holds the value of the "saved" field.
|
||||
@@ -41,7 +45,7 @@ func (*History) scanValues(columns []string) ([]any, error) {
|
||||
values := make([]any, len(columns))
|
||||
for i := range columns {
|
||||
switch columns[i] {
|
||||
case history.FieldID, history.FieldMediaID, history.FieldEpisodeID, history.FieldSize:
|
||||
case history.FieldID, history.FieldMediaID, history.FieldEpisodeID, history.FieldSize, history.FieldDownloadClientID, history.FieldIndexerID:
|
||||
values[i] = new(sql.NullInt64)
|
||||
case history.FieldSourceTitle, history.FieldTargetDir, history.FieldStatus, history.FieldSaved:
|
||||
values[i] = new(sql.NullString)
|
||||
@@ -104,6 +108,18 @@ func (h *History) assignValues(columns []string, values []any) error {
|
||||
} else if value.Valid {
|
||||
h.Size = int(value.Int64)
|
||||
}
|
||||
case history.FieldDownloadClientID:
|
||||
if value, ok := values[i].(*sql.NullInt64); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field download_client_id", values[i])
|
||||
} else if value.Valid {
|
||||
h.DownloadClientID = int(value.Int64)
|
||||
}
|
||||
case history.FieldIndexerID:
|
||||
if value, ok := values[i].(*sql.NullInt64); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field indexer_id", values[i])
|
||||
} else if value.Valid {
|
||||
h.IndexerID = int(value.Int64)
|
||||
}
|
||||
case history.FieldStatus:
|
||||
if value, ok := values[i].(*sql.NullString); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field status", values[i])
|
||||
@@ -170,6 +186,12 @@ func (h *History) String() string {
|
||||
builder.WriteString("size=")
|
||||
builder.WriteString(fmt.Sprintf("%v", h.Size))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("download_client_id=")
|
||||
builder.WriteString(fmt.Sprintf("%v", h.DownloadClientID))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("indexer_id=")
|
||||
builder.WriteString(fmt.Sprintf("%v", h.IndexerID))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("status=")
|
||||
builder.WriteString(fmt.Sprintf("%v", h.Status))
|
||||
builder.WriteString(", ")
|
||||
|
||||
@@ -25,6 +25,10 @@ const (
|
||||
FieldTargetDir = "target_dir"
|
||||
// FieldSize holds the string denoting the size field in the database.
|
||||
FieldSize = "size"
|
||||
// FieldDownloadClientID holds the string denoting the download_client_id field in the database.
|
||||
FieldDownloadClientID = "download_client_id"
|
||||
// FieldIndexerID holds the string denoting the indexer_id field in the database.
|
||||
FieldIndexerID = "indexer_id"
|
||||
// FieldStatus holds the string denoting the status field in the database.
|
||||
FieldStatus = "status"
|
||||
// FieldSaved holds the string denoting the saved field in the database.
|
||||
@@ -42,6 +46,8 @@ var Columns = []string{
|
||||
FieldDate,
|
||||
FieldTargetDir,
|
||||
FieldSize,
|
||||
FieldDownloadClientID,
|
||||
FieldIndexerID,
|
||||
FieldStatus,
|
||||
FieldSaved,
|
||||
}
|
||||
@@ -70,6 +76,7 @@ const (
|
||||
StatusSuccess Status = "success"
|
||||
StatusFail Status = "fail"
|
||||
StatusUploading Status = "uploading"
|
||||
StatusSeeding Status = "seeding"
|
||||
)
|
||||
|
||||
func (s Status) String() string {
|
||||
@@ -79,7 +86,7 @@ func (s Status) String() string {
|
||||
// StatusValidator is a validator for the "status" field enum values. It is called by the builders before save.
|
||||
func StatusValidator(s Status) error {
|
||||
switch s {
|
||||
case StatusRunning, StatusSuccess, StatusFail, StatusUploading:
|
||||
case StatusRunning, StatusSuccess, StatusFail, StatusUploading, StatusSeeding:
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("history: invalid enum value for status field: %q", s)
|
||||
@@ -124,6 +131,16 @@ func BySize(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldSize, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByDownloadClientID orders the results by the download_client_id field.
|
||||
func ByDownloadClientID(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldDownloadClientID, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByIndexerID orders the results by the indexer_id field.
|
||||
func ByIndexerID(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldIndexerID, opts...).ToFunc()
|
||||
}
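A hedged query sketch using the new history order helpers: most recent download client first, then by indexer id. Client wiring, the Order signature, and the sql dialect import are assumptions.

func historyByClientSketch(ctx context.Context, client *ent.Client) ([]*ent.History, error) {
	return client.History.Query().
		Order(history.ByDownloadClientID(sql.OrderDesc()), history.ByIndexerID()).
		All(ctx)
}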
|
||||
|
||||
// ByStatus orders the results by the status field.
|
||||
func ByStatus(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldStatus, opts...).ToFunc()
|
||||
|
||||
@@ -84,6 +84,16 @@ func Size(v int) predicate.History {
|
||||
return predicate.History(sql.FieldEQ(FieldSize, v))
|
||||
}
|
||||
|
||||
// DownloadClientID applies equality check predicate on the "download_client_id" field. It's identical to DownloadClientIDEQ.
|
||||
func DownloadClientID(v int) predicate.History {
|
||||
return predicate.History(sql.FieldEQ(FieldDownloadClientID, v))
|
||||
}
|
||||
|
||||
// IndexerID applies equality check predicate on the "indexer_id" field. It's identical to IndexerIDEQ.
|
||||
func IndexerID(v int) predicate.History {
|
||||
return predicate.History(sql.FieldEQ(FieldIndexerID, v))
|
||||
}
|
||||
|
||||
// Saved applies equality check predicate on the "saved" field. It's identical to SavedEQ.
|
||||
func Saved(v string) predicate.History {
|
||||
return predicate.History(sql.FieldEQ(FieldSaved, v))
|
||||
@@ -389,6 +399,106 @@ func SizeLTE(v int) predicate.History {
|
||||
return predicate.History(sql.FieldLTE(FieldSize, v))
|
||||
}
|
||||
|
||||
// DownloadClientIDEQ applies the EQ predicate on the "download_client_id" field.
|
||||
func DownloadClientIDEQ(v int) predicate.History {
|
||||
return predicate.History(sql.FieldEQ(FieldDownloadClientID, v))
|
||||
}
|
||||
|
||||
// DownloadClientIDNEQ applies the NEQ predicate on the "download_client_id" field.
|
||||
func DownloadClientIDNEQ(v int) predicate.History {
|
||||
return predicate.History(sql.FieldNEQ(FieldDownloadClientID, v))
|
||||
}
|
||||
|
||||
// DownloadClientIDIn applies the In predicate on the "download_client_id" field.
|
||||
func DownloadClientIDIn(vs ...int) predicate.History {
|
||||
return predicate.History(sql.FieldIn(FieldDownloadClientID, vs...))
|
||||
}
|
||||
|
||||
// DownloadClientIDNotIn applies the NotIn predicate on the "download_client_id" field.
|
||||
func DownloadClientIDNotIn(vs ...int) predicate.History {
|
||||
return predicate.History(sql.FieldNotIn(FieldDownloadClientID, vs...))
|
||||
}
|
||||
|
||||
// DownloadClientIDGT applies the GT predicate on the "download_client_id" field.
|
||||
func DownloadClientIDGT(v int) predicate.History {
|
||||
return predicate.History(sql.FieldGT(FieldDownloadClientID, v))
|
||||
}
|
||||
|
||||
// DownloadClientIDGTE applies the GTE predicate on the "download_client_id" field.
|
||||
func DownloadClientIDGTE(v int) predicate.History {
|
||||
return predicate.History(sql.FieldGTE(FieldDownloadClientID, v))
|
||||
}
|
||||
|
||||
// DownloadClientIDLT applies the LT predicate on the "download_client_id" field.
|
||||
func DownloadClientIDLT(v int) predicate.History {
|
||||
return predicate.History(sql.FieldLT(FieldDownloadClientID, v))
|
||||
}
|
||||
|
||||
// DownloadClientIDLTE applies the LTE predicate on the "download_client_id" field.
|
||||
func DownloadClientIDLTE(v int) predicate.History {
|
||||
return predicate.History(sql.FieldLTE(FieldDownloadClientID, v))
|
||||
}
|
||||
|
||||
// DownloadClientIDIsNil applies the IsNil predicate on the "download_client_id" field.
|
||||
func DownloadClientIDIsNil() predicate.History {
|
||||
return predicate.History(sql.FieldIsNull(FieldDownloadClientID))
|
||||
}
|
||||
|
||||
// DownloadClientIDNotNil applies the NotNil predicate on the "download_client_id" field.
|
||||
func DownloadClientIDNotNil() predicate.History {
|
||||
return predicate.History(sql.FieldNotNull(FieldDownloadClientID))
|
||||
}
|
||||
|
||||
// IndexerIDEQ applies the EQ predicate on the "indexer_id" field.
|
||||
func IndexerIDEQ(v int) predicate.History {
|
||||
return predicate.History(sql.FieldEQ(FieldIndexerID, v))
|
||||
}
|
||||
|
||||
// IndexerIDNEQ applies the NEQ predicate on the "indexer_id" field.
|
||||
func IndexerIDNEQ(v int) predicate.History {
|
||||
return predicate.History(sql.FieldNEQ(FieldIndexerID, v))
|
||||
}
|
||||
|
||||
// IndexerIDIn applies the In predicate on the "indexer_id" field.
|
||||
func IndexerIDIn(vs ...int) predicate.History {
|
||||
return predicate.History(sql.FieldIn(FieldIndexerID, vs...))
|
||||
}
|
||||
|
||||
// IndexerIDNotIn applies the NotIn predicate on the "indexer_id" field.
|
||||
func IndexerIDNotIn(vs ...int) predicate.History {
|
||||
return predicate.History(sql.FieldNotIn(FieldIndexerID, vs...))
|
||||
}
|
||||
|
||||
// IndexerIDGT applies the GT predicate on the "indexer_id" field.
|
||||
func IndexerIDGT(v int) predicate.History {
|
||||
return predicate.History(sql.FieldGT(FieldIndexerID, v))
|
||||
}
|
||||
|
||||
// IndexerIDGTE applies the GTE predicate on the "indexer_id" field.
|
||||
func IndexerIDGTE(v int) predicate.History {
|
||||
return predicate.History(sql.FieldGTE(FieldIndexerID, v))
|
||||
}
|
||||
|
||||
// IndexerIDLT applies the LT predicate on the "indexer_id" field.
|
||||
func IndexerIDLT(v int) predicate.History {
|
||||
return predicate.History(sql.FieldLT(FieldIndexerID, v))
|
||||
}
|
||||
|
||||
// IndexerIDLTE applies the LTE predicate on the "indexer_id" field.
|
||||
func IndexerIDLTE(v int) predicate.History {
|
||||
return predicate.History(sql.FieldLTE(FieldIndexerID, v))
|
||||
}
|
||||
|
||||
// IndexerIDIsNil applies the IsNil predicate on the "indexer_id" field.
|
||||
func IndexerIDIsNil() predicate.History {
|
||||
return predicate.History(sql.FieldIsNull(FieldIndexerID))
|
||||
}
|
||||
|
||||
// IndexerIDNotNil applies the NotNil predicate on the "indexer_id" field.
|
||||
func IndexerIDNotNil() predicate.History {
|
||||
return predicate.History(sql.FieldNotNull(FieldIndexerID))
|
||||
}
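A hedged filtering sketch combining the new download_client_id predicate with the new "seeding" status value; DownloadClientIDEQ, StatusEQ, and StatusSeeding all appear in this diff, while the client wiring and imports are assumed.

func seedingHistorySketch(ctx context.Context, client *ent.Client, downloadClientID int) ([]*ent.History, error) {
	return client.History.Query().
		Where(
			history.DownloadClientIDEQ(downloadClientID),
			history.StatusEQ(history.StatusSeeding),
		).
		All(ctx)
}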
|
||||
|
||||
// StatusEQ applies the EQ predicate on the "status" field.
|
||||
func StatusEQ(v Status) predicate.History {
|
||||
return predicate.History(sql.FieldEQ(FieldStatus, v))
|
||||
|
||||
@@ -72,6 +72,34 @@ func (hc *HistoryCreate) SetNillableSize(i *int) *HistoryCreate {
|
||||
return hc
|
||||
}
|
||||
|
||||
// SetDownloadClientID sets the "download_client_id" field.
|
||||
func (hc *HistoryCreate) SetDownloadClientID(i int) *HistoryCreate {
|
||||
hc.mutation.SetDownloadClientID(i)
|
||||
return hc
|
||||
}
|
||||
|
||||
// SetNillableDownloadClientID sets the "download_client_id" field if the given value is not nil.
|
||||
func (hc *HistoryCreate) SetNillableDownloadClientID(i *int) *HistoryCreate {
|
||||
if i != nil {
|
||||
hc.SetDownloadClientID(*i)
|
||||
}
|
||||
return hc
|
||||
}
|
||||
|
||||
// SetIndexerID sets the "indexer_id" field.
|
||||
func (hc *HistoryCreate) SetIndexerID(i int) *HistoryCreate {
|
||||
hc.mutation.SetIndexerID(i)
|
||||
return hc
|
||||
}
|
||||
|
||||
// SetNillableIndexerID sets the "indexer_id" field if the given value is not nil.
|
||||
func (hc *HistoryCreate) SetNillableIndexerID(i *int) *HistoryCreate {
|
||||
if i != nil {
|
||||
hc.SetIndexerID(*i)
|
||||
}
|
||||
return hc
|
||||
}
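A hedged creation sketch: the new optional ids can be set directly or through the Nillable variants when the value comes from a possibly-nil source. SetMediaID is an assumed pre-existing setter; client, ctx, and imports are assumptions.

func recordHistorySketch(ctx context.Context, client *ent.Client, indexerID *int) (*ent.History, error) {
	return client.History.Create().
		SetMediaID(1).                   // assumed pre-existing setter
		SetDownloadClientID(2).          // new optional field
		SetNillableIndexerID(indexerID). // new optional field; skipped when indexerID is nil
		SetStatus(history.StatusRunning).
		Save(ctx)
}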
|
||||
|
||||
// SetStatus sets the "status" field.
|
||||
func (hc *HistoryCreate) SetStatus(h history.Status) *HistoryCreate {
|
||||
hc.mutation.SetStatus(h)
|
||||
@@ -208,6 +236,14 @@ func (hc *HistoryCreate) createSpec() (*History, *sqlgraph.CreateSpec) {
|
||||
_spec.SetField(history.FieldSize, field.TypeInt, value)
|
||||
_node.Size = value
|
||||
}
|
||||
if value, ok := hc.mutation.DownloadClientID(); ok {
|
||||
_spec.SetField(history.FieldDownloadClientID, field.TypeInt, value)
|
||||
_node.DownloadClientID = value
|
||||
}
|
||||
if value, ok := hc.mutation.IndexerID(); ok {
|
||||
_spec.SetField(history.FieldIndexerID, field.TypeInt, value)
|
||||
_node.IndexerID = value
|
||||
}
|
||||
if value, ok := hc.mutation.Status(); ok {
|
||||
_spec.SetField(history.FieldStatus, field.TypeEnum, value)
|
||||
_node.Status = value
|
||||
|
||||
@@ -139,6 +139,60 @@ func (hu *HistoryUpdate) AddSize(i int) *HistoryUpdate {
|
||||
return hu
|
||||
}
|
||||
|
||||
// SetDownloadClientID sets the "download_client_id" field.
|
||||
func (hu *HistoryUpdate) SetDownloadClientID(i int) *HistoryUpdate {
|
||||
hu.mutation.ResetDownloadClientID()
|
||||
hu.mutation.SetDownloadClientID(i)
|
||||
return hu
|
||||
}
|
||||
|
||||
// SetNillableDownloadClientID sets the "download_client_id" field if the given value is not nil.
|
||||
func (hu *HistoryUpdate) SetNillableDownloadClientID(i *int) *HistoryUpdate {
|
||||
if i != nil {
|
||||
hu.SetDownloadClientID(*i)
|
||||
}
|
||||
return hu
|
||||
}
|
||||
|
||||
// AddDownloadClientID adds i to the "download_client_id" field.
|
||||
func (hu *HistoryUpdate) AddDownloadClientID(i int) *HistoryUpdate {
|
||||
hu.mutation.AddDownloadClientID(i)
|
||||
return hu
|
||||
}
|
||||
|
||||
// ClearDownloadClientID clears the value of the "download_client_id" field.
|
||||
func (hu *HistoryUpdate) ClearDownloadClientID() *HistoryUpdate {
|
||||
hu.mutation.ClearDownloadClientID()
|
||||
return hu
|
||||
}
|
||||
|
||||
// SetIndexerID sets the "indexer_id" field.
|
||||
func (hu *HistoryUpdate) SetIndexerID(i int) *HistoryUpdate {
|
||||
hu.mutation.ResetIndexerID()
|
||||
hu.mutation.SetIndexerID(i)
|
||||
return hu
|
||||
}
|
||||
|
||||
// SetNillableIndexerID sets the "indexer_id" field if the given value is not nil.
|
||||
func (hu *HistoryUpdate) SetNillableIndexerID(i *int) *HistoryUpdate {
|
||||
if i != nil {
|
||||
hu.SetIndexerID(*i)
|
||||
}
|
||||
return hu
|
||||
}
|
||||
|
||||
// AddIndexerID adds i to the "indexer_id" field.
|
||||
func (hu *HistoryUpdate) AddIndexerID(i int) *HistoryUpdate {
|
||||
hu.mutation.AddIndexerID(i)
|
||||
return hu
|
||||
}
|
||||
|
||||
// ClearIndexerID clears the value of the "indexer_id" field.
|
||||
func (hu *HistoryUpdate) ClearIndexerID() *HistoryUpdate {
|
||||
hu.mutation.ClearIndexerID()
|
||||
return hu
|
||||
}
|
||||
|
||||
// SetStatus sets the "status" field.
|
||||
func (hu *HistoryUpdate) SetStatus(h history.Status) *HistoryUpdate {
|
||||
hu.mutation.SetStatus(h)
|
||||
@@ -257,6 +311,24 @@ func (hu *HistoryUpdate) sqlSave(ctx context.Context) (n int, err error) {
|
||||
if value, ok := hu.mutation.AddedSize(); ok {
|
||||
_spec.AddField(history.FieldSize, field.TypeInt, value)
|
||||
}
|
||||
if value, ok := hu.mutation.DownloadClientID(); ok {
|
||||
_spec.SetField(history.FieldDownloadClientID, field.TypeInt, value)
|
||||
}
|
||||
if value, ok := hu.mutation.AddedDownloadClientID(); ok {
|
||||
_spec.AddField(history.FieldDownloadClientID, field.TypeInt, value)
|
||||
}
|
||||
if hu.mutation.DownloadClientIDCleared() {
|
||||
_spec.ClearField(history.FieldDownloadClientID, field.TypeInt)
|
||||
}
|
||||
if value, ok := hu.mutation.IndexerID(); ok {
|
||||
_spec.SetField(history.FieldIndexerID, field.TypeInt, value)
|
||||
}
|
||||
if value, ok := hu.mutation.AddedIndexerID(); ok {
|
||||
_spec.AddField(history.FieldIndexerID, field.TypeInt, value)
|
||||
}
|
||||
if hu.mutation.IndexerIDCleared() {
|
||||
_spec.ClearField(history.FieldIndexerID, field.TypeInt)
|
||||
}
|
||||
if value, ok := hu.mutation.Status(); ok {
|
||||
_spec.SetField(history.FieldStatus, field.TypeEnum, value)
|
||||
}
|
||||
@@ -397,6 +469,60 @@ func (huo *HistoryUpdateOne) AddSize(i int) *HistoryUpdateOne {
|
||||
return huo
|
||||
}
|
||||
|
||||
// SetDownloadClientID sets the "download_client_id" field.
|
||||
func (huo *HistoryUpdateOne) SetDownloadClientID(i int) *HistoryUpdateOne {
|
||||
huo.mutation.ResetDownloadClientID()
|
||||
huo.mutation.SetDownloadClientID(i)
|
||||
return huo
|
||||
}
|
||||
|
||||
// SetNillableDownloadClientID sets the "download_client_id" field if the given value is not nil.
|
||||
func (huo *HistoryUpdateOne) SetNillableDownloadClientID(i *int) *HistoryUpdateOne {
|
||||
if i != nil {
|
||||
huo.SetDownloadClientID(*i)
|
||||
}
|
||||
return huo
|
||||
}
|
||||
|
||||
// AddDownloadClientID adds i to the "download_client_id" field.
|
||||
func (huo *HistoryUpdateOne) AddDownloadClientID(i int) *HistoryUpdateOne {
|
||||
huo.mutation.AddDownloadClientID(i)
|
||||
return huo
|
||||
}
|
||||
|
||||
// ClearDownloadClientID clears the value of the "download_client_id" field.
|
||||
func (huo *HistoryUpdateOne) ClearDownloadClientID() *HistoryUpdateOne {
|
||||
huo.mutation.ClearDownloadClientID()
|
||||
return huo
|
||||
}
|
||||
|
||||
// SetIndexerID sets the "indexer_id" field.
|
||||
func (huo *HistoryUpdateOne) SetIndexerID(i int) *HistoryUpdateOne {
|
||||
huo.mutation.ResetIndexerID()
|
||||
huo.mutation.SetIndexerID(i)
|
||||
return huo
|
||||
}
|
||||
|
||||
// SetNillableIndexerID sets the "indexer_id" field if the given value is not nil.
|
||||
func (huo *HistoryUpdateOne) SetNillableIndexerID(i *int) *HistoryUpdateOne {
|
||||
if i != nil {
|
||||
huo.SetIndexerID(*i)
|
||||
}
|
||||
return huo
|
||||
}
|
||||
|
||||
// AddIndexerID adds i to the "indexer_id" field.
|
||||
func (huo *HistoryUpdateOne) AddIndexerID(i int) *HistoryUpdateOne {
|
||||
huo.mutation.AddIndexerID(i)
|
||||
return huo
|
||||
}
|
||||
|
||||
// ClearIndexerID clears the value of the "indexer_id" field.
|
||||
func (huo *HistoryUpdateOne) ClearIndexerID() *HistoryUpdateOne {
|
||||
huo.mutation.ClearIndexerID()
|
||||
return huo
|
||||
}
|
||||
|
||||
// SetStatus sets the "status" field.
|
||||
func (huo *HistoryUpdateOne) SetStatus(h history.Status) *HistoryUpdateOne {
|
||||
huo.mutation.SetStatus(h)
|
||||
@@ -545,6 +671,24 @@ func (huo *HistoryUpdateOne) sqlSave(ctx context.Context) (_node *History, err e
|
||||
if value, ok := huo.mutation.AddedSize(); ok {
|
||||
_spec.AddField(history.FieldSize, field.TypeInt, value)
|
||||
}
|
||||
if value, ok := huo.mutation.DownloadClientID(); ok {
|
||||
_spec.SetField(history.FieldDownloadClientID, field.TypeInt, value)
|
||||
}
|
||||
if value, ok := huo.mutation.AddedDownloadClientID(); ok {
|
||||
_spec.AddField(history.FieldDownloadClientID, field.TypeInt, value)
|
||||
}
|
||||
if huo.mutation.DownloadClientIDCleared() {
|
||||
_spec.ClearField(history.FieldDownloadClientID, field.TypeInt)
|
||||
}
|
||||
if value, ok := huo.mutation.IndexerID(); ok {
|
||||
_spec.SetField(history.FieldIndexerID, field.TypeInt, value)
|
||||
}
|
||||
if value, ok := huo.mutation.AddedIndexerID(); ok {
|
||||
_spec.AddField(history.FieldIndexerID, field.TypeInt, value)
|
||||
}
|
||||
if huo.mutation.IndexerIDCleared() {
|
||||
_spec.ClearField(history.FieldIndexerID, field.TypeInt)
|
||||
}
|
||||
if value, ok := huo.mutation.Status(); ok {
|
||||
_spec.SetField(history.FieldStatus, field.TypeEnum, value)
|
||||
}
|
||||
|
||||
@@ -44,6 +44,18 @@ func (f HistoryFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, err
|
||||
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.HistoryMutation", m)
|
||||
}
|
||||
|
||||
// The ImportListFunc type is an adapter to allow the use of ordinary
|
||||
// function as ImportList mutator.
|
||||
type ImportListFunc func(context.Context, *ent.ImportListMutation) (ent.Value, error)
|
||||
|
||||
// Mutate calls f(ctx, m).
|
||||
func (f ImportListFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
|
||||
if mv, ok := m.(*ent.ImportListMutation); ok {
|
||||
return f(ctx, mv)
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.ImportListMutation", m)
|
||||
}
|
||||
|
||||
// The IndexersFunc type is an adapter to allow the use of ordinary
|
||||
// function as Indexers mutator.
|
||||
type IndexersFunc func(context.Context, *ent.IndexersMutation) (ent.Value, error)
|
||||
@@ -68,6 +80,18 @@ func (f MediaFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error
|
||||
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.MediaMutation", m)
|
||||
}
|
||||
|
||||
// The NotificationClientFunc type is an adapter to allow the use of ordinary
|
||||
// function as NotificationClient mutator.
|
||||
type NotificationClientFunc func(context.Context, *ent.NotificationClientMutation) (ent.Value, error)
|
||||
|
||||
// Mutate calls f(ctx, m).
|
||||
func (f NotificationClientFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
|
||||
if mv, ok := m.(*ent.NotificationClientMutation); ok {
|
||||
return f(ctx, mv)
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.NotificationClientMutation", m)
|
||||
}
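A hedged sketch of wiring the new NotificationClientFunc adapter as a runtime hook. The client.NotificationClient.Use helper and the ent.Mutator/ent.Value aliases are assumed to follow the standard generated layout; only the logging body is illustrative. Assumed imports: context, log, "polaris/ent", "polaris/ent/hook".

func registerNotificationHookSketch(client *ent.Client) {
	client.NotificationClient.Use(func(next ent.Mutator) ent.Mutator {
		return hook.NotificationClientFunc(func(ctx context.Context, m *ent.NotificationClientMutation) (ent.Value, error) {
			log.Printf("notification client mutation: op=%s", m.Op())
			return next.Mutate(ctx, m) // hand off to the next mutator in the chain
		})
	})
}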
|
||||
|
||||
// The SettingsFunc type is an adapter to allow the use of ordinary
|
||||
// function as Settings mutator.
|
||||
type SettingsFunc func(context.Context, *ent.SettingsMutation) (ent.Value, error)
|
||||
|
||||
164
ent/importlist.go
Normal file
@@ -0,0 +1,164 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"polaris/ent/importlist"
|
||||
"polaris/ent/schema"
|
||||
"strings"
|
||||
|
||||
"entgo.io/ent"
|
||||
"entgo.io/ent/dialect/sql"
|
||||
)
|
||||
|
||||
// ImportList is the model entity for the ImportList schema.
|
||||
type ImportList struct {
|
||||
config `json:"-"`
|
||||
// ID of the ent.
|
||||
ID int `json:"id,omitempty"`
|
||||
// Name holds the value of the "name" field.
|
||||
Name string `json:"name,omitempty"`
|
||||
// Type holds the value of the "type" field.
|
||||
Type importlist.Type `json:"type,omitempty"`
|
||||
// URL holds the value of the "url" field.
|
||||
URL string `json:"url,omitempty"`
|
||||
// Qulity holds the value of the "qulity" field.
|
||||
Qulity string `json:"qulity,omitempty"`
|
||||
// StorageID holds the value of the "storage_id" field.
|
||||
StorageID int `json:"storage_id,omitempty"`
|
||||
// Settings holds the value of the "settings" field.
|
||||
Settings schema.ImportListSettings `json:"settings,omitempty"`
|
||||
selectValues sql.SelectValues
|
||||
}
|
||||
|
||||
// scanValues returns the types for scanning values from sql.Rows.
|
||||
func (*ImportList) scanValues(columns []string) ([]any, error) {
|
||||
values := make([]any, len(columns))
|
||||
for i := range columns {
|
||||
switch columns[i] {
|
||||
case importlist.FieldSettings:
|
||||
values[i] = new([]byte)
|
||||
case importlist.FieldID, importlist.FieldStorageID:
|
||||
values[i] = new(sql.NullInt64)
|
||||
case importlist.FieldName, importlist.FieldType, importlist.FieldURL, importlist.FieldQulity:
|
||||
values[i] = new(sql.NullString)
|
||||
default:
|
||||
values[i] = new(sql.UnknownType)
|
||||
}
|
||||
}
|
||||
return values, nil
|
||||
}
|
||||
|
||||
// assignValues assigns the values that were returned from sql.Rows (after scanning)
|
||||
// to the ImportList fields.
|
||||
func (il *ImportList) assignValues(columns []string, values []any) error {
|
||||
if m, n := len(values), len(columns); m < n {
|
||||
return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
|
||||
}
|
||||
for i := range columns {
|
||||
switch columns[i] {
|
||||
case importlist.FieldID:
|
||||
value, ok := values[i].(*sql.NullInt64)
|
||||
if !ok {
|
||||
return fmt.Errorf("unexpected type %T for field id", value)
|
||||
}
|
||||
il.ID = int(value.Int64)
|
||||
case importlist.FieldName:
|
||||
if value, ok := values[i].(*sql.NullString); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field name", values[i])
|
||||
} else if value.Valid {
|
||||
il.Name = value.String
|
||||
}
|
||||
case importlist.FieldType:
|
||||
if value, ok := values[i].(*sql.NullString); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field type", values[i])
|
||||
} else if value.Valid {
|
||||
il.Type = importlist.Type(value.String)
|
||||
}
|
||||
case importlist.FieldURL:
|
||||
if value, ok := values[i].(*sql.NullString); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field url", values[i])
|
||||
} else if value.Valid {
|
||||
il.URL = value.String
|
||||
}
|
||||
case importlist.FieldQulity:
|
||||
if value, ok := values[i].(*sql.NullString); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field qulity", values[i])
|
||||
} else if value.Valid {
|
||||
il.Qulity = value.String
|
||||
}
|
||||
case importlist.FieldStorageID:
|
||||
if value, ok := values[i].(*sql.NullInt64); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field storage_id", values[i])
|
||||
} else if value.Valid {
|
||||
il.StorageID = int(value.Int64)
|
||||
}
|
||||
case importlist.FieldSettings:
|
||||
if value, ok := values[i].(*[]byte); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field settings", values[i])
|
||||
} else if value != nil && len(*value) > 0 {
|
||||
if err := json.Unmarshal(*value, &il.Settings); err != nil {
|
||||
return fmt.Errorf("unmarshal field settings: %w", err)
|
||||
}
|
||||
}
|
||||
default:
|
||||
il.selectValues.Set(columns[i], values[i])
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Value returns the ent.Value that was dynamically selected and assigned to the ImportList.
|
||||
// This includes values selected through modifiers, order, etc.
|
||||
func (il *ImportList) Value(name string) (ent.Value, error) {
|
||||
return il.selectValues.Get(name)
|
||||
}
|
||||
|
||||
// Update returns a builder for updating this ImportList.
|
||||
// Note that you need to call ImportList.Unwrap() before calling this method if this ImportList
|
||||
// was returned from a transaction, and the transaction was committed or rolled back.
|
||||
func (il *ImportList) Update() *ImportListUpdateOne {
|
||||
return NewImportListClient(il.config).UpdateOne(il)
|
||||
}
|
||||
|
||||
// Unwrap unwraps the ImportList entity that was returned from a transaction after it was closed,
|
||||
// so that all future queries will be executed through the driver which created the transaction.
|
||||
func (il *ImportList) Unwrap() *ImportList {
|
||||
_tx, ok := il.config.driver.(*txDriver)
|
||||
if !ok {
|
||||
panic("ent: ImportList is not a transactional entity")
|
||||
}
|
||||
il.config.driver = _tx.drv
|
||||
return il
|
||||
}
|
||||
|
||||
// String implements the fmt.Stringer.
|
||||
func (il *ImportList) String() string {
|
||||
var builder strings.Builder
|
||||
builder.WriteString("ImportList(")
|
||||
builder.WriteString(fmt.Sprintf("id=%v, ", il.ID))
|
||||
builder.WriteString("name=")
|
||||
builder.WriteString(il.Name)
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("type=")
|
||||
builder.WriteString(fmt.Sprintf("%v", il.Type))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("url=")
|
||||
builder.WriteString(il.URL)
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("qulity=")
|
||||
builder.WriteString(il.Qulity)
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("storage_id=")
|
||||
builder.WriteString(fmt.Sprintf("%v", il.StorageID))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("settings=")
|
||||
builder.WriteString(fmt.Sprintf("%v", il.Settings))
|
||||
builder.WriteByte(')')
|
||||
return builder.String()
|
||||
}
|
||||
|
||||
// ImportLists is a parsable slice of ImportList.
|
||||
type ImportLists []*ImportList
|
||||
107
ent/importlist/importlist.go
Normal file
@@ -0,0 +1,107 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package importlist
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
)
|
||||
|
||||
const (
|
||||
// Label holds the string label denoting the importlist type in the database.
|
||||
Label = "import_list"
|
||||
// FieldID holds the string denoting the id field in the database.
|
||||
FieldID = "id"
|
||||
// FieldName holds the string denoting the name field in the database.
|
||||
FieldName = "name"
|
||||
// FieldType holds the string denoting the type field in the database.
|
||||
FieldType = "type"
|
||||
// FieldURL holds the string denoting the url field in the database.
|
||||
FieldURL = "url"
|
||||
// FieldQulity holds the string denoting the qulity field in the database.
|
||||
FieldQulity = "qulity"
|
||||
// FieldStorageID holds the string denoting the storage_id field in the database.
|
||||
FieldStorageID = "storage_id"
|
||||
// FieldSettings holds the string denoting the settings field in the database.
|
||||
FieldSettings = "settings"
|
||||
// Table holds the table name of the importlist in the database.
|
||||
Table = "import_lists"
|
||||
)
|
||||
|
||||
// Columns holds all SQL columns for importlist fields.
|
||||
var Columns = []string{
|
||||
FieldID,
|
||||
FieldName,
|
||||
FieldType,
|
||||
FieldURL,
|
||||
FieldQulity,
|
||||
FieldStorageID,
|
||||
FieldSettings,
|
||||
}
|
||||
|
||||
// ValidColumn reports if the column name is valid (part of the table columns).
|
||||
func ValidColumn(column string) bool {
|
||||
for i := range Columns {
|
||||
if column == Columns[i] {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Type defines the type for the "type" enum field.
|
||||
type Type string
|
||||
|
||||
// Type values.
|
||||
const (
|
||||
TypePlex Type = "plex"
|
||||
TypeDoulist Type = "doulist"
|
||||
)
|
||||
|
||||
func (_type Type) String() string {
|
||||
return string(_type)
|
||||
}
|
||||
|
||||
// TypeValidator is a validator for the "type" field enum values. It is called by the builders before save.
|
||||
func TypeValidator(_type Type) error {
|
||||
switch _type {
|
||||
case TypePlex, TypeDoulist:
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("importlist: invalid enum value for type field: %q", _type)
|
||||
}
|
||||
}
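A hedged sketch of the Type enum and its validator: the generated builders call TypeValidator before save, so an invalid value is rejected without touching the database. Assumes the "polaris/ent/importlist" import; the "rss" value is only an example of an invalid input.

func validateImportListTypeSketch(raw string) error {
	t := importlist.Type(raw)
	if err := importlist.TypeValidator(t); err != nil {
		return err // e.g. importlist: invalid enum value for type field: "rss"
	}
	return nil
}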
|
||||
|
||||
// OrderOption defines the ordering options for the ImportList queries.
|
||||
type OrderOption func(*sql.Selector)
|
||||
|
||||
// ByID orders the results by the id field.
|
||||
func ByID(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldID, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByName orders the results by the name field.
|
||||
func ByName(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldName, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByType orders the results by the type field.
|
||||
func ByType(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldType, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByURL orders the results by the url field.
|
||||
func ByURL(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldURL, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByQulity orders the results by the qulity field.
|
||||
func ByQulity(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldQulity, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByStorageID orders the results by the storage_id field.
|
||||
func ByStorageID(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldStorageID, opts...).ToFunc()
|
||||
}
|
||||
364
ent/importlist/where.go
Normal file
@@ -0,0 +1,364 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package importlist
|
||||
|
||||
import (
|
||||
"polaris/ent/predicate"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
)
|
||||
|
||||
// ID filters vertices based on their ID field.
|
||||
func ID(id int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldEQ(FieldID, id))
|
||||
}
|
||||
|
||||
// IDEQ applies the EQ predicate on the ID field.
|
||||
func IDEQ(id int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldEQ(FieldID, id))
|
||||
}
|
||||
|
||||
// IDNEQ applies the NEQ predicate on the ID field.
|
||||
func IDNEQ(id int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldNEQ(FieldID, id))
|
||||
}
|
||||
|
||||
// IDIn applies the In predicate on the ID field.
|
||||
func IDIn(ids ...int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldIn(FieldID, ids...))
|
||||
}
|
||||
|
||||
// IDNotIn applies the NotIn predicate on the ID field.
|
||||
func IDNotIn(ids ...int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldNotIn(FieldID, ids...))
|
||||
}
|
||||
|
||||
// IDGT applies the GT predicate on the ID field.
|
||||
func IDGT(id int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldGT(FieldID, id))
|
||||
}
|
||||
|
||||
// IDGTE applies the GTE predicate on the ID field.
|
||||
func IDGTE(id int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldGTE(FieldID, id))
|
||||
}
|
||||
|
||||
// IDLT applies the LT predicate on the ID field.
|
||||
func IDLT(id int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldLT(FieldID, id))
|
||||
}
|
||||
|
||||
// IDLTE applies the LTE predicate on the ID field.
|
||||
func IDLTE(id int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldLTE(FieldID, id))
|
||||
}
|
||||
|
||||
// Name applies equality check predicate on the "name" field. It's identical to NameEQ.
|
||||
func Name(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldEQ(FieldName, v))
|
||||
}
|
||||
|
||||
// URL applies equality check predicate on the "url" field. It's identical to URLEQ.
|
||||
func URL(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldEQ(FieldURL, v))
|
||||
}
|
||||
|
||||
// Qulity applies equality check predicate on the "qulity" field. It's identical to QulityEQ.
|
||||
func Qulity(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldEQ(FieldQulity, v))
|
||||
}
|
||||
|
||||
// StorageID applies equality check predicate on the "storage_id" field. It's identical to StorageIDEQ.
|
||||
func StorageID(v int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldEQ(FieldStorageID, v))
|
||||
}
|
||||
|
||||
// NameEQ applies the EQ predicate on the "name" field.
|
||||
func NameEQ(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldEQ(FieldName, v))
|
||||
}
|
||||
|
||||
// NameNEQ applies the NEQ predicate on the "name" field.
|
||||
func NameNEQ(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldNEQ(FieldName, v))
|
||||
}
|
||||
|
||||
// NameIn applies the In predicate on the "name" field.
|
||||
func NameIn(vs ...string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldIn(FieldName, vs...))
|
||||
}
|
||||
|
||||
// NameNotIn applies the NotIn predicate on the "name" field.
|
||||
func NameNotIn(vs ...string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldNotIn(FieldName, vs...))
|
||||
}
|
||||
|
||||
// NameGT applies the GT predicate on the "name" field.
|
||||
func NameGT(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldGT(FieldName, v))
|
||||
}
|
||||
|
||||
// NameGTE applies the GTE predicate on the "name" field.
|
||||
func NameGTE(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldGTE(FieldName, v))
|
||||
}
|
||||
|
||||
// NameLT applies the LT predicate on the "name" field.
|
||||
func NameLT(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldLT(FieldName, v))
|
||||
}
|
||||
|
||||
// NameLTE applies the LTE predicate on the "name" field.
|
||||
func NameLTE(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldLTE(FieldName, v))
|
||||
}
|
||||
|
||||
// NameContains applies the Contains predicate on the "name" field.
|
||||
func NameContains(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldContains(FieldName, v))
|
||||
}
|
||||
|
||||
// NameHasPrefix applies the HasPrefix predicate on the "name" field.
|
||||
func NameHasPrefix(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldHasPrefix(FieldName, v))
|
||||
}
|
||||
|
||||
// NameHasSuffix applies the HasSuffix predicate on the "name" field.
|
||||
func NameHasSuffix(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldHasSuffix(FieldName, v))
|
||||
}
|
||||
|
||||
// NameEqualFold applies the EqualFold predicate on the "name" field.
|
||||
func NameEqualFold(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldEqualFold(FieldName, v))
|
||||
}
|
||||
|
||||
// NameContainsFold applies the ContainsFold predicate on the "name" field.
|
||||
func NameContainsFold(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldContainsFold(FieldName, v))
|
||||
}
|
||||
|
||||
// TypeEQ applies the EQ predicate on the "type" field.
|
||||
func TypeEQ(v Type) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldEQ(FieldType, v))
|
||||
}
|
||||
|
||||
// TypeNEQ applies the NEQ predicate on the "type" field.
|
||||
func TypeNEQ(v Type) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldNEQ(FieldType, v))
|
||||
}
|
||||
|
||||
// TypeIn applies the In predicate on the "type" field.
|
||||
func TypeIn(vs ...Type) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldIn(FieldType, vs...))
|
||||
}
|
||||
|
||||
// TypeNotIn applies the NotIn predicate on the "type" field.
|
||||
func TypeNotIn(vs ...Type) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldNotIn(FieldType, vs...))
|
||||
}
|
||||
|
||||
// URLEQ applies the EQ predicate on the "url" field.
|
||||
func URLEQ(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldEQ(FieldURL, v))
|
||||
}
|
||||
|
||||
// URLNEQ applies the NEQ predicate on the "url" field.
|
||||
func URLNEQ(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldNEQ(FieldURL, v))
|
||||
}
|
||||
|
||||
// URLIn applies the In predicate on the "url" field.
|
||||
func URLIn(vs ...string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldIn(FieldURL, vs...))
|
||||
}
|
||||
|
||||
// URLNotIn applies the NotIn predicate on the "url" field.
|
||||
func URLNotIn(vs ...string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldNotIn(FieldURL, vs...))
|
||||
}
|
||||
|
||||
// URLGT applies the GT predicate on the "url" field.
|
||||
func URLGT(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldGT(FieldURL, v))
|
||||
}
|
||||
|
||||
// URLGTE applies the GTE predicate on the "url" field.
|
||||
func URLGTE(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldGTE(FieldURL, v))
|
||||
}
|
||||
|
||||
// URLLT applies the LT predicate on the "url" field.
|
||||
func URLLT(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldLT(FieldURL, v))
|
||||
}
|
||||
|
||||
// URLLTE applies the LTE predicate on the "url" field.
|
||||
func URLLTE(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldLTE(FieldURL, v))
|
||||
}
|
||||
|
||||
// URLContains applies the Contains predicate on the "url" field.
|
||||
func URLContains(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldContains(FieldURL, v))
|
||||
}
|
||||
|
||||
// URLHasPrefix applies the HasPrefix predicate on the "url" field.
|
||||
func URLHasPrefix(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldHasPrefix(FieldURL, v))
|
||||
}
|
||||
|
||||
// URLHasSuffix applies the HasSuffix predicate on the "url" field.
|
||||
func URLHasSuffix(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldHasSuffix(FieldURL, v))
|
||||
}
|
||||
|
||||
// URLIsNil applies the IsNil predicate on the "url" field.
|
||||
func URLIsNil() predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldIsNull(FieldURL))
|
||||
}
|
||||
|
||||
// URLNotNil applies the NotNil predicate on the "url" field.
|
||||
func URLNotNil() predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldNotNull(FieldURL))
|
||||
}
|
||||
|
||||
// URLEqualFold applies the EqualFold predicate on the "url" field.
|
||||
func URLEqualFold(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldEqualFold(FieldURL, v))
|
||||
}
|
||||
|
||||
// URLContainsFold applies the ContainsFold predicate on the "url" field.
|
||||
func URLContainsFold(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldContainsFold(FieldURL, v))
|
||||
}
|
||||
|
||||
// QulityEQ applies the EQ predicate on the "qulity" field.
|
||||
func QulityEQ(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldEQ(FieldQulity, v))
|
||||
}
|
||||
|
||||
// QulityNEQ applies the NEQ predicate on the "qulity" field.
|
||||
func QulityNEQ(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldNEQ(FieldQulity, v))
|
||||
}
|
||||
|
||||
// QulityIn applies the In predicate on the "qulity" field.
|
||||
func QulityIn(vs ...string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldIn(FieldQulity, vs...))
|
||||
}
|
||||
|
||||
// QulityNotIn applies the NotIn predicate on the "qulity" field.
|
||||
func QulityNotIn(vs ...string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldNotIn(FieldQulity, vs...))
|
||||
}
|
||||
|
||||
// QulityGT applies the GT predicate on the "qulity" field.
|
||||
func QulityGT(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldGT(FieldQulity, v))
|
||||
}
|
||||
|
||||
// QulityGTE applies the GTE predicate on the "qulity" field.
|
||||
func QulityGTE(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldGTE(FieldQulity, v))
|
||||
}
|
||||
|
||||
// QulityLT applies the LT predicate on the "qulity" field.
|
||||
func QulityLT(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldLT(FieldQulity, v))
|
||||
}
|
||||
|
||||
// QulityLTE applies the LTE predicate on the "qulity" field.
|
||||
func QulityLTE(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldLTE(FieldQulity, v))
|
||||
}
|
||||
|
||||
// QulityContains applies the Contains predicate on the "qulity" field.
|
||||
func QulityContains(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldContains(FieldQulity, v))
|
||||
}
|
||||
|
||||
// QulityHasPrefix applies the HasPrefix predicate on the "qulity" field.
|
||||
func QulityHasPrefix(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldHasPrefix(FieldQulity, v))
|
||||
}
|
||||
|
||||
// QulityHasSuffix applies the HasSuffix predicate on the "qulity" field.
|
||||
func QulityHasSuffix(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldHasSuffix(FieldQulity, v))
|
||||
}
|
||||
|
||||
// QulityEqualFold applies the EqualFold predicate on the "qulity" field.
|
||||
func QulityEqualFold(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldEqualFold(FieldQulity, v))
|
||||
}
|
||||
|
||||
// QulityContainsFold applies the ContainsFold predicate on the "qulity" field.
|
||||
func QulityContainsFold(v string) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldContainsFold(FieldQulity, v))
|
||||
}
|
||||
|
||||
// StorageIDEQ applies the EQ predicate on the "storage_id" field.
|
||||
func StorageIDEQ(v int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldEQ(FieldStorageID, v))
|
||||
}
|
||||
|
||||
// StorageIDNEQ applies the NEQ predicate on the "storage_id" field.
|
||||
func StorageIDNEQ(v int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldNEQ(FieldStorageID, v))
|
||||
}
|
||||
|
||||
// StorageIDIn applies the In predicate on the "storage_id" field.
|
||||
func StorageIDIn(vs ...int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldIn(FieldStorageID, vs...))
|
||||
}
|
||||
|
||||
// StorageIDNotIn applies the NotIn predicate on the "storage_id" field.
|
||||
func StorageIDNotIn(vs ...int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldNotIn(FieldStorageID, vs...))
|
||||
}
|
||||
|
||||
// StorageIDGT applies the GT predicate on the "storage_id" field.
|
||||
func StorageIDGT(v int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldGT(FieldStorageID, v))
|
||||
}
|
||||
|
||||
// StorageIDGTE applies the GTE predicate on the "storage_id" field.
|
||||
func StorageIDGTE(v int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldGTE(FieldStorageID, v))
|
||||
}
|
||||
|
||||
// StorageIDLT applies the LT predicate on the "storage_id" field.
|
||||
func StorageIDLT(v int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldLT(FieldStorageID, v))
|
||||
}
|
||||
|
||||
// StorageIDLTE applies the LTE predicate on the "storage_id" field.
|
||||
func StorageIDLTE(v int) predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldLTE(FieldStorageID, v))
|
||||
}
|
||||
|
||||
// SettingsIsNil applies the IsNil predicate on the "settings" field.
|
||||
func SettingsIsNil() predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldIsNull(FieldSettings))
|
||||
}
|
||||
|
||||
// SettingsNotNil applies the NotNil predicate on the "settings" field.
|
||||
func SettingsNotNil() predicate.ImportList {
|
||||
return predicate.ImportList(sql.FieldNotNull(FieldSettings))
|
||||
}
|
||||
|
||||
// And groups predicates with the AND operator between them.
|
||||
func And(predicates ...predicate.ImportList) predicate.ImportList {
|
||||
return predicate.ImportList(sql.AndPredicates(predicates...))
|
||||
}
|
||||
|
||||
// Or groups predicates with the OR operator between them.
|
||||
func Or(predicates ...predicate.ImportList) predicate.ImportList {
|
||||
return predicate.ImportList(sql.OrPredicates(predicates...))
|
||||
}
|
||||
|
||||
// Not applies the not operator on the given predicate.
|
||||
func Not(p predicate.ImportList) predicate.ImportList {
|
||||
return predicate.ImportList(sql.NotPredicates(p))
|
||||
}
|
||||
264
ent/importlist_create.go
Normal file
@@ -0,0 +1,264 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"polaris/ent/importlist"
|
||||
"polaris/ent/schema"
|
||||
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
)
|
||||
|
||||
// ImportListCreate is the builder for creating a ImportList entity.
|
||||
type ImportListCreate struct {
|
||||
config
|
||||
mutation *ImportListMutation
|
||||
hooks []Hook
|
||||
}
|
||||
|
||||
// SetName sets the "name" field.
|
||||
func (ilc *ImportListCreate) SetName(s string) *ImportListCreate {
|
||||
ilc.mutation.SetName(s)
|
||||
return ilc
|
||||
}
|
||||
|
||||
// SetType sets the "type" field.
|
||||
func (ilc *ImportListCreate) SetType(i importlist.Type) *ImportListCreate {
|
||||
ilc.mutation.SetType(i)
|
||||
return ilc
|
||||
}
|
||||
|
||||
// SetURL sets the "url" field.
|
||||
func (ilc *ImportListCreate) SetURL(s string) *ImportListCreate {
|
||||
ilc.mutation.SetURL(s)
|
||||
return ilc
|
||||
}
|
||||
|
||||
// SetNillableURL sets the "url" field if the given value is not nil.
|
||||
func (ilc *ImportListCreate) SetNillableURL(s *string) *ImportListCreate {
|
||||
if s != nil {
|
||||
ilc.SetURL(*s)
|
||||
}
|
||||
return ilc
|
||||
}
|
||||
|
||||
// SetQulity sets the "qulity" field.
|
||||
func (ilc *ImportListCreate) SetQulity(s string) *ImportListCreate {
|
||||
ilc.mutation.SetQulity(s)
|
||||
return ilc
|
||||
}
|
||||
|
||||
// SetStorageID sets the "storage_id" field.
|
||||
func (ilc *ImportListCreate) SetStorageID(i int) *ImportListCreate {
|
||||
ilc.mutation.SetStorageID(i)
|
||||
return ilc
|
||||
}
|
||||
|
||||
// SetSettings sets the "settings" field.
|
||||
func (ilc *ImportListCreate) SetSettings(sls schema.ImportListSettings) *ImportListCreate {
|
||||
ilc.mutation.SetSettings(sls)
|
||||
return ilc
|
||||
}
|
||||
|
||||
// SetNillableSettings sets the "settings" field if the given value is not nil.
|
||||
func (ilc *ImportListCreate) SetNillableSettings(sls *schema.ImportListSettings) *ImportListCreate {
|
||||
if sls != nil {
|
||||
ilc.SetSettings(*sls)
|
||||
}
|
||||
return ilc
|
||||
}
|
||||
|
||||
// Mutation returns the ImportListMutation object of the builder.
|
||||
func (ilc *ImportListCreate) Mutation() *ImportListMutation {
|
||||
return ilc.mutation
|
||||
}
|
||||
|
||||
// Save creates the ImportList in the database.
|
||||
func (ilc *ImportListCreate) Save(ctx context.Context) (*ImportList, error) {
|
||||
return withHooks(ctx, ilc.sqlSave, ilc.mutation, ilc.hooks)
|
||||
}
|
||||
|
||||
// SaveX calls Save and panics if Save returns an error.
|
||||
func (ilc *ImportListCreate) SaveX(ctx context.Context) *ImportList {
|
||||
v, err := ilc.Save(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
// Exec executes the query.
|
||||
func (ilc *ImportListCreate) Exec(ctx context.Context) error {
|
||||
_, err := ilc.Save(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (ilc *ImportListCreate) ExecX(ctx context.Context) {
|
||||
if err := ilc.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// check runs all checks and user-defined validators on the builder.
|
||||
func (ilc *ImportListCreate) check() error {
|
||||
if _, ok := ilc.mutation.Name(); !ok {
|
||||
return &ValidationError{Name: "name", err: errors.New(`ent: missing required field "ImportList.name"`)}
|
||||
}
|
||||
if _, ok := ilc.mutation.GetType(); !ok {
|
||||
return &ValidationError{Name: "type", err: errors.New(`ent: missing required field "ImportList.type"`)}
|
||||
}
|
||||
if v, ok := ilc.mutation.GetType(); ok {
|
||||
if err := importlist.TypeValidator(v); err != nil {
|
||||
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "ImportList.type": %w`, err)}
|
||||
}
|
||||
}
|
||||
if _, ok := ilc.mutation.Qulity(); !ok {
|
||||
return &ValidationError{Name: "qulity", err: errors.New(`ent: missing required field "ImportList.qulity"`)}
|
||||
}
|
||||
if _, ok := ilc.mutation.StorageID(); !ok {
|
||||
return &ValidationError{Name: "storage_id", err: errors.New(`ent: missing required field "ImportList.storage_id"`)}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ilc *ImportListCreate) sqlSave(ctx context.Context) (*ImportList, error) {
|
||||
if err := ilc.check(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_node, _spec := ilc.createSpec()
|
||||
if err := sqlgraph.CreateNode(ctx, ilc.driver, _spec); err != nil {
|
||||
if sqlgraph.IsConstraintError(err) {
|
||||
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
id := _spec.ID.Value.(int64)
|
||||
_node.ID = int(id)
|
||||
ilc.mutation.id = &_node.ID
|
||||
ilc.mutation.done = true
|
||||
return _node, nil
|
||||
}
|
||||
|
||||
func (ilc *ImportListCreate) createSpec() (*ImportList, *sqlgraph.CreateSpec) {
|
||||
var (
|
||||
_node = &ImportList{config: ilc.config}
|
||||
_spec = sqlgraph.NewCreateSpec(importlist.Table, sqlgraph.NewFieldSpec(importlist.FieldID, field.TypeInt))
|
||||
)
|
||||
if value, ok := ilc.mutation.Name(); ok {
|
||||
_spec.SetField(importlist.FieldName, field.TypeString, value)
|
||||
_node.Name = value
|
||||
}
|
||||
if value, ok := ilc.mutation.GetType(); ok {
|
||||
_spec.SetField(importlist.FieldType, field.TypeEnum, value)
|
||||
_node.Type = value
|
||||
}
|
||||
if value, ok := ilc.mutation.URL(); ok {
|
||||
_spec.SetField(importlist.FieldURL, field.TypeString, value)
|
||||
_node.URL = value
|
||||
}
|
||||
if value, ok := ilc.mutation.Qulity(); ok {
|
||||
_spec.SetField(importlist.FieldQulity, field.TypeString, value)
|
||||
_node.Qulity = value
|
||||
}
|
||||
if value, ok := ilc.mutation.StorageID(); ok {
|
||||
_spec.SetField(importlist.FieldStorageID, field.TypeInt, value)
|
||||
_node.StorageID = value
|
||||
}
|
||||
if value, ok := ilc.mutation.Settings(); ok {
|
||||
_spec.SetField(importlist.FieldSettings, field.TypeJSON, value)
|
||||
_node.Settings = value
|
||||
}
|
||||
return _node, _spec
|
||||
}
|
||||
|
||||
// ImportListCreateBulk is the builder for creating many ImportList entities in bulk.
|
||||
type ImportListCreateBulk struct {
|
||||
config
|
||||
err error
|
||||
builders []*ImportListCreate
|
||||
}
|
||||
|
||||
// Save creates the ImportList entities in the database.
|
||||
func (ilcb *ImportListCreateBulk) Save(ctx context.Context) ([]*ImportList, error) {
|
||||
if ilcb.err != nil {
|
||||
return nil, ilcb.err
|
||||
}
|
||||
specs := make([]*sqlgraph.CreateSpec, len(ilcb.builders))
|
||||
nodes := make([]*ImportList, len(ilcb.builders))
|
||||
mutators := make([]Mutator, len(ilcb.builders))
|
||||
for i := range ilcb.builders {
|
||||
func(i int, root context.Context) {
|
||||
builder := ilcb.builders[i]
|
||||
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
|
||||
mutation, ok := m.(*ImportListMutation)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected mutation type %T", m)
|
||||
}
|
||||
if err := builder.check(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
builder.mutation = mutation
|
||||
var err error
|
||||
nodes[i], specs[i] = builder.createSpec()
|
||||
if i < len(mutators)-1 {
|
||||
_, err = mutators[i+1].Mutate(root, ilcb.builders[i+1].mutation)
|
||||
} else {
|
||||
spec := &sqlgraph.BatchCreateSpec{Nodes: specs}
|
||||
// Invoke the actual operation on the latest mutation in the chain.
|
||||
if err = sqlgraph.BatchCreate(ctx, ilcb.driver, spec); err != nil {
|
||||
if sqlgraph.IsConstraintError(err) {
|
||||
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||
}
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
mutation.id = &nodes[i].ID
|
||||
if specs[i].ID.Value != nil {
|
||||
id := specs[i].ID.Value.(int64)
|
||||
nodes[i].ID = int(id)
|
||||
}
|
||||
mutation.done = true
|
||||
return nodes[i], nil
|
||||
})
|
||||
for i := len(builder.hooks) - 1; i >= 0; i-- {
|
||||
mut = builder.hooks[i](mut)
|
||||
}
|
||||
mutators[i] = mut
|
||||
}(i, ctx)
|
||||
}
|
||||
if len(mutators) > 0 {
|
||||
if _, err := mutators[0].Mutate(ctx, ilcb.builders[0].mutation); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return nodes, nil
|
||||
}
|
||||
|
||||
// SaveX is like Save, but panics if an error occurs.
|
||||
func (ilcb *ImportListCreateBulk) SaveX(ctx context.Context) []*ImportList {
|
||||
v, err := ilcb.Save(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
// Exec executes the query.
|
||||
func (ilcb *ImportListCreateBulk) Exec(ctx context.Context) error {
|
||||
_, err := ilcb.Save(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (ilcb *ImportListCreateBulk) ExecX(ctx context.Context) {
|
||||
if err := ilcb.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
88
ent/importlist_delete.go
Normal file
@@ -0,0 +1,88 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"context"
|
||||
"polaris/ent/importlist"
|
||||
"polaris/ent/predicate"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
)
|
||||
|
||||
// ImportListDelete is the builder for deleting a ImportList entity.
|
||||
type ImportListDelete struct {
|
||||
config
|
||||
hooks []Hook
|
||||
mutation *ImportListMutation
|
||||
}
|
||||
|
||||
// Where appends a list predicates to the ImportListDelete builder.
|
||||
func (ild *ImportListDelete) Where(ps ...predicate.ImportList) *ImportListDelete {
|
||||
ild.mutation.Where(ps...)
|
||||
return ild
|
||||
}
|
||||
|
||||
// Exec executes the deletion query and returns how many vertices were deleted.
|
||||
func (ild *ImportListDelete) Exec(ctx context.Context) (int, error) {
|
||||
return withHooks(ctx, ild.sqlExec, ild.mutation, ild.hooks)
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (ild *ImportListDelete) ExecX(ctx context.Context) int {
|
||||
n, err := ild.Exec(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
func (ild *ImportListDelete) sqlExec(ctx context.Context) (int, error) {
|
||||
_spec := sqlgraph.NewDeleteSpec(importlist.Table, sqlgraph.NewFieldSpec(importlist.FieldID, field.TypeInt))
|
||||
if ps := ild.mutation.predicates; len(ps) > 0 {
|
||||
_spec.Predicate = func(selector *sql.Selector) {
|
||||
for i := range ps {
|
||||
ps[i](selector)
|
||||
}
|
||||
}
|
||||
}
|
||||
affected, err := sqlgraph.DeleteNodes(ctx, ild.driver, _spec)
|
||||
if err != nil && sqlgraph.IsConstraintError(err) {
|
||||
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||
}
|
||||
ild.mutation.done = true
|
||||
return affected, err
|
||||
}
|
||||
|
||||
// ImportListDeleteOne is the builder for deleting a single ImportList entity.
|
||||
type ImportListDeleteOne struct {
|
||||
ild *ImportListDelete
|
||||
}
|
||||
|
||||
// Where appends a list predicates to the ImportListDelete builder.
|
||||
func (ildo *ImportListDeleteOne) Where(ps ...predicate.ImportList) *ImportListDeleteOne {
|
||||
ildo.ild.mutation.Where(ps...)
|
||||
return ildo
|
||||
}
|
||||
|
||||
// Exec executes the deletion query.
|
||||
func (ildo *ImportListDeleteOne) Exec(ctx context.Context) error {
|
||||
n, err := ildo.ild.Exec(ctx)
|
||||
switch {
|
||||
case err != nil:
|
||||
return err
|
||||
case n == 0:
|
||||
return &NotFoundError{importlist.Label}
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (ildo *ImportListDeleteOne) ExecX(ctx context.Context) {
|
||||
if err := ildo.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
526
ent/importlist_query.go
Normal file
@@ -0,0 +1,526 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"math"
|
||||
"polaris/ent/importlist"
|
||||
"polaris/ent/predicate"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
)
|
||||
|
||||
// ImportListQuery is the builder for querying ImportList entities.
|
||||
type ImportListQuery struct {
|
||||
config
|
||||
ctx *QueryContext
|
||||
order []importlist.OrderOption
|
||||
inters []Interceptor
|
||||
predicates []predicate.ImportList
|
||||
// intermediate query (i.e. traversal path).
|
||||
sql *sql.Selector
|
||||
path func(context.Context) (*sql.Selector, error)
|
||||
}
|
||||
|
||||
// Where adds a new predicate for the ImportListQuery builder.
|
||||
func (ilq *ImportListQuery) Where(ps ...predicate.ImportList) *ImportListQuery {
|
||||
ilq.predicates = append(ilq.predicates, ps...)
|
||||
return ilq
|
||||
}
|
||||
|
||||
// Limit the number of records to be returned by this query.
|
||||
func (ilq *ImportListQuery) Limit(limit int) *ImportListQuery {
|
||||
ilq.ctx.Limit = &limit
|
||||
return ilq
|
||||
}
|
||||
|
||||
// Offset to start from.
|
||||
func (ilq *ImportListQuery) Offset(offset int) *ImportListQuery {
|
||||
ilq.ctx.Offset = &offset
|
||||
return ilq
|
||||
}
|
||||
|
||||
// Unique configures the query builder to filter duplicate records on query.
|
||||
// By default, unique is set to true, and can be disabled using this method.
|
||||
func (ilq *ImportListQuery) Unique(unique bool) *ImportListQuery {
|
||||
ilq.ctx.Unique = &unique
|
||||
return ilq
|
||||
}
|
||||
|
||||
// Order specifies how the records should be ordered.
|
||||
func (ilq *ImportListQuery) Order(o ...importlist.OrderOption) *ImportListQuery {
|
||||
ilq.order = append(ilq.order, o...)
|
||||
return ilq
|
||||
}
|
||||
|
||||
// First returns the first ImportList entity from the query.
|
||||
// Returns a *NotFoundError when no ImportList was found.
|
||||
func (ilq *ImportListQuery) First(ctx context.Context) (*ImportList, error) {
|
||||
nodes, err := ilq.Limit(1).All(setContextOp(ctx, ilq.ctx, "First"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(nodes) == 0 {
|
||||
return nil, &NotFoundError{importlist.Label}
|
||||
}
|
||||
return nodes[0], nil
|
||||
}
|
||||
|
||||
// FirstX is like First, but panics if an error occurs.
|
||||
func (ilq *ImportListQuery) FirstX(ctx context.Context) *ImportList {
|
||||
node, err := ilq.First(ctx)
|
||||
if err != nil && !IsNotFound(err) {
|
||||
panic(err)
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
// FirstID returns the first ImportList ID from the query.
|
||||
// Returns a *NotFoundError when no ImportList ID was found.
|
||||
func (ilq *ImportListQuery) FirstID(ctx context.Context) (id int, err error) {
|
||||
var ids []int
|
||||
if ids, err = ilq.Limit(1).IDs(setContextOp(ctx, ilq.ctx, "FirstID")); err != nil {
|
||||
return
|
||||
}
|
||||
if len(ids) == 0 {
|
||||
err = &NotFoundError{importlist.Label}
|
||||
return
|
||||
}
|
||||
return ids[0], nil
|
||||
}
|
||||
|
||||
// FirstIDX is like FirstID, but panics if an error occurs.
|
||||
func (ilq *ImportListQuery) FirstIDX(ctx context.Context) int {
|
||||
id, err := ilq.FirstID(ctx)
|
||||
if err != nil && !IsNotFound(err) {
|
||||
panic(err)
|
||||
}
|
||||
return id
|
||||
}
|
||||
|
||||
// Only returns a single ImportList entity found by the query, ensuring it only returns one.
|
||||
// Returns a *NotSingularError when more than one ImportList entity is found.
|
||||
// Returns a *NotFoundError when no ImportList entities are found.
|
||||
func (ilq *ImportListQuery) Only(ctx context.Context) (*ImportList, error) {
|
||||
nodes, err := ilq.Limit(2).All(setContextOp(ctx, ilq.ctx, "Only"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
switch len(nodes) {
|
||||
case 1:
|
||||
return nodes[0], nil
|
||||
case 0:
|
||||
return nil, &NotFoundError{importlist.Label}
|
||||
default:
|
||||
return nil, &NotSingularError{importlist.Label}
|
||||
}
|
||||
}
|
||||
|
||||
// OnlyX is like Only, but panics if an error occurs.
|
||||
func (ilq *ImportListQuery) OnlyX(ctx context.Context) *ImportList {
|
||||
node, err := ilq.Only(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
// OnlyID is like Only, but returns the only ImportList ID in the query.
|
||||
// Returns a *NotSingularError when more than one ImportList ID is found.
|
||||
// Returns a *NotFoundError when no entities are found.
|
||||
func (ilq *ImportListQuery) OnlyID(ctx context.Context) (id int, err error) {
|
||||
var ids []int
|
||||
if ids, err = ilq.Limit(2).IDs(setContextOp(ctx, ilq.ctx, "OnlyID")); err != nil {
|
||||
return
|
||||
}
|
||||
switch len(ids) {
|
||||
case 1:
|
||||
id = ids[0]
|
||||
case 0:
|
||||
err = &NotFoundError{importlist.Label}
|
||||
default:
|
||||
err = &NotSingularError{importlist.Label}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// OnlyIDX is like OnlyID, but panics if an error occurs.
|
||||
func (ilq *ImportListQuery) OnlyIDX(ctx context.Context) int {
|
||||
id, err := ilq.OnlyID(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return id
|
||||
}
|
||||
|
||||
// All executes the query and returns a list of ImportLists.
|
||||
func (ilq *ImportListQuery) All(ctx context.Context) ([]*ImportList, error) {
|
||||
ctx = setContextOp(ctx, ilq.ctx, "All")
|
||||
if err := ilq.prepareQuery(ctx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
qr := querierAll[[]*ImportList, *ImportListQuery]()
|
||||
return withInterceptors[[]*ImportList](ctx, ilq, qr, ilq.inters)
|
||||
}
|
||||
|
||||
// AllX is like All, but panics if an error occurs.
|
||||
func (ilq *ImportListQuery) AllX(ctx context.Context) []*ImportList {
|
||||
nodes, err := ilq.All(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return nodes
|
||||
}
|
||||
|
||||
// IDs executes the query and returns a list of ImportList IDs.
|
||||
func (ilq *ImportListQuery) IDs(ctx context.Context) (ids []int, err error) {
|
||||
if ilq.ctx.Unique == nil && ilq.path != nil {
|
||||
ilq.Unique(true)
|
||||
}
|
||||
ctx = setContextOp(ctx, ilq.ctx, "IDs")
|
||||
if err = ilq.Select(importlist.FieldID).Scan(ctx, &ids); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return ids, nil
|
||||
}
|
||||
|
||||
// IDsX is like IDs, but panics if an error occurs.
|
||||
func (ilq *ImportListQuery) IDsX(ctx context.Context) []int {
|
||||
ids, err := ilq.IDs(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return ids
|
||||
}
|
||||
|
||||
// Count returns the count of the given query.
|
||||
func (ilq *ImportListQuery) Count(ctx context.Context) (int, error) {
|
||||
ctx = setContextOp(ctx, ilq.ctx, "Count")
|
||||
if err := ilq.prepareQuery(ctx); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return withInterceptors[int](ctx, ilq, querierCount[*ImportListQuery](), ilq.inters)
|
||||
}
|
||||
|
||||
// CountX is like Count, but panics if an error occurs.
|
||||
func (ilq *ImportListQuery) CountX(ctx context.Context) int {
|
||||
count, err := ilq.Count(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
// Exist returns true if the query has elements in the graph.
|
||||
func (ilq *ImportListQuery) Exist(ctx context.Context) (bool, error) {
|
||||
ctx = setContextOp(ctx, ilq.ctx, "Exist")
|
||||
switch _, err := ilq.FirstID(ctx); {
|
||||
case IsNotFound(err):
|
||||
return false, nil
|
||||
case err != nil:
|
||||
return false, fmt.Errorf("ent: check existence: %w", err)
|
||||
default:
|
||||
return true, nil
|
||||
}
|
||||
}
|
||||
|
||||
// ExistX is like Exist, but panics if an error occurs.
|
||||
func (ilq *ImportListQuery) ExistX(ctx context.Context) bool {
|
||||
exist, err := ilq.Exist(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return exist
|
||||
}
|
||||
|
||||
// Clone returns a duplicate of the ImportListQuery builder, including all associated steps. It can be
|
||||
// used to prepare common query builders and use them differently after the clone is made.
|
||||
func (ilq *ImportListQuery) Clone() *ImportListQuery {
|
||||
if ilq == nil {
|
||||
return nil
|
||||
}
|
||||
return &ImportListQuery{
|
||||
config: ilq.config,
|
||||
ctx: ilq.ctx.Clone(),
|
||||
order: append([]importlist.OrderOption{}, ilq.order...),
|
||||
inters: append([]Interceptor{}, ilq.inters...),
|
||||
predicates: append([]predicate.ImportList{}, ilq.predicates...),
|
||||
// clone intermediate query.
|
||||
sql: ilq.sql.Clone(),
|
||||
path: ilq.path,
|
||||
}
|
||||
}
|
||||
|
||||
// GroupBy is used to group vertices by one or more fields/columns.
|
||||
// It is often used with aggregate functions, like: count, max, mean, min, sum.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// var v []struct {
|
||||
// Name string `json:"name,omitempty"`
|
||||
// Count int `json:"count,omitempty"`
|
||||
// }
|
||||
//
|
||||
// client.ImportList.Query().
|
||||
// GroupBy(importlist.FieldName).
|
||||
// Aggregate(ent.Count()).
|
||||
// Scan(ctx, &v)
|
||||
func (ilq *ImportListQuery) GroupBy(field string, fields ...string) *ImportListGroupBy {
|
||||
ilq.ctx.Fields = append([]string{field}, fields...)
|
||||
grbuild := &ImportListGroupBy{build: ilq}
|
||||
grbuild.flds = &ilq.ctx.Fields
|
||||
grbuild.label = importlist.Label
|
||||
grbuild.scan = grbuild.Scan
|
||||
return grbuild
|
||||
}
|
||||
|
||||
// Select allows the selection one or more fields/columns for the given query,
|
||||
// instead of selecting all fields in the entity.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// var v []struct {
|
||||
// Name string `json:"name,omitempty"`
|
||||
// }
|
||||
//
|
||||
// client.ImportList.Query().
|
||||
// Select(importlist.FieldName).
|
||||
// Scan(ctx, &v)
|
||||
func (ilq *ImportListQuery) Select(fields ...string) *ImportListSelect {
|
||||
ilq.ctx.Fields = append(ilq.ctx.Fields, fields...)
|
||||
sbuild := &ImportListSelect{ImportListQuery: ilq}
|
||||
sbuild.label = importlist.Label
|
||||
sbuild.flds, sbuild.scan = &ilq.ctx.Fields, sbuild.Scan
|
||||
return sbuild
|
||||
}
|
||||
|
||||
// Aggregate returns a ImportListSelect configured with the given aggregations.
|
||||
func (ilq *ImportListQuery) Aggregate(fns ...AggregateFunc) *ImportListSelect {
|
||||
return ilq.Select().Aggregate(fns...)
|
||||
}
|
||||
|
||||
func (ilq *ImportListQuery) prepareQuery(ctx context.Context) error {
|
||||
for _, inter := range ilq.inters {
|
||||
if inter == nil {
|
||||
return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)")
|
||||
}
|
||||
if trv, ok := inter.(Traverser); ok {
|
||||
if err := trv.Traverse(ctx, ilq); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
for _, f := range ilq.ctx.Fields {
|
||||
if !importlist.ValidColumn(f) {
|
||||
return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
|
||||
}
|
||||
}
|
||||
if ilq.path != nil {
|
||||
prev, err := ilq.path(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
ilq.sql = prev
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ilq *ImportListQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*ImportList, error) {
|
||||
var (
|
||||
nodes = []*ImportList{}
|
||||
_spec = ilq.querySpec()
|
||||
)
|
||||
_spec.ScanValues = func(columns []string) ([]any, error) {
|
||||
return (*ImportList).scanValues(nil, columns)
|
||||
}
|
||||
_spec.Assign = func(columns []string, values []any) error {
|
||||
node := &ImportList{config: ilq.config}
|
||||
nodes = append(nodes, node)
|
||||
return node.assignValues(columns, values)
|
||||
}
|
||||
for i := range hooks {
|
||||
hooks[i](ctx, _spec)
|
||||
}
|
||||
if err := sqlgraph.QueryNodes(ctx, ilq.driver, _spec); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(nodes) == 0 {
|
||||
return nodes, nil
|
||||
}
|
||||
return nodes, nil
|
||||
}
|
||||
|
||||
func (ilq *ImportListQuery) sqlCount(ctx context.Context) (int, error) {
|
||||
_spec := ilq.querySpec()
|
||||
_spec.Node.Columns = ilq.ctx.Fields
|
||||
if len(ilq.ctx.Fields) > 0 {
|
||||
_spec.Unique = ilq.ctx.Unique != nil && *ilq.ctx.Unique
|
||||
}
|
||||
return sqlgraph.CountNodes(ctx, ilq.driver, _spec)
|
||||
}
|
||||
|
||||
func (ilq *ImportListQuery) querySpec() *sqlgraph.QuerySpec {
|
||||
_spec := sqlgraph.NewQuerySpec(importlist.Table, importlist.Columns, sqlgraph.NewFieldSpec(importlist.FieldID, field.TypeInt))
|
||||
_spec.From = ilq.sql
|
||||
if unique := ilq.ctx.Unique; unique != nil {
|
||||
_spec.Unique = *unique
|
||||
} else if ilq.path != nil {
|
||||
_spec.Unique = true
|
||||
}
|
||||
if fields := ilq.ctx.Fields; len(fields) > 0 {
|
||||
_spec.Node.Columns = make([]string, 0, len(fields))
|
||||
_spec.Node.Columns = append(_spec.Node.Columns, importlist.FieldID)
|
||||
for i := range fields {
|
||||
if fields[i] != importlist.FieldID {
|
||||
_spec.Node.Columns = append(_spec.Node.Columns, fields[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
if ps := ilq.predicates; len(ps) > 0 {
|
||||
_spec.Predicate = func(selector *sql.Selector) {
|
||||
for i := range ps {
|
||||
ps[i](selector)
|
||||
}
|
||||
}
|
||||
}
|
||||
if limit := ilq.ctx.Limit; limit != nil {
|
||||
_spec.Limit = *limit
|
||||
}
|
||||
if offset := ilq.ctx.Offset; offset != nil {
|
||||
_spec.Offset = *offset
|
||||
}
|
||||
if ps := ilq.order; len(ps) > 0 {
|
||||
_spec.Order = func(selector *sql.Selector) {
|
||||
for i := range ps {
|
||||
ps[i](selector)
|
||||
}
|
||||
}
|
||||
}
|
||||
return _spec
|
||||
}
|
||||
|
||||
func (ilq *ImportListQuery) sqlQuery(ctx context.Context) *sql.Selector {
|
||||
builder := sql.Dialect(ilq.driver.Dialect())
|
||||
t1 := builder.Table(importlist.Table)
|
||||
columns := ilq.ctx.Fields
|
||||
if len(columns) == 0 {
|
||||
columns = importlist.Columns
|
||||
}
|
||||
selector := builder.Select(t1.Columns(columns...)...).From(t1)
|
||||
if ilq.sql != nil {
|
||||
selector = ilq.sql
|
||||
selector.Select(selector.Columns(columns...)...)
|
||||
}
|
||||
if ilq.ctx.Unique != nil && *ilq.ctx.Unique {
|
||||
selector.Distinct()
|
||||
}
|
||||
for _, p := range ilq.predicates {
|
||||
p(selector)
|
||||
}
|
||||
for _, p := range ilq.order {
|
||||
p(selector)
|
||||
}
|
||||
if offset := ilq.ctx.Offset; offset != nil {
|
||||
// limit is mandatory for offset clause. We start
|
||||
// with default value, and override it below if needed.
|
||||
selector.Offset(*offset).Limit(math.MaxInt32)
|
||||
}
|
||||
if limit := ilq.ctx.Limit; limit != nil {
|
||||
selector.Limit(*limit)
|
||||
}
|
||||
return selector
|
||||
}
|
||||
|
||||
// ImportListGroupBy is the group-by builder for ImportList entities.
|
||||
type ImportListGroupBy struct {
|
||||
selector
|
||||
build *ImportListQuery
|
||||
}
|
||||
|
||||
// Aggregate adds the given aggregation functions to the group-by query.
|
||||
func (ilgb *ImportListGroupBy) Aggregate(fns ...AggregateFunc) *ImportListGroupBy {
|
||||
ilgb.fns = append(ilgb.fns, fns...)
|
||||
return ilgb
|
||||
}
|
||||
|
||||
// Scan applies the selector query and scans the result into the given value.
|
||||
func (ilgb *ImportListGroupBy) Scan(ctx context.Context, v any) error {
|
||||
ctx = setContextOp(ctx, ilgb.build.ctx, "GroupBy")
|
||||
if err := ilgb.build.prepareQuery(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
return scanWithInterceptors[*ImportListQuery, *ImportListGroupBy](ctx, ilgb.build, ilgb, ilgb.build.inters, v)
|
||||
}
|
||||
|
||||
func (ilgb *ImportListGroupBy) sqlScan(ctx context.Context, root *ImportListQuery, v any) error {
|
||||
selector := root.sqlQuery(ctx).Select()
|
||||
aggregation := make([]string, 0, len(ilgb.fns))
|
||||
for _, fn := range ilgb.fns {
|
||||
aggregation = append(aggregation, fn(selector))
|
||||
}
|
||||
if len(selector.SelectedColumns()) == 0 {
|
||||
columns := make([]string, 0, len(*ilgb.flds)+len(ilgb.fns))
|
||||
for _, f := range *ilgb.flds {
|
||||
columns = append(columns, selector.C(f))
|
||||
}
|
||||
columns = append(columns, aggregation...)
|
||||
selector.Select(columns...)
|
||||
}
|
||||
selector.GroupBy(selector.Columns(*ilgb.flds...)...)
|
||||
if err := selector.Err(); err != nil {
|
||||
return err
|
||||
}
|
||||
rows := &sql.Rows{}
|
||||
query, args := selector.Query()
|
||||
if err := ilgb.build.driver.Query(ctx, query, args, rows); err != nil {
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
return sql.ScanSlice(rows, v)
|
||||
}
|
||||
|
||||
// ImportListSelect is the builder for selecting fields of ImportList entities.
|
||||
type ImportListSelect struct {
|
||||
*ImportListQuery
|
||||
selector
|
||||
}
|
||||
|
||||
// Aggregate adds the given aggregation functions to the selector query.
|
||||
func (ils *ImportListSelect) Aggregate(fns ...AggregateFunc) *ImportListSelect {
|
||||
ils.fns = append(ils.fns, fns...)
|
||||
return ils
|
||||
}
|
||||
|
||||
// Scan applies the selector query and scans the result into the given value.
|
||||
func (ils *ImportListSelect) Scan(ctx context.Context, v any) error {
|
||||
ctx = setContextOp(ctx, ils.ctx, "Select")
|
||||
if err := ils.prepareQuery(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
return scanWithInterceptors[*ImportListQuery, *ImportListSelect](ctx, ils.ImportListQuery, ils, ils.inters, v)
|
||||
}
|
||||
|
||||
func (ils *ImportListSelect) sqlScan(ctx context.Context, root *ImportListQuery, v any) error {
|
||||
selector := root.sqlQuery(ctx)
|
||||
aggregation := make([]string, 0, len(ils.fns))
|
||||
for _, fn := range ils.fns {
|
||||
aggregation = append(aggregation, fn(selector))
|
||||
}
|
||||
switch n := len(*ils.selector.flds); {
|
||||
case n == 0 && len(aggregation) > 0:
|
||||
selector.Select(aggregation...)
|
||||
case n != 0 && len(aggregation) > 0:
|
||||
selector.AppendSelect(aggregation...)
|
||||
}
|
||||
rows := &sql.Rows{}
|
||||
query, args := selector.Query()
|
||||
if err := ils.driver.Query(ctx, query, args, rows); err != nil {
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
return sql.ScanSlice(rows, v)
|
||||
}
|
||||
462
ent/importlist_update.go
Normal file
@@ -0,0 +1,462 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"polaris/ent/importlist"
|
||||
"polaris/ent/predicate"
|
||||
"polaris/ent/schema"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
)
|
||||
|
||||
// ImportListUpdate is the builder for updating ImportList entities.
|
||||
type ImportListUpdate struct {
|
||||
config
|
||||
hooks []Hook
|
||||
mutation *ImportListMutation
|
||||
}
|
||||
|
||||
// Where appends a list predicates to the ImportListUpdate builder.
|
||||
func (ilu *ImportListUpdate) Where(ps ...predicate.ImportList) *ImportListUpdate {
|
||||
ilu.mutation.Where(ps...)
|
||||
return ilu
|
||||
}
|
||||
|
||||
// SetName sets the "name" field.
|
||||
func (ilu *ImportListUpdate) SetName(s string) *ImportListUpdate {
|
||||
ilu.mutation.SetName(s)
|
||||
return ilu
|
||||
}
|
||||
|
||||
// SetNillableName sets the "name" field if the given value is not nil.
|
||||
func (ilu *ImportListUpdate) SetNillableName(s *string) *ImportListUpdate {
|
||||
if s != nil {
|
||||
ilu.SetName(*s)
|
||||
}
|
||||
return ilu
|
||||
}
|
||||
|
||||
// SetType sets the "type" field.
|
||||
func (ilu *ImportListUpdate) SetType(i importlist.Type) *ImportListUpdate {
|
||||
ilu.mutation.SetType(i)
|
||||
return ilu
|
||||
}
|
||||
|
||||
// SetNillableType sets the "type" field if the given value is not nil.
|
||||
func (ilu *ImportListUpdate) SetNillableType(i *importlist.Type) *ImportListUpdate {
|
||||
if i != nil {
|
||||
ilu.SetType(*i)
|
||||
}
|
||||
return ilu
|
||||
}
|
||||
|
||||
// SetURL sets the "url" field.
|
||||
func (ilu *ImportListUpdate) SetURL(s string) *ImportListUpdate {
|
||||
ilu.mutation.SetURL(s)
|
||||
return ilu
|
||||
}
|
||||
|
||||
// SetNillableURL sets the "url" field if the given value is not nil.
|
||||
func (ilu *ImportListUpdate) SetNillableURL(s *string) *ImportListUpdate {
|
||||
if s != nil {
|
||||
ilu.SetURL(*s)
|
||||
}
|
||||
return ilu
|
||||
}
|
||||
|
||||
// ClearURL clears the value of the "url" field.
|
||||
func (ilu *ImportListUpdate) ClearURL() *ImportListUpdate {
|
||||
ilu.mutation.ClearURL()
|
||||
return ilu
|
||||
}
|
||||
|
||||
// SetQulity sets the "qulity" field.
|
||||
func (ilu *ImportListUpdate) SetQulity(s string) *ImportListUpdate {
|
||||
ilu.mutation.SetQulity(s)
|
||||
return ilu
|
||||
}
|
||||
|
||||
// SetNillableQulity sets the "qulity" field if the given value is not nil.
|
||||
func (ilu *ImportListUpdate) SetNillableQulity(s *string) *ImportListUpdate {
|
||||
if s != nil {
|
||||
ilu.SetQulity(*s)
|
||||
}
|
||||
return ilu
|
||||
}
|
||||
|
||||
// SetStorageID sets the "storage_id" field.
|
||||
func (ilu *ImportListUpdate) SetStorageID(i int) *ImportListUpdate {
|
||||
ilu.mutation.ResetStorageID()
|
||||
ilu.mutation.SetStorageID(i)
|
||||
return ilu
|
||||
}
|
||||
|
||||
// SetNillableStorageID sets the "storage_id" field if the given value is not nil.
|
||||
func (ilu *ImportListUpdate) SetNillableStorageID(i *int) *ImportListUpdate {
|
||||
if i != nil {
|
||||
ilu.SetStorageID(*i)
|
||||
}
|
||||
return ilu
|
||||
}
|
||||
|
||||
// AddStorageID adds i to the "storage_id" field.
|
||||
func (ilu *ImportListUpdate) AddStorageID(i int) *ImportListUpdate {
|
||||
ilu.mutation.AddStorageID(i)
|
||||
return ilu
|
||||
}
|
||||
|
||||
// SetSettings sets the "settings" field.
|
||||
func (ilu *ImportListUpdate) SetSettings(sls schema.ImportListSettings) *ImportListUpdate {
|
||||
ilu.mutation.SetSettings(sls)
|
||||
return ilu
|
||||
}
|
||||
|
||||
// SetNillableSettings sets the "settings" field if the given value is not nil.
|
||||
func (ilu *ImportListUpdate) SetNillableSettings(sls *schema.ImportListSettings) *ImportListUpdate {
|
||||
if sls != nil {
|
||||
ilu.SetSettings(*sls)
|
||||
}
|
||||
return ilu
|
||||
}
|
||||
|
||||
// ClearSettings clears the value of the "settings" field.
|
||||
func (ilu *ImportListUpdate) ClearSettings() *ImportListUpdate {
|
||||
ilu.mutation.ClearSettings()
|
||||
return ilu
|
||||
}
|
||||
|
||||
// Mutation returns the ImportListMutation object of the builder.
|
||||
func (ilu *ImportListUpdate) Mutation() *ImportListMutation {
|
||||
return ilu.mutation
|
||||
}
|
||||
|
||||
// Save executes the query and returns the number of nodes affected by the update operation.
|
||||
func (ilu *ImportListUpdate) Save(ctx context.Context) (int, error) {
|
||||
return withHooks(ctx, ilu.sqlSave, ilu.mutation, ilu.hooks)
|
||||
}
|
||||
|
||||
// SaveX is like Save, but panics if an error occurs.
|
||||
func (ilu *ImportListUpdate) SaveX(ctx context.Context) int {
|
||||
affected, err := ilu.Save(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return affected
|
||||
}
|
||||
|
||||
// Exec executes the query.
|
||||
func (ilu *ImportListUpdate) Exec(ctx context.Context) error {
|
||||
_, err := ilu.Save(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (ilu *ImportListUpdate) ExecX(ctx context.Context) {
|
||||
if err := ilu.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// check runs all checks and user-defined validators on the builder.
|
||||
func (ilu *ImportListUpdate) check() error {
|
||||
if v, ok := ilu.mutation.GetType(); ok {
|
||||
if err := importlist.TypeValidator(v); err != nil {
|
||||
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "ImportList.type": %w`, err)}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ilu *ImportListUpdate) sqlSave(ctx context.Context) (n int, err error) {
|
||||
if err := ilu.check(); err != nil {
|
||||
return n, err
|
||||
}
|
||||
_spec := sqlgraph.NewUpdateSpec(importlist.Table, importlist.Columns, sqlgraph.NewFieldSpec(importlist.FieldID, field.TypeInt))
|
||||
if ps := ilu.mutation.predicates; len(ps) > 0 {
|
||||
_spec.Predicate = func(selector *sql.Selector) {
|
||||
for i := range ps {
|
||||
ps[i](selector)
|
||||
}
|
||||
}
|
||||
}
|
||||
if value, ok := ilu.mutation.Name(); ok {
|
||||
_spec.SetField(importlist.FieldName, field.TypeString, value)
|
||||
}
|
||||
if value, ok := ilu.mutation.GetType(); ok {
|
||||
_spec.SetField(importlist.FieldType, field.TypeEnum, value)
|
||||
}
|
||||
if value, ok := ilu.mutation.URL(); ok {
|
||||
_spec.SetField(importlist.FieldURL, field.TypeString, value)
|
||||
}
|
||||
if ilu.mutation.URLCleared() {
|
||||
_spec.ClearField(importlist.FieldURL, field.TypeString)
|
||||
}
|
||||
if value, ok := ilu.mutation.Qulity(); ok {
|
||||
_spec.SetField(importlist.FieldQulity, field.TypeString, value)
|
||||
}
|
||||
if value, ok := ilu.mutation.StorageID(); ok {
|
||||
_spec.SetField(importlist.FieldStorageID, field.TypeInt, value)
|
||||
}
|
||||
if value, ok := ilu.mutation.AddedStorageID(); ok {
|
||||
_spec.AddField(importlist.FieldStorageID, field.TypeInt, value)
|
||||
}
|
||||
if value, ok := ilu.mutation.Settings(); ok {
|
||||
_spec.SetField(importlist.FieldSettings, field.TypeJSON, value)
|
||||
}
|
||||
if ilu.mutation.SettingsCleared() {
|
||||
_spec.ClearField(importlist.FieldSettings, field.TypeJSON)
|
||||
}
|
||||
if n, err = sqlgraph.UpdateNodes(ctx, ilu.driver, _spec); err != nil {
|
||||
if _, ok := err.(*sqlgraph.NotFoundError); ok {
|
||||
err = &NotFoundError{importlist.Label}
|
||||
} else if sqlgraph.IsConstraintError(err) {
|
||||
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||
}
|
||||
return 0, err
|
||||
}
|
||||
ilu.mutation.done = true
|
||||
return n, nil
|
||||
}
|
||||
|
||||
// ImportListUpdateOne is the builder for updating a single ImportList entity.
|
||||
type ImportListUpdateOne struct {
|
||||
config
|
||||
fields []string
|
||||
hooks []Hook
|
||||
mutation *ImportListMutation
|
||||
}
|
||||
|
||||
// SetName sets the "name" field.
|
||||
func (iluo *ImportListUpdateOne) SetName(s string) *ImportListUpdateOne {
|
||||
iluo.mutation.SetName(s)
|
||||
return iluo
|
||||
}
|
||||
|
||||
// SetNillableName sets the "name" field if the given value is not nil.
|
||||
func (iluo *ImportListUpdateOne) SetNillableName(s *string) *ImportListUpdateOne {
|
||||
if s != nil {
|
||||
iluo.SetName(*s)
|
||||
}
|
||||
return iluo
|
||||
}
|
||||
|
||||
// SetType sets the "type" field.
|
||||
func (iluo *ImportListUpdateOne) SetType(i importlist.Type) *ImportListUpdateOne {
|
||||
iluo.mutation.SetType(i)
|
||||
return iluo
|
||||
}
|
||||
|
||||
// SetNillableType sets the "type" field if the given value is not nil.
|
||||
func (iluo *ImportListUpdateOne) SetNillableType(i *importlist.Type) *ImportListUpdateOne {
|
||||
if i != nil {
|
||||
iluo.SetType(*i)
|
||||
}
|
||||
return iluo
|
||||
}
|
||||
|
||||
// SetURL sets the "url" field.
|
||||
func (iluo *ImportListUpdateOne) SetURL(s string) *ImportListUpdateOne {
|
||||
iluo.mutation.SetURL(s)
|
||||
return iluo
|
||||
}
|
||||
|
||||
// SetNillableURL sets the "url" field if the given value is not nil.
|
||||
func (iluo *ImportListUpdateOne) SetNillableURL(s *string) *ImportListUpdateOne {
|
||||
if s != nil {
|
||||
iluo.SetURL(*s)
|
||||
}
|
||||
return iluo
|
||||
}
|
||||
|
||||
// ClearURL clears the value of the "url" field.
|
||||
func (iluo *ImportListUpdateOne) ClearURL() *ImportListUpdateOne {
|
||||
iluo.mutation.ClearURL()
|
||||
return iluo
|
||||
}
|
||||
|
||||
// SetQulity sets the "qulity" field.
|
||||
func (iluo *ImportListUpdateOne) SetQulity(s string) *ImportListUpdateOne {
|
||||
iluo.mutation.SetQulity(s)
|
||||
return iluo
|
||||
}
|
||||
|
||||
// SetNillableQulity sets the "qulity" field if the given value is not nil.
|
||||
func (iluo *ImportListUpdateOne) SetNillableQulity(s *string) *ImportListUpdateOne {
|
||||
if s != nil {
|
||||
iluo.SetQulity(*s)
|
||||
}
|
||||
return iluo
|
||||
}
|
||||
|
||||
// SetStorageID sets the "storage_id" field.
|
||||
func (iluo *ImportListUpdateOne) SetStorageID(i int) *ImportListUpdateOne {
|
||||
iluo.mutation.ResetStorageID()
|
||||
iluo.mutation.SetStorageID(i)
|
||||
return iluo
|
||||
}
|
||||
|
||||
// SetNillableStorageID sets the "storage_id" field if the given value is not nil.
|
||||
func (iluo *ImportListUpdateOne) SetNillableStorageID(i *int) *ImportListUpdateOne {
|
||||
if i != nil {
|
||||
iluo.SetStorageID(*i)
|
||||
}
|
||||
return iluo
|
||||
}
|
||||
|
||||
// AddStorageID adds i to the "storage_id" field.
|
||||
func (iluo *ImportListUpdateOne) AddStorageID(i int) *ImportListUpdateOne {
|
||||
iluo.mutation.AddStorageID(i)
|
||||
return iluo
|
||||
}
|
||||
|
||||
// SetSettings sets the "settings" field.
|
||||
func (iluo *ImportListUpdateOne) SetSettings(sls schema.ImportListSettings) *ImportListUpdateOne {
|
||||
iluo.mutation.SetSettings(sls)
|
||||
return iluo
|
||||
}
|
||||
|
||||
// SetNillableSettings sets the "settings" field if the given value is not nil.
|
||||
func (iluo *ImportListUpdateOne) SetNillableSettings(sls *schema.ImportListSettings) *ImportListUpdateOne {
|
||||
if sls != nil {
|
||||
iluo.SetSettings(*sls)
|
||||
}
|
||||
return iluo
|
||||
}
|
||||
|
||||
// ClearSettings clears the value of the "settings" field.
|
||||
func (iluo *ImportListUpdateOne) ClearSettings() *ImportListUpdateOne {
|
||||
iluo.mutation.ClearSettings()
|
||||
return iluo
|
||||
}
|
||||
|
||||
// Mutation returns the ImportListMutation object of the builder.
|
||||
func (iluo *ImportListUpdateOne) Mutation() *ImportListMutation {
|
||||
return iluo.mutation
|
||||
}
|
||||
|
||||
// Where appends a list of predicates to the ImportListUpdate builder.
|
||||
func (iluo *ImportListUpdateOne) Where(ps ...predicate.ImportList) *ImportListUpdateOne {
|
||||
iluo.mutation.Where(ps...)
|
||||
return iluo
|
||||
}
|
||||
|
||||
// Select allows selecting one or more fields (columns) of the returned entity.
|
||||
// The default is selecting all fields defined in the entity schema.
|
||||
func (iluo *ImportListUpdateOne) Select(field string, fields ...string) *ImportListUpdateOne {
|
||||
iluo.fields = append([]string{field}, fields...)
|
||||
return iluo
|
||||
}
|
||||
|
||||
// Save executes the query and returns the updated ImportList entity.
|
||||
func (iluo *ImportListUpdateOne) Save(ctx context.Context) (*ImportList, error) {
|
||||
return withHooks(ctx, iluo.sqlSave, iluo.mutation, iluo.hooks)
|
||||
}
|
||||
|
||||
// SaveX is like Save, but panics if an error occurs.
|
||||
func (iluo *ImportListUpdateOne) SaveX(ctx context.Context) *ImportList {
|
||||
node, err := iluo.Save(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
// Exec executes the query on the entity.
|
||||
func (iluo *ImportListUpdateOne) Exec(ctx context.Context) error {
|
||||
_, err := iluo.Save(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (iluo *ImportListUpdateOne) ExecX(ctx context.Context) {
|
||||
if err := iluo.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// check runs all checks and user-defined validators on the builder.
|
||||
func (iluo *ImportListUpdateOne) check() error {
|
||||
if v, ok := iluo.mutation.GetType(); ok {
|
||||
if err := importlist.TypeValidator(v); err != nil {
|
||||
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "ImportList.type": %w`, err)}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (iluo *ImportListUpdateOne) sqlSave(ctx context.Context) (_node *ImportList, err error) {
|
||||
if err := iluo.check(); err != nil {
|
||||
return _node, err
|
||||
}
|
||||
_spec := sqlgraph.NewUpdateSpec(importlist.Table, importlist.Columns, sqlgraph.NewFieldSpec(importlist.FieldID, field.TypeInt))
|
||||
id, ok := iluo.mutation.ID()
|
||||
if !ok {
|
||||
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "ImportList.id" for update`)}
|
||||
}
|
||||
_spec.Node.ID.Value = id
|
||||
if fields := iluo.fields; len(fields) > 0 {
|
||||
_spec.Node.Columns = make([]string, 0, len(fields))
|
||||
_spec.Node.Columns = append(_spec.Node.Columns, importlist.FieldID)
|
||||
for _, f := range fields {
|
||||
if !importlist.ValidColumn(f) {
|
||||
return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
|
||||
}
|
||||
if f != importlist.FieldID {
|
||||
_spec.Node.Columns = append(_spec.Node.Columns, f)
|
||||
}
|
||||
}
|
||||
}
|
||||
if ps := iluo.mutation.predicates; len(ps) > 0 {
|
||||
_spec.Predicate = func(selector *sql.Selector) {
|
||||
for i := range ps {
|
||||
ps[i](selector)
|
||||
}
|
||||
}
|
||||
}
|
||||
if value, ok := iluo.mutation.Name(); ok {
|
||||
_spec.SetField(importlist.FieldName, field.TypeString, value)
|
||||
}
|
||||
if value, ok := iluo.mutation.GetType(); ok {
|
||||
_spec.SetField(importlist.FieldType, field.TypeEnum, value)
|
||||
}
|
||||
if value, ok := iluo.mutation.URL(); ok {
|
||||
_spec.SetField(importlist.FieldURL, field.TypeString, value)
|
||||
}
|
||||
if iluo.mutation.URLCleared() {
|
||||
_spec.ClearField(importlist.FieldURL, field.TypeString)
|
||||
}
|
||||
if value, ok := iluo.mutation.Qulity(); ok {
|
||||
_spec.SetField(importlist.FieldQulity, field.TypeString, value)
|
||||
}
|
||||
if value, ok := iluo.mutation.StorageID(); ok {
|
||||
_spec.SetField(importlist.FieldStorageID, field.TypeInt, value)
|
||||
}
|
||||
if value, ok := iluo.mutation.AddedStorageID(); ok {
|
||||
_spec.AddField(importlist.FieldStorageID, field.TypeInt, value)
|
||||
}
|
||||
if value, ok := iluo.mutation.Settings(); ok {
|
||||
_spec.SetField(importlist.FieldSettings, field.TypeJSON, value)
|
||||
}
|
||||
if iluo.mutation.SettingsCleared() {
|
||||
_spec.ClearField(importlist.FieldSettings, field.TypeJSON)
|
||||
}
|
||||
_node = &ImportList{config: iluo.config}
|
||||
_spec.Assign = _node.assignValues
|
||||
_spec.ScanValues = _node.scanValues
|
||||
if err = sqlgraph.UpdateNode(ctx, iluo.driver, _spec); err != nil {
|
||||
if _, ok := err.(*sqlgraph.NotFoundError); ok {
|
||||
err = &NotFoundError{importlist.Label}
|
||||
} else if sqlgraph.IsConstraintError(err) {
|
||||
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
iluo.mutation.done = true
|
||||
return _node, nil
|
||||
}
|
||||
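For context, a minimal sketch of how the ImportList update builders above might be driven from application code; the client construction and UpdateOneID helper are assumed from ent's standard generated API and are not part of this diff.

// imports assumed: "context", "polaris/ent"
func renameImportList(ctx context.Context, client *ent.Client, id int, name string) (*ent.ImportList, error) {
    // SetName comes from the ImportListUpdateOne builder above; Save runs
    // check() and sqlSave and returns the updated entity.
    return client.ImportList.
        UpdateOneID(id).
        SetName(name).
        Save(ctx)
}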
@@ -25,7 +25,11 @@ type Indexers struct {
|
||||
// EnableRss holds the value of the "enable_rss" field.
|
||||
EnableRss bool `json:"enable_rss,omitempty"`
|
||||
// Priority holds the value of the "priority" field.
|
||||
Priority int `json:"priority,omitempty"`
|
||||
// minimal seed ratio required, before removing torrent
|
||||
SeedRatio float32 `json:"seed_ratio,omitempty"`
|
||||
// Disabled holds the value of the "disabled" field.
|
||||
Disabled bool `json:"disabled,omitempty"`
|
||||
selectValues sql.SelectValues
|
||||
}
|
||||
|
||||
@@ -34,8 +38,10 @@ func (*Indexers) scanValues(columns []string) ([]any, error) {
|
||||
values := make([]any, len(columns))
|
||||
for i := range columns {
|
||||
switch columns[i] {
|
||||
case indexers.FieldEnableRss:
|
||||
case indexers.FieldEnableRss, indexers.FieldDisabled:
|
||||
values[i] = new(sql.NullBool)
|
||||
case indexers.FieldSeedRatio:
|
||||
values[i] = new(sql.NullFloat64)
|
||||
case indexers.FieldID, indexers.FieldPriority:
|
||||
values[i] = new(sql.NullInt64)
|
||||
case indexers.FieldName, indexers.FieldImplementation, indexers.FieldSettings:
|
||||
@@ -91,6 +97,18 @@ func (i *Indexers) assignValues(columns []string, values []any) error {
|
||||
} else if value.Valid {
|
||||
i.Priority = int(value.Int64)
|
||||
}
|
||||
case indexers.FieldSeedRatio:
|
||||
if value, ok := values[j].(*sql.NullFloat64); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field seed_ratio", values[j])
|
||||
} else if value.Valid {
|
||||
i.SeedRatio = float32(value.Float64)
|
||||
}
|
||||
case indexers.FieldDisabled:
|
||||
if value, ok := values[j].(*sql.NullBool); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field disabled", values[j])
|
||||
} else if value.Valid {
|
||||
i.Disabled = value.Bool
|
||||
}
|
||||
default:
|
||||
i.selectValues.Set(columns[j], values[j])
|
||||
}
|
||||
@@ -141,6 +159,12 @@ func (i *Indexers) String() string {
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("priority=")
|
||||
builder.WriteString(fmt.Sprintf("%v", i.Priority))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("seed_ratio=")
|
||||
builder.WriteString(fmt.Sprintf("%v", i.SeedRatio))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("disabled=")
|
||||
builder.WriteString(fmt.Sprintf("%v", i.Disabled))
|
||||
builder.WriteByte(')')
|
||||
return builder.String()
|
||||
}
|
||||
|
||||
@@ -21,6 +21,10 @@ const (
|
||||
FieldEnableRss = "enable_rss"
|
||||
// FieldPriority holds the string denoting the priority field in the database.
|
||||
FieldPriority = "priority"
|
||||
// FieldSeedRatio holds the string denoting the seed_ratio field in the database.
|
||||
FieldSeedRatio = "seed_ratio"
|
||||
// FieldDisabled holds the string denoting the disabled field in the database.
|
||||
FieldDisabled = "disabled"
|
||||
// Table holds the table name of the indexers in the database.
|
||||
Table = "indexers"
|
||||
)
|
||||
@@ -33,6 +37,8 @@ var Columns = []string{
|
||||
FieldSettings,
|
||||
FieldEnableRss,
|
||||
FieldPriority,
|
||||
FieldSeedRatio,
|
||||
FieldDisabled,
|
||||
}
|
||||
|
||||
// ValidColumn reports if the column name is valid (part of the table columns).
|
||||
@@ -48,6 +54,12 @@ func ValidColumn(column string) bool {
|
||||
var (
|
||||
// DefaultEnableRss holds the default value on creation for the "enable_rss" field.
|
||||
DefaultEnableRss bool
|
||||
// DefaultPriority holds the default value on creation for the "priority" field.
|
||||
DefaultPriority int
|
||||
// DefaultSeedRatio holds the default value on creation for the "seed_ratio" field.
|
||||
DefaultSeedRatio float32
|
||||
// DefaultDisabled holds the default value on creation for the "disabled" field.
|
||||
DefaultDisabled bool
|
||||
)
|
||||
|
||||
// OrderOption defines the ordering options for the Indexers queries.
|
||||
@@ -82,3 +94,13 @@ func ByEnableRss(opts ...sql.OrderTermOption) OrderOption {
|
||||
func ByPriority(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldPriority, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// BySeedRatio orders the results by the seed_ratio field.
|
||||
func BySeedRatio(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldSeedRatio, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByDisabled orders the results by the disabled field.
|
||||
func ByDisabled(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldDisabled, opts...).ToFunc()
|
||||
}
|
||||
|
||||
@@ -78,6 +78,16 @@ func Priority(v int) predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldEQ(FieldPriority, v))
|
||||
}
|
||||
|
||||
// SeedRatio applies equality check predicate on the "seed_ratio" field. It's identical to SeedRatioEQ.
|
||||
func SeedRatio(v float32) predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldEQ(FieldSeedRatio, v))
|
||||
}
|
||||
|
||||
// Disabled applies equality check predicate on the "disabled" field. It's identical to DisabledEQ.
|
||||
func Disabled(v bool) predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldEQ(FieldDisabled, v))
|
||||
}
|
||||
|
||||
// NameEQ applies the EQ predicate on the "name" field.
|
||||
func NameEQ(v string) predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldEQ(FieldName, v))
|
||||
@@ -323,6 +333,76 @@ func PriorityLTE(v int) predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldLTE(FieldPriority, v))
|
||||
}
|
||||
|
||||
// SeedRatioEQ applies the EQ predicate on the "seed_ratio" field.
|
||||
func SeedRatioEQ(v float32) predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldEQ(FieldSeedRatio, v))
|
||||
}
|
||||
|
||||
// SeedRatioNEQ applies the NEQ predicate on the "seed_ratio" field.
|
||||
func SeedRatioNEQ(v float32) predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldNEQ(FieldSeedRatio, v))
|
||||
}
|
||||
|
||||
// SeedRatioIn applies the In predicate on the "seed_ratio" field.
|
||||
func SeedRatioIn(vs ...float32) predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldIn(FieldSeedRatio, vs...))
|
||||
}
|
||||
|
||||
// SeedRatioNotIn applies the NotIn predicate on the "seed_ratio" field.
|
||||
func SeedRatioNotIn(vs ...float32) predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldNotIn(FieldSeedRatio, vs...))
|
||||
}
|
||||
|
||||
// SeedRatioGT applies the GT predicate on the "seed_ratio" field.
|
||||
func SeedRatioGT(v float32) predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldGT(FieldSeedRatio, v))
|
||||
}
|
||||
|
||||
// SeedRatioGTE applies the GTE predicate on the "seed_ratio" field.
|
||||
func SeedRatioGTE(v float32) predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldGTE(FieldSeedRatio, v))
|
||||
}
|
||||
|
||||
// SeedRatioLT applies the LT predicate on the "seed_ratio" field.
|
||||
func SeedRatioLT(v float32) predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldLT(FieldSeedRatio, v))
|
||||
}
|
||||
|
||||
// SeedRatioLTE applies the LTE predicate on the "seed_ratio" field.
|
||||
func SeedRatioLTE(v float32) predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldLTE(FieldSeedRatio, v))
|
||||
}
|
||||
|
||||
// SeedRatioIsNil applies the IsNil predicate on the "seed_ratio" field.
|
||||
func SeedRatioIsNil() predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldIsNull(FieldSeedRatio))
|
||||
}
|
||||
|
||||
// SeedRatioNotNil applies the NotNil predicate on the "seed_ratio" field.
|
||||
func SeedRatioNotNil() predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldNotNull(FieldSeedRatio))
|
||||
}
|
||||
|
||||
// DisabledEQ applies the EQ predicate on the "disabled" field.
|
||||
func DisabledEQ(v bool) predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldEQ(FieldDisabled, v))
|
||||
}
|
||||
|
||||
// DisabledNEQ applies the NEQ predicate on the "disabled" field.
|
||||
func DisabledNEQ(v bool) predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldNEQ(FieldDisabled, v))
|
||||
}
|
||||
|
||||
// DisabledIsNil applies the IsNil predicate on the "disabled" field.
|
||||
func DisabledIsNil() predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldIsNull(FieldDisabled))
|
||||
}
|
||||
|
||||
// DisabledNotNil applies the NotNil predicate on the "disabled" field.
|
||||
func DisabledNotNil() predicate.Indexers {
|
||||
return predicate.Indexers(sql.FieldNotNull(FieldDisabled))
|
||||
}
|
||||
|
||||
// And groups predicates with the AND operator between them.
|
||||
func And(predicates ...predicate.Indexers) predicate.Indexers {
|
||||
return predicate.Indexers(sql.AndPredicates(predicates...))
|
||||
|
||||
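The new seed_ratio and disabled predicates compose like any other generated predicate; a hedged sketch follows, where Query/Where/All are assumed from the standard generated query API.

// imports assumed: "context", "polaris/ent", "polaris/ent/indexers"
func activeSeedingIndexers(ctx context.Context, client *ent.Client) ([]*ent.Indexers, error) {
    // Indexers that are explicitly not disabled and must seed to at least ratio 1.0.
    // Rows with a NULL "disabled" column (created before this migration) are excluded.
    return client.Indexers.Query().
        Where(
            indexers.DisabledEQ(false),
            indexers.SeedRatioGTE(1.0),
        ).
        All(ctx)
}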
@@ -57,6 +57,42 @@ func (ic *IndexersCreate) SetPriority(i int) *IndexersCreate {
|
||||
return ic
|
||||
}
|
||||
|
||||
// SetNillablePriority sets the "priority" field if the given value is not nil.
|
||||
func (ic *IndexersCreate) SetNillablePriority(i *int) *IndexersCreate {
|
||||
if i != nil {
|
||||
ic.SetPriority(*i)
|
||||
}
|
||||
return ic
|
||||
}
|
||||
|
||||
// SetSeedRatio sets the "seed_ratio" field.
|
||||
func (ic *IndexersCreate) SetSeedRatio(f float32) *IndexersCreate {
|
||||
ic.mutation.SetSeedRatio(f)
|
||||
return ic
|
||||
}
|
||||
|
||||
// SetNillableSeedRatio sets the "seed_ratio" field if the given value is not nil.
|
||||
func (ic *IndexersCreate) SetNillableSeedRatio(f *float32) *IndexersCreate {
|
||||
if f != nil {
|
||||
ic.SetSeedRatio(*f)
|
||||
}
|
||||
return ic
|
||||
}
|
||||
|
||||
// SetDisabled sets the "disabled" field.
|
||||
func (ic *IndexersCreate) SetDisabled(b bool) *IndexersCreate {
|
||||
ic.mutation.SetDisabled(b)
|
||||
return ic
|
||||
}
|
||||
|
||||
// SetNillableDisabled sets the "disabled" field if the given value is not nil.
|
||||
func (ic *IndexersCreate) SetNillableDisabled(b *bool) *IndexersCreate {
|
||||
if b != nil {
|
||||
ic.SetDisabled(*b)
|
||||
}
|
||||
return ic
|
||||
}
|
||||
|
||||
// Mutation returns the IndexersMutation object of the builder.
|
||||
func (ic *IndexersCreate) Mutation() *IndexersMutation {
|
||||
return ic.mutation
|
||||
@@ -96,6 +132,18 @@ func (ic *IndexersCreate) defaults() {
|
||||
v := indexers.DefaultEnableRss
|
||||
ic.mutation.SetEnableRss(v)
|
||||
}
|
||||
if _, ok := ic.mutation.Priority(); !ok {
|
||||
v := indexers.DefaultPriority
|
||||
ic.mutation.SetPriority(v)
|
||||
}
|
||||
if _, ok := ic.mutation.SeedRatio(); !ok {
|
||||
v := indexers.DefaultSeedRatio
|
||||
ic.mutation.SetSeedRatio(v)
|
||||
}
|
||||
if _, ok := ic.mutation.Disabled(); !ok {
|
||||
v := indexers.DefaultDisabled
|
||||
ic.mutation.SetDisabled(v)
|
||||
}
|
||||
}
|
||||
|
||||
// check runs all checks and user-defined validators on the builder.
|
||||
@@ -161,6 +209,14 @@ func (ic *IndexersCreate) createSpec() (*Indexers, *sqlgraph.CreateSpec) {
|
||||
_spec.SetField(indexers.FieldPriority, field.TypeInt, value)
|
||||
_node.Priority = value
|
||||
}
|
||||
if value, ok := ic.mutation.SeedRatio(); ok {
|
||||
_spec.SetField(indexers.FieldSeedRatio, field.TypeFloat32, value)
|
||||
_node.SeedRatio = value
|
||||
}
|
||||
if value, ok := ic.mutation.Disabled(); ok {
|
||||
_spec.SetField(indexers.FieldDisabled, field.TypeBool, value)
|
||||
_node.Disabled = value
|
||||
}
|
||||
return _node, _spec
|
||||
}
|
||||
|
||||
|
||||
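A sketch of creating an indexer with the new fields; SetName, SetImplementation and SetSettings are assumed from the indexers schema, since only the priority/seed_ratio/disabled setters appear in this diff.

// imports assumed: "context", "polaris/ent"
func addIndexer(ctx context.Context, client *ent.Client) (*ent.Indexers, error) {
    return client.Indexers.Create().
        SetName("example-indexer").   // assumed setter for the "name" column
        SetImplementation("torznab"). // assumed setter for the "implementation" column
        SetSettings("{}").            // assumed setter for the "settings" column
        SetPriority(50).              // defaults() would also fill 50 if omitted
        SetSeedRatio(2.0).
        SetDisabled(false).
        Save(ctx)
}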
@@ -104,6 +104,53 @@ func (iu *IndexersUpdate) AddPriority(i int) *IndexersUpdate {
|
||||
return iu
|
||||
}
|
||||
|
||||
// SetSeedRatio sets the "seed_ratio" field.
|
||||
func (iu *IndexersUpdate) SetSeedRatio(f float32) *IndexersUpdate {
|
||||
iu.mutation.ResetSeedRatio()
|
||||
iu.mutation.SetSeedRatio(f)
|
||||
return iu
|
||||
}
|
||||
|
||||
// SetNillableSeedRatio sets the "seed_ratio" field if the given value is not nil.
|
||||
func (iu *IndexersUpdate) SetNillableSeedRatio(f *float32) *IndexersUpdate {
|
||||
if f != nil {
|
||||
iu.SetSeedRatio(*f)
|
||||
}
|
||||
return iu
|
||||
}
|
||||
|
||||
// AddSeedRatio adds f to the "seed_ratio" field.
|
||||
func (iu *IndexersUpdate) AddSeedRatio(f float32) *IndexersUpdate {
|
||||
iu.mutation.AddSeedRatio(f)
|
||||
return iu
|
||||
}
|
||||
|
||||
// ClearSeedRatio clears the value of the "seed_ratio" field.
|
||||
func (iu *IndexersUpdate) ClearSeedRatio() *IndexersUpdate {
|
||||
iu.mutation.ClearSeedRatio()
|
||||
return iu
|
||||
}
|
||||
|
||||
// SetDisabled sets the "disabled" field.
|
||||
func (iu *IndexersUpdate) SetDisabled(b bool) *IndexersUpdate {
|
||||
iu.mutation.SetDisabled(b)
|
||||
return iu
|
||||
}
|
||||
|
||||
// SetNillableDisabled sets the "disabled" field if the given value is not nil.
|
||||
func (iu *IndexersUpdate) SetNillableDisabled(b *bool) *IndexersUpdate {
|
||||
if b != nil {
|
||||
iu.SetDisabled(*b)
|
||||
}
|
||||
return iu
|
||||
}
|
||||
|
||||
// ClearDisabled clears the value of the "disabled" field.
|
||||
func (iu *IndexersUpdate) ClearDisabled() *IndexersUpdate {
|
||||
iu.mutation.ClearDisabled()
|
||||
return iu
|
||||
}
|
||||
|
||||
// Mutation returns the IndexersMutation object of the builder.
|
||||
func (iu *IndexersUpdate) Mutation() *IndexersMutation {
|
||||
return iu.mutation
|
||||
@@ -163,6 +210,21 @@ func (iu *IndexersUpdate) sqlSave(ctx context.Context) (n int, err error) {
|
||||
if value, ok := iu.mutation.AddedPriority(); ok {
|
||||
_spec.AddField(indexers.FieldPriority, field.TypeInt, value)
|
||||
}
|
||||
if value, ok := iu.mutation.SeedRatio(); ok {
|
||||
_spec.SetField(indexers.FieldSeedRatio, field.TypeFloat32, value)
|
||||
}
|
||||
if value, ok := iu.mutation.AddedSeedRatio(); ok {
|
||||
_spec.AddField(indexers.FieldSeedRatio, field.TypeFloat32, value)
|
||||
}
|
||||
if iu.mutation.SeedRatioCleared() {
|
||||
_spec.ClearField(indexers.FieldSeedRatio, field.TypeFloat32)
|
||||
}
|
||||
if value, ok := iu.mutation.Disabled(); ok {
|
||||
_spec.SetField(indexers.FieldDisabled, field.TypeBool, value)
|
||||
}
|
||||
if iu.mutation.DisabledCleared() {
|
||||
_spec.ClearField(indexers.FieldDisabled, field.TypeBool)
|
||||
}
|
||||
if n, err = sqlgraph.UpdateNodes(ctx, iu.driver, _spec); err != nil {
|
||||
if _, ok := err.(*sqlgraph.NotFoundError); ok {
|
||||
err = &NotFoundError{indexers.Label}
|
||||
@@ -260,6 +322,53 @@ func (iuo *IndexersUpdateOne) AddPriority(i int) *IndexersUpdateOne {
|
||||
return iuo
|
||||
}
|
||||
|
||||
// SetSeedRatio sets the "seed_ratio" field.
|
||||
func (iuo *IndexersUpdateOne) SetSeedRatio(f float32) *IndexersUpdateOne {
|
||||
iuo.mutation.ResetSeedRatio()
|
||||
iuo.mutation.SetSeedRatio(f)
|
||||
return iuo
|
||||
}
|
||||
|
||||
// SetNillableSeedRatio sets the "seed_ratio" field if the given value is not nil.
|
||||
func (iuo *IndexersUpdateOne) SetNillableSeedRatio(f *float32) *IndexersUpdateOne {
|
||||
if f != nil {
|
||||
iuo.SetSeedRatio(*f)
|
||||
}
|
||||
return iuo
|
||||
}
|
||||
|
||||
// AddSeedRatio adds f to the "seed_ratio" field.
|
||||
func (iuo *IndexersUpdateOne) AddSeedRatio(f float32) *IndexersUpdateOne {
|
||||
iuo.mutation.AddSeedRatio(f)
|
||||
return iuo
|
||||
}
|
||||
|
||||
// ClearSeedRatio clears the value of the "seed_ratio" field.
|
||||
func (iuo *IndexersUpdateOne) ClearSeedRatio() *IndexersUpdateOne {
|
||||
iuo.mutation.ClearSeedRatio()
|
||||
return iuo
|
||||
}
|
||||
|
||||
// SetDisabled sets the "disabled" field.
|
||||
func (iuo *IndexersUpdateOne) SetDisabled(b bool) *IndexersUpdateOne {
|
||||
iuo.mutation.SetDisabled(b)
|
||||
return iuo
|
||||
}
|
||||
|
||||
// SetNillableDisabled sets the "disabled" field if the given value is not nil.
|
||||
func (iuo *IndexersUpdateOne) SetNillableDisabled(b *bool) *IndexersUpdateOne {
|
||||
if b != nil {
|
||||
iuo.SetDisabled(*b)
|
||||
}
|
||||
return iuo
|
||||
}
|
||||
|
||||
// ClearDisabled clears the value of the "disabled" field.
|
||||
func (iuo *IndexersUpdateOne) ClearDisabled() *IndexersUpdateOne {
|
||||
iuo.mutation.ClearDisabled()
|
||||
return iuo
|
||||
}
|
||||
|
||||
// Mutation returns the IndexersMutation object of the builder.
|
||||
func (iuo *IndexersUpdateOne) Mutation() *IndexersMutation {
|
||||
return iuo.mutation
|
||||
@@ -349,6 +458,21 @@ func (iuo *IndexersUpdateOne) sqlSave(ctx context.Context) (_node *Indexers, err
|
||||
if value, ok := iuo.mutation.AddedPriority(); ok {
|
||||
_spec.AddField(indexers.FieldPriority, field.TypeInt, value)
|
||||
}
|
||||
if value, ok := iuo.mutation.SeedRatio(); ok {
|
||||
_spec.SetField(indexers.FieldSeedRatio, field.TypeFloat32, value)
|
||||
}
|
||||
if value, ok := iuo.mutation.AddedSeedRatio(); ok {
|
||||
_spec.AddField(indexers.FieldSeedRatio, field.TypeFloat32, value)
|
||||
}
|
||||
if iuo.mutation.SeedRatioCleared() {
|
||||
_spec.ClearField(indexers.FieldSeedRatio, field.TypeFloat32)
|
||||
}
|
||||
if value, ok := iuo.mutation.Disabled(); ok {
|
||||
_spec.SetField(indexers.FieldDisabled, field.TypeBool, value)
|
||||
}
|
||||
if iuo.mutation.DisabledCleared() {
|
||||
_spec.ClearField(indexers.FieldDisabled, field.TypeBool)
|
||||
}
|
||||
_node = &Indexers{config: iuo.config}
|
||||
_spec.Assign = _node.assignValues
|
||||
_spec.ScanValues = _node.scanValues
|
||||
|
||||
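And the corresponding update path, a hedged sketch that relies on the ClearSeedRatio/SetDisabled builders defined above; UpdateOneID and Exec are assumed from the generated client.

// imports assumed: "context", "polaris/ent"
func disableIndexer(ctx context.Context, client *ent.Client, id int) error {
    // ClearSeedRatio sets the nullable column back to NULL; SetDisabled(true)
    // flips the new "disabled" flag. Exec discards the updated entity.
    return client.Indexers.
        UpdateOneID(id).
        ClearSeedRatio().
        SetDisabled(true).
        Exec(ctx)
}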
43
ent/media.go
@@ -3,8 +3,10 @@
|
||||
package ent
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"polaris/ent/media"
|
||||
"polaris/ent/schema"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
@@ -41,6 +43,12 @@ type Media struct {
|
||||
StorageID int `json:"storage_id,omitempty"`
|
||||
// TargetDir holds the value of the "target_dir" field.
|
||||
TargetDir string `json:"target_dir,omitempty"`
|
||||
// tv series only
|
||||
DownloadHistoryEpisodes bool `json:"download_history_episodes,omitempty"`
|
||||
// Limiter holds the value of the "limiter" field.
|
||||
Limiter schema.MediaLimiter `json:"limiter,omitempty"`
|
||||
// Extras holds the value of the "extras" field.
|
||||
Extras schema.MediaExtras `json:"extras,omitempty"`
|
||||
// Edges holds the relations/edges for other nodes in the graph.
|
||||
// The values are being populated by the MediaQuery when eager-loading is set.
|
||||
Edges MediaEdges `json:"edges"`
|
||||
@@ -70,6 +78,10 @@ func (*Media) scanValues(columns []string) ([]any, error) {
|
||||
values := make([]any, len(columns))
|
||||
for i := range columns {
|
||||
switch columns[i] {
|
||||
case media.FieldLimiter, media.FieldExtras:
|
||||
values[i] = new([]byte)
|
||||
case media.FieldDownloadHistoryEpisodes:
|
||||
values[i] = new(sql.NullBool)
|
||||
case media.FieldID, media.FieldTmdbID, media.FieldStorageID:
|
||||
values[i] = new(sql.NullInt64)
|
||||
case media.FieldImdbID, media.FieldMediaType, media.FieldNameCn, media.FieldNameEn, media.FieldOriginalName, media.FieldOverview, media.FieldAirDate, media.FieldResolution, media.FieldTargetDir:
|
||||
@@ -169,6 +181,28 @@ func (m *Media) assignValues(columns []string, values []any) error {
|
||||
} else if value.Valid {
|
||||
m.TargetDir = value.String
|
||||
}
|
||||
case media.FieldDownloadHistoryEpisodes:
|
||||
if value, ok := values[i].(*sql.NullBool); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field download_history_episodes", values[i])
|
||||
} else if value.Valid {
|
||||
m.DownloadHistoryEpisodes = value.Bool
|
||||
}
|
||||
case media.FieldLimiter:
|
||||
if value, ok := values[i].(*[]byte); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field limiter", values[i])
|
||||
} else if value != nil && len(*value) > 0 {
|
||||
if err := json.Unmarshal(*value, &m.Limiter); err != nil {
|
||||
return fmt.Errorf("unmarshal field limiter: %w", err)
|
||||
}
|
||||
}
|
||||
case media.FieldExtras:
|
||||
if value, ok := values[i].(*[]byte); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field extras", values[i])
|
||||
} else if value != nil && len(*value) > 0 {
|
||||
if err := json.Unmarshal(*value, &m.Extras); err != nil {
|
||||
return fmt.Errorf("unmarshal field extras: %w", err)
|
||||
}
|
||||
}
|
||||
default:
|
||||
m.selectValues.Set(columns[i], values[i])
|
||||
}
|
||||
@@ -245,6 +279,15 @@ func (m *Media) String() string {
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("target_dir=")
|
||||
builder.WriteString(m.TargetDir)
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("download_history_episodes=")
|
||||
builder.WriteString(fmt.Sprintf("%v", m.DownloadHistoryEpisodes))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("limiter=")
|
||||
builder.WriteString(fmt.Sprintf("%v", m.Limiter))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("extras=")
|
||||
builder.WriteString(fmt.Sprintf("%v", m.Extras))
|
||||
builder.WriteByte(')')
|
||||
return builder.String()
|
||||
}
|
||||
|
||||
@@ -39,6 +39,12 @@ const (
|
||||
FieldStorageID = "storage_id"
|
||||
// FieldTargetDir holds the string denoting the target_dir field in the database.
|
||||
FieldTargetDir = "target_dir"
|
||||
// FieldDownloadHistoryEpisodes holds the string denoting the download_history_episodes field in the database.
|
||||
FieldDownloadHistoryEpisodes = "download_history_episodes"
|
||||
// FieldLimiter holds the string denoting the limiter field in the database.
|
||||
FieldLimiter = "limiter"
|
||||
// FieldExtras holds the string denoting the extras field in the database.
|
||||
FieldExtras = "extras"
|
||||
// EdgeEpisodes holds the string denoting the episodes edge name in mutations.
|
||||
EdgeEpisodes = "episodes"
|
||||
// Table holds the table name of the media in the database.
|
||||
@@ -67,6 +73,9 @@ var Columns = []string{
|
||||
FieldResolution,
|
||||
FieldStorageID,
|
||||
FieldTargetDir,
|
||||
FieldDownloadHistoryEpisodes,
|
||||
FieldLimiter,
|
||||
FieldExtras,
|
||||
}
|
||||
|
||||
// ValidColumn reports if the column name is valid (part of the table columns).
|
||||
@@ -84,6 +93,8 @@ var (
|
||||
DefaultCreatedAt time.Time
|
||||
// DefaultAirDate holds the default value on creation for the "air_date" field.
|
||||
DefaultAirDate string
|
||||
// DefaultDownloadHistoryEpisodes holds the default value on creation for the "download_history_episodes" field.
|
||||
DefaultDownloadHistoryEpisodes bool
|
||||
)
|
||||
|
||||
// MediaType defines the type for the "media_type" enum field.
|
||||
@@ -119,7 +130,7 @@ const DefaultResolution = Resolution1080p
|
||||
const (
|
||||
Resolution720p Resolution = "720p"
|
||||
Resolution1080p Resolution = "1080p"
|
||||
Resolution4k Resolution = "4k"
|
||||
Resolution2160p Resolution = "2160p"
|
||||
)
|
||||
|
||||
func (r Resolution) String() string {
|
||||
@@ -129,7 +140,7 @@ func (r Resolution) String() string {
|
||||
// ResolutionValidator is a validator for the "resolution" field enum values. It is called by the builders before save.
|
||||
func ResolutionValidator(r Resolution) error {
|
||||
switch r {
|
||||
case Resolution720p, Resolution1080p, Resolution4k:
|
||||
case Resolution720p, Resolution1080p, Resolution2160p:
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("media: invalid enum value for resolution field: %q", r)
|
||||
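Note the enum rename here: "4k" is replaced by "2160p" in both the constants and the validator, so a stored "4k" value will now fail validation on save. A small sketch of the check:

// imports assumed: "fmt", "polaris/ent/media"
func checkResolution() {
    fmt.Println(media.ResolutionValidator(media.Resolution2160p))  // <nil>
    fmt.Println(media.ResolutionValidator(media.Resolution("4k"))) // invalid enum value error
}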
@@ -204,6 +215,11 @@ func ByTargetDir(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldTargetDir, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByDownloadHistoryEpisodes orders the results by the download_history_episodes field.
|
||||
func ByDownloadHistoryEpisodes(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldDownloadHistoryEpisodes, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByEpisodesCount orders the results by episodes count.
|
||||
func ByEpisodesCount(opts ...sql.OrderTermOption) OrderOption {
|
||||
return func(s *sql.Selector) {
|
||||
|
||||
@@ -105,6 +105,11 @@ func TargetDir(v string) predicate.Media {
|
||||
return predicate.Media(sql.FieldEQ(FieldTargetDir, v))
|
||||
}
|
||||
|
||||
// DownloadHistoryEpisodes applies equality check predicate on the "download_history_episodes" field. It's identical to DownloadHistoryEpisodesEQ.
|
||||
func DownloadHistoryEpisodes(v bool) predicate.Media {
|
||||
return predicate.Media(sql.FieldEQ(FieldDownloadHistoryEpisodes, v))
|
||||
}
|
||||
|
||||
// TmdbIDEQ applies the EQ predicate on the "tmdb_id" field.
|
||||
func TmdbIDEQ(v int) predicate.Media {
|
||||
return predicate.Media(sql.FieldEQ(FieldTmdbID, v))
|
||||
@@ -750,6 +755,46 @@ func TargetDirContainsFold(v string) predicate.Media {
|
||||
return predicate.Media(sql.FieldContainsFold(FieldTargetDir, v))
|
||||
}
|
||||
|
||||
// DownloadHistoryEpisodesEQ applies the EQ predicate on the "download_history_episodes" field.
|
||||
func DownloadHistoryEpisodesEQ(v bool) predicate.Media {
|
||||
return predicate.Media(sql.FieldEQ(FieldDownloadHistoryEpisodes, v))
|
||||
}
|
||||
|
||||
// DownloadHistoryEpisodesNEQ applies the NEQ predicate on the "download_history_episodes" field.
|
||||
func DownloadHistoryEpisodesNEQ(v bool) predicate.Media {
|
||||
return predicate.Media(sql.FieldNEQ(FieldDownloadHistoryEpisodes, v))
|
||||
}
|
||||
|
||||
// DownloadHistoryEpisodesIsNil applies the IsNil predicate on the "download_history_episodes" field.
|
||||
func DownloadHistoryEpisodesIsNil() predicate.Media {
|
||||
return predicate.Media(sql.FieldIsNull(FieldDownloadHistoryEpisodes))
|
||||
}
|
||||
|
||||
// DownloadHistoryEpisodesNotNil applies the NotNil predicate on the "download_history_episodes" field.
|
||||
func DownloadHistoryEpisodesNotNil() predicate.Media {
|
||||
return predicate.Media(sql.FieldNotNull(FieldDownloadHistoryEpisodes))
|
||||
}
|
||||
|
||||
// LimiterIsNil applies the IsNil predicate on the "limiter" field.
|
||||
func LimiterIsNil() predicate.Media {
|
||||
return predicate.Media(sql.FieldIsNull(FieldLimiter))
|
||||
}
|
||||
|
||||
// LimiterNotNil applies the NotNil predicate on the "limiter" field.
|
||||
func LimiterNotNil() predicate.Media {
|
||||
return predicate.Media(sql.FieldNotNull(FieldLimiter))
|
||||
}
|
||||
|
||||
// ExtrasIsNil applies the IsNil predicate on the "extras" field.
|
||||
func ExtrasIsNil() predicate.Media {
|
||||
return predicate.Media(sql.FieldIsNull(FieldExtras))
|
||||
}
|
||||
|
||||
// ExtrasNotNil applies the NotNil predicate on the "extras" field.
|
||||
func ExtrasNotNil() predicate.Media {
|
||||
return predicate.Media(sql.FieldNotNull(FieldExtras))
|
||||
}
|
||||
|
||||
// HasEpisodes applies the HasEdge predicate on the "episodes" edge.
|
||||
func HasEpisodes() predicate.Media {
|
||||
return predicate.Media(func(s *sql.Selector) {
|
||||
|
||||
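These predicates combine in queries like any other; a minimal sketch, with Query/Where/All assumed from the generated client.

// imports assumed: "context", "polaris/ent", "polaris/ent/media"
func backfillCandidates(ctx context.Context, client *ent.Client) ([]*ent.Media, error) {
    // Media marked to download history episodes that have no limiter configured yet.
    return client.Media.Query().
        Where(
            media.DownloadHistoryEpisodes(true),
            media.LimiterIsNil(),
        ).
        All(ctx)
}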
@@ -8,6 +8,7 @@ import (
|
||||
"fmt"
|
||||
"polaris/ent/episode"
|
||||
"polaris/ent/media"
|
||||
"polaris/ent/schema"
|
||||
"time"
|
||||
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
@@ -141,6 +142,48 @@ func (mc *MediaCreate) SetNillableTargetDir(s *string) *MediaCreate {
|
||||
return mc
|
||||
}
|
||||
|
||||
// SetDownloadHistoryEpisodes sets the "download_history_episodes" field.
|
||||
func (mc *MediaCreate) SetDownloadHistoryEpisodes(b bool) *MediaCreate {
|
||||
mc.mutation.SetDownloadHistoryEpisodes(b)
|
||||
return mc
|
||||
}
|
||||
|
||||
// SetNillableDownloadHistoryEpisodes sets the "download_history_episodes" field if the given value is not nil.
|
||||
func (mc *MediaCreate) SetNillableDownloadHistoryEpisodes(b *bool) *MediaCreate {
|
||||
if b != nil {
|
||||
mc.SetDownloadHistoryEpisodes(*b)
|
||||
}
|
||||
return mc
|
||||
}
|
||||
|
||||
// SetLimiter sets the "limiter" field.
|
||||
func (mc *MediaCreate) SetLimiter(sl schema.MediaLimiter) *MediaCreate {
|
||||
mc.mutation.SetLimiter(sl)
|
||||
return mc
|
||||
}
|
||||
|
||||
// SetNillableLimiter sets the "limiter" field if the given value is not nil.
|
||||
func (mc *MediaCreate) SetNillableLimiter(sl *schema.MediaLimiter) *MediaCreate {
|
||||
if sl != nil {
|
||||
mc.SetLimiter(*sl)
|
||||
}
|
||||
return mc
|
||||
}
|
||||
|
||||
// SetExtras sets the "extras" field.
|
||||
func (mc *MediaCreate) SetExtras(se schema.MediaExtras) *MediaCreate {
|
||||
mc.mutation.SetExtras(se)
|
||||
return mc
|
||||
}
|
||||
|
||||
// SetNillableExtras sets the "extras" field if the given value is not nil.
|
||||
func (mc *MediaCreate) SetNillableExtras(se *schema.MediaExtras) *MediaCreate {
|
||||
if se != nil {
|
||||
mc.SetExtras(*se)
|
||||
}
|
||||
return mc
|
||||
}
|
||||
|
||||
// AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
|
||||
func (mc *MediaCreate) AddEpisodeIDs(ids ...int) *MediaCreate {
|
||||
mc.mutation.AddEpisodeIDs(ids...)
|
||||
@@ -203,6 +246,10 @@ func (mc *MediaCreate) defaults() {
|
||||
v := media.DefaultResolution
|
||||
mc.mutation.SetResolution(v)
|
||||
}
|
||||
if _, ok := mc.mutation.DownloadHistoryEpisodes(); !ok {
|
||||
v := media.DefaultDownloadHistoryEpisodes
|
||||
mc.mutation.SetDownloadHistoryEpisodes(v)
|
||||
}
|
||||
}
|
||||
|
||||
// check runs all checks and user-defined validators on the builder.
|
||||
@@ -318,6 +365,18 @@ func (mc *MediaCreate) createSpec() (*Media, *sqlgraph.CreateSpec) {
|
||||
_spec.SetField(media.FieldTargetDir, field.TypeString, value)
|
||||
_node.TargetDir = value
|
||||
}
|
||||
if value, ok := mc.mutation.DownloadHistoryEpisodes(); ok {
|
||||
_spec.SetField(media.FieldDownloadHistoryEpisodes, field.TypeBool, value)
|
||||
_node.DownloadHistoryEpisodes = value
|
||||
}
|
||||
if value, ok := mc.mutation.Limiter(); ok {
|
||||
_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
|
||||
_node.Limiter = value
|
||||
}
|
||||
if value, ok := mc.mutation.Extras(); ok {
|
||||
_spec.SetField(media.FieldExtras, field.TypeJSON, value)
|
||||
_node.Extras = value
|
||||
}
|
||||
if nodes := mc.mutation.EpisodesIDs(); len(nodes) > 0 {
|
||||
edge := &sqlgraph.EdgeSpec{
|
||||
Rel: sqlgraph.O2M,
|
||||
|
||||
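A sketch of how the new create-builder setters fit together; schema.MediaLimiter is passed as an opaque value because its fields are not part of this diff, and SetNameEn/SetTmdbID are assumed from the media schema.

// imports assumed: "context", "polaris/ent", "polaris/ent/schema"
func addSeries(ctx context.Context, client *ent.Client, limiter schema.MediaLimiter) (*ent.Media, error) {
    return client.Media.Create().
        SetNameEn("Example Show").        // assumed setter for the name_en column
        SetTmdbID(12345).                 // assumed setter for the tmdb_id column
        SetDownloadHistoryEpisodes(true). // tv series only: also grab already-aired episodes
        SetLimiter(limiter).              // stored as JSON via FieldLimiter
        Save(ctx)
}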
@@ -9,6 +9,7 @@ import (
|
||||
"polaris/ent/episode"
|
||||
"polaris/ent/media"
|
||||
"polaris/ent/predicate"
|
||||
"polaris/ent/schema"
|
||||
"time"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
@@ -229,6 +230,66 @@ func (mu *MediaUpdate) ClearTargetDir() *MediaUpdate {
|
||||
return mu
|
||||
}
|
||||
|
||||
// SetDownloadHistoryEpisodes sets the "download_history_episodes" field.
|
||||
func (mu *MediaUpdate) SetDownloadHistoryEpisodes(b bool) *MediaUpdate {
|
||||
mu.mutation.SetDownloadHistoryEpisodes(b)
|
||||
return mu
|
||||
}
|
||||
|
||||
// SetNillableDownloadHistoryEpisodes sets the "download_history_episodes" field if the given value is not nil.
|
||||
func (mu *MediaUpdate) SetNillableDownloadHistoryEpisodes(b *bool) *MediaUpdate {
|
||||
if b != nil {
|
||||
mu.SetDownloadHistoryEpisodes(*b)
|
||||
}
|
||||
return mu
|
||||
}
|
||||
|
||||
// ClearDownloadHistoryEpisodes clears the value of the "download_history_episodes" field.
|
||||
func (mu *MediaUpdate) ClearDownloadHistoryEpisodes() *MediaUpdate {
|
||||
mu.mutation.ClearDownloadHistoryEpisodes()
|
||||
return mu
|
||||
}
|
||||
|
||||
// SetLimiter sets the "limiter" field.
|
||||
func (mu *MediaUpdate) SetLimiter(sl schema.MediaLimiter) *MediaUpdate {
|
||||
mu.mutation.SetLimiter(sl)
|
||||
return mu
|
||||
}
|
||||
|
||||
// SetNillableLimiter sets the "limiter" field if the given value is not nil.
|
||||
func (mu *MediaUpdate) SetNillableLimiter(sl *schema.MediaLimiter) *MediaUpdate {
|
||||
if sl != nil {
|
||||
mu.SetLimiter(*sl)
|
||||
}
|
||||
return mu
|
||||
}
|
||||
|
||||
// ClearLimiter clears the value of the "limiter" field.
|
||||
func (mu *MediaUpdate) ClearLimiter() *MediaUpdate {
|
||||
mu.mutation.ClearLimiter()
|
||||
return mu
|
||||
}
|
||||
|
||||
// SetExtras sets the "extras" field.
|
||||
func (mu *MediaUpdate) SetExtras(se schema.MediaExtras) *MediaUpdate {
|
||||
mu.mutation.SetExtras(se)
|
||||
return mu
|
||||
}
|
||||
|
||||
// SetNillableExtras sets the "extras" field if the given value is not nil.
|
||||
func (mu *MediaUpdate) SetNillableExtras(se *schema.MediaExtras) *MediaUpdate {
|
||||
if se != nil {
|
||||
mu.SetExtras(*se)
|
||||
}
|
||||
return mu
|
||||
}
|
||||
|
||||
// ClearExtras clears the value of the "extras" field.
|
||||
func (mu *MediaUpdate) ClearExtras() *MediaUpdate {
|
||||
mu.mutation.ClearExtras()
|
||||
return mu
|
||||
}
|
||||
|
||||
// AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
|
||||
func (mu *MediaUpdate) AddEpisodeIDs(ids ...int) *MediaUpdate {
|
||||
mu.mutation.AddEpisodeIDs(ids...)
|
||||
@@ -375,6 +436,24 @@ func (mu *MediaUpdate) sqlSave(ctx context.Context) (n int, err error) {
|
||||
if mu.mutation.TargetDirCleared() {
|
||||
_spec.ClearField(media.FieldTargetDir, field.TypeString)
|
||||
}
|
||||
if value, ok := mu.mutation.DownloadHistoryEpisodes(); ok {
|
||||
_spec.SetField(media.FieldDownloadHistoryEpisodes, field.TypeBool, value)
|
||||
}
|
||||
if mu.mutation.DownloadHistoryEpisodesCleared() {
|
||||
_spec.ClearField(media.FieldDownloadHistoryEpisodes, field.TypeBool)
|
||||
}
|
||||
if value, ok := mu.mutation.Limiter(); ok {
|
||||
_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
|
||||
}
|
||||
if mu.mutation.LimiterCleared() {
|
||||
_spec.ClearField(media.FieldLimiter, field.TypeJSON)
|
||||
}
|
||||
if value, ok := mu.mutation.Extras(); ok {
|
||||
_spec.SetField(media.FieldExtras, field.TypeJSON, value)
|
||||
}
|
||||
if mu.mutation.ExtrasCleared() {
|
||||
_spec.ClearField(media.FieldExtras, field.TypeJSON)
|
||||
}
|
||||
if mu.mutation.EpisodesCleared() {
|
||||
edge := &sqlgraph.EdgeSpec{
|
||||
Rel: sqlgraph.O2M,
|
||||
@@ -640,6 +719,66 @@ func (muo *MediaUpdateOne) ClearTargetDir() *MediaUpdateOne {
|
||||
return muo
|
||||
}
|
||||
|
||||
// SetDownloadHistoryEpisodes sets the "download_history_episodes" field.
|
||||
func (muo *MediaUpdateOne) SetDownloadHistoryEpisodes(b bool) *MediaUpdateOne {
|
||||
muo.mutation.SetDownloadHistoryEpisodes(b)
|
||||
return muo
|
||||
}
|
||||
|
||||
// SetNillableDownloadHistoryEpisodes sets the "download_history_episodes" field if the given value is not nil.
|
||||
func (muo *MediaUpdateOne) SetNillableDownloadHistoryEpisodes(b *bool) *MediaUpdateOne {
|
||||
if b != nil {
|
||||
muo.SetDownloadHistoryEpisodes(*b)
|
||||
}
|
||||
return muo
|
||||
}
|
||||
|
||||
// ClearDownloadHistoryEpisodes clears the value of the "download_history_episodes" field.
|
||||
func (muo *MediaUpdateOne) ClearDownloadHistoryEpisodes() *MediaUpdateOne {
|
||||
muo.mutation.ClearDownloadHistoryEpisodes()
|
||||
return muo
|
||||
}
|
||||
|
||||
// SetLimiter sets the "limiter" field.
|
||||
func (muo *MediaUpdateOne) SetLimiter(sl schema.MediaLimiter) *MediaUpdateOne {
|
||||
muo.mutation.SetLimiter(sl)
|
||||
return muo
|
||||
}
|
||||
|
||||
// SetNillableLimiter sets the "limiter" field if the given value is not nil.
|
||||
func (muo *MediaUpdateOne) SetNillableLimiter(sl *schema.MediaLimiter) *MediaUpdateOne {
|
||||
if sl != nil {
|
||||
muo.SetLimiter(*sl)
|
||||
}
|
||||
return muo
|
||||
}
|
||||
|
||||
// ClearLimiter clears the value of the "limiter" field.
|
||||
func (muo *MediaUpdateOne) ClearLimiter() *MediaUpdateOne {
|
||||
muo.mutation.ClearLimiter()
|
||||
return muo
|
||||
}
|
||||
|
||||
// SetExtras sets the "extras" field.
|
||||
func (muo *MediaUpdateOne) SetExtras(se schema.MediaExtras) *MediaUpdateOne {
|
||||
muo.mutation.SetExtras(se)
|
||||
return muo
|
||||
}
|
||||
|
||||
// SetNillableExtras sets the "extras" field if the given value is not nil.
|
||||
func (muo *MediaUpdateOne) SetNillableExtras(se *schema.MediaExtras) *MediaUpdateOne {
|
||||
if se != nil {
|
||||
muo.SetExtras(*se)
|
||||
}
|
||||
return muo
|
||||
}
|
||||
|
||||
// ClearExtras clears the value of the "extras" field.
|
||||
func (muo *MediaUpdateOne) ClearExtras() *MediaUpdateOne {
|
||||
muo.mutation.ClearExtras()
|
||||
return muo
|
||||
}
|
||||
|
||||
// AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
|
||||
func (muo *MediaUpdateOne) AddEpisodeIDs(ids ...int) *MediaUpdateOne {
|
||||
muo.mutation.AddEpisodeIDs(ids...)
|
||||
@@ -816,6 +955,24 @@ func (muo *MediaUpdateOne) sqlSave(ctx context.Context) (_node *Media, err error
|
||||
if muo.mutation.TargetDirCleared() {
|
||||
_spec.ClearField(media.FieldTargetDir, field.TypeString)
|
||||
}
|
||||
if value, ok := muo.mutation.DownloadHistoryEpisodes(); ok {
|
||||
_spec.SetField(media.FieldDownloadHistoryEpisodes, field.TypeBool, value)
|
||||
}
|
||||
if muo.mutation.DownloadHistoryEpisodesCleared() {
|
||||
_spec.ClearField(media.FieldDownloadHistoryEpisodes, field.TypeBool)
|
||||
}
|
||||
if value, ok := muo.mutation.Limiter(); ok {
|
||||
_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
|
||||
}
|
||||
if muo.mutation.LimiterCleared() {
|
||||
_spec.ClearField(media.FieldLimiter, field.TypeJSON)
|
||||
}
|
||||
if value, ok := muo.mutation.Extras(); ok {
|
||||
_spec.SetField(media.FieldExtras, field.TypeJSON, value)
|
||||
}
|
||||
if muo.mutation.ExtrasCleared() {
|
||||
_spec.ClearField(media.FieldExtras, field.TypeJSON)
|
||||
}
|
||||
if muo.mutation.EpisodesCleared() {
|
||||
edge := &sqlgraph.EdgeSpec{
|
||||
Rel: sqlgraph.O2M,
|
||||
|
||||
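The matching update builders expose Clear* variants for the new nullable fields; a hedged sketch, with UpdateOneID and Exec assumed from the generated client.

// imports assumed: "context", "polaris/ent"
func resetMediaExtras(ctx context.Context, client *ent.Client, id int) error {
    // ClearLimiter/ClearExtras NULL the JSON columns; sqlSave translates them
    // into the _spec.ClearField calls shown above.
    return client.Media.
        UpdateOneID(id).
        ClearLimiter().
        ClearExtras().
        Exec(ctx)
}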
@@ -38,7 +38,8 @@ var (
|
||||
{Name: "overview", Type: field.TypeString},
|
||||
{Name: "air_date", Type: field.TypeString},
|
||||
{Name: "status", Type: field.TypeEnum, Enums: []string{"missing", "downloading", "downloaded"}, Default: "missing"},
|
||||
{Name: "file_in_storage", Type: field.TypeString, Nullable: true},
|
||||
{Name: "monitored", Type: field.TypeBool, Default: false},
|
||||
{Name: "target_file", Type: field.TypeString, Nullable: true},
|
||||
{Name: "media_id", Type: field.TypeInt, Nullable: true},
|
||||
}
|
||||
// EpisodesTable holds the schema information for the "episodes" table.
|
||||
@@ -49,7 +50,7 @@ var (
|
||||
ForeignKeys: []*schema.ForeignKey{
|
||||
{
|
||||
Symbol: "episodes_media_episodes",
|
||||
Columns: []*schema.Column{EpisodesColumns[8]},
|
||||
Columns: []*schema.Column{EpisodesColumns[9]},
|
||||
RefColumns: []*schema.Column{MediaColumns[0]},
|
||||
OnDelete: schema.SetNull,
|
||||
},
|
||||
@@ -64,7 +65,9 @@ var (
|
||||
{Name: "date", Type: field.TypeTime},
|
||||
{Name: "target_dir", Type: field.TypeString},
|
||||
{Name: "size", Type: field.TypeInt, Default: 0},
|
||||
{Name: "status", Type: field.TypeEnum, Enums: []string{"running", "success", "fail", "uploading"}},
|
||||
{Name: "download_client_id", Type: field.TypeInt, Nullable: true},
|
||||
{Name: "indexer_id", Type: field.TypeInt, Nullable: true},
|
||||
{Name: "status", Type: field.TypeEnum, Enums: []string{"running", "success", "fail", "uploading", "seeding"}},
|
||||
{Name: "saved", Type: field.TypeString, Nullable: true},
|
||||
}
|
||||
// HistoriesTable holds the schema information for the "histories" table.
|
||||
@@ -73,6 +76,22 @@ var (
|
||||
Columns: HistoriesColumns,
|
||||
PrimaryKey: []*schema.Column{HistoriesColumns[0]},
|
||||
}
|
||||
// ImportListsColumns holds the columns for the "import_lists" table.
|
||||
ImportListsColumns = []*schema.Column{
|
||||
{Name: "id", Type: field.TypeInt, Increment: true},
|
||||
{Name: "name", Type: field.TypeString},
|
||||
{Name: "type", Type: field.TypeEnum, Enums: []string{"plex", "doulist"}},
|
||||
{Name: "url", Type: field.TypeString, Nullable: true},
|
||||
{Name: "qulity", Type: field.TypeString},
|
||||
{Name: "storage_id", Type: field.TypeInt},
|
||||
{Name: "settings", Type: field.TypeJSON, Nullable: true},
|
||||
}
|
||||
// ImportListsTable holds the schema information for the "import_lists" table.
|
||||
ImportListsTable = &schema.Table{
|
||||
Name: "import_lists",
|
||||
Columns: ImportListsColumns,
|
||||
PrimaryKey: []*schema.Column{ImportListsColumns[0]},
|
||||
}
|
||||
// IndexersColumns holds the columns for the "indexers" table.
|
||||
IndexersColumns = []*schema.Column{
|
||||
{Name: "id", Type: field.TypeInt, Increment: true},
|
||||
@@ -80,7 +99,9 @@ var (
|
||||
{Name: "implementation", Type: field.TypeString},
|
||||
{Name: "settings", Type: field.TypeString},
|
||||
{Name: "enable_rss", Type: field.TypeBool, Default: true},
|
||||
{Name: "priority", Type: field.TypeInt},
|
||||
{Name: "priority", Type: field.TypeInt, Default: 50},
|
||||
{Name: "seed_ratio", Type: field.TypeFloat32, Nullable: true, Default: 0},
|
||||
{Name: "disabled", Type: field.TypeBool, Nullable: true, Default: false},
|
||||
}
|
||||
// IndexersTable holds the schema information for the "indexers" table.
|
||||
IndexersTable = &schema.Table{
|
||||
@@ -100,9 +121,12 @@ var (
|
||||
{Name: "overview", Type: field.TypeString},
|
||||
{Name: "created_at", Type: field.TypeTime},
|
||||
{Name: "air_date", Type: field.TypeString, Default: ""},
|
||||
{Name: "resolution", Type: field.TypeEnum, Enums: []string{"720p", "1080p", "4k"}, Default: "1080p"},
|
||||
{Name: "resolution", Type: field.TypeEnum, Enums: []string{"720p", "1080p", "2160p"}, Default: "1080p"},
|
||||
{Name: "storage_id", Type: field.TypeInt, Nullable: true},
|
||||
{Name: "target_dir", Type: field.TypeString, Nullable: true},
|
||||
{Name: "download_history_episodes", Type: field.TypeBool, Nullable: true, Default: false},
|
||||
{Name: "limiter", Type: field.TypeJSON, Nullable: true},
|
||||
{Name: "extras", Type: field.TypeJSON, Nullable: true},
|
||||
}
|
||||
// MediaTable holds the schema information for the "media" table.
|
||||
MediaTable = &schema.Table{
|
||||
@@ -110,6 +134,20 @@ var (
|
||||
Columns: MediaColumns,
|
||||
PrimaryKey: []*schema.Column{MediaColumns[0]},
|
||||
}
|
||||
// NotificationClientsColumns holds the columns for the "notification_clients" table.
|
||||
NotificationClientsColumns = []*schema.Column{
|
||||
{Name: "id", Type: field.TypeInt, Increment: true},
|
||||
{Name: "name", Type: field.TypeString},
|
||||
{Name: "service", Type: field.TypeString},
|
||||
{Name: "settings", Type: field.TypeString},
|
||||
{Name: "enabled", Type: field.TypeBool, Default: true},
|
||||
}
|
||||
// NotificationClientsTable holds the schema information for the "notification_clients" table.
|
||||
NotificationClientsTable = &schema.Table{
|
||||
Name: "notification_clients",
|
||||
Columns: NotificationClientsColumns,
|
||||
PrimaryKey: []*schema.Column{NotificationClientsColumns[0]},
|
||||
}
|
||||
// SettingsColumns holds the columns for the "settings" table.
|
||||
SettingsColumns = []*schema.Column{
|
||||
{Name: "id", Type: field.TypeInt, Increment: true},
|
||||
@@ -127,6 +165,8 @@ var (
|
||||
{Name: "id", Type: field.TypeInt, Increment: true},
|
||||
{Name: "name", Type: field.TypeString, Unique: true},
|
||||
{Name: "implementation", Type: field.TypeEnum, Enums: []string{"webdav", "local"}},
|
||||
{Name: "tv_path", Type: field.TypeString, Nullable: true},
|
||||
{Name: "movie_path", Type: field.TypeString, Nullable: true},
|
||||
{Name: "settings", Type: field.TypeString, Nullable: true},
|
||||
{Name: "deleted", Type: field.TypeBool, Default: false},
|
||||
{Name: "default", Type: field.TypeBool, Default: false},
|
||||
@@ -142,8 +182,10 @@ var (
|
||||
DownloadClientsTable,
|
||||
EpisodesTable,
|
||||
HistoriesTable,
|
||||
ImportListsTable,
|
||||
IndexersTable,
|
||||
MediaTable,
|
||||
NotificationClientsTable,
|
||||
SettingsTable,
|
||||
StoragesTable,
|
||||
}
|
||||
|
||||
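After regenerating, the new tables and columns are applied through ent's auto-migration entry point; a minimal sketch, where ent.Open and the sqlite driver name are assumptions rather than part of this diff.

// imports assumed: "context", "log", "polaris/ent", _ "github.com/mattn/go-sqlite3"
func migrate(ctx context.Context) {
    client, err := ent.Open("sqlite3", "file:polaris.db?_fk=1")
    if err != nil {
        log.Fatal(err)
    }
    defer client.Close()
    // Schema.Create diffs the Tables slice above (now including ImportListsTable
    // and NotificationClientsTable) against the live database.
    if err := client.Schema.Create(ctx); err != nil {
        log.Fatal(err)
    }
}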
2168
ent/mutation.go
138
ent/notificationclient.go
Normal file
@@ -0,0 +1,138 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"polaris/ent/notificationclient"
|
||||
"strings"
|
||||
|
||||
"entgo.io/ent"
|
||||
"entgo.io/ent/dialect/sql"
|
||||
)
|
||||
|
||||
// NotificationClient is the model entity for the NotificationClient schema.
|
||||
type NotificationClient struct {
|
||||
config `json:"-"`
|
||||
// ID of the ent.
|
||||
ID int `json:"id,omitempty"`
|
||||
// Name holds the value of the "name" field.
|
||||
Name string `json:"name,omitempty"`
|
||||
// Service holds the value of the "service" field.
|
||||
Service string `json:"service,omitempty"`
|
||||
// Settings holds the value of the "settings" field.
|
||||
Settings string `json:"settings,omitempty"`
|
||||
// Enabled holds the value of the "enabled" field.
|
||||
Enabled bool `json:"enabled,omitempty"`
|
||||
selectValues sql.SelectValues
|
||||
}
|
||||
|
||||
// scanValues returns the types for scanning values from sql.Rows.
|
||||
func (*NotificationClient) scanValues(columns []string) ([]any, error) {
|
||||
values := make([]any, len(columns))
|
||||
for i := range columns {
|
||||
switch columns[i] {
|
||||
case notificationclient.FieldEnabled:
|
||||
values[i] = new(sql.NullBool)
|
||||
case notificationclient.FieldID:
|
||||
values[i] = new(sql.NullInt64)
|
||||
case notificationclient.FieldName, notificationclient.FieldService, notificationclient.FieldSettings:
|
||||
values[i] = new(sql.NullString)
|
||||
default:
|
||||
values[i] = new(sql.UnknownType)
|
||||
}
|
||||
}
|
||||
return values, nil
|
||||
}
|
||||
|
||||
// assignValues assigns the values that were returned from sql.Rows (after scanning)
|
||||
// to the NotificationClient fields.
|
||||
func (nc *NotificationClient) assignValues(columns []string, values []any) error {
|
||||
if m, n := len(values), len(columns); m < n {
|
||||
return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
|
||||
}
|
||||
for i := range columns {
|
||||
switch columns[i] {
|
||||
case notificationclient.FieldID:
|
||||
value, ok := values[i].(*sql.NullInt64)
|
||||
if !ok {
|
||||
return fmt.Errorf("unexpected type %T for field id", value)
|
||||
}
|
||||
nc.ID = int(value.Int64)
|
||||
case notificationclient.FieldName:
|
||||
if value, ok := values[i].(*sql.NullString); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field name", values[i])
|
||||
} else if value.Valid {
|
||||
nc.Name = value.String
|
||||
}
|
||||
case notificationclient.FieldService:
|
||||
if value, ok := values[i].(*sql.NullString); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field service", values[i])
|
||||
} else if value.Valid {
|
||||
nc.Service = value.String
|
||||
}
|
||||
case notificationclient.FieldSettings:
|
||||
if value, ok := values[i].(*sql.NullString); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field settings", values[i])
|
||||
} else if value.Valid {
|
||||
nc.Settings = value.String
|
||||
}
|
||||
case notificationclient.FieldEnabled:
|
||||
if value, ok := values[i].(*sql.NullBool); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field enabled", values[i])
|
||||
} else if value.Valid {
|
||||
nc.Enabled = value.Bool
|
||||
}
|
||||
default:
|
||||
nc.selectValues.Set(columns[i], values[i])
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Value returns the ent.Value that was dynamically selected and assigned to the NotificationClient.
|
||||
// This includes values selected through modifiers, order, etc.
|
||||
func (nc *NotificationClient) Value(name string) (ent.Value, error) {
|
||||
return nc.selectValues.Get(name)
|
||||
}
|
||||
|
||||
// Update returns a builder for updating this NotificationClient.
|
||||
// Note that you need to call NotificationClient.Unwrap() before calling this method if this NotificationClient
|
||||
// was returned from a transaction, and the transaction was committed or rolled back.
|
||||
func (nc *NotificationClient) Update() *NotificationClientUpdateOne {
|
||||
return NewNotificationClientClient(nc.config).UpdateOne(nc)
|
||||
}
|
||||
|
||||
// Unwrap unwraps the NotificationClient entity that was returned from a transaction after it was closed,
|
||||
// so that all future queries will be executed through the driver which created the transaction.
|
||||
func (nc *NotificationClient) Unwrap() *NotificationClient {
|
||||
_tx, ok := nc.config.driver.(*txDriver)
|
||||
if !ok {
|
||||
panic("ent: NotificationClient is not a transactional entity")
|
||||
}
|
||||
nc.config.driver = _tx.drv
|
||||
return nc
|
||||
}
|
||||
|
||||
// String implements the fmt.Stringer.
|
||||
func (nc *NotificationClient) String() string {
|
||||
var builder strings.Builder
|
||||
builder.WriteString("NotificationClient(")
|
||||
builder.WriteString(fmt.Sprintf("id=%v, ", nc.ID))
|
||||
builder.WriteString("name=")
|
||||
builder.WriteString(nc.Name)
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("service=")
|
||||
builder.WriteString(nc.Service)
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("settings=")
|
||||
builder.WriteString(nc.Settings)
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("enabled=")
|
||||
builder.WriteString(fmt.Sprintf("%v", nc.Enabled))
|
||||
builder.WriteByte(')')
|
||||
return builder.String()
|
||||
}
|
||||
|
||||
// NotificationClients is a parsable slice of NotificationClient.
|
||||
type NotificationClients []*NotificationClient
|
||||
ent/notificationclient/notificationclient.go (new file, 76 lines)
@@ -0,0 +1,76 @@

// Code generated by ent, DO NOT EDIT.

package notificationclient

import (
	"entgo.io/ent/dialect/sql"
)

const (
	// Label holds the string label denoting the notificationclient type in the database.
	Label = "notification_client"
	// FieldID holds the string denoting the id field in the database.
	FieldID = "id"
	// FieldName holds the string denoting the name field in the database.
	FieldName = "name"
	// FieldService holds the string denoting the service field in the database.
	FieldService = "service"
	// FieldSettings holds the string denoting the settings field in the database.
	FieldSettings = "settings"
	// FieldEnabled holds the string denoting the enabled field in the database.
	FieldEnabled = "enabled"
	// Table holds the table name of the notificationclient in the database.
	Table = "notification_clients"
)

// Columns holds all SQL columns for notificationclient fields.
var Columns = []string{
	FieldID,
	FieldName,
	FieldService,
	FieldSettings,
	FieldEnabled,
}

// ValidColumn reports if the column name is valid (part of the table columns).
func ValidColumn(column string) bool {
	for i := range Columns {
		if column == Columns[i] {
			return true
		}
	}
	return false
}

var (
	// DefaultEnabled holds the default value on creation for the "enabled" field.
	DefaultEnabled bool
)

// OrderOption defines the ordering options for the NotificationClient queries.
type OrderOption func(*sql.Selector)

// ByID orders the results by the id field.
func ByID(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldID, opts...).ToFunc()
}

// ByName orders the results by the name field.
func ByName(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldName, opts...).ToFunc()
}

// ByService orders the results by the service field.
func ByService(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldService, opts...).ToFunc()
}

// BySettings orders the results by the settings field.
func BySettings(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldSettings, opts...).ToFunc()
}

// ByEnabled orders the results by the enabled field.
func ByEnabled(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldEnabled, opts...).ToFunc()
}
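These OrderOption helpers plug straight into the generated query builder. Below is a small sketch of typical usage, written as if inside the generated ent package; the Client facade and the stock entgo.io/ent/dialect/sql order-term options are assumptions about the surrounding generated code, not part of this hunk.

// listOrdered is an illustrative helper: all clients ordered by service,
// then by name descending (sql.OrderDesc is the stock ent order option).
func listOrdered(ctx context.Context, client *Client) ([]*NotificationClient, error) {
	return client.NotificationClient.Query().
		Order(notificationclient.ByService(), notificationclient.ByName(sql.OrderDesc())).
		All(ctx)
}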
ent/notificationclient/where.go (new file, 294 lines)
@@ -0,0 +1,294 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package notificationclient
|
||||
|
||||
import (
|
||||
"polaris/ent/predicate"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
)
|
||||
|
||||
// ID filters vertices based on their ID field.
|
||||
func ID(id int) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldEQ(FieldID, id))
|
||||
}
|
||||
|
||||
// IDEQ applies the EQ predicate on the ID field.
|
||||
func IDEQ(id int) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldEQ(FieldID, id))
|
||||
}
|
||||
|
||||
// IDNEQ applies the NEQ predicate on the ID field.
|
||||
func IDNEQ(id int) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldNEQ(FieldID, id))
|
||||
}
|
||||
|
||||
// IDIn applies the In predicate on the ID field.
|
||||
func IDIn(ids ...int) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldIn(FieldID, ids...))
|
||||
}
|
||||
|
||||
// IDNotIn applies the NotIn predicate on the ID field.
|
||||
func IDNotIn(ids ...int) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldNotIn(FieldID, ids...))
|
||||
}
|
||||
|
||||
// IDGT applies the GT predicate on the ID field.
|
||||
func IDGT(id int) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldGT(FieldID, id))
|
||||
}
|
||||
|
||||
// IDGTE applies the GTE predicate on the ID field.
|
||||
func IDGTE(id int) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldGTE(FieldID, id))
|
||||
}
|
||||
|
||||
// IDLT applies the LT predicate on the ID field.
|
||||
func IDLT(id int) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldLT(FieldID, id))
|
||||
}
|
||||
|
||||
// IDLTE applies the LTE predicate on the ID field.
|
||||
func IDLTE(id int) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldLTE(FieldID, id))
|
||||
}
|
||||
|
||||
// Name applies equality check predicate on the "name" field. It's identical to NameEQ.
|
||||
func Name(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldEQ(FieldName, v))
|
||||
}
|
||||
|
||||
// Service applies equality check predicate on the "service" field. It's identical to ServiceEQ.
|
||||
func Service(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldEQ(FieldService, v))
|
||||
}
|
||||
|
||||
// Settings applies equality check predicate on the "settings" field. It's identical to SettingsEQ.
|
||||
func Settings(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldEQ(FieldSettings, v))
|
||||
}
|
||||
|
||||
// Enabled applies equality check predicate on the "enabled" field. It's identical to EnabledEQ.
|
||||
func Enabled(v bool) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldEQ(FieldEnabled, v))
|
||||
}
|
||||
|
||||
// NameEQ applies the EQ predicate on the "name" field.
|
||||
func NameEQ(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldEQ(FieldName, v))
|
||||
}
|
||||
|
||||
// NameNEQ applies the NEQ predicate on the "name" field.
|
||||
func NameNEQ(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldNEQ(FieldName, v))
|
||||
}
|
||||
|
||||
// NameIn applies the In predicate on the "name" field.
|
||||
func NameIn(vs ...string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldIn(FieldName, vs...))
|
||||
}
|
||||
|
||||
// NameNotIn applies the NotIn predicate on the "name" field.
|
||||
func NameNotIn(vs ...string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldNotIn(FieldName, vs...))
|
||||
}
|
||||
|
||||
// NameGT applies the GT predicate on the "name" field.
|
||||
func NameGT(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldGT(FieldName, v))
|
||||
}
|
||||
|
||||
// NameGTE applies the GTE predicate on the "name" field.
|
||||
func NameGTE(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldGTE(FieldName, v))
|
||||
}
|
||||
|
||||
// NameLT applies the LT predicate on the "name" field.
|
||||
func NameLT(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldLT(FieldName, v))
|
||||
}
|
||||
|
||||
// NameLTE applies the LTE predicate on the "name" field.
|
||||
func NameLTE(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldLTE(FieldName, v))
|
||||
}
|
||||
|
||||
// NameContains applies the Contains predicate on the "name" field.
|
||||
func NameContains(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldContains(FieldName, v))
|
||||
}
|
||||
|
||||
// NameHasPrefix applies the HasPrefix predicate on the "name" field.
|
||||
func NameHasPrefix(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldHasPrefix(FieldName, v))
|
||||
}
|
||||
|
||||
// NameHasSuffix applies the HasSuffix predicate on the "name" field.
|
||||
func NameHasSuffix(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldHasSuffix(FieldName, v))
|
||||
}
|
||||
|
||||
// NameEqualFold applies the EqualFold predicate on the "name" field.
|
||||
func NameEqualFold(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldEqualFold(FieldName, v))
|
||||
}
|
||||
|
||||
// NameContainsFold applies the ContainsFold predicate on the "name" field.
|
||||
func NameContainsFold(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldContainsFold(FieldName, v))
|
||||
}
|
||||
|
||||
// ServiceEQ applies the EQ predicate on the "service" field.
|
||||
func ServiceEQ(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldEQ(FieldService, v))
|
||||
}
|
||||
|
||||
// ServiceNEQ applies the NEQ predicate on the "service" field.
|
||||
func ServiceNEQ(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldNEQ(FieldService, v))
|
||||
}
|
||||
|
||||
// ServiceIn applies the In predicate on the "service" field.
|
||||
func ServiceIn(vs ...string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldIn(FieldService, vs...))
|
||||
}
|
||||
|
||||
// ServiceNotIn applies the NotIn predicate on the "service" field.
|
||||
func ServiceNotIn(vs ...string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldNotIn(FieldService, vs...))
|
||||
}
|
||||
|
||||
// ServiceGT applies the GT predicate on the "service" field.
|
||||
func ServiceGT(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldGT(FieldService, v))
|
||||
}
|
||||
|
||||
// ServiceGTE applies the GTE predicate on the "service" field.
|
||||
func ServiceGTE(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldGTE(FieldService, v))
|
||||
}
|
||||
|
||||
// ServiceLT applies the LT predicate on the "service" field.
|
||||
func ServiceLT(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldLT(FieldService, v))
|
||||
}
|
||||
|
||||
// ServiceLTE applies the LTE predicate on the "service" field.
|
||||
func ServiceLTE(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldLTE(FieldService, v))
|
||||
}
|
||||
|
||||
// ServiceContains applies the Contains predicate on the "service" field.
|
||||
func ServiceContains(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldContains(FieldService, v))
|
||||
}
|
||||
|
||||
// ServiceHasPrefix applies the HasPrefix predicate on the "service" field.
|
||||
func ServiceHasPrefix(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldHasPrefix(FieldService, v))
|
||||
}
|
||||
|
||||
// ServiceHasSuffix applies the HasSuffix predicate on the "service" field.
|
||||
func ServiceHasSuffix(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldHasSuffix(FieldService, v))
|
||||
}
|
||||
|
||||
// ServiceEqualFold applies the EqualFold predicate on the "service" field.
|
||||
func ServiceEqualFold(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldEqualFold(FieldService, v))
|
||||
}
|
||||
|
||||
// ServiceContainsFold applies the ContainsFold predicate on the "service" field.
|
||||
func ServiceContainsFold(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldContainsFold(FieldService, v))
|
||||
}
|
||||
|
||||
// SettingsEQ applies the EQ predicate on the "settings" field.
|
||||
func SettingsEQ(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldEQ(FieldSettings, v))
|
||||
}
|
||||
|
||||
// SettingsNEQ applies the NEQ predicate on the "settings" field.
|
||||
func SettingsNEQ(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldNEQ(FieldSettings, v))
|
||||
}
|
||||
|
||||
// SettingsIn applies the In predicate on the "settings" field.
|
||||
func SettingsIn(vs ...string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldIn(FieldSettings, vs...))
|
||||
}
|
||||
|
||||
// SettingsNotIn applies the NotIn predicate on the "settings" field.
|
||||
func SettingsNotIn(vs ...string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldNotIn(FieldSettings, vs...))
|
||||
}
|
||||
|
||||
// SettingsGT applies the GT predicate on the "settings" field.
|
||||
func SettingsGT(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldGT(FieldSettings, v))
|
||||
}
|
||||
|
||||
// SettingsGTE applies the GTE predicate on the "settings" field.
|
||||
func SettingsGTE(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldGTE(FieldSettings, v))
|
||||
}
|
||||
|
||||
// SettingsLT applies the LT predicate on the "settings" field.
|
||||
func SettingsLT(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldLT(FieldSettings, v))
|
||||
}
|
||||
|
||||
// SettingsLTE applies the LTE predicate on the "settings" field.
|
||||
func SettingsLTE(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldLTE(FieldSettings, v))
|
||||
}
|
||||
|
||||
// SettingsContains applies the Contains predicate on the "settings" field.
|
||||
func SettingsContains(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldContains(FieldSettings, v))
|
||||
}
|
||||
|
||||
// SettingsHasPrefix applies the HasPrefix predicate on the "settings" field.
|
||||
func SettingsHasPrefix(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldHasPrefix(FieldSettings, v))
|
||||
}
|
||||
|
||||
// SettingsHasSuffix applies the HasSuffix predicate on the "settings" field.
|
||||
func SettingsHasSuffix(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldHasSuffix(FieldSettings, v))
|
||||
}
|
||||
|
||||
// SettingsEqualFold applies the EqualFold predicate on the "settings" field.
|
||||
func SettingsEqualFold(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldEqualFold(FieldSettings, v))
|
||||
}
|
||||
|
||||
// SettingsContainsFold applies the ContainsFold predicate on the "settings" field.
|
||||
func SettingsContainsFold(v string) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldContainsFold(FieldSettings, v))
|
||||
}
|
||||
|
||||
// EnabledEQ applies the EQ predicate on the "enabled" field.
|
||||
func EnabledEQ(v bool) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldEQ(FieldEnabled, v))
|
||||
}
|
||||
|
||||
// EnabledNEQ applies the NEQ predicate on the "enabled" field.
|
||||
func EnabledNEQ(v bool) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.FieldNEQ(FieldEnabled, v))
|
||||
}
|
||||
|
||||
// And groups predicates with the AND operator between them.
|
||||
func And(predicates ...predicate.NotificationClient) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.AndPredicates(predicates...))
|
||||
}
|
||||
|
||||
// Or groups predicates with the OR operator between them.
|
||||
func Or(predicates ...predicate.NotificationClient) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.OrPredicates(predicates...))
|
||||
}
|
||||
|
||||
// Not applies the not operator on the given predicate.
|
||||
func Not(p predicate.NotificationClient) predicate.NotificationClient {
|
||||
return predicate.NotificationClient(sql.NotPredicates(p))
|
||||
}
|
||||
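The predicates above compose inside Where calls on the generated builders. A hedged sketch follows, again assuming the conventional Client facade from the generated client.go, which this diff does not show.

// enabledByService is an illustrative lookup combining two of the predicates
// defined above; client and ctx come from the calling application.
func enabledByService(ctx context.Context, client *Client, service string) ([]*NotificationClient, error) {
	return client.NotificationClient.Query().
		Where(
			notificationclient.Enabled(true),
			notificationclient.ServiceEqualFold(service),
		).
		All(ctx)
}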
ent/notificationclient_create.go (new file, 240 lines)
@@ -0,0 +1,240 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"polaris/ent/notificationclient"
|
||||
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
)
|
||||
|
||||
// NotificationClientCreate is the builder for creating a NotificationClient entity.
|
||||
type NotificationClientCreate struct {
|
||||
config
|
||||
mutation *NotificationClientMutation
|
||||
hooks []Hook
|
||||
}
|
||||
|
||||
// SetName sets the "name" field.
|
||||
func (ncc *NotificationClientCreate) SetName(s string) *NotificationClientCreate {
|
||||
ncc.mutation.SetName(s)
|
||||
return ncc
|
||||
}
|
||||
|
||||
// SetService sets the "service" field.
|
||||
func (ncc *NotificationClientCreate) SetService(s string) *NotificationClientCreate {
|
||||
ncc.mutation.SetService(s)
|
||||
return ncc
|
||||
}
|
||||
|
||||
// SetSettings sets the "settings" field.
|
||||
func (ncc *NotificationClientCreate) SetSettings(s string) *NotificationClientCreate {
|
||||
ncc.mutation.SetSettings(s)
|
||||
return ncc
|
||||
}
|
||||
|
||||
// SetEnabled sets the "enabled" field.
|
||||
func (ncc *NotificationClientCreate) SetEnabled(b bool) *NotificationClientCreate {
|
||||
ncc.mutation.SetEnabled(b)
|
||||
return ncc
|
||||
}
|
||||
|
||||
// SetNillableEnabled sets the "enabled" field if the given value is not nil.
|
||||
func (ncc *NotificationClientCreate) SetNillableEnabled(b *bool) *NotificationClientCreate {
|
||||
if b != nil {
|
||||
ncc.SetEnabled(*b)
|
||||
}
|
||||
return ncc
|
||||
}
|
||||
|
||||
// Mutation returns the NotificationClientMutation object of the builder.
|
||||
func (ncc *NotificationClientCreate) Mutation() *NotificationClientMutation {
|
||||
return ncc.mutation
|
||||
}
|
||||
|
||||
// Save creates the NotificationClient in the database.
|
||||
func (ncc *NotificationClientCreate) Save(ctx context.Context) (*NotificationClient, error) {
|
||||
ncc.defaults()
|
||||
return withHooks(ctx, ncc.sqlSave, ncc.mutation, ncc.hooks)
|
||||
}
|
||||
|
||||
// SaveX calls Save and panics if Save returns an error.
|
||||
func (ncc *NotificationClientCreate) SaveX(ctx context.Context) *NotificationClient {
|
||||
v, err := ncc.Save(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
// Exec executes the query.
|
||||
func (ncc *NotificationClientCreate) Exec(ctx context.Context) error {
|
||||
_, err := ncc.Save(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (ncc *NotificationClientCreate) ExecX(ctx context.Context) {
|
||||
if err := ncc.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// defaults sets the default values of the builder before save.
|
||||
func (ncc *NotificationClientCreate) defaults() {
|
||||
if _, ok := ncc.mutation.Enabled(); !ok {
|
||||
v := notificationclient.DefaultEnabled
|
||||
ncc.mutation.SetEnabled(v)
|
||||
}
|
||||
}
|
||||
|
||||
// check runs all checks and user-defined validators on the builder.
|
||||
func (ncc *NotificationClientCreate) check() error {
|
||||
if _, ok := ncc.mutation.Name(); !ok {
|
||||
return &ValidationError{Name: "name", err: errors.New(`ent: missing required field "NotificationClient.name"`)}
|
||||
}
|
||||
if _, ok := ncc.mutation.Service(); !ok {
|
||||
return &ValidationError{Name: "service", err: errors.New(`ent: missing required field "NotificationClient.service"`)}
|
||||
}
|
||||
if _, ok := ncc.mutation.Settings(); !ok {
|
||||
return &ValidationError{Name: "settings", err: errors.New(`ent: missing required field "NotificationClient.settings"`)}
|
||||
}
|
||||
if _, ok := ncc.mutation.Enabled(); !ok {
|
||||
return &ValidationError{Name: "enabled", err: errors.New(`ent: missing required field "NotificationClient.enabled"`)}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ncc *NotificationClientCreate) sqlSave(ctx context.Context) (*NotificationClient, error) {
|
||||
if err := ncc.check(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_node, _spec := ncc.createSpec()
|
||||
if err := sqlgraph.CreateNode(ctx, ncc.driver, _spec); err != nil {
|
||||
if sqlgraph.IsConstraintError(err) {
|
||||
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
id := _spec.ID.Value.(int64)
|
||||
_node.ID = int(id)
|
||||
ncc.mutation.id = &_node.ID
|
||||
ncc.mutation.done = true
|
||||
return _node, nil
|
||||
}
|
||||
|
||||
func (ncc *NotificationClientCreate) createSpec() (*NotificationClient, *sqlgraph.CreateSpec) {
|
||||
var (
|
||||
_node = &NotificationClient{config: ncc.config}
|
||||
_spec = sqlgraph.NewCreateSpec(notificationclient.Table, sqlgraph.NewFieldSpec(notificationclient.FieldID, field.TypeInt))
|
||||
)
|
||||
if value, ok := ncc.mutation.Name(); ok {
|
||||
_spec.SetField(notificationclient.FieldName, field.TypeString, value)
|
||||
_node.Name = value
|
||||
}
|
||||
if value, ok := ncc.mutation.Service(); ok {
|
||||
_spec.SetField(notificationclient.FieldService, field.TypeString, value)
|
||||
_node.Service = value
|
||||
}
|
||||
if value, ok := ncc.mutation.Settings(); ok {
|
||||
_spec.SetField(notificationclient.FieldSettings, field.TypeString, value)
|
||||
_node.Settings = value
|
||||
}
|
||||
if value, ok := ncc.mutation.Enabled(); ok {
|
||||
_spec.SetField(notificationclient.FieldEnabled, field.TypeBool, value)
|
||||
_node.Enabled = value
|
||||
}
|
||||
return _node, _spec
|
||||
}
|
||||
|
||||
// NotificationClientCreateBulk is the builder for creating many NotificationClient entities in bulk.
|
||||
type NotificationClientCreateBulk struct {
|
||||
config
|
||||
err error
|
||||
builders []*NotificationClientCreate
|
||||
}
|
||||
|
||||
// Save creates the NotificationClient entities in the database.
|
||||
func (nccb *NotificationClientCreateBulk) Save(ctx context.Context) ([]*NotificationClient, error) {
|
||||
if nccb.err != nil {
|
||||
return nil, nccb.err
|
||||
}
|
||||
specs := make([]*sqlgraph.CreateSpec, len(nccb.builders))
|
||||
nodes := make([]*NotificationClient, len(nccb.builders))
|
||||
mutators := make([]Mutator, len(nccb.builders))
|
||||
for i := range nccb.builders {
|
||||
func(i int, root context.Context) {
|
||||
builder := nccb.builders[i]
|
||||
builder.defaults()
|
||||
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
|
||||
mutation, ok := m.(*NotificationClientMutation)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected mutation type %T", m)
|
||||
}
|
||||
if err := builder.check(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
builder.mutation = mutation
|
||||
var err error
|
||||
nodes[i], specs[i] = builder.createSpec()
|
||||
if i < len(mutators)-1 {
|
||||
_, err = mutators[i+1].Mutate(root, nccb.builders[i+1].mutation)
|
||||
} else {
|
||||
spec := &sqlgraph.BatchCreateSpec{Nodes: specs}
|
||||
// Invoke the actual operation on the latest mutation in the chain.
|
||||
if err = sqlgraph.BatchCreate(ctx, nccb.driver, spec); err != nil {
|
||||
if sqlgraph.IsConstraintError(err) {
|
||||
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||
}
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
mutation.id = &nodes[i].ID
|
||||
if specs[i].ID.Value != nil {
|
||||
id := specs[i].ID.Value.(int64)
|
||||
nodes[i].ID = int(id)
|
||||
}
|
||||
mutation.done = true
|
||||
return nodes[i], nil
|
||||
})
|
||||
for i := len(builder.hooks) - 1; i >= 0; i-- {
|
||||
mut = builder.hooks[i](mut)
|
||||
}
|
||||
mutators[i] = mut
|
||||
}(i, ctx)
|
||||
}
|
||||
if len(mutators) > 0 {
|
||||
if _, err := mutators[0].Mutate(ctx, nccb.builders[0].mutation); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return nodes, nil
|
||||
}
|
||||
|
||||
// SaveX is like Save, but panics if an error occurs.
|
||||
func (nccb *NotificationClientCreateBulk) SaveX(ctx context.Context) []*NotificationClient {
|
||||
v, err := nccb.Save(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
// Exec executes the query.
|
||||
func (nccb *NotificationClientCreateBulk) Exec(ctx context.Context) error {
|
||||
_, err := nccb.Save(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (nccb *NotificationClientCreateBulk) ExecX(ctx context.Context) {
|
||||
if err := nccb.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
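A brief usage sketch for the create builder: the name, service and settings payload are placeholders, and the Client facade is assumed from the generated client.go. Bulk inserts follow the same shape through NotificationClientCreateBulk.

// createWebhookClient is a hypothetical helper. Enabled is left unset on
// purpose: defaults() fills it from notificationclient.DefaultEnabled before
// the row is saved.
func createWebhookClient(ctx context.Context, client *Client) (*NotificationClient, error) {
	return client.NotificationClient.Create().
		SetName("my-webhook").
		SetService("webhook").
		SetSettings(`{"url": "http://example.com/hook"}`).
		Save(ctx)
}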
ent/notificationclient_delete.go (new file, 88 lines)
@@ -0,0 +1,88 @@

// Code generated by ent, DO NOT EDIT.

package ent

import (
	"context"
	"polaris/ent/notificationclient"
	"polaris/ent/predicate"

	"entgo.io/ent/dialect/sql"
	"entgo.io/ent/dialect/sql/sqlgraph"
	"entgo.io/ent/schema/field"
)

// NotificationClientDelete is the builder for deleting a NotificationClient entity.
type NotificationClientDelete struct {
	config
	hooks    []Hook
	mutation *NotificationClientMutation
}

// Where appends a list predicates to the NotificationClientDelete builder.
func (ncd *NotificationClientDelete) Where(ps ...predicate.NotificationClient) *NotificationClientDelete {
	ncd.mutation.Where(ps...)
	return ncd
}

// Exec executes the deletion query and returns how many vertices were deleted.
func (ncd *NotificationClientDelete) Exec(ctx context.Context) (int, error) {
	return withHooks(ctx, ncd.sqlExec, ncd.mutation, ncd.hooks)
}

// ExecX is like Exec, but panics if an error occurs.
func (ncd *NotificationClientDelete) ExecX(ctx context.Context) int {
	n, err := ncd.Exec(ctx)
	if err != nil {
		panic(err)
	}
	return n
}

func (ncd *NotificationClientDelete) sqlExec(ctx context.Context) (int, error) {
	_spec := sqlgraph.NewDeleteSpec(notificationclient.Table, sqlgraph.NewFieldSpec(notificationclient.FieldID, field.TypeInt))
	if ps := ncd.mutation.predicates; len(ps) > 0 {
		_spec.Predicate = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	affected, err := sqlgraph.DeleteNodes(ctx, ncd.driver, _spec)
	if err != nil && sqlgraph.IsConstraintError(err) {
		err = &ConstraintError{msg: err.Error(), wrap: err}
	}
	ncd.mutation.done = true
	return affected, err
}

// NotificationClientDeleteOne is the builder for deleting a single NotificationClient entity.
type NotificationClientDeleteOne struct {
	ncd *NotificationClientDelete
}

// Where appends a list predicates to the NotificationClientDelete builder.
func (ncdo *NotificationClientDeleteOne) Where(ps ...predicate.NotificationClient) *NotificationClientDeleteOne {
	ncdo.ncd.mutation.Where(ps...)
	return ncdo
}

// Exec executes the deletion query.
func (ncdo *NotificationClientDeleteOne) Exec(ctx context.Context) error {
	n, err := ncdo.ncd.Exec(ctx)
	switch {
	case err != nil:
		return err
	case n == 0:
		return &NotFoundError{notificationclient.Label}
	default:
		return nil
	}
}

// ExecX is like Exec, but panics if an error occurs.
func (ncdo *NotificationClientDeleteOne) ExecX(ctx context.Context) {
	if err := ncdo.Exec(ctx); err != nil {
		panic(err)
	}
}
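A short sketch of the delete builder in use, with the same assumptions about the Client facade and calling context as the earlier examples.

// deleteDisabled removes every disabled client and reports how many rows
// were deleted; client and ctx are assumed from the caller.
func deleteDisabled(ctx context.Context, client *Client) (int, error) {
	return client.NotificationClient.Delete().
		Where(notificationclient.Enabled(false)).
		Exec(ctx)
}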
ent/notificationclient_query.go (new file, 526 lines)
@@ -0,0 +1,526 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"math"
|
||||
"polaris/ent/notificationclient"
|
||||
"polaris/ent/predicate"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
)
|
||||
|
||||
// NotificationClientQuery is the builder for querying NotificationClient entities.
|
||||
type NotificationClientQuery struct {
|
||||
config
|
||||
ctx *QueryContext
|
||||
order []notificationclient.OrderOption
|
||||
inters []Interceptor
|
||||
predicates []predicate.NotificationClient
|
||||
// intermediate query (i.e. traversal path).
|
||||
sql *sql.Selector
|
||||
path func(context.Context) (*sql.Selector, error)
|
||||
}
|
||||
|
||||
// Where adds a new predicate for the NotificationClientQuery builder.
|
||||
func (ncq *NotificationClientQuery) Where(ps ...predicate.NotificationClient) *NotificationClientQuery {
|
||||
ncq.predicates = append(ncq.predicates, ps...)
|
||||
return ncq
|
||||
}
|
||||
|
||||
// Limit the number of records to be returned by this query.
|
||||
func (ncq *NotificationClientQuery) Limit(limit int) *NotificationClientQuery {
|
||||
ncq.ctx.Limit = &limit
|
||||
return ncq
|
||||
}
|
||||
|
||||
// Offset to start from.
|
||||
func (ncq *NotificationClientQuery) Offset(offset int) *NotificationClientQuery {
|
||||
ncq.ctx.Offset = &offset
|
||||
return ncq
|
||||
}
|
||||
|
||||
// Unique configures the query builder to filter duplicate records on query.
|
||||
// By default, unique is set to true, and can be disabled using this method.
|
||||
func (ncq *NotificationClientQuery) Unique(unique bool) *NotificationClientQuery {
|
||||
ncq.ctx.Unique = &unique
|
||||
return ncq
|
||||
}
|
||||
|
||||
// Order specifies how the records should be ordered.
|
||||
func (ncq *NotificationClientQuery) Order(o ...notificationclient.OrderOption) *NotificationClientQuery {
|
||||
ncq.order = append(ncq.order, o...)
|
||||
return ncq
|
||||
}
|
||||
|
||||
// First returns the first NotificationClient entity from the query.
|
||||
// Returns a *NotFoundError when no NotificationClient was found.
|
||||
func (ncq *NotificationClientQuery) First(ctx context.Context) (*NotificationClient, error) {
|
||||
nodes, err := ncq.Limit(1).All(setContextOp(ctx, ncq.ctx, "First"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(nodes) == 0 {
|
||||
return nil, &NotFoundError{notificationclient.Label}
|
||||
}
|
||||
return nodes[0], nil
|
||||
}
|
||||
|
||||
// FirstX is like First, but panics if an error occurs.
|
||||
func (ncq *NotificationClientQuery) FirstX(ctx context.Context) *NotificationClient {
|
||||
node, err := ncq.First(ctx)
|
||||
if err != nil && !IsNotFound(err) {
|
||||
panic(err)
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
// FirstID returns the first NotificationClient ID from the query.
|
||||
// Returns a *NotFoundError when no NotificationClient ID was found.
|
||||
func (ncq *NotificationClientQuery) FirstID(ctx context.Context) (id int, err error) {
|
||||
var ids []int
|
||||
if ids, err = ncq.Limit(1).IDs(setContextOp(ctx, ncq.ctx, "FirstID")); err != nil {
|
||||
return
|
||||
}
|
||||
if len(ids) == 0 {
|
||||
err = &NotFoundError{notificationclient.Label}
|
||||
return
|
||||
}
|
||||
return ids[0], nil
|
||||
}
|
||||
|
||||
// FirstIDX is like FirstID, but panics if an error occurs.
|
||||
func (ncq *NotificationClientQuery) FirstIDX(ctx context.Context) int {
|
||||
id, err := ncq.FirstID(ctx)
|
||||
if err != nil && !IsNotFound(err) {
|
||||
panic(err)
|
||||
}
|
||||
return id
|
||||
}
|
||||
|
||||
// Only returns a single NotificationClient entity found by the query, ensuring it only returns one.
|
||||
// Returns a *NotSingularError when more than one NotificationClient entity is found.
|
||||
// Returns a *NotFoundError when no NotificationClient entities are found.
|
||||
func (ncq *NotificationClientQuery) Only(ctx context.Context) (*NotificationClient, error) {
|
||||
nodes, err := ncq.Limit(2).All(setContextOp(ctx, ncq.ctx, "Only"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
switch len(nodes) {
|
||||
case 1:
|
||||
return nodes[0], nil
|
||||
case 0:
|
||||
return nil, &NotFoundError{notificationclient.Label}
|
||||
default:
|
||||
return nil, &NotSingularError{notificationclient.Label}
|
||||
}
|
||||
}
|
||||
|
||||
// OnlyX is like Only, but panics if an error occurs.
|
||||
func (ncq *NotificationClientQuery) OnlyX(ctx context.Context) *NotificationClient {
|
||||
node, err := ncq.Only(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
// OnlyID is like Only, but returns the only NotificationClient ID in the query.
|
||||
// Returns a *NotSingularError when more than one NotificationClient ID is found.
|
||||
// Returns a *NotFoundError when no entities are found.
|
||||
func (ncq *NotificationClientQuery) OnlyID(ctx context.Context) (id int, err error) {
|
||||
var ids []int
|
||||
if ids, err = ncq.Limit(2).IDs(setContextOp(ctx, ncq.ctx, "OnlyID")); err != nil {
|
||||
return
|
||||
}
|
||||
switch len(ids) {
|
||||
case 1:
|
||||
id = ids[0]
|
||||
case 0:
|
||||
err = &NotFoundError{notificationclient.Label}
|
||||
default:
|
||||
err = &NotSingularError{notificationclient.Label}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// OnlyIDX is like OnlyID, but panics if an error occurs.
|
||||
func (ncq *NotificationClientQuery) OnlyIDX(ctx context.Context) int {
|
||||
id, err := ncq.OnlyID(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return id
|
||||
}
|
||||
|
||||
// All executes the query and returns a list of NotificationClients.
|
||||
func (ncq *NotificationClientQuery) All(ctx context.Context) ([]*NotificationClient, error) {
|
||||
ctx = setContextOp(ctx, ncq.ctx, "All")
|
||||
if err := ncq.prepareQuery(ctx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
qr := querierAll[[]*NotificationClient, *NotificationClientQuery]()
|
||||
return withInterceptors[[]*NotificationClient](ctx, ncq, qr, ncq.inters)
|
||||
}
|
||||
|
||||
// AllX is like All, but panics if an error occurs.
|
||||
func (ncq *NotificationClientQuery) AllX(ctx context.Context) []*NotificationClient {
|
||||
nodes, err := ncq.All(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return nodes
|
||||
}
|
||||
|
||||
// IDs executes the query and returns a list of NotificationClient IDs.
|
||||
func (ncq *NotificationClientQuery) IDs(ctx context.Context) (ids []int, err error) {
|
||||
if ncq.ctx.Unique == nil && ncq.path != nil {
|
||||
ncq.Unique(true)
|
||||
}
|
||||
ctx = setContextOp(ctx, ncq.ctx, "IDs")
|
||||
if err = ncq.Select(notificationclient.FieldID).Scan(ctx, &ids); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return ids, nil
|
||||
}
|
||||
|
||||
// IDsX is like IDs, but panics if an error occurs.
|
||||
func (ncq *NotificationClientQuery) IDsX(ctx context.Context) []int {
|
||||
ids, err := ncq.IDs(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return ids
|
||||
}
|
||||
|
||||
// Count returns the count of the given query.
|
||||
func (ncq *NotificationClientQuery) Count(ctx context.Context) (int, error) {
|
||||
ctx = setContextOp(ctx, ncq.ctx, "Count")
|
||||
if err := ncq.prepareQuery(ctx); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return withInterceptors[int](ctx, ncq, querierCount[*NotificationClientQuery](), ncq.inters)
|
||||
}
|
||||
|
||||
// CountX is like Count, but panics if an error occurs.
|
||||
func (ncq *NotificationClientQuery) CountX(ctx context.Context) int {
|
||||
count, err := ncq.Count(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
// Exist returns true if the query has elements in the graph.
|
||||
func (ncq *NotificationClientQuery) Exist(ctx context.Context) (bool, error) {
|
||||
ctx = setContextOp(ctx, ncq.ctx, "Exist")
|
||||
switch _, err := ncq.FirstID(ctx); {
|
||||
case IsNotFound(err):
|
||||
return false, nil
|
||||
case err != nil:
|
||||
return false, fmt.Errorf("ent: check existence: %w", err)
|
||||
default:
|
||||
return true, nil
|
||||
}
|
||||
}
|
||||
|
||||
// ExistX is like Exist, but panics if an error occurs.
|
||||
func (ncq *NotificationClientQuery) ExistX(ctx context.Context) bool {
|
||||
exist, err := ncq.Exist(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return exist
|
||||
}
|
||||
|
||||
// Clone returns a duplicate of the NotificationClientQuery builder, including all associated steps. It can be
|
||||
// used to prepare common query builders and use them differently after the clone is made.
|
||||
func (ncq *NotificationClientQuery) Clone() *NotificationClientQuery {
|
||||
if ncq == nil {
|
||||
return nil
|
||||
}
|
||||
return &NotificationClientQuery{
|
||||
config: ncq.config,
|
||||
ctx: ncq.ctx.Clone(),
|
||||
order: append([]notificationclient.OrderOption{}, ncq.order...),
|
||||
inters: append([]Interceptor{}, ncq.inters...),
|
||||
predicates: append([]predicate.NotificationClient{}, ncq.predicates...),
|
||||
// clone intermediate query.
|
||||
sql: ncq.sql.Clone(),
|
||||
path: ncq.path,
|
||||
}
|
||||
}
|
||||
|
||||
// GroupBy is used to group vertices by one or more fields/columns.
|
||||
// It is often used with aggregate functions, like: count, max, mean, min, sum.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// var v []struct {
|
||||
// Name string `json:"name,omitempty"`
|
||||
// Count int `json:"count,omitempty"`
|
||||
// }
|
||||
//
|
||||
// client.NotificationClient.Query().
|
||||
// GroupBy(notificationclient.FieldName).
|
||||
// Aggregate(ent.Count()).
|
||||
// Scan(ctx, &v)
|
||||
func (ncq *NotificationClientQuery) GroupBy(field string, fields ...string) *NotificationClientGroupBy {
|
||||
ncq.ctx.Fields = append([]string{field}, fields...)
|
||||
grbuild := &NotificationClientGroupBy{build: ncq}
|
||||
grbuild.flds = &ncq.ctx.Fields
|
||||
grbuild.label = notificationclient.Label
|
||||
grbuild.scan = grbuild.Scan
|
||||
return grbuild
|
||||
}
|
||||
|
||||
// Select allows the selection one or more fields/columns for the given query,
|
||||
// instead of selecting all fields in the entity.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// var v []struct {
|
||||
// Name string `json:"name,omitempty"`
|
||||
// }
|
||||
//
|
||||
// client.NotificationClient.Query().
|
||||
// Select(notificationclient.FieldName).
|
||||
// Scan(ctx, &v)
|
||||
func (ncq *NotificationClientQuery) Select(fields ...string) *NotificationClientSelect {
|
||||
ncq.ctx.Fields = append(ncq.ctx.Fields, fields...)
|
||||
sbuild := &NotificationClientSelect{NotificationClientQuery: ncq}
|
||||
sbuild.label = notificationclient.Label
|
||||
sbuild.flds, sbuild.scan = &ncq.ctx.Fields, sbuild.Scan
|
||||
return sbuild
|
||||
}
|
||||
|
||||
// Aggregate returns a NotificationClientSelect configured with the given aggregations.
|
||||
func (ncq *NotificationClientQuery) Aggregate(fns ...AggregateFunc) *NotificationClientSelect {
|
||||
return ncq.Select().Aggregate(fns...)
|
||||
}
|
||||
|
||||
func (ncq *NotificationClientQuery) prepareQuery(ctx context.Context) error {
|
||||
for _, inter := range ncq.inters {
|
||||
if inter == nil {
|
||||
return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)")
|
||||
}
|
||||
if trv, ok := inter.(Traverser); ok {
|
||||
if err := trv.Traverse(ctx, ncq); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
for _, f := range ncq.ctx.Fields {
|
||||
if !notificationclient.ValidColumn(f) {
|
||||
return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
|
||||
}
|
||||
}
|
||||
if ncq.path != nil {
|
||||
prev, err := ncq.path(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
ncq.sql = prev
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ncq *NotificationClientQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*NotificationClient, error) {
|
||||
var (
|
||||
nodes = []*NotificationClient{}
|
||||
_spec = ncq.querySpec()
|
||||
)
|
||||
_spec.ScanValues = func(columns []string) ([]any, error) {
|
||||
return (*NotificationClient).scanValues(nil, columns)
|
||||
}
|
||||
_spec.Assign = func(columns []string, values []any) error {
|
||||
node := &NotificationClient{config: ncq.config}
|
||||
nodes = append(nodes, node)
|
||||
return node.assignValues(columns, values)
|
||||
}
|
||||
for i := range hooks {
|
||||
hooks[i](ctx, _spec)
|
||||
}
|
||||
if err := sqlgraph.QueryNodes(ctx, ncq.driver, _spec); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(nodes) == 0 {
|
||||
return nodes, nil
|
||||
}
|
||||
return nodes, nil
|
||||
}
|
||||
|
||||
func (ncq *NotificationClientQuery) sqlCount(ctx context.Context) (int, error) {
|
||||
_spec := ncq.querySpec()
|
||||
_spec.Node.Columns = ncq.ctx.Fields
|
||||
if len(ncq.ctx.Fields) > 0 {
|
||||
_spec.Unique = ncq.ctx.Unique != nil && *ncq.ctx.Unique
|
||||
}
|
||||
return sqlgraph.CountNodes(ctx, ncq.driver, _spec)
|
||||
}
|
||||
|
||||
func (ncq *NotificationClientQuery) querySpec() *sqlgraph.QuerySpec {
|
||||
_spec := sqlgraph.NewQuerySpec(notificationclient.Table, notificationclient.Columns, sqlgraph.NewFieldSpec(notificationclient.FieldID, field.TypeInt))
|
||||
_spec.From = ncq.sql
|
||||
if unique := ncq.ctx.Unique; unique != nil {
|
||||
_spec.Unique = *unique
|
||||
} else if ncq.path != nil {
|
||||
_spec.Unique = true
|
||||
}
|
||||
if fields := ncq.ctx.Fields; len(fields) > 0 {
|
||||
_spec.Node.Columns = make([]string, 0, len(fields))
|
||||
_spec.Node.Columns = append(_spec.Node.Columns, notificationclient.FieldID)
|
||||
for i := range fields {
|
||||
if fields[i] != notificationclient.FieldID {
|
||||
_spec.Node.Columns = append(_spec.Node.Columns, fields[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
if ps := ncq.predicates; len(ps) > 0 {
|
||||
_spec.Predicate = func(selector *sql.Selector) {
|
||||
for i := range ps {
|
||||
ps[i](selector)
|
||||
}
|
||||
}
|
||||
}
|
||||
if limit := ncq.ctx.Limit; limit != nil {
|
||||
_spec.Limit = *limit
|
||||
}
|
||||
if offset := ncq.ctx.Offset; offset != nil {
|
||||
_spec.Offset = *offset
|
||||
}
|
||||
if ps := ncq.order; len(ps) > 0 {
|
||||
_spec.Order = func(selector *sql.Selector) {
|
||||
for i := range ps {
|
||||
ps[i](selector)
|
||||
}
|
||||
}
|
||||
}
|
||||
return _spec
|
||||
}
|
||||
|
||||
func (ncq *NotificationClientQuery) sqlQuery(ctx context.Context) *sql.Selector {
|
||||
builder := sql.Dialect(ncq.driver.Dialect())
|
||||
t1 := builder.Table(notificationclient.Table)
|
||||
columns := ncq.ctx.Fields
|
||||
if len(columns) == 0 {
|
||||
columns = notificationclient.Columns
|
||||
}
|
||||
selector := builder.Select(t1.Columns(columns...)...).From(t1)
|
||||
if ncq.sql != nil {
|
||||
selector = ncq.sql
|
||||
selector.Select(selector.Columns(columns...)...)
|
||||
}
|
||||
if ncq.ctx.Unique != nil && *ncq.ctx.Unique {
|
||||
selector.Distinct()
|
||||
}
|
||||
for _, p := range ncq.predicates {
|
||||
p(selector)
|
||||
}
|
||||
for _, p := range ncq.order {
|
||||
p(selector)
|
||||
}
|
||||
if offset := ncq.ctx.Offset; offset != nil {
|
||||
// limit is mandatory for offset clause. We start
|
||||
// with default value, and override it below if needed.
|
||||
selector.Offset(*offset).Limit(math.MaxInt32)
|
||||
}
|
||||
if limit := ncq.ctx.Limit; limit != nil {
|
||||
selector.Limit(*limit)
|
||||
}
|
||||
return selector
|
||||
}
|
||||
|
||||
// NotificationClientGroupBy is the group-by builder for NotificationClient entities.
|
||||
type NotificationClientGroupBy struct {
|
||||
selector
|
||||
build *NotificationClientQuery
|
||||
}
|
||||
|
||||
// Aggregate adds the given aggregation functions to the group-by query.
|
||||
func (ncgb *NotificationClientGroupBy) Aggregate(fns ...AggregateFunc) *NotificationClientGroupBy {
|
||||
ncgb.fns = append(ncgb.fns, fns...)
|
||||
return ncgb
|
||||
}
|
||||
|
||||
// Scan applies the selector query and scans the result into the given value.
|
||||
func (ncgb *NotificationClientGroupBy) Scan(ctx context.Context, v any) error {
|
||||
ctx = setContextOp(ctx, ncgb.build.ctx, "GroupBy")
|
||||
if err := ncgb.build.prepareQuery(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
return scanWithInterceptors[*NotificationClientQuery, *NotificationClientGroupBy](ctx, ncgb.build, ncgb, ncgb.build.inters, v)
|
||||
}
|
||||
|
||||
func (ncgb *NotificationClientGroupBy) sqlScan(ctx context.Context, root *NotificationClientQuery, v any) error {
|
||||
selector := root.sqlQuery(ctx).Select()
|
||||
aggregation := make([]string, 0, len(ncgb.fns))
|
||||
for _, fn := range ncgb.fns {
|
||||
aggregation = append(aggregation, fn(selector))
|
||||
}
|
||||
if len(selector.SelectedColumns()) == 0 {
|
||||
columns := make([]string, 0, len(*ncgb.flds)+len(ncgb.fns))
|
||||
for _, f := range *ncgb.flds {
|
||||
columns = append(columns, selector.C(f))
|
||||
}
|
||||
columns = append(columns, aggregation...)
|
||||
selector.Select(columns...)
|
||||
}
|
||||
selector.GroupBy(selector.Columns(*ncgb.flds...)...)
|
||||
if err := selector.Err(); err != nil {
|
||||
return err
|
||||
}
|
||||
rows := &sql.Rows{}
|
||||
query, args := selector.Query()
|
||||
if err := ncgb.build.driver.Query(ctx, query, args, rows); err != nil {
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
return sql.ScanSlice(rows, v)
|
||||
}
|
||||
|
||||
// NotificationClientSelect is the builder for selecting fields of NotificationClient entities.
|
||||
type NotificationClientSelect struct {
|
||||
*NotificationClientQuery
|
||||
selector
|
||||
}
|
||||
|
||||
// Aggregate adds the given aggregation functions to the selector query.
|
||||
func (ncs *NotificationClientSelect) Aggregate(fns ...AggregateFunc) *NotificationClientSelect {
|
||||
ncs.fns = append(ncs.fns, fns...)
|
||||
return ncs
|
||||
}
|
||||
|
||||
// Scan applies the selector query and scans the result into the given value.
|
||||
func (ncs *NotificationClientSelect) Scan(ctx context.Context, v any) error {
|
||||
ctx = setContextOp(ctx, ncs.ctx, "Select")
|
||||
if err := ncs.prepareQuery(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
return scanWithInterceptors[*NotificationClientQuery, *NotificationClientSelect](ctx, ncs.NotificationClientQuery, ncs, ncs.inters, v)
|
||||
}
|
||||
|
||||
func (ncs *NotificationClientSelect) sqlScan(ctx context.Context, root *NotificationClientQuery, v any) error {
|
||||
selector := root.sqlQuery(ctx)
|
||||
aggregation := make([]string, 0, len(ncs.fns))
|
||||
for _, fn := range ncs.fns {
|
||||
aggregation = append(aggregation, fn(selector))
|
||||
}
|
||||
switch n := len(*ncs.selector.flds); {
|
||||
case n == 0 && len(aggregation) > 0:
|
||||
selector.Select(aggregation...)
|
||||
case n != 0 && len(aggregation) > 0:
|
||||
selector.AppendSelect(aggregation...)
|
||||
}
|
||||
rows := &sql.Rows{}
|
||||
query, args := selector.Query()
|
||||
if err := ncs.driver.Query(ctx, query, args, rows); err != nil {
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
return sql.ScanSlice(rows, v)
|
||||
}
|
||||
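The query builder above supports the usual ent retrieval verbs (First, Only, All, IDs, Count, Exist). A compact sketch under the same Client facade assumption:

// summarize counts all clients and loads the enabled ones ordered by name;
// it is a sketch of the retrieval verbs, not code from this repository.
func summarize(ctx context.Context, client *Client) (int, []*NotificationClient, error) {
	total, err := client.NotificationClient.Query().Count(ctx)
	if err != nil {
		return 0, nil, err
	}
	enabled, err := client.NotificationClient.Query().
		Where(notificationclient.Enabled(true)).
		Order(notificationclient.ByName()).
		All(ctx)
	if err != nil {
		return 0, nil, err
	}
	return total, enabled, nil
}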
ent/notificationclient_update.go (new file, 311 lines)
@@ -0,0 +1,311 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"polaris/ent/notificationclient"
|
||||
"polaris/ent/predicate"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
)
|
||||
|
||||
// NotificationClientUpdate is the builder for updating NotificationClient entities.
|
||||
type NotificationClientUpdate struct {
|
||||
config
|
||||
hooks []Hook
|
||||
mutation *NotificationClientMutation
|
||||
}
|
||||
|
||||
// Where appends a list predicates to the NotificationClientUpdate builder.
|
||||
func (ncu *NotificationClientUpdate) Where(ps ...predicate.NotificationClient) *NotificationClientUpdate {
|
||||
ncu.mutation.Where(ps...)
|
||||
return ncu
|
||||
}
|
||||
|
||||
// SetName sets the "name" field.
|
||||
func (ncu *NotificationClientUpdate) SetName(s string) *NotificationClientUpdate {
|
||||
ncu.mutation.SetName(s)
|
||||
return ncu
|
||||
}
|
||||
|
||||
// SetNillableName sets the "name" field if the given value is not nil.
|
||||
func (ncu *NotificationClientUpdate) SetNillableName(s *string) *NotificationClientUpdate {
|
||||
if s != nil {
|
||||
ncu.SetName(*s)
|
||||
}
|
||||
return ncu
|
||||
}
|
||||
|
||||
// SetService sets the "service" field.
|
||||
func (ncu *NotificationClientUpdate) SetService(s string) *NotificationClientUpdate {
|
||||
ncu.mutation.SetService(s)
|
||||
return ncu
|
||||
}
|
||||
|
||||
// SetNillableService sets the "service" field if the given value is not nil.
|
||||
func (ncu *NotificationClientUpdate) SetNillableService(s *string) *NotificationClientUpdate {
|
||||
if s != nil {
|
||||
ncu.SetService(*s)
|
||||
}
|
||||
return ncu
|
||||
}
|
||||
|
||||
// SetSettings sets the "settings" field.
|
||||
func (ncu *NotificationClientUpdate) SetSettings(s string) *NotificationClientUpdate {
|
||||
ncu.mutation.SetSettings(s)
|
||||
return ncu
|
||||
}
|
||||
|
||||
// SetNillableSettings sets the "settings" field if the given value is not nil.
|
||||
func (ncu *NotificationClientUpdate) SetNillableSettings(s *string) *NotificationClientUpdate {
|
||||
if s != nil {
|
||||
ncu.SetSettings(*s)
|
||||
}
|
||||
return ncu
|
||||
}
|
||||
|
||||
// SetEnabled sets the "enabled" field.
|
||||
func (ncu *NotificationClientUpdate) SetEnabled(b bool) *NotificationClientUpdate {
|
||||
ncu.mutation.SetEnabled(b)
|
||||
return ncu
|
||||
}
|
||||
|
||||
// SetNillableEnabled sets the "enabled" field if the given value is not nil.
|
||||
func (ncu *NotificationClientUpdate) SetNillableEnabled(b *bool) *NotificationClientUpdate {
|
||||
if b != nil {
|
||||
ncu.SetEnabled(*b)
|
||||
}
|
||||
return ncu
|
||||
}
|
||||
|
||||
// Mutation returns the NotificationClientMutation object of the builder.
|
||||
func (ncu *NotificationClientUpdate) Mutation() *NotificationClientMutation {
|
||||
return ncu.mutation
|
||||
}
|
||||
|
||||
// Save executes the query and returns the number of nodes affected by the update operation.
|
||||
func (ncu *NotificationClientUpdate) Save(ctx context.Context) (int, error) {
|
||||
return withHooks(ctx, ncu.sqlSave, ncu.mutation, ncu.hooks)
|
||||
}
|
||||
|
||||
// SaveX is like Save, but panics if an error occurs.
|
||||
func (ncu *NotificationClientUpdate) SaveX(ctx context.Context) int {
|
||||
affected, err := ncu.Save(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return affected
|
||||
}
|
||||
|
||||
// Exec executes the query.
|
||||
func (ncu *NotificationClientUpdate) Exec(ctx context.Context) error {
|
||||
_, err := ncu.Save(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (ncu *NotificationClientUpdate) ExecX(ctx context.Context) {
|
||||
if err := ncu.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
func (ncu *NotificationClientUpdate) sqlSave(ctx context.Context) (n int, err error) {
|
||||
_spec := sqlgraph.NewUpdateSpec(notificationclient.Table, notificationclient.Columns, sqlgraph.NewFieldSpec(notificationclient.FieldID, field.TypeInt))
|
||||
if ps := ncu.mutation.predicates; len(ps) > 0 {
|
||||
_spec.Predicate = func(selector *sql.Selector) {
|
||||
for i := range ps {
|
||||
ps[i](selector)
|
||||
}
|
||||
}
|
||||
}
|
||||
if value, ok := ncu.mutation.Name(); ok {
|
||||
_spec.SetField(notificationclient.FieldName, field.TypeString, value)
|
||||
}
|
||||
if value, ok := ncu.mutation.Service(); ok {
|
||||
_spec.SetField(notificationclient.FieldService, field.TypeString, value)
|
||||
}
|
||||
if value, ok := ncu.mutation.Settings(); ok {
|
||||
_spec.SetField(notificationclient.FieldSettings, field.TypeString, value)
|
||||
}
|
||||
if value, ok := ncu.mutation.Enabled(); ok {
|
||||
_spec.SetField(notificationclient.FieldEnabled, field.TypeBool, value)
|
||||
}
|
||||
if n, err = sqlgraph.UpdateNodes(ctx, ncu.driver, _spec); err != nil {
|
||||
if _, ok := err.(*sqlgraph.NotFoundError); ok {
|
||||
err = &NotFoundError{notificationclient.Label}
|
||||
} else if sqlgraph.IsConstraintError(err) {
|
||||
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||
}
|
||||
return 0, err
|
||||
}
|
||||
ncu.mutation.done = true
|
||||
return n, nil
|
||||
}
|
||||
|
||||
// NotificationClientUpdateOne is the builder for updating a single NotificationClient entity.
|
||||
type NotificationClientUpdateOne struct {
|
||||
config
|
||||
fields []string
|
||||
hooks []Hook
|
||||
mutation *NotificationClientMutation
|
||||
}
|
||||
|
||||
// SetName sets the "name" field.
|
||||
func (ncuo *NotificationClientUpdateOne) SetName(s string) *NotificationClientUpdateOne {
|
||||
ncuo.mutation.SetName(s)
|
||||
return ncuo
|
||||
}
|
||||
|
||||
// SetNillableName sets the "name" field if the given value is not nil.
|
||||
func (ncuo *NotificationClientUpdateOne) SetNillableName(s *string) *NotificationClientUpdateOne {
|
||||
if s != nil {
|
||||
ncuo.SetName(*s)
|
||||
}
|
||||
return ncuo
|
||||
}
|
||||
|
||||
// SetService sets the "service" field.
|
||||
func (ncuo *NotificationClientUpdateOne) SetService(s string) *NotificationClientUpdateOne {
|
||||
ncuo.mutation.SetService(s)
|
||||
return ncuo
|
||||
}
|
||||
|
||||
// SetNillableService sets the "service" field if the given value is not nil.
|
||||
func (ncuo *NotificationClientUpdateOne) SetNillableService(s *string) *NotificationClientUpdateOne {
|
||||
if s != nil {
|
||||
ncuo.SetService(*s)
|
||||
}
|
||||
return ncuo
|
||||
}
|
||||
|
||||
// SetSettings sets the "settings" field.
|
||||
func (ncuo *NotificationClientUpdateOne) SetSettings(s string) *NotificationClientUpdateOne {
|
||||
ncuo.mutation.SetSettings(s)
|
||||
return ncuo
|
||||
}
|
||||
|
||||
// SetNillableSettings sets the "settings" field if the given value is not nil.
|
||||
func (ncuo *NotificationClientUpdateOne) SetNillableSettings(s *string) *NotificationClientUpdateOne {
|
||||
if s != nil {
|
||||
ncuo.SetSettings(*s)
|
||||
}
|
||||
return ncuo
|
||||
}
|
||||
|
||||
// SetEnabled sets the "enabled" field.
|
||||
func (ncuo *NotificationClientUpdateOne) SetEnabled(b bool) *NotificationClientUpdateOne {
|
||||
ncuo.mutation.SetEnabled(b)
|
||||
return ncuo
|
||||
}
|
||||
|
||||
// SetNillableEnabled sets the "enabled" field if the given value is not nil.
|
||||
func (ncuo *NotificationClientUpdateOne) SetNillableEnabled(b *bool) *NotificationClientUpdateOne {
|
||||
if b != nil {
|
||||
ncuo.SetEnabled(*b)
|
||||
}
|
||||
return ncuo
|
||||
}
|
||||
|
||||
// Mutation returns the NotificationClientMutation object of the builder.
|
||||
func (ncuo *NotificationClientUpdateOne) Mutation() *NotificationClientMutation {
|
||||
return ncuo.mutation
|
||||
}
|
||||
|
||||
// Where appends a list predicates to the NotificationClientUpdate builder.
|
||||
func (ncuo *NotificationClientUpdateOne) Where(ps ...predicate.NotificationClient) *NotificationClientUpdateOne {
|
||||
ncuo.mutation.Where(ps...)
|
||||
return ncuo
|
||||
}
|
||||
|
||||
// Select allows selecting one or more fields (columns) of the returned entity.
|
||||
// The default is selecting all fields defined in the entity schema.
|
||||
func (ncuo *NotificationClientUpdateOne) Select(field string, fields ...string) *NotificationClientUpdateOne {
|
||||
ncuo.fields = append([]string{field}, fields...)
|
||||
return ncuo
|
||||
}
|
||||
|
||||
// Save executes the query and returns the updated NotificationClient entity.
|
||||
func (ncuo *NotificationClientUpdateOne) Save(ctx context.Context) (*NotificationClient, error) {
|
||||
return withHooks(ctx, ncuo.sqlSave, ncuo.mutation, ncuo.hooks)
|
||||
}
|
||||
|
||||
// SaveX is like Save, but panics if an error occurs.
|
||||
func (ncuo *NotificationClientUpdateOne) SaveX(ctx context.Context) *NotificationClient {
|
||||
node, err := ncuo.Save(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
// Exec executes the query on the entity.
|
||||
func (ncuo *NotificationClientUpdateOne) Exec(ctx context.Context) error {
|
||||
_, err := ncuo.Save(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (ncuo *NotificationClientUpdateOne) ExecX(ctx context.Context) {
|
||||
if err := ncuo.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
func (ncuo *NotificationClientUpdateOne) sqlSave(ctx context.Context) (_node *NotificationClient, err error) {
|
||||
_spec := sqlgraph.NewUpdateSpec(notificationclient.Table, notificationclient.Columns, sqlgraph.NewFieldSpec(notificationclient.FieldID, field.TypeInt))
|
||||
id, ok := ncuo.mutation.ID()
|
||||
if !ok {
|
||||
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "NotificationClient.id" for update`)}
|
||||
}
|
||||
_spec.Node.ID.Value = id
|
||||
if fields := ncuo.fields; len(fields) > 0 {
|
||||
_spec.Node.Columns = make([]string, 0, len(fields))
|
||||
_spec.Node.Columns = append(_spec.Node.Columns, notificationclient.FieldID)
|
||||
for _, f := range fields {
|
||||
if !notificationclient.ValidColumn(f) {
|
||||
return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
|
||||
}
|
||||
if f != notificationclient.FieldID {
|
||||
_spec.Node.Columns = append(_spec.Node.Columns, f)
|
||||
}
|
||||
}
|
||||
}
|
||||
if ps := ncuo.mutation.predicates; len(ps) > 0 {
|
||||
_spec.Predicate = func(selector *sql.Selector) {
|
||||
for i := range ps {
|
||||
ps[i](selector)
|
||||
}
|
||||
}
|
||||
}
|
||||
if value, ok := ncuo.mutation.Name(); ok {
|
||||
_spec.SetField(notificationclient.FieldName, field.TypeString, value)
|
||||
}
|
||||
if value, ok := ncuo.mutation.Service(); ok {
|
||||
_spec.SetField(notificationclient.FieldService, field.TypeString, value)
|
||||
}
|
||||
if value, ok := ncuo.mutation.Settings(); ok {
|
||||
_spec.SetField(notificationclient.FieldSettings, field.TypeString, value)
|
||||
}
|
||||
if value, ok := ncuo.mutation.Enabled(); ok {
|
||||
_spec.SetField(notificationclient.FieldEnabled, field.TypeBool, value)
|
||||
}
|
||||
_node = &NotificationClient{config: ncuo.config}
|
||||
_spec.Assign = _node.assignValues
|
||||
_spec.ScanValues = _node.scanValues
|
||||
if err = sqlgraph.UpdateNode(ctx, ncuo.driver, _spec); err != nil {
|
||||
if _, ok := err.(*sqlgraph.NotFoundError); ok {
|
||||
err = &NotFoundError{notificationclient.Label}
|
||||
} else if sqlgraph.IsConstraintError(err) {
|
||||
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
ncuo.mutation.done = true
|
||||
return _node, nil
|
||||
}
|
||||
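Finally, a sketch of the update builder. The entry point client.NotificationClient.Update() is the conventional one generated in client.go and is assumed here rather than shown in this diff.

// disableService is an illustrative bulk update; Save reports how many rows
// were touched. client and ctx are assumed from the caller.
func disableService(ctx context.Context, client *Client, service string) (int, error) {
	return client.NotificationClient.Update().
		Where(notificationclient.ServiceEQ(service)).
		SetEnabled(false).
		Save(ctx)
}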
@@ -15,12 +15,18 @@ type Episode func(*sql.Selector)
// History is the predicate function for history builders.
type History func(*sql.Selector)

// ImportList is the predicate function for importlist builders.
type ImportList func(*sql.Selector)

// Indexers is the predicate function for indexers builders.
type Indexers func(*sql.Selector)

// Media is the predicate function for media builders.
type Media func(*sql.Selector)

// NotificationClient is the predicate function for notificationclient builders.
type NotificationClient func(*sql.Selector)

// Settings is the predicate function for settings builders.
type Settings func(*sql.Selector)
@@ -4,9 +4,11 @@ package ent
|
||||
|
||||
import (
|
||||
"polaris/ent/downloadclients"
|
||||
"polaris/ent/episode"
|
||||
"polaris/ent/history"
|
||||
"polaris/ent/indexers"
|
||||
"polaris/ent/media"
|
||||
"polaris/ent/notificationclient"
|
||||
"polaris/ent/schema"
|
||||
"polaris/ent/storage"
|
||||
"time"
|
||||
@@ -48,6 +50,10 @@ func init() {
|
||||
downloadclients.DefaultTags = downloadclientsDescTags.Default.(string)
|
||||
episodeFields := schema.Episode{}.Fields()
|
||||
_ = episodeFields
|
||||
// episodeDescMonitored is the schema descriptor for monitored field.
|
||||
episodeDescMonitored := episodeFields[7].Descriptor()
|
||||
// episode.DefaultMonitored holds the default value on creation for the monitored field.
|
||||
episode.DefaultMonitored = episodeDescMonitored.Default.(bool)
|
||||
historyFields := schema.History{}.Fields()
|
||||
_ = historyFields
|
||||
// historyDescSize is the schema descriptor for size field.
|
||||
@@ -60,6 +66,18 @@ func init() {
|
||||
indexersDescEnableRss := indexersFields[3].Descriptor()
|
||||
// indexers.DefaultEnableRss holds the default value on creation for the enable_rss field.
|
||||
indexers.DefaultEnableRss = indexersDescEnableRss.Default.(bool)
|
||||
// indexersDescPriority is the schema descriptor for priority field.
|
||||
indexersDescPriority := indexersFields[4].Descriptor()
|
||||
// indexers.DefaultPriority holds the default value on creation for the priority field.
|
||||
indexers.DefaultPriority = indexersDescPriority.Default.(int)
|
||||
// indexersDescSeedRatio is the schema descriptor for seed_ratio field.
|
||||
indexersDescSeedRatio := indexersFields[5].Descriptor()
|
||||
// indexers.DefaultSeedRatio holds the default value on creation for the seed_ratio field.
|
||||
indexers.DefaultSeedRatio = indexersDescSeedRatio.Default.(float32)
|
||||
// indexersDescDisabled is the schema descriptor for disabled field.
|
||||
indexersDescDisabled := indexersFields[6].Descriptor()
|
||||
// indexers.DefaultDisabled holds the default value on creation for the disabled field.
|
||||
indexers.DefaultDisabled = indexersDescDisabled.Default.(bool)
|
||||
mediaFields := schema.Media{}.Fields()
|
||||
_ = mediaFields
|
||||
// mediaDescCreatedAt is the schema descriptor for created_at field.
|
||||
@@ -70,14 +88,24 @@ func init() {
|
||||
mediaDescAirDate := mediaFields[8].Descriptor()
|
||||
// media.DefaultAirDate holds the default value on creation for the air_date field.
|
||||
media.DefaultAirDate = mediaDescAirDate.Default.(string)
|
||||
// mediaDescDownloadHistoryEpisodes is the schema descriptor for download_history_episodes field.
|
||||
mediaDescDownloadHistoryEpisodes := mediaFields[12].Descriptor()
|
||||
// media.DefaultDownloadHistoryEpisodes holds the default value on creation for the download_history_episodes field.
|
||||
media.DefaultDownloadHistoryEpisodes = mediaDescDownloadHistoryEpisodes.Default.(bool)
|
||||
notificationclientFields := schema.NotificationClient{}.Fields()
|
||||
_ = notificationclientFields
|
||||
// notificationclientDescEnabled is the schema descriptor for enabled field.
|
||||
notificationclientDescEnabled := notificationclientFields[3].Descriptor()
|
||||
// notificationclient.DefaultEnabled holds the default value on creation for the enabled field.
|
||||
notificationclient.DefaultEnabled = notificationclientDescEnabled.Default.(bool)
|
||||
storageFields := schema.Storage{}.Fields()
|
||||
_ = storageFields
|
||||
// storageDescDeleted is the schema descriptor for deleted field.
|
||||
storageDescDeleted := storageFields[3].Descriptor()
|
||||
storageDescDeleted := storageFields[5].Descriptor()
|
||||
// storage.DefaultDeleted holds the default value on creation for the deleted field.
|
||||
storage.DefaultDeleted = storageDescDeleted.Default.(bool)
|
||||
// storageDescDefault is the schema descriptor for default field.
|
||||
storageDescDefault := storageFields[4].Descriptor()
|
||||
storageDescDefault := storageFields[6].Descriptor()
|
||||
// storage.DefaultDefault holds the default value on creation for the default field.
|
||||
storage.DefaultDefault = storageDescDefault.Default.(bool)
|
||||
}
|
||||
|
||||
@@ -21,17 +21,18 @@ func (Episode) Fields() []ent.Field {
		field.String("overview"),
		field.String("air_date"),
		field.Enum("status").Values("missing", "downloading", "downloaded").Default("missing"),
		field.String("file_in_storage").Optional(),
		field.Bool("monitored").Default(false).StructTag("json:\"monitored\""), //whether this episode is monitored
		field.String("target_file").Optional(),
	}
}

// Edges of the Episode.
func (Episode) Edges() []ent.Edge {
	return []ent.Edge{
		edge.From("media", Media.Type).
			Ref("episodes").
			Unique().
		edge.From("media", Media.Type).
			Ref("episodes").
			Unique().
			Field("media_id"),
	}
}

}

@@ -19,7 +19,9 @@ func (History) Fields() []ent.Field {
		field.Time("date"),
		field.String("target_dir"),
		field.Int("size").Default(0),
		field.Enum("status").Values("running", "success", "fail", "uploading"),
		field.Int("download_client_id").Optional(),
		field.Int("indexer_id").Optional(),
		field.Enum("status").Values("running", "success", "fail", "uploading", "seeding"),
		field.String("saved").Optional(),
	}
}

32
ent/schema/importlist.go
Normal file
@@ -0,0 +1,32 @@
package schema

import (
	"entgo.io/ent"
	"entgo.io/ent/schema/field"
)

// ImportList holds the schema definition for the ImportList entity.
type ImportList struct {
	ent.Schema
}

// Fields of the ImportList.
func (ImportList) Fields() []ent.Field {
	return []ent.Field{
		field.String("name"),
		field.Enum("type").Values("plex", "doulist"),
		field.String("url").Optional(),
		field.String("qulity"),
		field.Int("storage_id"),
		field.JSON("settings", ImportListSettings{}).Optional(),
	}
}

// Edges of the ImportList.
func (ImportList) Edges() []ent.Edge {
	return nil
}

type ImportListSettings struct {
	//Url string `json:"url"`
}

@@ -17,7 +17,9 @@ func (Indexers) Fields() []ent.Field {
		field.String("implementation"),
		field.String("settings"),
		field.Bool("enable_rss").Default(true),
		field.Int("priority"),
		field.Int("priority").Default(50),
		field.Float32("seed_ratio").Optional().Default(0).Comment("minimal seed ratio requied, before removing torrent"),
		field.Bool("disabled").Optional().Default(false),
	}
}

@@ -25,9 +25,12 @@ func (Media) Fields() []ent.Field {
		field.String("overview"),
		field.Time("created_at").Default(time.Now()),
		field.String("air_date").Default(""),
		field.Enum("resolution").Values("720p", "1080p", "4k").Default("1080p"),
		field.Enum("resolution").Values("720p", "1080p", "2160p").Default("1080p"),
		field.Int("storage_id").Optional(),
		field.String("target_dir").Optional(),
		field.Bool("download_history_episodes").Optional().Default(false).Comment("tv series only"),
		field.JSON("limiter", MediaLimiter{}).Optional(),
		field.JSON("extras", MediaExtras{}).Optional(),
	}
}

@@ -37,3 +40,23 @@ func (Media) Edges() []ent.Edge {
		edge.To("episodes", Episode.Type),
	}
}

type MediaLimiter struct {
	SizeMin int `json:"size_min"` //in B
	SizeMax int `json:"size_max"` //in B
}

type MediaExtras struct {
	IsAdultMovie bool   `json:"is_adult_movie"`
	JavId        string `json:"javid"`
	//OriginCountry []string `json:"origin_country"`
	OriginalLanguage string `json:"original_language"`
	Genres           []struct {
		ID   int64  `json:"id"`
		Name string `json:"name"`
	} `json:"genres"`
}

func (m *MediaExtras) IsJav() bool {
	return m.IsAdultMovie && m.JavId != ""
}

26
ent/schema/notificationclient.go
Normal file
@@ -0,0 +1,26 @@
package schema

import (
	"entgo.io/ent"
	"entgo.io/ent/schema/field"
)

// NotificationClient holds the schema definition for the NotificationClient entity.
type NotificationClient struct {
	ent.Schema
}

// Fields of the NotificationClient.
func (NotificationClient) Fields() []ent.Field {
	return []ent.Field{
		field.String("name"),
		field.String("service"),
		field.String("settings"),
		field.Bool("enabled").Default(true),
	}
}

// Edges of the NotificationClient.
func (NotificationClient) Edges() []ent.Edge {
	return nil
}

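For orientation, a minimal sketch of how this schema's generated builders would typically be used from application code. It is not part of the changeset; only the Update setters are visible in this diff, so the `Create()` setter names below are assumed from ent's standard codegen conventions.

```go
// Sketch only (assumed ent-generated API, not shown in this diff).
func addNotificationClient(ctx context.Context, client *ent.Client, settings string) (*ent.NotificationClient, error) {
	return client.NotificationClient.
		Create().
		SetName("my pushover").
		SetService("pushover").
		SetSettings(settings). // serialized, service-specific settings stored as a plain string
		Save(ctx)              // "enabled" defaults to true per the schema above
}
```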
@@ -15,6 +15,8 @@ func (Storage) Fields() []ent.Field {
	return []ent.Field{
		field.String("name").Unique(),
		field.Enum("implementation").Values("webdav", "local"),
		field.String("tv_path").Optional(),
		field.String("movie_path").Optional(),
		field.String("settings").Optional(),
		field.Bool("deleted").Default(false),
		field.Bool("default").Default(false),

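A hedged sketch of how the two new optional path fields would be set through the generated `StorageCreate` builder. The `SetTvPath`/`SetMoviePath` setters appear later in this diff; the enum constant name `ImplementationLocal` is assumed from ent's enum codegen and is not shown here.

```go
// Sketch only: create a storage row using the new optional path fields.
func addLocalStorage(ctx context.Context, client *ent.Client) (*ent.Storage, error) {
	return client.Storage.
		Create().
		SetName("local").
		SetImplementation(storage.ImplementationLocal). // assumed constant name for the "local" enum value
		SetTvPath("/data/tv").                          // optional; clearable later via the Update builders' ClearTvPath
		SetMoviePath("/data/movies").                   // optional; clearable later via ClearMoviePath
		Save(ctx)
}
```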
@@ -20,6 +20,10 @@ type Storage struct {
|
||||
Name string `json:"name,omitempty"`
|
||||
// Implementation holds the value of the "implementation" field.
|
||||
Implementation storage.Implementation `json:"implementation,omitempty"`
|
||||
// TvPath holds the value of the "tv_path" field.
|
||||
TvPath string `json:"tv_path,omitempty"`
|
||||
// MoviePath holds the value of the "movie_path" field.
|
||||
MoviePath string `json:"movie_path,omitempty"`
|
||||
// Settings holds the value of the "settings" field.
|
||||
Settings string `json:"settings,omitempty"`
|
||||
// Deleted holds the value of the "deleted" field.
|
||||
@@ -38,7 +42,7 @@ func (*Storage) scanValues(columns []string) ([]any, error) {
|
||||
values[i] = new(sql.NullBool)
|
||||
case storage.FieldID:
|
||||
values[i] = new(sql.NullInt64)
|
||||
case storage.FieldName, storage.FieldImplementation, storage.FieldSettings:
|
||||
case storage.FieldName, storage.FieldImplementation, storage.FieldTvPath, storage.FieldMoviePath, storage.FieldSettings:
|
||||
values[i] = new(sql.NullString)
|
||||
default:
|
||||
values[i] = new(sql.UnknownType)
|
||||
@@ -73,6 +77,18 @@ func (s *Storage) assignValues(columns []string, values []any) error {
|
||||
} else if value.Valid {
|
||||
s.Implementation = storage.Implementation(value.String)
|
||||
}
|
||||
case storage.FieldTvPath:
|
||||
if value, ok := values[i].(*sql.NullString); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field tv_path", values[i])
|
||||
} else if value.Valid {
|
||||
s.TvPath = value.String
|
||||
}
|
||||
case storage.FieldMoviePath:
|
||||
if value, ok := values[i].(*sql.NullString); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field movie_path", values[i])
|
||||
} else if value.Valid {
|
||||
s.MoviePath = value.String
|
||||
}
|
||||
case storage.FieldSettings:
|
||||
if value, ok := values[i].(*sql.NullString); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field settings", values[i])
|
||||
@@ -133,6 +149,12 @@ func (s *Storage) String() string {
|
||||
builder.WriteString("implementation=")
|
||||
builder.WriteString(fmt.Sprintf("%v", s.Implementation))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("tv_path=")
|
||||
builder.WriteString(s.TvPath)
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("movie_path=")
|
||||
builder.WriteString(s.MoviePath)
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("settings=")
|
||||
builder.WriteString(s.Settings)
|
||||
builder.WriteString(", ")
|
||||
|
||||
@@ -17,6 +17,10 @@ const (
|
||||
FieldName = "name"
|
||||
// FieldImplementation holds the string denoting the implementation field in the database.
|
||||
FieldImplementation = "implementation"
|
||||
// FieldTvPath holds the string denoting the tv_path field in the database.
|
||||
FieldTvPath = "tv_path"
|
||||
// FieldMoviePath holds the string denoting the movie_path field in the database.
|
||||
FieldMoviePath = "movie_path"
|
||||
// FieldSettings holds the string denoting the settings field in the database.
|
||||
FieldSettings = "settings"
|
||||
// FieldDeleted holds the string denoting the deleted field in the database.
|
||||
@@ -32,6 +36,8 @@ var Columns = []string{
|
||||
FieldID,
|
||||
FieldName,
|
||||
FieldImplementation,
|
||||
FieldTvPath,
|
||||
FieldMoviePath,
|
||||
FieldSettings,
|
||||
FieldDeleted,
|
||||
FieldDefault,
|
||||
@@ -95,6 +101,16 @@ func ByImplementation(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldImplementation, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByTvPath orders the results by the tv_path field.
|
||||
func ByTvPath(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldTvPath, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByMoviePath orders the results by the movie_path field.
|
||||
func ByMoviePath(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldMoviePath, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// BySettings orders the results by the settings field.
|
||||
func BySettings(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldSettings, opts...).ToFunc()
|
||||
|
||||
@@ -58,6 +58,16 @@ func Name(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldEQ(FieldName, v))
|
||||
}
|
||||
|
||||
// TvPath applies equality check predicate on the "tv_path" field. It's identical to TvPathEQ.
|
||||
func TvPath(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldEQ(FieldTvPath, v))
|
||||
}
|
||||
|
||||
// MoviePath applies equality check predicate on the "movie_path" field. It's identical to MoviePathEQ.
|
||||
func MoviePath(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldEQ(FieldMoviePath, v))
|
||||
}
|
||||
|
||||
// Settings applies equality check predicate on the "settings" field. It's identical to SettingsEQ.
|
||||
func Settings(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldEQ(FieldSettings, v))
|
||||
@@ -158,6 +168,156 @@ func ImplementationNotIn(vs ...Implementation) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldNotIn(FieldImplementation, vs...))
|
||||
}
|
||||
|
||||
// TvPathEQ applies the EQ predicate on the "tv_path" field.
|
||||
func TvPathEQ(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldEQ(FieldTvPath, v))
|
||||
}
|
||||
|
||||
// TvPathNEQ applies the NEQ predicate on the "tv_path" field.
|
||||
func TvPathNEQ(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldNEQ(FieldTvPath, v))
|
||||
}
|
||||
|
||||
// TvPathIn applies the In predicate on the "tv_path" field.
|
||||
func TvPathIn(vs ...string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldIn(FieldTvPath, vs...))
|
||||
}
|
||||
|
||||
// TvPathNotIn applies the NotIn predicate on the "tv_path" field.
|
||||
func TvPathNotIn(vs ...string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldNotIn(FieldTvPath, vs...))
|
||||
}
|
||||
|
||||
// TvPathGT applies the GT predicate on the "tv_path" field.
|
||||
func TvPathGT(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldGT(FieldTvPath, v))
|
||||
}
|
||||
|
||||
// TvPathGTE applies the GTE predicate on the "tv_path" field.
|
||||
func TvPathGTE(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldGTE(FieldTvPath, v))
|
||||
}
|
||||
|
||||
// TvPathLT applies the LT predicate on the "tv_path" field.
|
||||
func TvPathLT(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldLT(FieldTvPath, v))
|
||||
}
|
||||
|
||||
// TvPathLTE applies the LTE predicate on the "tv_path" field.
|
||||
func TvPathLTE(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldLTE(FieldTvPath, v))
|
||||
}
|
||||
|
||||
// TvPathContains applies the Contains predicate on the "tv_path" field.
|
||||
func TvPathContains(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldContains(FieldTvPath, v))
|
||||
}
|
||||
|
||||
// TvPathHasPrefix applies the HasPrefix predicate on the "tv_path" field.
|
||||
func TvPathHasPrefix(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldHasPrefix(FieldTvPath, v))
|
||||
}
|
||||
|
||||
// TvPathHasSuffix applies the HasSuffix predicate on the "tv_path" field.
|
||||
func TvPathHasSuffix(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldHasSuffix(FieldTvPath, v))
|
||||
}
|
||||
|
||||
// TvPathIsNil applies the IsNil predicate on the "tv_path" field.
|
||||
func TvPathIsNil() predicate.Storage {
|
||||
return predicate.Storage(sql.FieldIsNull(FieldTvPath))
|
||||
}
|
||||
|
||||
// TvPathNotNil applies the NotNil predicate on the "tv_path" field.
|
||||
func TvPathNotNil() predicate.Storage {
|
||||
return predicate.Storage(sql.FieldNotNull(FieldTvPath))
|
||||
}
|
||||
|
||||
// TvPathEqualFold applies the EqualFold predicate on the "tv_path" field.
|
||||
func TvPathEqualFold(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldEqualFold(FieldTvPath, v))
|
||||
}
|
||||
|
||||
// TvPathContainsFold applies the ContainsFold predicate on the "tv_path" field.
|
||||
func TvPathContainsFold(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldContainsFold(FieldTvPath, v))
|
||||
}
|
||||
|
||||
// MoviePathEQ applies the EQ predicate on the "movie_path" field.
|
||||
func MoviePathEQ(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldEQ(FieldMoviePath, v))
|
||||
}
|
||||
|
||||
// MoviePathNEQ applies the NEQ predicate on the "movie_path" field.
|
||||
func MoviePathNEQ(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldNEQ(FieldMoviePath, v))
|
||||
}
|
||||
|
||||
// MoviePathIn applies the In predicate on the "movie_path" field.
|
||||
func MoviePathIn(vs ...string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldIn(FieldMoviePath, vs...))
|
||||
}
|
||||
|
||||
// MoviePathNotIn applies the NotIn predicate on the "movie_path" field.
|
||||
func MoviePathNotIn(vs ...string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldNotIn(FieldMoviePath, vs...))
|
||||
}
|
||||
|
||||
// MoviePathGT applies the GT predicate on the "movie_path" field.
|
||||
func MoviePathGT(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldGT(FieldMoviePath, v))
|
||||
}
|
||||
|
||||
// MoviePathGTE applies the GTE predicate on the "movie_path" field.
|
||||
func MoviePathGTE(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldGTE(FieldMoviePath, v))
|
||||
}
|
||||
|
||||
// MoviePathLT applies the LT predicate on the "movie_path" field.
|
||||
func MoviePathLT(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldLT(FieldMoviePath, v))
|
||||
}
|
||||
|
||||
// MoviePathLTE applies the LTE predicate on the "movie_path" field.
|
||||
func MoviePathLTE(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldLTE(FieldMoviePath, v))
|
||||
}
|
||||
|
||||
// MoviePathContains applies the Contains predicate on the "movie_path" field.
|
||||
func MoviePathContains(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldContains(FieldMoviePath, v))
|
||||
}
|
||||
|
||||
// MoviePathHasPrefix applies the HasPrefix predicate on the "movie_path" field.
|
||||
func MoviePathHasPrefix(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldHasPrefix(FieldMoviePath, v))
|
||||
}
|
||||
|
||||
// MoviePathHasSuffix applies the HasSuffix predicate on the "movie_path" field.
|
||||
func MoviePathHasSuffix(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldHasSuffix(FieldMoviePath, v))
|
||||
}
|
||||
|
||||
// MoviePathIsNil applies the IsNil predicate on the "movie_path" field.
|
||||
func MoviePathIsNil() predicate.Storage {
|
||||
return predicate.Storage(sql.FieldIsNull(FieldMoviePath))
|
||||
}
|
||||
|
||||
// MoviePathNotNil applies the NotNil predicate on the "movie_path" field.
|
||||
func MoviePathNotNil() predicate.Storage {
|
||||
return predicate.Storage(sql.FieldNotNull(FieldMoviePath))
|
||||
}
|
||||
|
||||
// MoviePathEqualFold applies the EqualFold predicate on the "movie_path" field.
|
||||
func MoviePathEqualFold(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldEqualFold(FieldMoviePath, v))
|
||||
}
|
||||
|
||||
// MoviePathContainsFold applies the ContainsFold predicate on the "movie_path" field.
|
||||
func MoviePathContainsFold(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldContainsFold(FieldMoviePath, v))
|
||||
}
|
||||
|
||||
// SettingsEQ applies the EQ predicate on the "settings" field.
|
||||
func SettingsEQ(v string) predicate.Storage {
|
||||
return predicate.Storage(sql.FieldEQ(FieldSettings, v))
|
||||
|
||||
@@ -31,6 +31,34 @@ func (sc *StorageCreate) SetImplementation(s storage.Implementation) *StorageCre
|
||||
return sc
|
||||
}
|
||||
|
||||
// SetTvPath sets the "tv_path" field.
|
||||
func (sc *StorageCreate) SetTvPath(s string) *StorageCreate {
|
||||
sc.mutation.SetTvPath(s)
|
||||
return sc
|
||||
}
|
||||
|
||||
// SetNillableTvPath sets the "tv_path" field if the given value is not nil.
|
||||
func (sc *StorageCreate) SetNillableTvPath(s *string) *StorageCreate {
|
||||
if s != nil {
|
||||
sc.SetTvPath(*s)
|
||||
}
|
||||
return sc
|
||||
}
|
||||
|
||||
// SetMoviePath sets the "movie_path" field.
|
||||
func (sc *StorageCreate) SetMoviePath(s string) *StorageCreate {
|
||||
sc.mutation.SetMoviePath(s)
|
||||
return sc
|
||||
}
|
||||
|
||||
// SetNillableMoviePath sets the "movie_path" field if the given value is not nil.
|
||||
func (sc *StorageCreate) SetNillableMoviePath(s *string) *StorageCreate {
|
||||
if s != nil {
|
||||
sc.SetMoviePath(*s)
|
||||
}
|
||||
return sc
|
||||
}
|
||||
|
||||
// SetSettings sets the "settings" field.
|
||||
func (sc *StorageCreate) SetSettings(s string) *StorageCreate {
|
||||
sc.mutation.SetSettings(s)
|
||||
@@ -171,6 +199,14 @@ func (sc *StorageCreate) createSpec() (*Storage, *sqlgraph.CreateSpec) {
|
||||
_spec.SetField(storage.FieldImplementation, field.TypeEnum, value)
|
||||
_node.Implementation = value
|
||||
}
|
||||
if value, ok := sc.mutation.TvPath(); ok {
|
||||
_spec.SetField(storage.FieldTvPath, field.TypeString, value)
|
||||
_node.TvPath = value
|
||||
}
|
||||
if value, ok := sc.mutation.MoviePath(); ok {
|
||||
_spec.SetField(storage.FieldMoviePath, field.TypeString, value)
|
||||
_node.MoviePath = value
|
||||
}
|
||||
if value, ok := sc.mutation.Settings(); ok {
|
||||
_spec.SetField(storage.FieldSettings, field.TypeString, value)
|
||||
_node.Settings = value
|
||||
|
||||
@@ -55,6 +55,46 @@ func (su *StorageUpdate) SetNillableImplementation(s *storage.Implementation) *S
|
||||
return su
|
||||
}
|
||||
|
||||
// SetTvPath sets the "tv_path" field.
|
||||
func (su *StorageUpdate) SetTvPath(s string) *StorageUpdate {
|
||||
su.mutation.SetTvPath(s)
|
||||
return su
|
||||
}
|
||||
|
||||
// SetNillableTvPath sets the "tv_path" field if the given value is not nil.
|
||||
func (su *StorageUpdate) SetNillableTvPath(s *string) *StorageUpdate {
|
||||
if s != nil {
|
||||
su.SetTvPath(*s)
|
||||
}
|
||||
return su
|
||||
}
|
||||
|
||||
// ClearTvPath clears the value of the "tv_path" field.
|
||||
func (su *StorageUpdate) ClearTvPath() *StorageUpdate {
|
||||
su.mutation.ClearTvPath()
|
||||
return su
|
||||
}
|
||||
|
||||
// SetMoviePath sets the "movie_path" field.
|
||||
func (su *StorageUpdate) SetMoviePath(s string) *StorageUpdate {
|
||||
su.mutation.SetMoviePath(s)
|
||||
return su
|
||||
}
|
||||
|
||||
// SetNillableMoviePath sets the "movie_path" field if the given value is not nil.
|
||||
func (su *StorageUpdate) SetNillableMoviePath(s *string) *StorageUpdate {
|
||||
if s != nil {
|
||||
su.SetMoviePath(*s)
|
||||
}
|
||||
return su
|
||||
}
|
||||
|
||||
// ClearMoviePath clears the value of the "movie_path" field.
|
||||
func (su *StorageUpdate) ClearMoviePath() *StorageUpdate {
|
||||
su.mutation.ClearMoviePath()
|
||||
return su
|
||||
}
|
||||
|
||||
// SetSettings sets the "settings" field.
|
||||
func (su *StorageUpdate) SetSettings(s string) *StorageUpdate {
|
||||
su.mutation.SetSettings(s)
|
||||
@@ -163,6 +203,18 @@ func (su *StorageUpdate) sqlSave(ctx context.Context) (n int, err error) {
|
||||
if value, ok := su.mutation.Implementation(); ok {
|
||||
_spec.SetField(storage.FieldImplementation, field.TypeEnum, value)
|
||||
}
|
||||
if value, ok := su.mutation.TvPath(); ok {
|
||||
_spec.SetField(storage.FieldTvPath, field.TypeString, value)
|
||||
}
|
||||
if su.mutation.TvPathCleared() {
|
||||
_spec.ClearField(storage.FieldTvPath, field.TypeString)
|
||||
}
|
||||
if value, ok := su.mutation.MoviePath(); ok {
|
||||
_spec.SetField(storage.FieldMoviePath, field.TypeString, value)
|
||||
}
|
||||
if su.mutation.MoviePathCleared() {
|
||||
_spec.ClearField(storage.FieldMoviePath, field.TypeString)
|
||||
}
|
||||
if value, ok := su.mutation.Settings(); ok {
|
||||
_spec.SetField(storage.FieldSettings, field.TypeString, value)
|
||||
}
|
||||
@@ -223,6 +275,46 @@ func (suo *StorageUpdateOne) SetNillableImplementation(s *storage.Implementation
|
||||
return suo
|
||||
}
|
||||
|
||||
// SetTvPath sets the "tv_path" field.
|
||||
func (suo *StorageUpdateOne) SetTvPath(s string) *StorageUpdateOne {
|
||||
suo.mutation.SetTvPath(s)
|
||||
return suo
|
||||
}
|
||||
|
||||
// SetNillableTvPath sets the "tv_path" field if the given value is not nil.
|
||||
func (suo *StorageUpdateOne) SetNillableTvPath(s *string) *StorageUpdateOne {
|
||||
if s != nil {
|
||||
suo.SetTvPath(*s)
|
||||
}
|
||||
return suo
|
||||
}
|
||||
|
||||
// ClearTvPath clears the value of the "tv_path" field.
|
||||
func (suo *StorageUpdateOne) ClearTvPath() *StorageUpdateOne {
|
||||
suo.mutation.ClearTvPath()
|
||||
return suo
|
||||
}
|
||||
|
||||
// SetMoviePath sets the "movie_path" field.
|
||||
func (suo *StorageUpdateOne) SetMoviePath(s string) *StorageUpdateOne {
|
||||
suo.mutation.SetMoviePath(s)
|
||||
return suo
|
||||
}
|
||||
|
||||
// SetNillableMoviePath sets the "movie_path" field if the given value is not nil.
|
||||
func (suo *StorageUpdateOne) SetNillableMoviePath(s *string) *StorageUpdateOne {
|
||||
if s != nil {
|
||||
suo.SetMoviePath(*s)
|
||||
}
|
||||
return suo
|
||||
}
|
||||
|
||||
// ClearMoviePath clears the value of the "movie_path" field.
|
||||
func (suo *StorageUpdateOne) ClearMoviePath() *StorageUpdateOne {
|
||||
suo.mutation.ClearMoviePath()
|
||||
return suo
|
||||
}
|
||||
|
||||
// SetSettings sets the "settings" field.
|
||||
func (suo *StorageUpdateOne) SetSettings(s string) *StorageUpdateOne {
|
||||
suo.mutation.SetSettings(s)
|
||||
@@ -361,6 +453,18 @@ func (suo *StorageUpdateOne) sqlSave(ctx context.Context) (_node *Storage, err e
|
||||
if value, ok := suo.mutation.Implementation(); ok {
|
||||
_spec.SetField(storage.FieldImplementation, field.TypeEnum, value)
|
||||
}
|
||||
if value, ok := suo.mutation.TvPath(); ok {
|
||||
_spec.SetField(storage.FieldTvPath, field.TypeString, value)
|
||||
}
|
||||
if suo.mutation.TvPathCleared() {
|
||||
_spec.ClearField(storage.FieldTvPath, field.TypeString)
|
||||
}
|
||||
if value, ok := suo.mutation.MoviePath(); ok {
|
||||
_spec.SetField(storage.FieldMoviePath, field.TypeString, value)
|
||||
}
|
||||
if suo.mutation.MoviePathCleared() {
|
||||
_spec.ClearField(storage.FieldMoviePath, field.TypeString)
|
||||
}
|
||||
if value, ok := suo.mutation.Settings(); ok {
|
||||
_spec.SetField(storage.FieldSettings, field.TypeString, value)
|
||||
}
|
||||
|
||||
@@ -18,10 +18,14 @@ type Tx struct {
|
||||
Episode *EpisodeClient
|
||||
// History is the client for interacting with the History builders.
|
||||
History *HistoryClient
|
||||
// ImportList is the client for interacting with the ImportList builders.
|
||||
ImportList *ImportListClient
|
||||
// Indexers is the client for interacting with the Indexers builders.
|
||||
Indexers *IndexersClient
|
||||
// Media is the client for interacting with the Media builders.
|
||||
Media *MediaClient
|
||||
// NotificationClient is the client for interacting with the NotificationClient builders.
|
||||
NotificationClient *NotificationClientClient
|
||||
// Settings is the client for interacting with the Settings builders.
|
||||
Settings *SettingsClient
|
||||
// Storage is the client for interacting with the Storage builders.
|
||||
@@ -160,8 +164,10 @@ func (tx *Tx) init() {
|
||||
tx.DownloadClients = NewDownloadClientsClient(tx.config)
|
||||
tx.Episode = NewEpisodeClient(tx.config)
|
||||
tx.History = NewHistoryClient(tx.config)
|
||||
tx.ImportList = NewImportListClient(tx.config)
|
||||
tx.Indexers = NewIndexersClient(tx.config)
|
||||
tx.Media = NewMediaClient(tx.config)
|
||||
tx.NotificationClient = NewNotificationClientClient(tx.config)
|
||||
tx.Settings = NewSettingsClient(tx.config)
|
||||
tx.Storage = NewStorageClient(tx.config)
|
||||
}
|
||||
|
||||
39
go.mod
@@ -8,10 +8,30 @@ require (
|
||||
github.com/mattn/go-sqlite3 v1.14.16
|
||||
github.com/robfig/cron v1.2.0
|
||||
go.uber.org/zap v1.27.0
|
||||
golang.org/x/net v0.25.0
|
||||
golang.org/x/net v0.27.0
|
||||
)
|
||||
|
||||
require github.com/adrg/strutil v0.3.1 // indirect
|
||||
require (
|
||||
github.com/gin-contrib/zap v1.1.3
|
||||
github.com/nikoksr/notify v1.0.0
|
||||
github.com/stretchr/testify v1.9.0
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/BurntSushi/toml v1.4.0 // indirect
|
||||
github.com/PuerkitoBio/goquery v1.9.2 // indirect
|
||||
github.com/andybalholm/cascadia v1.3.2 // indirect
|
||||
github.com/blinkbean/dingtalk v1.1.3 // indirect
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
||||
github.com/go-telegram-bot-api/telegram-bot-api v4.6.4+incompatible // indirect
|
||||
github.com/gregdel/pushover v1.3.1 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
||||
github.com/stretchr/objx v0.5.2 // indirect
|
||||
github.com/technoweenie/multipartstreamer v1.0.1 // indirect
|
||||
golang.org/x/sync v0.7.0 // indirect
|
||||
gopkg.in/natefinch/lumberjack.v2 v2.2.1 // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
)
|
||||
|
||||
require (
|
||||
ariga.io/atlas v0.19.1-0.20240203083654-5948b60a8e43 // indirect
|
||||
@@ -31,7 +51,7 @@ require (
|
||||
github.com/go-playground/validator/v10 v10.20.0 // indirect
|
||||
github.com/goccy/go-json v0.10.2 // indirect
|
||||
github.com/google/go-cmp v0.6.0 // indirect
|
||||
github.com/google/uuid v1.4.0 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
|
||||
github.com/hashicorp/hcl v1.0.0 // indirect
|
||||
github.com/hashicorp/hcl/v2 v2.13.0 // indirect
|
||||
@@ -55,12 +75,12 @@ require (
|
||||
github.com/ugorji/go/codec v1.2.12 // indirect
|
||||
github.com/zclconf/go-cty v1.8.0 // indirect
|
||||
golang.org/x/arch v0.8.0 // indirect
|
||||
golang.org/x/crypto v0.23.0
|
||||
golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 // indirect
|
||||
golang.org/x/mod v0.17.0 // indirect
|
||||
golang.org/x/sys v0.21.0 // indirect
|
||||
golang.org/x/crypto v0.25.0
|
||||
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56
|
||||
golang.org/x/mod v0.19.0 // indirect
|
||||
golang.org/x/sys v0.22.0
|
||||
golang.org/x/text v0.16.0 // indirect
|
||||
google.golang.org/protobuf v1.34.1 // indirect
|
||||
google.golang.org/protobuf v1.34.2 // indirect
|
||||
gopkg.in/ini.v1 v1.67.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
||||
@@ -72,7 +92,8 @@ require (
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/natefinch/lumberjack v2.0.0+incompatible
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/spf13/viper v1.19.0
|
||||
go.uber.org/multierr v1.10.0 // indirect
|
||||
go.uber.org/multierr v1.11.0 // indirect
|
||||
)
|
||||
|
||||
94
go.sum
@@ -2,14 +2,20 @@ ariga.io/atlas v0.19.1-0.20240203083654-5948b60a8e43 h1:GwdJbXydHCYPedeeLt4x/lrl
|
||||
ariga.io/atlas v0.19.1-0.20240203083654-5948b60a8e43/go.mod h1:uj3pm+hUTVN/X5yfdBexHlZv+1Xu5u5ZbZx7+CDavNU=
|
||||
entgo.io/ent v0.13.1 h1:uD8QwN1h6SNphdCCzmkMN3feSUzNnVvV/WIkHKMbzOE=
|
||||
entgo.io/ent v0.13.1/go.mod h1:qCEmo+biw3ccBn9OyL4ZK5dfpwg++l1Gxwac5B1206A=
|
||||
github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0=
|
||||
github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
|
||||
github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60=
|
||||
github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
|
||||
github.com/adrg/strutil v0.3.1 h1:OLvSS7CSJO8lBii4YmBt8jiK9QOtB9CzCzwl4Ic/Fz4=
|
||||
github.com/adrg/strutil v0.3.1/go.mod h1:8h90y18QLrs11IBffcGX3NW/GFBXCMcNg4M7H6MspPA=
|
||||
github.com/PuerkitoBio/goquery v1.9.2 h1:4/wZksC3KgkQw7SQgkKotmKljk0M6V8TUvA8Wb4yPeE=
|
||||
github.com/PuerkitoBio/goquery v1.9.2/go.mod h1:GHPCaP0ODyyxqcNoFGYlAprUFH81NuRPd0GX3Zu2Mvk=
|
||||
github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8=
|
||||
github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
|
||||
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
|
||||
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
|
||||
github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw=
|
||||
github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo=
|
||||
github.com/blinkbean/dingtalk v1.1.3 h1:MbidFZYom7DTFHD/YIs+eaI7kRy52kmWE/sy0xjo6E4=
|
||||
github.com/blinkbean/dingtalk v1.1.3/go.mod h1:9BaLuGSBqY3vT5hstValh48DbsKO7vaHaJnG9pXwbto=
|
||||
github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0=
|
||||
github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4=
|
||||
github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM=
|
||||
@@ -34,6 +40,8 @@ github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE
|
||||
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
|
||||
github.com/gin-contrib/static v1.1.2 h1:c3kT4bFkUJn2aoRU3s6XnMjJT8J6nNWJkR0NglqmlZ4=
|
||||
github.com/gin-contrib/static v1.1.2/go.mod h1:Fw90ozjHCmZBWbgrsqrDvO28YbhKEKzKp8GixhR4yLw=
|
||||
github.com/gin-contrib/zap v1.1.3 h1:9e/U9fYd4/OBfmSEBs5hHZq114uACn7bpuzvCkcJySA=
|
||||
github.com/gin-contrib/zap v1.1.3/go.mod h1:+BD/6NYZKJyUpqVoJEvgeq9GLz8pINEQvak9LHNOTSE=
|
||||
github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
|
||||
github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
|
||||
github.com/go-openapi/inflect v0.19.0 h1:9jCH9scKIbHeV9m12SmPilScz6krDxKRasNNSNPXu/4=
|
||||
@@ -46,6 +54,8 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
|
||||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||
github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8=
|
||||
github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
|
||||
github.com/go-telegram-bot-api/telegram-bot-api v4.6.4+incompatible h1:2cauKuaELYAEARXRkq2LrJ0yDDv1rW7+wrTEdVL3uaU=
|
||||
github.com/go-telegram-bot-api/telegram-bot-api v4.6.4+incompatible/go.mod h1:qf9acutJ8cwBUhm1bqgz6Bei9/C/c93FPDljKWwsOgM=
|
||||
github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68=
|
||||
github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
|
||||
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
|
||||
@@ -58,8 +68,10 @@ github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMyw
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4=
|
||||
github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/gregdel/pushover v1.3.1 h1:4bMLITOZ15+Zpi6qqoGqOPuVHCwSUvMCgVnN5Xhilfo=
|
||||
github.com/gregdel/pushover v1.3.1/go.mod h1:EcaO66Nn1StkpEm1iKtBTV3d2A16SoMsVER1PthX7to=
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
|
||||
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
|
||||
@@ -70,6 +82,8 @@ github.com/hekmon/cunits/v2 v2.1.0 h1:k6wIjc4PlacNOHwKEMBgWV2/c8jyD4eRMs5mR1BBhI
|
||||
github.com/hekmon/cunits/v2 v2.1.0/go.mod h1:9r1TycXYXaTmEWlAIfFV8JT+Xo59U96yUJAYHxzii2M=
|
||||
github.com/hekmon/transmissionrpc/v3 v3.0.0 h1:0Fb11qE0IBh4V4GlOwHNYpqpjcYDp5GouolwrpmcUDQ=
|
||||
github.com/hekmon/transmissionrpc/v3 v3.0.0/go.mod h1:38SlNhFzinVUuY87wGj3acOmRxeYZAZfrj6Re7UgCDg=
|
||||
github.com/jordan-wright/email v4.0.1-0.20210109023952-943e75fe5223+incompatible h1:jdpOPRN1zP63Td1hDQbZW73xKmzDvZHzVdNYxhnTMDA=
|
||||
github.com/jordan-wright/email v4.0.1-0.20210109023952-943e75fe5223+incompatible/go.mod h1:1c7szIrayyPPB/987hsnvNzLushdWf4o/79s3P08L8A=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||
@@ -104,6 +118,10 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||
github.com/natefinch/lumberjack v2.0.0+incompatible h1:4QJd3OLAMgj7ph+yZTuX13Ld4UpgHp07nNdFX7mqFfM=
|
||||
github.com/natefinch/lumberjack v2.0.0+incompatible/go.mod h1:Wi9p2TTF5DG5oU+6YfsmYQpsTIOm0B1VNzQg9Mw6nPk=
|
||||
github.com/nikoksr/notify v1.0.0 h1:qe9/6FRsWdxBgQgWcpvQ0sv8LRGJZDpRB4TkL2uNdO8=
|
||||
github.com/nikoksr/notify v1.0.0/go.mod h1:hPaaDt30d6LAA7/5nb0e48Bp/MctDfycCSs8VEgN29I=
|
||||
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
|
||||
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
|
||||
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
|
||||
@@ -138,6 +156,7 @@ github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
|
||||
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
@@ -149,58 +168,93 @@ github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsT
|
||||
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
|
||||
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
|
||||
github.com/technoweenie/multipartstreamer v1.0.1 h1:XRztA5MXiR1TIRHxH2uNxXxaIkKQDeX7m2XsSOlQEnM=
|
||||
github.com/technoweenie/multipartstreamer v1.0.1/go.mod h1:jNVxdtShOxzAsukZwTSw6MDx5eUJoiEBsSvzDU9uzog=
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
|
||||
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
|
||||
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
|
||||
github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4=
|
||||
github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
github.com/zclconf/go-cty v1.8.0 h1:s4AvqaeQzJIu3ndv4gVIhplVD0krU+bgrcLSVUnaWuA=
|
||||
github.com/zclconf/go-cty v1.8.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
go.uber.org/multierr v1.10.0 h1:S0h4aNzvfcFsC3dRF1jLoaov7oRaKqRGC/pUEJ2yvPQ=
|
||||
go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
|
||||
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
|
||||
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
|
||||
go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
|
||||
go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
|
||||
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
||||
golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc=
|
||||
golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI=
|
||||
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
|
||||
golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 h1:vr/HnozRka3pE4EsMEg1lgkXJkTFJCVUX+S/ZT6wYzM=
|
||||
golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842/go.mod h1:XtvwrStGgqGPLc4cjQfWqZHG1YFdYs6swckp8vpsjnc=
|
||||
golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA=
|
||||
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.25.0 h1:ypSNr+bnYL2YhwoMt2zPxHFmbAN1KZs/njMG3hxUp30=
|
||||
golang.org/x/crypto v0.25.0/go.mod h1:T+wALwcMOSE0kXgUAnPAHqTLW+XHgcELELW8VaDgm/M=
|
||||
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8=
|
||||
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.19.0 h1:fEdghXQSo20giMthA7cd28ZC+jts4amQ3YMXiP5oMQ8=
|
||||
golang.org/x/mod v0.19.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
|
||||
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
|
||||
golang.org/x/net v0.27.0 h1:5K3Njcw06/l2y9vpGCSdcxWOYHOUk3dVNGDXN+FvAys=
|
||||
golang.org/x/net v0.27.0/go.mod h1:dDi0PyhWNoiUOrAS8uXv/vnScO4wnHQO4mj9fn/RytE=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws=
|
||||
golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI=
|
||||
golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4=
|
||||
golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.23.0 h1:SGsXPZ+2l4JsgaCKkx+FQ9YZ5XEtA1GZYuoDjenLjvg=
|
||||
golang.org/x/tools v0.23.0/go.mod h1:pnu6ufv6vQkll6szChhK3C3L/ruaIv5eBeztNG8wtsI=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||
google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg=
|
||||
google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
|
||||
google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
|
||||
google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
|
||||
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||
gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc=
|
||||
gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc=
|
||||
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
||||
53
log/log.go
@@ -1,18 +1,63 @@
package log

import (
	"os"
	"path/filepath"
	"strings"

	"github.com/natefinch/lumberjack"
	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

var sugar *zap.SugaredLogger
var atom zap.AtomicLevel

const dataPath = "./data"

func init() {
	config := zap.NewDevelopmentConfig()
	config.EncoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
	config.DisableStacktrace = true
	logger, _ := config.Build(zap.AddCallerSkip(1))
	atom = zap.NewAtomicLevel()
	atom.SetLevel(zap.DebugLevel)

	w := zapcore.Lock(os.Stdout)
	if os.Getenv("GIN_MODE") == "release" {
		w = zapcore.AddSync(&lumberjack.Logger{
			Filename:   filepath.Join(dataPath, "logs", "polaris.log"),
			MaxSize:    50, // megabytes
			MaxBackups: 3,
			MaxAge:     30, // days
			Compress:   true,
		})

	}

	consoleEncoder := zapcore.NewConsoleEncoder(zap.NewDevelopmentEncoderConfig())

	logger := zap.New(zapcore.NewCore(consoleEncoder, w, atom), zap.AddCallerSkip(1), zap.AddCaller())

	sugar = logger.Sugar()

}

func SetLogLevel(l string) {
	switch strings.TrimSpace(strings.ToLower(l)) {
	case "debug":
		atom.SetLevel(zap.DebugLevel)
		Debug("set log level to debug")
	case "info":
		atom.SetLevel(zap.InfoLevel)
		Info("set log level to info")
	case "warn", "warning":
		atom.SetLevel(zap.WarnLevel)
		Warn("set log level to warning")
	case "error":
		atom.SetLevel(zap.ErrorLevel)
		Error("set log level to error")
	}
}

func Logger() *zap.SugaredLogger {
	return sugar
}

func Info(args ...interface{}) {

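For orientation (not part of the changeset), typical call sites for this package look like the sketch below; the threshold can be changed at runtime because init wires the core through the zap.AtomicLevel above.

```go
package main

import "polaris/log"

func main() {
	// Raise the threshold from the default debug level.
	log.SetLogLevel("info")

	log.Info("polaris starting")                              // emitted
	log.Debugf("suppressed at info level, request id=%d", 42) // filtered out by the atomic level
}
```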
52
pkg/cache/cache.go
vendored
Normal file
@@ -0,0 +1,52 @@
package cache

import (
	"polaris/log"
	"polaris/pkg/utils"
	"time"
)

func NewCache[T comparable, S any](timeout time.Duration) *Cache[T, S] {
	c := &Cache[T, S]{
		m:       utils.Map[T, inner[S]]{},
		timeout: timeout,
	}

	return c
}

type Cache[T comparable, S any] struct {
	m       utils.Map[T, inner[S]]
	timeout time.Duration
}

type inner[S any] struct {
	t time.Time
	s S
}

func (c *Cache[T, S]) Set(key T, value S) {
	c.m.Store(key, inner[S]{t: time.Now(), s: value})
}

func (c *Cache[T, S]) Get(key T) (S, bool) {
	c.m.Range(func(key T, value inner[S]) bool {
		if time.Since(value.t) > c.timeout {
			log.Debugf("delete old cache: %v", key)
			c.m.Delete(key)

		}
		return true
	})

	v, ok := c.m.Load(key)
	if !ok {
		return getZero[S](), ok
	}
	return v.s, ok
}

func getZero[T any]() T {
	var result T
	return result
}

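A short usage sketch for the generic cache above (not part of the changeset; the key/value types are arbitrary examples). Set stores a timestamped entry, and Get evicts anything older than the configured timeout before looking up the key.

```go
package main

import (
	"fmt"
	"time"

	"polaris/pkg/cache"
)

func main() {
	// Sketch only: cache expensive lookups for five minutes.
	c := cache.NewCache[int, string](5 * time.Minute)
	c.Set(42, "expensive lookup result")

	if v, ok := c.Get(42); ok {
		fmt.Println("cache hit:", v) // entries older than the timeout are evicted inside Get
	} else {
		fmt.Println("cache miss or expired")
	}
}
```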
@@ -1,13 +1,14 @@
package pkg

type Torrent interface {
	Name() string
	Progress() int
	Name() (string, error)
	Progress() (int, error)
	Stop() error
	Start() error
	Remove() error
	Save() string
	Exists() bool
	SeedRatio() (float64, error)
}

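Since Name and Progress now return errors, existing call sites have to be adapted. A hedged sketch of an adapted caller follows; the describe helper and the import path are illustrative assumptions, not part of the changeset.

```go
package example

import (
	"fmt"

	"polaris/pkg" // assumed import path for the package that declares Torrent
)

// describe shows a call site handling the new error returns from Name and Progress.
func describe(t pkg.Torrent) (string, error) {
	name, err := t.Name()
	if err != nil {
		return "", err
	}
	progress, err := t.Progress()
	if err != nil {
		return "", err
	}
	return fmt.Sprintf("%s: %d%%", name, progress), nil
}
```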
100
pkg/importlist/douban/douban.go
Normal file
@@ -0,0 +1,100 @@
package douban

import (
	"fmt"
	"net/http"
	"polaris/log"
	"polaris/pkg/importlist"
	"strconv"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

const ua = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36"

func ParseDoulist(doulistUrl string) (*importlist.Response, error) {
	if !strings.Contains(doulistUrl, "doulist") {
		return nil, fmt.Errorf("not doulist")
	}

	req, err := http.NewRequest("GET", doulistUrl, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("User-Agent", ua)

	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()
	if res.StatusCode != 200 {
		return nil, fmt.Errorf("status code error: %d %s", res.StatusCode, res.Status)
	}
	doc, err := goquery.NewDocumentFromReader(res.Body)
	if err != nil {
		return nil, err
	}
	doc.Find("div[class=doulist-item]").Each(func(i int, selection *goquery.Selection) {
		titleDiv := selection.Find("div[class=title]")
		link := titleDiv.Find("div>a")
		href, ok := link.Attr("href")
		if !ok {
			return
		}
		abstract := selection.Find("div[class=abstract]")

		lines := strings.Split(abstract.Text(), "\n")
		year := 0
		for _, l := range lines {
			if strings.Contains(l, "年份") {
				ppp := strings.Split(l, ":")
				if len(ppp) < 2 {
					continue
				} else {
					n := ppp[1]
					n1, err := strconv.Atoi(strings.TrimSpace(n))
					if err != nil {
						log.Errorf("convert year number %s to int error: %v", n, err)
						continue
					}
					year = n1
				}
			}
		}

		item := importlist.Item{
			Title: strings.TrimSpace(link.Text()),
			Year:  year,
		}
		_ = item
		println(link.Text(), href)
	})
	return nil, nil
}

func parseDetailPage(url string) (string, error) {
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return "", err
	}
	req.Header.Set("User-Agent", ua)

	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return "", err
	}
	defer res.Body.Close()
	if res.StatusCode != 200 {
		return "", fmt.Errorf("status code error: %d %s", res.StatusCode, res.Status)
	}
	doc, err := goquery.NewDocumentFromReader(res.Body)
	if err != nil {
		return "", err
	}
	_ = doc
	return "", nil
}
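A worked example (assuming the doulist abstract contains a line such as "年份: 2015") of the year-extraction loop above, isolated so it can be exercised without hitting douban.com; parseYear here is a hypothetical helper, not a function from the diff.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseYear mirrors the loop in ParseDoulist: find the "年份" (year) line and
// convert the text after the colon to an int.
func parseYear(abstract string) int {
	for _, l := range strings.Split(abstract, "\n") {
		if !strings.Contains(l, "年份") {
			continue
		}
		ppp := strings.Split(l, ":")
		if len(ppp) < 2 {
			continue
		}
		if n, err := strconv.Atoi(strings.TrimSpace(ppp[1])); err == nil {
			return n
		}
	}
	return 0
}

func main() {
	fmt.Println(parseYear("导演: 某某\n年份: 2015")) // 2015
}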
11
pkg/importlist/douban/douban_test.go
Normal file
@@ -0,0 +1,11 @@
package douban

import (
	"polaris/log"
	"testing"
)

func TestParseDoulist(t *testing.T) {
	r, err := ParseDoulist("https://www.douban.com/doulist/166422/")
	log.Info(r, err)
}
13
pkg/importlist/importlist.go
Normal file
@@ -0,0 +1,13 @@
package importlist

type Item struct {
	Title  string
	Year   int
	ImdbID string
	TvdbID string
	TmdbID string
}

type Response struct {
	Items []Item
}
96
pkg/importlist/plexwatchlist/plex.go
Normal file
@@ -0,0 +1,96 @@
package plexwatchlist

import (
	"encoding/xml"
	"io"
	"net/http"
	"polaris/pkg/importlist"
	"strings"

	"github.com/pkg/errors"
)

type Response struct {
	XMLName xml.Name `xml:"rss"`
	Text    string   `xml:",chardata"`
	Atom    string   `xml:"atom,attr"`
	Media   string   `xml:"media,attr"`
	Version string   `xml:"version,attr"`
	Channel struct {
		Text  string `xml:",chardata"`
		Title string `xml:"title"`
		Link  struct {
			Text string `xml:",chardata"`
			Href string `xml:"href,attr"`
			Rel  string `xml:"rel,attr"`
			Type string `xml:"type,attr"`
		} `xml:"link"`
		Description string `xml:"description"`
		Category    string `xml:"category"`
		Item        []struct {
			Text        string `xml:",chardata"`
			Title       string `xml:"title"`
			PubDate     string `xml:"pubDate"`
			Link        string `xml:"link"`
			Description string `xml:"description"`
			Category    string `xml:"category"`
			Credit      []struct {
				Text string `xml:",chardata"`
				Role string `xml:"role,attr"`
			} `xml:"credit"`
			Thumbnail struct {
				Text string `xml:",chardata"`
				URL  string `xml:"url,attr"`
			} `xml:"thumbnail"`
			Keywords string `xml:"keywords"`
			Rating   struct {
				Text   string `xml:",chardata"`
				Scheme string `xml:"scheme,attr"`
			} `xml:"rating"`
			Guid struct {
				Text        string `xml:",chardata"`
				IsPermaLink string `xml:"isPermaLink,attr"`
			} `xml:"guid"`
		} `xml:"item"`
	} `xml:"channel"`
}

func (r *Response) convert() *importlist.Response {
	res := &importlist.Response{}
	for _, im := range r.Channel.Item {
		item := importlist.Item{
			Title: im.Title,
		}
		id := strings.ToLower(im.Guid.Text)
		if strings.HasPrefix(id, "tvdb") {
			tvdbid := strings.TrimPrefix(id, "tvdb://")
			item.TvdbID = tvdbid
		} else if strings.HasPrefix(id, "imdb") {
			imdbid := strings.TrimPrefix(id, "imdb://")
			item.ImdbID = imdbid
		} else if strings.HasPrefix(id, "tmdb") {
			tmdbid := strings.TrimPrefix(id, "tmdb://")
			item.TmdbID = tmdbid
		}
		res.Items = append(res.Items, item)
	}
	return res
}

func ParsePlexWatchlist(url string) (*importlist.Response, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, errors.Wrap(err, "http get")
	}
	defer resp.Body.Close()
	data, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, errors.Wrap(err, "read data")
	}
	var rrr Response
	err = xml.Unmarshal(data, &rrr)
	if err != nil {
		return nil, errors.Wrap(err, "xml")
	}
	return rrr.convert(), nil
}
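A usage sketch (not from the diff); the RSS URL below is a placeholder — Plex generates a per-user watchlist RSS link, and the real URL has to come from the user's Plex account.

package main

import (
	"fmt"
	"log"

	"polaris/pkg/importlist/plexwatchlist"
)

func main() {
	// placeholder URL; substitute the watchlist RSS feed URL from the Plex UI
	res, err := plexwatchlist.ParsePlexWatchlist("https://rss.plex.tv/example-feed-id")
	if err != nil {
		log.Fatal(err)
	}
	for _, item := range res.Items {
		// each item carries whichever of ImdbID/TvdbID/TmdbID the feed's guid provided
		fmt.Println(item.Title, item.ImdbID, item.TvdbID, item.TmdbID)
	}
}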
23
pkg/metadata/doc.go
Normal file
@@ -0,0 +1,23 @@
package metadata

/*
tv name examples

Cowboy Cartel S01E02 XviD-AFG [eztv]
The.Bold.and.the.Beautiful.S37E219.XviD-AFG
As Husband As Wife 2024 S01E05-E06 1080p WEB-DL HEVC DDP 2Audios-QHstudIo
Twinkle Love 2024 S04 Complete 2160p WEB-DL HEVC AAC-QHstudIo
[HorribleSubs] One Punch Man S2 - 08 [720p].mkv
[Breeze] One Punch Man S01 S02 [1080p BD AV1][dual audio]
[HYSUB]ONE PUNCH MAN[S1+S2][BDrip][GB_MP4][1920X1080]
Cowboy Cartel S01E04 Photo Finish 1080p ATVP WEB-DL DDP5 1 Atmos H 264-FLUX [eztv]
Limitless Wrestling 2021 01 06 The Road Season 2 Episode 12 XviD-AFG [eztv]

[千夏字幕组][小市民系列_Shoushimin Series][第03话][1080p_HEVC][简繁内封][招募新人]
[OPFans楓雪動漫][ONE PIECE 海賊王][第1113話][周日版][1080p][MP4][簡體]
[桜都字幕组] 亦叶亦花 / Nanare Hananare [04][1080p][简体内嵌]
[ANi] 戰國妖狐 千魔混沌篇 - 16 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]
[ANi] 這是妳與我的最後戰場,或是開創世界的聖戰 第二季 - 04 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]
[桜都字幕组][一拳超人 第2季/One Punch Man 2nd Season][01-12 END][BIG5][720P]
一拳超人第二季One.Punch.Man.Season2.2160p.10Bit.HEVC.AAC.CHS&JPN
*/
72
pkg/metadata/movie.go
Normal file
@@ -0,0 +1,72 @@
package metadata

import (
	"fmt"
	"regexp"
	"strconv"
	"strings"
)

type MovieMetadata struct {
	Name       string
	Year       int
	Resolution string
	IsQingban  bool
}

func ParseMovie(name string) *MovieMetadata {
	name = strings.Join(strings.Fields(name), " ") //remove unnecessary spaces
	name = strings.ToLower(strings.TrimSpace(name))
	var meta = &MovieMetadata{}
	yearRe := regexp.MustCompile(`\(\d{4}\)`)
	yearMatches := yearRe.FindAllString(name, -1)
	var yearIndex = -1
	if len(yearMatches) > 0 {
		yearIndex = strings.Index(name, yearMatches[0])
		y := yearMatches[0][1 : len(yearMatches[0])-1]
		n, err := strconv.Atoi(y)
		if err != nil {
			panic(fmt.Sprintf("convert %s error: %v", y, err))
		}
		meta.Year = n
	} else {
		yearRe := regexp.MustCompile(`\d{4}`)
		yearMatches := yearRe.FindAllString(name, -1)
		if len(yearMatches) > 0 {
			n, err := strconv.Atoi(yearMatches[0])
			if err != nil {
				panic(fmt.Sprintf("convert %s error: %v", yearMatches[0], err))
			}
			meta.Year = n
		}
	}

	if yearIndex != -1 {
		meta.Name = name[:yearIndex]
	} else {
		meta.Name = name
	}
	resRe := regexp.MustCompile(`\d{3,4}p`)
	resMatches := resRe.FindAllString(name, -1)
	if len(resMatches) > 0 {
		meta.Resolution = resMatches[0]
	}
	meta.IsQingban = isQiangban(name)
	return meta
}

// https://en.wikipedia.org/wiki/Pirated_movie_release_types
func isQiangban(name string) bool {
	qiangbanFilter := []string{"CAMRip", "CAM-Rip", "CAM", "HDCAM", "TS", "TSRip", "HDTS", "TELESYNC", "PDVD", "PreDVDRip", "TC", "HDTC", "TELECINE", "WP", "WORKPRINT"}
	re := regexp.MustCompile(`\W`)
	name = re.ReplaceAllString(strings.ToLower(name), " ")
	fields := strings.Fields(name)
	for _, q := range qiangbanFilter {
		for _, f := range fields {
			if strings.EqualFold(q, f) {
				return true
			}
		}
	}
	return false
}
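A small sketch (not part of the diff) of how ParseMovie behaves on a typical release name, based only on the logic above; the title string is an arbitrary example.

package main

import (
	"fmt"

	"polaris/pkg/metadata"
)

func main() {
	m := metadata.ParseMovie("Oppenheimer (2023) 2160p WEB-DL HDCAM")
	// Name is the lower-cased text before the "(2023)" marker, Year comes from
	// the parentheses, Resolution from the "2160p" token, and IsQingban is true
	// because "HDCAM" matches the cam-rip filter list.
	fmt.Printf("%+v\n", m)
}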
309
pkg/metadata/tv.go
Normal file
@@ -0,0 +1,309 @@
package metadata

import (
	"fmt"
	"polaris/log"
	"polaris/pkg/utils"
	"regexp"
	"strconv"
	"strings"
)

type Metadata struct {
	NameEn       string
	NameCn       string
	Season       int
	Episode      int
	Resolution   string
	IsSeasonPack bool
}

func ParseTv(name string) *Metadata {
	name = strings.ToLower(name)
	name = strings.ReplaceAll(name, "\u200b", "") //remove unicode hidden character
	if utils.ContainsChineseChar(name) {
		return parseChineseName(name)
	}
	return parseEnglishName(name)
}

func parseEnglishName(name string) *Metadata {
	re := regexp.MustCompile(`[^\p{L}\w\s]`)
	name = re.ReplaceAllString(strings.ToLower(name), " ")
	newSplits := strings.Split(strings.TrimSpace(name), " ")

	seasonRe := regexp.MustCompile(`^s\d{1,2}`)
	resRe := regexp.MustCompile(`^\d{3,4}p`)
	episodeRe := regexp.MustCompile(`e\d{1,3}`)

	var seasonIndex = -1
	var episodeIndex = -1
	var resIndex = -1
	for i, p := range newSplits {
		p = strings.TrimSpace(p)
		if p == "" {
			continue
		}
		if seasonRe.MatchString(p) {
			//season part
			seasonIndex = i
		} else if resRe.MatchString(p) {
			resIndex = i
		}
		if i >= seasonIndex && episodeRe.MatchString(p) {
			episodeIndex = i
		}
	}

	meta := &Metadata{
		Season:  -1,
		Episode: -1,
	}
	if seasonIndex != -1 {
		//season exists
		ss := seasonRe.FindAllString(newSplits[seasonIndex], -1)
		if len(ss) != 0 {
			//season info
			ssNum := strings.TrimLeft(ss[0], "s")
			n, err := strconv.Atoi(ssNum)
			if err != nil {
				panic(fmt.Sprintf("convert %s error: %v", ssNum, err))
			}
			meta.Season = n
		}
	} else { //maybe like Season 1?
		seasonRe := regexp.MustCompile(`season \d{1,2}`)
		matches := seasonRe.FindAllString(name, -1)
		if len(matches) > 0 {
			for i, s := range newSplits {
				if s == "season" {
					seasonIndex = i
				}
			}
			numRe := regexp.MustCompile(`\d{1,2}`)
			seNum := numRe.FindAllString(matches[0], -1)[0]
			n, err := strconv.Atoi(seNum)
			if err != nil {
				panic(fmt.Sprintf("convert %s error: %v", seNum, err))
			}
			meta.Season = n
		}
	}
	if episodeIndex != -1 {
		ep := episodeRe.FindAllString(newSplits[episodeIndex], -1)
		if len(ep) > 0 {
			//episode info exists
			epNum := strings.TrimLeft(ep[0], "e")
			n, err := strconv.Atoi(epNum)
			if err != nil {
				panic(fmt.Sprintf("convert %s error: %v", epNum, err))
			}
			meta.Episode = n
		}
	} else { //no episode, maybe like One Punch Man S2 - 08 [1080p].mkv
		// numRe := regexp.MustCompile(`^\d{1,2}$`)
		// for i, p := range newSplits {
		// 	if numRe.MatchString(p) {
		// 		if i > 0 && strings.Contains(newSplits[i-1], "season") { //last word cannot be season
		// 			continue
		// 		}
		// 		if i < seasonIndex {
		// 			//episode number most likely should come after the season number
		// 			continue
		// 		}
		// 		//episodeIndex = i
		// 		n, err := strconv.Atoi(p)
		// 		if err != nil {
		// 			panic(fmt.Sprintf("convert %s error: %v", p, err))
		// 		}
		// 		meta.Episode = n
		// 	}
		// }
	}
	if resIndex != -1 {
		//resolution exists
		meta.Resolution = newSplits[resIndex]
	}
	if meta.Episode == -1 {
		meta.Episode = -1
		meta.IsSeasonPack = true
	}

	if seasonIndex > 0 {
		//name exists
		names := newSplits[0:seasonIndex]
		meta.NameEn = strings.TrimSpace(strings.Join(names, " "))
	} else {
		meta.NameEn = name
	}

	return meta
}

func parseChineseName(name string) *Metadata {
	var meta = parseEnglishName(name)
	if meta.Season != -1 && (meta.Episode != -1 || meta.IsSeasonPack) {
		return meta
	}
	meta = &Metadata{Season: 1}
	//season pack
	packRe := regexp.MustCompile(`(\d{1,2}-\d{1,2})|(全集)`)
	if packRe.MatchString(name) {
		meta.IsSeasonPack = true
	}
	//resolution
	resRe := regexp.MustCompile(`\d{3,4}p`)
	resMatches := resRe.FindAllString(name, -1)
	if len(resMatches) != 0 {
		meta.Resolution = resMatches[0]
	} else {
		if strings.Contains(name, "720") {
			meta.Resolution = "720p"
		} else if strings.Contains(name, "1080") {
			meta.Resolution = "1080p"
		}
	}

	//episode number
	re1 := regexp.MustCompile(`\[\d{1,3}\]`)
	episodeMatches1 := re1.FindAllString(name, -1)
	if len(episodeMatches1) > 0 { //[11] [1080p]
		epNum := strings.TrimRight(strings.TrimLeft(episodeMatches1[0], "["), "]")
		n, err := strconv.Atoi(epNum)
		if err != nil {
			panic(fmt.Sprintf("convert %s error: %v", epNum, err))
		}
		meta.Episode = n
	} else { //【第09話】
		re2 := regexp.MustCompile(`第\d{1,4}(话|話|集)`)
		episodeMatches1 := re2.FindAllString(name, -1)
		if len(episodeMatches1) > 0 {
			re := regexp.MustCompile(`\d{1,4}`)
			epNum := re.FindAllString(episodeMatches1[0], -1)[0]
			n, err := strconv.Atoi(epNum)
			if err != nil {
				panic(fmt.Sprintf("convert %s error: %v", epNum, err))
			}
			meta.Episode = n
		} else { //SHY 靦腆英雄 / Shy -05 ( CR 1920x1080 AVC AAC MKV)
			re3 := regexp.MustCompile(`[^\d\w]\d{1,2}[^\d\w]`)
			epNums := re3.FindAllString(name, -1)
			if len(epNums) > 0 {
				re3 := regexp.MustCompile(`\d{1,2}`)
				epNum := re3.FindAllString(epNums[0], -1)[0]
				n, err := strconv.Atoi(epNum)
				if err != nil {
					panic(fmt.Sprintf("convert %s error: %v", epNum, err))
				}
				meta.Episode = n
			}
		}
	}

	//season number
	seasonRe1 := regexp.MustCompile(`s\d{1,2}`)
	seasonMatches := seasonRe1.FindAllString(name, -1)
	if len(seasonMatches) > 0 {
		seNum := seasonMatches[0][1:]
		n, err := strconv.Atoi(seNum)
		if err != nil {
			panic(fmt.Sprintf("convert %s error: %v", seNum, err))
		}
		meta.Season = n
	} else {
		seasonRe1 := regexp.MustCompile(`season \d{1,2}`)
		seasonMatches := seasonRe1.FindAllString(name, -1)
		if len(seasonMatches) > 0 {
			re3 := regexp.MustCompile(`\d{1,2}`)
			seNum := re3.FindAllString(seasonMatches[0], -1)[0]
			n, err := strconv.Atoi(seNum)
			if err != nil {
				panic(fmt.Sprintf("convert %s error: %v", seNum, err))
			}
			meta.Season = n
		} else {
			seasonRe1 := regexp.MustCompile(`第.{1,2}季`)
			seasonMatches := seasonRe1.FindAllString(name, -1)
			if len(seasonMatches) > 0 {
				m1 := []rune(seasonMatches[0])
				seNum := m1[1 : len(m1)-1]
				n, err := strconv.Atoi(string(seNum))
				if err != nil {
					log.Warnf("parse season number %v error: %v, try to parse using chinese", seNum, err)
					n = chinese2Num[string(seNum)]
				}
				meta.Season = n
			}
		}
	}

	if meta.IsSeasonPack && meta.Episode != 0 {
		meta.Season = meta.Episode
		meta.Episode = -1
	}

	//tv name
	fields := strings.FieldsFunc(name, func(r rune) bool {
		return r == '[' || r == ']' || r == '【' || r == '】'
	})
	titleCn := ""
	title := ""
	for _, p := range fields { //find the longest matching segment, which is most likely the title
		if utils.ContainsChineseChar(p) && len([]rune(p)) > len([]rune(titleCn)) { //longest segment containing Chinese characters
			titleCn = p
		}
		if len([]rune(p)) > len([]rune(title)) { //longest segment overall
			title = p
		}
	}
	re := regexp.MustCompile(`[^\p{L}\w\s]`)
	title = re.ReplaceAllString(strings.TrimSpace(strings.ToLower(title)), "") //strip punctuation
	titleCn = re.ReplaceAllString(strings.TrimSpace(strings.ToLower(titleCn)), "")

	meta.NameCn = titleCn
	cnRe := regexp.MustCompile(`\p{Han}.*\p{Han}`)
	cnmatches := cnRe.FindAllString(titleCn, -1)

	//longest run of Chinese characters within titleCn
	if len(cnmatches) > 0 {
		for _, t := range cnmatches {
			if len([]rune(t)) > len([]rune(meta.NameCn)) {
				meta.NameCn = strings.ToLower(t)
			}
		}
	}

	//longest ASCII (Latin) substring within title
	enRe := regexp.MustCompile(`[[:ascii:]]*`)
	enM := enRe.FindAllString(title, -1)
	if len(enM) > 0 {
		for _, t := range enM {
			if len(t) > len(meta.NameEn) {
				meta.NameEn = strings.TrimSpace(strings.ToLower(t))
			}
		}
	}

	return meta
}

var chinese2Num = map[string]int{
	"一": 1,
	"二": 2,
	"三": 3,
	"四": 4,
	"五": 5,
	"六": 6,
	"七": 7,
	"八": 8,
	"九": 9,
}
147
pkg/metadata/tv_test.go
Normal file
@@ -0,0 +1,147 @@
|
||||
package metadata
|
||||
|
||||
import (
|
||||
"polaris/log"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func Test_ParseTV1(t *testing.T) {
|
||||
s1 := "Twinkle Love 2024 S04 Complete 2160p WEB-DL HEVC AAC-QHstudIo"
|
||||
m := ParseTv(s1)
|
||||
log.Infof("results: %+v", m)
|
||||
assert.Equal(t, m.Season, 4)
|
||||
assert.Equal(t, m.IsSeasonPack, true)
|
||||
assert.Equal(t, m.Resolution, "2160p")
|
||||
}
|
||||
|
||||
func Test_ParseTV2(t *testing.T) {
|
||||
s1 := "Cowboy Cartel S01E04 Photo Finish 1080p ATVP WEB-DL DDP5 1 Atmos H 264-FLUX [eztv] "
|
||||
m := ParseTv(s1)
|
||||
log.Infof("results: %+v", m)
|
||||
assert.Equal(t, m.Season, 1)
|
||||
assert.Equal(t, m.Episode, 4)
|
||||
assert.Equal(t, m.IsSeasonPack, false)
|
||||
assert.Equal(t, m.Resolution, "1080p")
|
||||
}
|
||||
|
||||
func Test_ParseTV3(t *testing.T) {
|
||||
s1 := "The.Bold.and.the.Beautiful.S37E219.XviD-AFG "
|
||||
m := ParseTv(s1)
|
||||
log.Infof("results: %+v", m)
|
||||
assert.Equal(t, m.Season, 37)
|
||||
assert.Equal(t, m.Episode, 219)
|
||||
assert.Equal(t, m.IsSeasonPack, false)
|
||||
//assert.Equal(t, m.Resolution, "1080p")
|
||||
}
|
||||
|
||||
func Test_ParseTV4(t *testing.T) {
|
||||
s1 := "Limitless Wrestling 2021 01 06 The Road Season 2 Episode 12 XviD-AFG [eztv] "
|
||||
m := ParseTv(s1)
|
||||
log.Infof("results: %+v", m)
|
||||
assert.Equal(t, m.Season, 2)
|
||||
//assert.Equal(t, m.Episode, 219)
|
||||
assert.Equal(t, m.IsSeasonPack, true)
|
||||
//assert.Equal(t, m.Resolution, "1080p")
|
||||
}
|
||||
|
||||
func Test_ParseTV5(t *testing.T) {
|
||||
s1 := "[Breeze] One Punch Man S01 S02 [1080p BD AV1][dual audio]"
|
||||
m := ParseTv(s1)
|
||||
log.Infof("results: %+v", m)
|
||||
assert.Equal(t, m.Season, 1)
|
||||
//assert.Equal(t, m.Episode, 219)
|
||||
assert.Equal(t, m.IsSeasonPack, true)
|
||||
//assert.Equal(t, m.Resolution, "1080p")
|
||||
}
|
||||
|
||||
func Test_ParseTV6(t *testing.T) {
|
||||
s1 := "[千夏字幕组][小市民系列_Shoushimin Series][第03话][1080p_HEVC][简繁内封][招募新人]"
|
||||
m := ParseTv(s1)
|
||||
log.Infof("results: %+v", m)
|
||||
assert.Equal(t, m.Season, 1)
|
||||
assert.Equal(t, m.Episode, 3)
|
||||
assert.Equal(t, m.IsSeasonPack, false)
|
||||
assert.Equal(t, m.Resolution, "1080p")
|
||||
}
|
||||
|
||||
func Test_ParseTV7(t *testing.T) {
|
||||
s1 := " [OPFans楓雪動漫][ONE PIECE 海賊王][第1113話][周日版][1080p][MP4][簡體]"
|
||||
m := ParseTv(s1)
|
||||
log.Infof("results: %+v", m)
|
||||
assert.Equal(t, m.Season, 1)
|
||||
assert.Equal(t, m.Episode, 1113)
|
||||
assert.Equal(t, m.IsSeasonPack, false)
|
||||
assert.Equal(t, m.Resolution, "1080p")
|
||||
}
|
||||
|
||||
func Test_ParseTV8(t *testing.T) {
|
||||
s1 := "[桜都字幕组] 亦叶亦花 / Nanare Hananare [04][1080p][简体内嵌] "
|
||||
m := ParseTv(s1)
|
||||
log.Infof("results: %+v", m)
|
||||
assert.Equal(t, m.Season, 1)
|
||||
assert.Equal(t, m.Episode, 4)
|
||||
assert.Equal(t, m.IsSeasonPack, false)
|
||||
assert.Equal(t, m.Resolution, "1080p")
|
||||
}
|
||||
|
||||
func Test_ParseTV9(t *testing.T) {
|
||||
s1 := "[ANi] 戰國妖狐 千魔混沌篇 - 16 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"
|
||||
m := ParseTv(s1)
|
||||
log.Infof("results: %+v", m)
|
||||
assert.Equal(t, m.Season, 1)
|
||||
assert.Equal(t, m.Episode, 16)
|
||||
assert.Equal(t, m.IsSeasonPack, false)
|
||||
assert.Equal(t, m.Resolution, "1080p")
|
||||
}
|
||||
|
||||
func Test_ParseTV10(t *testing.T) {
|
||||
s1 := " [桜都字幕组][一拳超人 第2季/One Punch Man 2nd Season][01-12 END][BIG5][720P]"
|
||||
m := ParseTv(s1)
|
||||
log.Infof("results: %+v", m)
|
||||
assert.Equal(t, 2, m.Season)
|
||||
//assert.Equal(t, 01, m.Episode)
|
||||
assert.Equal(t, true, m.IsSeasonPack)
|
||||
assert.Equal(t, "720p", m.Resolution)
|
||||
}
|
||||
|
||||
func Test_ParseTV11(t *testing.T) {
|
||||
s1 := " [ANi] 這是妳與我的最後戰場,或是開創世界的聖戰 第二季 - 04 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4] "
|
||||
m := ParseTv(s1)
|
||||
log.Infof("results: %+v", m)
|
||||
assert.Equal(t, 2, m.Season)
|
||||
assert.Equal(t, 4, m.Episode)
|
||||
assert.Equal(t, false, m.IsSeasonPack)
|
||||
assert.Equal(t, "1080p", m.Resolution)
|
||||
}
|
||||
|
||||
func Test_ParseTV12(t *testing.T) {
|
||||
s1 := " 牛仔Cowboy Cartel S02E04 Photo Finish 1080p ATVP WEB-DL DDP5 1 Atmos H 264-FLUX [eztv] "
|
||||
m := ParseTv(s1)
|
||||
log.Infof("results: %+v", m)
|
||||
assert.Equal(t, 2, m.Season)
|
||||
assert.Equal(t, 4, m.Episode)
|
||||
assert.Equal(t, false, m.IsSeasonPack)
|
||||
assert.Equal(t, "1080p", m.Resolution)
|
||||
}
|
||||
|
||||
func Test_ParseTV13(t *testing.T) {
|
||||
s1 := "House of Dragon 2024 1080p S02E08 Leaked HQCAM NOT COMPLETE English Audio x264 ESub BOTHD"
|
||||
m := ParseTv(s1)
|
||||
log.Infof("results: %+v", m)
|
||||
assert.Equal(t, 2, m.Season)
|
||||
assert.Equal(t, 8, m.Episode)
|
||||
assert.Equal(t, false, m.IsSeasonPack)
|
||||
assert.Equal(t, "1080p", m.Resolution)
|
||||
}
|
||||
|
||||
func Test_ParseTV14(t *testing.T) {
|
||||
s1 := "[GM-Team][国漫][斗破苍穹 第5季][Fights Break Sphere Ⅴ][2022][113][HEVC][GB][4K]"
|
||||
m := ParseTv(s1)
|
||||
log.Infof("results: %+v", m)
|
||||
assert.Equal(t, 5, m.Season)
|
||||
assert.Equal(t, 113, m.Episode)
|
||||
assert.Equal(t, false, m.IsSeasonPack)
|
||||
//assert.Equal(t, "720p", m.Resolution)
|
||||
}
|
||||
90
pkg/notifier/clients.go
Normal file
@@ -0,0 +1,90 @@
package notifier

import (
	"encoding/json"

	"github.com/nikoksr/notify/service/bark"
	"github.com/nikoksr/notify/service/dingding"
	po "github.com/nikoksr/notify/service/pushover"
	"github.com/nikoksr/notify/service/telegram"
	"github.com/pkg/errors"
)

type PushoverConfig struct {
	UserKey  string `json:"user_key"`
	GroupKey string `json:"group_key"`
	AppToken string `json:"app_token"`
}

func NewPushoverClient(s string) (NotificationClient, error) {
	var cfg PushoverConfig
	if err := json.Unmarshal([]byte(s), &cfg); err != nil {
		return nil, errors.Wrap(err, "json")
	}

	c := po.New(cfg.AppToken)
	if cfg.UserKey != "" {
		c.AddReceivers(cfg.UserKey)
	}
	if cfg.GroupKey != "" {
		c.AddReceivers(cfg.GroupKey)
	}

	return &Notifier{service: c}, nil
}

type DingTalkConfig struct {
	Token  string `json:"token"`
	Secret string `json:"secret"`
}

func NewDingTalkClient(s string) (NotificationClient, error) {
	var cfg DingTalkConfig
	if err := json.Unmarshal([]byte(s), &cfg); err != nil {
		return nil, errors.Wrap(err, "json")
	}

	svc := dingding.New(&dingding.Config{
		Token:  cfg.Token,
		Secret: cfg.Secret,
	})
	return &Notifier{service: svc}, nil
}

type TelegramConfig struct {
	Token  string `json:"token"`
	ChatID int64  `json:"chat_id"`
}

func NewTelegramClient(s string) (NotificationClient, error) {
	var cfg TelegramConfig
	if err := json.Unmarshal([]byte(s), &cfg); err != nil {
		return nil, errors.Wrap(err, "json")
	}

	svc, err := telegram.New(cfg.Token)
	if err != nil {
		panic(err)
	}
	svc.AddReceivers(cfg.ChatID)
	return &Notifier{service: svc}, nil
}

type BarkConfig struct {
	DeviceKey string `json:"device_key"`
	URL       string `json:"url"`
}

func NewbarkClient(s string) (NotificationClient, error) {
	var cfg BarkConfig
	if err := json.Unmarshal([]byte(s), &cfg); err != nil {
		return nil, errors.Wrap(err, "json")
	}
	url := cfg.URL
	if url == "" {
		url = bark.DefaultServerURL
	}
	b := bark.NewWithServers(cfg.DeviceKey, url)
	return &Notifier{service: b}, nil
}
37
pkg/notifier/doc.go
Normal file
@@ -0,0 +1,37 @@
package notifier

import (
	"context"
	"polaris/pkg/utils"

	"github.com/nikoksr/notify"
)

type HandlerFunc func(string) (NotificationClient, error)

type NotificationClient interface {
	SendMsg(msg string) error
}

type Notifier struct {
	service notify.Notifier
}

func (s *Notifier) SendMsg(msg string) error {
	notifier := notify.New()
	notifier.UseServices(s.service)
	return notifier.Send(context.TODO(), "Polaris", msg)
}

var handler = utils.Map[string, HandlerFunc]{}

func init() {
	handler.Store("pushover", NewPushoverClient)
	handler.Store("dingtalk", NewDingTalkClient)
	handler.Store("telegram", NewTelegramClient)
	handler.Store("bark", NewbarkClient)
}

func Gethandler(name string) (HandlerFunc, bool) {
	return handler.Load(name)
}
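A sketch (assuming the caller already has the provider name and its JSON settings string) of how the handler registry above is meant to be used; sendViaProvider and the placeholder settings are hypothetical, not part of the diff.

package main

import (
	"fmt"

	"polaris/pkg/notifier"
)

// look up the constructor by provider name, build a client from its JSON
// settings, then send the message through it
func sendViaProvider(name, jsonSettings, msg string) error {
	h, ok := notifier.Gethandler(name) // e.g. "bark", "telegram", "pushover", "dingtalk"
	if !ok {
		return fmt.Errorf("unknown notification provider: %s", name)
	}
	client, err := h(jsonSettings)
	if err != nil {
		return err
	}
	return client.SendMsg(msg)
}

func main() {
	// device_key is a placeholder; the JSON keys match BarkConfig above
	_ = sendViaProvider("bark", `{"device_key": "placeholder"}`, "hello from Polaris")
}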
8
pkg/notifier/message/message.go
Normal file
@@ -0,0 +1,8 @@
package message

const (
	BeginDownload      = "开始下载:%v"
	DownloadComplete   = "下载完成:%v"
	ProcessingComplete = "文件处理完成:%v"
	ProcessingFailed   = "文件处理失败:%v"
)
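For context (not part of the diff): these constants are Chinese format strings — "start download", "download complete", "file processing complete", "file processing failed" — each with a %v slot, so a caller would format them roughly like this (the media name is an arbitrary example):

package main

import (
	"fmt"

	"polaris/pkg/notifier/message"
)

func main() {
	// fills the %v placeholder with the item being downloaded
	msg := fmt.Sprintf(message.BeginDownload, "One Punch Man S02E08")
	fmt.Println(msg) // 开始下载:One Punch Man S02E08
}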
@@ -13,7 +13,10 @@ import (
|
||||
|
||||
type Storage interface {
|
||||
Move(src, dest string) error
|
||||
Copy(src, dest string) error
|
||||
ReadDir(dir string) ([]fs.FileInfo, error)
|
||||
ReadFile(string) ([]byte, error)
|
||||
WriteFile(string, []byte) error
|
||||
}
|
||||
|
||||
func NewLocalStorage(dir string) (*LocalStorage, error) {
|
||||
@@ -26,10 +29,20 @@ type LocalStorage struct {
|
||||
dir string
|
||||
}
|
||||
|
||||
func (l *LocalStorage) Move(src, dest string) error {
|
||||
targetDir := filepath.Join(l.dir, dest)
|
||||
os.MkdirAll(targetDir, 0655)
|
||||
err := filepath.Walk(src, func(path string, info fs.FileInfo, err error) error {
|
||||
func (l *LocalStorage) Copy(src, destDir string) error {
|
||||
os.MkdirAll(filepath.Join(l.dir, destDir), os.ModePerm)
|
||||
|
||||
targetBase := filepath.Join(l.dir, destDir, filepath.Base(src)) //文件的场景,要加上文件名, move filename ./dir/
|
||||
info, err := os.Stat(src)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "read source dir")
|
||||
}
|
||||
if info.IsDir() { //如果是路径,则只移动路径里面的文件,不管当前路径, 行为类似 move dirname/* target_dir/
|
||||
targetBase = filepath.Join(l.dir, destDir)
|
||||
}
|
||||
log.Debugf("local storage target base dir is: %v", targetBase)
|
||||
|
||||
err = filepath.Walk(src, func(path string, info fs.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -37,24 +50,28 @@ func (l *LocalStorage) Move(src, dest string) error {
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "relation between %s and %s", src, path)
|
||||
}
|
||||
destName := filepath.Join(targetDir, rel)
|
||||
destName := filepath.Join(targetBase, rel)
|
||||
|
||||
if info.IsDir() {
|
||||
os.Mkdir(destName, 0655)
|
||||
os.Mkdir(destName, os.ModePerm)
|
||||
} else { //is file
|
||||
if writer, err := os.Create(destName); err != nil {
|
||||
return errors.Wrapf(err, "create file %s", destName)
|
||||
} else {
|
||||
defer writer.Close()
|
||||
if f, err := os.OpenFile(path, os.O_RDONLY, 0666); err != nil {
|
||||
return errors.Wrapf(err, "read file %v", path)
|
||||
} else { //open success
|
||||
defer f.Close()
|
||||
_, err := io.Copy(writer, f)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "transmitting data error")
|
||||
if err := os.Link(path, destName); err != nil {
|
||||
log.Warnf("hard link file error: %v, will try copy file, source: %s, dest: %s", err, path, destName)
|
||||
if writer, err := os.OpenFile(destName, os.O_RDWR|os.O_CREATE|os.O_TRUNC, os.ModePerm); err != nil {
|
||||
return errors.Wrapf(err, "create file %s", destName)
|
||||
} else {
|
||||
defer writer.Close()
|
||||
if f, err := os.OpenFile(path, os.O_RDONLY, os.ModePerm); err != nil {
|
||||
return errors.Wrapf(err, "read file %v", path)
|
||||
} else { //open success
|
||||
defer f.Close()
|
||||
_, err := io.Copy(writer, f)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "transmitting data error")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
log.Infof("file copy complete: %v", destName)
|
||||
@@ -63,10 +80,26 @@ func (l *LocalStorage) Move(src, dest string) error {
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "move file error")
|
||||
}
|
||||
return os.RemoveAll(src)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (l *LocalStorage) Move(src, destDir string) error {
|
||||
if err := l.Copy(src, destDir); err != nil {
|
||||
return err
|
||||
}
|
||||
return os.RemoveAll(src)
|
||||
}
|
||||
|
||||
func (l *LocalStorage) ReadDir(dir string) ([]fs.FileInfo, error) {
|
||||
return ioutil.ReadDir(filepath.Join(l.dir, dir))
|
||||
}
|
||||
|
||||
func (l *LocalStorage) ReadFile(name string) ([]byte, error) {
|
||||
return os.ReadFile(filepath.Join(l.dir, name))
|
||||
}
|
||||
|
||||
func (l *LocalStorage) WriteFile(name string, data []byte) error {
|
||||
path := filepath.Join(l.dir, name)
|
||||
os.MkdirAll(filepath.Dir(path), os.ModePerm)
|
||||
return os.WriteFile(path, data, os.ModePerm)
|
||||
}
|
||||
|
||||
@@ -7,34 +7,42 @@ import (
|
||||
"path/filepath"
|
||||
"polaris/log"
|
||||
"polaris/pkg/gowebdav"
|
||||
"polaris/pkg/utils"
|
||||
|
||||
"github.com/gabriel-vasile/mimetype"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
type WebdavStorage struct {
|
||||
fs *gowebdav.Client
|
||||
dir string
|
||||
fs *gowebdav.Client
|
||||
dir string
|
||||
changeMediaHash bool
|
||||
}
|
||||
|
||||
func NewWebdavStorage(url, user, password, path string) (*WebdavStorage, error) {
|
||||
func NewWebdavStorage(url, user, password, path string, changeMediaHash bool) (*WebdavStorage, error) {
|
||||
c := gowebdav.NewClient(url, user, password)
|
||||
if err := c.Connect(); err != nil {
|
||||
return nil, errors.Wrap(err, "connect webdav")
|
||||
}
|
||||
return &WebdavStorage{
|
||||
fs: c,
|
||||
fs: c,
|
||||
dir: path,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (w *WebdavStorage) Move(local, remote string) error {
|
||||
|
||||
remoteBase := filepath.Join(w.dir,remote)
|
||||
func (w *WebdavStorage) Copy(local, remoteDir string) error {
|
||||
remoteBase := filepath.Join(w.dir, remoteDir, filepath.Base(local))
|
||||
info, err := os.Stat(local)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "read source dir")
|
||||
}
|
||||
if info.IsDir() { //如果是路径,则只移动路径里面的文件,不管当前路径, 行为类似 move dirname/* target_dir/
|
||||
remoteBase = filepath.Join(w.dir, remoteDir)
|
||||
}
|
||||
|
||||
//log.Infof("remove all content in %s", remoteBase)
|
||||
//w.fs.RemoveAll(remoteBase)
|
||||
err := filepath.Walk(local, func(path string, info fs.FileInfo, err error) error {
|
||||
err = filepath.Walk(local, func(path string, info fs.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "read file %v", path)
|
||||
}
|
||||
@@ -53,6 +61,11 @@ func (w *WebdavStorage) Move(local, remote string) error {
|
||||
// }
|
||||
|
||||
} else { //is file
|
||||
if w.changeMediaHash {
|
||||
if err := utils.ChangeFileHash(path); err != nil {
|
||||
log.Errorf("change file %v hash error: %v", path, err)
|
||||
}
|
||||
}
|
||||
if f, err := os.OpenFile(path, os.O_RDONLY, 0666); err != nil {
|
||||
return errors.Wrapf(err, "read file %v", path)
|
||||
} else { //open success
|
||||
@@ -66,7 +79,7 @@ func (w *WebdavStorage) Move(local, remote string) error {
|
||||
r.Header.Set("Content-Type", mtype.String())
|
||||
r.ContentLength = info.Size()
|
||||
}
|
||||
|
||||
|
||||
if err := w.fs.WriteStream(remoteName, f, 0666, callback); err != nil {
|
||||
return errors.Wrap(err, "transmitting data error")
|
||||
}
|
||||
@@ -78,9 +91,24 @@ func (w *WebdavStorage) Move(local, remote string) error {
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "move file error")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (w *WebdavStorage) Move(local, remoteDir string) error {
|
||||
if err := w.Copy(local, remoteDir); err != nil {
|
||||
return err
|
||||
}
|
||||
return os.RemoveAll(local)
|
||||
}
|
||||
|
||||
func (w *WebdavStorage) ReadDir(dir string) ([]fs.FileInfo, error) {
|
||||
return w.fs.ReadDir(filepath.Join(w.dir, dir))
|
||||
}
|
||||
|
||||
func (w *WebdavStorage) ReadFile(name string) ([]byte, error) {
|
||||
return w.fs.Read(filepath.Join(w.dir, name))
|
||||
}
|
||||
|
||||
func (w *WebdavStorage) WriteFile(name string, data []byte) error {
|
||||
return w.fs.Write(filepath.Join(w.dir, name), data, os.ModePerm)
|
||||
}
|
||||
|
||||
@@ -1,59 +1,83 @@
|
||||
package tmdb
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/url"
|
||||
"polaris/log"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
tmdb "github.com/cyruzin/golang-tmdb"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
type Client struct {
|
||||
apiKey string
|
||||
tmdbClient *tmdb.Client
|
||||
apiKey string
|
||||
tmdbClient *tmdb.Client
|
||||
enableAdultContent bool
|
||||
}
|
||||
|
||||
func NewClient(apiKey string) (*Client, error) {
|
||||
func NewClient(apiKey, proxyUrl string, enableAdultContent bool) (*Client, error) {
|
||||
|
||||
tmdbClient, err := tmdb.Init(apiKey)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "new tmdb client")
|
||||
}
|
||||
if proxyUrl != "" {
|
||||
//set proxy
|
||||
u, err := url.Parse(proxyUrl)
|
||||
if err != nil {
|
||||
log.Errorf("parse proxy %v error, skip: %v", proxyUrl, err)
|
||||
} else {
|
||||
tmdbClient.SetClientConfig(http.Client{
|
||||
Timeout: time.Second * 10,
|
||||
Transport: &http.Transport{
|
||||
Proxy: http.ProxyURL(u),
|
||||
MaxIdleConns: 10,
|
||||
IdleConnTimeout: 15 * time.Second,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return &Client{
|
||||
apiKey: apiKey,
|
||||
tmdbClient: tmdbClient,
|
||||
apiKey: apiKey,
|
||||
tmdbClient: tmdbClient,
|
||||
enableAdultContent: enableAdultContent,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (c *Client) GetTvDetails(id int, language string) (*tmdb.TVDetails, error) {
|
||||
d, err := c.tmdbClient.GetTVDetails(id, withLangOption(language))
|
||||
d, err := c.tmdbClient.GetTVDetails(id, withExternalIDs(withLangOption(language)))
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "get tv detail")
|
||||
}
|
||||
|
||||
log.Infof("tv id %d, language %s", id, language)
|
||||
if !episodeNameUseful(d.LastEpisodeToAir.Name) {
|
||||
log.Debug("should fetch english version")
|
||||
var detailEN *tmdb.TVDetails
|
||||
if language == "zh-CN" {
|
||||
detailEN, err = c.tmdbClient.GetTVDetails(id, withLangOption("en-US"))
|
||||
if language == "zh-CN" || language == "" {
|
||||
detailEN, err = c.tmdbClient.GetTVDetails(id, withExternalIDs(withLangOption("en-US")))
|
||||
if err != nil {
|
||||
return d, nil
|
||||
}
|
||||
|
||||
}
|
||||
if episodeNameUseful(detailEN.LastEpisodeToAir.Name) {
|
||||
d.LastEpisodeToAir.Name = detailEN.LastEpisodeToAir.Name
|
||||
d.LastEpisodeToAir.Overview = detailEN.LastEpisodeToAir.Overview
|
||||
d.NextEpisodeToAir.Name = detailEN.NextEpisodeToAir.Name
|
||||
d.NextEpisodeToAir.Overview = detailEN.NextEpisodeToAir.Overview
|
||||
if episodeNameUseful(detailEN.LastEpisodeToAir.Name) {
|
||||
d.LastEpisodeToAir.Name = detailEN.LastEpisodeToAir.Name
|
||||
d.LastEpisodeToAir.Overview = detailEN.LastEpisodeToAir.Overview
|
||||
d.NextEpisodeToAir.Name = detailEN.NextEpisodeToAir.Name
|
||||
d.NextEpisodeToAir.Overview = detailEN.NextEpisodeToAir.Overview
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return d, err
|
||||
}
|
||||
|
||||
func (c *Client) GetMovieDetails(id int, language string) (*tmdb.MovieDetails, error) {
|
||||
return c.tmdbClient.GetMovieDetails(id, withLangOption(language))
|
||||
return c.tmdbClient.GetMovieDetails(id, withExternalIDs(withLangOption(language)))
|
||||
}
|
||||
|
||||
func (c *Client) SearchTvShow(query string, lang string) (*tmdb.SearchTVShows, error) {
|
||||
@@ -92,6 +116,9 @@ func (c *Client) SearchMedia(query string, lang string, page int) (*SearchResult
|
||||
}
|
||||
options := withLangOption(lang)
|
||||
options["page"] = strconv.Itoa(page)
|
||||
if c.enableAdultContent {
|
||||
options["include_adult"] = "true"
|
||||
}
|
||||
res, err := c.tmdbClient.GetSearchMulti(query, options)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "query imdb")
|
||||
@@ -140,7 +167,7 @@ func (c *Client) GetEposideDetail(id, seasonNumber, eposideNumber int, language
|
||||
}
|
||||
if !episodeNameUseful(d.Name) {
|
||||
var detailEN *tmdb.TVEpisodeDetails
|
||||
if language == "zh-CN" {
|
||||
if language == "zh-CN" || language == "" {
|
||||
detailEN, err = c.tmdbClient.GetTVEpisodeDetails(id, seasonNumber, eposideNumber, withLangOption("en-US"))
|
||||
if err != nil {
|
||||
return d, nil
|
||||
@@ -150,7 +177,7 @@ func (c *Client) GetEposideDetail(id, seasonNumber, eposideNumber int, language
|
||||
return d, err
|
||||
}
|
||||
d.Name = detailEN.Name
|
||||
d.Overview = detailEN.Overview
|
||||
d.Overview = detailEN.Overview
|
||||
}
|
||||
|
||||
return d, err
|
||||
@@ -162,7 +189,7 @@ func (c *Client) GetSeasonDetails(id, seasonNumber int, language string) (*tmdb.
|
||||
return nil, err
|
||||
}
|
||||
var detailEN *tmdb.TVSeasonDetails
|
||||
if language == "zh-CN" {
|
||||
if language == "zh-CN" || language == "" {
|
||||
detailEN, err = c.tmdbClient.GetTVSeasonDetails(id, seasonNumber, withLangOption("en-US"))
|
||||
if err != nil {
|
||||
return detailCN, nil
|
||||
@@ -170,7 +197,7 @@ func (c *Client) GetSeasonDetails(id, seasonNumber int, language string) (*tmdb.
|
||||
}
|
||||
|
||||
for i, ep := range detailCN.Episodes {
|
||||
if episodeNameUseful(ep.Name){
|
||||
if !episodeNameUseful(ep.Name) && episodeNameUseful(detailEN.Episodes[i].Name) {
|
||||
detailCN.Episodes[i].Name = detailEN.Episodes[i].Name
|
||||
detailCN.Episodes[i].Overview = detailEN.Episodes[i].Overview
|
||||
}
|
||||
@@ -182,6 +209,22 @@ func (c *Client) GetTVAlternativeTitles(id int, language string) (*tmdb.TVAltern
|
||||
return c.tmdbClient.GetTVAlternativeTitles(id, withLangOption(language))
|
||||
}
|
||||
|
||||
func (c *Client) GetMovieAlternativeTitles(id int, language string) (*tmdb.MovieAlternativeTitles, error) {
|
||||
return c.tmdbClient.GetMovieAlternativeTitles(id, withLangOption(language))
|
||||
}
|
||||
|
||||
func (c *Client) GetByImdbId(imdbId string, lang string) (*tmdb.FindByID, error) {
|
||||
m := withLangOption(lang)
|
||||
m["external_source"] = "imdb_id"
|
||||
return c.tmdbClient.GetFindByID(imdbId, m)
|
||||
}
|
||||
|
||||
func (c *Client) GetByTvdbId(imdbId string, lang string) (*tmdb.FindByID, error) {
|
||||
m := withLangOption(lang)
|
||||
m["external_source"] = "tvdb_id"
|
||||
return c.tmdbClient.GetFindByID(imdbId, m)
|
||||
}
|
||||
|
||||
func wrapLanguage(lang string) string {
|
||||
if lang == "" {
|
||||
lang = "zh-CN"
|
||||
@@ -189,6 +232,11 @@ func wrapLanguage(lang string) string {
|
||||
return lang
|
||||
}
|
||||
|
||||
func withExternalIDs(m map[string]string) map[string]string {
|
||||
m["append_to_response"] = "external_ids"
|
||||
return m
|
||||
}
|
||||
|
||||
func withLangOption(language string) map[string]string {
|
||||
language = wrapLanguage(language)
|
||||
return map[string]string{
|
||||
@@ -196,7 +244,6 @@ func withLangOption(language string) map[string]string {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
func episodeNameUseful(name string) bool {
|
||||
return !strings.HasSuffix(name, "集") && !strings.HasPrefix(strings.ToLower(name), "episode")
|
||||
}
|
||||
}
|
||||
|
||||
8
pkg/torznab/cache.go
Normal file
@@ -0,0 +1,8 @@
package torznab

import (
	"polaris/pkg/cache"
	"time"
)

var cc = cache.NewCache[string, *Response](time.Minute * 30)
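An illustrative sketch of how a package-level cache like cc is typically consulted before re-querying an indexer; doSearch and searchWithCache are placeholders for whatever the torznab package actually does with the cache, not code from the diff.

package torznab

// doSearch stands in for the real indexer HTTP query; the caching pattern
// around it is the point of this sketch.
func doSearch(url string) (*Response, error) {
	return &Response{}, nil
}

// key by the full request URL, serve from cache for up to 30 minutes,
// otherwise fetch and remember the result
func searchWithCache(url string) (*Response, error) {
	if resp, ok := cc.Get(url); ok {
		return resp, nil // cache hit, skip the network round trip
	}
	resp, err := doSearch(url)
	if err != nil {
		return nil, err
	}
	cc.Set(url, resp)
	return resp, nil
}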