Compare commits

...

279 Commits

Author SHA1 Message Date
Simon Ding
0954632b65 AI intergration WIP 2025-01-07 10:27:18 +08:00
Simon Ding
03105a1989 chore: update deps 2024-12-22 13:45:11 +08:00
Simon Ding
e169172c68 doc: update 2024-12-13 14:34:34 +08:00
Simon Ding
937b035634 fix 2024-12-13 13:48:58 +08:00
Simon Ding
c639e11b90 fix: debian do not support arm v6 2024-12-13 13:44:32 +08:00
Simon Ding
f2ac688ed8 feat: release build flutter use github action 2024-12-13 13:33:39 +08:00
Simon Ding
369263a55c fix 2024-12-13 13:25:51 +08:00
Simon Ding
9d4848129f feat: build flutter use github action 2024-12-13 13:22:12 +08:00
Simon Ding
f7e82fa464 feat: search with alternative titles 2024-12-13 12:19:02 +08:00
Simon Ding
d2354ab33c feat: ditch html render and update flutter packages 2024-12-13 11:44:42 +08:00
Simon Ding
67014cfb16 feat: save media alternative titles 2024-12-13 11:35:32 +08:00
Simon
60edeacd0d Merge pull request #10 from simon-ding/dependabot/go_modules/go_modules-5a9c29dde4
chore(deps): bump golang.org/x/crypto from 0.27.0 to 0.31.0 in the go_modules group across 1 directory
2024-12-12 12:33:06 +08:00
dependabot[bot]
4c77cf5798 chore(deps): bump golang.org/x/crypto
Bumps the go_modules group with 1 update in the / directory: [golang.org/x/crypto](https://github.com/golang/crypto).


Updates `golang.org/x/crypto` from 0.27.0 to 0.31.0
- [Commits](https://github.com/golang/crypto/compare/v0.27.0...v0.31.0)

---
updated-dependencies:
- dependency-name: golang.org/x/crypto
  dependency-type: direct:production
  dependency-group: go_modules
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-12-12 00:29:52 +00:00
Simon Ding
3cf48d1f8e fix: remove files 2024-12-11 21:25:28 +08:00
Simon Ding
6d127c6d00 feat: add option to delete storage media files 2024-12-11 21:09:00 +08:00
Simon Ding
22f76e3f57 fix: monitor new episode 2024-12-11 20:27:51 +08:00
Simon Ding
e947396c04 fix: cache 2024-12-11 20:11:33 +08:00
Simon Ding
1020190c01 feat: cache download status number 2024-12-11 20:04:37 +08:00
Simon Ding
7c05acd1cf feat: in favor of gridview.builder for better performance 2024-12-11 14:44:41 +08:00
Simon Ding
76a9183b52 refactor: reduce default poster image size to w500 2024-12-11 12:24:16 +08:00
Simon Ding
6698d368c3 chore: updates 2024-12-06 11:06:50 +08:00
Simon Ding
acb627d011 feat: add arm v7 2024-11-26 18:50:40 +08:00
Simon Ding
7c64d964e8 ui: cancel timer before calling 2024-11-21 10:01:52 +08:00
Simon Ding
bec3b04705 ui: add macos client 2024-11-21 09:45:44 +08:00
Simon Ding
990da92b75 chore: change env name 2024-11-20 19:21:25 +08:00
Simon Ding
ee14cc63b8 chore: updates 2024-11-20 19:20:14 +08:00
Simon Ding
8df7b8665b chore: add sub ext 2024-11-20 16:25:35 +08:00
Simon Ding
ea90e014b1 feat: remove default internal size limiter 2024-11-20 15:34:57 +08:00
Simon Ding
6372c5c6e6 chore: update error msg 2024-11-20 15:06:26 +08:00
Simon Ding
7b6dba1afe feat: only accept video files and subtitles of known formats 2024-11-20 12:03:56 +08:00
Simon Ding
c833f6fab6 feat: complete size limiter feature 2024-11-19 23:54:27 +08:00
Simon Ding
b4c2002ad1 feat: apply global size limiter 2024-11-19 19:51:06 +08:00
Simon Ding
b2a9f1f83b refactor: size limiter 2024-11-19 19:24:43 +08:00
Simon Ding
b69881d26b WIP: size limiter 2024-11-19 18:22:40 +08:00
Simon Ding
be07e457d0 chore: update doc 2024-11-18 10:45:33 +08:00
Simon Ding
2cdd6e3740 doc: update 2024-11-18 00:28:10 +08:00
Simon Ding
fa2968f01a chore: remove log 2024-11-18 00:10:16 +08:00
Simon Ding
36f24a7e04 feat: alist upload as task 2024-11-17 23:46:31 +08:00
Simon Ding
ecc7465028 feat: use path escape 2024-11-17 23:44:46 +08:00
Simon Ding
3af4ac795e fix: alist upload 2024-11-17 23:40:31 +08:00
Simon Ding
af2a30405c fix 2024-11-17 22:10:24 +08:00
Simon Ding
ba3f6de852 feat: add upload progress and fix panic 2024-11-17 21:57:14 +08:00
Simon Ding
7d5ce8ba97 feat: support alist as a storage 2024-11-17 21:21:21 +08:00
Simon Ding
b136b9167f ui: change ui on add settings 2024-11-17 14:00:02 +08:00
Simon Ding
f0f3281428 feat: improve name parsing 2024-11-16 14:15:45 +08:00
Simon Ding
196ba6635f feat: start download after save to db 2024-11-16 10:27:49 +08:00
Simon Ding
b61b7f082e fix: season pack download 2024-11-16 10:19:46 +08:00
Simon Ding
105b296ba2 ui: submit search will refresh data 2024-11-15 20:15:12 +08:00
Simon Ding
c4d153f15b fix 2024-11-15 17:58:24 +08:00
Simon Ding
d2619120da fix: season pack download more than once 2024-11-15 15:59:32 +08:00
Simon Ding
fbfee65a50 feat: support for torrent with multi episodes 2024-11-15 15:43:07 +08:00
Simon Ding
c433ccaa0e fix: episode match 2024-11-15 13:12:20 +08:00
Simon Ding
58428405b0 fix: episode match 2024-11-15 12:48:12 +08:00
Simon Ding
45cd94f65b feat: parse multi episode like S01E01-S01E21 2024-11-15 12:38:10 +08:00
Simon Ding
53cbca3101 fix: name empty 2024-11-15 12:05:47 +08:00
Simon Ding
576956e271 fix: season position 2024-11-15 11:57:57 +08:00
Simon Ding
31d20b4f36 fix 2024-11-15 11:56:09 +08:00
Simon Ding
d026dc4eec feat: ability to parse multi episode 2024-11-15 11:44:19 +08:00
Simon Ding
e472d67c79 ui: update desc 2024-11-10 20:20:26 +08:00
Simon Ding
2165a8c533 WIP: init wizard 2024-11-10 15:09:16 +08:00
Simon Ding
0c3b5a6907 feat: create timer only after success 2024-11-10 13:47:50 +08:00
Simon Ding
aaa006a322 WIP: init wizard 2024-11-10 13:28:21 +08:00
Simon Ding
a83f860624 feat: no result consider ok 2024-11-09 20:31:14 +08:00
Simon Ding
b0c325bc4b feat: match reource name using tmdb api 2024-11-09 20:12:28 +08:00
Simon Ding
a0431df1ee feat: not query unaired episodes 2024-11-05 19:17:46 +08:00
Simon Ding
7b02eeac51 WIP: upload with progress 2024-11-05 18:54:40 +08:00
Simon Ding
66a307f202 feat: change timeout 2024-11-05 18:44:32 +08:00
Simon Ding
ae611943c3 doc: update 2024-11-05 13:23:37 +08:00
Simon Ding
4fd11540cd update 2024-11-05 13:16:28 +08:00
Simon Ding
587a28127b ui: improve search error display 2024-11-05 13:00:23 +08:00
Simon Ding
05ae58030c ui: improve error 2024-11-05 12:54:52 +08:00
Simon Ding
f1c4e306f4 ui: improve error readablity 2024-11-05 12:43:17 +08:00
Simon Ding
949b6e5188 ui: remove main selectionArea 2024-11-05 11:51:08 +08:00
Simon Ding
0d4b453d0a feat: prowlarr enable to disable 2024-11-05 10:52:14 +08:00
Simon Ding
bce4d93ab1 feat: add prowlarr enable button 2024-11-04 23:48:52 +08:00
Simon Ding
36b72e6461 fix: text selectable 2024-11-04 18:37:44 +08:00
Simon Ding
62417727f9 ui: revert searchbar 2024-11-04 18:13:02 +08:00
Simon Ding
03f72b9d86 ui: change appbar layout 2024-11-04 18:02:52 +08:00
Simon Ding
c17cf750e5 feat: better prowlarr support 2024-11-04 15:10:56 +08:00
Simon Ding
b176253fc4 feat: add log and defer task loading 2024-11-04 12:04:28 +08:00
Simon Ding
1e2d8b8520 fix: add default behavior 2024-11-04 12:03:32 +08:00
Simon Ding
3739f2c960 fix: page order 2024-11-04 11:30:48 +08:00
Simon Ding
bb6da47efb fix: use StatefulShellRoute to fix ui rerendering 2024-11-04 11:24:27 +08:00
Simon Ding
c28373bde1 ui: update deps 2024-11-01 22:23:21 +08:00
Simon Ding
8ce7045466 fix: return null 2024-11-01 22:05:06 +08:00
Simon Ding
0b1bd8226d fix: add validator 2024-11-01 22:00:18 +08:00
Simon Ding
e67413cec2 feat: add refresh button & parse dialog 2024-11-01 21:53:38 +08:00
Simon Ding
2da02fa706 ui: add filter option 2024-11-01 18:14:32 +08:00
Simon Ding
bc50dd888a ui: change icon 2024-10-27 21:26:53 +08:00
Simon Ding
0305c0709d ui: add donate button 2024-10-27 21:19:02 +08:00
Simon Ding
5e9e85206e doc: update 2024-10-27 20:12:46 +08:00
Simon Ding
3babb9f5c7 doc: update prowlarr setting 2024-10-27 20:11:04 +08:00
Simon Ding
c9928f10ce revert: to html renderer 2024-10-21 09:24:35 +08:00
Simon Ding
a2da1e7479 feat: use canvas render 2024-10-21 08:59:51 +08:00
Simon Ding
038f643ce3 ui: upgrade dependences 2024-10-21 08:46:10 +08:00
Simon Ding
cf2fa90d5d ui: change prowlarr setting text 2024-10-20 22:28:12 +08:00
Simon Ding
677923ea8a fix: match priority 2024-10-20 16:36:34 +08:00
Simon Ding
1192d00fe0 feat: change icon 2024-10-20 16:24:28 +08:00
Simon Ding
b317636a8a feat: support prowlarr connection 2024-10-20 16:21:58 +08:00
Simon Ding
0e6465593b update 2024-10-20 13:37:33 +08:00
Simon
2cb8a5b6fb Update README.md 2024-10-20 13:20:13 +08:00
Simon Ding
8b7bd1f6c6 fix: default size limiter 2024-10-19 21:57:23 +08:00
Simon Ding
1a9ba8a599 feat: add defaukt min size, 80M for tv, 200M for movie 2024-10-19 21:20:19 +08:00
Simon Ding
709d9ea4eb feat: send msg when added to import list 2024-10-16 15:21:47 +08:00
Simon Ding
ed692b5aae update wechat 2024-10-14 16:05:12 +08:00
Simon Ding
f80ee71018 ui: update 2024-10-11 14:52:11 +08:00
Simon Ding
98fae05e16 feat: add qbit category 2024-10-11 13:20:03 +08:00
Simon Ding
f2faefa837 fix: qbit torrent name mismatch file name 2024-10-11 11:39:41 +08:00
Simon Ding
b281c6febb fix: set locale 2024-10-10 18:46:21 +08:00
Simon Ding
09862a2844 fix: name 2024-10-10 18:24:38 +08:00
Simon Ding
f77f565146 fix: add user 2024-10-10 18:16:34 +08:00
Simon Ding
befd5c6a8a feat: disable cgo in docker 2024-10-10 17:46:41 +08:00
Simon Ding
8376fef376 feat: remove umask 2024-10-10 17:42:25 +08:00
Simon Ding
9b641fb593 doc: add unraid install 2024-10-10 17:41:11 +08:00
Simon Ding
9b0fb42043 doc: update 2024-10-10 17:30:55 +08:00
Simon Ding
bf2aeaa2d0 fix: permission 2024-10-10 16:55:03 +08:00
Simon Ding
c52204619d feat: dokcer image support PUID PGID 2024-10-10 16:47:13 +08:00
Simon Ding
485a580661 feat: ascii name should match words 2024-10-10 13:16:44 +08:00
Simon Ding
a22100b2fc feat: reduce time consuming 2024-10-10 10:29:47 +08:00
Simon Ding
1728690860 refactor: change method to restore tasks for better maintains 2024-10-10 09:57:17 +08:00
Simon Ding
e0d0ab80b6 feat: clean cache upon indexer setting changes 2024-10-10 09:21:33 +08:00
Simon Ding
19f21ddd6e feat: self calculate required torrent hash 2024-10-10 09:01:51 +08:00
Simon Ding
37ad1391db fix: ci 2024-10-10 08:44:09 +08:00
Simon Ding
5daeca0bd9 feat: api to add torrent hash to blacklist 2024-10-10 00:49:32 +08:00
Simon Ding
7e4d907ef6 feat: save torrent link to history 2024-10-09 23:56:04 +08:00
Simon Ding
6527f843d8 update 2024-10-04 21:17:33 +08:00
Simon Ding
f5ca53f7d7 update: required 2024-10-04 11:39:58 +08:00
Simon Ding
7461918a6c fix: qbit progress 2024-10-04 11:36:12 +08:00
Simon Ding
3af5f96cb0 feat: support download client priority 2024-10-04 11:12:06 +08:00
Simon Ding
7dfa4eafc4 feat: support reload qbit tasks 2024-10-04 10:45:31 +08:00
Simon Ding
579b010d13 fix: go mod tidy 2024-10-04 10:32:19 +08:00
Simon Ding
c42cbb5e5d feat: complete qbittorrent support 2024-10-04 10:31:49 +08:00
Simon Ding
6a5c105f8c WIP: qbittorrent support 2024-10-04 01:22:27 +08:00
Simon Ding
e8067f96f1 feat: allow set any media qulity 2024-10-03 22:19:16 +08:00
Simon Ding
84a0197776 refactor: name testing 2024-09-29 18:43:29 +08:00
Simon Ding
f9556ec2d2 feat: check movie folder upon added 2024-09-29 18:35:20 +08:00
Simon Ding
98d14befa9 fix: remove episodes no media id 2024-09-29 16:16:05 +08:00
Simon Ding
6fcc569bf2 feat: clean dangling episodes 2024-09-29 15:45:31 +08:00
Simon Ding
672e7f914d feat: desc ordering 2024-09-29 15:33:56 +08:00
Simon Ding
20bdcdbcde fix: validate 2024-09-29 14:46:06 +08:00
Simon Ding
577a6cee1e fix: add flutter build 2024-09-29 14:37:58 +08:00
Simon Ding
4186d7d97f update go version 2024-09-29 14:33:21 +08:00
Simon Ding
5d726dbcf1 feat: add goreleaser 2024-09-29 14:27:01 +08:00
Simon Ding
ce25c090f5 feat: open url at startup 2024-09-29 13:55:16 +08:00
Simon Ding
2683c5dbf2 fix: tv name parse 2024-09-27 21:58:34 +08:00
Simon Ding
b717885270 feat: change status color 2024-09-27 15:19:45 +08:00
Simon Ding
4e457e99b9 fix: context 2024-09-27 15:03:00 +08:00
Simon Ding
ecfe31ea45 fix: tv date not exists and folder name suggestion 2024-09-26 09:45:36 +08:00
Simon Ding
89104785d7 fix: monitor lastest season 2024-09-23 23:12:06 +08:00
Simon Ding
f4ccc69b50 feat: trim white space 2024-09-23 22:44:15 +08:00
Simon Ding
40c6e2df5c fix: panic when torrent not exist 2024-09-21 21:47:26 +08:00
Simon Ding
37dfb0fe94 feat: add check 2024-09-21 21:23:47 +08:00
Simon Ding
9968f9f225 feat: add ability to change folder naming convention 2024-09-20 20:03:05 +08:00
Simon Ding
f5c977224b fix 2024-09-20 14:27:49 +08:00
Simon Ding
8af3ffccd3 fix: double downloading status problem 2024-09-20 11:40:22 +08:00
Simon Ding
c535dfd714 feat: do not use lowercase 2024-09-20 11:11:27 +08:00
Simon Ding
f696b78260 feat: default monitor lastest season all episodes 2024-09-20 11:03:01 +08:00
Simon Ding
4f3e3e399d feat: transmission use hash instead of id 2024-09-20 10:45:10 +08:00
Simon Ding
e4e3c9a851 fix: suggestted name when name_cn is in english 2024-09-07 13:45:27 +08:00
Simon Ding
bf608f933d feat: ui to manaul trigger cron jobs 2024-09-07 13:18:12 +08:00
Simon Ding
5923fc73e1 fix: new import list display 2024-09-04 16:51:49 +08:00
Simon Ding
c2d9ccfd4c chore: update ui details 2024-09-04 16:20:33 +08:00
Simon Ding
5d4429bf7c chore: update watchlist 2024-09-04 15:55:15 +08:00
Simon Ding
e4c111ac2a fix: cron jobs 2024-09-04 13:18:14 +08:00
Simon Ding
3e5e20e933 fix: naming 2024-09-03 10:04:56 +08:00
Simon Ding
ba1be8f279 feat: cron trigger api and fix import lists 2024-09-03 09:46:23 +08:00
Simon Ding
361556228b feat: add import list & calendar 2024-09-02 23:47:19 +08:00
Simon Ding
ca414a73ff fix: logout 2024-09-02 14:07:47 +08:00
Simon Ding
32b595e116 fix: change token name and same site lax rule 2024-09-02 13:59:36 +08:00
Simon Ding
b12bbd2ad9 fix: http auth 2024-09-02 13:45:49 +08:00
Simon Ding
60110f4ca6 feat: add db extra fields 2024-08-24 21:03:08 +08:00
Simon Ding
b7ca02429c chore: jav name suggest 2024-08-19 17:45:34 +08:00
Simon Ding
ff63084014 feat: better jav search support 2024-08-19 17:39:37 +08:00
Simon Ding
821d6859ff feat: option to enable adult content 2024-08-19 16:39:34 +08:00
Simon Ding
10e6e99990 update readme 2024-08-16 11:49:23 +08:00
Simon Ding
23a5997814 fix: episode status 2024-08-13 11:27:54 +08:00
Simon Ding
b487c81865 feat: cache errored request 2024-08-13 11:05:46 +08:00
Simon Ding
32914344d1 fix: remove seeding torrent status not right 2024-08-13 10:57:04 +08:00
Simon Ding
644c9ed228 feat: marshal indented 2024-08-13 10:41:40 +08:00
Simon Ding
d3ad80380f feat: nfo support frontend 2024-08-13 10:17:46 +08:00
Simon Ding
19c6308a81 feat: nfo support backend 2024-08-13 10:05:38 +08:00
Simon Ding
7017f32fe3 feat: nfo support backend 2024-08-13 10:02:34 +08:00
Simon Ding
02a23f13f9 fix: mkdir 2024-08-13 00:11:27 +08:00
Simon Ding
cc211a89a4 fix: dir not exist 2024-08-12 23:06:51 +08:00
Simon Ding
4800e6c79d chore: update 2024-08-12 22:57:21 +08:00
Simon Ding
b5f0b28c61 fix 2024-08-12 22:44:36 +08:00
Simon Ding
081338df24 feat: code refactor and support season pack write .plexmatch file 2024-08-12 22:19:57 +08:00
Simon Ding
9632ca45b3 fix: match logic 2024-08-12 20:59:31 +08:00
Simon Ding
b948bff497 chore: if imdbid match no need check name 2024-08-12 19:02:43 +08:00
Simon Ding
29383cf75c feat: should match imdbid if present 2024-08-12 18:47:38 +08:00
Simon Ding
57ec0b9eb9 fix: download series all 2024-08-12 18:13:34 +08:00
Simon Ding
0cce4ffee0 add button to appbar 2024-08-12 14:53:23 +08:00
Simon Ding
5c01c45068 feat: fliter audios 2024-08-12 14:23:20 +08:00
Simon Ding
712bf84c90 feat: add app icon 2024-08-12 14:23:09 +08:00
Simon Ding
fdb63a8459 fix 2024-08-12 10:28:28 +08:00
Simon Ding
990d9dab08 update image 2024-08-12 10:27:20 +08:00
Simon Ding
da863588e4 feat: show snakebar 2024-08-12 10:23:28 +08:00
Simon Ding
09ff67fef7 feat: download per media feature 2024-08-12 10:16:36 +08:00
Simon Ding
3c37948798 ui: change icon 2024-08-11 23:42:43 +08:00
Simon Ding
6fd39d818c feat: better seeding status 2024-08-11 23:09:34 +08:00
Simon Ding
a0e211c328 fix: activity 2024-08-11 22:44:52 +08:00
Simon Ding
27d8b1672a feat: show seed as active 2024-08-11 22:40:38 +08:00
Simon Ding
349e394e8e chore: updates 2024-08-11 22:06:54 +08:00
Simon Ding
620f085ca5 add log 2024-08-11 20:54:12 +08:00
Simon Ding
5b70badb50 feat: add seed ratio display 2024-08-11 20:41:26 +08:00
Simon Ding
5c6ac2c430 fix: movie target dir 2024-08-11 20:19:51 +08:00
Simon Ding
365cfddf8f change icon 2024-08-11 19:20:23 +08:00
Simon Ding
6c26812b92 feat: filter resources that is qiangban 2024-08-11 19:18:27 +08:00
Simon Ding
0057a75a95 feat: find season pack first 2024-08-11 18:06:50 +08:00
Simon Ding
f110f257d4 code refactor and add season pack size limit 2024-08-11 17:40:01 +08:00
Simon Ding
93e8e78591 ui: improve external link display 2024-08-10 17:55:56 +08:00
Simon Ding
9ff12cd86b fix: movie year match 2024-08-10 17:06:33 +08:00
Simon Ding
fd2f4b140f refactor: download api 2024-08-10 16:46:49 +08:00
Simon Ding
4607af6982 feat: search original name 2024-08-10 16:46:19 +08:00
Simon Ding
984bebcfe0 ui: fine tune 2024-08-10 15:23:08 +08:00
Simon Ding
d31abd59ad chore: ui improvement 2024-08-10 15:05:18 +08:00
Simon Ding
e0ad71291c fix width 2024-08-10 11:23:17 +08:00
Simon Ding
8ecc9393cf fix: width 126->120 2024-08-10 11:16:17 +08:00
Simon Ding
b62e0e9bfd feat: small screen 2024-08-10 11:06:29 +08:00
Simon Ding
1391f55f44 feat: small screen 2024-08-10 10:52:48 +08:00
Simon Ding
0c709ee517 feat: detail card fit small screen 2024-08-10 10:45:18 +08:00
Simon Ding
806d821388 feat: better support for small screen 2024-08-10 10:35:57 +08:00
Simon Ding
829043bf28 fix: naming suggestion 2024-08-09 20:40:38 +08:00
Simon Ding
66ab418054 feat: remove name extras characters 2024-08-09 19:47:58 +08:00
Simon Ding
5fe40cc64b feat: add size to activity 2024-08-09 19:00:40 +08:00
Simon Ding
8f6f26f00e refactor: activity list 2024-08-08 19:23:23 +08:00
Simon Ding
ee0bee2b06 fix: formatting 2024-08-08 14:20:20 +08:00
Simon Ding
1bb16a8a66 feat: imdbid support 2024-08-08 14:10:26 +08:00
Simon Ding
d746032114 fix: result ordering 2024-08-08 13:40:07 +08:00
Simon Ding
b34e39889c feat: ui improvement 2024-08-08 10:56:03 +08:00
Simon Ding
64e98647a8 update go.mod 2024-08-08 00:56:49 +08:00
Simon Ding
f91c91e0b1 chore: main page ui update 2024-08-07 23:36:41 +08:00
Simon Ding
f1aaa06d05 chore: update new flutter version 2024-08-07 23:13:36 +08:00
Simon Ding
e8a38aa6f8 chore: ui update 2024-08-07 22:55:24 +08:00
Simon Ding
7e88533ea2 chore: update storage display 2024-08-07 14:12:47 +08:00
Simon Ding
05698f4047 fix: size limiter 2024-08-07 13:37:39 +08:00
Simon Ding
1daad0c236 fix size limiter 2024-08-07 13:27:41 +08:00
Simon Ding
86c8163f9c feat: default select first storage 2024-08-07 13:22:55 +08:00
Simon Ding
78ab8cc8e6 feat: add size display 2024-08-07 13:06:37 +08:00
Simon Ding
1390277b43 feat: second confirmation on deletion 2024-08-07 12:48:15 +08:00
Simon Ding
1aa3dca2c6 update 2024-08-07 11:20:21 +08:00
Simon Ding
f48b3c657e feat: change cache implementation 2024-08-07 11:07:10 +08:00
Simon Ding
d8d570f1b2 feat: change db 2024-08-07 10:46:30 +08:00
Simon Ding
bd385d4f85 feat: add simple cache, due to jackett poor performance 2024-08-07 10:42:12 +08:00
Simon Ding
466596345d feat: edit media details 2024-08-06 23:00:56 +08:00
Simon Ding
8ab33f3d54 ui refactor 2024-08-04 10:54:47 +08:00
Simon Ding
4d3b26135c fix: ui alignment 2024-08-04 10:22:47 +08:00
Simon Ding
56d5cdb2bf refactor ui resource list 2024-08-03 23:05:17 +08:00
Simon Ding
6f80da779b fix: download client id 2024-08-03 17:12:20 +08:00
Simon Ding
5fef156052 fix: indexid 2024-08-03 17:10:49 +08:00
Simon Ding
eab3a6ca2b try fix seed ratio 2024-08-03 16:47:58 +08:00
Simon Ding
ffa5c37c4c fix: name matching 2024-08-03 15:03:47 +08:00
Simon Ding
241e30152b feat: file size limiter 2024-08-03 12:31:53 +08:00
Simon Ding
16216fcc4f feat: change single episode monitoring status 2024-08-03 10:46:52 +08:00
Simon Ding
578b6a9d78 feat: proxy only affects tmdb 2024-08-03 09:54:23 +08:00
Simon Ding
f4da80c845 add testcases & add parse condition 2024-08-02 21:04:34 +08:00
Simon Ding
5a9acd3e6e ui: improve tv display 2024-08-02 19:10:53 +08:00
Simon Ding
8bfa8f84b9 fix monitor 2024-08-02 14:56:14 +08:00
Simon Ding
5b0b2ce5b0 fix: status 2024-08-02 14:39:01 +08:00
Simon Ding
b24c1a1501 ui: add monitored field 2024-08-02 14:06:44 +08:00
Simon Ding
aa320c6dcb fix: monitored 2024-08-02 13:06:38 +08:00
Simon Ding
5132714247 feat: change method to monitor episodes 2024-08-02 12:52:54 +08:00
Simon Ding
3aeecac4fb updates 2024-08-02 12:19:53 +08:00
Simon Ding
7f8c613a65 fix number formats 2024-08-02 11:14:21 +08:00
Simon Ding
c787d71fbd code refactor 2024-08-02 10:08:26 +08:00
Simon Ding
c28e16805e fix: empty list 2024-08-01 20:34:23 +08:00
Simon Ding
fc3d3878bc feat: disable indexer 2024-08-01 20:12:42 +08:00
Simon Ding
e26e86a63f feat: implement seed ratio check logic 2024-08-01 19:52:40 +08:00
Simon Ding
408ff163ef feat: improve support for pt 2024-08-01 19:12:14 +08:00
Simon Ding
35d299b60c feat: improve indexer setting 2024-08-01 17:36:40 +08:00
Simon Ding
6e002b1198 fix: add defaults 2024-08-01 13:05:05 +08:00
Simon Ding
7508a264a6 chore: ci update 2024-08-01 09:42:37 +08:00
Simon Ding
0022c9dad5 fix: umask 2024-08-01 09:39:38 +08:00
Simon Ding
654d8b50b4 chore: add more screenshot 2024-08-01 09:23:13 +08:00
Simon Ding
97ede5d9c9 feat: add badges 2024-08-01 00:01:55 +08:00
Simon Ding
4803567818 fix: remove attestations 2024-07-31 23:43:07 +08:00
Simon Ding
4e0014cb3f fix: sha256 tags in repo 2024-07-31 23:41:58 +08:00
Simon Ding
c256d46d5c test ci 2024-07-31 23:11:55 +08:00
Simon Ding
b765f16ea6 chore: updates 2024-07-31 20:59:40 +08:00
212 changed files with 21931 additions and 3422 deletions

View File

@@ -17,11 +17,6 @@ jobs:
build-and-push-image:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
attestations: write
id-token: write
steps:
- uses: actions/checkout@v4
@@ -34,6 +29,19 @@ jobs:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GHCR_TOKEN }}
- name: Set up Flutter
uses: subosito/flutter-action@v2
with:
channel: stable
flutter-version: 3
- name: Build Web
run: |
cd ui
flutter pub get
flutter build web --no-web-resources-cdn
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
@@ -51,9 +59,3 @@ jobs:
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- name: Generate artifact attestation
uses: actions/attest-build-provenance@v1
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true

50
.github/workflows/goreleaser.yml vendored Normal file
View File

@@ -0,0 +1,50 @@
name: goreleaser
on:
workflow_dispatch:
push:
tags:
- 'v*'
permissions:
contents: write
jobs:
goreleaser:
runs-on: ubuntu-latest
steps:
-
name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Flutter
uses: subosito/flutter-action@v2
with:
channel: stable
flutter-version: 3
- name: Build Web
run: |
cd ui
flutter pub get
flutter build web --no-web-resources-cdn --web-renderer html
-
name: Set up Go
uses: actions/setup-go@v5
with:
go-version: '>=1.23.0'
check-latest: true
-
name: Run GoReleaser
uses: goreleaser/goreleaser-action@v6
with:
# either 'goreleaser' (default) or 'goreleaser-pro'
distribution: goreleaser
# 'latest', 'nightly', or a semver
version: '~> v2'
args: release --clean --skip=validate
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Your GoReleaser Pro key, if you are using the 'goreleaser-pro' distribution
# GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}

View File

@@ -37,12 +37,25 @@ jobs:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GHCR_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- name: Set up Flutter
uses: subosito/flutter-action@v2
with:
channel: stable
flutter-version: 3
- name: Build Web
run: |
cd ui
flutter pub get
flutter build web --no-web-resources-cdn
- name: Build and push
id: push
uses: docker/build-push-action@v6
@@ -50,9 +63,7 @@ jobs:
context: .
file: Dockerfile
push: true
platforms: |
linux/amd64
linux/arm64
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/s390x,linux/ppc64le
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
@@ -61,4 +72,4 @@ jobs:
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
push-to-registry: false

2
.gitignore vendored
View File

@@ -30,3 +30,5 @@ ui/dist/
# Go workspace file
go.work
go.work.sum
dist/

51
.goreleaser.yaml Normal file
View File

@@ -0,0 +1,51 @@
# This is an example .goreleaser.yml file with some sensible defaults.
# Make sure to check the documentation at https://goreleaser.com
# The lines below are called `modelines`. See `:help modeline`
# Feel free to remove those if you don't want/need to use them.
# yaml-language-server: $schema=https://goreleaser.com/static/schema.json
# vim: set ts=2 sw=2 tw=0 fo=cnqoj
version: 2
before:
hooks:
# You may remove this if you don't use go modules.
- go mod tidy
# you may remove this if you don't need go generate
#- go generate ./...
builds:
- env:
- CGO_ENABLED=0
goos:
- linux
- windows
- darwin
- freebsd
main: ./cmd
goarch:
- amd64
- arm64
archives:
- format: tar.gz
# this name template makes the OS and Arch compatible with the results of `uname`.
name_template: >-
{{ .ProjectName }}_
{{- title .Os }}_
{{- if eq .Arch "amd64" }}x86_64
{{- else if eq .Arch "386" }}i386
{{- else }}{{ .Arch }}{{ end }}
{{- if .Arm }}v{{ .Arm }}{{ end }}
# use zip for windows archives
format_overrides:
- goos: windows
format: zip
changelog:
sort: asc
filters:
exclude:
- "^docs:"
- "^test:"

View File

@@ -1,12 +1,4 @@
FROM instrumentisto/flutter:3 AS flutter
WORKDIR /app
COPY ./ui/pubspec.yaml ./ui/pubspec.lock ./
RUN flutter pub get
COPY ./ui/ ./
RUN flutter build web --no-web-resources-cdn --web-renderer html
# build stage: use golang as the base image
FROM golang:1.22 as builder
FROM golang:1.23 as builder
# enable go modules
ENV GO111MODULE=on \
@@ -20,21 +12,24 @@ RUN go mod download
COPY . .
COPY --from=flutter /app/build/web ./ui/build/web/
# set the target OS etc. and run go build
RUN CGO_ENABLED=1 go build -o polaris -ldflags="-X polaris/db.Version=$(git describe --tags --long)" ./cmd/
RUN CGO_ENABLED=0 go build -o polaris -ldflags="-X polaris/db.Version=$(git describe --tags --long)" ./cmd/
FROM debian:stable-slim
ENV TZ="Asia/Shanghai" GIN_MODE=release
WORKDIR /app
RUN apt-get update && apt-get -y install ca-certificates
RUN apt-get update && apt-get -y install ca-certificates tzdata gosu tini locales && ln -sf /usr/share/zoneinfo/${TZ} /etc/localtime \
&& echo "${TZ}" > /etc/timezone && apt-get clean && sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen
ENV TZ="Asia/Shanghai" GIN_MODE=release PUID=0 PGID=0 UMASK=0
ENV LANG=en_US.UTF-8 LANGUAGE=en_US:en LC_ALL=en_US.UTF-8
# copy all files from the previous stage's build output
COPY --from=builder /app/polaris .
COPY --from=builder /app/entrypoint.sh .
RUN chmod +x /app/entrypoint.sh
VOLUME /app/data
EXPOSE 8080
#USER 1000:1000
ENTRYPOINT ["./polaris"]
ENTRYPOINT ["tini","./entrypoint.sh"]

View File

@@ -1,42 +1,86 @@
# polaris
Polaris is a TV series and movie tracking tool. Once configured, it downloads the matching resources as soon as an episode or movie is released. Local storage and WebDAV are supported.
![main_page](./doc/assets/main_page.png)
![detail_page](./doc/assets/detail_page.png)
<h1 align="center">Polaris</h1>
<div align="center">
![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/simon-ding/polaris/go.yml)
![GitHub Release](https://img.shields.io/github/v/release/simon-ding/polaris)
![GitHub Repo stars](https://img.shields.io/github/stars/simon-ding/polaris)
![GitHub go.mod Go version](https://img.shields.io/github/go-mod/go-version/simon-ding/polaris)
**Polaris is a TV series and movie tracking and downloading tool. It matches US shows, anime, and Japanese dramas well, and supports multiple storage backends: WebDAV, alist, and local storage.**
</div>
Discussion group: https://t.me/+8R2nzrlSs2JhMDgx
## Features
- [x] Automatic TV series tracking and downloading
- [x] Automatic movie tracking and downloading
- [x] WebDAV storage support; pair it with [alist](https://github.com/alist-org/alist) or Aliyun Drive for more
- [x] Event notification push; Pushover and Bark are currently supported, with more to come
- [x] Backend proxy support
- [x] User authentication
- [x] plex scraping support
- [x] and more...
## Usage
## Quick start
To use this program, see the [Quick Start](./doc/quick_start.md) guide
## How it works
## Features
This program does not provide any video resources itself; all resources come from the BT/PT sites connected through jackett/prowlarr.
1. The program calls the jackett/prowlarr API to search for resources and matches them to the corresponding episodes
2. Matched resources are sent to the download client
3. Finished downloads are moved into the corresponding path
- [x] Automatic TV series tracking and downloading
- [x] Automatic movie tracking and downloading
- [x] Local, WebDAV, and [alist](https://github.com/alist-org/alist) storage support; alist storage supports instant upload
- [x] Event notification push; Pushover and Bark are currently supported, with more to come
- [x] TMDB proxy support
- [x] User authentication
- [x] plex scraping support
- [x] NFO metadata file support
- [x] BT/PT support
- [x] qbittorrent/transmission download client support
- [x] Plex watchlist import: items marked in Plex are automatically imported into Polaris
- [x] and more...
## Compared with sonarr/radarr
## Supported platforms
- linux/amd64
- linux/arm64
- linux/arm/v7
- linux/386
- linux/s390x
- linux/ppc64le
## Todos
- [ ] More notification clients
- [ ] More third-party watchlist import sources
- [ ] Mobile client
## Screenshots
![main_page](./doc/assets/main_page.png)
![detail_page](./doc/assets/detail_page.png)
![anime](./doc/assets/anime_match.png)
## How it works
Polaris itself provides no resources. For it to work you also need a download client (transmission) and an indexer client (jackett) installed alongside it.
Polaris queries the relevant BT/PT sites through the indexer client and hands the results to the download client, then waits for the download to finish. Finished downloads are organized into the corresponding folders.
![](./doc/assets/yuanli.png)
<!-- ## Compared with sonarr/radarr
* Better Chinese-language support
* Good support for anime and Japanese dramas; with Chinese trackers it can match most resources
* WebDAV backend storage support: pair it with alist or Aliyun Drive to upload downloads to the cloud in real time, so you can watch away from home without relying on your home bandwidth, or build a light NAS where downloading runs locally and data lives in the cloud drive
* Backend written in Go, which uses fewer resources than .NET
* One program covers both movies and TV series, so there is no need to install two
* Of course sonarr/radarr are also excellent open-source projects, and Polaris is not yet as feature-rich as they are
* Of course sonarr/radarr are also excellent open-source projects, and Polaris is not yet as feature-rich as they are -->
## Stargazers over time
[![Stargazers over time](https://starchart.cc/simon-ding/polaris.svg?variant=adaptive)](https://starchart.cc/simon-ding/polaris)
-------------

View File

@@ -4,19 +4,25 @@ import (
"polaris/db"
"polaris/log"
"polaris/server"
"syscall"
)
func main() {
log.Infof("------------------- Starting Polaris ---------------------")
syscall.Umask(0000) //max permission 0777
//utils.MaxPermission()
dbClient, err := db.Open()
if err != nil {
log.Panicf("init db error: %v", err)
}
// go func() {
// time.Sleep(2 * time.Second)
// if err := utils.OpenURL("http://127.0.0.1:8080"); err != nil {
// log.Errorf("open url error: %v", err)
// }
// }()
s := server.NewServer(dbClient)
if err := s.Serve(); err != nil {
log.Errorf("server start error: %v", err)

View File

@@ -1,29 +1,44 @@
package db
import "polaris/ent/media"
var Version = "undefined"
const (
SettingTmdbApiKey = "tmdb_api_key"
SettingLanguage = "language"
SettingJacketUrl = "jacket_url"
SettingJacketApiKey = "jacket_api_key"
SettingDownloadDir = "download_dir"
SettingLogLevel = "log_level"
SettingProxy = "proxy"
SettingPlexMatchEnabled = "plexmatch_enabled"
SettingTmdbApiKey = "tmdb_api_key"
SettingLanguage = "language"
SettingJacketUrl = "jacket_url"
SettingJacketApiKey = "jacket_api_key"
SettingDownloadDir = "download_dir"
SettingLogLevel = "log_level"
SettingProxy = "proxy"
SettingPlexMatchEnabled = "plexmatch_enabled"
SettingNfoSupportEnabled = "nfo_support_enabled"
SettingAllowQiangban = "filter_qiangban"
SettingEnableTmdbAdultContent = "tmdb_adult_content"
SettingTvNamingFormat = "tv_naming_format"
SettingMovieNamingFormat = "movie_naming_format"
SettingProwlarrInfo = "prowlarr_info"
SettingTvSizeLimiter = "tv_size_limiter"
SettingMovieSizeLimiter = "movie_size_limiter"
SettingAcceptedVideoFormats = "accepted_video_formats"
SettingAcceptedSubtitleFormats = "accepted_subtitle_formats"
SettingAIConfig = "ai_config"
)
const (
SettingAuthEnabled = "auth_enbled"
SettingUsername = "auth_username"
SettingPassword = "auth_password"
SettingUsername = "auth_username"
SettingPassword = "auth_password"
)
const (
IndexerTorznabImpl = "torznab"
DataPath = "./data"
ImgPath = DataPath + "/img"
LogPath = DataPath + "/logs"
DataPath = "./data"
ImgPath = DataPath + "/img"
LogPath = DataPath + "/logs"
)
const (
@@ -31,6 +46,62 @@ const (
LanguageCN = "zh-CN"
)
const DefaultNamingFormat = "{{.NameCN}} {{.NameEN}} {{if .Year}} ({{.Year}}) {{end}}"
// https://en.wikipedia.org/wiki/Video_file_format
var defaultAcceptedVideoFormats = []string{
".webm", ".mkv", ".flv", ".vob", ".ogv", ".ogg", ".drc", ".mng", ".avi", ".mts", ".m2ts", ".ts",
".mov", ".qt", ".wmv", ".yuv", ".rm", ".rmvb", ".viv", ".amv", ".mp4", ".m4p", ".m4v",
".mpg", ".mp2", ".mpeg", ".mpe", ".mpv", ".m2v", ".m4v",
".svi", ".3gp", ".3g2", ".nsv",
}
var defaultAcceptedSubtitleFormats = []string{
".ass", ".srt", ".vtt", ".webvtt", ".sub", ".idx",
}
type NamingInfo struct {
NameCN string
NameEN string
Year string
TmdbID int
}
type ResolutionType string
const JwtSerectKey = "jwt_secrect_key"
const JwtSerectKey = "jwt_secrect_key"
type MediaSizeLimiter struct {
P720p SizeLimiter `json:"720p"`
P1080 SizeLimiter `json:"1080p"`
P2160 SizeLimiter `json:"2160p"`
}
func (m *MediaSizeLimiter) GetLimiter(r media.Resolution) SizeLimiter {
if r == media.Resolution1080p {
return m.P1080
} else if r == media.Resolution720p {
return m.P720p
} else if r == media.Resolution2160p {
return m.P2160
}
return SizeLimiter{}
}
type SizeLimiter struct {
MaxSIze int64 `json:"max_size"`
MinSize int64 `json:"min_size"`
PreferSIze int64 `json:"prefer_size"`
}
type ProwlarrSetting struct {
Disabled bool `json:"disabled"`
ApiKey string `json:"api_key"`
URL string `json:"url"`
}
type AIConfig struct {
Enabled bool `json:"enabled"`
GeminiApiKey string `json:"gemini_api_key"`
GeminiModelName string `json:"gemini_model_name"`
}
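
The `SizeLimiter` / `MediaSizeLimiter` types above are persisted as JSON strings under the `tv_size_limiter` and `movie_size_limiter` setting keys (see `GetSizeLimiter`/`SetSizeLimiter` in the `db/db.go` diff below). A minimal, self-contained sketch of that round trip, with the structs mirrored locally and hypothetical size values:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Local mirrors of the SizeLimiter / MediaSizeLimiter types added in db/setting.go,
// reproduced here only so the snippet compiles on its own.
type SizeLimiter struct {
	MaxSIze    int64 `json:"max_size"`
	MinSize    int64 `json:"min_size"`
	PreferSIze int64 `json:"prefer_size"`
}

type MediaSizeLimiter struct {
	P720p SizeLimiter `json:"720p"`
	P1080 SizeLimiter `json:"1080p"`
	P2160 SizeLimiter `json:"2160p"`
}

func main() {
	// Hypothetical limits in bytes; the real values come from the settings UI.
	tv := MediaSizeLimiter{
		P1080: SizeLimiter{MinSize: 80 << 20, MaxSIze: 8 << 30, PreferSIze: 2 << 30},
	}
	// SetSizeLimiter marshals the limiter and stores it under the
	// tv_size_limiter / movie_size_limiter setting key; GetSizeLimiter reverses this.
	data, err := json.Marshal(tv)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(data))
}
```

The stored JSON is what the settings UI edits; `GetLimiter` then picks the entry matching the media's resolution.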

328
db/db.go
View File

@@ -9,8 +9,10 @@ import (
"polaris/ent/downloadclients"
"polaris/ent/episode"
"polaris/ent/history"
"polaris/ent/importlist"
"polaris/ent/indexers"
"polaris/ent/media"
"polaris/ent/schema"
"polaris/ent/settings"
"polaris/ent/storage"
"polaris/log"
@@ -20,7 +22,8 @@ import (
"entgo.io/ent/dialect"
"entgo.io/ent/dialect/sql"
_ "github.com/mattn/go-sqlite3"
_ "github.com/ncruces/go-sqlite3/driver"
_ "github.com/ncruces/go-sqlite3/embed"
"github.com/pkg/errors"
)
@@ -63,9 +66,13 @@ func (c *Client) init() {
log.Infof("set default log level")
c.SetSetting(SettingLogLevel, "info")
}
if tr := c.GetTransmission(); tr == nil {
if tr := c.GetAllDonloadClients(); len(tr) == 0 {
log.Warnf("no download client, set default download client")
c.SaveTransmission("transmission", "http://transmission:9091", "", "")
c.SaveDownloader(&ent.DownloadClients{
Name: "transmission",
Implementation: downloadclients.ImplementationTransmission,
URL: "http://transmission:9091",
})
}
}
@@ -87,8 +94,8 @@ func (c *Client) generateDefaultLocalStorage() error {
return c.AddStorage(&StorageInfo{
Name: "local",
Implementation: "local",
TvPath: "/data/tv/",
MoviePath: "/data/movies/",
TvPath: "/data/tv/",
MoviePath: "/data/movies/",
Default: true,
})
}
@@ -117,7 +124,7 @@ func (c *Client) GetLanguage() string {
lang := c.GetSetting(SettingLanguage)
log.Infof("get application language: %s", lang)
if lang == "" {
return "zh-CN"
return LanguageCN
}
return lang
}
@@ -137,6 +144,7 @@ func (c *Client) AddMediaWatchlist(m *ent.Media, episodes []int) (*ent.Media, er
}
r, err := c.ent.Media.Create().
SetTmdbID(m.TmdbID).
SetImdbID(m.ImdbID).
SetStorageID(m.StorageID).
SetOverview(m.Overview).
SetNameCn(m.NameCn).
@@ -147,6 +155,9 @@ func (c *Client) AddMediaWatchlist(m *ent.Media, episodes []int) (*ent.Media, er
SetResolution(m.Resolution).
SetTargetDir(m.TargetDir).
SetDownloadHistoryEpisodes(m.DownloadHistoryEpisodes).
SetLimiter(m.Limiter).
SetExtras(m.Extras).
SetAlternativeTitles(m.AlternativeTitles).
AddEpisodeIDs(episodes...).
Save(context.TODO())
return r, err
@@ -154,7 +165,7 @@ func (c *Client) AddMediaWatchlist(m *ent.Media, episodes []int) (*ent.Media, er
}
func (c *Client) GetMediaWatchlist(mediaType media.MediaType) []*ent.Media {
list, err := c.ent.Media.Query().Where(media.MediaTypeEQ(mediaType)).All(context.TODO())
list, err := c.ent.Media.Query().Where(media.MediaTypeEQ(mediaType)).Order(ent.Desc(media.FieldID)).All(context.TODO())
if err != nil {
log.Infof("query wtach list error: %v", err)
return nil
@@ -213,7 +224,10 @@ func (c *Client) DeleteMedia(id int) error {
return err
}
_, err = c.ent.Media.Delete().Where(media.ID(id)).Exec(context.TODO())
return err
if err != nil {
return err
}
return c.CleanAllDanglingEpisodes()
}
func (c *Client) SaveEposideDetail(d *ent.Episode) (int, error) {
@@ -222,6 +236,7 @@ func (c *Client) SaveEposideDetail(d *ent.Episode) (int, error) {
SetSeasonNumber(d.SeasonNumber).
SetEpisodeNumber(d.EpisodeNumber).
SetOverview(d.Overview).
SetMonitored(d.Monitored).
SetTitle(d.Title).Save(context.TODO())
if err != nil {
return 0, errors.Wrap(err, "save episode")
@@ -237,6 +252,7 @@ func (c *Client) SaveEposideDetail2(d *ent.Episode) (int, error) {
SetMediaID(d.MediaID).
SetStatus(d.Status).
SetOverview(d.Overview).
SetMonitored(d.Monitored).
SetTitle(d.Title).Save(context.TODO())
return ep.ID, err
@@ -247,19 +263,22 @@ type TorznabSetting struct {
ApiKey string `json:"api_key"`
}
func (c *Client) SaveTorznabInfo(name string, setting TorznabSetting) error {
data, err := json.Marshal(setting)
if err != nil {
return errors.Wrap(err, "marshal json")
func (c *Client) SaveIndexer(in *ent.Indexers) error {
if in.ID != 0 {
//update setting
return c.ent.Indexers.Update().Where(indexers.ID(in.ID)).SetName(in.Name).SetImplementation(in.Implementation).
SetPriority(in.Priority).SetSettings(in.Settings).SetSeedRatio(in.SeedRatio).SetDisabled(in.Disabled).Exec(context.Background())
}
count := c.ent.Indexers.Query().Where(indexers.Name(name)).CountX(context.TODO())
//create new one
count := c.ent.Indexers.Query().Where(indexers.Name(in.Name)).CountX(context.TODO())
if count > 0 {
c.ent.Indexers.Update().Where(indexers.Name(name)).SetSettings(string(data)).Save(context.TODO())
return err
return fmt.Errorf("name already esxits: %v", in.Name)
}
_, err = c.ent.Indexers.Create().
SetName(name).SetImplementation(IndexerTorznabImpl).SetPriority(1).SetSettings(string(data)).Save(context.TODO())
_, err := c.ent.Indexers.Create().
SetName(in.Name).SetImplementation(in.Implementation).SetPriority(in.Priority).SetSettings(in.Settings).SetSeedRatio(in.SeedRatio).
SetDisabled(in.Disabled).Save(context.TODO())
if err != nil {
return errors.Wrap(err, "save db")
}
@@ -271,9 +290,22 @@ func (c *Client) DeleteTorznab(id int) {
c.ent.Indexers.Delete().Where(indexers.ID(id)).Exec(context.TODO())
}
func (c *Client) GetIndexer(id int) (*TorznabInfo, error) {
res, err := c.ent.Indexers.Query().Where(indexers.ID(id)).First(context.TODO())
if err != nil {
return nil, err
}
var ss TorznabSetting
err = json.Unmarshal([]byte(res.Settings), &ss)
if err != nil {
return nil, fmt.Errorf("unmarshal torznab %s error: %v", res.Name, err)
}
return &TorznabInfo{Indexers: res, TorznabSetting: ss}, nil
}
type TorznabInfo struct {
ID int `json:"id"`
Name string `json:"name"`
*ent.Indexers
TorznabSetting
}
@@ -289,38 +321,28 @@ func (c *Client) GetAllTorznabInfo() []*TorznabInfo {
continue
}
l = append(l, &TorznabInfo{
ID: r.ID,
Name: r.Name,
Indexers: r,
TorznabSetting: ss,
})
}
return l
}
func (c *Client) SaveTransmission(name, url, user, password string) error {
count := c.ent.DownloadClients.Query().Where(downloadclients.Name(name)).CountX(context.TODO())
func (c *Client) SaveDownloader(downloader *ent.DownloadClients) error {
count := c.ent.DownloadClients.Query().Where(downloadclients.Name(downloader.Name)).CountX(context.TODO())
if count != 0 {
err := c.ent.DownloadClients.Update().Where(downloadclients.Name(name)).
SetURL(url).SetUser(user).SetPassword(password).Exec(context.TODO())
err := c.ent.DownloadClients.Update().Where(downloadclients.Name(downloader.Name)).SetImplementation(downloader.Implementation).
SetURL(downloader.URL).SetUser(downloader.User).SetPassword(downloader.Password).SetPriority1(downloader.Priority1).Exec(context.TODO())
return err
}
_, err := c.ent.DownloadClients.Create().SetEnable(true).SetImplementation("transmission").
SetName(name).SetURL(url).SetUser(user).SetPassword(password).Save(context.TODO())
_, err := c.ent.DownloadClients.Create().SetEnable(true).SetImplementation(downloader.Implementation).
SetName(downloader.Name).SetURL(downloader.URL).SetUser(downloader.User).SetPriority1(downloader.Priority1).SetPassword(downloader.Password).Save(context.TODO())
return err
}
func (c *Client) GetTransmission() *ent.DownloadClients {
dc, err := c.ent.DownloadClients.Query().Where(downloadclients.Implementation("transmission")).First(context.TODO())
if err != nil {
log.Errorf("no transmission client found: %v", err)
return nil
}
return dc
}
func (c *Client) GetAllDonloadClients() []*ent.DownloadClients {
cc, err := c.ent.DownloadClients.Query().All(context.TODO())
cc, err := c.ent.DownloadClients.Query().Order(ent.Asc(downloadclients.FieldPriority1)).All(context.TODO())
if err != nil {
log.Errorf("no download client")
return nil
@@ -339,7 +361,7 @@ type StorageInfo struct {
Settings map[string]string `json:"settings" binding:"required"`
TvPath string `json:"tv_path" binding:"required"`
MoviePath string `json:"movie_path" binding:"required"`
Default bool `json:"default"`
Default bool `json:"default"`
}
func (s *StorageInfo) ToWebDavSetting() WebdavSetting {
@@ -354,6 +376,14 @@ func (s *StorageInfo) ToWebDavSetting() WebdavSetting {
}
}
func (s *StorageInfo) ToAlistSetting() WebdavSetting {
return WebdavSetting{
URL: s.Settings["url"],
User: s.Settings["user"],
Password: s.Settings["password"],
ChangeFileHash: s.Settings["change_file_hash"],
}
}
type WebdavSetting struct {
URL string `json:"url"`
@@ -413,7 +443,7 @@ type Storage struct {
}
func (s *Storage) ToWebDavSetting() WebdavSetting {
if s.Implementation != storage.ImplementationWebdav {
if s.Implementation != storage.ImplementationWebdav && s.Implementation != storage.ImplementationAlist {
panic("not webdav storage")
}
var webdavSetting WebdavSetting
@@ -454,8 +484,18 @@ func (c *Client) SetDefaultStorageByName(name string) error {
}
func (c *Client) SaveHistoryRecord(h ent.History) (*ent.History, error) {
if h.Link != "" {
r, err := utils.Link2Magnet(h.Link)
if err != nil {
log.Warnf("convert link to magnet error, link %v, error: %v", h.Link, err)
} else {
h.Link = r
}
}
return c.ent.History.Create().SetMediaID(h.MediaID).SetEpisodeID(h.EpisodeID).SetDate(time.Now()).
SetStatus(h.Status).SetTargetDir(h.TargetDir).SetSourceTitle(h.SourceTitle).SetSaved(h.Saved).Save(context.TODO())
SetStatus(h.Status).SetTargetDir(h.TargetDir).SetSourceTitle(h.SourceTitle).SetIndexerID(h.IndexerID).
SetDownloadClientID(h.DownloadClientID).SetSize(h.Size).SetSaved(h.Saved).SetSeasonNum(h.SeasonNum).
SetEpisodeNums(h.EpisodeNums).SetLink(h.Link).Save(context.TODO())
}
func (c *Client) SetHistoryStatus(id int, status history.Status) error {
@@ -472,7 +512,7 @@ func (c *Client) GetHistories() ent.Histories {
func (c *Client) GetRunningHistories() ent.Histories {
h, err := c.ent.History.Query().Where(history.Or(history.StatusEQ(history.StatusRunning),
history.StatusEQ(history.StatusUploading))).All(context.TODO())
history.StatusEQ(history.StatusUploading), history.StatusEQ(history.StatusSeeding))).All(context.TODO())
if err != nil {
return nil
}
@@ -509,6 +549,16 @@ func (c *Client) SetEpisodeStatus(id int, status episode.Status) error {
return c.ent.Episode.Update().Where(episode.ID(id)).SetStatus(status).Exec(context.TODO())
}
func (c *Client) IsEpisodeDownloadingOrDownloaded(id int) bool {
his := c.ent.History.Query().Where(history.EpisodeID(id)).AllX(context.Background())
for _, h := range his {
if h.Status != history.StatusFail {
return true
}
}
return false
}
func (c *Client) SetSeasonAllEpisodeStatus(mediaID, seasonNum int, status episode.Status) error {
return c.ent.Episode.Update().Where(episode.MediaID(mediaID), episode.SeasonNumber(seasonNum)).SetStatus(status).Exec(context.TODO())
}
@@ -536,3 +586,199 @@ func (c *Client) GetMovieDummyEpisode(movieId int) (*ent.Episode, error) {
func (c *Client) GetDownloadClient(id int) (*ent.DownloadClients, error) {
return c.ent.DownloadClients.Query().Where(downloadclients.ID(id)).First(context.Background())
}
func (c *Client) SetEpisodeMonitoring(id int, b bool) error {
return c.ent.Episode.Update().Where(episode.ID(id)).SetMonitored(b).Exec(context.Background())
}
type EditMediaData struct {
ID int `json:"id"`
Resolution media.Resolution `json:"resolution"`
TargetDir string `json:"target_dir"`
Limiter schema.MediaLimiter `json:"limiter"`
}
func (c *Client) EditMediaMetadata(in EditMediaData) error {
return c.ent.Media.Update().Where(media.ID(in.ID)).SetResolution(in.Resolution).SetTargetDir(in.TargetDir).SetLimiter(in.Limiter).
Exec(context.Background())
}
func (c *Client) UpdateEpisodeTargetFile(id int, filename string) error {
return c.ent.Episode.Update().Where(episode.ID(id)).SetTargetFile(filename).Exec(context.Background())
}
func (c *Client) GetSeasonEpisodes(mediaId, seasonNum int) ([]*ent.Episode, error) {
return c.ent.Episode.Query().Where(episode.MediaID(mediaId), episode.SeasonNumber(seasonNum)).All(context.Background())
}
func (c *Client) GetAllImportLists() ([]*ent.ImportList, error) {
return c.ent.ImportList.Query().All(context.Background())
}
func (c *Client) AddImportlist(il *ent.ImportList) error {
count, err := c.ent.ImportList.Query().Where(importlist.Name(il.Name)).Count(context.Background())
if err != nil {
return err
}
if count > 0 {
//edit exist record
return c.ent.ImportList.Update().Where(importlist.Name(il.Name)).
SetURL(il.URL).SetQulity(il.Qulity).SetType(il.Type).SetStorageID(il.StorageID).Exec(context.Background())
}
return c.ent.ImportList.Create().SetName(il.Name).SetURL(il.URL).SetQulity(il.Qulity).SetStorageID(il.StorageID).
SetType(il.Type).Exec(context.Background())
}
func (c *Client) DeleteImportlist(id int) error {
return c.ent.ImportList.DeleteOneID(id).Exec(context.TODO())
}
func (c *Client) GetSizeLimiter(mediaType string) (*MediaSizeLimiter, error) {
var v string
if mediaType == "tv" {
v = c.GetSetting(SettingTvSizeLimiter)
} else if mediaType == "movie" {
v = c.GetSetting(SettingMovieSizeLimiter)
} else {
return nil, errors.Errorf("media type not supported: %v", mediaType)
}
var limiter MediaSizeLimiter
if v == "" {
return &limiter, nil
}
err := json.Unmarshal([]byte(v), &limiter)
return &limiter, err
}
func (c *Client) SetSizeLimiter(mediaType string, limiter *MediaSizeLimiter) error {
data, err := json.Marshal(limiter)
if err != nil {
return err
}
if mediaType == "tv" {
return c.SetSetting(SettingTvSizeLimiter, string(data))
} else if mediaType == "movie" {
return c.SetSetting(SettingMovieSizeLimiter, string(data))
} else {
return errors.Errorf("media type not supported: %v", mediaType)
}
}
func (c *Client) GetTvNamingFormat() string {
s := c.GetSetting(SettingTvNamingFormat)
if s == "" {
return DefaultNamingFormat
}
return s
}
func (c *Client) GetMovingNamingFormat() string {
s := c.GetSetting(SettingMovieNamingFormat)
if s == "" {
return DefaultNamingFormat
}
return s
}
func (c *Client) CleanAllDanglingEpisodes() error {
_, err := c.ent.Episode.Delete().Where(episode.Not(episode.HasMedia())).Exec(context.Background())
return err
}
func (c *Client) AddBlacklistItem(item *ent.Blacklist) error {
return c.ent.Blacklist.Create().SetType(item.Type).SetValue(item.Value).SetNotes(item.Notes).Exec(context.Background())
}
func (c *Client) GetProwlarrSetting() (*ProwlarrSetting, error) {
s := c.GetSetting(SettingProwlarrInfo)
if s == "" {
return nil, errors.New("prowlarr setting not set")
}
var se ProwlarrSetting
if err := json.Unmarshal([]byte(s), &se); err != nil {
return nil, err
}
return &se, nil
}
func (c *Client) SaveProwlarrSetting(se *ProwlarrSetting) error {
data, err := json.Marshal(se)
if err != nil {
return err
}
return c.SetSetting(SettingProwlarrInfo, string(data))
}
func (c *Client) getAcceptedFormats(key string) ([]string, error) {
v := c.GetSetting(key)
if v == "" {
return nil, nil
}
var res []string
err := json.Unmarshal([]byte(v), &res)
return res, err
}
func (c *Client) setAcceptedFormats(key string, v []string) error {
data, err := json.Marshal(v)
if err != nil {
return err
}
return c.SetSetting(key, string(data))
}
func (c *Client) GetAcceptedVideoFormats() ([]string, error) {
res, err := c.getAcceptedFormats(SettingAcceptedVideoFormats)
if err != nil {
return nil, err
}
if res == nil {
return defaultAcceptedVideoFormats, nil
}
return res, nil
}
func (c *Client) SetAcceptedVideoFormats(key string, v []string) error {
return c.setAcceptedFormats(SettingAcceptedVideoFormats, v)
}
func (c *Client) GetAcceptedSubtitleFormats() ([]string, error) {
res, err := c.getAcceptedFormats(SettingAcceptedSubtitleFormats)
if err != nil {
return nil, err
}
if res== nil {
return defaultAcceptedSubtitleFormats, nil
}
return res, nil
}
func (c *Client) SetAcceptedSubtitleFormats(key string, v []string) error {
return c.setAcceptedFormats(SettingAcceptedSubtitleFormats, v)
}
func (c *Client) GetAIConfig() (AIConfig, error) {
cfg := c.GetSetting(SettingAIConfig)
var ai AIConfig
if cfg == "" {
return ai, nil
}
err := json.Unmarshal([]byte(cfg), &ai)
if err != nil {
return AIConfig{}, err
}
return ai, nil
}
func (c *Client) SetAIConfig(cfg *AIConfig) error {
if data, err := json.Marshal(cfg); err != nil {
return err
} else {
return c.SetSetting(SettingAIConfig, string(data))
}
}
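
The `GetTvNamingFormat` / `GetMovingNamingFormat` helpers above fall back to `DefaultNamingFormat`, a Go template over the `NamingInfo` fields defined in the `db/setting.go` diff. A hedged, self-contained sketch of how such a template renders; the code path that actually executes it is not shown in this diff, so using `text/template` here is an assumption, and the media values are hypothetical:

```go
package main

import (
	"fmt"
	"os"
	"strings"
	"text/template"
)

// NamingInfo mirrors the struct added in db/setting.go.
type NamingInfo struct {
	NameCN string
	NameEN string
	Year   string
	TmdbID int
}

// DefaultNamingFormat is the default folder-naming template from db/setting.go.
const DefaultNamingFormat = "{{.NameCN}} {{.NameEN}} {{if .Year}} ({{.Year}}) {{end}}"

func main() {
	tmpl, err := template.New("naming").Parse(DefaultNamingFormat)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	// Hypothetical media entry, for illustration only.
	info := NamingInfo{NameCN: "沙丘", NameEN: "Dune", Year: "2021", TmdbID: 438631}
	var sb strings.Builder
	if err := tmpl.Execute(&sb, info); err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	fmt.Println(strings.TrimSpace(sb.String())) // "沙丘 Dune  (2021)"
}
```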

13
doc/alist.md Normal file
View File

@@ -0,0 +1,13 @@
# alist integration
> This program can use alist as a storage backend: finished movie and TV downloads are uploaded to the corresponding alist folder. Combined with Aliyun Drive, Quark Drive, and similar services, this turns a cloud drive into a NAS. Two integration modes are currently supported: WebDAV and direct integration
## webdav
With WebDAV integration all program features are supported, but instant upload is not, so uploads are slower.
## Direct alist integration
In the storage settings, choose alist and fill in the corresponding information.
Pros: instant upload is supported, so uploads are fast. Cons: some features are unavailable (Plex and NFO scraping)
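
For reference, the settings that the direct alist mode reads are visible in `StorageInfo.ToAlistSetting` in the `db/db.go` diff above. A small illustrative sketch of those fields, with hypothetical values and an illustrative local type name (the real code returns a `WebdavSetting`; json tags omitted here):

```go
package main

import "fmt"

// AlistSetting lists the fields that ToAlistSetting reads from the storage settings map.
type AlistSetting struct {
	URL            string
	User           string
	Password       string
	ChangeFileHash string
}

func main() {
	// Hypothetical values, as they would be entered in the storage dialog.
	settings := map[string]string{
		"url":              "http://192.168.0.10:5244",
		"user":             "admin",
		"password":         "secret",
		"change_file_hash": "true",
	}
	s := AlistSetting{
		URL:            settings["url"],
		User:           settings["user"],
		Password:       settings["password"],
		ChangeFileHash: settings["change_file_hash"],
	}
	fmt.Printf("%+v\n", s)
}
```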

doc/assets/anime_match.png (new binary file, 774 KiB; not shown)

(binary file changed, not shown; 2.1 MiB → 2.6 MiB)

(binary file changed, not shown; 3.4 MiB → 843 KiB)

doc/assets/unraid.png (new binary file, 206 KiB; not shown)

(binary file changed, not shown; 111 KiB → 74 KiB)

doc/assets/yuanli.png (new binary file, 170 KiB; not shown)

View File

@@ -2,44 +2,28 @@
To use this program correctly, the following settings need to be configured:
### TMDB settings
### 1. TMDB settings
1. This program relies on TMDB data, so you first need to apply for a TMDB API key. For a how-to, google [tmdb api key application](https://www.google.com/search?q=tmdb+api+key%E7%94%B3%E8%AF%B7)
2. Once you have the TMDB API key, enter it under *Settings -> General -> TMDB Api Key*
### Indexers
**Note:** TMDB may require a proxy to be reachable; see [TMDB access issues](./tmdb.md)
Indexers are the resource providers. The torznab protocol is currently supported, which means both prowlarr and jackett work. Deploy them yourself, or bring them up together with the provided docker compose configuration
### 2. Indexers
The linuxserver image is recommended: https://docs.linuxserver.io/images/docker-jackett/
Configure either the indexer settings page or the prowlarr settings; one of the two is enough.
#### Indexer configuration
#### For jackett configuration, see [jackett](./jackett.md)
This indexer walkthrough uses jackett as the example. After bringing it up with the default docker compose configuration, the jackett home page is reachable at http://< ip >:9117.
#### prowlarr settings
1. On the jackett home page, click Add indexer at the top; a list of BT/PT sites appears. Pick the sites you need and click + to add them. For PT sites, complete the related configuration yourself
![add indexer](./assets/add_indexer.png)
![search add](./assets/search_add.png)
2. After adding, the home page shows the corresponding BT/PT site; click *Copy Torznab Feed* to get the address we need
![copy feed](./assets/copy_feed.png)
3. Back in Polaris, go to *Settings -> Indexer settings*, click + to add a new indexer, enter a name, and paste the address from step 2 into the URL field
![polaris add indexer](./assets/polaris_add_indexer.png)
4. The dialog also asks for an API Key; back in Jackett, copy it from the top-right corner of the page
![api key](./assets/jackett_api_key.png)
5. Congratulations, the indexer is configured. To add more sites, repeat the same steps
1) Get prowlarr's URL and API key; the API key is under *Prowlarr Settings -> General -> API Key*
2) Fill these values into Polaris under *Settings -> prowlarr settings*
### Download client
The resource download client; transmission is currently supported. Configure it accordingly
The resource download client; transmission/qbittorrent are currently supported. Configure them accordingly
![transmission](./assets/downloader.png)
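
The prowlarr URL and API key from the two steps above correspond to the `ProwlarrSetting` struct added in the `db/setting.go` diff; `SaveProwlarrSetting` in `db/db.go` stores it as JSON under the `prowlarr_info` setting key. A minimal self-contained sketch with hypothetical values:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// ProwlarrSetting mirrors the struct added in db/setting.go.
type ProwlarrSetting struct {
	Disabled bool   `json:"disabled"`
	ApiKey   string `json:"api_key"`
	URL      string `json:"url"`
}

func main() {
	// Hypothetical values; use your own prowlarr URL and API key.
	se := ProwlarrSetting{
		URL:    "http://192.168.0.10:9696",
		ApiKey: "0123456789abcdef",
	}
	data, err := json.Marshal(se)
	if err != nil {
		panic(err)
	}
	// This JSON string is what gets stored under the prowlarr_info setting key.
	fmt.Println(string(data))
}
```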

21
doc/jackett.md Normal file
View File

@@ -0,0 +1,21 @@
## jackett indexer configuration
1. On the jackett home page, click Add indexer at the top; a list of BT/PT sites appears. Pick the sites you need and click + to add them. For PT sites, complete the related configuration yourself
![add indexer](./assets/add_indexer.png)
![search add](./assets/search_add.png)
2. After adding, the home page shows the corresponding BT/PT site; click *Copy Torznab Feed* to get the address we need
![copy feed](./assets/copy_feed.png)
3. Back in Polaris, go to *Settings -> Indexer settings*, click + to add a new indexer, enter a name, and paste the address from step 2 into the URL field
![polaris add indexer](./assets/polaris_add_indexer.png)
4. The dialog also asks for an API Key; back in Jackett, copy it from the top-right corner of the page
![api key](./assets/jackett_api_key.png)
5. Congratulations, the indexer is configured. To add more sites, repeat the same steps

View File

@@ -1,8 +1,83 @@
## Quick start
# Quick start
The simplest way to deploy Polaris is docker compose. For a complete setup Polaris also needs an indexer client and a download client. prowlarr and jackett are supported as indexer clients; transmission is currently the only supported download client.
## 1. Install Polaris
Below is a sample docker-compose configuration; for simplicity it brings up transmission and jackett as well, though you can also install them separately
### 1.1 Install with Docker Compose
The easiest way to use this program is docker compose: save the content below as docker-compose.yml, then run docker compose up -d to bring it up.
```yaml
services:
polaris:
image: ghcr.io/simon-ding/polaris:latest
restart: always
environment:
- PUID=99 # UID of the user the program runs as
- PGID=100 # GID of the user the program runs as
- TZ=Asia/Shanghai # time zone
volumes:
- <config path>:/app/data # program configuration path
- <download path>:/downloads # download path; must match the download client's configuration
- <media path>:/data # media storage path; WebDAV storage can also be configured after startup
ports:
- 8080:8080 # port mapping; the port before the colon can be changed as needed
```
> latest is the release version; if you want new features and can tolerate bugs, use the main tag
### 1.2 Install with Docker
Polaris can also be installed with a plain docker command
```bash
docker run -d \
-v <config path>:/app/data \
-v <download path>:/downloads \
-v <media path>:/data \
-e PUID=99 \
-e PGID=100 \
-e TZ=Asia/Shanghai \
-p 8080:8080 \
--restart always \
ghcr.io/simon-ding/polaris:latest
```
### 1.3 Unraid installation
Configure it as shown below
![](./assets/unraid.png)
### 1.4 Access
Once it is up, open http://< ip >:8080 to reach the Polaris home page:
![](./assets/main_page.png)
## 2. Install a download client
Polaris needs a download client to work with; Transmission and Qbittorrent are currently supported. The linuxserver images are recommended:
* [linuxserver/transmission](https://docs.linuxserver.io/images/docker-transmission)
* [linuxserver/qbittorrent](https://docs.linuxserver.io/images/docker-qbittorrent/)
Note that the /downloads path mapping inside the download client must match Polaris's /downloads mapping, i.e. both must be mapped to the same host path.
## 3. Install Jackett/Prowlarr
For Polaris to work properly it also needs an indexer client; jackett and prowlarr are currently supported. prowlarr is recommended because it is simpler to set up
Installation guides:
* [linuxserver/jackett](https://docs.linuxserver.io/images/docker-jackett/)
* [linuxserver/prowlarr](https://docs.linuxserver.io/images/docker-prowlarr/)
## 4. Combined installation
If installing each piece separately feels tedious, the docker compose file below brings up all components at once
**Note:** transmission's download path mapping must match Polaris's. If you are not sure how, keep the default settings.
@@ -11,6 +86,10 @@ services:
polaris:
image: ghcr.io/simon-ding/polaris:latest
restart: always
environment:
- PUID=1000
- PGID=1000
- TZ=Asia/Shanghai
volumes:
- ./config/polaris:/app/data # program configuration path
- /downloads:/downloads # download path; must match the download client's configuration
@@ -43,27 +122,16 @@ services:
restart: unless-stopped
```
Once it is up, access it at http://< ip >:8080
Copy the file above, save it as docker-compose.yml, then run the command below
```bash
docker compose up -d
```
![](./assets/main_page.png)
## Configuration
## 5. Configuration
For detailed configuration, see the [configuration guide](./configuration.md)
## Getting started
1. Once configuration is done, use the search button in the top-right corner to look up the movies and TV series you want to watch.
![search](./assets/search_series.png)
2. When you find the movie or series, click to add it to the watchlist
![add](./assets/add_series.png)
3. When a movie has resources available or a series gets a new episode, the program automatically downloads them to the configured storage. For series, you can also open the detail page and use the search button to search for a specific episode yourself.
That covers the basics of using the program; enjoy!

6
doc/tmdb.md Normal file
View File

@@ -0,0 +1,6 @@
# TMDB access issues
Because of the particular network environment in mainland China, reaching tmdb may require a proxy. There are two solutions:
1. Set up a global proxy on your router; third-party router firmware such as OpenWrt supports this
2. Use the program's proxy feature, configured under *General -> Proxy address*, in a format like http://192.168.0.10:1080
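
The proxy address above is a standard HTTP proxy URL. As a generic, hedged illustration (not the application's actual code path), this is how such a URL is applied to an HTTP client in Go:

```go
package main

import (
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	// Hypothetical proxy address, in the same format as the setting above.
	proxyURL, err := url.Parse("http://192.168.0.10:1080")
	if err != nil {
		panic(err)
	}
	// Route all requests from this client through the proxy.
	client := &http.Client{Transport: &http.Transport{Proxy: http.ProxyURL(proxyURL)}}
	resp, err := client.Get("https://api.themoviedb.org/3/configuration")
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status) // 401 without a TMDB API key, but the proxied path is exercised
}
```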

131
ent/blacklist.go Normal file
View File

@@ -0,0 +1,131 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"encoding/json"
"fmt"
"polaris/ent/blacklist"
"polaris/ent/schema"
"strings"
"entgo.io/ent"
"entgo.io/ent/dialect/sql"
)
// Blacklist is the model entity for the Blacklist schema.
type Blacklist struct {
config `json:"-"`
// ID of the ent.
ID int `json:"id,omitempty"`
// Type holds the value of the "type" field.
Type blacklist.Type `json:"type,omitempty"`
// Value holds the value of the "value" field.
Value schema.BlacklistValue `json:"value,omitempty"`
// Notes holds the value of the "notes" field.
Notes string `json:"notes,omitempty"`
selectValues sql.SelectValues
}
// scanValues returns the types for scanning values from sql.Rows.
func (*Blacklist) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case blacklist.FieldValue:
values[i] = new([]byte)
case blacklist.FieldID:
values[i] = new(sql.NullInt64)
case blacklist.FieldType, blacklist.FieldNotes:
values[i] = new(sql.NullString)
default:
values[i] = new(sql.UnknownType)
}
}
return values, nil
}
// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the Blacklist fields.
func (b *Blacklist) assignValues(columns []string, values []any) error {
if m, n := len(values), len(columns); m < n {
return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
}
for i := range columns {
switch columns[i] {
case blacklist.FieldID:
value, ok := values[i].(*sql.NullInt64)
if !ok {
return fmt.Errorf("unexpected type %T for field id", value)
}
b.ID = int(value.Int64)
case blacklist.FieldType:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field type", values[i])
} else if value.Valid {
b.Type = blacklist.Type(value.String)
}
case blacklist.FieldValue:
if value, ok := values[i].(*[]byte); !ok {
return fmt.Errorf("unexpected type %T for field value", values[i])
} else if value != nil && len(*value) > 0 {
if err := json.Unmarshal(*value, &b.Value); err != nil {
return fmt.Errorf("unmarshal field value: %w", err)
}
}
case blacklist.FieldNotes:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field notes", values[i])
} else if value.Valid {
b.Notes = value.String
}
default:
b.selectValues.Set(columns[i], values[i])
}
}
return nil
}
// GetValue returns the ent.Value that was dynamically selected and assigned to the Blacklist.
// This includes values selected through modifiers, order, etc.
func (b *Blacklist) GetValue(name string) (ent.Value, error) {
return b.selectValues.Get(name)
}
// Update returns a builder for updating this Blacklist.
// Note that you need to call Blacklist.Unwrap() before calling this method if this Blacklist
// was returned from a transaction, and the transaction was committed or rolled back.
func (b *Blacklist) Update() *BlacklistUpdateOne {
return NewBlacklistClient(b.config).UpdateOne(b)
}
// Unwrap unwraps the Blacklist entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
func (b *Blacklist) Unwrap() *Blacklist {
_tx, ok := b.config.driver.(*txDriver)
if !ok {
panic("ent: Blacklist is not a transactional entity")
}
b.config.driver = _tx.drv
return b
}
// String implements the fmt.Stringer.
func (b *Blacklist) String() string {
var builder strings.Builder
builder.WriteString("Blacklist(")
builder.WriteString(fmt.Sprintf("id=%v, ", b.ID))
builder.WriteString("type=")
builder.WriteString(fmt.Sprintf("%v", b.Type))
builder.WriteString(", ")
builder.WriteString("value=")
builder.WriteString(fmt.Sprintf("%v", b.Value))
builder.WriteString(", ")
builder.WriteString("notes=")
builder.WriteString(b.Notes)
builder.WriteByte(')')
return builder.String()
}
// Blacklists is a parsable slice of Blacklist.
type Blacklists []*Blacklist

ent/blacklist/blacklist.go Normal file

@@ -0,0 +1,89 @@
// Code generated by ent, DO NOT EDIT.
package blacklist
import (
"fmt"
"polaris/ent/schema"
"entgo.io/ent/dialect/sql"
)
const (
// Label holds the string label denoting the blacklist type in the database.
Label = "blacklist"
// FieldID holds the string denoting the id field in the database.
FieldID = "id"
// FieldType holds the string denoting the type field in the database.
FieldType = "type"
// FieldValue holds the string denoting the value field in the database.
FieldValue = "value"
// FieldNotes holds the string denoting the notes field in the database.
FieldNotes = "notes"
// Table holds the table name of the blacklist in the database.
Table = "blacklists"
)
// Columns holds all SQL columns for blacklist fields.
var Columns = []string{
FieldID,
FieldType,
FieldValue,
FieldNotes,
}
// ValidColumn reports if the column name is valid (part of the table columns).
func ValidColumn(column string) bool {
for i := range Columns {
if column == Columns[i] {
return true
}
}
return false
}
var (
// DefaultValue holds the default value on creation for the "value" field.
DefaultValue schema.BlacklistValue
)
// Type defines the type for the "type" enum field.
type Type string
// Type values.
const (
TypeMedia Type = "media"
TypeTorrent Type = "torrent"
)
func (_type Type) String() string {
return string(_type)
}
// TypeValidator is a validator for the "type" field enum values. It is called by the builders before save.
func TypeValidator(_type Type) error {
switch _type {
case TypeMedia, TypeTorrent:
return nil
default:
return fmt.Errorf("blacklist: invalid enum value for type field: %q", _type)
}
}
// OrderOption defines the ordering options for the Blacklist queries.
type OrderOption func(*sql.Selector)
// ByID orders the results by the id field.
func ByID(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldID, opts...).ToFunc()
}
// ByType orders the results by the type field.
func ByType(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldType, opts...).ToFunc()
}
// ByNotes orders the results by the notes field.
func ByNotes(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldNotes, opts...).ToFunc()
}

ent/blacklist/where.go Normal file

@@ -0,0 +1,169 @@
// Code generated by ent, DO NOT EDIT.
package blacklist
import (
"polaris/ent/predicate"
"entgo.io/ent/dialect/sql"
)
// ID filters vertices based on their ID field.
func ID(id int) predicate.Blacklist {
return predicate.Blacklist(sql.FieldEQ(FieldID, id))
}
// IDEQ applies the EQ predicate on the ID field.
func IDEQ(id int) predicate.Blacklist {
return predicate.Blacklist(sql.FieldEQ(FieldID, id))
}
// IDNEQ applies the NEQ predicate on the ID field.
func IDNEQ(id int) predicate.Blacklist {
return predicate.Blacklist(sql.FieldNEQ(FieldID, id))
}
// IDIn applies the In predicate on the ID field.
func IDIn(ids ...int) predicate.Blacklist {
return predicate.Blacklist(sql.FieldIn(FieldID, ids...))
}
// IDNotIn applies the NotIn predicate on the ID field.
func IDNotIn(ids ...int) predicate.Blacklist {
return predicate.Blacklist(sql.FieldNotIn(FieldID, ids...))
}
// IDGT applies the GT predicate on the ID field.
func IDGT(id int) predicate.Blacklist {
return predicate.Blacklist(sql.FieldGT(FieldID, id))
}
// IDGTE applies the GTE predicate on the ID field.
func IDGTE(id int) predicate.Blacklist {
return predicate.Blacklist(sql.FieldGTE(FieldID, id))
}
// IDLT applies the LT predicate on the ID field.
func IDLT(id int) predicate.Blacklist {
return predicate.Blacklist(sql.FieldLT(FieldID, id))
}
// IDLTE applies the LTE predicate on the ID field.
func IDLTE(id int) predicate.Blacklist {
return predicate.Blacklist(sql.FieldLTE(FieldID, id))
}
// Notes applies equality check predicate on the "notes" field. It's identical to NotesEQ.
func Notes(v string) predicate.Blacklist {
return predicate.Blacklist(sql.FieldEQ(FieldNotes, v))
}
// TypeEQ applies the EQ predicate on the "type" field.
func TypeEQ(v Type) predicate.Blacklist {
return predicate.Blacklist(sql.FieldEQ(FieldType, v))
}
// TypeNEQ applies the NEQ predicate on the "type" field.
func TypeNEQ(v Type) predicate.Blacklist {
return predicate.Blacklist(sql.FieldNEQ(FieldType, v))
}
// TypeIn applies the In predicate on the "type" field.
func TypeIn(vs ...Type) predicate.Blacklist {
return predicate.Blacklist(sql.FieldIn(FieldType, vs...))
}
// TypeNotIn applies the NotIn predicate on the "type" field.
func TypeNotIn(vs ...Type) predicate.Blacklist {
return predicate.Blacklist(sql.FieldNotIn(FieldType, vs...))
}
// NotesEQ applies the EQ predicate on the "notes" field.
func NotesEQ(v string) predicate.Blacklist {
return predicate.Blacklist(sql.FieldEQ(FieldNotes, v))
}
// NotesNEQ applies the NEQ predicate on the "notes" field.
func NotesNEQ(v string) predicate.Blacklist {
return predicate.Blacklist(sql.FieldNEQ(FieldNotes, v))
}
// NotesIn applies the In predicate on the "notes" field.
func NotesIn(vs ...string) predicate.Blacklist {
return predicate.Blacklist(sql.FieldIn(FieldNotes, vs...))
}
// NotesNotIn applies the NotIn predicate on the "notes" field.
func NotesNotIn(vs ...string) predicate.Blacklist {
return predicate.Blacklist(sql.FieldNotIn(FieldNotes, vs...))
}
// NotesGT applies the GT predicate on the "notes" field.
func NotesGT(v string) predicate.Blacklist {
return predicate.Blacklist(sql.FieldGT(FieldNotes, v))
}
// NotesGTE applies the GTE predicate on the "notes" field.
func NotesGTE(v string) predicate.Blacklist {
return predicate.Blacklist(sql.FieldGTE(FieldNotes, v))
}
// NotesLT applies the LT predicate on the "notes" field.
func NotesLT(v string) predicate.Blacklist {
return predicate.Blacklist(sql.FieldLT(FieldNotes, v))
}
// NotesLTE applies the LTE predicate on the "notes" field.
func NotesLTE(v string) predicate.Blacklist {
return predicate.Blacklist(sql.FieldLTE(FieldNotes, v))
}
// NotesContains applies the Contains predicate on the "notes" field.
func NotesContains(v string) predicate.Blacklist {
return predicate.Blacklist(sql.FieldContains(FieldNotes, v))
}
// NotesHasPrefix applies the HasPrefix predicate on the "notes" field.
func NotesHasPrefix(v string) predicate.Blacklist {
return predicate.Blacklist(sql.FieldHasPrefix(FieldNotes, v))
}
// NotesHasSuffix applies the HasSuffix predicate on the "notes" field.
func NotesHasSuffix(v string) predicate.Blacklist {
return predicate.Blacklist(sql.FieldHasSuffix(FieldNotes, v))
}
// NotesIsNil applies the IsNil predicate on the "notes" field.
func NotesIsNil() predicate.Blacklist {
return predicate.Blacklist(sql.FieldIsNull(FieldNotes))
}
// NotesNotNil applies the NotNil predicate on the "notes" field.
func NotesNotNil() predicate.Blacklist {
return predicate.Blacklist(sql.FieldNotNull(FieldNotes))
}
// NotesEqualFold applies the EqualFold predicate on the "notes" field.
func NotesEqualFold(v string) predicate.Blacklist {
return predicate.Blacklist(sql.FieldEqualFold(FieldNotes, v))
}
// NotesContainsFold applies the ContainsFold predicate on the "notes" field.
func NotesContainsFold(v string) predicate.Blacklist {
return predicate.Blacklist(sql.FieldContainsFold(FieldNotes, v))
}
// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Blacklist) predicate.Blacklist {
return predicate.Blacklist(sql.AndPredicates(predicates...))
}
// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.Blacklist) predicate.Blacklist {
return predicate.Blacklist(sql.OrPredicates(predicates...))
}
// Not applies the not operator on the given predicate.
func Not(p predicate.Blacklist) predicate.Blacklist {
return predicate.Blacklist(sql.NotPredicates(p))
}

ent/blacklist_create.go Normal file

@@ -0,0 +1,238 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"context"
"errors"
"fmt"
"polaris/ent/blacklist"
"polaris/ent/schema"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
)
// BlacklistCreate is the builder for creating a Blacklist entity.
type BlacklistCreate struct {
config
mutation *BlacklistMutation
hooks []Hook
}
// SetType sets the "type" field.
func (bc *BlacklistCreate) SetType(b blacklist.Type) *BlacklistCreate {
bc.mutation.SetType(b)
return bc
}
// SetValue sets the "value" field.
func (bc *BlacklistCreate) SetValue(sv schema.BlacklistValue) *BlacklistCreate {
bc.mutation.SetValue(sv)
return bc
}
// SetNillableValue sets the "value" field if the given value is not nil.
func (bc *BlacklistCreate) SetNillableValue(sv *schema.BlacklistValue) *BlacklistCreate {
if sv != nil {
bc.SetValue(*sv)
}
return bc
}
// SetNotes sets the "notes" field.
func (bc *BlacklistCreate) SetNotes(s string) *BlacklistCreate {
bc.mutation.SetNotes(s)
return bc
}
// SetNillableNotes sets the "notes" field if the given value is not nil.
func (bc *BlacklistCreate) SetNillableNotes(s *string) *BlacklistCreate {
if s != nil {
bc.SetNotes(*s)
}
return bc
}
// Mutation returns the BlacklistMutation object of the builder.
func (bc *BlacklistCreate) Mutation() *BlacklistMutation {
return bc.mutation
}
// Save creates the Blacklist in the database.
func (bc *BlacklistCreate) Save(ctx context.Context) (*Blacklist, error) {
bc.defaults()
return withHooks(ctx, bc.sqlSave, bc.mutation, bc.hooks)
}
// SaveX calls Save and panics if Save returns an error.
func (bc *BlacklistCreate) SaveX(ctx context.Context) *Blacklist {
v, err := bc.Save(ctx)
if err != nil {
panic(err)
}
return v
}
// Exec executes the query.
func (bc *BlacklistCreate) Exec(ctx context.Context) error {
_, err := bc.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (bc *BlacklistCreate) ExecX(ctx context.Context) {
if err := bc.Exec(ctx); err != nil {
panic(err)
}
}
// defaults sets the default values of the builder before save.
func (bc *BlacklistCreate) defaults() {
if _, ok := bc.mutation.Value(); !ok {
v := blacklist.DefaultValue
bc.mutation.SetValue(v)
}
}
// check runs all checks and user-defined validators on the builder.
func (bc *BlacklistCreate) check() error {
if _, ok := bc.mutation.GetType(); !ok {
return &ValidationError{Name: "type", err: errors.New(`ent: missing required field "Blacklist.type"`)}
}
if v, ok := bc.mutation.GetType(); ok {
if err := blacklist.TypeValidator(v); err != nil {
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Blacklist.type": %w`, err)}
}
}
if _, ok := bc.mutation.Value(); !ok {
return &ValidationError{Name: "value", err: errors.New(`ent: missing required field "Blacklist.value"`)}
}
return nil
}
func (bc *BlacklistCreate) sqlSave(ctx context.Context) (*Blacklist, error) {
if err := bc.check(); err != nil {
return nil, err
}
_node, _spec := bc.createSpec()
if err := sqlgraph.CreateNode(ctx, bc.driver, _spec); err != nil {
if sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
return nil, err
}
id := _spec.ID.Value.(int64)
_node.ID = int(id)
bc.mutation.id = &_node.ID
bc.mutation.done = true
return _node, nil
}
func (bc *BlacklistCreate) createSpec() (*Blacklist, *sqlgraph.CreateSpec) {
var (
_node = &Blacklist{config: bc.config}
_spec = sqlgraph.NewCreateSpec(blacklist.Table, sqlgraph.NewFieldSpec(blacklist.FieldID, field.TypeInt))
)
if value, ok := bc.mutation.GetType(); ok {
_spec.SetField(blacklist.FieldType, field.TypeEnum, value)
_node.Type = value
}
if value, ok := bc.mutation.Value(); ok {
_spec.SetField(blacklist.FieldValue, field.TypeJSON, value)
_node.Value = value
}
if value, ok := bc.mutation.Notes(); ok {
_spec.SetField(blacklist.FieldNotes, field.TypeString, value)
_node.Notes = value
}
return _node, _spec
}
// BlacklistCreateBulk is the builder for creating many Blacklist entities in bulk.
type BlacklistCreateBulk struct {
config
err error
builders []*BlacklistCreate
}
// Save creates the Blacklist entities in the database.
func (bcb *BlacklistCreateBulk) Save(ctx context.Context) ([]*Blacklist, error) {
if bcb.err != nil {
return nil, bcb.err
}
specs := make([]*sqlgraph.CreateSpec, len(bcb.builders))
nodes := make([]*Blacklist, len(bcb.builders))
mutators := make([]Mutator, len(bcb.builders))
for i := range bcb.builders {
func(i int, root context.Context) {
builder := bcb.builders[i]
builder.defaults()
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
mutation, ok := m.(*BlacklistMutation)
if !ok {
return nil, fmt.Errorf("unexpected mutation type %T", m)
}
if err := builder.check(); err != nil {
return nil, err
}
builder.mutation = mutation
var err error
nodes[i], specs[i] = builder.createSpec()
if i < len(mutators)-1 {
_, err = mutators[i+1].Mutate(root, bcb.builders[i+1].mutation)
} else {
spec := &sqlgraph.BatchCreateSpec{Nodes: specs}
// Invoke the actual operation on the latest mutation in the chain.
if err = sqlgraph.BatchCreate(ctx, bcb.driver, spec); err != nil {
if sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
}
}
if err != nil {
return nil, err
}
mutation.id = &nodes[i].ID
if specs[i].ID.Value != nil {
id := specs[i].ID.Value.(int64)
nodes[i].ID = int(id)
}
mutation.done = true
return nodes[i], nil
})
for i := len(builder.hooks) - 1; i >= 0; i-- {
mut = builder.hooks[i](mut)
}
mutators[i] = mut
}(i, ctx)
}
if len(mutators) > 0 {
if _, err := mutators[0].Mutate(ctx, bcb.builders[0].mutation); err != nil {
return nil, err
}
}
return nodes, nil
}
// SaveX is like Save, but panics if an error occurs.
func (bcb *BlacklistCreateBulk) SaveX(ctx context.Context) []*Blacklist {
v, err := bcb.Save(ctx)
if err != nil {
panic(err)
}
return v
}
// Exec executes the query.
func (bcb *BlacklistCreateBulk) Exec(ctx context.Context) error {
_, err := bcb.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (bcb *BlacklistCreateBulk) ExecX(ctx context.Context) {
if err := bcb.Exec(ctx); err != nil {
panic(err)
}
}
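Taken together, the builders above give the new Blacklist entity the usual ent CRUD surface. A minimal usage sketch follows; it relies only on the generated API shown in this diff (Create, SetType, SetNotes, Save, Query, Where, All), while the sqlite driver choice, database path, and notes text are illustrative and not necessarily how Polaris wires up its own client:
```go
package main

import (
	"context"
	"fmt"
	"log"

	"polaris/ent"
	"polaris/ent/blacklist"

	_ "github.com/mattn/go-sqlite3" // illustrative driver choice
)

func main() {
	// Open an ent client; the driver name and DSN are placeholders.
	client, err := ent.Open("sqlite3", "file:polaris.db?_fk=1")
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()
	ctx := context.Background()

	// Create the schema (including the new blacklists table) for this demo DB.
	if err := client.Schema.Create(ctx); err != nil {
		log.Fatal(err)
	}

	// Create a torrent-type blacklist entry. The "value" field falls back to
	// blacklist.DefaultValue because SetValue is not called here.
	entry, err := client.Blacklist.
		Create().
		SetType(blacklist.TypeTorrent).
		SetNotes("manually blacklisted release").
		Save(ctx)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("created blacklist entry", entry.ID)

	// Read back all torrent-type entries.
	entries, err := client.Blacklist.Query().
		Where(blacklist.TypeEQ(blacklist.TypeTorrent)).
		All(ctx)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("torrent entries:", len(entries))
}
```
The same generated client also exposes update and delete builders, as shown in the files that follow.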

ent/blacklist_delete.go Normal file

@@ -0,0 +1,88 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"context"
"polaris/ent/blacklist"
"polaris/ent/predicate"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
)
// BlacklistDelete is the builder for deleting a Blacklist entity.
type BlacklistDelete struct {
config
hooks []Hook
mutation *BlacklistMutation
}
// Where appends a list predicates to the BlacklistDelete builder.
func (bd *BlacklistDelete) Where(ps ...predicate.Blacklist) *BlacklistDelete {
bd.mutation.Where(ps...)
return bd
}
// Exec executes the deletion query and returns how many vertices were deleted.
func (bd *BlacklistDelete) Exec(ctx context.Context) (int, error) {
return withHooks(ctx, bd.sqlExec, bd.mutation, bd.hooks)
}
// ExecX is like Exec, but panics if an error occurs.
func (bd *BlacklistDelete) ExecX(ctx context.Context) int {
n, err := bd.Exec(ctx)
if err != nil {
panic(err)
}
return n
}
func (bd *BlacklistDelete) sqlExec(ctx context.Context) (int, error) {
_spec := sqlgraph.NewDeleteSpec(blacklist.Table, sqlgraph.NewFieldSpec(blacklist.FieldID, field.TypeInt))
if ps := bd.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
affected, err := sqlgraph.DeleteNodes(ctx, bd.driver, _spec)
if err != nil && sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
bd.mutation.done = true
return affected, err
}
// BlacklistDeleteOne is the builder for deleting a single Blacklist entity.
type BlacklistDeleteOne struct {
bd *BlacklistDelete
}
// Where appends a list predicates to the BlacklistDelete builder.
func (bdo *BlacklistDeleteOne) Where(ps ...predicate.Blacklist) *BlacklistDeleteOne {
bdo.bd.mutation.Where(ps...)
return bdo
}
// Exec executes the deletion query.
func (bdo *BlacklistDeleteOne) Exec(ctx context.Context) error {
n, err := bdo.bd.Exec(ctx)
switch {
case err != nil:
return err
case n == 0:
return &NotFoundError{blacklist.Label}
default:
return nil
}
}
// ExecX is like Exec, but panics if an error occurs.
func (bdo *BlacklistDeleteOne) ExecX(ctx context.Context) {
if err := bdo.Exec(ctx); err != nil {
panic(err)
}
}

ent/blacklist_query.go Normal file

@@ -0,0 +1,526 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"context"
"fmt"
"math"
"polaris/ent/blacklist"
"polaris/ent/predicate"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
)
// BlacklistQuery is the builder for querying Blacklist entities.
type BlacklistQuery struct {
config
ctx *QueryContext
order []blacklist.OrderOption
inters []Interceptor
predicates []predicate.Blacklist
// intermediate query (i.e. traversal path).
sql *sql.Selector
path func(context.Context) (*sql.Selector, error)
}
// Where adds a new predicate for the BlacklistQuery builder.
func (bq *BlacklistQuery) Where(ps ...predicate.Blacklist) *BlacklistQuery {
bq.predicates = append(bq.predicates, ps...)
return bq
}
// Limit the number of records to be returned by this query.
func (bq *BlacklistQuery) Limit(limit int) *BlacklistQuery {
bq.ctx.Limit = &limit
return bq
}
// Offset to start from.
func (bq *BlacklistQuery) Offset(offset int) *BlacklistQuery {
bq.ctx.Offset = &offset
return bq
}
// Unique configures the query builder to filter duplicate records on query.
// By default, unique is set to true, and can be disabled using this method.
func (bq *BlacklistQuery) Unique(unique bool) *BlacklistQuery {
bq.ctx.Unique = &unique
return bq
}
// Order specifies how the records should be ordered.
func (bq *BlacklistQuery) Order(o ...blacklist.OrderOption) *BlacklistQuery {
bq.order = append(bq.order, o...)
return bq
}
// First returns the first Blacklist entity from the query.
// Returns a *NotFoundError when no Blacklist was found.
func (bq *BlacklistQuery) First(ctx context.Context) (*Blacklist, error) {
nodes, err := bq.Limit(1).All(setContextOp(ctx, bq.ctx, "First"))
if err != nil {
return nil, err
}
if len(nodes) == 0 {
return nil, &NotFoundError{blacklist.Label}
}
return nodes[0], nil
}
// FirstX is like First, but panics if an error occurs.
func (bq *BlacklistQuery) FirstX(ctx context.Context) *Blacklist {
node, err := bq.First(ctx)
if err != nil && !IsNotFound(err) {
panic(err)
}
return node
}
// FirstID returns the first Blacklist ID from the query.
// Returns a *NotFoundError when no Blacklist ID was found.
func (bq *BlacklistQuery) FirstID(ctx context.Context) (id int, err error) {
var ids []int
if ids, err = bq.Limit(1).IDs(setContextOp(ctx, bq.ctx, "FirstID")); err != nil {
return
}
if len(ids) == 0 {
err = &NotFoundError{blacklist.Label}
return
}
return ids[0], nil
}
// FirstIDX is like FirstID, but panics if an error occurs.
func (bq *BlacklistQuery) FirstIDX(ctx context.Context) int {
id, err := bq.FirstID(ctx)
if err != nil && !IsNotFound(err) {
panic(err)
}
return id
}
// Only returns a single Blacklist entity found by the query, ensuring it only returns one.
// Returns a *NotSingularError when more than one Blacklist entity is found.
// Returns a *NotFoundError when no Blacklist entities are found.
func (bq *BlacklistQuery) Only(ctx context.Context) (*Blacklist, error) {
nodes, err := bq.Limit(2).All(setContextOp(ctx, bq.ctx, "Only"))
if err != nil {
return nil, err
}
switch len(nodes) {
case 1:
return nodes[0], nil
case 0:
return nil, &NotFoundError{blacklist.Label}
default:
return nil, &NotSingularError{blacklist.Label}
}
}
// OnlyX is like Only, but panics if an error occurs.
func (bq *BlacklistQuery) OnlyX(ctx context.Context) *Blacklist {
node, err := bq.Only(ctx)
if err != nil {
panic(err)
}
return node
}
// OnlyID is like Only, but returns the only Blacklist ID in the query.
// Returns a *NotSingularError when more than one Blacklist ID is found.
// Returns a *NotFoundError when no entities are found.
func (bq *BlacklistQuery) OnlyID(ctx context.Context) (id int, err error) {
var ids []int
if ids, err = bq.Limit(2).IDs(setContextOp(ctx, bq.ctx, "OnlyID")); err != nil {
return
}
switch len(ids) {
case 1:
id = ids[0]
case 0:
err = &NotFoundError{blacklist.Label}
default:
err = &NotSingularError{blacklist.Label}
}
return
}
// OnlyIDX is like OnlyID, but panics if an error occurs.
func (bq *BlacklistQuery) OnlyIDX(ctx context.Context) int {
id, err := bq.OnlyID(ctx)
if err != nil {
panic(err)
}
return id
}
// All executes the query and returns a list of Blacklists.
func (bq *BlacklistQuery) All(ctx context.Context) ([]*Blacklist, error) {
ctx = setContextOp(ctx, bq.ctx, "All")
if err := bq.prepareQuery(ctx); err != nil {
return nil, err
}
qr := querierAll[[]*Blacklist, *BlacklistQuery]()
return withInterceptors[[]*Blacklist](ctx, bq, qr, bq.inters)
}
// AllX is like All, but panics if an error occurs.
func (bq *BlacklistQuery) AllX(ctx context.Context) []*Blacklist {
nodes, err := bq.All(ctx)
if err != nil {
panic(err)
}
return nodes
}
// IDs executes the query and returns a list of Blacklist IDs.
func (bq *BlacklistQuery) IDs(ctx context.Context) (ids []int, err error) {
if bq.ctx.Unique == nil && bq.path != nil {
bq.Unique(true)
}
ctx = setContextOp(ctx, bq.ctx, "IDs")
if err = bq.Select(blacklist.FieldID).Scan(ctx, &ids); err != nil {
return nil, err
}
return ids, nil
}
// IDsX is like IDs, but panics if an error occurs.
func (bq *BlacklistQuery) IDsX(ctx context.Context) []int {
ids, err := bq.IDs(ctx)
if err != nil {
panic(err)
}
return ids
}
// Count returns the count of the given query.
func (bq *BlacklistQuery) Count(ctx context.Context) (int, error) {
ctx = setContextOp(ctx, bq.ctx, "Count")
if err := bq.prepareQuery(ctx); err != nil {
return 0, err
}
return withInterceptors[int](ctx, bq, querierCount[*BlacklistQuery](), bq.inters)
}
// CountX is like Count, but panics if an error occurs.
func (bq *BlacklistQuery) CountX(ctx context.Context) int {
count, err := bq.Count(ctx)
if err != nil {
panic(err)
}
return count
}
// Exist returns true if the query has elements in the graph.
func (bq *BlacklistQuery) Exist(ctx context.Context) (bool, error) {
ctx = setContextOp(ctx, bq.ctx, "Exist")
switch _, err := bq.FirstID(ctx); {
case IsNotFound(err):
return false, nil
case err != nil:
return false, fmt.Errorf("ent: check existence: %w", err)
default:
return true, nil
}
}
// ExistX is like Exist, but panics if an error occurs.
func (bq *BlacklistQuery) ExistX(ctx context.Context) bool {
exist, err := bq.Exist(ctx)
if err != nil {
panic(err)
}
return exist
}
// Clone returns a duplicate of the BlacklistQuery builder, including all associated steps. It can be
// used to prepare common query builders and use them differently after the clone is made.
func (bq *BlacklistQuery) Clone() *BlacklistQuery {
if bq == nil {
return nil
}
return &BlacklistQuery{
config: bq.config,
ctx: bq.ctx.Clone(),
order: append([]blacklist.OrderOption{}, bq.order...),
inters: append([]Interceptor{}, bq.inters...),
predicates: append([]predicate.Blacklist{}, bq.predicates...),
// clone intermediate query.
sql: bq.sql.Clone(),
path: bq.path,
}
}
// GroupBy is used to group vertices by one or more fields/columns.
// It is often used with aggregate functions, like: count, max, mean, min, sum.
//
// Example:
//
// var v []struct {
// Type blacklist.Type `json:"type,omitempty"`
// Count int `json:"count,omitempty"`
// }
//
// client.Blacklist.Query().
// GroupBy(blacklist.FieldType).
// Aggregate(ent.Count()).
// Scan(ctx, &v)
func (bq *BlacklistQuery) GroupBy(field string, fields ...string) *BlacklistGroupBy {
bq.ctx.Fields = append([]string{field}, fields...)
grbuild := &BlacklistGroupBy{build: bq}
grbuild.flds = &bq.ctx.Fields
grbuild.label = blacklist.Label
grbuild.scan = grbuild.Scan
return grbuild
}
// Select allows the selection one or more fields/columns for the given query,
// instead of selecting all fields in the entity.
//
// Example:
//
// var v []struct {
// Type blacklist.Type `json:"type,omitempty"`
// }
//
// client.Blacklist.Query().
// Select(blacklist.FieldType).
// Scan(ctx, &v)
func (bq *BlacklistQuery) Select(fields ...string) *BlacklistSelect {
bq.ctx.Fields = append(bq.ctx.Fields, fields...)
sbuild := &BlacklistSelect{BlacklistQuery: bq}
sbuild.label = blacklist.Label
sbuild.flds, sbuild.scan = &bq.ctx.Fields, sbuild.Scan
return sbuild
}
// Aggregate returns a BlacklistSelect configured with the given aggregations.
func (bq *BlacklistQuery) Aggregate(fns ...AggregateFunc) *BlacklistSelect {
return bq.Select().Aggregate(fns...)
}
func (bq *BlacklistQuery) prepareQuery(ctx context.Context) error {
for _, inter := range bq.inters {
if inter == nil {
return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)")
}
if trv, ok := inter.(Traverser); ok {
if err := trv.Traverse(ctx, bq); err != nil {
return err
}
}
}
for _, f := range bq.ctx.Fields {
if !blacklist.ValidColumn(f) {
return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
}
}
if bq.path != nil {
prev, err := bq.path(ctx)
if err != nil {
return err
}
bq.sql = prev
}
return nil
}
func (bq *BlacklistQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Blacklist, error) {
var (
nodes = []*Blacklist{}
_spec = bq.querySpec()
)
_spec.ScanValues = func(columns []string) ([]any, error) {
return (*Blacklist).scanValues(nil, columns)
}
_spec.Assign = func(columns []string, values []any) error {
node := &Blacklist{config: bq.config}
nodes = append(nodes, node)
return node.assignValues(columns, values)
}
for i := range hooks {
hooks[i](ctx, _spec)
}
if err := sqlgraph.QueryNodes(ctx, bq.driver, _spec); err != nil {
return nil, err
}
if len(nodes) == 0 {
return nodes, nil
}
return nodes, nil
}
func (bq *BlacklistQuery) sqlCount(ctx context.Context) (int, error) {
_spec := bq.querySpec()
_spec.Node.Columns = bq.ctx.Fields
if len(bq.ctx.Fields) > 0 {
_spec.Unique = bq.ctx.Unique != nil && *bq.ctx.Unique
}
return sqlgraph.CountNodes(ctx, bq.driver, _spec)
}
func (bq *BlacklistQuery) querySpec() *sqlgraph.QuerySpec {
_spec := sqlgraph.NewQuerySpec(blacklist.Table, blacklist.Columns, sqlgraph.NewFieldSpec(blacklist.FieldID, field.TypeInt))
_spec.From = bq.sql
if unique := bq.ctx.Unique; unique != nil {
_spec.Unique = *unique
} else if bq.path != nil {
_spec.Unique = true
}
if fields := bq.ctx.Fields; len(fields) > 0 {
_spec.Node.Columns = make([]string, 0, len(fields))
_spec.Node.Columns = append(_spec.Node.Columns, blacklist.FieldID)
for i := range fields {
if fields[i] != blacklist.FieldID {
_spec.Node.Columns = append(_spec.Node.Columns, fields[i])
}
}
}
if ps := bq.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
if limit := bq.ctx.Limit; limit != nil {
_spec.Limit = *limit
}
if offset := bq.ctx.Offset; offset != nil {
_spec.Offset = *offset
}
if ps := bq.order; len(ps) > 0 {
_spec.Order = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
return _spec
}
func (bq *BlacklistQuery) sqlQuery(ctx context.Context) *sql.Selector {
builder := sql.Dialect(bq.driver.Dialect())
t1 := builder.Table(blacklist.Table)
columns := bq.ctx.Fields
if len(columns) == 0 {
columns = blacklist.Columns
}
selector := builder.Select(t1.Columns(columns...)...).From(t1)
if bq.sql != nil {
selector = bq.sql
selector.Select(selector.Columns(columns...)...)
}
if bq.ctx.Unique != nil && *bq.ctx.Unique {
selector.Distinct()
}
for _, p := range bq.predicates {
p(selector)
}
for _, p := range bq.order {
p(selector)
}
if offset := bq.ctx.Offset; offset != nil {
// limit is mandatory for offset clause. We start
// with default value, and override it below if needed.
selector.Offset(*offset).Limit(math.MaxInt32)
}
if limit := bq.ctx.Limit; limit != nil {
selector.Limit(*limit)
}
return selector
}
// BlacklistGroupBy is the group-by builder for Blacklist entities.
type BlacklistGroupBy struct {
selector
build *BlacklistQuery
}
// Aggregate adds the given aggregation functions to the group-by query.
func (bgb *BlacklistGroupBy) Aggregate(fns ...AggregateFunc) *BlacklistGroupBy {
bgb.fns = append(bgb.fns, fns...)
return bgb
}
// Scan applies the selector query and scans the result into the given value.
func (bgb *BlacklistGroupBy) Scan(ctx context.Context, v any) error {
ctx = setContextOp(ctx, bgb.build.ctx, "GroupBy")
if err := bgb.build.prepareQuery(ctx); err != nil {
return err
}
return scanWithInterceptors[*BlacklistQuery, *BlacklistGroupBy](ctx, bgb.build, bgb, bgb.build.inters, v)
}
func (bgb *BlacklistGroupBy) sqlScan(ctx context.Context, root *BlacklistQuery, v any) error {
selector := root.sqlQuery(ctx).Select()
aggregation := make([]string, 0, len(bgb.fns))
for _, fn := range bgb.fns {
aggregation = append(aggregation, fn(selector))
}
if len(selector.SelectedColumns()) == 0 {
columns := make([]string, 0, len(*bgb.flds)+len(bgb.fns))
for _, f := range *bgb.flds {
columns = append(columns, selector.C(f))
}
columns = append(columns, aggregation...)
selector.Select(columns...)
}
selector.GroupBy(selector.Columns(*bgb.flds...)...)
if err := selector.Err(); err != nil {
return err
}
rows := &sql.Rows{}
query, args := selector.Query()
if err := bgb.build.driver.Query(ctx, query, args, rows); err != nil {
return err
}
defer rows.Close()
return sql.ScanSlice(rows, v)
}
// BlacklistSelect is the builder for selecting fields of Blacklist entities.
type BlacklistSelect struct {
*BlacklistQuery
selector
}
// Aggregate adds the given aggregation functions to the selector query.
func (bs *BlacklistSelect) Aggregate(fns ...AggregateFunc) *BlacklistSelect {
bs.fns = append(bs.fns, fns...)
return bs
}
// Scan applies the selector query and scans the result into the given value.
func (bs *BlacklistSelect) Scan(ctx context.Context, v any) error {
ctx = setContextOp(ctx, bs.ctx, "Select")
if err := bs.prepareQuery(ctx); err != nil {
return err
}
return scanWithInterceptors[*BlacklistQuery, *BlacklistSelect](ctx, bs.BlacklistQuery, bs, bs.inters, v)
}
func (bs *BlacklistSelect) sqlScan(ctx context.Context, root *BlacklistQuery, v any) error {
selector := root.sqlQuery(ctx)
aggregation := make([]string, 0, len(bs.fns))
for _, fn := range bs.fns {
aggregation = append(aggregation, fn(selector))
}
switch n := len(*bs.selector.flds); {
case n == 0 && len(aggregation) > 0:
selector.Select(aggregation...)
case n != 0 && len(aggregation) > 0:
selector.AppendSelect(aggregation...)
}
rows := &sql.Rows{}
query, args := selector.Query()
if err := bs.driver.Query(ctx, query, args, rows); err != nil {
return err
}
defer rows.Close()
return sql.ScanSlice(rows, v)
}

ent/blacklist_update.go Normal file

@@ -0,0 +1,322 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"context"
"errors"
"fmt"
"polaris/ent/blacklist"
"polaris/ent/predicate"
"polaris/ent/schema"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
)
// BlacklistUpdate is the builder for updating Blacklist entities.
type BlacklistUpdate struct {
config
hooks []Hook
mutation *BlacklistMutation
}
// Where appends a list predicates to the BlacklistUpdate builder.
func (bu *BlacklistUpdate) Where(ps ...predicate.Blacklist) *BlacklistUpdate {
bu.mutation.Where(ps...)
return bu
}
// SetType sets the "type" field.
func (bu *BlacklistUpdate) SetType(b blacklist.Type) *BlacklistUpdate {
bu.mutation.SetType(b)
return bu
}
// SetNillableType sets the "type" field if the given value is not nil.
func (bu *BlacklistUpdate) SetNillableType(b *blacklist.Type) *BlacklistUpdate {
if b != nil {
bu.SetType(*b)
}
return bu
}
// SetValue sets the "value" field.
func (bu *BlacklistUpdate) SetValue(sv schema.BlacklistValue) *BlacklistUpdate {
bu.mutation.SetValue(sv)
return bu
}
// SetNillableValue sets the "value" field if the given value is not nil.
func (bu *BlacklistUpdate) SetNillableValue(sv *schema.BlacklistValue) *BlacklistUpdate {
if sv != nil {
bu.SetValue(*sv)
}
return bu
}
// SetNotes sets the "notes" field.
func (bu *BlacklistUpdate) SetNotes(s string) *BlacklistUpdate {
bu.mutation.SetNotes(s)
return bu
}
// SetNillableNotes sets the "notes" field if the given value is not nil.
func (bu *BlacklistUpdate) SetNillableNotes(s *string) *BlacklistUpdate {
if s != nil {
bu.SetNotes(*s)
}
return bu
}
// ClearNotes clears the value of the "notes" field.
func (bu *BlacklistUpdate) ClearNotes() *BlacklistUpdate {
bu.mutation.ClearNotes()
return bu
}
// Mutation returns the BlacklistMutation object of the builder.
func (bu *BlacklistUpdate) Mutation() *BlacklistMutation {
return bu.mutation
}
// Save executes the query and returns the number of nodes affected by the update operation.
func (bu *BlacklistUpdate) Save(ctx context.Context) (int, error) {
return withHooks(ctx, bu.sqlSave, bu.mutation, bu.hooks)
}
// SaveX is like Save, but panics if an error occurs.
func (bu *BlacklistUpdate) SaveX(ctx context.Context) int {
affected, err := bu.Save(ctx)
if err != nil {
panic(err)
}
return affected
}
// Exec executes the query.
func (bu *BlacklistUpdate) Exec(ctx context.Context) error {
_, err := bu.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (bu *BlacklistUpdate) ExecX(ctx context.Context) {
if err := bu.Exec(ctx); err != nil {
panic(err)
}
}
// check runs all checks and user-defined validators on the builder.
func (bu *BlacklistUpdate) check() error {
if v, ok := bu.mutation.GetType(); ok {
if err := blacklist.TypeValidator(v); err != nil {
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Blacklist.type": %w`, err)}
}
}
return nil
}
func (bu *BlacklistUpdate) sqlSave(ctx context.Context) (n int, err error) {
if err := bu.check(); err != nil {
return n, err
}
_spec := sqlgraph.NewUpdateSpec(blacklist.Table, blacklist.Columns, sqlgraph.NewFieldSpec(blacklist.FieldID, field.TypeInt))
if ps := bu.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
if value, ok := bu.mutation.GetType(); ok {
_spec.SetField(blacklist.FieldType, field.TypeEnum, value)
}
if value, ok := bu.mutation.Value(); ok {
_spec.SetField(blacklist.FieldValue, field.TypeJSON, value)
}
if value, ok := bu.mutation.Notes(); ok {
_spec.SetField(blacklist.FieldNotes, field.TypeString, value)
}
if bu.mutation.NotesCleared() {
_spec.ClearField(blacklist.FieldNotes, field.TypeString)
}
if n, err = sqlgraph.UpdateNodes(ctx, bu.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{blacklist.Label}
} else if sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
return 0, err
}
bu.mutation.done = true
return n, nil
}
// BlacklistUpdateOne is the builder for updating a single Blacklist entity.
type BlacklistUpdateOne struct {
config
fields []string
hooks []Hook
mutation *BlacklistMutation
}
// SetType sets the "type" field.
func (buo *BlacklistUpdateOne) SetType(b blacklist.Type) *BlacklistUpdateOne {
buo.mutation.SetType(b)
return buo
}
// SetNillableType sets the "type" field if the given value is not nil.
func (buo *BlacklistUpdateOne) SetNillableType(b *blacklist.Type) *BlacklistUpdateOne {
if b != nil {
buo.SetType(*b)
}
return buo
}
// SetValue sets the "value" field.
func (buo *BlacklistUpdateOne) SetValue(sv schema.BlacklistValue) *BlacklistUpdateOne {
buo.mutation.SetValue(sv)
return buo
}
// SetNillableValue sets the "value" field if the given value is not nil.
func (buo *BlacklistUpdateOne) SetNillableValue(sv *schema.BlacklistValue) *BlacklistUpdateOne {
if sv != nil {
buo.SetValue(*sv)
}
return buo
}
// SetNotes sets the "notes" field.
func (buo *BlacklistUpdateOne) SetNotes(s string) *BlacklistUpdateOne {
buo.mutation.SetNotes(s)
return buo
}
// SetNillableNotes sets the "notes" field if the given value is not nil.
func (buo *BlacklistUpdateOne) SetNillableNotes(s *string) *BlacklistUpdateOne {
if s != nil {
buo.SetNotes(*s)
}
return buo
}
// ClearNotes clears the value of the "notes" field.
func (buo *BlacklistUpdateOne) ClearNotes() *BlacklistUpdateOne {
buo.mutation.ClearNotes()
return buo
}
// Mutation returns the BlacklistMutation object of the builder.
func (buo *BlacklistUpdateOne) Mutation() *BlacklistMutation {
return buo.mutation
}
// Where appends a list predicates to the BlacklistUpdate builder.
func (buo *BlacklistUpdateOne) Where(ps ...predicate.Blacklist) *BlacklistUpdateOne {
buo.mutation.Where(ps...)
return buo
}
// Select allows selecting one or more fields (columns) of the returned entity.
// The default is selecting all fields defined in the entity schema.
func (buo *BlacklistUpdateOne) Select(field string, fields ...string) *BlacklistUpdateOne {
buo.fields = append([]string{field}, fields...)
return buo
}
// Save executes the query and returns the updated Blacklist entity.
func (buo *BlacklistUpdateOne) Save(ctx context.Context) (*Blacklist, error) {
return withHooks(ctx, buo.sqlSave, buo.mutation, buo.hooks)
}
// SaveX is like Save, but panics if an error occurs.
func (buo *BlacklistUpdateOne) SaveX(ctx context.Context) *Blacklist {
node, err := buo.Save(ctx)
if err != nil {
panic(err)
}
return node
}
// Exec executes the query on the entity.
func (buo *BlacklistUpdateOne) Exec(ctx context.Context) error {
_, err := buo.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (buo *BlacklistUpdateOne) ExecX(ctx context.Context) {
if err := buo.Exec(ctx); err != nil {
panic(err)
}
}
// check runs all checks and user-defined validators on the builder.
func (buo *BlacklistUpdateOne) check() error {
if v, ok := buo.mutation.GetType(); ok {
if err := blacklist.TypeValidator(v); err != nil {
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Blacklist.type": %w`, err)}
}
}
return nil
}
func (buo *BlacklistUpdateOne) sqlSave(ctx context.Context) (_node *Blacklist, err error) {
if err := buo.check(); err != nil {
return _node, err
}
_spec := sqlgraph.NewUpdateSpec(blacklist.Table, blacklist.Columns, sqlgraph.NewFieldSpec(blacklist.FieldID, field.TypeInt))
id, ok := buo.mutation.ID()
if !ok {
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "Blacklist.id" for update`)}
}
_spec.Node.ID.Value = id
if fields := buo.fields; len(fields) > 0 {
_spec.Node.Columns = make([]string, 0, len(fields))
_spec.Node.Columns = append(_spec.Node.Columns, blacklist.FieldID)
for _, f := range fields {
if !blacklist.ValidColumn(f) {
return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
}
if f != blacklist.FieldID {
_spec.Node.Columns = append(_spec.Node.Columns, f)
}
}
}
if ps := buo.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
if value, ok := buo.mutation.GetType(); ok {
_spec.SetField(blacklist.FieldType, field.TypeEnum, value)
}
if value, ok := buo.mutation.Value(); ok {
_spec.SetField(blacklist.FieldValue, field.TypeJSON, value)
}
if value, ok := buo.mutation.Notes(); ok {
_spec.SetField(blacklist.FieldNotes, field.TypeString, value)
}
if buo.mutation.NotesCleared() {
_spec.ClearField(blacklist.FieldNotes, field.TypeString)
}
_node = &Blacklist{config: buo.config}
_spec.Assign = _node.assignValues
_spec.ScanValues = _node.scanValues
if err = sqlgraph.UpdateNode(ctx, buo.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{blacklist.Label}
} else if sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
return nil, err
}
buo.mutation.done = true
return _node, nil
}

ent/client.go

@@ -11,9 +11,11 @@ import (
"polaris/ent/migrate"
"polaris/ent/blacklist"
"polaris/ent/downloadclients"
"polaris/ent/episode"
"polaris/ent/history"
"polaris/ent/importlist"
"polaris/ent/indexers"
"polaris/ent/media"
"polaris/ent/notificationclient"
@@ -31,12 +33,16 @@ type Client struct {
config
// Schema is the client for creating, migrating and dropping schema.
Schema *migrate.Schema
// Blacklist is the client for interacting with the Blacklist builders.
Blacklist *BlacklistClient
// DownloadClients is the client for interacting with the DownloadClients builders.
DownloadClients *DownloadClientsClient
// Episode is the client for interacting with the Episode builders.
Episode *EpisodeClient
// History is the client for interacting with the History builders.
History *HistoryClient
// ImportList is the client for interacting with the ImportList builders.
ImportList *ImportListClient
// Indexers is the client for interacting with the Indexers builders.
Indexers *IndexersClient
// Media is the client for interacting with the Media builders.
@@ -58,9 +64,11 @@ func NewClient(opts ...Option) *Client {
func (c *Client) init() {
c.Schema = migrate.NewSchema(c.driver)
c.Blacklist = NewBlacklistClient(c.config)
c.DownloadClients = NewDownloadClientsClient(c.config)
c.Episode = NewEpisodeClient(c.config)
c.History = NewHistoryClient(c.config)
c.ImportList = NewImportListClient(c.config)
c.Indexers = NewIndexersClient(c.config)
c.Media = NewMediaClient(c.config)
c.NotificationClient = NewNotificationClientClient(c.config)
@@ -158,9 +166,11 @@ func (c *Client) Tx(ctx context.Context) (*Tx, error) {
return &Tx{
ctx: ctx,
config: cfg,
Blacklist: NewBlacklistClient(cfg),
DownloadClients: NewDownloadClientsClient(cfg),
Episode: NewEpisodeClient(cfg),
History: NewHistoryClient(cfg),
ImportList: NewImportListClient(cfg),
Indexers: NewIndexersClient(cfg),
Media: NewMediaClient(cfg),
NotificationClient: NewNotificationClientClient(cfg),
@@ -185,9 +195,11 @@ func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error)
return &Tx{
ctx: ctx,
config: cfg,
Blacklist: NewBlacklistClient(cfg),
DownloadClients: NewDownloadClientsClient(cfg),
Episode: NewEpisodeClient(cfg),
History: NewHistoryClient(cfg),
ImportList: NewImportListClient(cfg),
Indexers: NewIndexersClient(cfg),
Media: NewMediaClient(cfg),
NotificationClient: NewNotificationClientClient(cfg),
@@ -199,7 +211,7 @@ func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error)
// Debug returns a new debug-client. It's used to get verbose logging on specific operations.
//
// client.Debug().
// DownloadClients.
// Blacklist.
// Query().
// Count(ctx)
func (c *Client) Debug() *Client {
@@ -222,8 +234,8 @@ func (c *Client) Close() error {
// In order to add hooks to a specific client, call: `client.Node.Use(...)`.
func (c *Client) Use(hooks ...Hook) {
for _, n := range []interface{ Use(...Hook) }{
c.DownloadClients, c.Episode, c.History, c.Indexers, c.Media,
c.NotificationClient, c.Settings, c.Storage,
c.Blacklist, c.DownloadClients, c.Episode, c.History, c.ImportList, c.Indexers,
c.Media, c.NotificationClient, c.Settings, c.Storage,
} {
n.Use(hooks...)
}
@@ -233,8 +245,8 @@ func (c *Client) Use(hooks ...Hook) {
// In order to add interceptors to a specific client, call: `client.Node.Intercept(...)`.
func (c *Client) Intercept(interceptors ...Interceptor) {
for _, n := range []interface{ Intercept(...Interceptor) }{
c.DownloadClients, c.Episode, c.History, c.Indexers, c.Media,
c.NotificationClient, c.Settings, c.Storage,
c.Blacklist, c.DownloadClients, c.Episode, c.History, c.ImportList, c.Indexers,
c.Media, c.NotificationClient, c.Settings, c.Storage,
} {
n.Intercept(interceptors...)
}
@@ -243,12 +255,16 @@ func (c *Client) Intercept(interceptors ...Interceptor) {
// Mutate implements the ent.Mutator interface.
func (c *Client) Mutate(ctx context.Context, m Mutation) (Value, error) {
switch m := m.(type) {
case *BlacklistMutation:
return c.Blacklist.mutate(ctx, m)
case *DownloadClientsMutation:
return c.DownloadClients.mutate(ctx, m)
case *EpisodeMutation:
return c.Episode.mutate(ctx, m)
case *HistoryMutation:
return c.History.mutate(ctx, m)
case *ImportListMutation:
return c.ImportList.mutate(ctx, m)
case *IndexersMutation:
return c.Indexers.mutate(ctx, m)
case *MediaMutation:
@@ -264,6 +280,139 @@ func (c *Client) Mutate(ctx context.Context, m Mutation) (Value, error) {
}
}
// BlacklistClient is a client for the Blacklist schema.
type BlacklistClient struct {
config
}
// NewBlacklistClient returns a client for the Blacklist from the given config.
func NewBlacklistClient(c config) *BlacklistClient {
return &BlacklistClient{config: c}
}
// Use adds a list of mutation hooks to the hooks stack.
// A call to `Use(f, g, h)` equals to `blacklist.Hooks(f(g(h())))`.
func (c *BlacklistClient) Use(hooks ...Hook) {
c.hooks.Blacklist = append(c.hooks.Blacklist, hooks...)
}
// Intercept adds a list of query interceptors to the interceptors stack.
// A call to `Intercept(f, g, h)` equals to `blacklist.Intercept(f(g(h())))`.
func (c *BlacklistClient) Intercept(interceptors ...Interceptor) {
c.inters.Blacklist = append(c.inters.Blacklist, interceptors...)
}
// Create returns a builder for creating a Blacklist entity.
func (c *BlacklistClient) Create() *BlacklistCreate {
mutation := newBlacklistMutation(c.config, OpCreate)
return &BlacklistCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// CreateBulk returns a builder for creating a bulk of Blacklist entities.
func (c *BlacklistClient) CreateBulk(builders ...*BlacklistCreate) *BlacklistCreateBulk {
return &BlacklistCreateBulk{config: c.config, builders: builders}
}
// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *BlacklistClient) MapCreateBulk(slice any, setFunc func(*BlacklistCreate, int)) *BlacklistCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &BlacklistCreateBulk{err: fmt.Errorf("calling to BlacklistClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*BlacklistCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &BlacklistCreateBulk{config: c.config, builders: builders}
}
// Update returns an update builder for Blacklist.
func (c *BlacklistClient) Update() *BlacklistUpdate {
mutation := newBlacklistMutation(c.config, OpUpdate)
return &BlacklistUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// UpdateOne returns an update builder for the given entity.
func (c *BlacklistClient) UpdateOne(b *Blacklist) *BlacklistUpdateOne {
mutation := newBlacklistMutation(c.config, OpUpdateOne, withBlacklist(b))
return &BlacklistUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// UpdateOneID returns an update builder for the given id.
func (c *BlacklistClient) UpdateOneID(id int) *BlacklistUpdateOne {
mutation := newBlacklistMutation(c.config, OpUpdateOne, withBlacklistID(id))
return &BlacklistUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// Delete returns a delete builder for Blacklist.
func (c *BlacklistClient) Delete() *BlacklistDelete {
mutation := newBlacklistMutation(c.config, OpDelete)
return &BlacklistDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// DeleteOne returns a builder for deleting the given entity.
func (c *BlacklistClient) DeleteOne(b *Blacklist) *BlacklistDeleteOne {
return c.DeleteOneID(b.ID)
}
// DeleteOneID returns a builder for deleting the given entity by its id.
func (c *BlacklistClient) DeleteOneID(id int) *BlacklistDeleteOne {
builder := c.Delete().Where(blacklist.ID(id))
builder.mutation.id = &id
builder.mutation.op = OpDeleteOne
return &BlacklistDeleteOne{builder}
}
// Query returns a query builder for Blacklist.
func (c *BlacklistClient) Query() *BlacklistQuery {
return &BlacklistQuery{
config: c.config,
ctx: &QueryContext{Type: TypeBlacklist},
inters: c.Interceptors(),
}
}
// Get returns a Blacklist entity by its id.
func (c *BlacklistClient) Get(ctx context.Context, id int) (*Blacklist, error) {
return c.Query().Where(blacklist.ID(id)).Only(ctx)
}
// GetX is like Get, but panics if an error occurs.
func (c *BlacklistClient) GetX(ctx context.Context, id int) *Blacklist {
obj, err := c.Get(ctx, id)
if err != nil {
panic(err)
}
return obj
}
// Hooks returns the client hooks.
func (c *BlacklistClient) Hooks() []Hook {
return c.hooks.Blacklist
}
// Interceptors returns the client interceptors.
func (c *BlacklistClient) Interceptors() []Interceptor {
return c.inters.Blacklist
}
func (c *BlacklistClient) mutate(ctx context.Context, m *BlacklistMutation) (Value, error) {
switch m.Op() {
case OpCreate:
return (&BlacklistCreate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
case OpUpdate:
return (&BlacklistUpdate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
case OpUpdateOne:
return (&BlacklistUpdateOne{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
case OpDelete, OpDeleteOne:
return (&BlacklistDelete{config: c.config, hooks: c.Hooks(), mutation: m}).Exec(ctx)
default:
return nil, fmt.Errorf("ent: unknown Blacklist mutation op: %q", m.Op())
}
}
// DownloadClientsClient is a client for the DownloadClients schema.
type DownloadClientsClient struct {
config
@@ -679,6 +828,139 @@ func (c *HistoryClient) mutate(ctx context.Context, m *HistoryMutation) (Value,
}
}
// ImportListClient is a client for the ImportList schema.
type ImportListClient struct {
config
}
// NewImportListClient returns a client for the ImportList from the given config.
func NewImportListClient(c config) *ImportListClient {
return &ImportListClient{config: c}
}
// Use adds a list of mutation hooks to the hooks stack.
// A call to `Use(f, g, h)` equals to `importlist.Hooks(f(g(h())))`.
func (c *ImportListClient) Use(hooks ...Hook) {
c.hooks.ImportList = append(c.hooks.ImportList, hooks...)
}
// Intercept adds a list of query interceptors to the interceptors stack.
// A call to `Intercept(f, g, h)` equals to `importlist.Intercept(f(g(h())))`.
func (c *ImportListClient) Intercept(interceptors ...Interceptor) {
c.inters.ImportList = append(c.inters.ImportList, interceptors...)
}
// Create returns a builder for creating a ImportList entity.
func (c *ImportListClient) Create() *ImportListCreate {
mutation := newImportListMutation(c.config, OpCreate)
return &ImportListCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// CreateBulk returns a builder for creating a bulk of ImportList entities.
func (c *ImportListClient) CreateBulk(builders ...*ImportListCreate) *ImportListCreateBulk {
return &ImportListCreateBulk{config: c.config, builders: builders}
}
// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *ImportListClient) MapCreateBulk(slice any, setFunc func(*ImportListCreate, int)) *ImportListCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &ImportListCreateBulk{err: fmt.Errorf("calling to ImportListClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*ImportListCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &ImportListCreateBulk{config: c.config, builders: builders}
}
// Update returns an update builder for ImportList.
func (c *ImportListClient) Update() *ImportListUpdate {
mutation := newImportListMutation(c.config, OpUpdate)
return &ImportListUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// UpdateOne returns an update builder for the given entity.
func (c *ImportListClient) UpdateOne(il *ImportList) *ImportListUpdateOne {
mutation := newImportListMutation(c.config, OpUpdateOne, withImportList(il))
return &ImportListUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// UpdateOneID returns an update builder for the given id.
func (c *ImportListClient) UpdateOneID(id int) *ImportListUpdateOne {
mutation := newImportListMutation(c.config, OpUpdateOne, withImportListID(id))
return &ImportListUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// Delete returns a delete builder for ImportList.
func (c *ImportListClient) Delete() *ImportListDelete {
mutation := newImportListMutation(c.config, OpDelete)
return &ImportListDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// DeleteOne returns a builder for deleting the given entity.
func (c *ImportListClient) DeleteOne(il *ImportList) *ImportListDeleteOne {
return c.DeleteOneID(il.ID)
}
// DeleteOneID returns a builder for deleting the given entity by its id.
func (c *ImportListClient) DeleteOneID(id int) *ImportListDeleteOne {
builder := c.Delete().Where(importlist.ID(id))
builder.mutation.id = &id
builder.mutation.op = OpDeleteOne
return &ImportListDeleteOne{builder}
}
// Query returns a query builder for ImportList.
func (c *ImportListClient) Query() *ImportListQuery {
return &ImportListQuery{
config: c.config,
ctx: &QueryContext{Type: TypeImportList},
inters: c.Interceptors(),
}
}
// Get returns a ImportList entity by its id.
func (c *ImportListClient) Get(ctx context.Context, id int) (*ImportList, error) {
return c.Query().Where(importlist.ID(id)).Only(ctx)
}
// GetX is like Get, but panics if an error occurs.
func (c *ImportListClient) GetX(ctx context.Context, id int) *ImportList {
obj, err := c.Get(ctx, id)
if err != nil {
panic(err)
}
return obj
}
// Hooks returns the client hooks.
func (c *ImportListClient) Hooks() []Hook {
return c.hooks.ImportList
}
// Interceptors returns the client interceptors.
func (c *ImportListClient) Interceptors() []Interceptor {
return c.inters.ImportList
}
func (c *ImportListClient) mutate(ctx context.Context, m *ImportListMutation) (Value, error) {
switch m.Op() {
case OpCreate:
return (&ImportListCreate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
case OpUpdate:
return (&ImportListUpdate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
case OpUpdateOne:
return (&ImportListUpdateOne{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
case OpDelete, OpDeleteOne:
return (&ImportListDelete{config: c.config, hooks: c.Hooks(), mutation: m}).Exec(ctx)
default:
return nil, fmt.Errorf("ent: unknown ImportList mutation op: %q", m.Op())
}
}
// IndexersClient is a client for the Indexers schema.
type IndexersClient struct {
config
@@ -1363,11 +1645,11 @@ func (c *StorageClient) mutate(ctx context.Context, m *StorageMutation) (Value,
// hooks and interceptors per client, for fast access.
type (
hooks struct {
DownloadClients, Episode, History, Indexers, Media, NotificationClient,
Settings, Storage []ent.Hook
Blacklist, DownloadClients, Episode, History, ImportList, Indexers, Media,
NotificationClient, Settings, Storage []ent.Hook
}
inters struct {
DownloadClients, Episode, History, Indexers, Media, NotificationClient,
Settings, Storage []ent.Interceptor
Blacklist, DownloadClients, Episode, History, ImportList, Indexers, Media,
NotificationClient, Settings, Storage []ent.Interceptor
}
)

ent/downloadclients.go

@@ -21,7 +21,7 @@ type DownloadClients struct {
// Name holds the value of the "name" field.
Name string `json:"name,omitempty"`
// Implementation holds the value of the "implementation" field.
Implementation string `json:"implementation,omitempty"`
Implementation downloadclients.Implementation `json:"implementation,omitempty"`
// URL holds the value of the "url" field.
URL string `json:"url,omitempty"`
// User holds the value of the "user" field.
@@ -30,8 +30,8 @@ type DownloadClients struct {
Password string `json:"password,omitempty"`
// Settings holds the value of the "settings" field.
Settings string `json:"settings,omitempty"`
// Priority holds the value of the "priority" field.
Priority string `json:"priority,omitempty"`
// Priority1 holds the value of the "priority1" field.
Priority1 int `json:"priority1,omitempty"`
// RemoveCompletedDownloads holds the value of the "remove_completed_downloads" field.
RemoveCompletedDownloads bool `json:"remove_completed_downloads,omitempty"`
// RemoveFailedDownloads holds the value of the "remove_failed_downloads" field.
@@ -48,9 +48,9 @@ func (*DownloadClients) scanValues(columns []string) ([]any, error) {
switch columns[i] {
case downloadclients.FieldEnable, downloadclients.FieldRemoveCompletedDownloads, downloadclients.FieldRemoveFailedDownloads:
values[i] = new(sql.NullBool)
case downloadclients.FieldID:
case downloadclients.FieldID, downloadclients.FieldPriority1:
values[i] = new(sql.NullInt64)
case downloadclients.FieldName, downloadclients.FieldImplementation, downloadclients.FieldURL, downloadclients.FieldUser, downloadclients.FieldPassword, downloadclients.FieldSettings, downloadclients.FieldPriority, downloadclients.FieldTags:
case downloadclients.FieldName, downloadclients.FieldImplementation, downloadclients.FieldURL, downloadclients.FieldUser, downloadclients.FieldPassword, downloadclients.FieldSettings, downloadclients.FieldTags:
values[i] = new(sql.NullString)
default:
values[i] = new(sql.UnknownType)
@@ -89,7 +89,7 @@ func (dc *DownloadClients) assignValues(columns []string, values []any) error {
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field implementation", values[i])
} else if value.Valid {
dc.Implementation = value.String
dc.Implementation = downloadclients.Implementation(value.String)
}
case downloadclients.FieldURL:
if value, ok := values[i].(*sql.NullString); !ok {
@@ -115,11 +115,11 @@ func (dc *DownloadClients) assignValues(columns []string, values []any) error {
} else if value.Valid {
dc.Settings = value.String
}
case downloadclients.FieldPriority:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field priority", values[i])
case downloadclients.FieldPriority1:
if value, ok := values[i].(*sql.NullInt64); !ok {
return fmt.Errorf("unexpected type %T for field priority1", values[i])
} else if value.Valid {
dc.Priority = value.String
dc.Priority1 = int(value.Int64)
}
case downloadclients.FieldRemoveCompletedDownloads:
if value, ok := values[i].(*sql.NullBool); !ok {
@@ -182,7 +182,7 @@ func (dc *DownloadClients) String() string {
builder.WriteString(dc.Name)
builder.WriteString(", ")
builder.WriteString("implementation=")
builder.WriteString(dc.Implementation)
builder.WriteString(fmt.Sprintf("%v", dc.Implementation))
builder.WriteString(", ")
builder.WriteString("url=")
builder.WriteString(dc.URL)
@@ -196,8 +196,8 @@ func (dc *DownloadClients) String() string {
builder.WriteString("settings=")
builder.WriteString(dc.Settings)
builder.WriteString(", ")
builder.WriteString("priority=")
builder.WriteString(dc.Priority)
builder.WriteString("priority1=")
builder.WriteString(fmt.Sprintf("%v", dc.Priority1))
builder.WriteString(", ")
builder.WriteString("remove_completed_downloads=")
builder.WriteString(fmt.Sprintf("%v", dc.RemoveCompletedDownloads))

View File

@@ -3,6 +3,8 @@
package downloadclients
import (
"fmt"
"entgo.io/ent/dialect/sql"
)
@@ -25,8 +27,8 @@ const (
FieldPassword = "password"
// FieldSettings holds the string denoting the settings field in the database.
FieldSettings = "settings"
// FieldPriority holds the string denoting the priority field in the database.
FieldPriority = "priority"
// FieldPriority1 holds the string denoting the priority1 field in the database.
FieldPriority1 = "priority1"
// FieldRemoveCompletedDownloads holds the string denoting the remove_completed_downloads field in the database.
FieldRemoveCompletedDownloads = "remove_completed_downloads"
// FieldRemoveFailedDownloads holds the string denoting the remove_failed_downloads field in the database.
@@ -47,7 +49,7 @@ var Columns = []string{
FieldUser,
FieldPassword,
FieldSettings,
FieldPriority,
FieldPriority1,
FieldRemoveCompletedDownloads,
FieldRemoveFailedDownloads,
FieldTags,
@@ -70,8 +72,10 @@ var (
DefaultPassword string
// DefaultSettings holds the default value on creation for the "settings" field.
DefaultSettings string
// DefaultPriority holds the default value on creation for the "priority" field.
DefaultPriority string
// DefaultPriority1 holds the default value on creation for the "priority1" field.
DefaultPriority1 int
// Priority1Validator is a validator for the "priority1" field. It is called by the builders before save.
Priority1Validator func(int) error
// DefaultRemoveCompletedDownloads holds the default value on creation for the "remove_completed_downloads" field.
DefaultRemoveCompletedDownloads bool
// DefaultRemoveFailedDownloads holds the default value on creation for the "remove_failed_downloads" field.
@@ -80,6 +84,29 @@ var (
DefaultTags string
)
// Implementation defines the type for the "implementation" enum field.
type Implementation string
// Implementation values.
const (
ImplementationTransmission Implementation = "transmission"
ImplementationQbittorrent Implementation = "qbittorrent"
)
func (i Implementation) String() string {
return string(i)
}
// ImplementationValidator is a validator for the "implementation" field enum values. It is called by the builders before save.
func ImplementationValidator(i Implementation) error {
switch i {
case ImplementationTransmission, ImplementationQbittorrent:
return nil
default:
return fmt.Errorf("downloadclients: invalid enum value for implementation field: %q", i)
}
}
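A small sketch of how the new enum might be used when mapping a value read from user input; the helper name and the raw string are illustrative.
// assumed imports: "polaris/ent/downloadclients"
func parseImplementation(raw string) (downloadclients.Implementation, error) {
    impl := downloadclients.Implementation(raw)
    // ImplementationValidator rejects anything other than "transmission" or "qbittorrent".
    if err := downloadclients.ImplementationValidator(impl); err != nil {
        return "", err
    }
    return impl, nil
}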
// OrderOption defines the ordering options for the DownloadClients queries.
type OrderOption func(*sql.Selector)
@@ -123,9 +150,9 @@ func BySettings(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldSettings, opts...).ToFunc()
}
// ByPriority orders the results by the priority field.
func ByPriority(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldPriority, opts...).ToFunc()
// ByPriority1 orders the results by the priority1 field.
func ByPriority1(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldPriority1, opts...).ToFunc()
}
// ByRemoveCompletedDownloads orders the results by the remove_completed_downloads field.

View File

@@ -63,11 +63,6 @@ func Name(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldEQ(FieldName, v))
}
// Implementation applies equality check predicate on the "implementation" field. It's identical to ImplementationEQ.
func Implementation(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldEQ(FieldImplementation, v))
}
// URL applies equality check predicate on the "url" field. It's identical to URLEQ.
func URL(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldEQ(FieldURL, v))
@@ -88,9 +83,9 @@ func Settings(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldEQ(FieldSettings, v))
}
// Priority applies equality check predicate on the "priority" field. It's identical to PriorityEQ.
func Priority(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldEQ(FieldPriority, v))
// Priority1 applies equality check predicate on the "priority1" field. It's identical to Priority1EQ.
func Priority1(v int) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldEQ(FieldPriority1, v))
}
// RemoveCompletedDownloads applies equality check predicate on the "remove_completed_downloads" field. It's identical to RemoveCompletedDownloadsEQ.
@@ -184,70 +179,25 @@ func NameContainsFold(v string) predicate.DownloadClients {
}
// ImplementationEQ applies the EQ predicate on the "implementation" field.
func ImplementationEQ(v string) predicate.DownloadClients {
func ImplementationEQ(v Implementation) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldEQ(FieldImplementation, v))
}
// ImplementationNEQ applies the NEQ predicate on the "implementation" field.
func ImplementationNEQ(v string) predicate.DownloadClients {
func ImplementationNEQ(v Implementation) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldNEQ(FieldImplementation, v))
}
// ImplementationIn applies the In predicate on the "implementation" field.
func ImplementationIn(vs ...string) predicate.DownloadClients {
func ImplementationIn(vs ...Implementation) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldIn(FieldImplementation, vs...))
}
// ImplementationNotIn applies the NotIn predicate on the "implementation" field.
func ImplementationNotIn(vs ...string) predicate.DownloadClients {
func ImplementationNotIn(vs ...Implementation) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldNotIn(FieldImplementation, vs...))
}
// ImplementationGT applies the GT predicate on the "implementation" field.
func ImplementationGT(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldGT(FieldImplementation, v))
}
// ImplementationGTE applies the GTE predicate on the "implementation" field.
func ImplementationGTE(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldGTE(FieldImplementation, v))
}
// ImplementationLT applies the LT predicate on the "implementation" field.
func ImplementationLT(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldLT(FieldImplementation, v))
}
// ImplementationLTE applies the LTE predicate on the "implementation" field.
func ImplementationLTE(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldLTE(FieldImplementation, v))
}
// ImplementationContains applies the Contains predicate on the "implementation" field.
func ImplementationContains(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldContains(FieldImplementation, v))
}
// ImplementationHasPrefix applies the HasPrefix predicate on the "implementation" field.
func ImplementationHasPrefix(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldHasPrefix(FieldImplementation, v))
}
// ImplementationHasSuffix applies the HasSuffix predicate on the "implementation" field.
func ImplementationHasSuffix(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldHasSuffix(FieldImplementation, v))
}
// ImplementationEqualFold applies the EqualFold predicate on the "implementation" field.
func ImplementationEqualFold(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldEqualFold(FieldImplementation, v))
}
// ImplementationContainsFold applies the ContainsFold predicate on the "implementation" field.
func ImplementationContainsFold(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldContainsFold(FieldImplementation, v))
}
// URLEQ applies the EQ predicate on the "url" field.
func URLEQ(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldEQ(FieldURL, v))
@@ -508,69 +458,44 @@ func SettingsContainsFold(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldContainsFold(FieldSettings, v))
}
// PriorityEQ applies the EQ predicate on the "priority" field.
func PriorityEQ(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldEQ(FieldPriority, v))
// Priority1EQ applies the EQ predicate on the "priority1" field.
func Priority1EQ(v int) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldEQ(FieldPriority1, v))
}
// PriorityNEQ applies the NEQ predicate on the "priority" field.
func PriorityNEQ(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldNEQ(FieldPriority, v))
// Priority1NEQ applies the NEQ predicate on the "priority1" field.
func Priority1NEQ(v int) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldNEQ(FieldPriority1, v))
}
// PriorityIn applies the In predicate on the "priority" field.
func PriorityIn(vs ...string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldIn(FieldPriority, vs...))
// Priority1In applies the In predicate on the "priority1" field.
func Priority1In(vs ...int) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldIn(FieldPriority1, vs...))
}
// PriorityNotIn applies the NotIn predicate on the "priority" field.
func PriorityNotIn(vs ...string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldNotIn(FieldPriority, vs...))
// Priority1NotIn applies the NotIn predicate on the "priority1" field.
func Priority1NotIn(vs ...int) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldNotIn(FieldPriority1, vs...))
}
// PriorityGT applies the GT predicate on the "priority" field.
func PriorityGT(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldGT(FieldPriority, v))
// Priority1GT applies the GT predicate on the "priority1" field.
func Priority1GT(v int) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldGT(FieldPriority1, v))
}
// PriorityGTE applies the GTE predicate on the "priority" field.
func PriorityGTE(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldGTE(FieldPriority, v))
// Priority1GTE applies the GTE predicate on the "priority1" field.
func Priority1GTE(v int) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldGTE(FieldPriority1, v))
}
// PriorityLT applies the LT predicate on the "priority" field.
func PriorityLT(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldLT(FieldPriority, v))
// Priority1LT applies the LT predicate on the "priority1" field.
func Priority1LT(v int) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldLT(FieldPriority1, v))
}
// PriorityLTE applies the LTE predicate on the "priority" field.
func PriorityLTE(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldLTE(FieldPriority, v))
}
// PriorityContains applies the Contains predicate on the "priority" field.
func PriorityContains(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldContains(FieldPriority, v))
}
// PriorityHasPrefix applies the HasPrefix predicate on the "priority" field.
func PriorityHasPrefix(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldHasPrefix(FieldPriority, v))
}
// PriorityHasSuffix applies the HasSuffix predicate on the "priority" field.
func PriorityHasSuffix(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldHasSuffix(FieldPriority, v))
}
// PriorityEqualFold applies the EqualFold predicate on the "priority" field.
func PriorityEqualFold(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldEqualFold(FieldPriority, v))
}
// PriorityContainsFold applies the ContainsFold predicate on the "priority" field.
func PriorityContainsFold(v string) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldContainsFold(FieldPriority, v))
// Priority1LTE applies the LTE predicate on the "priority1" field.
func Priority1LTE(v int) predicate.DownloadClients {
return predicate.DownloadClients(sql.FieldLTE(FieldPriority1, v))
}
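A query sketch combining the new integer predicates with the ByPriority1 ordering helper; cli and the threshold are illustrative.
// assumed imports: "context", "polaris/ent", "polaris/ent/downloadclients"
func highPriorityClients(ctx context.Context, cli *ent.Client) ([]*ent.DownloadClients, error) {
    return cli.DownloadClients.Query().
        Where(downloadclients.Priority1GTE(10)).
        Order(downloadclients.ByPriority1()).
        All(ctx)
}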
// RemoveCompletedDownloadsEQ applies the EQ predicate on the "remove_completed_downloads" field.

View File

@@ -32,8 +32,8 @@ func (dcc *DownloadClientsCreate) SetName(s string) *DownloadClientsCreate {
}
// SetImplementation sets the "implementation" field.
func (dcc *DownloadClientsCreate) SetImplementation(s string) *DownloadClientsCreate {
dcc.mutation.SetImplementation(s)
func (dcc *DownloadClientsCreate) SetImplementation(d downloadclients.Implementation) *DownloadClientsCreate {
dcc.mutation.SetImplementation(d)
return dcc
}
@@ -85,16 +85,16 @@ func (dcc *DownloadClientsCreate) SetNillableSettings(s *string) *DownloadClient
return dcc
}
// SetPriority sets the "priority" field.
func (dcc *DownloadClientsCreate) SetPriority(s string) *DownloadClientsCreate {
dcc.mutation.SetPriority(s)
// SetPriority1 sets the "priority1" field.
func (dcc *DownloadClientsCreate) SetPriority1(i int) *DownloadClientsCreate {
dcc.mutation.SetPriority1(i)
return dcc
}
// SetNillablePriority sets the "priority" field if the given value is not nil.
func (dcc *DownloadClientsCreate) SetNillablePriority(s *string) *DownloadClientsCreate {
if s != nil {
dcc.SetPriority(*s)
// SetNillablePriority1 sets the "priority1" field if the given value is not nil.
func (dcc *DownloadClientsCreate) SetNillablePriority1(i *int) *DownloadClientsCreate {
if i != nil {
dcc.SetPriority1(*i)
}
return dcc
}
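A creation sketch using the enum implementation and the renamed integer priority. The values are placeholders, and the remaining setters used here (SetURL, SetUser, SetPassword) are the standard generated ones for this schema's other fields.
// assumed imports: "context", "polaris/ent", "polaris/ent/downloadclients"
func addQbittorrent(ctx context.Context, cli *ent.Client) (*ent.DownloadClients, error) {
    return cli.DownloadClients.Create().
        SetName("qb-local").
        SetImplementation(downloadclients.ImplementationQbittorrent).
        SetURL("http://127.0.0.1:8080").
        SetUser("admin").
        SetPassword("adminadmin").
        SetPriority1(1).
        Save(ctx)
}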
@@ -188,9 +188,9 @@ func (dcc *DownloadClientsCreate) defaults() {
v := downloadclients.DefaultSettings
dcc.mutation.SetSettings(v)
}
if _, ok := dcc.mutation.Priority(); !ok {
v := downloadclients.DefaultPriority
dcc.mutation.SetPriority(v)
if _, ok := dcc.mutation.Priority1(); !ok {
v := downloadclients.DefaultPriority1
dcc.mutation.SetPriority1(v)
}
if _, ok := dcc.mutation.RemoveCompletedDownloads(); !ok {
v := downloadclients.DefaultRemoveCompletedDownloads
@@ -217,6 +217,11 @@ func (dcc *DownloadClientsCreate) check() error {
if _, ok := dcc.mutation.Implementation(); !ok {
return &ValidationError{Name: "implementation", err: errors.New(`ent: missing required field "DownloadClients.implementation"`)}
}
if v, ok := dcc.mutation.Implementation(); ok {
if err := downloadclients.ImplementationValidator(v); err != nil {
return &ValidationError{Name: "implementation", err: fmt.Errorf(`ent: validator failed for field "DownloadClients.implementation": %w`, err)}
}
}
if _, ok := dcc.mutation.URL(); !ok {
return &ValidationError{Name: "url", err: errors.New(`ent: missing required field "DownloadClients.url"`)}
}
@@ -229,8 +234,13 @@ func (dcc *DownloadClientsCreate) check() error {
if _, ok := dcc.mutation.Settings(); !ok {
return &ValidationError{Name: "settings", err: errors.New(`ent: missing required field "DownloadClients.settings"`)}
}
if _, ok := dcc.mutation.Priority(); !ok {
return &ValidationError{Name: "priority", err: errors.New(`ent: missing required field "DownloadClients.priority"`)}
if _, ok := dcc.mutation.Priority1(); !ok {
return &ValidationError{Name: "priority1", err: errors.New(`ent: missing required field "DownloadClients.priority1"`)}
}
if v, ok := dcc.mutation.Priority1(); ok {
if err := downloadclients.Priority1Validator(v); err != nil {
return &ValidationError{Name: "priority1", err: fmt.Errorf(`ent: validator failed for field "DownloadClients.priority1": %w`, err)}
}
}
if _, ok := dcc.mutation.RemoveCompletedDownloads(); !ok {
return &ValidationError{Name: "remove_completed_downloads", err: errors.New(`ent: missing required field "DownloadClients.remove_completed_downloads"`)}
@@ -276,7 +286,7 @@ func (dcc *DownloadClientsCreate) createSpec() (*DownloadClients, *sqlgraph.Crea
_node.Name = value
}
if value, ok := dcc.mutation.Implementation(); ok {
_spec.SetField(downloadclients.FieldImplementation, field.TypeString, value)
_spec.SetField(downloadclients.FieldImplementation, field.TypeEnum, value)
_node.Implementation = value
}
if value, ok := dcc.mutation.URL(); ok {
@@ -295,9 +305,9 @@ func (dcc *DownloadClientsCreate) createSpec() (*DownloadClients, *sqlgraph.Crea
_spec.SetField(downloadclients.FieldSettings, field.TypeString, value)
_node.Settings = value
}
if value, ok := dcc.mutation.Priority(); ok {
_spec.SetField(downloadclients.FieldPriority, field.TypeString, value)
_node.Priority = value
if value, ok := dcc.mutation.Priority1(); ok {
_spec.SetField(downloadclients.FieldPriority1, field.TypeInt, value)
_node.Priority1 = value
}
if value, ok := dcc.mutation.RemoveCompletedDownloads(); ok {
_spec.SetField(downloadclients.FieldRemoveCompletedDownloads, field.TypeBool, value)

View File

@@ -56,15 +56,15 @@ func (dcu *DownloadClientsUpdate) SetNillableName(s *string) *DownloadClientsUpd
}
// SetImplementation sets the "implementation" field.
func (dcu *DownloadClientsUpdate) SetImplementation(s string) *DownloadClientsUpdate {
dcu.mutation.SetImplementation(s)
func (dcu *DownloadClientsUpdate) SetImplementation(d downloadclients.Implementation) *DownloadClientsUpdate {
dcu.mutation.SetImplementation(d)
return dcu
}
// SetNillableImplementation sets the "implementation" field if the given value is not nil.
func (dcu *DownloadClientsUpdate) SetNillableImplementation(s *string) *DownloadClientsUpdate {
if s != nil {
dcu.SetImplementation(*s)
func (dcu *DownloadClientsUpdate) SetNillableImplementation(d *downloadclients.Implementation) *DownloadClientsUpdate {
if d != nil {
dcu.SetImplementation(*d)
}
return dcu
}
@@ -125,20 +125,27 @@ func (dcu *DownloadClientsUpdate) SetNillableSettings(s *string) *DownloadClient
return dcu
}
// SetPriority sets the "priority" field.
func (dcu *DownloadClientsUpdate) SetPriority(s string) *DownloadClientsUpdate {
dcu.mutation.SetPriority(s)
// SetPriority1 sets the "priority1" field.
func (dcu *DownloadClientsUpdate) SetPriority1(i int) *DownloadClientsUpdate {
dcu.mutation.ResetPriority1()
dcu.mutation.SetPriority1(i)
return dcu
}
// SetNillablePriority sets the "priority" field if the given value is not nil.
func (dcu *DownloadClientsUpdate) SetNillablePriority(s *string) *DownloadClientsUpdate {
if s != nil {
dcu.SetPriority(*s)
// SetNillablePriority1 sets the "priority1" field if the given value is not nil.
func (dcu *DownloadClientsUpdate) SetNillablePriority1(i *int) *DownloadClientsUpdate {
if i != nil {
dcu.SetPriority1(*i)
}
return dcu
}
// AddPriority1 adds i to the "priority1" field.
func (dcu *DownloadClientsUpdate) AddPriority1(i int) *DownloadClientsUpdate {
dcu.mutation.AddPriority1(i)
return dcu
}
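An update sketch: AddPriority1 adds the given delta to the stored value (applied via the AddField path further down in this diff), while SetPriority1 overwrites it. cli and the id are illustrative.
// assumed imports: "context", "polaris/ent"
func bumpClientPriority(ctx context.Context, cli *ent.Client, id int) error {
    // UpdateOneID is the standard generated per-row update builder.
    return cli.DownloadClients.UpdateOneID(id).
        AddPriority1(1).
        Exec(ctx)
}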
// SetRemoveCompletedDownloads sets the "remove_completed_downloads" field.
func (dcu *DownloadClientsUpdate) SetRemoveCompletedDownloads(b bool) *DownloadClientsUpdate {
dcu.mutation.SetRemoveCompletedDownloads(b)
@@ -213,7 +220,25 @@ func (dcu *DownloadClientsUpdate) ExecX(ctx context.Context) {
}
}
// check runs all checks and user-defined validators on the builder.
func (dcu *DownloadClientsUpdate) check() error {
if v, ok := dcu.mutation.Implementation(); ok {
if err := downloadclients.ImplementationValidator(v); err != nil {
return &ValidationError{Name: "implementation", err: fmt.Errorf(`ent: validator failed for field "DownloadClients.implementation": %w`, err)}
}
}
if v, ok := dcu.mutation.Priority1(); ok {
if err := downloadclients.Priority1Validator(v); err != nil {
return &ValidationError{Name: "priority1", err: fmt.Errorf(`ent: validator failed for field "DownloadClients.priority1": %w`, err)}
}
}
return nil
}
func (dcu *DownloadClientsUpdate) sqlSave(ctx context.Context) (n int, err error) {
if err := dcu.check(); err != nil {
return n, err
}
_spec := sqlgraph.NewUpdateSpec(downloadclients.Table, downloadclients.Columns, sqlgraph.NewFieldSpec(downloadclients.FieldID, field.TypeInt))
if ps := dcu.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
@@ -229,7 +254,7 @@ func (dcu *DownloadClientsUpdate) sqlSave(ctx context.Context) (n int, err error
_spec.SetField(downloadclients.FieldName, field.TypeString, value)
}
if value, ok := dcu.mutation.Implementation(); ok {
_spec.SetField(downloadclients.FieldImplementation, field.TypeString, value)
_spec.SetField(downloadclients.FieldImplementation, field.TypeEnum, value)
}
if value, ok := dcu.mutation.URL(); ok {
_spec.SetField(downloadclients.FieldURL, field.TypeString, value)
@@ -243,8 +268,11 @@ func (dcu *DownloadClientsUpdate) sqlSave(ctx context.Context) (n int, err error
if value, ok := dcu.mutation.Settings(); ok {
_spec.SetField(downloadclients.FieldSettings, field.TypeString, value)
}
if value, ok := dcu.mutation.Priority(); ok {
_spec.SetField(downloadclients.FieldPriority, field.TypeString, value)
if value, ok := dcu.mutation.Priority1(); ok {
_spec.SetField(downloadclients.FieldPriority1, field.TypeInt, value)
}
if value, ok := dcu.mutation.AddedPriority1(); ok {
_spec.AddField(downloadclients.FieldPriority1, field.TypeInt, value)
}
if value, ok := dcu.mutation.RemoveCompletedDownloads(); ok {
_spec.SetField(downloadclients.FieldRemoveCompletedDownloads, field.TypeBool, value)
@@ -304,15 +332,15 @@ func (dcuo *DownloadClientsUpdateOne) SetNillableName(s *string) *DownloadClient
}
// SetImplementation sets the "implementation" field.
func (dcuo *DownloadClientsUpdateOne) SetImplementation(s string) *DownloadClientsUpdateOne {
dcuo.mutation.SetImplementation(s)
func (dcuo *DownloadClientsUpdateOne) SetImplementation(d downloadclients.Implementation) *DownloadClientsUpdateOne {
dcuo.mutation.SetImplementation(d)
return dcuo
}
// SetNillableImplementation sets the "implementation" field if the given value is not nil.
func (dcuo *DownloadClientsUpdateOne) SetNillableImplementation(s *string) *DownloadClientsUpdateOne {
if s != nil {
dcuo.SetImplementation(*s)
func (dcuo *DownloadClientsUpdateOne) SetNillableImplementation(d *downloadclients.Implementation) *DownloadClientsUpdateOne {
if d != nil {
dcuo.SetImplementation(*d)
}
return dcuo
}
@@ -373,20 +401,27 @@ func (dcuo *DownloadClientsUpdateOne) SetNillableSettings(s *string) *DownloadCl
return dcuo
}
// SetPriority sets the "priority" field.
func (dcuo *DownloadClientsUpdateOne) SetPriority(s string) *DownloadClientsUpdateOne {
dcuo.mutation.SetPriority(s)
// SetPriority1 sets the "priority1" field.
func (dcuo *DownloadClientsUpdateOne) SetPriority1(i int) *DownloadClientsUpdateOne {
dcuo.mutation.ResetPriority1()
dcuo.mutation.SetPriority1(i)
return dcuo
}
// SetNillablePriority sets the "priority" field if the given value is not nil.
func (dcuo *DownloadClientsUpdateOne) SetNillablePriority(s *string) *DownloadClientsUpdateOne {
if s != nil {
dcuo.SetPriority(*s)
// SetNillablePriority1 sets the "priority1" field if the given value is not nil.
func (dcuo *DownloadClientsUpdateOne) SetNillablePriority1(i *int) *DownloadClientsUpdateOne {
if i != nil {
dcuo.SetPriority1(*i)
}
return dcuo
}
// AddPriority1 adds i to the "priority1" field.
func (dcuo *DownloadClientsUpdateOne) AddPriority1(i int) *DownloadClientsUpdateOne {
dcuo.mutation.AddPriority1(i)
return dcuo
}
// SetRemoveCompletedDownloads sets the "remove_completed_downloads" field.
func (dcuo *DownloadClientsUpdateOne) SetRemoveCompletedDownloads(b bool) *DownloadClientsUpdateOne {
dcuo.mutation.SetRemoveCompletedDownloads(b)
@@ -474,7 +509,25 @@ func (dcuo *DownloadClientsUpdateOne) ExecX(ctx context.Context) {
}
}
// check runs all checks and user-defined validators on the builder.
func (dcuo *DownloadClientsUpdateOne) check() error {
if v, ok := dcuo.mutation.Implementation(); ok {
if err := downloadclients.ImplementationValidator(v); err != nil {
return &ValidationError{Name: "implementation", err: fmt.Errorf(`ent: validator failed for field "DownloadClients.implementation": %w`, err)}
}
}
if v, ok := dcuo.mutation.Priority1(); ok {
if err := downloadclients.Priority1Validator(v); err != nil {
return &ValidationError{Name: "priority1", err: fmt.Errorf(`ent: validator failed for field "DownloadClients.priority1": %w`, err)}
}
}
return nil
}
func (dcuo *DownloadClientsUpdateOne) sqlSave(ctx context.Context) (_node *DownloadClients, err error) {
if err := dcuo.check(); err != nil {
return _node, err
}
_spec := sqlgraph.NewUpdateSpec(downloadclients.Table, downloadclients.Columns, sqlgraph.NewFieldSpec(downloadclients.FieldID, field.TypeInt))
id, ok := dcuo.mutation.ID()
if !ok {
@@ -507,7 +560,7 @@ func (dcuo *DownloadClientsUpdateOne) sqlSave(ctx context.Context) (_node *Downl
_spec.SetField(downloadclients.FieldName, field.TypeString, value)
}
if value, ok := dcuo.mutation.Implementation(); ok {
_spec.SetField(downloadclients.FieldImplementation, field.TypeString, value)
_spec.SetField(downloadclients.FieldImplementation, field.TypeEnum, value)
}
if value, ok := dcuo.mutation.URL(); ok {
_spec.SetField(downloadclients.FieldURL, field.TypeString, value)
@@ -521,8 +574,11 @@ func (dcuo *DownloadClientsUpdateOne) sqlSave(ctx context.Context) (_node *Downl
if value, ok := dcuo.mutation.Settings(); ok {
_spec.SetField(downloadclients.FieldSettings, field.TypeString, value)
}
if value, ok := dcuo.mutation.Priority(); ok {
_spec.SetField(downloadclients.FieldPriority, field.TypeString, value)
if value, ok := dcuo.mutation.Priority1(); ok {
_spec.SetField(downloadclients.FieldPriority1, field.TypeInt, value)
}
if value, ok := dcuo.mutation.AddedPriority1(); ok {
_spec.AddField(downloadclients.FieldPriority1, field.TypeInt, value)
}
if value, ok := dcuo.mutation.RemoveCompletedDownloads(); ok {
_spec.SetField(downloadclients.FieldRemoveCompletedDownloads, field.TypeBool, value)

View File

@@ -6,9 +6,11 @@ import (
"context"
"errors"
"fmt"
"polaris/ent/blacklist"
"polaris/ent/downloadclients"
"polaris/ent/episode"
"polaris/ent/history"
"polaris/ent/importlist"
"polaris/ent/indexers"
"polaris/ent/media"
"polaris/ent/notificationclient"
@@ -80,9 +82,11 @@ var (
func checkColumn(table, column string) error {
initCheck.Do(func() {
columnCheck = sql.NewColumnCheck(map[string]func(string) bool{
blacklist.Table: blacklist.ValidColumn,
downloadclients.Table: downloadclients.ValidColumn,
episode.Table: episode.ValidColumn,
history.Table: history.ValidColumn,
importlist.Table: importlist.ValidColumn,
indexers.Table: indexers.ValidColumn,
media.Table: media.ValidColumn,
notificationclient.Table: notificationclient.ValidColumn,

View File

@@ -31,6 +31,10 @@ type Episode struct {
AirDate string `json:"air_date,omitempty"`
// Status holds the value of the "status" field.
Status episode.Status `json:"status,omitempty"`
// Monitored holds the value of the "monitored" field.
Monitored bool `json:"monitored"`
// TargetFile holds the value of the "target_file" field.
TargetFile string `json:"target_file,omitempty"`
// Edges holds the relations/edges for other nodes in the graph.
// The values are being populated by the EpisodeQuery when eager-loading is set.
Edges EpisodeEdges `json:"edges"`
@@ -62,9 +66,11 @@ func (*Episode) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case episode.FieldMonitored:
values[i] = new(sql.NullBool)
case episode.FieldID, episode.FieldMediaID, episode.FieldSeasonNumber, episode.FieldEpisodeNumber:
values[i] = new(sql.NullInt64)
case episode.FieldTitle, episode.FieldOverview, episode.FieldAirDate, episode.FieldStatus:
case episode.FieldTitle, episode.FieldOverview, episode.FieldAirDate, episode.FieldStatus, episode.FieldTargetFile:
values[i] = new(sql.NullString)
default:
values[i] = new(sql.UnknownType)
@@ -129,6 +135,18 @@ func (e *Episode) assignValues(columns []string, values []any) error {
} else if value.Valid {
e.Status = episode.Status(value.String)
}
case episode.FieldMonitored:
if value, ok := values[i].(*sql.NullBool); !ok {
return fmt.Errorf("unexpected type %T for field monitored", values[i])
} else if value.Valid {
e.Monitored = value.Bool
}
case episode.FieldTargetFile:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field target_file", values[i])
} else if value.Valid {
e.TargetFile = value.String
}
default:
e.selectValues.Set(columns[i], values[i])
}
@@ -190,6 +208,12 @@ func (e *Episode) String() string {
builder.WriteString(", ")
builder.WriteString("status=")
builder.WriteString(fmt.Sprintf("%v", e.Status))
builder.WriteString(", ")
builder.WriteString("monitored=")
builder.WriteString(fmt.Sprintf("%v", e.Monitored))
builder.WriteString(", ")
builder.WriteString("target_file=")
builder.WriteString(e.TargetFile)
builder.WriteByte(')')
return builder.String()
}

View File

@@ -28,6 +28,10 @@ const (
FieldAirDate = "air_date"
// FieldStatus holds the string denoting the status field in the database.
FieldStatus = "status"
// FieldMonitored holds the string denoting the monitored field in the database.
FieldMonitored = "monitored"
// FieldTargetFile holds the string denoting the target_file field in the database.
FieldTargetFile = "target_file"
// EdgeMedia holds the string denoting the media edge name in mutations.
EdgeMedia = "media"
// Table holds the table name of the episode in the database.
@@ -51,6 +55,8 @@ var Columns = []string{
FieldOverview,
FieldAirDate,
FieldStatus,
FieldMonitored,
FieldTargetFile,
}
// ValidColumn reports if the column name is valid (part of the table columns).
@@ -63,6 +69,11 @@ func ValidColumn(column string) bool {
return false
}
var (
// DefaultMonitored holds the default value on creation for the "monitored" field.
DefaultMonitored bool
)
// Status defines the type for the "status" enum field.
type Status string
@@ -133,6 +144,16 @@ func ByStatus(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldStatus, opts...).ToFunc()
}
// ByMonitored orders the results by the monitored field.
func ByMonitored(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldMonitored, opts...).ToFunc()
}
// ByTargetFile orders the results by the target_file field.
func ByTargetFile(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldTargetFile, opts...).ToFunc()
}
// ByMediaField orders the results by media field.
func ByMediaField(field string, opts ...sql.OrderTermOption) OrderOption {
return func(s *sql.Selector) {

View File

@@ -84,6 +84,16 @@ func AirDate(v string) predicate.Episode {
return predicate.Episode(sql.FieldEQ(FieldAirDate, v))
}
// Monitored applies equality check predicate on the "monitored" field. It's identical to MonitoredEQ.
func Monitored(v bool) predicate.Episode {
return predicate.Episode(sql.FieldEQ(FieldMonitored, v))
}
// TargetFile applies equality check predicate on the "target_file" field. It's identical to TargetFileEQ.
func TargetFile(v string) predicate.Episode {
return predicate.Episode(sql.FieldEQ(FieldTargetFile, v))
}
// MediaIDEQ applies the EQ predicate on the "media_id" field.
func MediaIDEQ(v int) predicate.Episode {
return predicate.Episode(sql.FieldEQ(FieldMediaID, v))
@@ -409,6 +419,91 @@ func StatusNotIn(vs ...Status) predicate.Episode {
return predicate.Episode(sql.FieldNotIn(FieldStatus, vs...))
}
// MonitoredEQ applies the EQ predicate on the "monitored" field.
func MonitoredEQ(v bool) predicate.Episode {
return predicate.Episode(sql.FieldEQ(FieldMonitored, v))
}
// MonitoredNEQ applies the NEQ predicate on the "monitored" field.
func MonitoredNEQ(v bool) predicate.Episode {
return predicate.Episode(sql.FieldNEQ(FieldMonitored, v))
}
// TargetFileEQ applies the EQ predicate on the "target_file" field.
func TargetFileEQ(v string) predicate.Episode {
return predicate.Episode(sql.FieldEQ(FieldTargetFile, v))
}
// TargetFileNEQ applies the NEQ predicate on the "target_file" field.
func TargetFileNEQ(v string) predicate.Episode {
return predicate.Episode(sql.FieldNEQ(FieldTargetFile, v))
}
// TargetFileIn applies the In predicate on the "target_file" field.
func TargetFileIn(vs ...string) predicate.Episode {
return predicate.Episode(sql.FieldIn(FieldTargetFile, vs...))
}
// TargetFileNotIn applies the NotIn predicate on the "target_file" field.
func TargetFileNotIn(vs ...string) predicate.Episode {
return predicate.Episode(sql.FieldNotIn(FieldTargetFile, vs...))
}
// TargetFileGT applies the GT predicate on the "target_file" field.
func TargetFileGT(v string) predicate.Episode {
return predicate.Episode(sql.FieldGT(FieldTargetFile, v))
}
// TargetFileGTE applies the GTE predicate on the "target_file" field.
func TargetFileGTE(v string) predicate.Episode {
return predicate.Episode(sql.FieldGTE(FieldTargetFile, v))
}
// TargetFileLT applies the LT predicate on the "target_file" field.
func TargetFileLT(v string) predicate.Episode {
return predicate.Episode(sql.FieldLT(FieldTargetFile, v))
}
// TargetFileLTE applies the LTE predicate on the "target_file" field.
func TargetFileLTE(v string) predicate.Episode {
return predicate.Episode(sql.FieldLTE(FieldTargetFile, v))
}
// TargetFileContains applies the Contains predicate on the "target_file" field.
func TargetFileContains(v string) predicate.Episode {
return predicate.Episode(sql.FieldContains(FieldTargetFile, v))
}
// TargetFileHasPrefix applies the HasPrefix predicate on the "target_file" field.
func TargetFileHasPrefix(v string) predicate.Episode {
return predicate.Episode(sql.FieldHasPrefix(FieldTargetFile, v))
}
// TargetFileHasSuffix applies the HasSuffix predicate on the "target_file" field.
func TargetFileHasSuffix(v string) predicate.Episode {
return predicate.Episode(sql.FieldHasSuffix(FieldTargetFile, v))
}
// TargetFileIsNil applies the IsNil predicate on the "target_file" field.
func TargetFileIsNil() predicate.Episode {
return predicate.Episode(sql.FieldIsNull(FieldTargetFile))
}
// TargetFileNotNil applies the NotNil predicate on the "target_file" field.
func TargetFileNotNil() predicate.Episode {
return predicate.Episode(sql.FieldNotNull(FieldTargetFile))
}
// TargetFileEqualFold applies the EqualFold predicate on the "target_file" field.
func TargetFileEqualFold(v string) predicate.Episode {
return predicate.Episode(sql.FieldEqualFold(FieldTargetFile, v))
}
// TargetFileContainsFold applies the ContainsFold predicate on the "target_file" field.
func TargetFileContainsFold(v string) predicate.Episode {
return predicate.Episode(sql.FieldContainsFold(FieldTargetFile, v))
}
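A query sketch combining the new episode fields: monitored episodes that have not yet been assigned a target file. cli is an assumed opened client.
// assumed imports: "context", "polaris/ent", "polaris/ent/episode"
func monitoredWithoutFile(ctx context.Context, cli *ent.Client) ([]*ent.Episode, error) {
    return cli.Episode.Query().
        Where(
            episode.Monitored(true),
            episode.TargetFileIsNil(),
        ).
        All(ctx)
}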
// HasMedia applies the HasEdge predicate on the "media" edge.
func HasMedia() predicate.Episode {
return predicate.Episode(func(s *sql.Selector) {

View File

@@ -78,6 +78,34 @@ func (ec *EpisodeCreate) SetNillableStatus(e *episode.Status) *EpisodeCreate {
return ec
}
// SetMonitored sets the "monitored" field.
func (ec *EpisodeCreate) SetMonitored(b bool) *EpisodeCreate {
ec.mutation.SetMonitored(b)
return ec
}
// SetNillableMonitored sets the "monitored" field if the given value is not nil.
func (ec *EpisodeCreate) SetNillableMonitored(b *bool) *EpisodeCreate {
if b != nil {
ec.SetMonitored(*b)
}
return ec
}
// SetTargetFile sets the "target_file" field.
func (ec *EpisodeCreate) SetTargetFile(s string) *EpisodeCreate {
ec.mutation.SetTargetFile(s)
return ec
}
// SetNillableTargetFile sets the "target_file" field if the given value is not nil.
func (ec *EpisodeCreate) SetNillableTargetFile(s *string) *EpisodeCreate {
if s != nil {
ec.SetTargetFile(*s)
}
return ec
}
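A creation sketch exercising the new setters; the values are placeholders, the other setters used here are the standard generated ones for the episode schema, and further required fields may need to be set as well.
// assumed imports: "context", "polaris/ent"
func addEpisode(ctx context.Context, cli *ent.Client) (*ent.Episode, error) {
    return cli.Episode.Create().
        SetMediaID(42).
        SetSeasonNumber(1).
        SetEpisodeNumber(3).
        SetTitle("Pilot").
        SetMonitored(true).
        // target_file is optional; it stays unset until a file is actually imported.
        Save(ctx)
}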
// SetMedia sets the "media" edge to the Media entity.
func (ec *EpisodeCreate) SetMedia(m *Media) *EpisodeCreate {
return ec.SetMediaID(m.ID)
@@ -122,6 +150,10 @@ func (ec *EpisodeCreate) defaults() {
v := episode.DefaultStatus
ec.mutation.SetStatus(v)
}
if _, ok := ec.mutation.Monitored(); !ok {
v := episode.DefaultMonitored
ec.mutation.SetMonitored(v)
}
}
// check runs all checks and user-defined validators on the builder.
@@ -149,6 +181,9 @@ func (ec *EpisodeCreate) check() error {
return &ValidationError{Name: "status", err: fmt.Errorf(`ent: validator failed for field "Episode.status": %w`, err)}
}
}
if _, ok := ec.mutation.Monitored(); !ok {
return &ValidationError{Name: "monitored", err: errors.New(`ent: missing required field "Episode.monitored"`)}
}
return nil
}
@@ -199,6 +234,14 @@ func (ec *EpisodeCreate) createSpec() (*Episode, *sqlgraph.CreateSpec) {
_spec.SetField(episode.FieldStatus, field.TypeEnum, value)
_node.Status = value
}
if value, ok := ec.mutation.Monitored(); ok {
_spec.SetField(episode.FieldMonitored, field.TypeBool, value)
_node.Monitored = value
}
if value, ok := ec.mutation.TargetFile(); ok {
_spec.SetField(episode.FieldTargetFile, field.TypeString, value)
_node.TargetFile = value
}
if nodes := ec.mutation.MediaIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,

View File

@@ -146,6 +146,40 @@ func (eu *EpisodeUpdate) SetNillableStatus(e *episode.Status) *EpisodeUpdate {
return eu
}
// SetMonitored sets the "monitored" field.
func (eu *EpisodeUpdate) SetMonitored(b bool) *EpisodeUpdate {
eu.mutation.SetMonitored(b)
return eu
}
// SetNillableMonitored sets the "monitored" field if the given value is not nil.
func (eu *EpisodeUpdate) SetNillableMonitored(b *bool) *EpisodeUpdate {
if b != nil {
eu.SetMonitored(*b)
}
return eu
}
// SetTargetFile sets the "target_file" field.
func (eu *EpisodeUpdate) SetTargetFile(s string) *EpisodeUpdate {
eu.mutation.SetTargetFile(s)
return eu
}
// SetNillableTargetFile sets the "target_file" field if the given value is not nil.
func (eu *EpisodeUpdate) SetNillableTargetFile(s *string) *EpisodeUpdate {
if s != nil {
eu.SetTargetFile(*s)
}
return eu
}
// ClearTargetFile clears the value of the "target_file" field.
func (eu *EpisodeUpdate) ClearTargetFile() *EpisodeUpdate {
eu.mutation.ClearTargetFile()
return eu
}
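An update sketch: record the imported file for an episode, or clear it again when the file is removed. cli, id, and path are illustrative.
// assumed imports: "context", "polaris/ent"
func markEpisodeImported(ctx context.Context, cli *ent.Client, id int, path string) error {
    return cli.Episode.UpdateOneID(id).SetTargetFile(path).Exec(ctx)
}

func resetEpisodeFile(ctx context.Context, cli *ent.Client, id int) error {
    return cli.Episode.UpdateOneID(id).ClearTargetFile().SetMonitored(true).Exec(ctx)
}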
// SetMedia sets the "media" edge to the Media entity.
func (eu *EpisodeUpdate) SetMedia(m *Media) *EpisodeUpdate {
return eu.SetMediaID(m.ID)
@@ -235,6 +269,15 @@ func (eu *EpisodeUpdate) sqlSave(ctx context.Context) (n int, err error) {
if value, ok := eu.mutation.Status(); ok {
_spec.SetField(episode.FieldStatus, field.TypeEnum, value)
}
if value, ok := eu.mutation.Monitored(); ok {
_spec.SetField(episode.FieldMonitored, field.TypeBool, value)
}
if value, ok := eu.mutation.TargetFile(); ok {
_spec.SetField(episode.FieldTargetFile, field.TypeString, value)
}
if eu.mutation.TargetFileCleared() {
_spec.ClearField(episode.FieldTargetFile, field.TypeString)
}
if eu.mutation.MediaCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
@@ -402,6 +445,40 @@ func (euo *EpisodeUpdateOne) SetNillableStatus(e *episode.Status) *EpisodeUpdate
return euo
}
// SetMonitored sets the "monitored" field.
func (euo *EpisodeUpdateOne) SetMonitored(b bool) *EpisodeUpdateOne {
euo.mutation.SetMonitored(b)
return euo
}
// SetNillableMonitored sets the "monitored" field if the given value is not nil.
func (euo *EpisodeUpdateOne) SetNillableMonitored(b *bool) *EpisodeUpdateOne {
if b != nil {
euo.SetMonitored(*b)
}
return euo
}
// SetTargetFile sets the "target_file" field.
func (euo *EpisodeUpdateOne) SetTargetFile(s string) *EpisodeUpdateOne {
euo.mutation.SetTargetFile(s)
return euo
}
// SetNillableTargetFile sets the "target_file" field if the given value is not nil.
func (euo *EpisodeUpdateOne) SetNillableTargetFile(s *string) *EpisodeUpdateOne {
if s != nil {
euo.SetTargetFile(*s)
}
return euo
}
// ClearTargetFile clears the value of the "target_file" field.
func (euo *EpisodeUpdateOne) ClearTargetFile() *EpisodeUpdateOne {
euo.mutation.ClearTargetFile()
return euo
}
// SetMedia sets the "media" edge to the Media entity.
func (euo *EpisodeUpdateOne) SetMedia(m *Media) *EpisodeUpdateOne {
return euo.SetMediaID(m.ID)
@@ -521,6 +598,15 @@ func (euo *EpisodeUpdateOne) sqlSave(ctx context.Context) (_node *Episode, err e
if value, ok := euo.mutation.Status(); ok {
_spec.SetField(episode.FieldStatus, field.TypeEnum, value)
}
if value, ok := euo.mutation.Monitored(); ok {
_spec.SetField(episode.FieldMonitored, field.TypeBool, value)
}
if value, ok := euo.mutation.TargetFile(); ok {
_spec.SetField(episode.FieldTargetFile, field.TypeString, value)
}
if euo.mutation.TargetFileCleared() {
_spec.ClearField(episode.FieldTargetFile, field.TypeString)
}
if euo.mutation.MediaCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,

View File

@@ -3,6 +3,7 @@
package ent
import (
"encoding/json"
"fmt"
"polaris/ent/history"
"strings"
@@ -19,8 +20,12 @@ type History struct {
ID int `json:"id,omitempty"`
// MediaID holds the value of the "media_id" field.
MediaID int `json:"media_id,omitempty"`
// EpisodeID holds the value of the "episode_id" field.
// deprecated
EpisodeID int `json:"episode_id,omitempty"`
// EpisodeNums holds the value of the "episode_nums" field.
EpisodeNums []int `json:"episode_nums,omitempty"`
// SeasonNum holds the value of the "season_num" field.
SeasonNum int `json:"season_num,omitempty"`
// SourceTitle holds the value of the "source_title" field.
SourceTitle string `json:"source_title,omitempty"`
// Date holds the value of the "date" field.
@@ -31,9 +36,13 @@ type History struct {
Size int `json:"size,omitempty"`
// DownloadClientID holds the value of the "download_client_id" field.
DownloadClientID int `json:"download_client_id,omitempty"`
// IndexerID holds the value of the "indexer_id" field.
IndexerID int `json:"indexer_id,omitempty"`
// Link holds the value of the "link" field.
Link string `json:"link,omitempty"`
// Status holds the value of the "status" field.
Status history.Status `json:"status,omitempty"`
// Saved holds the value of the "saved" field.
// deprecated
Saved string `json:"saved,omitempty"`
selectValues sql.SelectValues
}
@@ -43,9 +52,11 @@ func (*History) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case history.FieldID, history.FieldMediaID, history.FieldEpisodeID, history.FieldSize, history.FieldDownloadClientID:
case history.FieldEpisodeNums:
values[i] = new([]byte)
case history.FieldID, history.FieldMediaID, history.FieldEpisodeID, history.FieldSeasonNum, history.FieldSize, history.FieldDownloadClientID, history.FieldIndexerID:
values[i] = new(sql.NullInt64)
case history.FieldSourceTitle, history.FieldTargetDir, history.FieldStatus, history.FieldSaved:
case history.FieldSourceTitle, history.FieldTargetDir, history.FieldLink, history.FieldStatus, history.FieldSaved:
values[i] = new(sql.NullString)
case history.FieldDate:
values[i] = new(sql.NullTime)
@@ -82,6 +93,20 @@ func (h *History) assignValues(columns []string, values []any) error {
} else if value.Valid {
h.EpisodeID = int(value.Int64)
}
case history.FieldEpisodeNums:
if value, ok := values[i].(*[]byte); !ok {
return fmt.Errorf("unexpected type %T for field episode_nums", values[i])
} else if value != nil && len(*value) > 0 {
if err := json.Unmarshal(*value, &h.EpisodeNums); err != nil {
return fmt.Errorf("unmarshal field episode_nums: %w", err)
}
}
case history.FieldSeasonNum:
if value, ok := values[i].(*sql.NullInt64); !ok {
return fmt.Errorf("unexpected type %T for field season_num", values[i])
} else if value.Valid {
h.SeasonNum = int(value.Int64)
}
case history.FieldSourceTitle:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field source_title", values[i])
@@ -112,6 +137,18 @@ func (h *History) assignValues(columns []string, values []any) error {
} else if value.Valid {
h.DownloadClientID = int(value.Int64)
}
case history.FieldIndexerID:
if value, ok := values[i].(*sql.NullInt64); !ok {
return fmt.Errorf("unexpected type %T for field indexer_id", values[i])
} else if value.Valid {
h.IndexerID = int(value.Int64)
}
case history.FieldLink:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field link", values[i])
} else if value.Valid {
h.Link = value.String
}
case history.FieldStatus:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field status", values[i])
@@ -166,6 +203,12 @@ func (h *History) String() string {
builder.WriteString("episode_id=")
builder.WriteString(fmt.Sprintf("%v", h.EpisodeID))
builder.WriteString(", ")
builder.WriteString("episode_nums=")
builder.WriteString(fmt.Sprintf("%v", h.EpisodeNums))
builder.WriteString(", ")
builder.WriteString("season_num=")
builder.WriteString(fmt.Sprintf("%v", h.SeasonNum))
builder.WriteString(", ")
builder.WriteString("source_title=")
builder.WriteString(h.SourceTitle)
builder.WriteString(", ")
@@ -181,6 +224,12 @@ func (h *History) String() string {
builder.WriteString("download_client_id=")
builder.WriteString(fmt.Sprintf("%v", h.DownloadClientID))
builder.WriteString(", ")
builder.WriteString("indexer_id=")
builder.WriteString(fmt.Sprintf("%v", h.IndexerID))
builder.WriteString(", ")
builder.WriteString("link=")
builder.WriteString(h.Link)
builder.WriteString(", ")
builder.WriteString("status=")
builder.WriteString(fmt.Sprintf("%v", h.Status))
builder.WriteString(", ")

View File

@@ -17,6 +17,10 @@ const (
FieldMediaID = "media_id"
// FieldEpisodeID holds the string denoting the episode_id field in the database.
FieldEpisodeID = "episode_id"
// FieldEpisodeNums holds the string denoting the episode_nums field in the database.
FieldEpisodeNums = "episode_nums"
// FieldSeasonNum holds the string denoting the season_num field in the database.
FieldSeasonNum = "season_num"
// FieldSourceTitle holds the string denoting the source_title field in the database.
FieldSourceTitle = "source_title"
// FieldDate holds the string denoting the date field in the database.
@@ -27,6 +31,10 @@ const (
FieldSize = "size"
// FieldDownloadClientID holds the string denoting the download_client_id field in the database.
FieldDownloadClientID = "download_client_id"
// FieldIndexerID holds the string denoting the indexer_id field in the database.
FieldIndexerID = "indexer_id"
// FieldLink holds the string denoting the link field in the database.
FieldLink = "link"
// FieldStatus holds the string denoting the status field in the database.
FieldStatus = "status"
// FieldSaved holds the string denoting the saved field in the database.
@@ -40,11 +48,15 @@ var Columns = []string{
FieldID,
FieldMediaID,
FieldEpisodeID,
FieldEpisodeNums,
FieldSeasonNum,
FieldSourceTitle,
FieldDate,
FieldTargetDir,
FieldSize,
FieldDownloadClientID,
FieldIndexerID,
FieldLink,
FieldStatus,
FieldSaved,
}
@@ -73,6 +85,7 @@ const (
StatusSuccess Status = "success"
StatusFail Status = "fail"
StatusUploading Status = "uploading"
StatusSeeding Status = "seeding"
)
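With the new "seeding" value a grab can be finished downloading but still held in the torrent client; callers that only care about completion might treat it like success. A minimal sketch (the helper is illustrative):
// assumed imports: "polaris/ent/history"
func downloadFinished(s history.Status) bool {
    switch s {
    case history.StatusSuccess, history.StatusSeeding:
        return true
    default:
        return false
    }
}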
func (s Status) String() string {
@@ -82,7 +95,7 @@ func (s Status) String() string {
// StatusValidator is a validator for the "status" field enum values. It is called by the builders before save.
func StatusValidator(s Status) error {
switch s {
case StatusRunning, StatusSuccess, StatusFail, StatusUploading:
case StatusRunning, StatusSuccess, StatusFail, StatusUploading, StatusSeeding:
return nil
default:
return fmt.Errorf("history: invalid enum value for status field: %q", s)
@@ -107,6 +120,11 @@ func ByEpisodeID(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldEpisodeID, opts...).ToFunc()
}
// BySeasonNum orders the results by the season_num field.
func BySeasonNum(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldSeasonNum, opts...).ToFunc()
}
// BySourceTitle orders the results by the source_title field.
func BySourceTitle(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldSourceTitle, opts...).ToFunc()
@@ -132,6 +150,16 @@ func ByDownloadClientID(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldDownloadClientID, opts...).ToFunc()
}
// ByIndexerID orders the results by the indexer_id field.
func ByIndexerID(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldIndexerID, opts...).ToFunc()
}
// ByLink orders the results by the link field.
func ByLink(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldLink, opts...).ToFunc()
}
// ByStatus orders the results by the status field.
func ByStatus(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldStatus, opts...).ToFunc()

View File

@@ -64,6 +64,11 @@ func EpisodeID(v int) predicate.History {
return predicate.History(sql.FieldEQ(FieldEpisodeID, v))
}
// SeasonNum applies equality check predicate on the "season_num" field. It's identical to SeasonNumEQ.
func SeasonNum(v int) predicate.History {
return predicate.History(sql.FieldEQ(FieldSeasonNum, v))
}
// SourceTitle applies equality check predicate on the "source_title" field. It's identical to SourceTitleEQ.
func SourceTitle(v string) predicate.History {
return predicate.History(sql.FieldEQ(FieldSourceTitle, v))
@@ -89,6 +94,16 @@ func DownloadClientID(v int) predicate.History {
return predicate.History(sql.FieldEQ(FieldDownloadClientID, v))
}
// IndexerID applies equality check predicate on the "indexer_id" field. It's identical to IndexerIDEQ.
func IndexerID(v int) predicate.History {
return predicate.History(sql.FieldEQ(FieldIndexerID, v))
}
// Link applies equality check predicate on the "link" field. It's identical to LinkEQ.
func Link(v string) predicate.History {
return predicate.History(sql.FieldEQ(FieldLink, v))
}
// Saved applies equality check predicate on the "saved" field. It's identical to SavedEQ.
func Saved(v string) predicate.History {
return predicate.History(sql.FieldEQ(FieldSaved, v))
@@ -184,6 +199,66 @@ func EpisodeIDNotNil() predicate.History {
return predicate.History(sql.FieldNotNull(FieldEpisodeID))
}
// EpisodeNumsIsNil applies the IsNil predicate on the "episode_nums" field.
func EpisodeNumsIsNil() predicate.History {
return predicate.History(sql.FieldIsNull(FieldEpisodeNums))
}
// EpisodeNumsNotNil applies the NotNil predicate on the "episode_nums" field.
func EpisodeNumsNotNil() predicate.History {
return predicate.History(sql.FieldNotNull(FieldEpisodeNums))
}
// SeasonNumEQ applies the EQ predicate on the "season_num" field.
func SeasonNumEQ(v int) predicate.History {
return predicate.History(sql.FieldEQ(FieldSeasonNum, v))
}
// SeasonNumNEQ applies the NEQ predicate on the "season_num" field.
func SeasonNumNEQ(v int) predicate.History {
return predicate.History(sql.FieldNEQ(FieldSeasonNum, v))
}
// SeasonNumIn applies the In predicate on the "season_num" field.
func SeasonNumIn(vs ...int) predicate.History {
return predicate.History(sql.FieldIn(FieldSeasonNum, vs...))
}
// SeasonNumNotIn applies the NotIn predicate on the "season_num" field.
func SeasonNumNotIn(vs ...int) predicate.History {
return predicate.History(sql.FieldNotIn(FieldSeasonNum, vs...))
}
// SeasonNumGT applies the GT predicate on the "season_num" field.
func SeasonNumGT(v int) predicate.History {
return predicate.History(sql.FieldGT(FieldSeasonNum, v))
}
// SeasonNumGTE applies the GTE predicate on the "season_num" field.
func SeasonNumGTE(v int) predicate.History {
return predicate.History(sql.FieldGTE(FieldSeasonNum, v))
}
// SeasonNumLT applies the LT predicate on the "season_num" field.
func SeasonNumLT(v int) predicate.History {
return predicate.History(sql.FieldLT(FieldSeasonNum, v))
}
// SeasonNumLTE applies the LTE predicate on the "season_num" field.
func SeasonNumLTE(v int) predicate.History {
return predicate.History(sql.FieldLTE(FieldSeasonNum, v))
}
// SeasonNumIsNil applies the IsNil predicate on the "season_num" field.
func SeasonNumIsNil() predicate.History {
return predicate.History(sql.FieldIsNull(FieldSeasonNum))
}
// SeasonNumNotNil applies the NotNil predicate on the "season_num" field.
func SeasonNumNotNil() predicate.History {
return predicate.History(sql.FieldNotNull(FieldSeasonNum))
}
// SourceTitleEQ applies the EQ predicate on the "source_title" field.
func SourceTitleEQ(v string) predicate.History {
return predicate.History(sql.FieldEQ(FieldSourceTitle, v))
@@ -444,6 +519,131 @@ func DownloadClientIDNotNil() predicate.History {
return predicate.History(sql.FieldNotNull(FieldDownloadClientID))
}
// IndexerIDEQ applies the EQ predicate on the "indexer_id" field.
func IndexerIDEQ(v int) predicate.History {
return predicate.History(sql.FieldEQ(FieldIndexerID, v))
}
// IndexerIDNEQ applies the NEQ predicate on the "indexer_id" field.
func IndexerIDNEQ(v int) predicate.History {
return predicate.History(sql.FieldNEQ(FieldIndexerID, v))
}
// IndexerIDIn applies the In predicate on the "indexer_id" field.
func IndexerIDIn(vs ...int) predicate.History {
return predicate.History(sql.FieldIn(FieldIndexerID, vs...))
}
// IndexerIDNotIn applies the NotIn predicate on the "indexer_id" field.
func IndexerIDNotIn(vs ...int) predicate.History {
return predicate.History(sql.FieldNotIn(FieldIndexerID, vs...))
}
// IndexerIDGT applies the GT predicate on the "indexer_id" field.
func IndexerIDGT(v int) predicate.History {
return predicate.History(sql.FieldGT(FieldIndexerID, v))
}
// IndexerIDGTE applies the GTE predicate on the "indexer_id" field.
func IndexerIDGTE(v int) predicate.History {
return predicate.History(sql.FieldGTE(FieldIndexerID, v))
}
// IndexerIDLT applies the LT predicate on the "indexer_id" field.
func IndexerIDLT(v int) predicate.History {
return predicate.History(sql.FieldLT(FieldIndexerID, v))
}
// IndexerIDLTE applies the LTE predicate on the "indexer_id" field.
func IndexerIDLTE(v int) predicate.History {
return predicate.History(sql.FieldLTE(FieldIndexerID, v))
}
// IndexerIDIsNil applies the IsNil predicate on the "indexer_id" field.
func IndexerIDIsNil() predicate.History {
return predicate.History(sql.FieldIsNull(FieldIndexerID))
}
// IndexerIDNotNil applies the NotNil predicate on the "indexer_id" field.
func IndexerIDNotNil() predicate.History {
return predicate.History(sql.FieldNotNull(FieldIndexerID))
}
// LinkEQ applies the EQ predicate on the "link" field.
func LinkEQ(v string) predicate.History {
return predicate.History(sql.FieldEQ(FieldLink, v))
}
// LinkNEQ applies the NEQ predicate on the "link" field.
func LinkNEQ(v string) predicate.History {
return predicate.History(sql.FieldNEQ(FieldLink, v))
}
// LinkIn applies the In predicate on the "link" field.
func LinkIn(vs ...string) predicate.History {
return predicate.History(sql.FieldIn(FieldLink, vs...))
}
// LinkNotIn applies the NotIn predicate on the "link" field.
func LinkNotIn(vs ...string) predicate.History {
return predicate.History(sql.FieldNotIn(FieldLink, vs...))
}
// LinkGT applies the GT predicate on the "link" field.
func LinkGT(v string) predicate.History {
return predicate.History(sql.FieldGT(FieldLink, v))
}
// LinkGTE applies the GTE predicate on the "link" field.
func LinkGTE(v string) predicate.History {
return predicate.History(sql.FieldGTE(FieldLink, v))
}
// LinkLT applies the LT predicate on the "link" field.
func LinkLT(v string) predicate.History {
return predicate.History(sql.FieldLT(FieldLink, v))
}
// LinkLTE applies the LTE predicate on the "link" field.
func LinkLTE(v string) predicate.History {
return predicate.History(sql.FieldLTE(FieldLink, v))
}
// LinkContains applies the Contains predicate on the "link" field.
func LinkContains(v string) predicate.History {
return predicate.History(sql.FieldContains(FieldLink, v))
}
// LinkHasPrefix applies the HasPrefix predicate on the "link" field.
func LinkHasPrefix(v string) predicate.History {
return predicate.History(sql.FieldHasPrefix(FieldLink, v))
}
// LinkHasSuffix applies the HasSuffix predicate on the "link" field.
func LinkHasSuffix(v string) predicate.History {
return predicate.History(sql.FieldHasSuffix(FieldLink, v))
}
// LinkIsNil applies the IsNil predicate on the "link" field.
func LinkIsNil() predicate.History {
return predicate.History(sql.FieldIsNull(FieldLink))
}
// LinkNotNil applies the NotNil predicate on the "link" field.
func LinkNotNil() predicate.History {
return predicate.History(sql.FieldNotNull(FieldLink))
}
// LinkEqualFold applies the EqualFold predicate on the "link" field.
func LinkEqualFold(v string) predicate.History {
return predicate.History(sql.FieldEqualFold(FieldLink, v))
}
// LinkContainsFold applies the ContainsFold predicate on the "link" field.
func LinkContainsFold(v string) predicate.History {
return predicate.History(sql.FieldContainsFold(FieldLink, v))
}
// StatusEQ applies the EQ predicate on the "status" field.
func StatusEQ(v Status) predicate.History {
return predicate.History(sql.FieldEQ(FieldStatus, v))
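For orientation, a minimal sketch of how the new History predicates compose through the generated client. The function and variable names below are illustrative, not part of this change, and the usual "polaris/ent", "polaris/ent/history" and "context" imports are assumed.

func seasonGrabs(ctx context.Context, client *ent.Client) ([]*ent.History, error) {
    // History rows for season 1 that were grabbed from indexer 3 via a magnet link,
    // using the season_num, indexer_id and link predicates added above.
    return client.History.Query().
        Where(
            history.SeasonNumEQ(1),
            history.IndexerIDEQ(3),
            history.LinkContains("magnet:"),
        ).
        All(ctx)
}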

View File

@@ -40,6 +40,26 @@ func (hc *HistoryCreate) SetNillableEpisodeID(i *int) *HistoryCreate {
return hc
}
// SetEpisodeNums sets the "episode_nums" field.
func (hc *HistoryCreate) SetEpisodeNums(i []int) *HistoryCreate {
hc.mutation.SetEpisodeNums(i)
return hc
}
// SetSeasonNum sets the "season_num" field.
func (hc *HistoryCreate) SetSeasonNum(i int) *HistoryCreate {
hc.mutation.SetSeasonNum(i)
return hc
}
// SetNillableSeasonNum sets the "season_num" field if the given value is not nil.
func (hc *HistoryCreate) SetNillableSeasonNum(i *int) *HistoryCreate {
if i != nil {
hc.SetSeasonNum(*i)
}
return hc
}
// SetSourceTitle sets the "source_title" field.
func (hc *HistoryCreate) SetSourceTitle(s string) *HistoryCreate {
hc.mutation.SetSourceTitle(s)
@@ -86,6 +106,34 @@ func (hc *HistoryCreate) SetNillableDownloadClientID(i *int) *HistoryCreate {
return hc
}
// SetIndexerID sets the "indexer_id" field.
func (hc *HistoryCreate) SetIndexerID(i int) *HistoryCreate {
hc.mutation.SetIndexerID(i)
return hc
}
// SetNillableIndexerID sets the "indexer_id" field if the given value is not nil.
func (hc *HistoryCreate) SetNillableIndexerID(i *int) *HistoryCreate {
if i != nil {
hc.SetIndexerID(*i)
}
return hc
}
// SetLink sets the "link" field.
func (hc *HistoryCreate) SetLink(s string) *HistoryCreate {
hc.mutation.SetLink(s)
return hc
}
// SetNillableLink sets the "link" field if the given value is not nil.
func (hc *HistoryCreate) SetNillableLink(s *string) *HistoryCreate {
if s != nil {
hc.SetLink(*s)
}
return hc
}
// SetStatus sets the "status" field.
func (hc *HistoryCreate) SetStatus(h history.Status) *HistoryCreate {
hc.mutation.SetStatus(h)
@@ -206,6 +254,14 @@ func (hc *HistoryCreate) createSpec() (*History, *sqlgraph.CreateSpec) {
_spec.SetField(history.FieldEpisodeID, field.TypeInt, value)
_node.EpisodeID = value
}
if value, ok := hc.mutation.EpisodeNums(); ok {
_spec.SetField(history.FieldEpisodeNums, field.TypeJSON, value)
_node.EpisodeNums = value
}
if value, ok := hc.mutation.SeasonNum(); ok {
_spec.SetField(history.FieldSeasonNum, field.TypeInt, value)
_node.SeasonNum = value
}
if value, ok := hc.mutation.SourceTitle(); ok {
_spec.SetField(history.FieldSourceTitle, field.TypeString, value)
_node.SourceTitle = value
@@ -226,6 +282,14 @@ func (hc *HistoryCreate) createSpec() (*History, *sqlgraph.CreateSpec) {
_spec.SetField(history.FieldDownloadClientID, field.TypeInt, value)
_node.DownloadClientID = value
}
if value, ok := hc.mutation.IndexerID(); ok {
_spec.SetField(history.FieldIndexerID, field.TypeInt, value)
_node.IndexerID = value
}
if value, ok := hc.mutation.Link(); ok {
_spec.SetField(history.FieldLink, field.TypeString, value)
_node.Link = value
}
if value, ok := hc.mutation.Status(); ok {
_spec.SetField(history.FieldStatus, field.TypeEnum, value)
_node.Status = value
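A hedged sketch of the new HistoryCreate setters in use; the release title, link and IDs below are illustrative only, and the same "polaris/ent" imports as above are assumed.

func recordGrab(ctx context.Context, client *ent.Client) (*ent.History, error) {
    // Multi-episode grabs can now store every episode number plus the season,
    // the indexer that produced the release, and its link. Any remaining
    // required fields of the schema (for example status) would still need to
    // be set before Save.
    return client.History.Create().
        SetSeasonNum(1).
        SetEpisodeNums([]int{1, 2, 3}).
        SetIndexerID(3).
        SetLink("https://example.org/release.torrent").
        SetSourceTitle("Some.Show.S01E01-03.1080p").
        Save(ctx)
}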

View File

@@ -12,6 +12,7 @@ import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/dialect/sql/sqljson"
"entgo.io/ent/schema/field"
)
@@ -76,6 +77,51 @@ func (hu *HistoryUpdate) ClearEpisodeID() *HistoryUpdate {
return hu
}
// SetEpisodeNums sets the "episode_nums" field.
func (hu *HistoryUpdate) SetEpisodeNums(i []int) *HistoryUpdate {
hu.mutation.SetEpisodeNums(i)
return hu
}
// AppendEpisodeNums appends i to the "episode_nums" field.
func (hu *HistoryUpdate) AppendEpisodeNums(i []int) *HistoryUpdate {
hu.mutation.AppendEpisodeNums(i)
return hu
}
// ClearEpisodeNums clears the value of the "episode_nums" field.
func (hu *HistoryUpdate) ClearEpisodeNums() *HistoryUpdate {
hu.mutation.ClearEpisodeNums()
return hu
}
// SetSeasonNum sets the "season_num" field.
func (hu *HistoryUpdate) SetSeasonNum(i int) *HistoryUpdate {
hu.mutation.ResetSeasonNum()
hu.mutation.SetSeasonNum(i)
return hu
}
// SetNillableSeasonNum sets the "season_num" field if the given value is not nil.
func (hu *HistoryUpdate) SetNillableSeasonNum(i *int) *HistoryUpdate {
if i != nil {
hu.SetSeasonNum(*i)
}
return hu
}
// AddSeasonNum adds i to the "season_num" field.
func (hu *HistoryUpdate) AddSeasonNum(i int) *HistoryUpdate {
hu.mutation.AddSeasonNum(i)
return hu
}
// ClearSeasonNum clears the value of the "season_num" field.
func (hu *HistoryUpdate) ClearSeasonNum() *HistoryUpdate {
hu.mutation.ClearSeasonNum()
return hu
}
// SetSourceTitle sets the "source_title" field.
func (hu *HistoryUpdate) SetSourceTitle(s string) *HistoryUpdate {
hu.mutation.SetSourceTitle(s)
@@ -166,6 +212,53 @@ func (hu *HistoryUpdate) ClearDownloadClientID() *HistoryUpdate {
return hu
}
// SetIndexerID sets the "indexer_id" field.
func (hu *HistoryUpdate) SetIndexerID(i int) *HistoryUpdate {
hu.mutation.ResetIndexerID()
hu.mutation.SetIndexerID(i)
return hu
}
// SetNillableIndexerID sets the "indexer_id" field if the given value is not nil.
func (hu *HistoryUpdate) SetNillableIndexerID(i *int) *HistoryUpdate {
if i != nil {
hu.SetIndexerID(*i)
}
return hu
}
// AddIndexerID adds i to the "indexer_id" field.
func (hu *HistoryUpdate) AddIndexerID(i int) *HistoryUpdate {
hu.mutation.AddIndexerID(i)
return hu
}
// ClearIndexerID clears the value of the "indexer_id" field.
func (hu *HistoryUpdate) ClearIndexerID() *HistoryUpdate {
hu.mutation.ClearIndexerID()
return hu
}
// SetLink sets the "link" field.
func (hu *HistoryUpdate) SetLink(s string) *HistoryUpdate {
hu.mutation.SetLink(s)
return hu
}
// SetNillableLink sets the "link" field if the given value is not nil.
func (hu *HistoryUpdate) SetNillableLink(s *string) *HistoryUpdate {
if s != nil {
hu.SetLink(*s)
}
return hu
}
// ClearLink clears the value of the "link" field.
func (hu *HistoryUpdate) ClearLink() *HistoryUpdate {
hu.mutation.ClearLink()
return hu
}
// SetStatus sets the "status" field.
func (hu *HistoryUpdate) SetStatus(h history.Status) *HistoryUpdate {
hu.mutation.SetStatus(h)
@@ -269,6 +362,26 @@ func (hu *HistoryUpdate) sqlSave(ctx context.Context) (n int, err error) {
if hu.mutation.EpisodeIDCleared() {
_spec.ClearField(history.FieldEpisodeID, field.TypeInt)
}
if value, ok := hu.mutation.EpisodeNums(); ok {
_spec.SetField(history.FieldEpisodeNums, field.TypeJSON, value)
}
if value, ok := hu.mutation.AppendedEpisodeNums(); ok {
_spec.AddModifier(func(u *sql.UpdateBuilder) {
sqljson.Append(u, history.FieldEpisodeNums, value)
})
}
if hu.mutation.EpisodeNumsCleared() {
_spec.ClearField(history.FieldEpisodeNums, field.TypeJSON)
}
if value, ok := hu.mutation.SeasonNum(); ok {
_spec.SetField(history.FieldSeasonNum, field.TypeInt, value)
}
if value, ok := hu.mutation.AddedSeasonNum(); ok {
_spec.AddField(history.FieldSeasonNum, field.TypeInt, value)
}
if hu.mutation.SeasonNumCleared() {
_spec.ClearField(history.FieldSeasonNum, field.TypeInt)
}
if value, ok := hu.mutation.SourceTitle(); ok {
_spec.SetField(history.FieldSourceTitle, field.TypeString, value)
}
@@ -293,6 +406,21 @@ func (hu *HistoryUpdate) sqlSave(ctx context.Context) (n int, err error) {
if hu.mutation.DownloadClientIDCleared() {
_spec.ClearField(history.FieldDownloadClientID, field.TypeInt)
}
if value, ok := hu.mutation.IndexerID(); ok {
_spec.SetField(history.FieldIndexerID, field.TypeInt, value)
}
if value, ok := hu.mutation.AddedIndexerID(); ok {
_spec.AddField(history.FieldIndexerID, field.TypeInt, value)
}
if hu.mutation.IndexerIDCleared() {
_spec.ClearField(history.FieldIndexerID, field.TypeInt)
}
if value, ok := hu.mutation.Link(); ok {
_spec.SetField(history.FieldLink, field.TypeString, value)
}
if hu.mutation.LinkCleared() {
_spec.ClearField(history.FieldLink, field.TypeString)
}
if value, ok := hu.mutation.Status(); ok {
_spec.SetField(history.FieldStatus, field.TypeEnum, value)
}
@@ -370,6 +498,51 @@ func (huo *HistoryUpdateOne) ClearEpisodeID() *HistoryUpdateOne {
return huo
}
// SetEpisodeNums sets the "episode_nums" field.
func (huo *HistoryUpdateOne) SetEpisodeNums(i []int) *HistoryUpdateOne {
huo.mutation.SetEpisodeNums(i)
return huo
}
// AppendEpisodeNums appends i to the "episode_nums" field.
func (huo *HistoryUpdateOne) AppendEpisodeNums(i []int) *HistoryUpdateOne {
huo.mutation.AppendEpisodeNums(i)
return huo
}
// ClearEpisodeNums clears the value of the "episode_nums" field.
func (huo *HistoryUpdateOne) ClearEpisodeNums() *HistoryUpdateOne {
huo.mutation.ClearEpisodeNums()
return huo
}
// SetSeasonNum sets the "season_num" field.
func (huo *HistoryUpdateOne) SetSeasonNum(i int) *HistoryUpdateOne {
huo.mutation.ResetSeasonNum()
huo.mutation.SetSeasonNum(i)
return huo
}
// SetNillableSeasonNum sets the "season_num" field if the given value is not nil.
func (huo *HistoryUpdateOne) SetNillableSeasonNum(i *int) *HistoryUpdateOne {
if i != nil {
huo.SetSeasonNum(*i)
}
return huo
}
// AddSeasonNum adds i to the "season_num" field.
func (huo *HistoryUpdateOne) AddSeasonNum(i int) *HistoryUpdateOne {
huo.mutation.AddSeasonNum(i)
return huo
}
// ClearSeasonNum clears the value of the "season_num" field.
func (huo *HistoryUpdateOne) ClearSeasonNum() *HistoryUpdateOne {
huo.mutation.ClearSeasonNum()
return huo
}
// SetSourceTitle sets the "source_title" field.
func (huo *HistoryUpdateOne) SetSourceTitle(s string) *HistoryUpdateOne {
huo.mutation.SetSourceTitle(s)
@@ -460,6 +633,53 @@ func (huo *HistoryUpdateOne) ClearDownloadClientID() *HistoryUpdateOne {
return huo
}
// SetIndexerID sets the "indexer_id" field.
func (huo *HistoryUpdateOne) SetIndexerID(i int) *HistoryUpdateOne {
huo.mutation.ResetIndexerID()
huo.mutation.SetIndexerID(i)
return huo
}
// SetNillableIndexerID sets the "indexer_id" field if the given value is not nil.
func (huo *HistoryUpdateOne) SetNillableIndexerID(i *int) *HistoryUpdateOne {
if i != nil {
huo.SetIndexerID(*i)
}
return huo
}
// AddIndexerID adds i to the "indexer_id" field.
func (huo *HistoryUpdateOne) AddIndexerID(i int) *HistoryUpdateOne {
huo.mutation.AddIndexerID(i)
return huo
}
// ClearIndexerID clears the value of the "indexer_id" field.
func (huo *HistoryUpdateOne) ClearIndexerID() *HistoryUpdateOne {
huo.mutation.ClearIndexerID()
return huo
}
// SetLink sets the "link" field.
func (huo *HistoryUpdateOne) SetLink(s string) *HistoryUpdateOne {
huo.mutation.SetLink(s)
return huo
}
// SetNillableLink sets the "link" field if the given value is not nil.
func (huo *HistoryUpdateOne) SetNillableLink(s *string) *HistoryUpdateOne {
if s != nil {
huo.SetLink(*s)
}
return huo
}
// ClearLink clears the value of the "link" field.
func (huo *HistoryUpdateOne) ClearLink() *HistoryUpdateOne {
huo.mutation.ClearLink()
return huo
}
// SetStatus sets the "status" field.
func (huo *HistoryUpdateOne) SetStatus(h history.Status) *HistoryUpdateOne {
huo.mutation.SetStatus(h)
@@ -593,6 +813,26 @@ func (huo *HistoryUpdateOne) sqlSave(ctx context.Context) (_node *History, err e
if huo.mutation.EpisodeIDCleared() {
_spec.ClearField(history.FieldEpisodeID, field.TypeInt)
}
if value, ok := huo.mutation.EpisodeNums(); ok {
_spec.SetField(history.FieldEpisodeNums, field.TypeJSON, value)
}
if value, ok := huo.mutation.AppendedEpisodeNums(); ok {
_spec.AddModifier(func(u *sql.UpdateBuilder) {
sqljson.Append(u, history.FieldEpisodeNums, value)
})
}
if huo.mutation.EpisodeNumsCleared() {
_spec.ClearField(history.FieldEpisodeNums, field.TypeJSON)
}
if value, ok := huo.mutation.SeasonNum(); ok {
_spec.SetField(history.FieldSeasonNum, field.TypeInt, value)
}
if value, ok := huo.mutation.AddedSeasonNum(); ok {
_spec.AddField(history.FieldSeasonNum, field.TypeInt, value)
}
if huo.mutation.SeasonNumCleared() {
_spec.ClearField(history.FieldSeasonNum, field.TypeInt)
}
if value, ok := huo.mutation.SourceTitle(); ok {
_spec.SetField(history.FieldSourceTitle, field.TypeString, value)
}
@@ -617,6 +857,21 @@ func (huo *HistoryUpdateOne) sqlSave(ctx context.Context) (_node *History, err e
if huo.mutation.DownloadClientIDCleared() {
_spec.ClearField(history.FieldDownloadClientID, field.TypeInt)
}
if value, ok := huo.mutation.IndexerID(); ok {
_spec.SetField(history.FieldIndexerID, field.TypeInt, value)
}
if value, ok := huo.mutation.AddedIndexerID(); ok {
_spec.AddField(history.FieldIndexerID, field.TypeInt, value)
}
if huo.mutation.IndexerIDCleared() {
_spec.ClearField(history.FieldIndexerID, field.TypeInt)
}
if value, ok := huo.mutation.Link(); ok {
_spec.SetField(history.FieldLink, field.TypeString, value)
}
if huo.mutation.LinkCleared() {
_spec.ClearField(history.FieldLink, field.TypeString)
}
if value, ok := huo.mutation.Status(); ok {
_spec.SetField(history.FieldStatus, field.TypeEnum, value)
}
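A short sketch of the update side, assuming the standard generated client helper UpdateOneID (defined in client.go, not shown in this diff); names are illustrative.

func appendEpisodes(ctx context.Context, client *ent.Client, id int) error {
    // AppendEpisodeNums issues a JSON append on the stored array instead of
    // overwriting it, while SetSeasonNum replaces the scalar field.
    return client.History.UpdateOneID(id).
        AppendEpisodeNums([]int{4}).
        SetSeasonNum(1).
        Exec(ctx)
}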

View File

@@ -8,6 +8,18 @@ import (
"polaris/ent"
)
// The BlacklistFunc type is an adapter to allow the use of ordinary
// function as Blacklist mutator.
type BlacklistFunc func(context.Context, *ent.BlacklistMutation) (ent.Value, error)
// Mutate calls f(ctx, m).
func (f BlacklistFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
if mv, ok := m.(*ent.BlacklistMutation); ok {
return f(ctx, mv)
}
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.BlacklistMutation", m)
}
// The DownloadClientsFunc type is an adapter to allow the use of ordinary
// function as DownloadClients mutator.
type DownloadClientsFunc func(context.Context, *ent.DownloadClientsMutation) (ent.Value, error)
@@ -44,6 +56,18 @@ func (f HistoryFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, err
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.HistoryMutation", m)
}
// The ImportListFunc type is an adapter to allow the use of ordinary
// function as ImportList mutator.
type ImportListFunc func(context.Context, *ent.ImportListMutation) (ent.Value, error)
// Mutate calls f(ctx, m).
func (f ImportListFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
if mv, ok := m.(*ent.ImportListMutation); ok {
return f(ctx, mv)
}
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.ImportListMutation", m)
}
// The IndexersFunc type is an adapter to allow the use of ordinary
// function as Indexers mutator.
type IndexersFunc func(context.Context, *ent.IndexersMutation) (ent.Value, error)
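A minimal sketch of how the new ImportListFunc adapter is typically wired up as a runtime hook, assuming the standard generated client exposes an ImportList.Use registration point and the "polaris/ent", "polaris/ent/hook", "context" and "log" imports.

func registerImportListHook(client *ent.Client) {
    // Log every ImportList mutation before it runs. hook.ImportListFunc adapts
    // the plain function below to the ent.Mutator interface used by hooks.
    client.ImportList.Use(func(next ent.Mutator) ent.Mutator {
        return hook.ImportListFunc(func(ctx context.Context, m *ent.ImportListMutation) (ent.Value, error) {
            log.Printf("import list mutation: %v", m.Op())
            return next.Mutate(ctx, m)
        })
    })
}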

164
ent/importlist.go Normal file
View File

@@ -0,0 +1,164 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"encoding/json"
"fmt"
"polaris/ent/importlist"
"polaris/ent/schema"
"strings"
"entgo.io/ent"
"entgo.io/ent/dialect/sql"
)
// ImportList is the model entity for the ImportList schema.
type ImportList struct {
config `json:"-"`
// ID of the ent.
ID int `json:"id,omitempty"`
// Name holds the value of the "name" field.
Name string `json:"name,omitempty"`
// Type holds the value of the "type" field.
Type importlist.Type `json:"type,omitempty"`
// URL holds the value of the "url" field.
URL string `json:"url,omitempty"`
// Qulity holds the value of the "qulity" field.
Qulity string `json:"qulity,omitempty"`
// StorageID holds the value of the "storage_id" field.
StorageID int `json:"storage_id,omitempty"`
// Settings holds the value of the "settings" field.
Settings schema.ImportListSettings `json:"settings,omitempty"`
selectValues sql.SelectValues
}
// scanValues returns the types for scanning values from sql.Rows.
func (*ImportList) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case importlist.FieldSettings:
values[i] = new([]byte)
case importlist.FieldID, importlist.FieldStorageID:
values[i] = new(sql.NullInt64)
case importlist.FieldName, importlist.FieldType, importlist.FieldURL, importlist.FieldQulity:
values[i] = new(sql.NullString)
default:
values[i] = new(sql.UnknownType)
}
}
return values, nil
}
// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the ImportList fields.
func (il *ImportList) assignValues(columns []string, values []any) error {
if m, n := len(values), len(columns); m < n {
return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
}
for i := range columns {
switch columns[i] {
case importlist.FieldID:
value, ok := values[i].(*sql.NullInt64)
if !ok {
return fmt.Errorf("unexpected type %T for field id", value)
}
il.ID = int(value.Int64)
case importlist.FieldName:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field name", values[i])
} else if value.Valid {
il.Name = value.String
}
case importlist.FieldType:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field type", values[i])
} else if value.Valid {
il.Type = importlist.Type(value.String)
}
case importlist.FieldURL:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field url", values[i])
} else if value.Valid {
il.URL = value.String
}
case importlist.FieldQulity:
if value, ok := values[i].(*sql.NullString); !ok {
return fmt.Errorf("unexpected type %T for field qulity", values[i])
} else if value.Valid {
il.Qulity = value.String
}
case importlist.FieldStorageID:
if value, ok := values[i].(*sql.NullInt64); !ok {
return fmt.Errorf("unexpected type %T for field storage_id", values[i])
} else if value.Valid {
il.StorageID = int(value.Int64)
}
case importlist.FieldSettings:
if value, ok := values[i].(*[]byte); !ok {
return fmt.Errorf("unexpected type %T for field settings", values[i])
} else if value != nil && len(*value) > 0 {
if err := json.Unmarshal(*value, &il.Settings); err != nil {
return fmt.Errorf("unmarshal field settings: %w", err)
}
}
default:
il.selectValues.Set(columns[i], values[i])
}
}
return nil
}
// Value returns the ent.Value that was dynamically selected and assigned to the ImportList.
// This includes values selected through modifiers, order, etc.
func (il *ImportList) Value(name string) (ent.Value, error) {
return il.selectValues.Get(name)
}
// Update returns a builder for updating this ImportList.
// Note that you need to call ImportList.Unwrap() before calling this method if this ImportList
// was returned from a transaction, and the transaction was committed or rolled back.
func (il *ImportList) Update() *ImportListUpdateOne {
return NewImportListClient(il.config).UpdateOne(il)
}
// Unwrap unwraps the ImportList entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
func (il *ImportList) Unwrap() *ImportList {
_tx, ok := il.config.driver.(*txDriver)
if !ok {
panic("ent: ImportList is not a transactional entity")
}
il.config.driver = _tx.drv
return il
}
// String implements the fmt.Stringer.
func (il *ImportList) String() string {
var builder strings.Builder
builder.WriteString("ImportList(")
builder.WriteString(fmt.Sprintf("id=%v, ", il.ID))
builder.WriteString("name=")
builder.WriteString(il.Name)
builder.WriteString(", ")
builder.WriteString("type=")
builder.WriteString(fmt.Sprintf("%v", il.Type))
builder.WriteString(", ")
builder.WriteString("url=")
builder.WriteString(il.URL)
builder.WriteString(", ")
builder.WriteString("qulity=")
builder.WriteString(il.Qulity)
builder.WriteString(", ")
builder.WriteString("storage_id=")
builder.WriteString(fmt.Sprintf("%v", il.StorageID))
builder.WriteString(", ")
builder.WriteString("settings=")
builder.WriteString(fmt.Sprintf("%v", il.Settings))
builder.WriteByte(')')
return builder.String()
}
// ImportLists is a parsable slice of ImportList.
type ImportLists []*ImportList

View File

@@ -0,0 +1,107 @@
// Code generated by ent, DO NOT EDIT.
package importlist
import (
"fmt"
"entgo.io/ent/dialect/sql"
)
const (
// Label holds the string label denoting the importlist type in the database.
Label = "import_list"
// FieldID holds the string denoting the id field in the database.
FieldID = "id"
// FieldName holds the string denoting the name field in the database.
FieldName = "name"
// FieldType holds the string denoting the type field in the database.
FieldType = "type"
// FieldURL holds the string denoting the url field in the database.
FieldURL = "url"
// FieldQulity holds the string denoting the qulity field in the database.
FieldQulity = "qulity"
// FieldStorageID holds the string denoting the storage_id field in the database.
FieldStorageID = "storage_id"
// FieldSettings holds the string denoting the settings field in the database.
FieldSettings = "settings"
// Table holds the table name of the importlist in the database.
Table = "import_lists"
)
// Columns holds all SQL columns for importlist fields.
var Columns = []string{
FieldID,
FieldName,
FieldType,
FieldURL,
FieldQulity,
FieldStorageID,
FieldSettings,
}
// ValidColumn reports if the column name is valid (part of the table columns).
func ValidColumn(column string) bool {
for i := range Columns {
if column == Columns[i] {
return true
}
}
return false
}
// Type defines the type for the "type" enum field.
type Type string
// Type values.
const (
TypePlex Type = "plex"
TypeDoulist Type = "doulist"
)
func (_type Type) String() string {
return string(_type)
}
// TypeValidator is a validator for the "type" field enum values. It is called by the builders before save.
func TypeValidator(_type Type) error {
switch _type {
case TypePlex, TypeDoulist:
return nil
default:
return fmt.Errorf("importlist: invalid enum value for type field: %q", _type)
}
}
// OrderOption defines the ordering options for the ImportList queries.
type OrderOption func(*sql.Selector)
// ByID orders the results by the id field.
func ByID(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldID, opts...).ToFunc()
}
// ByName orders the results by the name field.
func ByName(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldName, opts...).ToFunc()
}
// ByType orders the results by the type field.
func ByType(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldType, opts...).ToFunc()
}
// ByURL orders the results by the url field.
func ByURL(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldURL, opts...).ToFunc()
}
// ByQulity orders the results by the qulity field.
func ByQulity(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldQulity, opts...).ToFunc()
}
// ByStorageID orders the results by the storage_id field.
func ByStorageID(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldStorageID, opts...).ToFunc()
}
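A minimal sketch combining the Type enum, its validator and the ordering helpers declared above; client access and imports ("polaris/ent", "polaris/ent/importlist") are assumed, and the function name is illustrative.

func listsOfType(ctx context.Context, client *ent.Client, t importlist.Type) ([]*ent.ImportList, error) {
    // Reject anything other than the two declared enum values ("plex", "doulist")
    // before it reaches the database, then return matches ordered by name.
    if err := importlist.TypeValidator(t); err != nil {
        return nil, err
    }
    return client.ImportList.Query().
        Where(importlist.TypeEQ(t)).
        Order(importlist.ByName()).
        All(ctx)
}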

364
ent/importlist/where.go Normal file
View File

@@ -0,0 +1,364 @@
// Code generated by ent, DO NOT EDIT.
package importlist
import (
"polaris/ent/predicate"
"entgo.io/ent/dialect/sql"
)
// ID filters vertices based on their ID field.
func ID(id int) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldID, id))
}
// IDEQ applies the EQ predicate on the ID field.
func IDEQ(id int) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldID, id))
}
// IDNEQ applies the NEQ predicate on the ID field.
func IDNEQ(id int) predicate.ImportList {
return predicate.ImportList(sql.FieldNEQ(FieldID, id))
}
// IDIn applies the In predicate on the ID field.
func IDIn(ids ...int) predicate.ImportList {
return predicate.ImportList(sql.FieldIn(FieldID, ids...))
}
// IDNotIn applies the NotIn predicate on the ID field.
func IDNotIn(ids ...int) predicate.ImportList {
return predicate.ImportList(sql.FieldNotIn(FieldID, ids...))
}
// IDGT applies the GT predicate on the ID field.
func IDGT(id int) predicate.ImportList {
return predicate.ImportList(sql.FieldGT(FieldID, id))
}
// IDGTE applies the GTE predicate on the ID field.
func IDGTE(id int) predicate.ImportList {
return predicate.ImportList(sql.FieldGTE(FieldID, id))
}
// IDLT applies the LT predicate on the ID field.
func IDLT(id int) predicate.ImportList {
return predicate.ImportList(sql.FieldLT(FieldID, id))
}
// IDLTE applies the LTE predicate on the ID field.
func IDLTE(id int) predicate.ImportList {
return predicate.ImportList(sql.FieldLTE(FieldID, id))
}
// Name applies equality check predicate on the "name" field. It's identical to NameEQ.
func Name(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldName, v))
}
// URL applies equality check predicate on the "url" field. It's identical to URLEQ.
func URL(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldURL, v))
}
// Qulity applies equality check predicate on the "qulity" field. It's identical to QulityEQ.
func Qulity(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldQulity, v))
}
// StorageID applies equality check predicate on the "storage_id" field. It's identical to StorageIDEQ.
func StorageID(v int) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldStorageID, v))
}
// NameEQ applies the EQ predicate on the "name" field.
func NameEQ(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldName, v))
}
// NameNEQ applies the NEQ predicate on the "name" field.
func NameNEQ(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldNEQ(FieldName, v))
}
// NameIn applies the In predicate on the "name" field.
func NameIn(vs ...string) predicate.ImportList {
return predicate.ImportList(sql.FieldIn(FieldName, vs...))
}
// NameNotIn applies the NotIn predicate on the "name" field.
func NameNotIn(vs ...string) predicate.ImportList {
return predicate.ImportList(sql.FieldNotIn(FieldName, vs...))
}
// NameGT applies the GT predicate on the "name" field.
func NameGT(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldGT(FieldName, v))
}
// NameGTE applies the GTE predicate on the "name" field.
func NameGTE(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldGTE(FieldName, v))
}
// NameLT applies the LT predicate on the "name" field.
func NameLT(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldLT(FieldName, v))
}
// NameLTE applies the LTE predicate on the "name" field.
func NameLTE(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldLTE(FieldName, v))
}
// NameContains applies the Contains predicate on the "name" field.
func NameContains(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldContains(FieldName, v))
}
// NameHasPrefix applies the HasPrefix predicate on the "name" field.
func NameHasPrefix(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldHasPrefix(FieldName, v))
}
// NameHasSuffix applies the HasSuffix predicate on the "name" field.
func NameHasSuffix(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldHasSuffix(FieldName, v))
}
// NameEqualFold applies the EqualFold predicate on the "name" field.
func NameEqualFold(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEqualFold(FieldName, v))
}
// NameContainsFold applies the ContainsFold predicate on the "name" field.
func NameContainsFold(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldContainsFold(FieldName, v))
}
// TypeEQ applies the EQ predicate on the "type" field.
func TypeEQ(v Type) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldType, v))
}
// TypeNEQ applies the NEQ predicate on the "type" field.
func TypeNEQ(v Type) predicate.ImportList {
return predicate.ImportList(sql.FieldNEQ(FieldType, v))
}
// TypeIn applies the In predicate on the "type" field.
func TypeIn(vs ...Type) predicate.ImportList {
return predicate.ImportList(sql.FieldIn(FieldType, vs...))
}
// TypeNotIn applies the NotIn predicate on the "type" field.
func TypeNotIn(vs ...Type) predicate.ImportList {
return predicate.ImportList(sql.FieldNotIn(FieldType, vs...))
}
// URLEQ applies the EQ predicate on the "url" field.
func URLEQ(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldURL, v))
}
// URLNEQ applies the NEQ predicate on the "url" field.
func URLNEQ(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldNEQ(FieldURL, v))
}
// URLIn applies the In predicate on the "url" field.
func URLIn(vs ...string) predicate.ImportList {
return predicate.ImportList(sql.FieldIn(FieldURL, vs...))
}
// URLNotIn applies the NotIn predicate on the "url" field.
func URLNotIn(vs ...string) predicate.ImportList {
return predicate.ImportList(sql.FieldNotIn(FieldURL, vs...))
}
// URLGT applies the GT predicate on the "url" field.
func URLGT(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldGT(FieldURL, v))
}
// URLGTE applies the GTE predicate on the "url" field.
func URLGTE(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldGTE(FieldURL, v))
}
// URLLT applies the LT predicate on the "url" field.
func URLLT(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldLT(FieldURL, v))
}
// URLLTE applies the LTE predicate on the "url" field.
func URLLTE(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldLTE(FieldURL, v))
}
// URLContains applies the Contains predicate on the "url" field.
func URLContains(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldContains(FieldURL, v))
}
// URLHasPrefix applies the HasPrefix predicate on the "url" field.
func URLHasPrefix(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldHasPrefix(FieldURL, v))
}
// URLHasSuffix applies the HasSuffix predicate on the "url" field.
func URLHasSuffix(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldHasSuffix(FieldURL, v))
}
// URLIsNil applies the IsNil predicate on the "url" field.
func URLIsNil() predicate.ImportList {
return predicate.ImportList(sql.FieldIsNull(FieldURL))
}
// URLNotNil applies the NotNil predicate on the "url" field.
func URLNotNil() predicate.ImportList {
return predicate.ImportList(sql.FieldNotNull(FieldURL))
}
// URLEqualFold applies the EqualFold predicate on the "url" field.
func URLEqualFold(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEqualFold(FieldURL, v))
}
// URLContainsFold applies the ContainsFold predicate on the "url" field.
func URLContainsFold(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldContainsFold(FieldURL, v))
}
// QulityEQ applies the EQ predicate on the "qulity" field.
func QulityEQ(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldQulity, v))
}
// QulityNEQ applies the NEQ predicate on the "qulity" field.
func QulityNEQ(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldNEQ(FieldQulity, v))
}
// QulityIn applies the In predicate on the "qulity" field.
func QulityIn(vs ...string) predicate.ImportList {
return predicate.ImportList(sql.FieldIn(FieldQulity, vs...))
}
// QulityNotIn applies the NotIn predicate on the "qulity" field.
func QulityNotIn(vs ...string) predicate.ImportList {
return predicate.ImportList(sql.FieldNotIn(FieldQulity, vs...))
}
// QulityGT applies the GT predicate on the "qulity" field.
func QulityGT(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldGT(FieldQulity, v))
}
// QulityGTE applies the GTE predicate on the "qulity" field.
func QulityGTE(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldGTE(FieldQulity, v))
}
// QulityLT applies the LT predicate on the "qulity" field.
func QulityLT(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldLT(FieldQulity, v))
}
// QulityLTE applies the LTE predicate on the "qulity" field.
func QulityLTE(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldLTE(FieldQulity, v))
}
// QulityContains applies the Contains predicate on the "qulity" field.
func QulityContains(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldContains(FieldQulity, v))
}
// QulityHasPrefix applies the HasPrefix predicate on the "qulity" field.
func QulityHasPrefix(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldHasPrefix(FieldQulity, v))
}
// QulityHasSuffix applies the HasSuffix predicate on the "qulity" field.
func QulityHasSuffix(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldHasSuffix(FieldQulity, v))
}
// QulityEqualFold applies the EqualFold predicate on the "qulity" field.
func QulityEqualFold(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldEqualFold(FieldQulity, v))
}
// QulityContainsFold applies the ContainsFold predicate on the "qulity" field.
func QulityContainsFold(v string) predicate.ImportList {
return predicate.ImportList(sql.FieldContainsFold(FieldQulity, v))
}
// StorageIDEQ applies the EQ predicate on the "storage_id" field.
func StorageIDEQ(v int) predicate.ImportList {
return predicate.ImportList(sql.FieldEQ(FieldStorageID, v))
}
// StorageIDNEQ applies the NEQ predicate on the "storage_id" field.
func StorageIDNEQ(v int) predicate.ImportList {
return predicate.ImportList(sql.FieldNEQ(FieldStorageID, v))
}
// StorageIDIn applies the In predicate on the "storage_id" field.
func StorageIDIn(vs ...int) predicate.ImportList {
return predicate.ImportList(sql.FieldIn(FieldStorageID, vs...))
}
// StorageIDNotIn applies the NotIn predicate on the "storage_id" field.
func StorageIDNotIn(vs ...int) predicate.ImportList {
return predicate.ImportList(sql.FieldNotIn(FieldStorageID, vs...))
}
// StorageIDGT applies the GT predicate on the "storage_id" field.
func StorageIDGT(v int) predicate.ImportList {
return predicate.ImportList(sql.FieldGT(FieldStorageID, v))
}
// StorageIDGTE applies the GTE predicate on the "storage_id" field.
func StorageIDGTE(v int) predicate.ImportList {
return predicate.ImportList(sql.FieldGTE(FieldStorageID, v))
}
// StorageIDLT applies the LT predicate on the "storage_id" field.
func StorageIDLT(v int) predicate.ImportList {
return predicate.ImportList(sql.FieldLT(FieldStorageID, v))
}
// StorageIDLTE applies the LTE predicate on the "storage_id" field.
func StorageIDLTE(v int) predicate.ImportList {
return predicate.ImportList(sql.FieldLTE(FieldStorageID, v))
}
// SettingsIsNil applies the IsNil predicate on the "settings" field.
func SettingsIsNil() predicate.ImportList {
return predicate.ImportList(sql.FieldIsNull(FieldSettings))
}
// SettingsNotNil applies the NotNil predicate on the "settings" field.
func SettingsNotNil() predicate.ImportList {
return predicate.ImportList(sql.FieldNotNull(FieldSettings))
}
// And groups predicates with the AND operator between them.
func And(predicates ...predicate.ImportList) predicate.ImportList {
return predicate.ImportList(sql.AndPredicates(predicates...))
}
// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.ImportList) predicate.ImportList {
return predicate.ImportList(sql.OrPredicates(predicates...))
}
// Not applies the not operator on the given predicate.
func Not(p predicate.ImportList) predicate.ImportList {
return predicate.ImportList(sql.NotPredicates(p))
}
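The predicates above compose with And/Or/Not like any other ent predicates; a small sketch under the same import assumptions, with illustrative search terms.

func doubanLists(ctx context.Context, client *ent.Client) ([]*ent.ImportList, error) {
    // The *Fold variants match case-insensitively.
    return client.ImportList.Query().
        Where(importlist.Or(
            importlist.URLContainsFold("douban.com"),
            importlist.NameHasPrefix("douban"),
        )).
        All(ctx)
}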

264
ent/importlist_create.go Normal file
View File

@@ -0,0 +1,264 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"context"
"errors"
"fmt"
"polaris/ent/importlist"
"polaris/ent/schema"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
)
// ImportListCreate is the builder for creating a ImportList entity.
type ImportListCreate struct {
config
mutation *ImportListMutation
hooks []Hook
}
// SetName sets the "name" field.
func (ilc *ImportListCreate) SetName(s string) *ImportListCreate {
ilc.mutation.SetName(s)
return ilc
}
// SetType sets the "type" field.
func (ilc *ImportListCreate) SetType(i importlist.Type) *ImportListCreate {
ilc.mutation.SetType(i)
return ilc
}
// SetURL sets the "url" field.
func (ilc *ImportListCreate) SetURL(s string) *ImportListCreate {
ilc.mutation.SetURL(s)
return ilc
}
// SetNillableURL sets the "url" field if the given value is not nil.
func (ilc *ImportListCreate) SetNillableURL(s *string) *ImportListCreate {
if s != nil {
ilc.SetURL(*s)
}
return ilc
}
// SetQulity sets the "qulity" field.
func (ilc *ImportListCreate) SetQulity(s string) *ImportListCreate {
ilc.mutation.SetQulity(s)
return ilc
}
// SetStorageID sets the "storage_id" field.
func (ilc *ImportListCreate) SetStorageID(i int) *ImportListCreate {
ilc.mutation.SetStorageID(i)
return ilc
}
// SetSettings sets the "settings" field.
func (ilc *ImportListCreate) SetSettings(sls schema.ImportListSettings) *ImportListCreate {
ilc.mutation.SetSettings(sls)
return ilc
}
// SetNillableSettings sets the "settings" field if the given value is not nil.
func (ilc *ImportListCreate) SetNillableSettings(sls *schema.ImportListSettings) *ImportListCreate {
if sls != nil {
ilc.SetSettings(*sls)
}
return ilc
}
// Mutation returns the ImportListMutation object of the builder.
func (ilc *ImportListCreate) Mutation() *ImportListMutation {
return ilc.mutation
}
// Save creates the ImportList in the database.
func (ilc *ImportListCreate) Save(ctx context.Context) (*ImportList, error) {
return withHooks(ctx, ilc.sqlSave, ilc.mutation, ilc.hooks)
}
// SaveX calls Save and panics if Save returns an error.
func (ilc *ImportListCreate) SaveX(ctx context.Context) *ImportList {
v, err := ilc.Save(ctx)
if err != nil {
panic(err)
}
return v
}
// Exec executes the query.
func (ilc *ImportListCreate) Exec(ctx context.Context) error {
_, err := ilc.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (ilc *ImportListCreate) ExecX(ctx context.Context) {
if err := ilc.Exec(ctx); err != nil {
panic(err)
}
}
// check runs all checks and user-defined validators on the builder.
func (ilc *ImportListCreate) check() error {
if _, ok := ilc.mutation.Name(); !ok {
return &ValidationError{Name: "name", err: errors.New(`ent: missing required field "ImportList.name"`)}
}
if _, ok := ilc.mutation.GetType(); !ok {
return &ValidationError{Name: "type", err: errors.New(`ent: missing required field "ImportList.type"`)}
}
if v, ok := ilc.mutation.GetType(); ok {
if err := importlist.TypeValidator(v); err != nil {
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "ImportList.type": %w`, err)}
}
}
if _, ok := ilc.mutation.Qulity(); !ok {
return &ValidationError{Name: "qulity", err: errors.New(`ent: missing required field "ImportList.qulity"`)}
}
if _, ok := ilc.mutation.StorageID(); !ok {
return &ValidationError{Name: "storage_id", err: errors.New(`ent: missing required field "ImportList.storage_id"`)}
}
return nil
}
func (ilc *ImportListCreate) sqlSave(ctx context.Context) (*ImportList, error) {
if err := ilc.check(); err != nil {
return nil, err
}
_node, _spec := ilc.createSpec()
if err := sqlgraph.CreateNode(ctx, ilc.driver, _spec); err != nil {
if sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
return nil, err
}
id := _spec.ID.Value.(int64)
_node.ID = int(id)
ilc.mutation.id = &_node.ID
ilc.mutation.done = true
return _node, nil
}
func (ilc *ImportListCreate) createSpec() (*ImportList, *sqlgraph.CreateSpec) {
var (
_node = &ImportList{config: ilc.config}
_spec = sqlgraph.NewCreateSpec(importlist.Table, sqlgraph.NewFieldSpec(importlist.FieldID, field.TypeInt))
)
if value, ok := ilc.mutation.Name(); ok {
_spec.SetField(importlist.FieldName, field.TypeString, value)
_node.Name = value
}
if value, ok := ilc.mutation.GetType(); ok {
_spec.SetField(importlist.FieldType, field.TypeEnum, value)
_node.Type = value
}
if value, ok := ilc.mutation.URL(); ok {
_spec.SetField(importlist.FieldURL, field.TypeString, value)
_node.URL = value
}
if value, ok := ilc.mutation.Qulity(); ok {
_spec.SetField(importlist.FieldQulity, field.TypeString, value)
_node.Qulity = value
}
if value, ok := ilc.mutation.StorageID(); ok {
_spec.SetField(importlist.FieldStorageID, field.TypeInt, value)
_node.StorageID = value
}
if value, ok := ilc.mutation.Settings(); ok {
_spec.SetField(importlist.FieldSettings, field.TypeJSON, value)
_node.Settings = value
}
return _node, _spec
}
// ImportListCreateBulk is the builder for creating many ImportList entities in bulk.
type ImportListCreateBulk struct {
config
err error
builders []*ImportListCreate
}
// Save creates the ImportList entities in the database.
func (ilcb *ImportListCreateBulk) Save(ctx context.Context) ([]*ImportList, error) {
if ilcb.err != nil {
return nil, ilcb.err
}
specs := make([]*sqlgraph.CreateSpec, len(ilcb.builders))
nodes := make([]*ImportList, len(ilcb.builders))
mutators := make([]Mutator, len(ilcb.builders))
for i := range ilcb.builders {
func(i int, root context.Context) {
builder := ilcb.builders[i]
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
mutation, ok := m.(*ImportListMutation)
if !ok {
return nil, fmt.Errorf("unexpected mutation type %T", m)
}
if err := builder.check(); err != nil {
return nil, err
}
builder.mutation = mutation
var err error
nodes[i], specs[i] = builder.createSpec()
if i < len(mutators)-1 {
_, err = mutators[i+1].Mutate(root, ilcb.builders[i+1].mutation)
} else {
spec := &sqlgraph.BatchCreateSpec{Nodes: specs}
// Invoke the actual operation on the latest mutation in the chain.
if err = sqlgraph.BatchCreate(ctx, ilcb.driver, spec); err != nil {
if sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
}
}
if err != nil {
return nil, err
}
mutation.id = &nodes[i].ID
if specs[i].ID.Value != nil {
id := specs[i].ID.Value.(int64)
nodes[i].ID = int(id)
}
mutation.done = true
return nodes[i], nil
})
for i := len(builder.hooks) - 1; i >= 0; i-- {
mut = builder.hooks[i](mut)
}
mutators[i] = mut
}(i, ctx)
}
if len(mutators) > 0 {
if _, err := mutators[0].Mutate(ctx, ilcb.builders[0].mutation); err != nil {
return nil, err
}
}
return nodes, nil
}
// SaveX is like Save, but panics if an error occurs.
func (ilcb *ImportListCreateBulk) SaveX(ctx context.Context) []*ImportList {
v, err := ilcb.Save(ctx)
if err != nil {
panic(err)
}
return v
}
// Exec executes the query.
func (ilcb *ImportListCreateBulk) Exec(ctx context.Context) error {
_, err := ilcb.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (ilcb *ImportListCreateBulk) ExecX(ctx context.Context) {
if err := ilcb.Exec(ctx); err != nil {
panic(err)
}
}
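A hedged sketch of creating an import list: per check() above, name, type, qulity and storage_id are required while url and settings stay optional. Field spelling follows the schema; the literal values are illustrative.

func addDoulist(ctx context.Context, client *ent.Client, url string) (*ent.ImportList, error) {
    return client.ImportList.Create().
        SetName("douban watchlist").
        SetType(importlist.TypeDoulist).
        SetURL(url).
        SetQulity("1080p").
        SetStorageID(1).
        Save(ctx)
}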

88
ent/importlist_delete.go Normal file
View File

@@ -0,0 +1,88 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"context"
"polaris/ent/importlist"
"polaris/ent/predicate"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
)
// ImportListDelete is the builder for deleting a ImportList entity.
type ImportListDelete struct {
config
hooks []Hook
mutation *ImportListMutation
}
// Where appends a list of predicates to the ImportListDelete builder.
func (ild *ImportListDelete) Where(ps ...predicate.ImportList) *ImportListDelete {
ild.mutation.Where(ps...)
return ild
}
// Exec executes the deletion query and returns how many vertices were deleted.
func (ild *ImportListDelete) Exec(ctx context.Context) (int, error) {
return withHooks(ctx, ild.sqlExec, ild.mutation, ild.hooks)
}
// ExecX is like Exec, but panics if an error occurs.
func (ild *ImportListDelete) ExecX(ctx context.Context) int {
n, err := ild.Exec(ctx)
if err != nil {
panic(err)
}
return n
}
func (ild *ImportListDelete) sqlExec(ctx context.Context) (int, error) {
_spec := sqlgraph.NewDeleteSpec(importlist.Table, sqlgraph.NewFieldSpec(importlist.FieldID, field.TypeInt))
if ps := ild.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
affected, err := sqlgraph.DeleteNodes(ctx, ild.driver, _spec)
if err != nil && sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
ild.mutation.done = true
return affected, err
}
// ImportListDeleteOne is the builder for deleting a single ImportList entity.
type ImportListDeleteOne struct {
ild *ImportListDelete
}
// Where appends a list of predicates to the ImportListDelete builder.
func (ildo *ImportListDeleteOne) Where(ps ...predicate.ImportList) *ImportListDeleteOne {
ildo.ild.mutation.Where(ps...)
return ildo
}
// Exec executes the deletion query.
func (ildo *ImportListDeleteOne) Exec(ctx context.Context) error {
n, err := ildo.ild.Exec(ctx)
switch {
case err != nil:
return err
case n == 0:
return &NotFoundError{importlist.Label}
default:
return nil
}
}
// ExecX is like Exec, but panics if an error occurs.
func (ildo *ImportListDeleteOne) ExecX(ctx context.Context) {
if err := ildo.Exec(ctx); err != nil {
panic(err)
}
}
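A small sketch of bulk deletion by predicate with the builder above; Exec reports how many rows were removed. Client access is assumed as in the earlier sketches.

func dropListsForStorage(ctx context.Context, client *ent.Client, storageID int) (int, error) {
    return client.ImportList.Delete().
        Where(importlist.StorageIDEQ(storageID)).
        Exec(ctx)
}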

526
ent/importlist_query.go Normal file
View File

@@ -0,0 +1,526 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"context"
"fmt"
"math"
"polaris/ent/importlist"
"polaris/ent/predicate"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
)
// ImportListQuery is the builder for querying ImportList entities.
type ImportListQuery struct {
config
ctx *QueryContext
order []importlist.OrderOption
inters []Interceptor
predicates []predicate.ImportList
// intermediate query (i.e. traversal path).
sql *sql.Selector
path func(context.Context) (*sql.Selector, error)
}
// Where adds a new predicate for the ImportListQuery builder.
func (ilq *ImportListQuery) Where(ps ...predicate.ImportList) *ImportListQuery {
ilq.predicates = append(ilq.predicates, ps...)
return ilq
}
// Limit the number of records to be returned by this query.
func (ilq *ImportListQuery) Limit(limit int) *ImportListQuery {
ilq.ctx.Limit = &limit
return ilq
}
// Offset to start from.
func (ilq *ImportListQuery) Offset(offset int) *ImportListQuery {
ilq.ctx.Offset = &offset
return ilq
}
// Unique configures the query builder to filter duplicate records on query.
// By default, unique is set to true, and can be disabled using this method.
func (ilq *ImportListQuery) Unique(unique bool) *ImportListQuery {
ilq.ctx.Unique = &unique
return ilq
}
// Order specifies how the records should be ordered.
func (ilq *ImportListQuery) Order(o ...importlist.OrderOption) *ImportListQuery {
ilq.order = append(ilq.order, o...)
return ilq
}
// First returns the first ImportList entity from the query.
// Returns a *NotFoundError when no ImportList was found.
func (ilq *ImportListQuery) First(ctx context.Context) (*ImportList, error) {
nodes, err := ilq.Limit(1).All(setContextOp(ctx, ilq.ctx, "First"))
if err != nil {
return nil, err
}
if len(nodes) == 0 {
return nil, &NotFoundError{importlist.Label}
}
return nodes[0], nil
}
// FirstX is like First, but panics if an error occurs.
func (ilq *ImportListQuery) FirstX(ctx context.Context) *ImportList {
node, err := ilq.First(ctx)
if err != nil && !IsNotFound(err) {
panic(err)
}
return node
}
// FirstID returns the first ImportList ID from the query.
// Returns a *NotFoundError when no ImportList ID was found.
func (ilq *ImportListQuery) FirstID(ctx context.Context) (id int, err error) {
var ids []int
if ids, err = ilq.Limit(1).IDs(setContextOp(ctx, ilq.ctx, "FirstID")); err != nil {
return
}
if len(ids) == 0 {
err = &NotFoundError{importlist.Label}
return
}
return ids[0], nil
}
// FirstIDX is like FirstID, but panics if an error occurs.
func (ilq *ImportListQuery) FirstIDX(ctx context.Context) int {
id, err := ilq.FirstID(ctx)
if err != nil && !IsNotFound(err) {
panic(err)
}
return id
}
// Only returns a single ImportList entity found by the query, ensuring it only returns one.
// Returns a *NotSingularError when more than one ImportList entity is found.
// Returns a *NotFoundError when no ImportList entities are found.
func (ilq *ImportListQuery) Only(ctx context.Context) (*ImportList, error) {
nodes, err := ilq.Limit(2).All(setContextOp(ctx, ilq.ctx, "Only"))
if err != nil {
return nil, err
}
switch len(nodes) {
case 1:
return nodes[0], nil
case 0:
return nil, &NotFoundError{importlist.Label}
default:
return nil, &NotSingularError{importlist.Label}
}
}
// OnlyX is like Only, but panics if an error occurs.
func (ilq *ImportListQuery) OnlyX(ctx context.Context) *ImportList {
node, err := ilq.Only(ctx)
if err != nil {
panic(err)
}
return node
}
// OnlyID is like Only, but returns the only ImportList ID in the query.
// Returns a *NotSingularError when more than one ImportList ID is found.
// Returns a *NotFoundError when no entities are found.
func (ilq *ImportListQuery) OnlyID(ctx context.Context) (id int, err error) {
var ids []int
if ids, err = ilq.Limit(2).IDs(setContextOp(ctx, ilq.ctx, "OnlyID")); err != nil {
return
}
switch len(ids) {
case 1:
id = ids[0]
case 0:
err = &NotFoundError{importlist.Label}
default:
err = &NotSingularError{importlist.Label}
}
return
}
// OnlyIDX is like OnlyID, but panics if an error occurs.
func (ilq *ImportListQuery) OnlyIDX(ctx context.Context) int {
id, err := ilq.OnlyID(ctx)
if err != nil {
panic(err)
}
return id
}
// All executes the query and returns a list of ImportLists.
func (ilq *ImportListQuery) All(ctx context.Context) ([]*ImportList, error) {
ctx = setContextOp(ctx, ilq.ctx, "All")
if err := ilq.prepareQuery(ctx); err != nil {
return nil, err
}
qr := querierAll[[]*ImportList, *ImportListQuery]()
return withInterceptors[[]*ImportList](ctx, ilq, qr, ilq.inters)
}
// AllX is like All, but panics if an error occurs.
func (ilq *ImportListQuery) AllX(ctx context.Context) []*ImportList {
nodes, err := ilq.All(ctx)
if err != nil {
panic(err)
}
return nodes
}
// IDs executes the query and returns a list of ImportList IDs.
func (ilq *ImportListQuery) IDs(ctx context.Context) (ids []int, err error) {
if ilq.ctx.Unique == nil && ilq.path != nil {
ilq.Unique(true)
}
ctx = setContextOp(ctx, ilq.ctx, "IDs")
if err = ilq.Select(importlist.FieldID).Scan(ctx, &ids); err != nil {
return nil, err
}
return ids, nil
}
// IDsX is like IDs, but panics if an error occurs.
func (ilq *ImportListQuery) IDsX(ctx context.Context) []int {
ids, err := ilq.IDs(ctx)
if err != nil {
panic(err)
}
return ids
}
// Count returns the count of the given query.
func (ilq *ImportListQuery) Count(ctx context.Context) (int, error) {
ctx = setContextOp(ctx, ilq.ctx, "Count")
if err := ilq.prepareQuery(ctx); err != nil {
return 0, err
}
return withInterceptors[int](ctx, ilq, querierCount[*ImportListQuery](), ilq.inters)
}
// CountX is like Count, but panics if an error occurs.
func (ilq *ImportListQuery) CountX(ctx context.Context) int {
count, err := ilq.Count(ctx)
if err != nil {
panic(err)
}
return count
}
// Exist returns true if the query has elements in the graph.
func (ilq *ImportListQuery) Exist(ctx context.Context) (bool, error) {
ctx = setContextOp(ctx, ilq.ctx, "Exist")
switch _, err := ilq.FirstID(ctx); {
case IsNotFound(err):
return false, nil
case err != nil:
return false, fmt.Errorf("ent: check existence: %w", err)
default:
return true, nil
}
}
// ExistX is like Exist, but panics if an error occurs.
func (ilq *ImportListQuery) ExistX(ctx context.Context) bool {
exist, err := ilq.Exist(ctx)
if err != nil {
panic(err)
}
return exist
}
// Clone returns a duplicate of the ImportListQuery builder, including all associated steps. It can be
// used to prepare common query builders and use them differently after the clone is made.
func (ilq *ImportListQuery) Clone() *ImportListQuery {
if ilq == nil {
return nil
}
return &ImportListQuery{
config: ilq.config,
ctx: ilq.ctx.Clone(),
order: append([]importlist.OrderOption{}, ilq.order...),
inters: append([]Interceptor{}, ilq.inters...),
predicates: append([]predicate.ImportList{}, ilq.predicates...),
// clone intermediate query.
sql: ilq.sql.Clone(),
path: ilq.path,
}
}
// GroupBy is used to group vertices by one or more fields/columns.
// It is often used with aggregate functions, like: count, max, mean, min, sum.
//
// Example:
//
// var v []struct {
// Name string `json:"name,omitempty"`
// Count int `json:"count,omitempty"`
// }
//
// client.ImportList.Query().
// GroupBy(importlist.FieldName).
// Aggregate(ent.Count()).
// Scan(ctx, &v)
func (ilq *ImportListQuery) GroupBy(field string, fields ...string) *ImportListGroupBy {
ilq.ctx.Fields = append([]string{field}, fields...)
grbuild := &ImportListGroupBy{build: ilq}
grbuild.flds = &ilq.ctx.Fields
grbuild.label = importlist.Label
grbuild.scan = grbuild.Scan
return grbuild
}
// Select allows the selection of one or more fields/columns for the given query,
// instead of selecting all fields in the entity.
//
// Example:
//
// var v []struct {
// Name string `json:"name,omitempty"`
// }
//
// client.ImportList.Query().
// Select(importlist.FieldName).
// Scan(ctx, &v)
func (ilq *ImportListQuery) Select(fields ...string) *ImportListSelect {
ilq.ctx.Fields = append(ilq.ctx.Fields, fields...)
sbuild := &ImportListSelect{ImportListQuery: ilq}
sbuild.label = importlist.Label
sbuild.flds, sbuild.scan = &ilq.ctx.Fields, sbuild.Scan
return sbuild
}
// Aggregate returns a ImportListSelect configured with the given aggregations.
func (ilq *ImportListQuery) Aggregate(fns ...AggregateFunc) *ImportListSelect {
return ilq.Select().Aggregate(fns...)
}
func (ilq *ImportListQuery) prepareQuery(ctx context.Context) error {
for _, inter := range ilq.inters {
if inter == nil {
return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)")
}
if trv, ok := inter.(Traverser); ok {
if err := trv.Traverse(ctx, ilq); err != nil {
return err
}
}
}
for _, f := range ilq.ctx.Fields {
if !importlist.ValidColumn(f) {
return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
}
}
if ilq.path != nil {
prev, err := ilq.path(ctx)
if err != nil {
return err
}
ilq.sql = prev
}
return nil
}
func (ilq *ImportListQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*ImportList, error) {
var (
nodes = []*ImportList{}
_spec = ilq.querySpec()
)
_spec.ScanValues = func(columns []string) ([]any, error) {
return (*ImportList).scanValues(nil, columns)
}
_spec.Assign = func(columns []string, values []any) error {
node := &ImportList{config: ilq.config}
nodes = append(nodes, node)
return node.assignValues(columns, values)
}
for i := range hooks {
hooks[i](ctx, _spec)
}
if err := sqlgraph.QueryNodes(ctx, ilq.driver, _spec); err != nil {
return nil, err
}
if len(nodes) == 0 {
return nodes, nil
}
return nodes, nil
}
func (ilq *ImportListQuery) sqlCount(ctx context.Context) (int, error) {
_spec := ilq.querySpec()
_spec.Node.Columns = ilq.ctx.Fields
if len(ilq.ctx.Fields) > 0 {
_spec.Unique = ilq.ctx.Unique != nil && *ilq.ctx.Unique
}
return sqlgraph.CountNodes(ctx, ilq.driver, _spec)
}
func (ilq *ImportListQuery) querySpec() *sqlgraph.QuerySpec {
_spec := sqlgraph.NewQuerySpec(importlist.Table, importlist.Columns, sqlgraph.NewFieldSpec(importlist.FieldID, field.TypeInt))
_spec.From = ilq.sql
if unique := ilq.ctx.Unique; unique != nil {
_spec.Unique = *unique
} else if ilq.path != nil {
_spec.Unique = true
}
if fields := ilq.ctx.Fields; len(fields) > 0 {
_spec.Node.Columns = make([]string, 0, len(fields))
_spec.Node.Columns = append(_spec.Node.Columns, importlist.FieldID)
for i := range fields {
if fields[i] != importlist.FieldID {
_spec.Node.Columns = append(_spec.Node.Columns, fields[i])
}
}
}
if ps := ilq.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
if limit := ilq.ctx.Limit; limit != nil {
_spec.Limit = *limit
}
if offset := ilq.ctx.Offset; offset != nil {
_spec.Offset = *offset
}
if ps := ilq.order; len(ps) > 0 {
_spec.Order = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
return _spec
}
func (ilq *ImportListQuery) sqlQuery(ctx context.Context) *sql.Selector {
builder := sql.Dialect(ilq.driver.Dialect())
t1 := builder.Table(importlist.Table)
columns := ilq.ctx.Fields
if len(columns) == 0 {
columns = importlist.Columns
}
selector := builder.Select(t1.Columns(columns...)...).From(t1)
if ilq.sql != nil {
selector = ilq.sql
selector.Select(selector.Columns(columns...)...)
}
if ilq.ctx.Unique != nil && *ilq.ctx.Unique {
selector.Distinct()
}
for _, p := range ilq.predicates {
p(selector)
}
for _, p := range ilq.order {
p(selector)
}
if offset := ilq.ctx.Offset; offset != nil {
// limit is mandatory for offset clause. We start
// with default value, and override it below if needed.
selector.Offset(*offset).Limit(math.MaxInt32)
}
if limit := ilq.ctx.Limit; limit != nil {
selector.Limit(*limit)
}
return selector
}
// ImportListGroupBy is the group-by builder for ImportList entities.
type ImportListGroupBy struct {
selector
build *ImportListQuery
}
// Aggregate adds the given aggregation functions to the group-by query.
func (ilgb *ImportListGroupBy) Aggregate(fns ...AggregateFunc) *ImportListGroupBy {
ilgb.fns = append(ilgb.fns, fns...)
return ilgb
}
// Scan applies the selector query and scans the result into the given value.
func (ilgb *ImportListGroupBy) Scan(ctx context.Context, v any) error {
ctx = setContextOp(ctx, ilgb.build.ctx, "GroupBy")
if err := ilgb.build.prepareQuery(ctx); err != nil {
return err
}
return scanWithInterceptors[*ImportListQuery, *ImportListGroupBy](ctx, ilgb.build, ilgb, ilgb.build.inters, v)
}
func (ilgb *ImportListGroupBy) sqlScan(ctx context.Context, root *ImportListQuery, v any) error {
selector := root.sqlQuery(ctx).Select()
aggregation := make([]string, 0, len(ilgb.fns))
for _, fn := range ilgb.fns {
aggregation = append(aggregation, fn(selector))
}
if len(selector.SelectedColumns()) == 0 {
columns := make([]string, 0, len(*ilgb.flds)+len(ilgb.fns))
for _, f := range *ilgb.flds {
columns = append(columns, selector.C(f))
}
columns = append(columns, aggregation...)
selector.Select(columns...)
}
selector.GroupBy(selector.Columns(*ilgb.flds...)...)
if err := selector.Err(); err != nil {
return err
}
rows := &sql.Rows{}
query, args := selector.Query()
if err := ilgb.build.driver.Query(ctx, query, args, rows); err != nil {
return err
}
defer rows.Close()
return sql.ScanSlice(rows, v)
}
// ImportListSelect is the builder for selecting fields of ImportList entities.
type ImportListSelect struct {
*ImportListQuery
selector
}
// Aggregate adds the given aggregation functions to the selector query.
func (ils *ImportListSelect) Aggregate(fns ...AggregateFunc) *ImportListSelect {
ils.fns = append(ils.fns, fns...)
return ils
}
// Scan applies the selector query and scans the result into the given value.
func (ils *ImportListSelect) Scan(ctx context.Context, v any) error {
ctx = setContextOp(ctx, ils.ctx, "Select")
if err := ils.prepareQuery(ctx); err != nil {
return err
}
return scanWithInterceptors[*ImportListQuery, *ImportListSelect](ctx, ils.ImportListQuery, ils, ils.inters, v)
}
func (ils *ImportListSelect) sqlScan(ctx context.Context, root *ImportListQuery, v any) error {
selector := root.sqlQuery(ctx)
aggregation := make([]string, 0, len(ils.fns))
for _, fn := range ils.fns {
aggregation = append(aggregation, fn(selector))
}
switch n := len(*ils.selector.flds); {
case n == 0 && len(aggregation) > 0:
selector.Select(aggregation...)
case n != 0 && len(aggregation) > 0:
selector.AppendSelect(aggregation...)
}
rows := &sql.Rows{}
query, args := selector.Query()
if err := ils.driver.Query(ctx, query, args, rows); err != nil {
return err
}
defer rows.Close()
return sql.ScanSlice(rows, v)
}
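
For orientation, a minimal usage sketch of the query builders above (illustrative only, not part of the generated file): it assumes an *ent.Client is constructed elsewhere in Polaris and that the predicate helpers and enum constants in polaris/ent/importlist follow ent's usual naming.

package example

import (
	"context"

	"polaris/ent"
	"polaris/ent/importlist"
)

// plexLists fetches all import lists of type "plex". TypePlex is the enum
// constant ent is expected to generate for the "type" field (assumption).
func plexLists(ctx context.Context, client *ent.Client) ([]*ent.ImportList, error) {
	return client.ImportList.Query().
		Where(importlist.TypeEQ(importlist.TypePlex)).
		All(ctx)
}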

ent/importlist_update.go (new file)

@@ -0,0 +1,462 @@
// Code generated by ent, DO NOT EDIT.
package ent
import (
"context"
"errors"
"fmt"
"polaris/ent/importlist"
"polaris/ent/predicate"
"polaris/ent/schema"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
)
// ImportListUpdate is the builder for updating ImportList entities.
type ImportListUpdate struct {
config
hooks []Hook
mutation *ImportListMutation
}
// Where appends a list predicates to the ImportListUpdate builder.
func (ilu *ImportListUpdate) Where(ps ...predicate.ImportList) *ImportListUpdate {
ilu.mutation.Where(ps...)
return ilu
}
// SetName sets the "name" field.
func (ilu *ImportListUpdate) SetName(s string) *ImportListUpdate {
ilu.mutation.SetName(s)
return ilu
}
// SetNillableName sets the "name" field if the given value is not nil.
func (ilu *ImportListUpdate) SetNillableName(s *string) *ImportListUpdate {
if s != nil {
ilu.SetName(*s)
}
return ilu
}
// SetType sets the "type" field.
func (ilu *ImportListUpdate) SetType(i importlist.Type) *ImportListUpdate {
ilu.mutation.SetType(i)
return ilu
}
// SetNillableType sets the "type" field if the given value is not nil.
func (ilu *ImportListUpdate) SetNillableType(i *importlist.Type) *ImportListUpdate {
if i != nil {
ilu.SetType(*i)
}
return ilu
}
// SetURL sets the "url" field.
func (ilu *ImportListUpdate) SetURL(s string) *ImportListUpdate {
ilu.mutation.SetURL(s)
return ilu
}
// SetNillableURL sets the "url" field if the given value is not nil.
func (ilu *ImportListUpdate) SetNillableURL(s *string) *ImportListUpdate {
if s != nil {
ilu.SetURL(*s)
}
return ilu
}
// ClearURL clears the value of the "url" field.
func (ilu *ImportListUpdate) ClearURL() *ImportListUpdate {
ilu.mutation.ClearURL()
return ilu
}
// SetQulity sets the "qulity" field.
func (ilu *ImportListUpdate) SetQulity(s string) *ImportListUpdate {
ilu.mutation.SetQulity(s)
return ilu
}
// SetNillableQulity sets the "qulity" field if the given value is not nil.
func (ilu *ImportListUpdate) SetNillableQulity(s *string) *ImportListUpdate {
if s != nil {
ilu.SetQulity(*s)
}
return ilu
}
// SetStorageID sets the "storage_id" field.
func (ilu *ImportListUpdate) SetStorageID(i int) *ImportListUpdate {
ilu.mutation.ResetStorageID()
ilu.mutation.SetStorageID(i)
return ilu
}
// SetNillableStorageID sets the "storage_id" field if the given value is not nil.
func (ilu *ImportListUpdate) SetNillableStorageID(i *int) *ImportListUpdate {
if i != nil {
ilu.SetStorageID(*i)
}
return ilu
}
// AddStorageID adds i to the "storage_id" field.
func (ilu *ImportListUpdate) AddStorageID(i int) *ImportListUpdate {
ilu.mutation.AddStorageID(i)
return ilu
}
// SetSettings sets the "settings" field.
func (ilu *ImportListUpdate) SetSettings(sls schema.ImportListSettings) *ImportListUpdate {
ilu.mutation.SetSettings(sls)
return ilu
}
// SetNillableSettings sets the "settings" field if the given value is not nil.
func (ilu *ImportListUpdate) SetNillableSettings(sls *schema.ImportListSettings) *ImportListUpdate {
if sls != nil {
ilu.SetSettings(*sls)
}
return ilu
}
// ClearSettings clears the value of the "settings" field.
func (ilu *ImportListUpdate) ClearSettings() *ImportListUpdate {
ilu.mutation.ClearSettings()
return ilu
}
// Mutation returns the ImportListMutation object of the builder.
func (ilu *ImportListUpdate) Mutation() *ImportListMutation {
return ilu.mutation
}
// Save executes the query and returns the number of nodes affected by the update operation.
func (ilu *ImportListUpdate) Save(ctx context.Context) (int, error) {
return withHooks(ctx, ilu.sqlSave, ilu.mutation, ilu.hooks)
}
// SaveX is like Save, but panics if an error occurs.
func (ilu *ImportListUpdate) SaveX(ctx context.Context) int {
affected, err := ilu.Save(ctx)
if err != nil {
panic(err)
}
return affected
}
// Exec executes the query.
func (ilu *ImportListUpdate) Exec(ctx context.Context) error {
_, err := ilu.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (ilu *ImportListUpdate) ExecX(ctx context.Context) {
if err := ilu.Exec(ctx); err != nil {
panic(err)
}
}
// check runs all checks and user-defined validators on the builder.
func (ilu *ImportListUpdate) check() error {
if v, ok := ilu.mutation.GetType(); ok {
if err := importlist.TypeValidator(v); err != nil {
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "ImportList.type": %w`, err)}
}
}
return nil
}
func (ilu *ImportListUpdate) sqlSave(ctx context.Context) (n int, err error) {
if err := ilu.check(); err != nil {
return n, err
}
_spec := sqlgraph.NewUpdateSpec(importlist.Table, importlist.Columns, sqlgraph.NewFieldSpec(importlist.FieldID, field.TypeInt))
if ps := ilu.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
if value, ok := ilu.mutation.Name(); ok {
_spec.SetField(importlist.FieldName, field.TypeString, value)
}
if value, ok := ilu.mutation.GetType(); ok {
_spec.SetField(importlist.FieldType, field.TypeEnum, value)
}
if value, ok := ilu.mutation.URL(); ok {
_spec.SetField(importlist.FieldURL, field.TypeString, value)
}
if ilu.mutation.URLCleared() {
_spec.ClearField(importlist.FieldURL, field.TypeString)
}
if value, ok := ilu.mutation.Qulity(); ok {
_spec.SetField(importlist.FieldQulity, field.TypeString, value)
}
if value, ok := ilu.mutation.StorageID(); ok {
_spec.SetField(importlist.FieldStorageID, field.TypeInt, value)
}
if value, ok := ilu.mutation.AddedStorageID(); ok {
_spec.AddField(importlist.FieldStorageID, field.TypeInt, value)
}
if value, ok := ilu.mutation.Settings(); ok {
_spec.SetField(importlist.FieldSettings, field.TypeJSON, value)
}
if ilu.mutation.SettingsCleared() {
_spec.ClearField(importlist.FieldSettings, field.TypeJSON)
}
if n, err = sqlgraph.UpdateNodes(ctx, ilu.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{importlist.Label}
} else if sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
return 0, err
}
ilu.mutation.done = true
return n, nil
}
// ImportListUpdateOne is the builder for updating a single ImportList entity.
type ImportListUpdateOne struct {
config
fields []string
hooks []Hook
mutation *ImportListMutation
}
// SetName sets the "name" field.
func (iluo *ImportListUpdateOne) SetName(s string) *ImportListUpdateOne {
iluo.mutation.SetName(s)
return iluo
}
// SetNillableName sets the "name" field if the given value is not nil.
func (iluo *ImportListUpdateOne) SetNillableName(s *string) *ImportListUpdateOne {
if s != nil {
iluo.SetName(*s)
}
return iluo
}
// SetType sets the "type" field.
func (iluo *ImportListUpdateOne) SetType(i importlist.Type) *ImportListUpdateOne {
iluo.mutation.SetType(i)
return iluo
}
// SetNillableType sets the "type" field if the given value is not nil.
func (iluo *ImportListUpdateOne) SetNillableType(i *importlist.Type) *ImportListUpdateOne {
if i != nil {
iluo.SetType(*i)
}
return iluo
}
// SetURL sets the "url" field.
func (iluo *ImportListUpdateOne) SetURL(s string) *ImportListUpdateOne {
iluo.mutation.SetURL(s)
return iluo
}
// SetNillableURL sets the "url" field if the given value is not nil.
func (iluo *ImportListUpdateOne) SetNillableURL(s *string) *ImportListUpdateOne {
if s != nil {
iluo.SetURL(*s)
}
return iluo
}
// ClearURL clears the value of the "url" field.
func (iluo *ImportListUpdateOne) ClearURL() *ImportListUpdateOne {
iluo.mutation.ClearURL()
return iluo
}
// SetQulity sets the "qulity" field.
func (iluo *ImportListUpdateOne) SetQulity(s string) *ImportListUpdateOne {
iluo.mutation.SetQulity(s)
return iluo
}
// SetNillableQulity sets the "qulity" field if the given value is not nil.
func (iluo *ImportListUpdateOne) SetNillableQulity(s *string) *ImportListUpdateOne {
if s != nil {
iluo.SetQulity(*s)
}
return iluo
}
// SetStorageID sets the "storage_id" field.
func (iluo *ImportListUpdateOne) SetStorageID(i int) *ImportListUpdateOne {
iluo.mutation.ResetStorageID()
iluo.mutation.SetStorageID(i)
return iluo
}
// SetNillableStorageID sets the "storage_id" field if the given value is not nil.
func (iluo *ImportListUpdateOne) SetNillableStorageID(i *int) *ImportListUpdateOne {
if i != nil {
iluo.SetStorageID(*i)
}
return iluo
}
// AddStorageID adds i to the "storage_id" field.
func (iluo *ImportListUpdateOne) AddStorageID(i int) *ImportListUpdateOne {
iluo.mutation.AddStorageID(i)
return iluo
}
// SetSettings sets the "settings" field.
func (iluo *ImportListUpdateOne) SetSettings(sls schema.ImportListSettings) *ImportListUpdateOne {
iluo.mutation.SetSettings(sls)
return iluo
}
// SetNillableSettings sets the "settings" field if the given value is not nil.
func (iluo *ImportListUpdateOne) SetNillableSettings(sls *schema.ImportListSettings) *ImportListUpdateOne {
if sls != nil {
iluo.SetSettings(*sls)
}
return iluo
}
// ClearSettings clears the value of the "settings" field.
func (iluo *ImportListUpdateOne) ClearSettings() *ImportListUpdateOne {
iluo.mutation.ClearSettings()
return iluo
}
// Mutation returns the ImportListMutation object of the builder.
func (iluo *ImportListUpdateOne) Mutation() *ImportListMutation {
return iluo.mutation
}
// Where appends a list predicates to the ImportListUpdate builder.
func (iluo *ImportListUpdateOne) Where(ps ...predicate.ImportList) *ImportListUpdateOne {
iluo.mutation.Where(ps...)
return iluo
}
// Select allows selecting one or more fields (columns) of the returned entity.
// The default is selecting all fields defined in the entity schema.
func (iluo *ImportListUpdateOne) Select(field string, fields ...string) *ImportListUpdateOne {
iluo.fields = append([]string{field}, fields...)
return iluo
}
// Save executes the query and returns the updated ImportList entity.
func (iluo *ImportListUpdateOne) Save(ctx context.Context) (*ImportList, error) {
return withHooks(ctx, iluo.sqlSave, iluo.mutation, iluo.hooks)
}
// SaveX is like Save, but panics if an error occurs.
func (iluo *ImportListUpdateOne) SaveX(ctx context.Context) *ImportList {
node, err := iluo.Save(ctx)
if err != nil {
panic(err)
}
return node
}
// Exec executes the query on the entity.
func (iluo *ImportListUpdateOne) Exec(ctx context.Context) error {
_, err := iluo.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (iluo *ImportListUpdateOne) ExecX(ctx context.Context) {
if err := iluo.Exec(ctx); err != nil {
panic(err)
}
}
// check runs all checks and user-defined validators on the builder.
func (iluo *ImportListUpdateOne) check() error {
if v, ok := iluo.mutation.GetType(); ok {
if err := importlist.TypeValidator(v); err != nil {
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "ImportList.type": %w`, err)}
}
}
return nil
}
func (iluo *ImportListUpdateOne) sqlSave(ctx context.Context) (_node *ImportList, err error) {
if err := iluo.check(); err != nil {
return _node, err
}
_spec := sqlgraph.NewUpdateSpec(importlist.Table, importlist.Columns, sqlgraph.NewFieldSpec(importlist.FieldID, field.TypeInt))
id, ok := iluo.mutation.ID()
if !ok {
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "ImportList.id" for update`)}
}
_spec.Node.ID.Value = id
if fields := iluo.fields; len(fields) > 0 {
_spec.Node.Columns = make([]string, 0, len(fields))
_spec.Node.Columns = append(_spec.Node.Columns, importlist.FieldID)
for _, f := range fields {
if !importlist.ValidColumn(f) {
return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
}
if f != importlist.FieldID {
_spec.Node.Columns = append(_spec.Node.Columns, f)
}
}
}
if ps := iluo.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
if value, ok := iluo.mutation.Name(); ok {
_spec.SetField(importlist.FieldName, field.TypeString, value)
}
if value, ok := iluo.mutation.GetType(); ok {
_spec.SetField(importlist.FieldType, field.TypeEnum, value)
}
if value, ok := iluo.mutation.URL(); ok {
_spec.SetField(importlist.FieldURL, field.TypeString, value)
}
if iluo.mutation.URLCleared() {
_spec.ClearField(importlist.FieldURL, field.TypeString)
}
if value, ok := iluo.mutation.Qulity(); ok {
_spec.SetField(importlist.FieldQulity, field.TypeString, value)
}
if value, ok := iluo.mutation.StorageID(); ok {
_spec.SetField(importlist.FieldStorageID, field.TypeInt, value)
}
if value, ok := iluo.mutation.AddedStorageID(); ok {
_spec.AddField(importlist.FieldStorageID, field.TypeInt, value)
}
if value, ok := iluo.mutation.Settings(); ok {
_spec.SetField(importlist.FieldSettings, field.TypeJSON, value)
}
if iluo.mutation.SettingsCleared() {
_spec.ClearField(importlist.FieldSettings, field.TypeJSON)
}
_node = &ImportList{config: iluo.config}
_spec.Assign = _node.assignValues
_spec.ScanValues = _node.scanValues
if err = sqlgraph.UpdateNode(ctx, iluo.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{importlist.Label}
} else if sqlgraph.IsConstraintError(err) {
err = &ConstraintError{msg: err.Error(), wrap: err}
}
return nil, err
}
iluo.mutation.done = true
return _node, nil
}
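
A matching sketch for the update builders, again illustrative and assuming an existing client; SetQulity mirrors the "qulity" field name used throughout the schema.

package example

import (
	"context"

	"polaris/ent"
)

// setImportListQuality updates a single row by ID via ImportListUpdateOne.
func setImportListQuality(ctx context.Context, client *ent.Client, id int, quality string) (*ent.ImportList, error) {
	return client.ImportList.
		UpdateOneID(id).
		SetQulity(quality).
		Save(ctx)
}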

View File

@@ -25,7 +25,11 @@ type Indexers struct {
// EnableRss holds the value of the "enable_rss" field.
EnableRss bool `json:"enable_rss,omitempty"`
// Priority holds the value of the "priority" field.
Priority int `json:"priority,omitempty"`
Priority int `json:"priority,omitempty"`
// minimal seed ratio required before removing torrent
SeedRatio float32 `json:"seed_ratio,omitempty"`
// Disabled holds the value of the "disabled" field.
Disabled bool `json:"disabled,omitempty"`
selectValues sql.SelectValues
}
@@ -34,8 +38,10 @@ func (*Indexers) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case indexers.FieldEnableRss:
case indexers.FieldEnableRss, indexers.FieldDisabled:
values[i] = new(sql.NullBool)
case indexers.FieldSeedRatio:
values[i] = new(sql.NullFloat64)
case indexers.FieldID, indexers.FieldPriority:
values[i] = new(sql.NullInt64)
case indexers.FieldName, indexers.FieldImplementation, indexers.FieldSettings:
@@ -91,6 +97,18 @@ func (i *Indexers) assignValues(columns []string, values []any) error {
} else if value.Valid {
i.Priority = int(value.Int64)
}
case indexers.FieldSeedRatio:
if value, ok := values[j].(*sql.NullFloat64); !ok {
return fmt.Errorf("unexpected type %T for field seed_ratio", values[j])
} else if value.Valid {
i.SeedRatio = float32(value.Float64)
}
case indexers.FieldDisabled:
if value, ok := values[j].(*sql.NullBool); !ok {
return fmt.Errorf("unexpected type %T for field disabled", values[j])
} else if value.Valid {
i.Disabled = value.Bool
}
default:
i.selectValues.Set(columns[j], values[j])
}
@@ -141,6 +159,12 @@ func (i *Indexers) String() string {
builder.WriteString(", ")
builder.WriteString("priority=")
builder.WriteString(fmt.Sprintf("%v", i.Priority))
builder.WriteString(", ")
builder.WriteString("seed_ratio=")
builder.WriteString(fmt.Sprintf("%v", i.SeedRatio))
builder.WriteString(", ")
builder.WriteString("disabled=")
builder.WriteString(fmt.Sprintf("%v", i.Disabled))
builder.WriteByte(')')
return builder.String()
}

View File

@@ -21,6 +21,10 @@ const (
FieldEnableRss = "enable_rss"
// FieldPriority holds the string denoting the priority field in the database.
FieldPriority = "priority"
// FieldSeedRatio holds the string denoting the seed_ratio field in the database.
FieldSeedRatio = "seed_ratio"
// FieldDisabled holds the string denoting the disabled field in the database.
FieldDisabled = "disabled"
// Table holds the table name of the indexers in the database.
Table = "indexers"
)
@@ -33,6 +37,8 @@ var Columns = []string{
FieldSettings,
FieldEnableRss,
FieldPriority,
FieldSeedRatio,
FieldDisabled,
}
// ValidColumn reports if the column name is valid (part of the table columns).
@@ -48,6 +54,12 @@ func ValidColumn(column string) bool {
var (
// DefaultEnableRss holds the default value on creation for the "enable_rss" field.
DefaultEnableRss bool
// DefaultPriority holds the default value on creation for the "priority" field.
DefaultPriority int
// DefaultSeedRatio holds the default value on creation for the "seed_ratio" field.
DefaultSeedRatio float32
// DefaultDisabled holds the default value on creation for the "disabled" field.
DefaultDisabled bool
)
// OrderOption defines the ordering options for the Indexers queries.
@@ -82,3 +94,13 @@ func ByEnableRss(opts ...sql.OrderTermOption) OrderOption {
func ByPriority(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldPriority, opts...).ToFunc()
}
// BySeedRatio orders the results by the seed_ratio field.
func BySeedRatio(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldSeedRatio, opts...).ToFunc()
}
// ByDisabled orders the results by the disabled field.
func ByDisabled(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldDisabled, opts...).ToFunc()
}

View File

@@ -78,6 +78,16 @@ func Priority(v int) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldPriority, v))
}
// SeedRatio applies equality check predicate on the "seed_ratio" field. It's identical to SeedRatioEQ.
func SeedRatio(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldSeedRatio, v))
}
// Disabled applies equality check predicate on the "disabled" field. It's identical to DisabledEQ.
func Disabled(v bool) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldDisabled, v))
}
// NameEQ applies the EQ predicate on the "name" field.
func NameEQ(v string) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldName, v))
@@ -323,6 +333,76 @@ func PriorityLTE(v int) predicate.Indexers {
return predicate.Indexers(sql.FieldLTE(FieldPriority, v))
}
// SeedRatioEQ applies the EQ predicate on the "seed_ratio" field.
func SeedRatioEQ(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldSeedRatio, v))
}
// SeedRatioNEQ applies the NEQ predicate on the "seed_ratio" field.
func SeedRatioNEQ(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldNEQ(FieldSeedRatio, v))
}
// SeedRatioIn applies the In predicate on the "seed_ratio" field.
func SeedRatioIn(vs ...float32) predicate.Indexers {
return predicate.Indexers(sql.FieldIn(FieldSeedRatio, vs...))
}
// SeedRatioNotIn applies the NotIn predicate on the "seed_ratio" field.
func SeedRatioNotIn(vs ...float32) predicate.Indexers {
return predicate.Indexers(sql.FieldNotIn(FieldSeedRatio, vs...))
}
// SeedRatioGT applies the GT predicate on the "seed_ratio" field.
func SeedRatioGT(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldGT(FieldSeedRatio, v))
}
// SeedRatioGTE applies the GTE predicate on the "seed_ratio" field.
func SeedRatioGTE(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldGTE(FieldSeedRatio, v))
}
// SeedRatioLT applies the LT predicate on the "seed_ratio" field.
func SeedRatioLT(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldLT(FieldSeedRatio, v))
}
// SeedRatioLTE applies the LTE predicate on the "seed_ratio" field.
func SeedRatioLTE(v float32) predicate.Indexers {
return predicate.Indexers(sql.FieldLTE(FieldSeedRatio, v))
}
// SeedRatioIsNil applies the IsNil predicate on the "seed_ratio" field.
func SeedRatioIsNil() predicate.Indexers {
return predicate.Indexers(sql.FieldIsNull(FieldSeedRatio))
}
// SeedRatioNotNil applies the NotNil predicate on the "seed_ratio" field.
func SeedRatioNotNil() predicate.Indexers {
return predicate.Indexers(sql.FieldNotNull(FieldSeedRatio))
}
// DisabledEQ applies the EQ predicate on the "disabled" field.
func DisabledEQ(v bool) predicate.Indexers {
return predicate.Indexers(sql.FieldEQ(FieldDisabled, v))
}
// DisabledNEQ applies the NEQ predicate on the "disabled" field.
func DisabledNEQ(v bool) predicate.Indexers {
return predicate.Indexers(sql.FieldNEQ(FieldDisabled, v))
}
// DisabledIsNil applies the IsNil predicate on the "disabled" field.
func DisabledIsNil() predicate.Indexers {
return predicate.Indexers(sql.FieldIsNull(FieldDisabled))
}
// DisabledNotNil applies the NotNil predicate on the "disabled" field.
func DisabledNotNil() predicate.Indexers {
return predicate.Indexers(sql.FieldNotNull(FieldDisabled))
}
// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Indexers) predicate.Indexers {
return predicate.Indexers(sql.AndPredicates(predicates...))
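
A short sketch of how the new indexer predicates might be combined (illustrative; the Or helper and client construction follow standard ent codegen and are not shown in this diff).

package example

import (
	"context"

	"polaris/ent"
	"polaris/ent/indexers"
)

// usableIndexers returns indexers that are not disabled; a NULL value in the
// nullable "disabled" column is treated as enabled.
func usableIndexers(ctx context.Context, client *ent.Client) ([]*ent.Indexers, error) {
	return client.Indexers.Query().
		Where(indexers.Or(
			indexers.DisabledIsNil(),
			indexers.Disabled(false),
		)).
		All(ctx)
}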

View File

@@ -57,6 +57,42 @@ func (ic *IndexersCreate) SetPriority(i int) *IndexersCreate {
return ic
}
// SetNillablePriority sets the "priority" field if the given value is not nil.
func (ic *IndexersCreate) SetNillablePriority(i *int) *IndexersCreate {
if i != nil {
ic.SetPriority(*i)
}
return ic
}
// SetSeedRatio sets the "seed_ratio" field.
func (ic *IndexersCreate) SetSeedRatio(f float32) *IndexersCreate {
ic.mutation.SetSeedRatio(f)
return ic
}
// SetNillableSeedRatio sets the "seed_ratio" field if the given value is not nil.
func (ic *IndexersCreate) SetNillableSeedRatio(f *float32) *IndexersCreate {
if f != nil {
ic.SetSeedRatio(*f)
}
return ic
}
// SetDisabled sets the "disabled" field.
func (ic *IndexersCreate) SetDisabled(b bool) *IndexersCreate {
ic.mutation.SetDisabled(b)
return ic
}
// SetNillableDisabled sets the "disabled" field if the given value is not nil.
func (ic *IndexersCreate) SetNillableDisabled(b *bool) *IndexersCreate {
if b != nil {
ic.SetDisabled(*b)
}
return ic
}
// Mutation returns the IndexersMutation object of the builder.
func (ic *IndexersCreate) Mutation() *IndexersMutation {
return ic.mutation
@@ -96,6 +132,18 @@ func (ic *IndexersCreate) defaults() {
v := indexers.DefaultEnableRss
ic.mutation.SetEnableRss(v)
}
if _, ok := ic.mutation.Priority(); !ok {
v := indexers.DefaultPriority
ic.mutation.SetPriority(v)
}
if _, ok := ic.mutation.SeedRatio(); !ok {
v := indexers.DefaultSeedRatio
ic.mutation.SetSeedRatio(v)
}
if _, ok := ic.mutation.Disabled(); !ok {
v := indexers.DefaultDisabled
ic.mutation.SetDisabled(v)
}
}
// check runs all checks and user-defined validators on the builder.
@@ -161,6 +209,14 @@ func (ic *IndexersCreate) createSpec() (*Indexers, *sqlgraph.CreateSpec) {
_spec.SetField(indexers.FieldPriority, field.TypeInt, value)
_node.Priority = value
}
if value, ok := ic.mutation.SeedRatio(); ok {
_spec.SetField(indexers.FieldSeedRatio, field.TypeFloat32, value)
_node.SeedRatio = value
}
if value, ok := ic.mutation.Disabled(); ok {
_spec.SetField(indexers.FieldDisabled, field.TypeBool, value)
_node.Disabled = value
}
return _node, _spec
}
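
A hedged sketch of creating an indexer with the new fields; the name, implementation string, and settings payload are placeholders, not values taken from Polaris.

package example

import (
	"context"

	"polaris/ent"
)

// newIndexer creates an indexer whose torrents are removed once the seed ratio
// reaches 1.0. Omitted setters (enable_rss, priority) fall back to the
// defaults wired up in defaults() above.
func newIndexer(ctx context.Context, client *ent.Client) (*ent.Indexers, error) {
	return client.Indexers.Create().
		SetName("example-indexer").
		SetImplementation("torznab").
		SetSettings("{}").
		SetSeedRatio(1.0).
		SetDisabled(false).
		Save(ctx)
}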

View File

@@ -104,6 +104,53 @@ func (iu *IndexersUpdate) AddPriority(i int) *IndexersUpdate {
return iu
}
// SetSeedRatio sets the "seed_ratio" field.
func (iu *IndexersUpdate) SetSeedRatio(f float32) *IndexersUpdate {
iu.mutation.ResetSeedRatio()
iu.mutation.SetSeedRatio(f)
return iu
}
// SetNillableSeedRatio sets the "seed_ratio" field if the given value is not nil.
func (iu *IndexersUpdate) SetNillableSeedRatio(f *float32) *IndexersUpdate {
if f != nil {
iu.SetSeedRatio(*f)
}
return iu
}
// AddSeedRatio adds f to the "seed_ratio" field.
func (iu *IndexersUpdate) AddSeedRatio(f float32) *IndexersUpdate {
iu.mutation.AddSeedRatio(f)
return iu
}
// ClearSeedRatio clears the value of the "seed_ratio" field.
func (iu *IndexersUpdate) ClearSeedRatio() *IndexersUpdate {
iu.mutation.ClearSeedRatio()
return iu
}
// SetDisabled sets the "disabled" field.
func (iu *IndexersUpdate) SetDisabled(b bool) *IndexersUpdate {
iu.mutation.SetDisabled(b)
return iu
}
// SetNillableDisabled sets the "disabled" field if the given value is not nil.
func (iu *IndexersUpdate) SetNillableDisabled(b *bool) *IndexersUpdate {
if b != nil {
iu.SetDisabled(*b)
}
return iu
}
// ClearDisabled clears the value of the "disabled" field.
func (iu *IndexersUpdate) ClearDisabled() *IndexersUpdate {
iu.mutation.ClearDisabled()
return iu
}
// Mutation returns the IndexersMutation object of the builder.
func (iu *IndexersUpdate) Mutation() *IndexersMutation {
return iu.mutation
@@ -163,6 +210,21 @@ func (iu *IndexersUpdate) sqlSave(ctx context.Context) (n int, err error) {
if value, ok := iu.mutation.AddedPriority(); ok {
_spec.AddField(indexers.FieldPriority, field.TypeInt, value)
}
if value, ok := iu.mutation.SeedRatio(); ok {
_spec.SetField(indexers.FieldSeedRatio, field.TypeFloat32, value)
}
if value, ok := iu.mutation.AddedSeedRatio(); ok {
_spec.AddField(indexers.FieldSeedRatio, field.TypeFloat32, value)
}
if iu.mutation.SeedRatioCleared() {
_spec.ClearField(indexers.FieldSeedRatio, field.TypeFloat32)
}
if value, ok := iu.mutation.Disabled(); ok {
_spec.SetField(indexers.FieldDisabled, field.TypeBool, value)
}
if iu.mutation.DisabledCleared() {
_spec.ClearField(indexers.FieldDisabled, field.TypeBool)
}
if n, err = sqlgraph.UpdateNodes(ctx, iu.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{indexers.Label}
@@ -260,6 +322,53 @@ func (iuo *IndexersUpdateOne) AddPriority(i int) *IndexersUpdateOne {
return iuo
}
// SetSeedRatio sets the "seed_ratio" field.
func (iuo *IndexersUpdateOne) SetSeedRatio(f float32) *IndexersUpdateOne {
iuo.mutation.ResetSeedRatio()
iuo.mutation.SetSeedRatio(f)
return iuo
}
// SetNillableSeedRatio sets the "seed_ratio" field if the given value is not nil.
func (iuo *IndexersUpdateOne) SetNillableSeedRatio(f *float32) *IndexersUpdateOne {
if f != nil {
iuo.SetSeedRatio(*f)
}
return iuo
}
// AddSeedRatio adds f to the "seed_ratio" field.
func (iuo *IndexersUpdateOne) AddSeedRatio(f float32) *IndexersUpdateOne {
iuo.mutation.AddSeedRatio(f)
return iuo
}
// ClearSeedRatio clears the value of the "seed_ratio" field.
func (iuo *IndexersUpdateOne) ClearSeedRatio() *IndexersUpdateOne {
iuo.mutation.ClearSeedRatio()
return iuo
}
// SetDisabled sets the "disabled" field.
func (iuo *IndexersUpdateOne) SetDisabled(b bool) *IndexersUpdateOne {
iuo.mutation.SetDisabled(b)
return iuo
}
// SetNillableDisabled sets the "disabled" field if the given value is not nil.
func (iuo *IndexersUpdateOne) SetNillableDisabled(b *bool) *IndexersUpdateOne {
if b != nil {
iuo.SetDisabled(*b)
}
return iuo
}
// ClearDisabled clears the value of the "disabled" field.
func (iuo *IndexersUpdateOne) ClearDisabled() *IndexersUpdateOne {
iuo.mutation.ClearDisabled()
return iuo
}
// Mutation returns the IndexersMutation object of the builder.
func (iuo *IndexersUpdateOne) Mutation() *IndexersMutation {
return iuo.mutation
@@ -349,6 +458,21 @@ func (iuo *IndexersUpdateOne) sqlSave(ctx context.Context) (_node *Indexers, err
if value, ok := iuo.mutation.AddedPriority(); ok {
_spec.AddField(indexers.FieldPriority, field.TypeInt, value)
}
if value, ok := iuo.mutation.SeedRatio(); ok {
_spec.SetField(indexers.FieldSeedRatio, field.TypeFloat32, value)
}
if value, ok := iuo.mutation.AddedSeedRatio(); ok {
_spec.AddField(indexers.FieldSeedRatio, field.TypeFloat32, value)
}
if iuo.mutation.SeedRatioCleared() {
_spec.ClearField(indexers.FieldSeedRatio, field.TypeFloat32)
}
if value, ok := iuo.mutation.Disabled(); ok {
_spec.SetField(indexers.FieldDisabled, field.TypeBool, value)
}
if iuo.mutation.DisabledCleared() {
_spec.ClearField(indexers.FieldDisabled, field.TypeBool)
}
_node = &Indexers{config: iuo.config}
_spec.Assign = _node.assignValues
_spec.ScanValues = _node.scanValues
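
A corresponding update sketch: disabling matching indexers and clearing their per-indexer seed ratio (illustrative only; the bulk Update builder is standard ent codegen).

package example

import (
	"context"

	"polaris/ent"
	"polaris/ent/indexers"
)

// disableIndexer flips the new "disabled" flag and clears "seed_ratio" for all
// indexers with the given name, returning the number of affected rows.
func disableIndexer(ctx context.Context, client *ent.Client, name string) (int, error) {
	return client.Indexers.Update().
		Where(indexers.Name(name)).
		SetDisabled(true).
		ClearSeedRatio().
		Save(ctx)
}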

View File

@@ -3,8 +3,10 @@
package ent
import (
"encoding/json"
"fmt"
"polaris/ent/media"
"polaris/ent/schema"
"strings"
"time"
@@ -43,6 +45,12 @@ type Media struct {
TargetDir string `json:"target_dir,omitempty"`
// tv series only
DownloadHistoryEpisodes bool `json:"download_history_episodes,omitempty"`
// Limiter holds the value of the "limiter" field.
Limiter schema.MediaLimiter `json:"limiter,omitempty"`
// Extras holds the value of the "extras" field.
Extras schema.MediaExtras `json:"extras,omitempty"`
// AlternativeTitles holds the value of the "alternative_titles" field.
AlternativeTitles []schema.AlternativeTilte `json:"alternative_titles,omitempty"`
// Edges holds the relations/edges for other nodes in the graph.
// The values are being populated by the MediaQuery when eager-loading is set.
Edges MediaEdges `json:"edges"`
@@ -72,6 +80,8 @@ func (*Media) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case media.FieldLimiter, media.FieldExtras, media.FieldAlternativeTitles:
values[i] = new([]byte)
case media.FieldDownloadHistoryEpisodes:
values[i] = new(sql.NullBool)
case media.FieldID, media.FieldTmdbID, media.FieldStorageID:
@@ -179,6 +189,30 @@ func (m *Media) assignValues(columns []string, values []any) error {
} else if value.Valid {
m.DownloadHistoryEpisodes = value.Bool
}
case media.FieldLimiter:
if value, ok := values[i].(*[]byte); !ok {
return fmt.Errorf("unexpected type %T for field limiter", values[i])
} else if value != nil && len(*value) > 0 {
if err := json.Unmarshal(*value, &m.Limiter); err != nil {
return fmt.Errorf("unmarshal field limiter: %w", err)
}
}
case media.FieldExtras:
if value, ok := values[i].(*[]byte); !ok {
return fmt.Errorf("unexpected type %T for field extras", values[i])
} else if value != nil && len(*value) > 0 {
if err := json.Unmarshal(*value, &m.Extras); err != nil {
return fmt.Errorf("unmarshal field extras: %w", err)
}
}
case media.FieldAlternativeTitles:
if value, ok := values[i].(*[]byte); !ok {
return fmt.Errorf("unexpected type %T for field alternative_titles", values[i])
} else if value != nil && len(*value) > 0 {
if err := json.Unmarshal(*value, &m.AlternativeTitles); err != nil {
return fmt.Errorf("unmarshal field alternative_titles: %w", err)
}
}
default:
m.selectValues.Set(columns[i], values[i])
}
@@ -258,6 +292,15 @@ func (m *Media) String() string {
builder.WriteString(", ")
builder.WriteString("download_history_episodes=")
builder.WriteString(fmt.Sprintf("%v", m.DownloadHistoryEpisodes))
builder.WriteString(", ")
builder.WriteString("limiter=")
builder.WriteString(fmt.Sprintf("%v", m.Limiter))
builder.WriteString(", ")
builder.WriteString("extras=")
builder.WriteString(fmt.Sprintf("%v", m.Extras))
builder.WriteString(", ")
builder.WriteString("alternative_titles=")
builder.WriteString(fmt.Sprintf("%v", m.AlternativeTitles))
builder.WriteByte(')')
return builder.String()
}

View File

@@ -41,6 +41,12 @@ const (
FieldTargetDir = "target_dir"
// FieldDownloadHistoryEpisodes holds the string denoting the download_history_episodes field in the database.
FieldDownloadHistoryEpisodes = "download_history_episodes"
// FieldLimiter holds the string denoting the limiter field in the database.
FieldLimiter = "limiter"
// FieldExtras holds the string denoting the extras field in the database.
FieldExtras = "extras"
// FieldAlternativeTitles holds the string denoting the alternative_titles field in the database.
FieldAlternativeTitles = "alternative_titles"
// EdgeEpisodes holds the string denoting the episodes edge name in mutations.
EdgeEpisodes = "episodes"
// Table holds the table name of the media in the database.
@@ -70,6 +76,9 @@ var Columns = []string{
FieldStorageID,
FieldTargetDir,
FieldDownloadHistoryEpisodes,
FieldLimiter,
FieldExtras,
FieldAlternativeTitles,
}
// ValidColumn reports if the column name is valid (part of the table columns).
@@ -125,6 +134,7 @@ const (
Resolution720p Resolution = "720p"
Resolution1080p Resolution = "1080p"
Resolution2160p Resolution = "2160p"
ResolutionAny Resolution = "any"
)
func (r Resolution) String() string {
@@ -134,7 +144,7 @@ func (r Resolution) String() string {
// ResolutionValidator is a validator for the "resolution" field enum values. It is called by the builders before save.
func ResolutionValidator(r Resolution) error {
switch r {
case Resolution720p, Resolution1080p, Resolution2160p:
case Resolution720p, Resolution1080p, Resolution2160p, ResolutionAny:
return nil
default:
return fmt.Errorf("media: invalid enum value for resolution field: %q", r)

View File

@@ -775,6 +775,36 @@ func DownloadHistoryEpisodesNotNil() predicate.Media {
return predicate.Media(sql.FieldNotNull(FieldDownloadHistoryEpisodes))
}
// LimiterIsNil applies the IsNil predicate on the "limiter" field.
func LimiterIsNil() predicate.Media {
return predicate.Media(sql.FieldIsNull(FieldLimiter))
}
// LimiterNotNil applies the NotNil predicate on the "limiter" field.
func LimiterNotNil() predicate.Media {
return predicate.Media(sql.FieldNotNull(FieldLimiter))
}
// ExtrasIsNil applies the IsNil predicate on the "extras" field.
func ExtrasIsNil() predicate.Media {
return predicate.Media(sql.FieldIsNull(FieldExtras))
}
// ExtrasNotNil applies the NotNil predicate on the "extras" field.
func ExtrasNotNil() predicate.Media {
return predicate.Media(sql.FieldNotNull(FieldExtras))
}
// AlternativeTitlesIsNil applies the IsNil predicate on the "alternative_titles" field.
func AlternativeTitlesIsNil() predicate.Media {
return predicate.Media(sql.FieldIsNull(FieldAlternativeTitles))
}
// AlternativeTitlesNotNil applies the NotNil predicate on the "alternative_titles" field.
func AlternativeTitlesNotNil() predicate.Media {
return predicate.Media(sql.FieldNotNull(FieldAlternativeTitles))
}
// HasEpisodes applies the HasEdge predicate on the "episodes" edge.
func HasEpisodes() predicate.Media {
return predicate.Media(func(s *sql.Selector) {

View File

@@ -8,6 +8,7 @@ import (
"fmt"
"polaris/ent/episode"
"polaris/ent/media"
"polaris/ent/schema"
"time"
"entgo.io/ent/dialect/sql/sqlgraph"
@@ -155,6 +156,40 @@ func (mc *MediaCreate) SetNillableDownloadHistoryEpisodes(b *bool) *MediaCreate
return mc
}
// SetLimiter sets the "limiter" field.
func (mc *MediaCreate) SetLimiter(sl schema.MediaLimiter) *MediaCreate {
mc.mutation.SetLimiter(sl)
return mc
}
// SetNillableLimiter sets the "limiter" field if the given value is not nil.
func (mc *MediaCreate) SetNillableLimiter(sl *schema.MediaLimiter) *MediaCreate {
if sl != nil {
mc.SetLimiter(*sl)
}
return mc
}
// SetExtras sets the "extras" field.
func (mc *MediaCreate) SetExtras(se schema.MediaExtras) *MediaCreate {
mc.mutation.SetExtras(se)
return mc
}
// SetNillableExtras sets the "extras" field if the given value is not nil.
func (mc *MediaCreate) SetNillableExtras(se *schema.MediaExtras) *MediaCreate {
if se != nil {
mc.SetExtras(*se)
}
return mc
}
// SetAlternativeTitles sets the "alternative_titles" field.
func (mc *MediaCreate) SetAlternativeTitles(st []schema.AlternativeTilte) *MediaCreate {
mc.mutation.SetAlternativeTitles(st)
return mc
}
// AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
func (mc *MediaCreate) AddEpisodeIDs(ids ...int) *MediaCreate {
mc.mutation.AddEpisodeIDs(ids...)
@@ -340,6 +375,18 @@ func (mc *MediaCreate) createSpec() (*Media, *sqlgraph.CreateSpec) {
_spec.SetField(media.FieldDownloadHistoryEpisodes, field.TypeBool, value)
_node.DownloadHistoryEpisodes = value
}
if value, ok := mc.mutation.Limiter(); ok {
_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
_node.Limiter = value
}
if value, ok := mc.mutation.Extras(); ok {
_spec.SetField(media.FieldExtras, field.TypeJSON, value)
_node.Extras = value
}
if value, ok := mc.mutation.AlternativeTitles(); ok {
_spec.SetField(media.FieldAlternativeTitles, field.TypeJSON, value)
_node.AlternativeTitles = value
}
if nodes := mc.mutation.EpisodesIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M,

View File

@@ -9,10 +9,12 @@ import (
"polaris/ent/episode"
"polaris/ent/media"
"polaris/ent/predicate"
"polaris/ent/schema"
"time"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/dialect/sql/sqljson"
"entgo.io/ent/schema/field"
)
@@ -249,6 +251,64 @@ func (mu *MediaUpdate) ClearDownloadHistoryEpisodes() *MediaUpdate {
return mu
}
// SetLimiter sets the "limiter" field.
func (mu *MediaUpdate) SetLimiter(sl schema.MediaLimiter) *MediaUpdate {
mu.mutation.SetLimiter(sl)
return mu
}
// SetNillableLimiter sets the "limiter" field if the given value is not nil.
func (mu *MediaUpdate) SetNillableLimiter(sl *schema.MediaLimiter) *MediaUpdate {
if sl != nil {
mu.SetLimiter(*sl)
}
return mu
}
// ClearLimiter clears the value of the "limiter" field.
func (mu *MediaUpdate) ClearLimiter() *MediaUpdate {
mu.mutation.ClearLimiter()
return mu
}
// SetExtras sets the "extras" field.
func (mu *MediaUpdate) SetExtras(se schema.MediaExtras) *MediaUpdate {
mu.mutation.SetExtras(se)
return mu
}
// SetNillableExtras sets the "extras" field if the given value is not nil.
func (mu *MediaUpdate) SetNillableExtras(se *schema.MediaExtras) *MediaUpdate {
if se != nil {
mu.SetExtras(*se)
}
return mu
}
// ClearExtras clears the value of the "extras" field.
func (mu *MediaUpdate) ClearExtras() *MediaUpdate {
mu.mutation.ClearExtras()
return mu
}
// SetAlternativeTitles sets the "alternative_titles" field.
func (mu *MediaUpdate) SetAlternativeTitles(st []schema.AlternativeTilte) *MediaUpdate {
mu.mutation.SetAlternativeTitles(st)
return mu
}
// AppendAlternativeTitles appends st to the "alternative_titles" field.
func (mu *MediaUpdate) AppendAlternativeTitles(st []schema.AlternativeTilte) *MediaUpdate {
mu.mutation.AppendAlternativeTitles(st)
return mu
}
// ClearAlternativeTitles clears the value of the "alternative_titles" field.
func (mu *MediaUpdate) ClearAlternativeTitles() *MediaUpdate {
mu.mutation.ClearAlternativeTitles()
return mu
}
// AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
func (mu *MediaUpdate) AddEpisodeIDs(ids ...int) *MediaUpdate {
mu.mutation.AddEpisodeIDs(ids...)
@@ -401,6 +461,29 @@ func (mu *MediaUpdate) sqlSave(ctx context.Context) (n int, err error) {
if mu.mutation.DownloadHistoryEpisodesCleared() {
_spec.ClearField(media.FieldDownloadHistoryEpisodes, field.TypeBool)
}
if value, ok := mu.mutation.Limiter(); ok {
_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
}
if mu.mutation.LimiterCleared() {
_spec.ClearField(media.FieldLimiter, field.TypeJSON)
}
if value, ok := mu.mutation.Extras(); ok {
_spec.SetField(media.FieldExtras, field.TypeJSON, value)
}
if mu.mutation.ExtrasCleared() {
_spec.ClearField(media.FieldExtras, field.TypeJSON)
}
if value, ok := mu.mutation.AlternativeTitles(); ok {
_spec.SetField(media.FieldAlternativeTitles, field.TypeJSON, value)
}
if value, ok := mu.mutation.AppendedAlternativeTitles(); ok {
_spec.AddModifier(func(u *sql.UpdateBuilder) {
sqljson.Append(u, media.FieldAlternativeTitles, value)
})
}
if mu.mutation.AlternativeTitlesCleared() {
_spec.ClearField(media.FieldAlternativeTitles, field.TypeJSON)
}
if mu.mutation.EpisodesCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M,
@@ -686,6 +769,64 @@ func (muo *MediaUpdateOne) ClearDownloadHistoryEpisodes() *MediaUpdateOne {
return muo
}
// SetLimiter sets the "limiter" field.
func (muo *MediaUpdateOne) SetLimiter(sl schema.MediaLimiter) *MediaUpdateOne {
muo.mutation.SetLimiter(sl)
return muo
}
// SetNillableLimiter sets the "limiter" field if the given value is not nil.
func (muo *MediaUpdateOne) SetNillableLimiter(sl *schema.MediaLimiter) *MediaUpdateOne {
if sl != nil {
muo.SetLimiter(*sl)
}
return muo
}
// ClearLimiter clears the value of the "limiter" field.
func (muo *MediaUpdateOne) ClearLimiter() *MediaUpdateOne {
muo.mutation.ClearLimiter()
return muo
}
// SetExtras sets the "extras" field.
func (muo *MediaUpdateOne) SetExtras(se schema.MediaExtras) *MediaUpdateOne {
muo.mutation.SetExtras(se)
return muo
}
// SetNillableExtras sets the "extras" field if the given value is not nil.
func (muo *MediaUpdateOne) SetNillableExtras(se *schema.MediaExtras) *MediaUpdateOne {
if se != nil {
muo.SetExtras(*se)
}
return muo
}
// ClearExtras clears the value of the "extras" field.
func (muo *MediaUpdateOne) ClearExtras() *MediaUpdateOne {
muo.mutation.ClearExtras()
return muo
}
// SetAlternativeTitles sets the "alternative_titles" field.
func (muo *MediaUpdateOne) SetAlternativeTitles(st []schema.AlternativeTilte) *MediaUpdateOne {
muo.mutation.SetAlternativeTitles(st)
return muo
}
// AppendAlternativeTitles appends st to the "alternative_titles" field.
func (muo *MediaUpdateOne) AppendAlternativeTitles(st []schema.AlternativeTilte) *MediaUpdateOne {
muo.mutation.AppendAlternativeTitles(st)
return muo
}
// ClearAlternativeTitles clears the value of the "alternative_titles" field.
func (muo *MediaUpdateOne) ClearAlternativeTitles() *MediaUpdateOne {
muo.mutation.ClearAlternativeTitles()
return muo
}
// AddEpisodeIDs adds the "episodes" edge to the Episode entity by IDs.
func (muo *MediaUpdateOne) AddEpisodeIDs(ids ...int) *MediaUpdateOne {
muo.mutation.AddEpisodeIDs(ids...)
@@ -868,6 +1009,29 @@ func (muo *MediaUpdateOne) sqlSave(ctx context.Context) (_node *Media, err error
if muo.mutation.DownloadHistoryEpisodesCleared() {
_spec.ClearField(media.FieldDownloadHistoryEpisodes, field.TypeBool)
}
if value, ok := muo.mutation.Limiter(); ok {
_spec.SetField(media.FieldLimiter, field.TypeJSON, value)
}
if muo.mutation.LimiterCleared() {
_spec.ClearField(media.FieldLimiter, field.TypeJSON)
}
if value, ok := muo.mutation.Extras(); ok {
_spec.SetField(media.FieldExtras, field.TypeJSON, value)
}
if muo.mutation.ExtrasCleared() {
_spec.ClearField(media.FieldExtras, field.TypeJSON)
}
if value, ok := muo.mutation.AlternativeTitles(); ok {
_spec.SetField(media.FieldAlternativeTitles, field.TypeJSON, value)
}
if value, ok := muo.mutation.AppendedAlternativeTitles(); ok {
_spec.AddModifier(func(u *sql.UpdateBuilder) {
sqljson.Append(u, media.FieldAlternativeTitles, value)
})
}
if muo.mutation.AlternativeTitlesCleared() {
_spec.ClearField(media.FieldAlternativeTitles, field.TypeJSON)
}
if muo.mutation.EpisodesCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M,
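
A sketch of the new alternative-titles helpers. The AlternativeTilte struct lives in polaris/ent/schema but its fields are not part of this diff, so the literal below is left empty and is purely illustrative.

package example

import (
	"context"

	"polaris/ent"
	"polaris/ent/schema"
)

// addAlternativeTitle appends to the JSON "alternative_titles" column via the
// sqljson.Append modifier wired up in sqlSave above.
func addAlternativeTitle(ctx context.Context, client *ent.Client, mediaID int) error {
	title := schema.AlternativeTilte{} // fields omitted: the struct definition is not shown in this diff
	return client.Media.
		UpdateOneID(mediaID).
		AppendAlternativeTitles([]schema.AlternativeTilte{title}).
		Exec(ctx)
}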

View File

@@ -8,17 +8,30 @@ import (
)
var (
// BlacklistsColumns holds the columns for the "blacklists" table.
BlacklistsColumns = []*schema.Column{
{Name: "id", Type: field.TypeInt, Increment: true},
{Name: "type", Type: field.TypeEnum, Enums: []string{"media", "torrent"}},
{Name: "value", Type: field.TypeJSON},
{Name: "notes", Type: field.TypeString, Nullable: true},
}
// BlacklistsTable holds the schema information for the "blacklists" table.
BlacklistsTable = &schema.Table{
Name: "blacklists",
Columns: BlacklistsColumns,
PrimaryKey: []*schema.Column{BlacklistsColumns[0]},
}
// DownloadClientsColumns holds the columns for the "download_clients" table.
DownloadClientsColumns = []*schema.Column{
{Name: "id", Type: field.TypeInt, Increment: true},
{Name: "enable", Type: field.TypeBool},
{Name: "name", Type: field.TypeString},
{Name: "implementation", Type: field.TypeString},
{Name: "implementation", Type: field.TypeEnum, Enums: []string{"transmission", "qbittorrent"}},
{Name: "url", Type: field.TypeString},
{Name: "user", Type: field.TypeString, Default: ""},
{Name: "password", Type: field.TypeString, Default: ""},
{Name: "settings", Type: field.TypeString, Default: ""},
{Name: "priority", Type: field.TypeString, Default: ""},
{Name: "priority1", Type: field.TypeInt, Default: 1},
{Name: "remove_completed_downloads", Type: field.TypeBool, Default: true},
{Name: "remove_failed_downloads", Type: field.TypeBool, Default: true},
{Name: "tags", Type: field.TypeString, Default: ""},
@@ -38,6 +51,8 @@ var (
{Name: "overview", Type: field.TypeString},
{Name: "air_date", Type: field.TypeString},
{Name: "status", Type: field.TypeEnum, Enums: []string{"missing", "downloading", "downloaded"}, Default: "missing"},
{Name: "monitored", Type: field.TypeBool, Default: false},
{Name: "target_file", Type: field.TypeString, Nullable: true},
{Name: "media_id", Type: field.TypeInt, Nullable: true},
}
// EpisodesTable holds the schema information for the "episodes" table.
@@ -48,7 +63,7 @@ var (
ForeignKeys: []*schema.ForeignKey{
{
Symbol: "episodes_media_episodes",
Columns: []*schema.Column{EpisodesColumns[7]},
Columns: []*schema.Column{EpisodesColumns[9]},
RefColumns: []*schema.Column{MediaColumns[0]},
OnDelete: schema.SetNull,
},
@@ -59,12 +74,16 @@ var (
{Name: "id", Type: field.TypeInt, Increment: true},
{Name: "media_id", Type: field.TypeInt},
{Name: "episode_id", Type: field.TypeInt, Nullable: true},
{Name: "episode_nums", Type: field.TypeJSON, Nullable: true},
{Name: "season_num", Type: field.TypeInt, Nullable: true},
{Name: "source_title", Type: field.TypeString},
{Name: "date", Type: field.TypeTime},
{Name: "target_dir", Type: field.TypeString},
{Name: "size", Type: field.TypeInt, Default: 0},
{Name: "download_client_id", Type: field.TypeInt, Nullable: true},
{Name: "status", Type: field.TypeEnum, Enums: []string{"running", "success", "fail", "uploading"}},
{Name: "indexer_id", Type: field.TypeInt, Nullable: true},
{Name: "link", Type: field.TypeString, Nullable: true},
{Name: "status", Type: field.TypeEnum, Enums: []string{"running", "success", "fail", "uploading", "seeding"}},
{Name: "saved", Type: field.TypeString, Nullable: true},
}
// HistoriesTable holds the schema information for the "histories" table.
@@ -73,6 +92,22 @@ var (
Columns: HistoriesColumns,
PrimaryKey: []*schema.Column{HistoriesColumns[0]},
}
// ImportListsColumns holds the columns for the "import_lists" table.
ImportListsColumns = []*schema.Column{
{Name: "id", Type: field.TypeInt, Increment: true},
{Name: "name", Type: field.TypeString},
{Name: "type", Type: field.TypeEnum, Enums: []string{"plex", "doulist"}},
{Name: "url", Type: field.TypeString, Nullable: true},
{Name: "qulity", Type: field.TypeString},
{Name: "storage_id", Type: field.TypeInt},
{Name: "settings", Type: field.TypeJSON, Nullable: true},
}
// ImportListsTable holds the schema information for the "import_lists" table.
ImportListsTable = &schema.Table{
Name: "import_lists",
Columns: ImportListsColumns,
PrimaryKey: []*schema.Column{ImportListsColumns[0]},
}
// IndexersColumns holds the columns for the "indexers" table.
IndexersColumns = []*schema.Column{
{Name: "id", Type: field.TypeInt, Increment: true},
@@ -80,7 +115,9 @@ var (
{Name: "implementation", Type: field.TypeString},
{Name: "settings", Type: field.TypeString},
{Name: "enable_rss", Type: field.TypeBool, Default: true},
{Name: "priority", Type: field.TypeInt},
{Name: "priority", Type: field.TypeInt, Default: 50},
{Name: "seed_ratio", Type: field.TypeFloat32, Nullable: true, Default: 0},
{Name: "disabled", Type: field.TypeBool, Nullable: true, Default: false},
}
// IndexersTable holds the schema information for the "indexers" table.
IndexersTable = &schema.Table{
@@ -100,10 +137,13 @@ var (
{Name: "overview", Type: field.TypeString},
{Name: "created_at", Type: field.TypeTime},
{Name: "air_date", Type: field.TypeString, Default: ""},
{Name: "resolution", Type: field.TypeEnum, Enums: []string{"720p", "1080p", "2160p"}, Default: "1080p"},
{Name: "resolution", Type: field.TypeEnum, Enums: []string{"720p", "1080p", "2160p", "any"}, Default: "1080p"},
{Name: "storage_id", Type: field.TypeInt, Nullable: true},
{Name: "target_dir", Type: field.TypeString, Nullable: true},
{Name: "download_history_episodes", Type: field.TypeBool, Nullable: true, Default: false},
{Name: "limiter", Type: field.TypeJSON, Nullable: true},
{Name: "extras", Type: field.TypeJSON, Nullable: true},
{Name: "alternative_titles", Type: field.TypeJSON, Nullable: true},
}
// MediaTable holds the schema information for the "media" table.
MediaTable = &schema.Table{
@@ -141,7 +181,7 @@ var (
StoragesColumns = []*schema.Column{
{Name: "id", Type: field.TypeInt, Increment: true},
{Name: "name", Type: field.TypeString, Unique: true},
{Name: "implementation", Type: field.TypeEnum, Enums: []string{"webdav", "local"}},
{Name: "implementation", Type: field.TypeEnum, Enums: []string{"webdav", "local", "alist"}},
{Name: "tv_path", Type: field.TypeString, Nullable: true},
{Name: "movie_path", Type: field.TypeString, Nullable: true},
{Name: "settings", Type: field.TypeString, Nullable: true},
@@ -156,9 +196,11 @@ var (
}
// Tables holds all the tables in the schema.
Tables = []*schema.Table{
BlacklistsTable,
DownloadClientsTable,
EpisodesTable,
HistoriesTable,
ImportListsTable,
IndexersTable,
MediaTable,
NotificationClientsTable,
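
The new blacklists and import_lists tables take effect when the auto-migration runs. A minimal sketch follows; the sqlite driver and DSN are placeholders and Polaris' actual startup code may differ.

package example

import (
	"context"
	"log"

	"polaris/ent"

	_ "github.com/mattn/go-sqlite3" // placeholder driver, chosen only for the example
)

func migrateSchema(ctx context.Context) {
	client, err := ent.Open("sqlite3", "file:polaris.db?cache=shared&_fk=1")
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()
	// Schema.Create creates or updates the tables declared in this file.
	if err := client.Schema.Create(ctx); err != nil {
		log.Fatal(err)
	}
}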

View File

File diff suppressed because it is too large.

View File

@@ -6,6 +6,9 @@ import (
"entgo.io/ent/dialect/sql"
)
// Blacklist is the predicate function for blacklist builders.
type Blacklist func(*sql.Selector)
// DownloadClients is the predicate function for downloadclients builders.
type DownloadClients func(*sql.Selector)
@@ -15,6 +18,9 @@ type Episode func(*sql.Selector)
// History is the predicate function for history builders.
type History func(*sql.Selector)
// ImportList is the predicate function for importlist builders.
type ImportList func(*sql.Selector)
// Indexers is the predicate function for indexers builders.
type Indexers func(*sql.Selector)

View File

@@ -3,7 +3,9 @@
package ent
import (
"polaris/ent/blacklist"
"polaris/ent/downloadclients"
"polaris/ent/episode"
"polaris/ent/history"
"polaris/ent/indexers"
"polaris/ent/media"
@@ -17,6 +19,12 @@ import (
// (default values, validators, hooks and policies) and stitches it
// to their package variables.
func init() {
blacklistFields := schema.Blacklist{}.Fields()
_ = blacklistFields
// blacklistDescValue is the schema descriptor for value field.
blacklistDescValue := blacklistFields[1].Descriptor()
// blacklist.DefaultValue holds the default value on creation for the value field.
blacklist.DefaultValue = blacklistDescValue.Default.(schema.BlacklistValue)
downloadclientsFields := schema.DownloadClients{}.Fields()
_ = downloadclientsFields
// downloadclientsDescUser is the schema descriptor for user field.
@@ -31,10 +39,12 @@ func init() {
downloadclientsDescSettings := downloadclientsFields[6].Descriptor()
// downloadclients.DefaultSettings holds the default value on creation for the settings field.
downloadclients.DefaultSettings = downloadclientsDescSettings.Default.(string)
// downloadclientsDescPriority is the schema descriptor for priority field.
downloadclientsDescPriority := downloadclientsFields[7].Descriptor()
// downloadclients.DefaultPriority holds the default value on creation for the priority field.
downloadclients.DefaultPriority = downloadclientsDescPriority.Default.(string)
// downloadclientsDescPriority1 is the schema descriptor for priority1 field.
downloadclientsDescPriority1 := downloadclientsFields[7].Descriptor()
// downloadclients.DefaultPriority1 holds the default value on creation for the priority1 field.
downloadclients.DefaultPriority1 = downloadclientsDescPriority1.Default.(int)
// downloadclients.Priority1Validator is a validator for the "priority1" field. It is called by the builders before save.
downloadclients.Priority1Validator = downloadclientsDescPriority1.Validators[0].(func(int) error)
// downloadclientsDescRemoveCompletedDownloads is the schema descriptor for remove_completed_downloads field.
downloadclientsDescRemoveCompletedDownloads := downloadclientsFields[8].Descriptor()
// downloadclients.DefaultRemoveCompletedDownloads holds the default value on creation for the remove_completed_downloads field.
@@ -49,10 +59,14 @@ func init() {
downloadclients.DefaultTags = downloadclientsDescTags.Default.(string)
episodeFields := schema.Episode{}.Fields()
_ = episodeFields
// episodeDescMonitored is the schema descriptor for monitored field.
episodeDescMonitored := episodeFields[7].Descriptor()
// episode.DefaultMonitored holds the default value on creation for the monitored field.
episode.DefaultMonitored = episodeDescMonitored.Default.(bool)
historyFields := schema.History{}.Fields()
_ = historyFields
// historyDescSize is the schema descriptor for size field.
historyDescSize := historyFields[5].Descriptor()
historyDescSize := historyFields[7].Descriptor()
// history.DefaultSize holds the default value on creation for the size field.
history.DefaultSize = historyDescSize.Default.(int)
indexersFields := schema.Indexers{}.Fields()
@@ -61,6 +75,18 @@ func init() {
indexersDescEnableRss := indexersFields[3].Descriptor()
// indexers.DefaultEnableRss holds the default value on creation for the enable_rss field.
indexers.DefaultEnableRss = indexersDescEnableRss.Default.(bool)
// indexersDescPriority is the schema descriptor for priority field.
indexersDescPriority := indexersFields[4].Descriptor()
// indexers.DefaultPriority holds the default value on creation for the priority field.
indexers.DefaultPriority = indexersDescPriority.Default.(int)
// indexersDescSeedRatio is the schema descriptor for seed_ratio field.
indexersDescSeedRatio := indexersFields[5].Descriptor()
// indexers.DefaultSeedRatio holds the default value on creation for the seed_ratio field.
indexers.DefaultSeedRatio = indexersDescSeedRatio.Default.(float32)
// indexersDescDisabled is the schema descriptor for disabled field.
indexersDescDisabled := indexersFields[6].Descriptor()
// indexers.DefaultDisabled holds the default value on creation for the disabled field.
indexers.DefaultDisabled = indexersDescDisabled.Default.(bool)
mediaFields := schema.Media{}.Fields()
_ = mediaFields
// mediaDescCreatedAt is the schema descriptor for created_at field.

ent/schema/blacklist.go Normal file (30 lines)
View File

@@ -0,0 +1,30 @@
package schema
import (
"entgo.io/ent"
"entgo.io/ent/schema/field"
)
// Blacklist holds the schema definition for the Blacklist entity.
type Blacklist struct {
ent.Schema
}
// Fields of the Blacklist.
func (Blacklist) Fields() []ent.Field {
return []ent.Field{
field.Enum("type").Values("media", "torrent"),
field.JSON("value", BlacklistValue{}).Default(BlacklistValue{}),
field.String("notes").Optional(),
}
}
// Edges of the Blacklist.
func (Blacklist) Edges() []ent.Edge {
return nil
}
type BlacklistValue struct {
TmdbID int `json:"tmdb_id"`
TorrentHash string `json:"torrent_hash"`
}
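
For orientation, a minimal usage sketch of the new Blacklist schema follows. The client and builder names (client.Blacklist.Create, SetType, SetValue, SetNotes, blacklist.TypeTorrent) are assumed from ent's standard code generation and are not part of this diff.

import (
	"context"

	"polaris/ent"
	"polaris/ent/blacklist"
	"polaris/ent/schema"
)

// blacklistTorrent records a torrent hash so future searches can skip it.
// Sketch only; the generated method names are assumed from ent's conventions.
func blacklistTorrent(ctx context.Context, client *ent.Client, hash string) error {
	_, err := client.Blacklist.Create().
		SetType(blacklist.TypeTorrent).
		SetValue(schema.BlacklistValue{TorrentHash: hash}).
		SetNotes("failed download").
		Save(ctx)
	return err
}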

View File

@@ -1,6 +1,8 @@
package schema
import (
"errors"
"entgo.io/ent"
"entgo.io/ent/schema/field"
)
@@ -15,12 +17,20 @@ func (DownloadClients) Fields() []ent.Field {
return []ent.Field{
field.Bool("enable"),
field.String("name"),
field.String("implementation"),
field.Enum("implementation").Values("transmission", "qbittorrent"),
field.String("url"),
field.String("user").Default(""),
field.String("password").Default(""),
field.String("settings").Default(""),
field.String("priority").Default(""),
field.Int("priority1").Default(1).Validate(func(i int) error {
if i > 50 {
return errors.ErrUnsupported
}
if i <= 0 {
return errors.ErrUnsupported
}
return nil
}),
field.Bool("remove_completed_downloads").Default(true),
field.Bool("remove_failed_downloads").Default(true),
field.String("tags").Default(""),

View File

@@ -21,16 +21,18 @@ func (Episode) Fields() []ent.Field {
field.String("overview"),
field.String("air_date"),
field.Enum("status").Values("missing", "downloading", "downloaded").Default("missing"),
field.Bool("monitored").Default(false).StructTag("json:\"monitored\""), //whether this episode is monitored
field.String("target_file").Optional(),
}
}
// Edges of the Episode.
func (Episode) Edges() []ent.Edge {
return []ent.Edge{
edge.From("media", Media.Type).
Ref("episodes").
Unique().
edge.From("media", Media.Type).
Ref("episodes").
Unique().
Field("media_id"),
}
}
}

View File

@@ -14,14 +14,18 @@ type History struct {
func (History) Fields() []ent.Field {
return []ent.Field{
field.Int("media_id"),
field.Int("episode_id").Optional(),
field.Int("episode_id").Optional().Comment("deprecated"),
field.Ints("episode_nums").Optional(),
field.Int("season_num").Optional(),
field.String("source_title"),
field.Time("date"),
field.String("target_dir"),
field.Int("size").Default(0),
field.Int("download_client_id").Optional(),
field.Enum("status").Values("running", "success", "fail", "uploading"),
field.String("saved").Optional(),
field.Int("indexer_id").Optional(),
field.String("link").Optional(), //should be magnet link
field.Enum("status").Values("running", "success", "fail", "uploading", "seeding"),
field.String("saved").Optional().Comment("deprecated"), //deprecated
}
}
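
With episode_id kept only as a deprecated field, a new history row carries the season, the full list of episode numbers, the indexer and the magnet link. A rough sketch of writing one such record; the setter and status constant names are assumed from ent's generated API, and all values are placeholders:

import (
	"context"
	"time"

	"polaris/ent"
	"polaris/ent/history"
)

func recordGrab(ctx context.Context, client *ent.Client, mediaID int) error {
	_, err := client.History.Create().
		SetMediaID(mediaID).
		SetSeasonNum(1).
		SetEpisodeNums([]int{1, 2, 3}). // a pack covering several episodes
		SetSourceTitle("Some.Show.S01.1080p.WEB-DL").
		SetDate(time.Now()).
		SetTargetDir("/downloads/some-show").
		SetSize(4 << 30). // release size
		SetIndexerID(2).
		SetLink("magnet:?xt=urn:btih:...").
		SetStatus(history.StatusRunning).
		Save(ctx)
	return err
}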

ent/schema/importlist.go Normal file (32 lines)
View File

@@ -0,0 +1,32 @@
package schema
import (
"entgo.io/ent"
"entgo.io/ent/schema/field"
)
// ImportList holds the schema definition for the ImportList entity.
type ImportList struct {
ent.Schema
}
// Fields of the ImportList.
func (ImportList) Fields() []ent.Field {
return []ent.Field{
field.String("name"),
field.Enum("type").Values("plex", "doulist"),
field.String("url").Optional(),
field.String("qulity"),
field.Int("storage_id"),
field.JSON("settings", ImportListSettings{}).Optional(),
}
}
// Edges of the ImportList.
func (ImportList) Edges() []ent.Edge {
return nil
}
type ImportListSettings struct {
//Url string `json:"url"`
}

View File

@@ -17,7 +17,9 @@ func (Indexers) Fields() []ent.Field {
field.String("implementation"),
field.String("settings"),
field.Bool("enable_rss").Default(true),
field.Int("priority"),
field.Int("priority").Default(50),
field.Float32("seed_ratio").Optional().Default(0).Comment("minimal seed ratio requied, before removing torrent"),
field.Bool("disabled").Optional().Default(false),
}
}
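
seed_ratio is the minimum ratio a torrent must reach before it may be removed, with the default 0 meaning no requirement. An illustrative helper, not taken from the repository (the generated SeedRatio field name is assumed from ent's codegen), showing how the threshold might be consulted:

import "polaris/ent"

// canRemoveTorrent reports whether a finished torrent has seeded enough to be
// deleted, based on the indexer it came from.
func canRemoveTorrent(currentRatio float32, idx *ent.Indexers) bool {
	// a SeedRatio of 0 (the schema default) disables the check
	return idx.SeedRatio == 0 || currentRatio >= idx.SeedRatio
}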

View File

@@ -25,10 +25,13 @@ func (Media) Fields() []ent.Field {
field.String("overview"),
field.Time("created_at").Default(time.Now()),
field.String("air_date").Default(""),
field.Enum("resolution").Values("720p", "1080p", "2160p").Default("1080p"),
field.Enum("resolution").Values("720p", "1080p", "2160p", "any").Default("1080p"),
field.Int("storage_id").Optional(),
field.String("target_dir").Optional(),
field.Bool("download_history_episodes").Optional().Default(false).Comment("tv series only"),
field.JSON("limiter", MediaLimiter{}).Optional(),
field.JSON("extras", MediaExtras{}).Optional(),
field.JSON("alternative_titles", []AlternativeTilte{}).Optional(),
}
}
@@ -38,3 +41,30 @@ func (Media) Edges() []ent.Edge {
edge.To("episodes", Episode.Type),
}
}
type AlternativeTilte struct {
Iso3166_1 string `json:"iso_3166_1"`
Title string `json:"title"`
Type string `json:"type"`
}
type MediaLimiter struct {
SizeMin int64 `json:"size_min"` // in bytes
SizeMax int64 `json:"size_max"` // in bytes
PreferSize int64 `json:"prefer_max"`
}
type MediaExtras struct {
IsAdultMovie bool `json:"is_adult_movie"`
JavId string `json:"javid"`
//OriginCountry []string `json:"origin_country"`
OriginalLanguage string `json:"original_language"`
Genres []struct {
ID int64 `json:"id"`
Name string `json:"name"`
} `json:"genres"`
}
func (m *MediaExtras) IsJav() bool {
return m.IsAdultMovie && m.JavId != ""
}
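
Both limiter bounds are stored in bytes, as the comments above note. A small sketch of a per-media limit of roughly 1-8 GiB with a 4 GiB preference; the values are illustrative only:

import "polaris/ent/schema"

// limits for a single media item; all values are in bytes
var exampleLimiter = schema.MediaLimiter{
	SizeMin:    1 << 30, // 1 GiB lower bound
	SizeMax:    8 << 30, // 8 GiB upper bound
	PreferSize: 4 << 30, // preferred release size
}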

View File

@@ -14,7 +14,7 @@ type Storage struct {
func (Storage) Fields() []ent.Field {
return []ent.Field{
field.String("name").Unique(),
field.Enum("implementation").Values("webdav", "local"),
field.Enum("implementation").Values("webdav", "local", "alist"),
field.String("tv_path").Optional(),
field.String("movie_path").Optional(),
field.String("settings").Optional(),

View File

@@ -67,6 +67,7 @@ type Implementation string
const (
ImplementationWebdav Implementation = "webdav"
ImplementationLocal Implementation = "local"
ImplementationAlist Implementation = "alist"
)
func (i Implementation) String() string {
@@ -76,7 +77,7 @@ func (i Implementation) String() string {
// ImplementationValidator is a validator for the "implementation" field enum values. It is called by the builders before save.
func ImplementationValidator(i Implementation) error {
switch i {
case ImplementationWebdav, ImplementationLocal:
case ImplementationWebdav, ImplementationLocal, ImplementationAlist:
return nil
default:
return fmt.Errorf("storage: invalid enum value for implementation field: %q", i)

View File

@@ -12,12 +12,16 @@ import (
// Tx is a transactional client that is created by calling Client.Tx().
type Tx struct {
config
// Blacklist is the client for interacting with the Blacklist builders.
Blacklist *BlacklistClient
// DownloadClients is the client for interacting with the DownloadClients builders.
DownloadClients *DownloadClientsClient
// Episode is the client for interacting with the Episode builders.
Episode *EpisodeClient
// History is the client for interacting with the History builders.
History *HistoryClient
// ImportList is the client for interacting with the ImportList builders.
ImportList *ImportListClient
// Indexers is the client for interacting with the Indexers builders.
Indexers *IndexersClient
// Media is the client for interacting with the Media builders.
@@ -159,9 +163,11 @@ func (tx *Tx) Client() *Client {
}
func (tx *Tx) init() {
tx.Blacklist = NewBlacklistClient(tx.config)
tx.DownloadClients = NewDownloadClientsClient(tx.config)
tx.Episode = NewEpisodeClient(tx.config)
tx.History = NewHistoryClient(tx.config)
tx.ImportList = NewImportListClient(tx.config)
tx.Indexers = NewIndexersClient(tx.config)
tx.Media = NewMediaClient(tx.config)
tx.NotificationClient = NewNotificationClientClient(tx.config)
@@ -176,7 +182,7 @@ func (tx *Tx) init() {
// of them in order to commit or rollback the transaction.
//
// If a closed transaction is embedded in one of the generated entities, and the entity
// applies a query, for example: DownloadClients.QueryXXX(), the query will be executed
// applies a query, for example: Blacklist.QueryXXX(), the query will be executed
// through the driver which created this transaction.
//
// Note that txDriver is not goroutine safe.

entrypoint.sh Normal file (12 lines)
View File

@@ -0,0 +1,12 @@
#!/bin/bash
# create a group and user matching the requested PGID/PUID
groupadd -g ${PGID} abc1
useradd abc1 -u ${PUID} -g ${PGID} -m -s /bin/bash
## reset ownership of the data directory
chown -R "${PUID}:${PGID}" /app/data
# apply the requested umask, defaulting to 022
umask ${UMASK:-022}
cd /app
# drop privileges and hand off to the polaris binary
exec gosu "${PUID}:${PGID}" /app/polaris

go.mod (72 changed lines)
View File

@@ -1,35 +1,83 @@
module polaris
go 1.22.4
go 1.23
toolchain go1.23.1
require (
entgo.io/ent v0.13.1
github.com/golang-jwt/jwt/v5 v5.2.1
github.com/mattn/go-sqlite3 v1.14.16
github.com/mattn/go-sqlite3 v1.14.22 // indirect
github.com/robfig/cron v1.2.0
go.uber.org/zap v1.27.0
golang.org/x/net v0.27.0
golang.org/x/net v0.33.0
)
require (
github.com/adrg/strutil v0.3.1
github.com/PuerkitoBio/goquery v1.9.2
github.com/anacrolix/torrent v1.57.1
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc
github.com/gin-contrib/zap v1.1.3
github.com/ncruces/go-sqlite3 v0.18.4
github.com/nikoksr/notify v1.0.0
github.com/stretchr/testify v1.9.0
golift.io/starr v1.0.0
)
require (
cloud.google.com/go v0.115.0 // indirect
cloud.google.com/go/ai v0.8.0 // indirect
cloud.google.com/go/auth v0.7.1 // indirect
cloud.google.com/go/auth/oauth2adapt v0.2.3 // indirect
cloud.google.com/go/compute/metadata v0.5.0 // indirect
cloud.google.com/go/longrunning v0.5.10 // indirect
github.com/BurntSushi/toml v1.4.0 // indirect
github.com/DATA-DOG/go-sqlmock v1.5.2 // indirect
github.com/anacrolix/generics v0.0.3-0.20240902042256-7fb2702ef0ca // indirect
github.com/anacrolix/missinggo v1.3.0 // indirect
github.com/anacrolix/missinggo/v2 v2.7.4 // indirect
github.com/andybalholm/cascadia v1.3.2 // indirect
github.com/blinkbean/dingtalk v1.1.3 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/go-logr/logr v1.4.2 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-telegram-bot-api/telegram-bot-api v4.6.4+incompatible // indirect
github.com/go-test/deep v1.0.4 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.4 // indirect
github.com/google/generative-ai-go v0.19.0 // indirect
github.com/google/s2a-go v0.1.7 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect
github.com/googleapis/gax-go/v2 v2.12.5 // indirect
github.com/gregdel/pushover v1.3.1 // indirect
github.com/huandu/xstrings v1.3.2 // indirect
github.com/minio/sha256-simd v1.0.0 // indirect
github.com/mr-tron/base58 v1.2.0 // indirect
github.com/multiformats/go-multihash v0.2.3 // indirect
github.com/multiformats/go-varint v0.0.6 // indirect
github.com/ncruces/julianday v1.0.0 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/spaolacci/murmur3 v1.1.0 // indirect
github.com/stretchr/objx v0.5.2 // indirect
github.com/stretchr/testify v1.9.0 // indirect
github.com/technoweenie/multipartstreamer v1.0.1 // indirect
golang.org/x/sync v0.7.0 // indirect
github.com/tetratelabs/wazero v1.8.0 // indirect
go.opencensus.io v0.24.0 // indirect
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.53.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0 // indirect
go.opentelemetry.io/otel v1.28.0 // indirect
go.opentelemetry.io/otel/metric v1.28.0 // indirect
go.opentelemetry.io/otel/trace v1.28.0 // indirect
golang.org/x/oauth2 v0.21.0 // indirect
golang.org/x/sync v0.10.0 // indirect
golang.org/x/time v0.5.0 // indirect
google.golang.org/api v0.188.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20240711142825-46eb208f015d // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240711142825-46eb208f015d // indirect
google.golang.org/grpc v1.65.0 // indirect
gopkg.in/natefinch/lumberjack.v2 v2.2.1 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
lukechampine.com/blake3 v1.1.6 // indirect
)
require (
@@ -74,11 +122,11 @@ require (
github.com/ugorji/go/codec v1.2.12 // indirect
github.com/zclconf/go-cty v1.8.0 // indirect
golang.org/x/arch v0.8.0 // indirect
golang.org/x/crypto v0.25.0
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56
golang.org/x/mod v0.19.0 // indirect
golang.org/x/sys v0.22.0
golang.org/x/text v0.16.0 // indirect
golang.org/x/crypto v0.31.0
golang.org/x/exp v0.0.0-20240823005443-9b4947da3948
golang.org/x/mod v0.20.0 // indirect
golang.org/x/sys v0.28.0
golang.org/x/text v0.21.0 // indirect
google.golang.org/protobuf v1.34.2 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect

go.sum (414 changed lines)
View File

@@ -1,35 +1,115 @@
ariga.io/atlas v0.19.1-0.20240203083654-5948b60a8e43 h1:GwdJbXydHCYPedeeLt4x/lrlIISQ4JTH1mRWuE5ZZ14=
ariga.io/atlas v0.19.1-0.20240203083654-5948b60a8e43/go.mod h1:uj3pm+hUTVN/X5yfdBexHlZv+1Xu5u5ZbZx7+CDavNU=
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.115.0 h1:CnFSK6Xo3lDYRoBKEcAtia6VSC837/ZkJuRduSFnr14=
cloud.google.com/go v0.115.0/go.mod h1:8jIM5vVgoAEoiVxQ/O4BFTfHqulPZgs/ufEzMcFMdWU=
cloud.google.com/go/ai v0.8.0 h1:rXUEz8Wp2OlrM8r1bfmpF2+VKqc1VJpafE3HgzRnD/w=
cloud.google.com/go/ai v0.8.0/go.mod h1:t3Dfk4cM61sytiggo2UyGsDVW3RF1qGZaUKDrZFyqkE=
cloud.google.com/go/auth v0.7.1 h1:Iv1bbpzJ2OIg16m94XI9/tlzZZl3cdeR3nGVGj78N7s=
cloud.google.com/go/auth v0.7.1/go.mod h1:VEc4p5NNxycWQTMQEDQF0bd6aTMb6VgYDXEwiJJQAbs=
cloud.google.com/go/auth/oauth2adapt v0.2.3 h1:MlxF+Pd3OmSudg/b1yZ5lJwoXCEaeedAguodky1PcKI=
cloud.google.com/go/auth/oauth2adapt v0.2.3/go.mod h1:tMQXOfZzFuNuUxOypHlQEXgdfX5cuhwU+ffUuXRJE8I=
cloud.google.com/go/compute v1.24.0 h1:phWcR2eWzRJaL/kOiJwfFsPs4BaKq1j6vnpZrc1YlVg=
cloud.google.com/go/compute/metadata v0.5.0 h1:Zr0eK8JbFv6+Wi4ilXAR8FJ3wyNdpxHKJNPos6LTZOY=
cloud.google.com/go/compute/metadata v0.5.0/go.mod h1:aHnloV2TPI38yx4s9+wAZhHykWvVCfu7hQbF+9CWoiY=
cloud.google.com/go/longrunning v0.5.10 h1:eB/BniENNRKhjz/xgiillrdcH3G74TGSl3BXinGlI7E=
cloud.google.com/go/longrunning v0.5.10/go.mod h1:tljz5guTr5oc/qhlUjBlk7UAIFMOGuPNxkNDZXlLics=
crawshaw.io/iox v0.0.0-20181124134642-c51c3df30797/go.mod h1:sXBiorCo8c46JlQV3oXPKINnZ8mcqnye1EkVkqsectk=
crawshaw.io/sqlite v0.3.2/go.mod h1:igAO5JulrQ1DbdZdtVq48mnZUBAPOeFzer7VhDWNtW4=
entgo.io/ent v0.13.1 h1:uD8QwN1h6SNphdCCzmkMN3feSUzNnVvV/WIkHKMbzOE=
entgo.io/ent v0.13.1/go.mod h1:qCEmo+biw3ccBn9OyL4ZK5dfpwg++l1Gxwac5B1206A=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0=
github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60=
github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
github.com/adrg/strutil v0.3.1 h1:OLvSS7CSJO8lBii4YmBt8jiK9QOtB9CzCzwl4Ic/Fz4=
github.com/adrg/strutil v0.3.1/go.mod h1:8h90y18QLrs11IBffcGX3NW/GFBXCMcNg4M7H6MspPA=
github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7OputlJIzU=
github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU=
github.com/PuerkitoBio/goquery v1.9.2 h1:4/wZksC3KgkQw7SQgkKotmKljk0M6V8TUvA8Wb4yPeE=
github.com/PuerkitoBio/goquery v1.9.2/go.mod h1:GHPCaP0ODyyxqcNoFGYlAprUFH81NuRPd0GX3Zu2Mvk=
github.com/RoaringBitmap/roaring v0.4.7/go.mod h1:8khRDP4HmeXns4xIj9oGrKSz7XTQiJx2zgh7AcNke4w=
github.com/RoaringBitmap/roaring v0.4.17/go.mod h1:D3qVegWTmfCaX4Bl5CrBE9hfrSrrXIr8KVNvRsDi1NI=
github.com/RoaringBitmap/roaring v0.4.23/go.mod h1:D0gp8kJQgE1A4LQ5wFLggQEyvDi06Mq5mKs52e1TwOo=
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8=
github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/anacrolix/dht/v2 v2.19.2-0.20221121215055-066ad8494444 h1:8V0K09lrGoeT2KRJNOtspA7q+OMxGwQqK/Ug0IiaaRE=
github.com/anacrolix/dht/v2 v2.19.2-0.20221121215055-066ad8494444/go.mod h1:MctKM1HS5YYDb3F30NGJxLE+QPuqWoT5ReW/4jt8xew=
github.com/anacrolix/envpprof v0.0.0-20180404065416-323002cec2fa/go.mod h1:KgHhUaQMc8cC0+cEflSgCFNFbKwi5h54gqtVn8yhP7c=
github.com/anacrolix/envpprof v1.0.0/go.mod h1:KgHhUaQMc8cC0+cEflSgCFNFbKwi5h54gqtVn8yhP7c=
github.com/anacrolix/envpprof v1.1.0/go.mod h1:My7T5oSqVfEn4MD4Meczkw/f5lSIndGAKu/0SM/rkf4=
github.com/anacrolix/generics v0.0.3-0.20240902042256-7fb2702ef0ca h1:aiiGqSQWjtVNdi8zUMfA//IrM8fPkv2bWwZVPbDe0wg=
github.com/anacrolix/generics v0.0.3-0.20240902042256-7fb2702ef0ca/go.mod h1:MN3ve08Z3zSV/rTuX/ouI4lNdlfTxgdafQJiLzyNRB8=
github.com/anacrolix/log v0.3.0/go.mod h1:lWvLTqzAnCWPJA08T2HCstZi0L1y2Wyvm3FJgwU9jwU=
github.com/anacrolix/log v0.6.0/go.mod h1:lWvLTqzAnCWPJA08T2HCstZi0L1y2Wyvm3FJgwU9jwU=
github.com/anacrolix/missinggo v1.1.0/go.mod h1:MBJu3Sk/k3ZfGYcS7z18gwfu72Ey/xopPFJJbTi5yIo=
github.com/anacrolix/missinggo v1.1.2-0.20190815015349-b888af804467/go.mod h1:MBJu3Sk/k3ZfGYcS7z18gwfu72Ey/xopPFJJbTi5yIo=
github.com/anacrolix/missinggo v1.2.1/go.mod h1:J5cMhif8jPmFoC3+Uvob3OXXNIhOUikzMt+uUjeM21Y=
github.com/anacrolix/missinggo v1.3.0 h1:06HlMsudotL7BAELRZs0yDZ4yVXsHXGi323QBjAVASw=
github.com/anacrolix/missinggo v1.3.0/go.mod h1:bqHm8cE8xr+15uVfMG3BFui/TxyB6//H5fwlq/TeqMc=
github.com/anacrolix/missinggo/perf v1.0.0/go.mod h1:ljAFWkBuzkO12MQclXzZrosP5urunoLS0Cbvb4V0uMQ=
github.com/anacrolix/missinggo/v2 v2.2.0/go.mod h1:o0jgJoYOyaoYQ4E2ZMISVa9c88BbUBVQQW4QeRkNCGY=
github.com/anacrolix/missinggo/v2 v2.5.1/go.mod h1:WEjqh2rmKECd0t1VhQkLGTdIWXO6f6NLjp5GlMZ+6FA=
github.com/anacrolix/missinggo/v2 v2.7.4 h1:47h5OXoPV8JbA/ACA+FLwKdYbAinuDO8osc2Cu9xkxg=
github.com/anacrolix/missinggo/v2 v2.7.4/go.mod h1:vVO5FEziQm+NFmJesc7StpkquZk+WJFCaL0Wp//2sa0=
github.com/anacrolix/stm v0.2.0/go.mod h1:zoVQRvSiGjGoTmbM0vSLIiaKjWtNPeTvXUSdJQA4hsg=
github.com/anacrolix/tagflag v0.0.0-20180109131632-2146c8d41bf0/go.mod h1:1m2U/K6ZT+JZG0+bdMK6qauP49QT4wE5pmhJXOKKCHw=
github.com/anacrolix/tagflag v1.0.0/go.mod h1:1m2U/K6ZT+JZG0+bdMK6qauP49QT4wE5pmhJXOKKCHw=
github.com/anacrolix/tagflag v1.1.0/go.mod h1:Scxs9CV10NQatSmbyjqmqmeQNwGzlNe0CMUMIxqHIG8=
github.com/anacrolix/torrent v1.57.1 h1:CS8rYfC2Oe15NPBhwCNs/3WBY6HiBCPDFpY+s9aFHbA=
github.com/anacrolix/torrent v1.57.1/go.mod h1:NNBg4lP2/us9Hp5+cLNcZRILM69cNoKIkqMGqr9AuR0=
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw=
github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo=
github.com/benbjohnson/immutable v0.2.0/go.mod h1:uc6OHo6PN2++n98KHLxW8ef4W42ylHiQSENghE1ezxI=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/blinkbean/dingtalk v1.1.3 h1:MbidFZYom7DTFHD/YIs+eaI7kRy52kmWE/sy0xjo6E4=
github.com/blinkbean/dingtalk v1.1.3/go.mod h1:9BaLuGSBqY3vT5hstValh48DbsKO7vaHaJnG9pXwbto=
github.com/bradfitz/iter v0.0.0-20140124041915-454541ec3da2/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo=
github.com/bradfitz/iter v0.0.0-20190303215204-33e6a9893b0c/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo=
github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8 h1:GKTyiRCL6zVf5wWaqKnf+7Qs6GbEPfd4iMOitWzXJx8=
github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8/go.mod h1:spo1JLcs67NmW1aVLEgtA8Yy1elc+X8y5SRW1sFW4Og=
github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0=
github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4=
github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM=
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cyruzin/golang-tmdb v1.6.3 h1:TKK9h+uuwiDOaFlsVispG1KxqhsSM5Y4ZELnUF3GlqU=
github.com/cyruzin/golang-tmdb v1.6.3/go.mod h1:ZSryJLCcY+9TiKU+LbouXKns++YBrM8Tizannr05c+I=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
github.com/dustin/go-humanize v0.0.0-20180421182945-02af3965c54e/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
github.com/gabriel-vasile/mimetype v1.4.4 h1:QjV6pZ7/XZ7ryI2KuyeEDE8wnh7fHP9YnQy+R0LnH8I=
@@ -42,6 +122,21 @@ github.com/gin-contrib/zap v1.1.3 h1:9e/U9fYd4/OBfmSEBs5hHZq114uACn7bpuzvCkcJySA
github.com/gin-contrib/zap v1.1.3/go.mod h1:+BD/6NYZKJyUpqVoJEvgeq9GLz8pINEQvak9LHNOTSE=
github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
github.com/glycerine/go-unsnap-stream v0.0.0-20180323001048-9f0cb55181dd/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
github.com/glycerine/go-unsnap-stream v0.0.0-20181221182339-f9677308dec2/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
github.com/glycerine/go-unsnap-stream v0.0.0-20190901134440-81cf024a9e0a/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
github.com/glycerine/goconvey v0.0.0-20180728074245-46e3a41ad493/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
github.com/glycerine/goconvey v0.0.0-20190315024820-982ee783a72e/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-openapi/inflect v0.19.0 h1:9jCH9scKIbHeV9m12SmPilScz6krDxKRasNNSNPXu/4=
github.com/go-openapi/inflect v0.19.0/go.mod h1:lHpZVlpIQqLyKwJ4N+YSc9hchQy/i12fJykb83CRBH4=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
@@ -52,26 +147,71 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8=
github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-telegram-bot-api/telegram-bot-api v4.6.4+incompatible h1:2cauKuaELYAEARXRkq2LrJ0yDDv1rW7+wrTEdVL3uaU=
github.com/go-telegram-bot-api/telegram-bot-api v4.6.4+incompatible/go.mod h1:qf9acutJ8cwBUhm1bqgz6Bei9/C/c93FPDljKWwsOgM=
github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68=
github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
github.com/go-test/deep v1.0.4 h1:u2CU3YKy9I2pmu9pX0eq50wCgjfGIt539SqR7FbHiho=
github.com/go-test/deep v1.0.4/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/btree v0.0.0-20180124185431-e89373fe6b4a/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/generative-ai-go v0.19.0 h1:R71szggh8wHMCUlEMsW2A/3T+5LdEIkiaHSYgSpUgdg=
github.com/google/generative-ai-go v0.19.0/go.mod h1:JYolL13VG7j79kM5BtHz4qwONHkeJQzOCkKXnpqtS/E=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o=
github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs=
github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0=
github.com/googleapis/gax-go/v2 v2.12.5 h1:8gw9KZK8TiVKB6q3zHY3SBzLnrGp6HQjyfYBYGmXdxA=
github.com/googleapis/gax-go/v2 v2.12.5/go.mod h1:BUDKcWo+RaKq5SC9vVYL0wLADa3VcfswbOMMRmB9H3E=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20190309154008-847fc94819f9/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gregdel/pushover v1.3.1 h1:4bMLITOZ15+Zpi6qqoGqOPuVHCwSUvMCgVnN5Xhilfo=
github.com/gregdel/pushover v1.3.1/go.mod h1:EcaO66Nn1StkpEm1iKtBTV3d2A16SoMsVER1PthX7to=
github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/hashicorp/hcl/v2 v2.13.0 h1:0Apadu1w6M11dyGFxWnmhhcMjkbAiKCv7G1r/2QgCNc=
@@ -80,12 +220,30 @@ github.com/hekmon/cunits/v2 v2.1.0 h1:k6wIjc4PlacNOHwKEMBgWV2/c8jyD4eRMs5mR1BBhI
github.com/hekmon/cunits/v2 v2.1.0/go.mod h1:9r1TycXYXaTmEWlAIfFV8JT+Xo59U96yUJAYHxzii2M=
github.com/hekmon/transmissionrpc/v3 v3.0.0 h1:0Fb11qE0IBh4V4GlOwHNYpqpjcYDp5GouolwrpmcUDQ=
github.com/hekmon/transmissionrpc/v3 v3.0.0/go.mod h1:38SlNhFzinVUuY87wGj3acOmRxeYZAZfrj6Re7UgCDg=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo=
github.com/huandu/xstrings v1.2.0/go.mod h1:DvyZB1rfVYsBIigL8HwpZgxHwXozlTgGqn63UyNX5k4=
github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/huandu/xstrings v1.3.2 h1:L18LIDzqlW6xN2rEkpdV8+oL/IXWJ1APd+vsdYy4Wdw=
github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/jordan-wright/email v4.0.1-0.20210109023952-943e75fe5223+incompatible h1:jdpOPRN1zP63Td1hDQbZW73xKmzDvZHzVdNYxhnTMDA=
github.com/jordan-wright/email v4.0.1-0.20210109023952-943e75fe5223+incompatible/go.mod h1:1c7szIrayyPPB/987hsnvNzLushdWf4o/79s3P08L8A=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/kisielk/sqlstruct v0.0.0-20201105191214-5f3e10d3ab46/go.mod h1:yyMNCyc/Ib3bDTKd379tNMpB/7/H5TjM2Y9QJ5THLbE=
github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM=
github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
@@ -101,10 +259,11 @@ github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0V
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-sqlite3 v1.14.16 h1:yOQRA0RpS5PFz/oikGwBEqvAWhWg5ufRz4ETLjwpU1Y=
github.com/mattn/go-sqlite3 v1.14.16/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/minio/sha256-simd v1.0.0 h1:v1ta+49hkWZyvaKwrQB8elexRqm6Y0aMLjCNsrYxo6g=
github.com/minio/sha256-simd v1.0.0/go.mod h1:OuYzVNI5vcoYIAmbIvHPl3N3jUzVedXbKy5RFepssQM=
github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 h1:DpOJ2HYzCv8LZP15IdmG+YdwD2luVPHITV96TkirNBM=
github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
@@ -112,49 +271,99 @@ github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RR
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/mr-tron/base58 v1.2.0 h1:T/HDJBh4ZCPbU39/+c3rRvE0uKBQlU27+QI8LJ4t64o=
github.com/mr-tron/base58 v1.2.0/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc=
github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg=
github.com/mschoch/smat v0.2.0/go.mod h1:kc9mz7DoBKqDyiRL7VZN8KvXQMWeTaVnttLRXOlotKw=
github.com/multiformats/go-multihash v0.2.3 h1:7Lyc8XfX/IY2jWb/gI7JP+o7JEq9hOa7BFvVU9RSh+U=
github.com/multiformats/go-multihash v0.2.3/go.mod h1:dXgKXCXjBzdscBLk9JkjINiEsCKRVch90MdaGiKsvSM=
github.com/multiformats/go-varint v0.0.6 h1:gk85QWKxh3TazbLxED/NlDVv8+q+ReFJk7Y2W/KhfNY=
github.com/multiformats/go-varint v0.0.6/go.mod h1:3Ls8CIEsrijN6+B7PbrXRPxHRPuXSrVKRY101jdMZYE=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/natefinch/lumberjack v2.0.0+incompatible h1:4QJd3OLAMgj7ph+yZTuX13Ld4UpgHp07nNdFX7mqFfM=
github.com/natefinch/lumberjack v2.0.0+incompatible/go.mod h1:Wi9p2TTF5DG5oU+6YfsmYQpsTIOm0B1VNzQg9Mw6nPk=
github.com/ncruces/go-sqlite3 v0.18.4 h1:Je8o3y33MDwPYY/Cacas8yCsuoUzpNY/AgoSlN2ekyE=
github.com/ncruces/go-sqlite3 v0.18.4/go.mod h1:4HLag13gq1k10s4dfGBhMfRVsssJRT9/5hYqVM9RUYo=
github.com/ncruces/julianday v1.0.0 h1:fH0OKwa7NWvniGQtxdJRxAgkBMolni2BjDHaWTxqt7M=
github.com/ncruces/julianday v1.0.0/go.mod h1:Dusn2KvZrrovOMJuOt0TNXL6tB7U2E8kvza5fFc9G7g=
github.com/nikoksr/notify v1.0.0 h1:qe9/6FRsWdxBgQgWcpvQ0sv8LRGJZDpRB4TkL2uNdO8=
github.com/nikoksr/notify v1.0.0/go.mod h1:hPaaDt30d6LAA7/5nb0e48Bp/MctDfycCSs8VEgN29I=
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs=
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
github.com/prometheus/client_golang v1.5.1/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
github.com/prometheus/procfs v0.0.11/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/robfig/cron v1.2.0 h1:ZjScXvvxeQ63Dbyxy76Fj3AT3Ut0aKsyd2/tl3DTMuQ=
github.com/robfig/cron v1.2.0/go.mod h1:JGuDeoQd7Z6yL4zQhZ3OPEVHB7fL6Ka6skscFHfmt2k=
github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5PCi+MFsC7HjREoAz1BU+Mq60+05gifQSsHSDG/8=
github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ=
github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4=
github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/assertions v0.0.0-20190215210624-980c5ac6f3ac/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s=
github.com/smartystreets/goconvey v0.0.0-20190306220146-200a235640ff/go.mod h1:KSQcGKpxUMHk3nbYzs/tIBAM2iDooCn0BmttHOJEbLs=
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI=
github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8=
github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY=
github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0=
github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI=
github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
@@ -166,14 +375,37 @@ github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/technoweenie/multipartstreamer v1.0.1 h1:XRztA5MXiR1TIRHxH2uNxXxaIkKQDeX7m2XsSOlQEnM=
github.com/technoweenie/multipartstreamer v1.0.1/go.mod h1:jNVxdtShOxzAsukZwTSw6MDx5eUJoiEBsSvzDU9uzog=
github.com/tetratelabs/wazero v1.8.0 h1:iEKu0d4c2Pd+QSRieYbnQC9yiFlMS9D+Jr0LsRmcF4g=
github.com/tetratelabs/wazero v1.8.0/go.mod h1:yAI0XTsMBhREkM/YDAK/zNou3GoiAce1P6+rp/wQhjs=
github.com/tinylib/msgp v1.0.2/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
github.com/tinylib/msgp v1.1.0/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
github.com/tinylib/msgp v1.1.2/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4=
github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI=
github.com/willf/bitset v1.1.9/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
github.com/willf/bitset v1.1.10/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/zclconf/go-cty v1.8.0 h1:s4AvqaeQzJIu3ndv4gVIhplVD0krU+bgrcLSVUnaWuA=
github.com/zclconf/go-cty v1.8.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk=
go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.53.0 h1:9G6E0TXzGFVfTnawRzrPl83iHOAV7L8NJiR8RSGYV1g=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.53.0/go.mod h1:azvtTADFQJA8mX80jIH/akaE7h+dbm/sVuaHqN13w74=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0 h1:4K4tsIXefpVJtvA/8srF4V4y0akAoPHkIslgAkjixJA=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0/go.mod h1:jjdQuTGVsXV4vSs+CJ2qYDeDPf9yIJV23qlIzBm73Vg=
go.opentelemetry.io/otel v1.28.0 h1:/SqNcYk+idO0CxKEUOtKQClMK/MimZihKYMruSMViUo=
go.opentelemetry.io/otel v1.28.0/go.mod h1:q68ijF8Fc8CnMHKyzqL6akLO46ePnjkgfIMIjUIX9z4=
go.opentelemetry.io/otel/metric v1.28.0 h1:f0HGvSl1KRAU1DLgLGFjrwVyismPlnuU6JD6bOeuA5Q=
go.opentelemetry.io/otel/metric v1.28.0/go.mod h1:Fb1eVBFZmLVTMb6PPohq3TO9IIhUisDsbJoL/+uQW4s=
go.opentelemetry.io/otel/trace v1.28.0 h1:GhQ9cUuQGmNDd5BTCP2dAvv75RdMxEfTmYejp+lkx9g=
go.opentelemetry.io/otel/trace v1.28.0/go.mod h1:jPyXzNPg6da9+38HEwElrQiHlVMTnVfM3/yv2OlIHaI=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
@@ -183,47 +415,169 @@ go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc=
golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.25.0 h1:ypSNr+bnYL2YhwoMt2zPxHFmbAN1KZs/njMG3hxUp30=
golang.org/x/crypto v0.25.0/go.mod h1:T+wALwcMOSE0kXgUAnPAHqTLW+XHgcELELW8VaDgm/M=
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8=
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY=
golang.org/x/mod v0.19.0 h1:fEdghXQSo20giMthA7cd28ZC+jts4amQ3YMXiP5oMQ8=
golang.org/x/mod v0.19.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20240823005443-9b4947da3948 h1:kx6Ds3MlpiUHKj7syVnbp57++8WpuKPcR5yjLBjvLEA=
golang.org/x/exp v0.0.0-20240823005443-9b4947da3948/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.20.0 h1:utOm6MM3R3dnawAiJgn0y+xvuYRsm1RKM/4giyfDgV0=
golang.org/x/mod v0.20.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.27.0 h1:5K3Njcw06/l2y9vpGCSdcxWOYHOUk3dVNGDXN+FvAys=
golang.org/x/net v0.27.0/go.mod h1:dDi0PyhWNoiUOrAS8uXv/vnScO4wnHQO4mj9fn/RytE=
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.21.0 h1:tsimM75w1tF/uws5rbeHzIWxEqElMehnc+iW793zsZs=
golang.org/x/oauth2 v0.21.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI=
golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4=
golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.23.0 h1:SGsXPZ+2l4JsgaCKkx+FQ9YZ5XEtA1GZYuoDjenLjvg=
golang.org/x/tools v0.23.0/go.mod h1:pnu6ufv6vQkll6szChhK3C3L/ruaIv5eBeztNG8wtsI=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golift.io/starr v1.0.0 h1:IDSaSL+ZYxdLT/Lg//dg/iwZ39LHO3D5CmbLCOgSXbI=
golift.io/starr v1.0.0/go.mod h1:xnUwp4vK62bDvozW9QHUYc08m6kjwaZnGw3Db65fQHw=
google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
google.golang.org/api v0.188.0 h1:51y8fJ/b1AaaBRJr4yWm96fPcuxSo0JcegXE3DaHQHw=
google.golang.org/api v0.188.0/go.mod h1:VR0d+2SIiWOYG3r/jdm7adPW9hI2aRv9ETOSCQ9Beag=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
google.golang.org/genproto v0.0.0-20240711142825-46eb208f015d h1:/hmn0Ku5kWij/kjGsrcJeC1T/MrJi2iNWwgAqrihFwc=
google.golang.org/genproto/googleapis/api v0.0.0-20240711142825-46eb208f015d h1:kHjw/5UfflP/L5EbledDrcG4C2597RtymmGRZvHiCuY=
google.golang.org/genproto/googleapis/api v0.0.0-20240711142825-46eb208f015d/go.mod h1:mw8MG/Qz5wfgYr6VqVCiZcHe/GJEfI+oGGDCohaVgB0=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240711142825-46eb208f015d h1:JU0iKnSg02Gmb5ZdV8nYsKEKsP6o/FGVWTrw4i1DA9A=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240711142825-46eb208f015d/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY=
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
google.golang.org/grpc v1.65.0 h1:bs/cUb4lp1G5iImFFd3u5ixQzweKizoZJAwBNLR42lc=
google.golang.org/grpc v1.65.0/go.mod h1:WgYC2ypjlB0EiQi6wdKixMqukr6lBc0Vo+oOgjrM5ZQ=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc=
gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
lukechampine.com/blake3 v1.1.6 h1:H3cROdztr7RCfoaTpGZFQsrqvweFLrqS73j7L7cmR5c=
lukechampine.com/blake3 v1.1.6/go.mod h1:tkKEOtDkNtklkXtLNEOGNq5tcV90tJiA1vAA12R78LA=
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=


@@ -26,6 +26,7 @@ func init() {
MaxSize: 50, // megabytes
MaxBackups: 3,
MaxAge: 30, // days
Compress: true,
})
}

196
pkg/alist/alist.go Normal file

@@ -0,0 +1,196 @@
package alist
import (
"bytes"
"encoding/json"
"io"
"net/http"
"net/url"
"strings"
"time"
"github.com/pkg/errors"
)
type Resposne[T any] struct {
Code int `json:"code"`
Message string `json:"message"`
Data T `json:"data"`
}
type Config struct {
Username string
Password string
URL string
}
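// New creates an alist API client for the given server URL and credentials.
// Call Login first so the auth token is available for subsequent requests.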
func New(cfg *Config) *Client {
cfg.URL = strings.Trim(cfg.URL, "/")
return &Client{
cfg: cfg,
http: http.DefaultClient,
}
}
type Client struct {
cfg *Config
http *http.Client
token string
}
func (c *Client) Login() (string, error) {
p := map[string]string{
"username": c.cfg.Username,
"password": c.cfg.Password,
}
data, _ := json.Marshal(p)
resp, err := c.http.Post(c.cfg.URL+loginUrl, "application/json", bytes.NewBuffer(data))
if err != nil {
return "", errors.Wrap(err, "login")
}
defer resp.Body.Close()
d1, err := io.ReadAll(resp.Body)
if err != nil {
return "", errors.Wrap(err, "read body")
}
var rp Resposne[map[string]string]
err = json.Unmarshal(d1, &rp)
if err != nil {
return "", errors.Wrap(err, "json")
}
if rp.Code != 200 {
return "", errors.Errorf("alist error: code %d, %s", rp.Code, rp.Message)
}
c.token = rp.Data["token"]
return c.token, nil
}
type LsInfo struct {
Content []struct {
Name string `json:"name"`
Size int `json:"size"`
IsDir bool `json:"is_dir"`
Modified time.Time `json:"modified"`
Created time.Time `json:"created"`
Sign string `json:"sign"`
Thumb string `json:"thumb"`
Type int `json:"type"`
Hashinfo string `json:"hashinfo"`
HashInfo any `json:"hash_info"`
} `json:"content"`
Total int `json:"total"`
Readme string `json:"readme"`
Header string `json:"header"`
Write bool `json:"write"`
Provider string `json:"provider"`
}
func (c *Client) Ls(dir string) (*LsInfo, error) {
in := map[string]string{
"path": dir,
}
resp, err := c.post(c.cfg.URL+lsUrl, in)
if err != nil {
return nil, errors.Wrap(err, "http")
}
var out Resposne[LsInfo]
err = json.Unmarshal(resp, &out)
if err != nil {
return nil, err
}
if out.Code != 200 {
return nil, errors.Errorf("alist error: code %d, %s", out.Code, out.Message)
}
return &out.Data, nil
}
func (c *Client) Mkdir(dir string) error {
in := map[string]string{
"path": dir,
}
resp, err := c.post(c.cfg.URL+mkdirUrl, in)
if err != nil {
return errors.Wrap(err, "http")
}
var out Resposne[any]
err = json.Unmarshal(resp, &out)
if err != nil {
return err
}
if out.Code != 200 {
return errors.Errorf("alist error: code %d, %s", out.Code, out.Message)
}
return nil
}
func (c *Client) post(url string, body interface{}) ([]byte, error) {
data, err := json.Marshal(body)
if err != nil {
return nil, err
}
req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(data))
if err != nil {
return nil, errors.Wrap(err, "new request")
}
req.Header.Add("Authorization", c.token)
req.Header.Set("Content-Type", "application/json")
resp, err := c.http.Do(req)
if err != nil {
return nil, errors.Wrap(err, "http")
}
defer resp.Body.Close()
d1, err := io.ReadAll(resp.Body)
if err != nil {
return nil, errors.Wrap(err, "read body")
}
return d1, nil
}
type UploadStreamResponse struct {
Task struct {
ID string `json:"id"`
Name string `json:"name"`
State int `json:"state"`
Status string `json:"status"`
Progress int `json:"progress"`
Error string `json:"error"`
} `json:"task"`
}
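// UploadStream uploads the reader's contents to the given alist path via a streaming PUT;
// the As-Task header makes the server handle the upload as an asynchronous task.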
func (c *Client) UploadStream(reader io.Reader, size int64, toDir string) (*UploadStreamResponse, error) {
req, err := http.NewRequest(http.MethodPut, c.cfg.URL+streamUploadUrl, reader)
if err != nil {
return nil, err
}
req.Header.Add("Authorization", c.token)
req.Header.Add("File-Path", url.PathEscape(toDir))
req.Header.Add("As-Task", "true")
req.Header.Add("Content-Type", "application/octet-stream")
req.ContentLength = size
res, err := c.http.Do(req)
if err != nil {
return nil, err
}
defer res.Body.Close()
d1, err := io.ReadAll(res.Body)
if err != nil {
return nil, err
}
var out Resposne[UploadStreamResponse]
err = json.Unmarshal(d1, &out)
if err != nil {
return nil, err
}
if out.Code != 200 {
return nil, errors.Errorf("alist error: code %d, %s", out.Code, out.Message)
}
return &out.Data, nil
}

46
pkg/alist/alist_test.go Normal file

@@ -0,0 +1,46 @@
package alist
import (
"os"
"polaris/log"
"testing"
)
func TestLogin(t *testing.T) {
c := New(&Config{
URL: "http://10.0.0.8:5244/",
Username: "",
Password: "",
})
cre, err := c.Login()
if err != nil {
log.Errorf("login fail: %v", err)
t.Fail()
} else {
log.Errorf("login success: %s", cre)
}
info, err := c.Ls("/aliyun")
if err != nil {
log.Errorf("ls fail: %v", err)
t.Fail()
} else {
log.Infof("ls results: %+v", info)
}
f, err := os.Open("/Users/simonding/Downloads/Steam Link_1.3.9_APKPure.apk")
if err != nil {
log.Errorf("openfile: %v", err)
t.Fail()
} else {
defer f.Close()
ss, _ := f.Stat()
log.Infof("upload file size %d", ss.Size())
info, err := c.UploadStream(f, ss.Size(), "/aliyun/Steam Link_1.3.9_APKPure.apk")
if err != nil {
log.Errorf("upload error: %v", err)
t.Fail()
} else {
log.Infof("upload success: %+v", info)
}
}
}

8
pkg/alist/url.go Normal file

@@ -0,0 +1,8 @@
package alist
const (
loginUrl = "/api/auth/login"
lsUrl = "/api/fs/list"
mkdirUrl = "/api/fs/mkdir"
streamUploadUrl = "/api/fs/put"
)

52
pkg/cache/cache.go vendored Normal file

@@ -0,0 +1,52 @@
package cache
import (
"polaris/log"
"polaris/pkg/utils"
"time"
)
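// NewCache returns an in-memory cache whose entries expire after the given timeout.
// Expired entries are evicted lazily whenever Get is called.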
func NewCache[T comparable, S any](timeout time.Duration) *Cache[T, S] {
c := &Cache[T, S]{
m: utils.Map[T, inner[S]]{},
timeout: timeout,
}
return c
}
type Cache[T comparable, S any] struct {
m utils.Map[T, inner[S]]
timeout time.Duration
}
type inner[S any] struct {
t time.Time
s S
}
func (c *Cache[T, S]) Set(key T, value S) {
c.m.Store(key, inner[S]{t: time.Now(), s: value})
}
func (c *Cache[T, S]) Get(key T) (S, bool) {
c.m.Range(func(key T, value inner[S]) bool {
if time.Since(value.t) > c.timeout {
log.Debugf("delete old cache: %v", key)
c.m.Delete(key)
}
return true
})
v, ok := c.m.Load(key)
if !ok {
return getZero[S](), ok
}
return v.s, ok
}
func getZero[T any]() T {
var result T
return result
}


@@ -1,16 +1,20 @@
package pkg
type Torrent interface {
Name() string
Progress() int
Name() (string, error)
Progress() (int, error)
Stop() error
Start() error
Remove() error
Save() string
//Save() string
Exists() bool
SeedRatio() (float64, error)
GetHash() string
//Reload() error
}
type Downloader interface {
GetAll() ([]Torrent, error)
Download(link, dir string) (Torrent, error)
}
type Storage interface {
}

266
pkg/gemini/gemini.go Normal file

@@ -0,0 +1,266 @@
package gemini
import (
"bytes"
"context"
"encoding/json"
"fmt"
"polaris/log"
"strings"
"github.com/google/generative-ai-go/genai"
"google.golang.org/api/option"
)
func NewClient(apiKey, modelName string) (*Client, error) {
ctx := context.Background()
client, err := genai.NewClient(ctx, option.WithAPIKey(apiKey))
if err != nil {
return nil, err
}
return &Client{apiKey: apiKey, modelName: modelName, c: client}, nil
}
type Client struct {
apiKey string
modelName string
c *genai.Client
}
type TvInfo struct {
TitleEnglish string `json:"title_english"`
TitleChinses string `json:"title_chinese"`
Season int `json:"season"`
StartEpisode int `json:"start_episode"`
EndEpisode int `json:"end_episode"`
Resolution string `json:"resolution"`
Subtitle string `json:"subtitle"`
ReleaseGroup string `json:"release_group"`
Year int `json:"year"`
AudioLanguage string `json:"audio_language"`
IsCompleteSeason bool `json:"is_complete_season"`
}
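// ParseTvInfo asks the Gemini model to extract structured TV-series info from a raw
// torrent or file name, constraining the output with a JSON response schema.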
func (c *Client) ParseTvInfo(q string) (*TvInfo, error) {
log.Info(q)
ctx := context.Background()
model := c.c.GenerativeModel(c.modelName)
model.ResponseMIMEType = "application/json"
model.ResponseSchema = &genai.Schema{
Type: genai.TypeObject,
Properties: map[string]*genai.Schema{
"title_english": {Type: genai.TypeString},
"title_chinese": {Type: genai.TypeString},
"season": {Type: genai.TypeInteger, Description: "season number"},
"start_episode": {Type: genai.TypeInteger},
"end_episode": {Type: genai.TypeInteger},
//"episodes": {Type: genai.TypeString},
"resolution": {Type: genai.TypeString},
"subtitle": {Type: genai.TypeString},
"release_group": {Type: genai.TypeString},
"year": {Type: genai.TypeInteger},
"audio_language": {Type: genai.TypeString},
"is_complete_season": {Type: genai.TypeBoolean},
},
Required: []string{"title_english", "title_chinese", "season", "start_episode", "resolution"},
}
resp, err := model.GenerateContent(ctx, genai.Text(q))
if err != nil {
return nil, err
}
for _, part := range resp.Candidates[0].Content.Parts {
if txt, ok := part.(genai.Text); ok {
var info TvInfo
if err := json.Unmarshal([]byte(txt), &info); err != nil {
return nil, err
}
return &info, nil
}
}
return nil, fmt.Errorf("not found")
}
type MovieInfo struct {
TitleEnglish string `json:"title_english"`
TitleChinses string `json:"title_chinese"`
Resolution string `json:"resolution"`
Subtitle string `json:"subtitle"`
ReleaseGroup string `json:"release_group"`
Year int `json:"year"`
AudioLanguage string `json:"audio_language"`
}
func (c *Client) ParseMovieInfo(q string) (*MovieInfo, error) {
log.Info(q)
ctx := context.Background()
model := c.c.GenerativeModel(c.modelName)
model.ResponseMIMEType = "application/json"
model.ResponseSchema = &genai.Schema{
Type: genai.TypeObject,
Properties: map[string]*genai.Schema{
"title_english": {Type: genai.TypeString},
"title_chinese": {Type: genai.TypeString},
"resolution": {Type: genai.TypeString},
"subtitle": {Type: genai.TypeString},
"release_group": {Type: genai.TypeString},
"year": {Type: genai.TypeInteger},
"audio_language": {Type: genai.TypeString},
},
Required: []string{"title_english", "title_chinese", "resolution"},
}
resp, err := model.GenerateContent(ctx, genai.Text(q))
if err != nil {
return nil, err
}
for _, part := range resp.Candidates[0].Content.Parts {
if txt, ok := part.(genai.Text); ok {
var info MovieInfo
if err := json.Unmarshal([]byte(txt), &info); err != nil {
return nil, err
}
return &info, nil
}
}
return nil, fmt.Errorf("not found")
}
func (c *Client) isTvSeries(q string) (bool, error) {
ctx := context.Background()
model := c.c.GenerativeModel(c.modelName)
model.ResponseMIMEType = "application/json"
model.ResponseSchema = &genai.Schema{
Type: genai.TypeBoolean, Nullable: true, Description: "whether the input text implies a tv series",
}
resp, err := model.GenerateContent(ctx, genai.Text(q))
if err != nil {
return false, err
}
for _, part := range resp.Candidates[0].Content.Parts {
if txt, ok := part.(genai.Text); ok {
return strings.ToLower(string(txt)) == "true", nil
}
}
return false, fmt.Errorf("no text part in model response")
}
func (c *Client) ImpliesSameTvOrMovie(torrentName, mediaName string) bool {
ctx := context.Background()
model := c.c.GenerativeModel(c.modelName)
model.ResponseMIMEType = "application/json"
model.ResponseSchema = &genai.Schema{
Type: genai.TypeBoolean, Nullable: true,
}
q := fmt.Sprintf("whether this file name \"%s\" implies the same TV series or movie with name \"%s\"?", torrentName, mediaName)
resp, err := model.GenerateContent(ctx, genai.Text(q))
if err != nil {
return false
}
for _, part := range resp.Candidates[0].Content.Parts {
if txt, ok := part.(genai.Text); ok {
return strings.ToLower(string(txt)) == "true"
}
}
return false
}
func (c *Client) FilterTvOrMovies(resourcesNames []string, titles ...string) ([]string, error) {
ctx := context.Background()
model := c.c.GenerativeModel(c.modelName)
model.ResponseMIMEType = "application/json"
model.ResponseSchema = &genai.Schema{
Type: genai.TypeArray,
Items: &genai.Schema{Type: genai.TypeString},
}
for i, s := range titles {
titles[i] = "\"" + s + "\""
}
p := &bytes.Buffer{}
p.WriteString(`from the following list of file names, list all of which imply the same TV series or movie named `)
p.WriteString(strings.Join(titles, " or "))
p.WriteString(":\n")
for _, r := range resourcesNames {
p.WriteString(" * ")
p.WriteString(r)
p.WriteString("\n")
}
log.Debugf("FilterTvOrMovies prompt is %s", p.String())
resp, err := model.GenerateContent(ctx, genai.Text(p.String()))
if err != nil {
return nil, err
}
for _, part := range resp.Candidates[0].Content.Parts {
if txt, ok := part.(genai.Text); ok {
var s []string
if err := json.Unmarshal([]byte(txt), &s); err != nil {
return nil, err
}
return s, nil
}
}
return nil, fmt.Errorf("nothing found")
}
func (c *Client) FilterMovies(resourcesNames []string, year int, titles ...string) ([]string, error) {
ctx := context.Background()
model := c.c.GenerativeModel(c.modelName)
model.ResponseMIMEType = "application/json"
model.ResponseSchema = &genai.Schema{
Type: genai.TypeArray,
Items: &genai.Schema{Type: genai.TypeString},
}
for i, s := range titles {
titles[i] = "\"" + s + "\""
}
p := &bytes.Buffer{}
p.WriteString(fmt.Sprintf("the following list of file names, list all of which match the following criteria: 1. Is a movie 2. Released in year %d 3. Has a name of ", year))
p.WriteString(strings.Join(titles, " or "))
p.WriteString(":\n")
for _, r := range resourcesNames {
p.WriteString(" * ")
p.WriteString(r)
p.WriteString("\n")
}
log.Debugf("FilterTvOrMovies prompt is %s", p.String())
resp, err := model.GenerateContent(ctx, genai.Text(p.String()))
if err != nil {
return nil, err
}
for _, part := range resp.Candidates[0].Content.Parts {
if txt, ok := part.(genai.Text); ok {
var s []string
if err := json.Unmarshal([]byte(txt), &s); err != nil {
return nil, err
}
return s, nil
}
}
return nil, fmt.Errorf("nothing found")
}


@@ -0,0 +1,8 @@
package gemini
import (
"testing"
)
func Test_any1(t *testing.T) {
}


@@ -0,0 +1,32 @@
# Include any files or directories that you don't want to be copied to your
# container here (e.g., local build artifacts, temporary files, etc.).
#
# For more help, visit the .dockerignore file reference guide at
# https://docs.docker.com/engine/reference/builder/#dockerignore-file
**/.DS_Store
**/.classpath
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/bin
**/charts
**/docker-compose*
**/compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/obj
**/secrets.dev.yaml
**/values.dev.yaml
LICENSE
README.md

1
pkg/go-qbittorrent/.gitignore vendored Normal file

@@ -0,0 +1 @@
./main.go


@@ -0,0 +1,19 @@
go-qbittorrent
==================
Golang wrapper for the qBittorrent Web API (v4.1 and above), forked from [superturkey650/go-qbittorrent](https://github.com/superturkey650/go-qbittorrent), which only supports the older API version.
This wrapper is based on the methods described in [qBittorrent's official Web API documentation](https://github.com/qbittorrent/qBittorrent/wiki/WebUI-API-(qBittorrent-4.1)).
Some methods are only supported by the latest qBittorrent releases (v4.5 at the time of writing), so it is best to keep your client up to date.
An example can be found in main.go; a short usage sketch also follows the install command below.
Installation
============
The best way to install is with go get:
$ go get github.com/simon-ding/go-qbittorrent/qbt
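A minimal usage sketch, assuming the fork's API as exercised in main.go below (NewClient, Login with LoginOptions, Torrents with TorrentsOptions); the server URL, credentials, and the Name field on the returned torrents are illustrative assumptions, not part of this diff:

package main

import (
	"fmt"
	"log"

	"github.com/simon-ding/go-qbittorrent/qbt"
)

func main() {
	// Placeholder Web UI address and credentials.
	client := qbt.NewClient("http://localhost:8080")
	if err := client.Login(qbt.LoginOptions{Username: "admin", Password: "adminadmin"}); err != nil {
		log.Fatalf("login failed: %v", err)
	}

	// An empty TorrentsOptions applies no filter, so all torrents are returned.
	torrents, err := client.Torrents(qbt.TorrentsOptions{})
	if err != nil {
		log.Fatalf("list torrents failed: %v", err)
	}
	for _, t := range torrents {
		fmt.Println(t.Name) // Name field as defined in models.go in this diff
	}
}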

260
pkg/go-qbittorrent/docs.txt Normal file

@@ -0,0 +1,260 @@
PACKAGE DOCUMENTATION
package qbt
import "/Users/me/Repos/go/src/go-qbittorrent/qbt"
TYPES
type BasicTorrent struct {
AddedOn int `json:"added_on"`
Category string `json:"category"`
CompletionOn int64 `json:"completion_on"`
Dlspeed int `json:"dlspeed"`
Eta int `json:"eta"`
ForceStart bool `json:"force_start"`
Hash string `json:"hash"`
Name string `json:"name"`
NumComplete int `json:"num_complete"`
NumIncomplete int `json:"num_incomplete"`
NumLeechs int `json:"num_leechs"`
NumSeeds int `json:"num_seeds"`
Priority int `json:"priority"`
Progress int `json:"progress"`
Ratio int `json:"ratio"`
SavePath string `json:"save_path"`
SeqDl bool `json:"seq_dl"`
Size int `json:"size"`
State string `json:"state"`
SuperSeeding bool `json:"super_seeding"`
Upspeed int `json:"upspeed"`
}
BasicTorrent holds a basic torrent object from qbittorrent
type Client struct {
URL string
Authenticated bool
Session string //replace with session type
Jar http.CookieJar
// contains filtered or unexported fields
}
Client creates a connection to qbittorrent and performs requests
func NewClient(url string) *Client
NewClient creates a new client connection to qbittorrent
func (c *Client) AddTrackers(infoHash string, trackers string) (*http.Response, error)
AddTrackers adds trackers to a specific torrent
func (c *Client) DecreasePriority(infoHashList []string) (*http.Response, error)
DecreasePriority decreases the priority of a list of torrents
func (c *Client) DeletePermanently(infoHashList []string) (*http.Response, error)
DeletePermanently deletes all files for a list of torrents
func (c *Client) DeleteTemp(infoHashList []string) (*http.Response, error)
DeleteTemp deletes the temporary files for a list of torrents
func (c *Client) DownloadFromFile(file string, options map[string]string) (*http.Response, error)
DownloadFromFile downloads a torrent from a file
func (c *Client) DownloadFromLink(link string, options map[string]string) (*http.Response, error)
DownloadFromLink starts downloading a torrent from a link
func (c *Client) ForceStart(infoHashList []string, value bool) (*http.Response, error)
ForceStart force starts a list of torrents
func (c *Client) GetAlternativeSpeedStatus() (status bool, err error)
GetAlternativeSpeedStatus gets the alternative speed status of your
qbittorrent client
func (c *Client) GetGlobalDownloadLimit() (limit int, err error)
GetGlobalDownloadLimit gets the global download limit of your
qbittorrent client
func (c *Client) GetGlobalUploadLimit() (limit int, err error)
GetGlobalUploadLimit gets the global upload limit of your qbittorrent
client
func (c *Client) GetTorrentDownloadLimit(infoHashList []string) (limits map[string]string, err error)
GetTorrentDownloadLimit gets the download limit for a list of torrents
func (c *Client) GetTorrentUploadLimit(infoHashList []string) (limits map[string]string, err error)
GetTorrentUploadLimit gets the upload limit for a list of torrents
func (c *Client) IncreasePriority(infoHashList []string) (*http.Response, error)
IncreasePriority increases the priority of a list of torrents
func (c *Client) Login(username string, password string) (loggedIn bool, err error)
Login logs you in to the qbittorrent client
func (c *Client) Logout() (loggedOut bool, err error)
Logout logs you out of the qbittorrent client
func (c *Client) Pause(infoHash string) (*http.Response, error)
Pause pauses a specific torrent
func (c *Client) PauseAll() (*http.Response, error)
PauseAll pauses all torrents
func (c *Client) PauseMultiple(infoHashList []string) (*http.Response, error)
PauseMultiple pauses a list of torrents
func (c *Client) Recheck(infoHashList []string) (*http.Response, error)
Recheck rechecks a list of torrents
func (c *Client) Resume(infoHash string) (*http.Response, error)
Resume resumes a specific torrent
func (c *Client) ResumeAll(infoHashList []string) (*http.Response, error)
ResumeAll resumes all torrents
func (c *Client) ResumeMultiple(infoHashList []string) (*http.Response, error)
ResumeMultiple resumes a list of torrents
func (c *Client) SetCategory(infoHashList []string, category string) (*http.Response, error)
SetCategory sets the category for a list of torrents
func (c *Client) SetFilePriority(infoHash string, fileID string, priority string) (*http.Response, error)
SetFilePriority sets the priority for a specific torrent file
func (c *Client) SetGlobalDownloadLimit(limit string) (*http.Response, error)
SetGlobalDownloadLimit sets the global download limit of your
qbittorrent client
func (c *Client) SetGlobalUploadLimit(limit string) (*http.Response, error)
SetGlobalUploadLimit sets the global upload limit of your qbittorrent
client
func (c *Client) SetLabel(infoHashList []string, label string) (*http.Response, error)
SetLabel sets the labels for a list of torrents
func (c *Client) SetMaxPriority(infoHashList []string) (*http.Response, error)
SetMaxPriority sets the max priority for a list of torrents
func (c *Client) SetMinPriority(infoHashList []string) (*http.Response, error)
SetMinPriority sets the min priority for a list of torrents
func (c *Client) SetPreferences(params map[string]string) (*http.Response, error)
SetPreferences sets the preferences of your qbittorrent client
func (c *Client) SetTorrentDownloadLimit(infoHashList []string, limit string) (*http.Response, error)
SetTorrentDownloadLimit sets the download limit for a list of torrents
func (c *Client) SetTorrentUploadLimit(infoHashList []string, limit string) (*http.Response, error)
SetTorrentUploadLimit sets the upload limit of a list of torrents
func (c *Client) Shutdown() (shuttingDown bool, err error)
Shutdown shuts down the qbittorrent client
func (c *Client) Sync(rid string) (Sync, error)
Sync syncs main data of qbittorrent
func (c *Client) ToggleAlternativeSpeed() (*http.Response, error)
ToggleAlternativeSpeed toggles the alternative speed of your qbittorrent
client
func (c *Client) ToggleFirstLastPiecePriority(infoHashList []string) (*http.Response, error)
ToggleFirstLastPiecePriority toggles first last piece priority of a list
of torrents
func (c *Client) ToggleSequentialDownload(infoHashList []string) (*http.Response, error)
ToggleSequentialDownload toggles the download sequence of a list of
torrents
func (c *Client) Torrent(infoHash string) (Torrent, error)
Torrent gets a specific torrent
func (c *Client) TorrentFiles(infoHash string) ([]TorrentFile, error)
TorrentFiles gets the files of a specific torrent
func (c *Client) TorrentTrackers(infoHash string) ([]Tracker, error)
TorrentTrackers gets all trackers for a specific torrent
func (c *Client) TorrentWebSeeds(infoHash string) ([]WebSeed, error)
TorrentWebSeeds gets the web seeds for a specific torrent
func (c *Client) Torrents(filters map[string]string) (torrentList []BasicTorrent, err error)
Torrents gets a list of all torrents in qbittorrent matching your filter
type Sync struct {
Categories []string `json:"categories"`
FullUpdate bool `json:"full_update"`
Rid int `json:"rid"`
ServerState struct {
ConnectionStatus string `json:"connection_status"`
DhtNodes int `json:"dht_nodes"`
DlInfoData int `json:"dl_info_data"`
DlInfoSpeed int `json:"dl_info_speed"`
DlRateLimit int `json:"dl_rate_limit"`
Queueing bool `json:"queueing"`
RefreshInterval int `json:"refresh_interval"`
UpInfoData int `json:"up_info_data"`
UpInfoSpeed int `json:"up_info_speed"`
UpRateLimit int `json:"up_rate_limit"`
UseAltSpeedLimits bool `json:"use_alt_speed_limits"`
} `json:"server_state"`
Torrents map[string]Torrent `json:"torrents"`
}
Sync holds the sync response struct
type Torrent struct {
AdditionDate int `json:"addition_date"`
Comment string `json:"comment"`
CompletionDate int `json:"completion_date"`
CreatedBy string `json:"created_by"`
CreationDate int `json:"creation_date"`
DlLimit int `json:"dl_limit"`
DlSpeed int `json:"dl_speed"`
DlSpeedAvg int `json:"dl_speed_avg"`
Eta int `json:"eta"`
LastSeen int `json:"last_seen"`
NbConnections int `json:"nb_connections"`
NbConnectionsLimit int `json:"nb_connections_limit"`
Peers int `json:"peers"`
PeersTotal int `json:"peers_total"`
PieceSize int `json:"piece_size"`
PiecesHave int `json:"pieces_have"`
PiecesNum int `json:"pieces_num"`
Reannounce int `json:"reannounce"`
SavePath string `json:"save_path"`
SeedingTime int `json:"seeding_time"`
Seeds int `json:"seeds"`
SeedsTotal int `json:"seeds_total"`
ShareRatio float64 `json:"share_ratio"`
TimeElapsed int `json:"time_elapsed"`
TotalDownloaded int `json:"total_downloaded"`
TotalDownloadedSession int `json:"total_downloaded_session"`
TotalSize int `json:"total_size"`
TotalUploaded int `json:"total_uploaded"`
TotalUploadedSession int `json:"total_uploaded_session"`
TotalWasted int `json:"total_wasted"`
UpLimit int `json:"up_limit"`
UpSpeed int `json:"up_speed"`
UpSpeedAvg int `json:"up_speed_avg"`
}
Torrent holds a torrent object from qbittorrent
type TorrentFile struct {
IsSeed bool `json:"is_seed"`
Name string `json:"name"`
Priority int `json:"priority"`
Progress int `json:"progress"`
Size int `json:"size"`
}
TorrentFile holds a torrent file object from qbittorrent
type Tracker struct {
Msg string `json:"msg"`
NumPeers int `json:"num_peers"`
Status string `json:"status"`
URL string `json:"url"`
}
Tracker holds a tracker object from qbittorrent
type WebSeed struct {
URL string `json:"url"`
}
WebSeed holds a webseed object from qbittorrent


@@ -0,0 +1,66 @@
package qbittorrent
import (
"fmt"
"polaris/pkg/go-qbittorrent/qbt"
"github.com/davecgh/go-spew/spew"
)
func main() {
// connect to qbittorrent client
qb := qbt.NewClient("http://localhost:8181")
// login to the client
loginOpts := qbt.LoginOptions{
Username: "username",
Password: "password",
}
err := qb.Login(loginOpts)
if err != nil {
fmt.Println(err)
}
// ********************
// DOWNLOAD A TORRENT *
// ********************
// we're not using any filters, so the download options struct is empty
downloadOpts := qbt.DownloadOptions{}
// set the path to the file
//path := "/Users/me/Downloads/Source.Code.2011.1080p.BluRay.H264.AAC-RARBG-[rarbg.to].torrent"
links := []string{"http://rarbg.to/download.php?id=9buc5hp&h=d73&f=Courage.the.Cowardly.Dog.1999.S01.1080p.AMZN.WEBRip.DD2.0.x264-NOGRP%5Brartv%5D-[rarbg.to].torrent"}
// download the torrent using the file
// the wrapper will handle opening and closing the file for you
err = qb.DownloadLinks(links, downloadOpts)
if err != nil {
fmt.Println("[-] Download torrent from link")
fmt.Println(err)
} else {
fmt.Println("[+] Download torrent from link")
}
// ******************
// GET ALL TORRENTS *
// ******************
torrentsOpts := qbt.TorrentsOptions{}
filter := "inactive"
sort := "name"
hash := "d739f78a12b241ba62719b1064701ffbb45498a8"
torrentsOpts.Filter = &filter
torrentsOpts.Sort = &sort
torrentsOpts.Hashes = []string{hash}
torrents, err := qb.Torrents(torrentsOpts)
if err != nil {
fmt.Println("[-] Get torrent list")
fmt.Println(err)
} else {
fmt.Println("[+] Get torrent list")
if len(torrents) > 0 {
spew.Dump(torrents[0])
} else {
fmt.Println("No torrents found")
}
}
}


File diff suppressed because it is too large.


@@ -0,0 +1,387 @@
package qbt
// BasicTorrent holds a basic torrent object from qbittorrent
type BasicTorrent struct {
Category string `json:"category"`
CompletionOn int64 `json:"completion_on"`
Dlspeed int `json:"dlspeed"`
Eta int `json:"eta"`
ForceStart bool `json:"force_start"`
Hash string `json:"hash"`
Name string `json:"name"`
NumComplete int `json:"num_complete"`
NumIncomplete int `json:"num_incomplete"`
NumLeechs int `json:"num_leechs"`
NumSeeds int `json:"num_seeds"`
Priority int `json:"priority"`
Progress int `json:"progress"`
Ratio int `json:"ratio"`
SavePath string `json:"save_path"`
SeqDl bool `json:"seq_dl"`
Size int `json:"size"`
State string `json:"state"`
SuperSeeding bool `json:"super_seeding"`
Upspeed int `json:"upspeed"`
FirstLastPiecePriority bool `json:"f_l_piece_prio"`
}
// Torrent holds a torrent object from qbittorrent
// with more information than BasicTorrent
type Torrent struct {
AdditionDate int `json:"addition_date"`
Comment string `json:"comment"`
CompletionDate int `json:"completion_date"`
CreatedBy string `json:"created_by"`
CreationDate int `json:"creation_date"`
DlLimit int `json:"dl_limit"`
DlSpeed int `json:"dl_speed"`
DlSpeedAvg int `json:"dl_speed_avg"`
Eta int `json:"eta"`
LastSeen int `json:"last_seen"`
NbConnections int `json:"nb_connections"`
NbConnectionsLimit int `json:"nb_connections_limit"`
Peers int `json:"peers"`
PeersTotal int `json:"peers_total"`
PieceSize int `json:"piece_size"`
PiecesHave int `json:"pieces_have"`
PiecesNum int `json:"pieces_num"`
Reannounce int `json:"reannounce"`
SavePath string `json:"save_path"`
SeedingTime int `json:"seeding_time"`
Seeds int `json:"seeds"`
SeedsTotal int `json:"seeds_total"`
ShareRatio float64 `json:"share_ratio"`
TimeElapsed int `json:"time_elapsed"`
TotalDl int `json:"total_downloaded"`
TotalDlSession int `json:"total_downloaded_session"`
TotalSize int `json:"total_size"`
TotalUl int `json:"total_uploaded"`
TotalUlSession int `json:"total_uploaded_session"`
TotalWasted int `json:"total_wasted"`
UpLimit int `json:"up_limit"`
UpSpeed int `json:"up_speed"`
UpSpeedAvg int `json:"up_speed_avg"`
}
type TorrentInfo struct {
AddedOn int64 `json:"added_on"`
AmountLeft int64 `json:"amount_left"`
AutoTmm bool `json:"auto_tmm"`
Availability int64 `json:"availability"`
Category string `json:"category"`
Completed int64 `json:"completed"`
CompletionOn int64 `json:"completion_on"`
ContentPath string `json:"content_path"`
DlLimit int64 `json:"dl_limit"`
Dlspeed int64 `json:"dlspeed"`
Downloaded int64 `json:"downloaded"`
DownloadedSession int64 `json:"downloaded_session"`
Eta int64 `json:"eta"`
FLPiecePrio bool `json:"f_l_piece_prio"`
ForceStart bool `json:"force_start"`
Hash string `json:"hash"`
LastActivity int64 `json:"last_activity"`
MagnetURI string `json:"magnet_uri"`
MaxRatio float64 `json:"max_ratio"`
MaxSeedingTime int64 `json:"max_seeding_time"`
Name string `json:"name"`
NumComplete int64 `json:"num_complete"`
NumIncomplete int64 `json:"num_incomplete"`
NumLeechs int64 `json:"num_leechs"`
NumSeeds int64 `json:"num_seeds"`
Priority int64 `json:"priority"`
Progress float64 `json:"progress"`
Ratio float64 `json:"ratio"`
RatioLimit int64 `json:"ratio_limit"`
SavePath string `json:"save_path"`
SeedingTimeLimit int64 `json:"seeding_time_limit"`
SeenComplete int64 `json:"seen_complete"`
SeqDl bool `json:"seq_dl"`
Size int64 `json:"size"`
State string `json:"state"`
SuperSeeding bool `json:"super_seeding"`
Tags string `json:"tags"`
TimeActive int64 `json:"time_active"`
TotalSize int64 `json:"total_size"`
Tracker string `json:"tracker"`
TrackersCount int64 `json:"trackers_count"`
UpLimit int64 `json:"up_limit"`
Uploaded int64 `json:"uploaded"`
UploadedSession int64 `json:"uploaded_session"`
Upspeed int64 `json:"upspeed"`
}
// Tracker holds a tracker object from qbittorrent
type Tracker struct {
Msg string `json:"msg"`
NumPeers int `json:"num_peers"`
NumSeeds int `json:"num_seeds"`
NumLeeches int `json:"num_leeches"`
NumDownloaded int `json:"num_downloaded"`
Tier int `json:"tier"`
Status int `json:"status"`
URL string `json:"url"`
}
// WebSeed holds a webseed object from qbittorrent
type WebSeed struct {
URL string `json:"url"`
}
// TorrentFile holds a torrent file object from qbittorrent
type TorrentFile struct {
Index int `json:"index"`
IsSeed bool `json:"is_seed"`
Name string `json:"name"`
Availability float32 `json:"availability"`
Priority int `json:"priority"`
Progress int `json:"progress"`
Size int `json:"size"`
PieceRange []int `json:"piece_range"`
}
// Sync holds the sync response struct which contains
// the server state and a map of infohashes to Torrents
type Sync struct {
Categories []string `json:"categories"`
FullUpdate bool `json:"full_update"`
Rid int `json:"rid"`
ServerState struct {
ConnectionStatus string `json:"connection_status"`
DhtNodes int `json:"dht_nodes"`
DlInfoData int `json:"dl_info_data"`
DlInfoSpeed int `json:"dl_info_speed"`
DlRateLimit int `json:"dl_rate_limit"`
Queueing bool `json:"queueing"`
RefreshInterval int `json:"refresh_interval"`
UpInfoData int `json:"up_info_data"`
UpInfoSpeed int `json:"up_info_speed"`
UpRateLimit int `json:"up_rate_limit"`
UseAltSpeedLimits bool `json:"use_alt_speed_limits"`
} `json:"server_state"`
Torrents map[string]Torrent `json:"torrents"`
}
type BuildInfo struct {
QTVersion string `json:"qt"`
LibtorrentVersion string `json:"libtorrent"`
BoostVersion string `json:"boost"`
OpenSSLVersion string `json:"openssl"`
AppBitness string `json:"bitness"`
}
type Preferences struct {
Locale string `json:"locale"`
CreateSubfolderEnabled bool `json:"create_subfolder_enabled"`
StartPausedEnabled bool `json:"start_paused_enabled"`
AutoDeleteMode int `json:"auto_delete_mode"`
PreallocateAll bool `json:"preallocate_all"`
IncompleteFilesExt bool `json:"incomplete_files_ext"`
AutoTMMEnabled bool `json:"auto_tmm_enabled"`
TorrentChangedTMMEnabled bool `json:"torrent_changed_tmm_enabled"`
SavePathChangedTMMEnabled bool `json:"save_path_changed_tmm_enabled"`
CategoryChangedTMMEnabled bool `json:"category_changed_tmm_enabled"`
SavePath string `json:"save_path"`
TempPathEnabled bool `json:"temp_path_enabled"`
TempPath string `json:"temp_path"`
ScanDirs map[string]interface{} `json:"scan_dirs"`
ExportDir string `json:"export_dir"`
ExportDirFin string `json:"export_dir_fin"`
MailNotificationEnabled string `json:"mail_notification_enabled"`
MailNotificationSender string `json:"mail_notification_sender"`
MailNotificationEmail string `json:"mail_notification_email"`
MailNotificationSMPTP string `json:"mail_notification_smtp"`
MailNotificationSSLEnabled bool `json:"mail_notification_ssl_enabled"`
MailNotificationAuthEnabled bool `json:"mail_notification_auth_enabled"`
MailNotificationUsername string `json:"mail_notification_username"`
MailNotificationPassword string `json:"mail_notification_password"`
AutorunEnabled bool `json:"autorun_enabled"`
AutorunProgram string `json:"autorun_program"`
QueueingEnabled bool `json:"queueing_enabled"`
MaxActiveDls int `json:"max_active_downloads"`
MaxActiveTorrents int `json:"max_active_torrents"`
MaxActiveUls int `json:"max_active_uploads"`
DontCountSlowTorrents bool `json:"dont_count_slow_torrents"`
SlowTorrentDlRateThreshold int `json:"slow_torrent_dl_rate_threshold"`
SlowTorrentUlRateThreshold int `json:"slow_torrent_ul_rate_threshold"`
SlowTorrentInactiveTimer int `json:"slow_torrent_inactive_timer"`
MaxRatioEnabled bool `json:"max_ratio_enabled"`
MaxRatio float64 `json:"max_ratio"`
MaxRatioAct bool `json:"max_ratio_act"`
ListenPort int `json:"listen_port"`
UPNP bool `json:"upnp"`
RandomPort bool `json:"random_port"`
DlLimit int `json:"dl_limit"`
UlLimit int `json:"up_limit"`
MaxConnections int `json:"max_connec"`
MaxConnectionsPerTorrent int `json:"max_connec_per_torrent"`
MaxUls int `json:"max_uploads"`
MaxUlsPerTorrent int `json:"max_uploads_per_torrent"`
UTPEnabled bool `json:"enable_utp"`
LimitUTPRate bool `json:"limit_utp_rate"`
LimitTCPOverhead bool `json:"limit_tcp_overhead"`
LimitLANPeers bool `json:"limit_lan_peers"`
AltDlLimit int `json:"alt_dl_limit"`
AltUlLimit int `json:"alt_up_limit"`
SchedulerEnabled bool `json:"scheduler_enabled"`
ScheduleFromHour int `json:"schedule_from_hour"`
ScheduleFromMin int `json:"schedule_from_min"`
ScheduleToHour int `json:"schedule_to_hour"`
ScheduleToMin int `json:"schedule_to_min"`
SchedulerDays int `json:"scheduler_days"`
DHTEnabled bool `json:"dht"`
DHTSameAsBT bool `json:"dhtSameAsBT"`
DHTPort int `json:"dht_port"`
PexEnabled bool `json:"pex"`
LSDEnabled bool `json:"lsd"`
Encryption int `json:"encryption"`
AnonymousMode bool `json:"anonymous_mode"`
ProxyType int `json:"proxy_type"`
ProxyIP string `json:"proxy_ip"`
ProxyPort int `json:"proxy_port"`
ProxyPeerConnections bool `json:"proxy_peer_connections"`
ForceProxy bool `json:"force_proxy"`
ProxyAuthEnabled bool `json:"proxy_auth_enabled"`
ProxyUsername string `json:"proxy_username"`
ProxyPassword string `json:"proxy_password"`
IPFilterEnabled bool `json:"ip_filter_enabled"`
IPFilterPath string `json:"ip_filter_path"`
IPFilterTrackers string `json:"ip_filter_trackers"`
WebUIDomainList string `json:"web_ui_domain_list"`
WebUIAddress string `json:"web_ui_address"`
WebUIPort int `json:"web_ui_port"`
WebUIUPNPEnabled bool `json:"web_ui_upnp"`
WebUIUsername string `json:"web_ui_username"`
WebUIPassword string `json:"web_ui_password"`
WebUICSRFProtectionEnabled bool `json:"web_ui_csrf_protection_enabled"`
WebUIClickjackingProtectionEnabled bool `json:"web_ui_clickjacking_protection_enabled"`
BypassLocalAuth bool `json:"bypass_local_auth"`
BypassAuthSubnetWhitelistEnabled bool `json:"bypass_auth_subnet_whitelist_enabled"`
BypassAuthSubnetWhitelist string `json:"bypass_auth_subnet_whitelist"`
AltWebUIEnabled bool `json:"alternative_webui_enabled"`
AltWebUIPath string `json:"alternative_webui_path"`
UseHTTPS bool `json:"use_https"`
SSLKey string `json:"ssl_key"`
SSLCert string `json:"ssl_cert"`
DynDNSEnabled bool `json:"dyndns_enabled"`
DynDNSService int `json:"dyndns_service"`
DynDNSUsername string `json:"dyndns_username"`
DynDNSPassword string `json:"dyndns_password"`
DynDNSDomain string `json:"dyndns_domain"`
RSSRefreshInterval int `json:"rss_refresh_interval"`
RSSMaxArtPerFeed int `json:"rss_max_articles_per_feed"`
RSSProcessingEnabled bool `json:"rss_processing_enabled"`
RSSAutoDlEnabled bool `json:"rss_auto_downloading_enabled"`
}
// Log
type Log struct {
ID int `json:"id"`
Message string `json:"message"`
Timestamp int `json:"timestamp"`
Type int `json:"type"`
}
// PeerLog
type PeerLog struct {
ID int `json:"id"`
IP string `json:"ip"`
Blocked bool `json:"blocked"`
Timestamp int `json:"timestamp"`
Reason string `json:"reason"`
}
// Info
type Info struct {
ConnectionStatus string `json:"connection_status"`
DHTNodes int `json:"dht_nodes"`
DlInfoData int `json:"dl_info_data"`
DlInfoSpeed int `json:"dl_info_speed"`
DlRateLimit int `json:"dl_rate_limit"`
UlInfoData int `json:"up_info_data"`
UlInfoSpeed int `json:"up_info_speed"`
UlRateLimit int `json:"up_rate_limit"`
Queueing bool `json:"queueing"`
UseAltSpeedLimits bool `json:"use_alt_speed_limits"`
RefreshInterval int `json:"refresh_interval"`
}
type TorrentsOptions struct {
Filter *string // all, downloading, completed, paused, active, inactive => optional
Category *string // => optional
Sort *string // => optional
Reverse *bool // => optional
Limit *int // => optional (no negatives)
Offset *int // => optional (negatives allowed)
Hashes []string // separated by | => optional
}
// Category of torrent
type Category struct {
Name string `json:"name"`
SavePath string `json:"savePath"`
}
// Categories mapping
type Categories struct {
Category map[string]Category
}
// LoginOptions contains all options for /login endpoint
type LoginOptions struct {
Username string
Password string
}
// AddTrackersOptions contains all options for /addTrackers endpoint
type AddTrackersOptions struct {
Hash string
Trackers []string
}
// EditTrackerOptions contains all options for /editTracker endpoint
type EditTrackerOptions struct {
Hash string
OrigURL string
NewURL string
}
// RemoveTrackersOptions contains all options for /removeTrackers endpoint
type RemoveTrackersOptions struct {
Hash string
Trackers []string
}
type DownloadOptions struct {
Savepath *string
Cookie *string
Category *string
SkipHashChecking *bool
Paused *bool
RootFolder *bool
Rename *string
UploadSpeedLimit *int
DownloadSpeedLimit *int
SequentialDownload *bool
AutomaticTorrentManagement *bool
FirstLastPiecePriority *bool
}
type InfoOptions struct {
Filter *string
Category *string
Sort *string
Reverse *bool
Limit *int
Offset *int
Hashes []string
}
type PriorityValues int
const (
Do_not_download PriorityValues = 0
Normal_priority PriorityValues = 1
High_priority PriorityValues = 6
Maximal_priority PriorityValues = 7
)


@@ -0,0 +1,24 @@
package tools
import (
"fmt"
"io"
"net/http"
"net/http/httputil"
)
// PrintResponse prints the body of a response
func PrintResponse(body io.ReadCloser) {
r, _ := io.ReadAll(body)
fmt.Println("response: " + string(r))
}
// PrintRequest prints a request
func PrintRequest(req *http.Request) error {
r, err := httputil.DumpRequest(req, true)
if err != nil {
return err
}
fmt.Println("request: " + string(r))
return nil
}


@@ -0,0 +1,117 @@
package douban
import (
"fmt"
"net/http"
"polaris/log"
"polaris/pkg/importlist"
"strconv"
"strings"
"github.com/PuerkitoBio/goquery"
)
const ua = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36"
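// ParseDoulist scrapes a Douban doulist page and returns its entries as an import list,
// extracting each item's title and release year.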
func ParseDoulist(doulistUrl string) (*importlist.Response, error) {
if !strings.Contains(doulistUrl, "doulist") {
return nil, fmt.Errorf("not doulist")
}
req, err := http.NewRequest("GET", doulistUrl, nil)
if err != nil {
return nil, err
}
req.Header.Set("User-Agent", ua)
res, err := http.DefaultClient.Do(req)
if err != nil {
return nil, err
}
defer res.Body.Close()
if res.StatusCode != 200 {
return nil, fmt.Errorf("status code error: %d %s", res.StatusCode, res.Status)
}
doc, err := goquery.NewDocumentFromReader(res.Body)
if err != nil {
return nil, err
}
var items []importlist.Item
doc.Find("div[class=doulist-item]").Each(func(i int, selection *goquery.Selection) {
titleDiv := selection.Find("div[class=title]")
link := titleDiv.Find("div>a")
href, ok := link.Attr("href")
if !ok {
return
}
abstract := selection.Find("div[class=abstract]")
lines := strings.Split(abstract.Text(), "\n")
year := 0
for _, l := range lines {
if strings.Contains(l, "年份") {
ppp := strings.Split(l, ":")
if len(ppp) < 2 {
continue
} else {
n := ppp[1]
n1, err := strconv.Atoi(strings.TrimSpace(n))
if err != nil {
log.Errorf("convert year number %s to int error: %v", n, err)
continue
}
year = n1
}
}
}
_, err := parseDetailPage(strings.TrimSpace(href))
if err != nil {
log.Errorf("get detail page: %v", err)
return
}
item := importlist.Item{
Title: strings.TrimSpace(link.Text()),
Year: year,
}
items = append(items, item)
_ = item
//println(link.Text(), href)
})
return &importlist.Response{Items: items}, nil
}
func parseDetailPage(url string) (string, error) {
println(url)
req, err := http.NewRequest("GET", url, nil)
if err != nil {
return "", err
}
req.Header.Set("User-Agent", ua)
res, err := http.DefaultClient.Do(req)
if err != nil {
return "", err
}
defer res.Body.Close()
if res.StatusCode != 200 {
return "", fmt.Errorf("status code error: %d %s", res.StatusCode, res.Status)
}
doc, err := goquery.NewDocumentFromReader(res.Body)
if err != nil {
return "", err
}
doc.Find("div[class='subject clearfix']").Each(func(i int, se *goquery.Selection) {
println(se.Text())
se.Children().Get(1)
imdb := se.Find("div[class='info']").First().Children().Last()
println(imdb.Text())
})
_ = doc
return "", nil
}


@@ -0,0 +1,11 @@
package douban
import (
"polaris/log"
"testing"
)
func TestParseDoulist(t *testing.T) {
r, err := ParseDoulist("https://www.douban.com/doulist/81580/")
log.Info(r, err)
}


@@ -0,0 +1,13 @@
package importlist
type Item struct {
Title string
Year int
ImdbID string
TvdbID string
TmdbID string
}
type Response struct {
Items []Item
}


@@ -0,0 +1,96 @@
package plexwatchlist
import (
"encoding/xml"
"io"
"net/http"
"polaris/pkg/importlist"
"strings"
"github.com/pkg/errors"
)
type Response struct {
XMLName xml.Name `xml:"rss"`
Text string `xml:",chardata"`
Atom string `xml:"atom,attr"`
Media string `xml:"media,attr"`
Version string `xml:"version,attr"`
Channel struct {
Text string `xml:",chardata"`
Title string `xml:"title"`
Link struct {
Text string `xml:",chardata"`
Href string `xml:"href,attr"`
Rel string `xml:"rel,attr"`
Type string `xml:"type,attr"`
} `xml:"link"`
Description string `xml:"description"`
Category string `xml:"category"`
Item []struct {
Text string `xml:",chardata"`
Title string `xml:"title"`
PubDate string `xml:"pubDate"`
Link string `xml:"link"`
Description string `xml:"description"`
Category string `xml:"category"`
Credit []struct {
Text string `xml:",chardata"`
Role string `xml:"role,attr"`
} `xml:"credit"`
Thumbnail struct {
Text string `xml:",chardata"`
URL string `xml:"url,attr"`
} `xml:"thumbnail"`
Keywords string `xml:"keywords"`
Rating struct {
Text string `xml:",chardata"`
Scheme string `xml:"scheme,attr"`
} `xml:"rating"`
Guid struct {
Text string `xml:",chardata"`
IsPermaLink string `xml:"isPermaLink,attr"`
} `xml:"guid"`
} `xml:"item"`
} `xml:"channel"`
}
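// convert maps the Plex watchlist RSS items to importlist items, deriving tvdb/imdb/tmdb
// IDs from each item's GUID (e.g. "tvdb://12345").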
func (r *Response) convert() *importlist.Response {
res := &importlist.Response{}
for _, im := range r.Channel.Item {
item := importlist.Item{
Title: im.Title,
}
id := strings.ToLower(im.Guid.Text)
if strings.HasPrefix(id, "tvdb") {
tvdbid := strings.TrimPrefix(id, "tvdb://")
item.TvdbID = tvdbid
} else if strings.HasPrefix(id, "imdb") {
imdbid := strings.TrimPrefix(id, "imdb://")
item.ImdbID = imdbid
} else if strings.HasPrefix(id, "tmdb") {
tmdbid := strings.TrimPrefix(id, "tmdb://")
item.TmdbID = tmdbid
}
res.Items = append(res.Items, item)
}
return res
}
func ParsePlexWatchlist(url string) (*importlist.Response, error) {
resp, err := http.Get(url)
if err != nil {
return nil, errors.Wrap(err, "http get")
}
defer resp.Body.Close()
data, err := io.ReadAll(resp.Body)
if err != nil {
return nil, errors.Wrap(err, "read data")
}
var rrr Response
err = xml.Unmarshal(data, &rrr)
if err != nil {
return nil, errors.Wrap(err, "xml")
}
return rrr.convert(), nil
}

Some files were not shown because too many files have changed in this diff.