mirror of
https://github.com/stashapp/stash.git
synced 2025-12-18 21:04:37 +03:00
Merge pull request #2175 from stashapp/develop
Merge to master for v0.12.0 release
This commit is contained in:
1
.gitignore
vendored
1
.gitignore
vendored
@@ -30,6 +30,7 @@ ui/v2.5/src/core/generated-*.tsx
|
|||||||
.idea/**/dictionaries
|
.idea/**/dictionaries
|
||||||
.idea/**/shelf
|
.idea/**/shelf
|
||||||
.vscode
|
.vscode
|
||||||
|
.devcontainer
|
||||||
|
|
||||||
# Generated files
|
# Generated files
|
||||||
.idea/**/contentModel.xml
|
.idea/**/contentModel.xml
|
||||||
|
|||||||
14
Makefile
14
Makefile
@@ -165,17 +165,17 @@ pre-ui:
|
|||||||
|
|
||||||
.PHONY: ui
|
.PHONY: ui
|
||||||
ui: pre-build
|
ui: pre-build
|
||||||
$(SET) REACT_APP_DATE="$(BUILD_DATE)" $(SEPARATOR) \
|
$(SET) VITE_APP_DATE="$(BUILD_DATE)" $(SEPARATOR) \
|
||||||
$(SET) REACT_APP_GITHASH=$(GITHASH) $(SEPARATOR) \
|
$(SET) VITE_APP_GITHASH=$(GITHASH) $(SEPARATOR) \
|
||||||
$(SET) REACT_APP_STASH_VERSION=$(STASH_VERSION) $(SEPARATOR) \
|
$(SET) VITE_APP_STASH_VERSION=$(STASH_VERSION) $(SEPARATOR) \
|
||||||
cd ui/v2.5 && yarn build
|
cd ui/v2.5 && yarn build
|
||||||
|
|
||||||
.PHONY: ui-start
|
.PHONY: ui-start
|
||||||
ui-start: pre-build
|
ui-start: pre-build
|
||||||
$(SET) REACT_APP_DATE="$(BUILD_DATE)" $(SEPARATOR) \
|
$(SET) VITE_APP_DATE="$(BUILD_DATE)" $(SEPARATOR) \
|
||||||
$(SET) REACT_APP_GITHASH=$(GITHASH) $(SEPARATOR) \
|
$(SET) VITE_APP_GITHASH=$(GITHASH) $(SEPARATOR) \
|
||||||
$(SET) REACT_APP_STASH_VERSION=$(STASH_VERSION) $(SEPARATOR) \
|
$(SET) VITE_APP_STASH_VERSION=$(STASH_VERSION) $(SEPARATOR) \
|
||||||
cd ui/v2.5 && yarn start
|
cd ui/v2.5 && yarn start --host
|
||||||
|
|
||||||
.PHONY: fmt-ui
|
.PHONY: fmt-ui
|
||||||
fmt-ui:
|
fmt-ui:
|
||||||
|
|||||||
@@ -41,6 +41,12 @@ Download and run Stash. It will prompt you for some configuration options and a
|
|||||||
|
|
||||||
The simplest way to tag a large number of files is by using the [Tagger](https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/docs/en/Tagger.md) which uses filename keywords to help identify the file and pull in scene and performer information from our stash-box database. Note that this data source is not comprehensive and you may need to use the scrapers to identify some of your media.
|
The simplest way to tag a large number of files is by using the [Tagger](https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/docs/en/Tagger.md) which uses filename keywords to help identify the file and pull in scene and performer information from our stash-box database. Note that this data source is not comprehensive and you may need to use the scrapers to identify some of your media.
|
||||||
|
|
||||||
|
# Translation
|
||||||
|
[](https://translate.stashapp.cc/engage/stash/)
|
||||||
|
🇧🇷 🇨🇳 🇬🇧 🇫🇮 🇫🇷 🇩🇪 🇮🇹 🇪🇸 🇸🇪 🇹🇼
|
||||||
|
|
||||||
|
Stash is available in 10 languages (so far!) and it could be in your language too. If you want to help us translate Stash into your language, you can make an account at [translate.stashapp.cc](https://translate.stashapp.cc/projects/stash/stash-desktop-client/) to get started contributing new languages or improving existing ones. Thanks!
|
||||||
|
|
||||||
# Support (FAQ)
|
# Support (FAQ)
|
||||||
|
|
||||||
Answers to other Frequently Asked Questions can be found [on our Wiki](https://github.com/stashapp/stash/wiki/FAQ)
|
Answers to other Frequently Asked Questions can be found [on our Wiki](https://github.com/stashapp/stash/wiki/FAQ)
|
||||||
|
|||||||
@@ -24,22 +24,22 @@ NOTE: The `make` command in Windows will be `mingw32-make` with MingW.
|
|||||||
|
|
||||||
### macOS
|
### macOS
|
||||||
|
|
||||||
TODO
|
1. If you don't have it already, install the [Homebrew package manager](https://brew.sh).
|
||||||
|
2. Install dependencies: `brew install go git yarn gcc make`
|
||||||
|
|
||||||
## Commands
|
## Commands
|
||||||
|
|
||||||
* `make generate` - Generate Go and UI GraphQL files
|
|
||||||
* `make build` - Builds the binary (make sure to build the UI as well... see below)
|
|
||||||
* `make docker-build` - Locally builds and tags a complete 'stash/build' docker image
|
|
||||||
* `make pre-ui` - Installs the UI dependencies. Only needs to be run once before building the UI for the first time, or if the dependencies are updated
|
* `make pre-ui` - Installs the UI dependencies. Only needs to be run once before building the UI for the first time, or if the dependencies are updated
|
||||||
|
* `make generate` - Generate Go and UI GraphQL files
|
||||||
* `make fmt-ui` - Formats the UI source code
|
* `make fmt-ui` - Formats the UI source code
|
||||||
* `make ui` - Builds the frontend
|
* `make ui` - Builds the frontend
|
||||||
|
* `make build` - Builds the binary (make sure to build the UI as well... see below)
|
||||||
|
* `make docker-build` - Locally builds and tags a complete 'stash/build' docker image
|
||||||
* `make lint` - Run the linter on the backend
|
* `make lint` - Run the linter on the backend
|
||||||
* `make fmt` - Run `go fmt`
|
* `make fmt` - Run `go fmt`
|
||||||
* `make it` - Run the unit and integration tests
|
* `make it` - Run the unit and integration tests
|
||||||
* `make validate` - Run all of the tests and checks required to submit a PR
|
* `make validate` - Run all of the tests and checks required to submit a PR
|
||||||
* `make ui-start` - Runs the UI in development mode. Requires a running stash server to connect to. Stash port can be changed from the default of `9999` with environment variable `REACT_APP_PLATFORM_PORT`.
|
* `make ui-start` - Runs the UI in development mode. Requires a running stash server to connect to. Stash server port can be changed from the default of `9999` using environment variable `VITE_APP_PLATFORM_PORT`. UI runs on port `3000` or the next available port.
|
||||||
|
|
||||||
## Building a release
|
## Building a release
|
||||||
|
|
||||||
|
|||||||
16
go.mod
16
go.mod
@@ -4,7 +4,7 @@ require (
|
|||||||
github.com/99designs/gqlgen v0.12.2
|
github.com/99designs/gqlgen v0.12.2
|
||||||
github.com/Yamashou/gqlgenc v0.0.0-20200902035953-4dbef3551953
|
github.com/Yamashou/gqlgenc v0.0.0-20200902035953-4dbef3551953
|
||||||
github.com/anacrolix/dms v1.2.2
|
github.com/anacrolix/dms v1.2.2
|
||||||
github.com/antchfx/htmlquery v1.2.3
|
github.com/antchfx/htmlquery v1.2.5-0.20211125074323-810ee8082758
|
||||||
github.com/chromedp/cdproto v0.0.0-20210622022015-fe1827b46b84
|
github.com/chromedp/cdproto v0.0.0-20210622022015-fe1827b46b84
|
||||||
github.com/chromedp/chromedp v0.7.3
|
github.com/chromedp/chromedp v0.7.3
|
||||||
github.com/corona10/goimagehash v1.0.3
|
github.com/corona10/goimagehash v1.0.3
|
||||||
@@ -37,20 +37,24 @@ require (
|
|||||||
github.com/vektra/mockery/v2 v2.2.1
|
github.com/vektra/mockery/v2 v2.2.1
|
||||||
golang.org/x/crypto v0.0.0-20210817164053-32db794688a5
|
golang.org/x/crypto v0.0.0-20210817164053-32db794688a5
|
||||||
golang.org/x/image v0.0.0-20210220032944-ac19c3e999fb
|
golang.org/x/image v0.0.0-20210220032944-ac19c3e999fb
|
||||||
golang.org/x/net v0.0.0-20210520170846-37e1c6afe023
|
golang.org/x/net v0.0.0-20211123203042-d83791d6bcd9
|
||||||
golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf
|
golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf
|
||||||
golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b // indirect
|
golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b // indirect
|
||||||
golang.org/x/text v0.3.6
|
golang.org/x/text v0.3.7
|
||||||
golang.org/x/tools v0.1.5 // indirect
|
golang.org/x/tools v0.1.5 // indirect
|
||||||
gopkg.in/sourcemap.v1 v1.0.5 // indirect
|
gopkg.in/sourcemap.v1 v1.0.5 // indirect
|
||||||
gopkg.in/yaml.v2 v2.4.0
|
gopkg.in/yaml.v2 v2.4.0
|
||||||
)
|
)
|
||||||
|
|
||||||
require github.com/vektah/gqlparser/v2 v2.0.1
|
require (
|
||||||
|
github.com/lucasb-eyer/go-colorful v1.2.0
|
||||||
|
github.com/vearutop/statigz v1.1.6
|
||||||
|
github.com/vektah/gqlparser/v2 v2.0.1
|
||||||
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/agnivade/levenshtein v1.1.0 // indirect
|
github.com/agnivade/levenshtein v1.1.0 // indirect
|
||||||
github.com/antchfx/xpath v1.1.6 // indirect
|
github.com/antchfx/xpath v1.2.0 // indirect
|
||||||
github.com/chromedp/sysutil v1.0.0 // indirect
|
github.com/chromedp/sysutil v1.0.0 // indirect
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect
|
github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect
|
||||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||||
@@ -58,7 +62,7 @@ require (
|
|||||||
github.com/gobwas/httphead v0.1.0 // indirect
|
github.com/gobwas/httphead v0.1.0 // indirect
|
||||||
github.com/gobwas/pool v0.2.1 // indirect
|
github.com/gobwas/pool v0.2.1 // indirect
|
||||||
github.com/gobwas/ws v1.1.0-rc.5 // indirect
|
github.com/gobwas/ws v1.1.0-rc.5 // indirect
|
||||||
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect
|
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
|
||||||
github.com/hashicorp/errwrap v1.0.0 // indirect
|
github.com/hashicorp/errwrap v1.0.0 // indirect
|
||||||
github.com/hashicorp/go-multierror v1.1.0 // indirect
|
github.com/hashicorp/go-multierror v1.1.0 // indirect
|
||||||
github.com/hashicorp/golang-lru v0.5.1 // indirect
|
github.com/hashicorp/golang-lru v0.5.1 // indirect
|
||||||
|
|||||||
25
go.sum
25
go.sum
@@ -79,10 +79,12 @@ github.com/anacrolix/missinggo v1.1.0/go.mod h1:MBJu3Sk/k3ZfGYcS7z18gwfu72Ey/xop
|
|||||||
github.com/anacrolix/tagflag v0.0.0-20180109131632-2146c8d41bf0/go.mod h1:1m2U/K6ZT+JZG0+bdMK6qauP49QT4wE5pmhJXOKKCHw=
|
github.com/anacrolix/tagflag v0.0.0-20180109131632-2146c8d41bf0/go.mod h1:1m2U/K6ZT+JZG0+bdMK6qauP49QT4wE5pmhJXOKKCHw=
|
||||||
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=
|
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=
|
||||||
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
|
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
|
||||||
github.com/antchfx/htmlquery v1.2.3 h1:sP3NFDneHx2stfNXCKbhHFo8XgNjCACnU/4AO5gWz6M=
|
github.com/andybalholm/brotli v1.0.3 h1:fpcw+r1N1h0Poc1F/pHbW40cUm/lMEQslZtCkBQ0UnM=
|
||||||
github.com/antchfx/htmlquery v1.2.3/go.mod h1:B0ABL+F5irhhMWg54ymEZinzMSi0Kt3I2if0BLYa3V0=
|
github.com/andybalholm/brotli v1.0.3/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
|
||||||
github.com/antchfx/xpath v1.1.6 h1:6sVh6hB5T6phw1pFpHRQ+C4bd8sNI+O58flqtg7h0R0=
|
github.com/antchfx/htmlquery v1.2.5-0.20211125074323-810ee8082758 h1:Ldjwcl7T8VqCKgQQ0TfPI8fNb8O/GtMXcYaHlqOu99s=
|
||||||
github.com/antchfx/xpath v1.1.6/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk=
|
github.com/antchfx/htmlquery v1.2.5-0.20211125074323-810ee8082758/go.mod h1:2xO6iu3EVWs7R2JYqBbp8YzG50gj/ofqs5/0VZoDZLc=
|
||||||
|
github.com/antchfx/xpath v1.2.0 h1:mbwv7co+x0RwgeGAOHdrKy89GvHaGvxxBtPK0uF9Zr8=
|
||||||
|
github.com/antchfx/xpath v1.2.0/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
|
||||||
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
|
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
|
||||||
github.com/apache/arrow/go/arrow v0.0.0-20200601151325-b2287a20f230/go.mod h1:QNYViu/X0HXDHw7m3KXzWSVXIbfUvJqBFe6Gj8/pYA0=
|
github.com/apache/arrow/go/arrow v0.0.0-20200601151325-b2287a20f230/go.mod h1:QNYViu/X0HXDHw7m3KXzWSVXIbfUvJqBFe6Gj8/pYA0=
|
||||||
github.com/apache/arrow/go/arrow v0.0.0-20210521153258-78c88a9f517b/go.mod h1:R4hW3Ug0s+n4CUsWHKOj00Pu01ZqU4x/hSF5kXUcXKQ=
|
github.com/apache/arrow/go/arrow v0.0.0-20210521153258-78c88a9f517b/go.mod h1:R4hW3Ug0s+n4CUsWHKOj00Pu01ZqU4x/hSF5kXUcXKQ=
|
||||||
@@ -126,6 +128,8 @@ github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCS
|
|||||||
github.com/bkaradzic/go-lz4 v1.0.0/go.mod h1:0YdlkowM3VswSROI7qDxhRvJ3sLhlFrRRwjwegp5jy4=
|
github.com/bkaradzic/go-lz4 v1.0.0/go.mod h1:0YdlkowM3VswSROI7qDxhRvJ3sLhlFrRRwjwegp5jy4=
|
||||||
github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84=
|
github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84=
|
||||||
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
|
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
|
||||||
|
github.com/bool64/dev v0.1.41 h1:L554LCQZc3d7mtcdPUgDbSrCVbr48/30zgu0VuC/FTA=
|
||||||
|
github.com/bool64/dev v0.1.41/go.mod h1:cTHiTDNc8EewrQPy3p1obNilpMpdmlUesDkFTF2zRWU=
|
||||||
github.com/bradfitz/iter v0.0.0-20140124041915-454541ec3da2/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo=
|
github.com/bradfitz/iter v0.0.0-20140124041915-454541ec3da2/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo=
|
||||||
github.com/bradfitz/iter v0.0.0-20190303215204-33e6a9893b0c/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo=
|
github.com/bradfitz/iter v0.0.0-20190303215204-33e6a9893b0c/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo=
|
||||||
github.com/cenkalti/backoff/v4 v4.0.2/go.mod h1:eEew/i+1Q6OrCDZh3WiXYv3+nJwBASZ8Bog/87DQnVg=
|
github.com/cenkalti/backoff/v4 v4.0.2/go.mod h1:eEew/i+1Q6OrCDZh3WiXYv3+nJwBASZ8Bog/87DQnVg=
|
||||||
@@ -268,8 +272,9 @@ github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfU
|
|||||||
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||||
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||||
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||||
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=
|
|
||||||
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||||
|
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
|
||||||
|
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||||
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
||||||
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
||||||
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
|
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
|
||||||
@@ -505,6 +510,8 @@ github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
|||||||
github.com/lib/pq v1.10.0 h1:Zx5DJFEYQXio93kgXnQ09fXNiUKsqv4OUEu2UtGcB1E=
|
github.com/lib/pq v1.10.0 h1:Zx5DJFEYQXio93kgXnQ09fXNiUKsqv4OUEu2UtGcB1E=
|
||||||
github.com/lib/pq v1.10.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
github.com/lib/pq v1.10.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
||||||
github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
|
github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
|
||||||
|
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
|
||||||
|
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
|
||||||
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
|
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
|
||||||
github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
|
github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
|
||||||
github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls=
|
github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls=
|
||||||
@@ -706,6 +713,8 @@ github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1
|
|||||||
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
|
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
|
||||||
github.com/urfave/cli/v2 v2.1.1 h1:Qt8FeAtxE/vfdrLmR3rxR6JRE0RoVmbXu8+6kZtYU4k=
|
github.com/urfave/cli/v2 v2.1.1 h1:Qt8FeAtxE/vfdrLmR3rxR6JRE0RoVmbXu8+6kZtYU4k=
|
||||||
github.com/urfave/cli/v2 v2.1.1/go.mod h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ=
|
github.com/urfave/cli/v2 v2.1.1/go.mod h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ=
|
||||||
|
github.com/vearutop/statigz v1.1.6 h1:si1zvulh/6P4S/SjFticuKQ8/EgQISglaRuycj8PWso=
|
||||||
|
github.com/vearutop/statigz v1.1.6/go.mod h1:czAv7iXgPv/s+xsgXpVEhhD0NSOQ4wZPgmM/n7LANDI=
|
||||||
github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e h1:+w0Zm/9gaWpEAyDlU1eKOuk5twTjAjuevXqcJJw8hrg=
|
github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e h1:+w0Zm/9gaWpEAyDlU1eKOuk5twTjAjuevXqcJJw8hrg=
|
||||||
github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U=
|
github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U=
|
||||||
github.com/vektah/gqlparser v1.3.1 h1:8b0IcD3qZKWJQHSzynbDlrtP3IxVydZ2DZepCGofqfU=
|
github.com/vektah/gqlparser v1.3.1 h1:8b0IcD3qZKWJQHSzynbDlrtP3IxVydZ2DZepCGofqfU=
|
||||||
@@ -863,8 +872,9 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v
|
|||||||
golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
|
golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
|
||||||
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
|
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
|
||||||
golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||||
golang.org/x/net v0.0.0-20210520170846-37e1c6afe023 h1:ADo5wSpq2gqaCGQWzk7S5vd//0iyyLeAratkEoG5dLE=
|
|
||||||
golang.org/x/net v0.0.0-20210520170846-37e1c6afe023/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
golang.org/x/net v0.0.0-20210520170846-37e1c6afe023/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||||
|
golang.org/x/net v0.0.0-20211123203042-d83791d6bcd9 h1:0qxwC5n+ttVOINCBeRHO0nq9X7uy8SDsPoi5OaCdIEI=
|
||||||
|
golang.org/x/net v0.0.0-20211123203042-d83791d6bcd9/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||||
golang.org/x/oauth2 v0.0.0-20180227000427-d7d64896b5ff/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
golang.org/x/oauth2 v0.0.0-20180227000427-d7d64896b5ff/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||||
golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||||
@@ -986,8 +996,9 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
|||||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
|
|
||||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
|
golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
|
||||||
|
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
|
|||||||
@@ -14,6 +14,10 @@ resolver:
|
|||||||
struct_tag: gqlgen
|
struct_tag: gqlgen
|
||||||
|
|
||||||
models:
|
models:
|
||||||
|
# Scalars
|
||||||
|
Timestamp:
|
||||||
|
model: github.com/stashapp/stash/pkg/models.Timestamp
|
||||||
|
# Objects
|
||||||
Gallery:
|
Gallery:
|
||||||
model: github.com/stashapp/stash/pkg/models.Gallery
|
model: github.com/stashapp/stash/pkg/models.Gallery
|
||||||
Image:
|
Image:
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ fragment ConfigGeneralData on ConfigGeneralResult {
|
|||||||
databasePath
|
databasePath
|
||||||
generatedPath
|
generatedPath
|
||||||
metadataPath
|
metadataPath
|
||||||
|
scrapersPath
|
||||||
cachePath
|
cachePath
|
||||||
calculateMD5
|
calculateMD5
|
||||||
videoFileNamingAlgorithm
|
videoFileNamingAlgorithm
|
||||||
@@ -61,7 +62,7 @@ fragment ConfigInterfaceData on ConfigInterfaceResult {
|
|||||||
cssEnabled
|
cssEnabled
|
||||||
language
|
language
|
||||||
slideshowDelay
|
slideshowDelay
|
||||||
disabledDropdownCreate {
|
disableDropdownCreate {
|
||||||
performer
|
performer
|
||||||
tag
|
tag
|
||||||
studio
|
studio
|
||||||
@@ -106,6 +107,16 @@ fragment ScraperSourceData on ScraperSource {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fragment ConfigDefaultSettingsData on ConfigDefaultSettingsResult {
|
fragment ConfigDefaultSettingsData on ConfigDefaultSettingsResult {
|
||||||
|
scan {
|
||||||
|
useFileMetadata
|
||||||
|
stripFileExtension
|
||||||
|
scanGeneratePreviews
|
||||||
|
scanGenerateImagePreviews
|
||||||
|
scanGenerateSprites
|
||||||
|
scanGeneratePhashes
|
||||||
|
scanGenerateThumbnails
|
||||||
|
}
|
||||||
|
|
||||||
identify {
|
identify {
|
||||||
sources {
|
sources {
|
||||||
source {
|
source {
|
||||||
@@ -120,6 +131,31 @@ fragment ConfigDefaultSettingsData on ConfigDefaultSettingsResult {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
autoTag {
|
||||||
|
performers
|
||||||
|
studios
|
||||||
|
tags
|
||||||
|
}
|
||||||
|
|
||||||
|
generate {
|
||||||
|
sprites
|
||||||
|
previews
|
||||||
|
imagePreviews
|
||||||
|
previewOptions {
|
||||||
|
previewSegments
|
||||||
|
previewSegmentDuration
|
||||||
|
previewExcludeStart
|
||||||
|
previewExcludeEnd
|
||||||
|
previewPreset
|
||||||
|
}
|
||||||
|
markers
|
||||||
|
markerImagePreviews
|
||||||
|
markerScreenshots
|
||||||
|
transcodes
|
||||||
|
phashes
|
||||||
|
interactiveHeatmapsSpeeds
|
||||||
|
}
|
||||||
|
|
||||||
deleteFile
|
deleteFile
|
||||||
deleteGenerated
|
deleteGenerated
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,6 +2,8 @@ fragment GalleryData on Gallery {
|
|||||||
id
|
id
|
||||||
checksum
|
checksum
|
||||||
path
|
path
|
||||||
|
created_at
|
||||||
|
updated_at
|
||||||
title
|
title
|
||||||
date
|
date
|
||||||
url
|
url
|
||||||
|
|||||||
@@ -6,6 +6,8 @@ fragment ImageData on Image {
|
|||||||
organized
|
organized
|
||||||
o_counter
|
o_counter
|
||||||
path
|
path
|
||||||
|
created_at
|
||||||
|
updated_at
|
||||||
|
|
||||||
file {
|
file {
|
||||||
size
|
size
|
||||||
|
|||||||
@@ -12,6 +12,7 @@ fragment SlimSceneData on Scene {
|
|||||||
path
|
path
|
||||||
phash
|
phash
|
||||||
interactive
|
interactive
|
||||||
|
interactive_speed
|
||||||
|
|
||||||
file {
|
file {
|
||||||
size
|
size
|
||||||
@@ -33,6 +34,7 @@ fragment SlimSceneData on Scene {
|
|||||||
chapters_vtt
|
chapters_vtt
|
||||||
sprite
|
sprite
|
||||||
funscript
|
funscript
|
||||||
|
interactive_heatmap
|
||||||
}
|
}
|
||||||
|
|
||||||
scene_markers {
|
scene_markers {
|
||||||
|
|||||||
@@ -12,6 +12,9 @@ fragment SceneData on Scene {
|
|||||||
path
|
path
|
||||||
phash
|
phash
|
||||||
interactive
|
interactive
|
||||||
|
interactive_speed
|
||||||
|
created_at
|
||||||
|
updated_at
|
||||||
|
|
||||||
file {
|
file {
|
||||||
size
|
size
|
||||||
@@ -33,6 +36,7 @@ fragment SceneData on Scene {
|
|||||||
chapters_vtt
|
chapters_vtt
|
||||||
sprite
|
sprite
|
||||||
funscript
|
funscript
|
||||||
|
interactive_heatmap
|
||||||
}
|
}
|
||||||
|
|
||||||
scene_markers {
|
scene_markers {
|
||||||
|
|||||||
@@ -1,27 +1,3 @@
|
|||||||
query ScrapeFreeones($performer_name: String!) {
|
|
||||||
scrapeFreeones(performer_name: $performer_name) {
|
|
||||||
name
|
|
||||||
url
|
|
||||||
twitter
|
|
||||||
instagram
|
|
||||||
birthdate
|
|
||||||
ethnicity
|
|
||||||
country
|
|
||||||
eye_color
|
|
||||||
height
|
|
||||||
measurements
|
|
||||||
fake_tits
|
|
||||||
career_length
|
|
||||||
tattoos
|
|
||||||
piercings
|
|
||||||
aliases
|
|
||||||
details
|
|
||||||
death_date
|
|
||||||
hair_color
|
|
||||||
weight
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
query ScrapeFreeonesPerformers($q: String!) {
|
query ScrapeFreeonesPerformers($q: String!) {
|
||||||
scrapeFreeonesPerformerList(query: $q)
|
scrapeFreeonesPerformerList(query: $q)
|
||||||
}
|
}
|
||||||
@@ -67,10 +67,12 @@ type Query {
|
|||||||
# Scrapers
|
# Scrapers
|
||||||
|
|
||||||
"""List available scrapers"""
|
"""List available scrapers"""
|
||||||
listPerformerScrapers: [Scraper!]!
|
listScrapers(types: [ScrapeContentType!]!): [Scraper!]!
|
||||||
listSceneScrapers: [Scraper!]!
|
listPerformerScrapers: [Scraper!]! @deprecated(reason: "Use listScrapers(types: [PERFORMER])")
|
||||||
listGalleryScrapers: [Scraper!]!
|
listSceneScrapers: [Scraper!]! @deprecated(reason: "Use listScrapers(types: [SCENE])")
|
||||||
listMovieScrapers: [Scraper!]!
|
listGalleryScrapers: [Scraper!]! @deprecated(reason: "Use listScrapers(types: [GALLERY])")
|
||||||
|
listMovieScrapers: [Scraper!]! @deprecated(reason: "Use listScrapers(types: [MOVIE])")
|
||||||
|
|
||||||
|
|
||||||
"""Scrape for a single scene"""
|
"""Scrape for a single scene"""
|
||||||
scrapeSingleScene(source: ScraperSourceInput!, input: ScrapeSingleSceneInput!): [ScrapedScene!]!
|
scrapeSingleScene(source: ScraperSourceInput!, input: ScrapeSingleSceneInput!): [ScrapedScene!]!
|
||||||
@@ -88,6 +90,9 @@ type Query {
|
|||||||
"""Scrape for a single movie"""
|
"""Scrape for a single movie"""
|
||||||
scrapeSingleMovie(source: ScraperSourceInput!, input: ScrapeSingleMovieInput!): [ScrapedMovie!]!
|
scrapeSingleMovie(source: ScraperSourceInput!, input: ScrapeSingleMovieInput!): [ScrapedMovie!]!
|
||||||
|
|
||||||
|
"Scrapes content based on a URL"
|
||||||
|
scrapeURL(url: String!, ty: ScrapeContentType!): ScrapedContent
|
||||||
|
|
||||||
"""Scrapes a complete performer record based on a URL"""
|
"""Scrapes a complete performer record based on a URL"""
|
||||||
scrapePerformerURL(url: String!): ScrapedPerformer
|
scrapePerformerURL(url: String!): ScrapedPerformer
|
||||||
"""Scrapes a complete performer record based on a URL"""
|
"""Scrapes a complete performer record based on a URL"""
|
||||||
@@ -106,8 +111,6 @@ type Query {
|
|||||||
"""Scrapes a complete gallery record based on an existing gallery"""
|
"""Scrapes a complete gallery record based on an existing gallery"""
|
||||||
scrapeGallery(scraper_id: ID!, gallery: GalleryUpdateInput!): ScrapedGallery @deprecated(reason: "use scrapeSingleGallery")
|
scrapeGallery(scraper_id: ID!, gallery: GalleryUpdateInput!): ScrapedGallery @deprecated(reason: "use scrapeSingleGallery")
|
||||||
|
|
||||||
"""Scrape a performer using Freeones"""
|
|
||||||
scrapeFreeones(performer_name: String!): ScrapedPerformer @deprecated(reason: "use scrapeSinglePerformer with scraper_id = builtin_freeones")
|
|
||||||
"""Scrape a list of performers from a query"""
|
"""Scrape a list of performers from a query"""
|
||||||
scrapeFreeonesPerformerList(query: String!): [String!]! @deprecated(reason: "use scrapeSinglePerformer with scraper_id = builtin_freeones")
|
scrapeFreeonesPerformerList(query: String!): [String!]! @deprecated(reason: "use scrapeSinglePerformer with scraper_id = builtin_freeones")
|
||||||
|
|
||||||
|
|||||||
@@ -41,16 +41,18 @@ input ConfigGeneralInput {
|
|||||||
generatedPath: String
|
generatedPath: String
|
||||||
"""Path to import/export files"""
|
"""Path to import/export files"""
|
||||||
metadataPath: String
|
metadataPath: String
|
||||||
|
"""Path to scrapers"""
|
||||||
|
scrapersPath: String
|
||||||
"""Path to cache"""
|
"""Path to cache"""
|
||||||
cachePath: String
|
cachePath: String
|
||||||
"""Whether to calculate MD5 checksums for scene video files"""
|
"""Whether to calculate MD5 checksums for scene video files"""
|
||||||
calculateMD5: Boolean!
|
calculateMD5: Boolean
|
||||||
"""Hash algorithm to use for generated file naming"""
|
"""Hash algorithm to use for generated file naming"""
|
||||||
videoFileNamingAlgorithm: HashAlgorithm!
|
videoFileNamingAlgorithm: HashAlgorithm
|
||||||
"""Number of parallel tasks to start during scan/generate"""
|
"""Number of parallel tasks to start during scan/generate"""
|
||||||
parallelTasks: Int
|
parallelTasks: Int
|
||||||
"""Include audio stream in previews"""
|
"""Include audio stream in previews"""
|
||||||
previewAudio: Boolean!
|
previewAudio: Boolean
|
||||||
"""Number of segments in a preview file"""
|
"""Number of segments in a preview file"""
|
||||||
previewSegments: Int
|
previewSegments: Int
|
||||||
"""Preview segment duration, in seconds"""
|
"""Preview segment duration, in seconds"""
|
||||||
@@ -78,13 +80,13 @@ input ConfigGeneralInput {
|
|||||||
"""Name of the log file"""
|
"""Name of the log file"""
|
||||||
logFile: String
|
logFile: String
|
||||||
"""Whether to also output to stderr"""
|
"""Whether to also output to stderr"""
|
||||||
logOut: Boolean!
|
logOut: Boolean
|
||||||
"""Minimum log level"""
|
"""Minimum log level"""
|
||||||
logLevel: String!
|
logLevel: String
|
||||||
"""Whether to log http access"""
|
"""Whether to log http access"""
|
||||||
logAccess: Boolean!
|
logAccess: Boolean
|
||||||
"""True if galleries should be created from folders with images"""
|
"""True if galleries should be created from folders with images"""
|
||||||
createGalleriesFromFolders: Boolean!
|
createGalleriesFromFolders: Boolean
|
||||||
"""Array of video file extensions"""
|
"""Array of video file extensions"""
|
||||||
videoExtensions: [String!]
|
videoExtensions: [String!]
|
||||||
"""Array of image file extensions"""
|
"""Array of image file extensions"""
|
||||||
@@ -104,7 +106,7 @@ input ConfigGeneralInput {
|
|||||||
"""Whether the scraper should check for invalid certificates"""
|
"""Whether the scraper should check for invalid certificates"""
|
||||||
scraperCertCheck: Boolean @deprecated(reason: "use mutation ConfigureScraping(input: ConfigScrapingInput) instead")
|
scraperCertCheck: Boolean @deprecated(reason: "use mutation ConfigureScraping(input: ConfigScrapingInput) instead")
|
||||||
"""Stash-box instances used for tagging"""
|
"""Stash-box instances used for tagging"""
|
||||||
stashBoxes: [StashBoxInput!]!
|
stashBoxes: [StashBoxInput!]
|
||||||
}
|
}
|
||||||
|
|
||||||
type ConfigGeneralResult {
|
type ConfigGeneralResult {
|
||||||
@@ -282,7 +284,8 @@ type ConfigInterfaceResult {
|
|||||||
slideshowDelay: Int
|
slideshowDelay: Int
|
||||||
|
|
||||||
"""Fields are true if creating via dropdown menus are disabled"""
|
"""Fields are true if creating via dropdown menus are disabled"""
|
||||||
disabledDropdownCreate: ConfigDisableDropdownCreate!
|
disableDropdownCreate: ConfigDisableDropdownCreate!
|
||||||
|
disabledDropdownCreate: ConfigDisableDropdownCreate! @deprecated(reason: "Use disableDropdownCreate")
|
||||||
|
|
||||||
"""Handy Connection Key"""
|
"""Handy Connection Key"""
|
||||||
handyKey: String
|
handyKey: String
|
||||||
@@ -316,7 +319,7 @@ input ConfigScrapingInput {
|
|||||||
"""Scraper CDP path. Path to chrome executable or remote address"""
|
"""Scraper CDP path. Path to chrome executable or remote address"""
|
||||||
scraperCDPPath: String
|
scraperCDPPath: String
|
||||||
"""Whether the scraper should check for invalid certificates"""
|
"""Whether the scraper should check for invalid certificates"""
|
||||||
scraperCertCheck: Boolean!
|
scraperCertCheck: Boolean
|
||||||
"""Tags blacklist during scraping"""
|
"""Tags blacklist during scraping"""
|
||||||
excludeTagPatterns: [String!]
|
excludeTagPatterns: [String!]
|
||||||
}
|
}
|
||||||
@@ -333,7 +336,10 @@ type ConfigScrapingResult {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type ConfigDefaultSettingsResult {
|
type ConfigDefaultSettingsResult {
|
||||||
|
scan: ScanMetadataOptions
|
||||||
identify: IdentifyMetadataTaskOptions
|
identify: IdentifyMetadataTaskOptions
|
||||||
|
autoTag: AutoTagMetadataOptions
|
||||||
|
generate: GenerateMetadataOptions
|
||||||
|
|
||||||
"""If true, delete file checkbox will be checked by default"""
|
"""If true, delete file checkbox will be checked by default"""
|
||||||
deleteFile: Boolean
|
deleteFile: Boolean
|
||||||
@@ -342,7 +348,10 @@ type ConfigDefaultSettingsResult {
|
|||||||
}
|
}
|
||||||
|
|
||||||
input ConfigDefaultSettingsInput {
|
input ConfigDefaultSettingsInput {
|
||||||
|
scan: ScanMetadataInput
|
||||||
identify: IdentifyMetadataInput
|
identify: IdentifyMetadataInput
|
||||||
|
autoTag: AutoTagMetadataInput
|
||||||
|
generate: GenerateMetadataInput
|
||||||
|
|
||||||
"""If true, delete file checkbox will be checked by default"""
|
"""If true, delete file checkbox will be checked by default"""
|
||||||
deleteFile: Boolean
|
deleteFile: Boolean
|
||||||
|
|||||||
@@ -158,6 +158,8 @@ input SceneFilterType {
|
|||||||
url: StringCriterionInput
|
url: StringCriterionInput
|
||||||
"""Filter by interactive"""
|
"""Filter by interactive"""
|
||||||
interactive: Boolean
|
interactive: Boolean
|
||||||
|
"""Filter by InteractiveSpeed"""
|
||||||
|
interactive_speed: IntCriterionInput
|
||||||
}
|
}
|
||||||
|
|
||||||
input MovieFilterType {
|
input MovieFilterType {
|
||||||
|
|||||||
@@ -9,7 +9,10 @@ input GenerateMetadataInput {
|
|||||||
markerImagePreviews: Boolean
|
markerImagePreviews: Boolean
|
||||||
markerScreenshots: Boolean
|
markerScreenshots: Boolean
|
||||||
transcodes: Boolean
|
transcodes: Boolean
|
||||||
|
"""Generate transcodes even if not required"""
|
||||||
|
forceTranscodes: Boolean
|
||||||
phashes: Boolean
|
phashes: Boolean
|
||||||
|
interactiveHeatmapsSpeeds: Boolean
|
||||||
|
|
||||||
"""scene ids to generate for"""
|
"""scene ids to generate for"""
|
||||||
sceneIDs: [ID!]
|
sceneIDs: [ID!]
|
||||||
@@ -33,8 +36,41 @@ input GeneratePreviewOptionsInput {
|
|||||||
previewPreset: PreviewPreset
|
previewPreset: PreviewPreset
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type GenerateMetadataOptions {
|
||||||
|
sprites: Boolean
|
||||||
|
previews: Boolean
|
||||||
|
imagePreviews: Boolean
|
||||||
|
previewOptions: GeneratePreviewOptions
|
||||||
|
markers: Boolean
|
||||||
|
markerImagePreviews: Boolean
|
||||||
|
markerScreenshots: Boolean
|
||||||
|
transcodes: Boolean
|
||||||
|
phashes: Boolean
|
||||||
|
interactiveHeatmapsSpeeds: Boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
type GeneratePreviewOptions {
|
||||||
|
"""Number of segments in a preview file"""
|
||||||
|
previewSegments: Int
|
||||||
|
"""Preview segment duration, in seconds"""
|
||||||
|
previewSegmentDuration: Float
|
||||||
|
"""Duration of start of video to exclude when generating previews"""
|
||||||
|
previewExcludeStart: String
|
||||||
|
"""Duration of end of video to exclude when generating previews"""
|
||||||
|
previewExcludeEnd: String
|
||||||
|
"""Preset when generating preview"""
|
||||||
|
previewPreset: PreviewPreset
|
||||||
|
}
|
||||||
|
|
||||||
|
"Filter options for meta data scannning"
|
||||||
|
input ScanMetaDataFilterInput {
|
||||||
|
"If set, files with a modification time before this time point are ignored by the scan"
|
||||||
|
minModTime: Timestamp
|
||||||
|
}
|
||||||
|
|
||||||
input ScanMetadataInput {
|
input ScanMetadataInput {
|
||||||
paths: [String!]
|
paths: [String!]
|
||||||
|
|
||||||
"""Set name, date, details from metadata (if present)"""
|
"""Set name, date, details from metadata (if present)"""
|
||||||
useFileMetadata: Boolean
|
useFileMetadata: Boolean
|
||||||
"""Strip file extension from title"""
|
"""Strip file extension from title"""
|
||||||
@@ -49,9 +85,31 @@ input ScanMetadataInput {
|
|||||||
scanGeneratePhashes: Boolean
|
scanGeneratePhashes: Boolean
|
||||||
"""Generate image thumbnails during scan"""
|
"""Generate image thumbnails during scan"""
|
||||||
scanGenerateThumbnails: Boolean
|
scanGenerateThumbnails: Boolean
|
||||||
|
|
||||||
|
"Filter options for the scan"
|
||||||
|
filter: ScanMetaDataFilterInput
|
||||||
|
}
|
||||||
|
|
||||||
|
type ScanMetadataOptions {
|
||||||
|
"""Set name, date, details from metadata (if present)"""
|
||||||
|
useFileMetadata: Boolean!
|
||||||
|
"""Strip file extension from title"""
|
||||||
|
stripFileExtension: Boolean!
|
||||||
|
"""Generate previews during scan"""
|
||||||
|
scanGeneratePreviews: Boolean!
|
||||||
|
"""Generate image previews during scan"""
|
||||||
|
scanGenerateImagePreviews: Boolean!
|
||||||
|
"""Generate sprites during scan"""
|
||||||
|
scanGenerateSprites: Boolean!
|
||||||
|
"""Generate phashes during scan"""
|
||||||
|
scanGeneratePhashes: Boolean!
|
||||||
|
"""Generate image thumbnails during scan"""
|
||||||
|
scanGenerateThumbnails: Boolean!
|
||||||
}
|
}
|
||||||
|
|
||||||
input CleanMetadataInput {
|
input CleanMetadataInput {
|
||||||
|
paths: [String!]
|
||||||
|
|
||||||
"""Do a dry run. Don't delete any files"""
|
"""Do a dry run. Don't delete any files"""
|
||||||
dryRun: Boolean!
|
dryRun: Boolean!
|
||||||
}
|
}
|
||||||
@@ -67,15 +125,24 @@ input AutoTagMetadataInput {
|
|||||||
tags: [String!]
|
tags: [String!]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type AutoTagMetadataOptions {
|
||||||
|
"""IDs of performers to tag files with, or "*" for all"""
|
||||||
|
performers: [String!]
|
||||||
|
"""IDs of studios to tag files with, or "*" for all"""
|
||||||
|
studios: [String!]
|
||||||
|
"""IDs of tags to tag files with, or "*" for all"""
|
||||||
|
tags: [String!]
|
||||||
|
}
|
||||||
|
|
||||||
enum IdentifyFieldStrategy {
|
enum IdentifyFieldStrategy {
|
||||||
"""Never sets the field value"""
|
"""Never sets the field value"""
|
||||||
IGNORE
|
IGNORE
|
||||||
"""
|
"""
|
||||||
For multi-value fields, merge with existing.
|
For multi-value fields, merge with existing.
|
||||||
For single-value fields, ignore if already set
|
For single-value fields, ignore if already set
|
||||||
"""
|
"""
|
||||||
MERGE
|
MERGE
|
||||||
"""Always replaces the value if a value is found.
|
"""Always replaces the value if a value is found.
|
||||||
For multi-value fields, any existing values are removed and replaced with the
|
For multi-value fields, any existing values are removed and replaced with the
|
||||||
scraped values.
|
scraped values.
|
||||||
"""
|
"""
|
||||||
|
|||||||
7
graphql/schema/types/scalars.graphql
Normal file
7
graphql/schema/types/scalars.graphql
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
|
||||||
|
"""
|
||||||
|
Timestamp is a point in time. It is always output as RFC3339-compatible time points.
|
||||||
|
It can be input as a RFC3339 string, or as "<4h" for "4 hours in the past" or ">5m"
|
||||||
|
for "5 minutes in the future"
|
||||||
|
"""
|
||||||
|
scalar Timestamp
|
||||||
@@ -18,6 +18,7 @@ type ScenePathsType {
|
|||||||
chapters_vtt: String # Resolver
|
chapters_vtt: String # Resolver
|
||||||
sprite: String # Resolver
|
sprite: String # Resolver
|
||||||
funscript: String # Resolver
|
funscript: String # Resolver
|
||||||
|
interactive_heatmap: String # Resolver
|
||||||
}
|
}
|
||||||
|
|
||||||
type SceneMovie {
|
type SceneMovie {
|
||||||
@@ -39,6 +40,7 @@ type Scene {
|
|||||||
path: String!
|
path: String!
|
||||||
phash: String
|
phash: String
|
||||||
interactive: Boolean!
|
interactive: Boolean!
|
||||||
|
interactive_speed: Int
|
||||||
created_at: Time!
|
created_at: Time!
|
||||||
updated_at: Time!
|
updated_at: Time!
|
||||||
file_mod_time: Time
|
file_mod_time: Time
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
enum ScrapeType {
|
enum ScrapeType {
|
||||||
"""From text query"""
|
"""From text query"""
|
||||||
NAME
|
NAME
|
||||||
"""From existing object"""
|
"""From existing object"""
|
||||||
FRAGMENT
|
FRAGMENT
|
||||||
@@ -7,6 +7,22 @@ enum ScrapeType {
|
|||||||
URL
|
URL
|
||||||
}
|
}
|
||||||
|
|
||||||
|
"Type of the content a scraper generates"
|
||||||
|
enum ScrapeContentType {
|
||||||
|
GALLERY
|
||||||
|
MOVIE
|
||||||
|
PERFORMER
|
||||||
|
SCENE
|
||||||
|
}
|
||||||
|
|
||||||
|
"Scraped Content is the forming union over the different scrapers"
|
||||||
|
union ScrapedContent = ScrapedStudio
|
||||||
|
| ScrapedTag
|
||||||
|
| ScrapedScene
|
||||||
|
| ScrapedGallery
|
||||||
|
| ScrapedMovie
|
||||||
|
| ScrapedPerformer
|
||||||
|
|
||||||
type ScraperSpec {
|
type ScraperSpec {
|
||||||
"""URLs matching these can be scraped with"""
|
"""URLs matching these can be scraped with"""
|
||||||
urls: [String!]
|
urls: [String!]
|
||||||
@@ -26,6 +42,7 @@ type Scraper {
|
|||||||
movie: ScraperSpec
|
movie: ScraperSpec
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
type ScrapedStudio {
|
type ScrapedStudio {
|
||||||
"""Set if studio matched"""
|
"""Set if studio matched"""
|
||||||
stored_id: ID
|
stored_id: ID
|
||||||
|
|||||||
@@ -17,14 +17,31 @@ type imageBox struct {
|
|||||||
files []string
|
files []string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var imageExtensions = []string{
|
||||||
|
".jpg",
|
||||||
|
".jpeg",
|
||||||
|
".png",
|
||||||
|
".gif",
|
||||||
|
".svg",
|
||||||
|
".webp",
|
||||||
|
}
|
||||||
|
|
||||||
func newImageBox(box fs.FS) (*imageBox, error) {
|
func newImageBox(box fs.FS) (*imageBox, error) {
|
||||||
ret := &imageBox{
|
ret := &imageBox{
|
||||||
box: box,
|
box: box,
|
||||||
}
|
}
|
||||||
|
|
||||||
err := fs.WalkDir(box, ".", func(path string, d fs.DirEntry, err error) error {
|
err := fs.WalkDir(box, ".", func(path string, d fs.DirEntry, err error) error {
|
||||||
if !d.IsDir() {
|
if d.IsDir() {
|
||||||
ret.files = append(ret.files, path)
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
baseName := strings.ToLower(d.Name())
|
||||||
|
for _, ext := range imageExtensions {
|
||||||
|
if strings.HasSuffix(baseName, ext) {
|
||||||
|
ret.files = append(ret.files, path)
|
||||||
|
break
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ var matcher = language.NewMatcher([]language.Tag{
|
|||||||
language.MustParse("de-DE"),
|
language.MustParse("de-DE"),
|
||||||
language.MustParse("it-IT"),
|
language.MustParse("it-IT"),
|
||||||
language.MustParse("fr-FR"),
|
language.MustParse("fr-FR"),
|
||||||
|
language.MustParse("fi-FI"),
|
||||||
language.MustParse("pt-BR"),
|
language.MustParse("pt-BR"),
|
||||||
language.MustParse("sv-SE"),
|
language.MustParse("sv-SE"),
|
||||||
language.MustParse("zh-CN"),
|
language.MustParse("zh-CN"),
|
||||||
|
|||||||
@@ -7,13 +7,22 @@ import (
|
|||||||
"strconv"
|
"strconv"
|
||||||
|
|
||||||
"github.com/stashapp/stash/pkg/logger"
|
"github.com/stashapp/stash/pkg/logger"
|
||||||
|
"github.com/stashapp/stash/pkg/manager"
|
||||||
"github.com/stashapp/stash/pkg/models"
|
"github.com/stashapp/stash/pkg/models"
|
||||||
"github.com/stashapp/stash/pkg/plugin"
|
"github.com/stashapp/stash/pkg/plugin"
|
||||||
|
"github.com/stashapp/stash/pkg/scraper"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
|
// ErrNotImplemented is an error which means the given functionality isn't implemented by the API.
|
||||||
ErrNotImplemented = errors.New("not implemented")
|
ErrNotImplemented = errors.New("not implemented")
|
||||||
ErrNotSupported = errors.New("not supported")
|
|
||||||
|
// ErrNotSupported is returned whenever there's a test, which can be used to guard against the error,
|
||||||
|
// but the given parameters aren't supported by the system.
|
||||||
|
ErrNotSupported = errors.New("not supported")
|
||||||
|
|
||||||
|
// ErrInput signifies errors where the input isn't valid for some reason. And no more specific error exists.
|
||||||
|
ErrInput = errors.New("input error")
|
||||||
)
|
)
|
||||||
|
|
||||||
type hookExecutor interface {
|
type hookExecutor interface {
|
||||||
@@ -25,6 +34,10 @@ type Resolver struct {
|
|||||||
hookExecutor hookExecutor
|
hookExecutor hookExecutor
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (r *Resolver) scraperCache() *scraper.Cache {
|
||||||
|
return manager.GetInstance().ScraperCache
|
||||||
|
}
|
||||||
|
|
||||||
func (r *Resolver) Gallery() models.GalleryResolver {
|
func (r *Resolver) Gallery() models.GalleryResolver {
|
||||||
return &galleryResolver{r}
|
return &galleryResolver{r}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -61,6 +61,14 @@ func (r *sceneResolver) Rating(ctx context.Context, obj *models.Scene) (*int, er
|
|||||||
return nil, nil
|
return nil, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (r *sceneResolver) InteractiveSpeed(ctx context.Context, obj *models.Scene) (*int, error) {
|
||||||
|
if obj.InteractiveSpeed.Valid {
|
||||||
|
interactive_speed := int(obj.InteractiveSpeed.Int64)
|
||||||
|
return &interactive_speed, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.SceneFileType, error) {
|
func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.SceneFileType, error) {
|
||||||
width := int(obj.Width.Int64)
|
width := int(obj.Width.Int64)
|
||||||
height := int(obj.Height.Int64)
|
height := int(obj.Height.Int64)
|
||||||
@@ -89,16 +97,18 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S
|
|||||||
spritePath := builder.GetSpriteURL()
|
spritePath := builder.GetSpriteURL()
|
||||||
chaptersVttPath := builder.GetChaptersVTTURL()
|
chaptersVttPath := builder.GetChaptersVTTURL()
|
||||||
funscriptPath := builder.GetFunscriptURL()
|
funscriptPath := builder.GetFunscriptURL()
|
||||||
|
interactiveHeatmap := builder.GetInteractiveHeatmapURL()
|
||||||
|
|
||||||
return &models.ScenePathsType{
|
return &models.ScenePathsType{
|
||||||
Screenshot: &screenshotPath,
|
Screenshot: &screenshotPath,
|
||||||
Preview: &previewPath,
|
Preview: &previewPath,
|
||||||
Stream: &streamPath,
|
Stream: &streamPath,
|
||||||
Webp: &webpPath,
|
Webp: &webpPath,
|
||||||
Vtt: &vttPath,
|
Vtt: &vttPath,
|
||||||
ChaptersVtt: &chaptersVttPath,
|
ChaptersVtt: &chaptersVttPath,
|
||||||
Sprite: &spritePath,
|
Sprite: &spritePath,
|
||||||
Funscript: &funscriptPath,
|
Funscript: &funscriptPath,
|
||||||
|
InteractiveHeatmap: &interactiveHeatmap,
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -57,6 +57,20 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
validateDir := func(key string, value string, optional bool) error {
|
||||||
|
if err := checkConfigOverride(config.Metadata); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if !optional || value != "" {
|
||||||
|
if err := utils.EnsureDir(value); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
existingDBPath := c.GetDatabasePath()
|
existingDBPath := c.GetDatabasePath()
|
||||||
if input.DatabasePath != nil && existingDBPath != *input.DatabasePath {
|
if input.DatabasePath != nil && existingDBPath != *input.DatabasePath {
|
||||||
if err := checkConfigOverride(config.Database); err != nil {
|
if err := checkConfigOverride(config.Database); err != nil {
|
||||||
@@ -72,64 +86,70 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
|
|||||||
|
|
||||||
existingGeneratedPath := c.GetGeneratedPath()
|
existingGeneratedPath := c.GetGeneratedPath()
|
||||||
if input.GeneratedPath != nil && existingGeneratedPath != *input.GeneratedPath {
|
if input.GeneratedPath != nil && existingGeneratedPath != *input.GeneratedPath {
|
||||||
if err := checkConfigOverride(config.Generated); err != nil {
|
if err := validateDir(config.Generated, *input.GeneratedPath, false); err != nil {
|
||||||
return makeConfigGeneralResult(), err
|
return makeConfigGeneralResult(), err
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := utils.EnsureDir(*input.GeneratedPath); err != nil {
|
c.Set(config.Generated, input.GeneratedPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
refreshScraperCache := false
|
||||||
|
existingScrapersPath := c.GetScrapersPath()
|
||||||
|
if input.ScrapersPath != nil && existingScrapersPath != *input.ScrapersPath {
|
||||||
|
if err := validateDir(config.ScrapersPath, *input.ScrapersPath, false); err != nil {
|
||||||
return makeConfigGeneralResult(), err
|
return makeConfigGeneralResult(), err
|
||||||
}
|
}
|
||||||
c.Set(config.Generated, input.GeneratedPath)
|
|
||||||
|
refreshScraperCache = true
|
||||||
|
c.Set(config.ScrapersPath, input.ScrapersPath)
|
||||||
}
|
}
|
||||||
|
|
||||||
existingMetadataPath := c.GetMetadataPath()
|
existingMetadataPath := c.GetMetadataPath()
|
||||||
if input.MetadataPath != nil && existingMetadataPath != *input.MetadataPath {
|
if input.MetadataPath != nil && existingMetadataPath != *input.MetadataPath {
|
||||||
if err := checkConfigOverride(config.Metadata); err != nil {
|
if err := validateDir(config.Metadata, *input.MetadataPath, true); err != nil {
|
||||||
return makeConfigGeneralResult(), err
|
return makeConfigGeneralResult(), err
|
||||||
}
|
}
|
||||||
|
|
||||||
if *input.MetadataPath != "" {
|
|
||||||
if err := utils.EnsureDir(*input.MetadataPath); err != nil {
|
|
||||||
return makeConfigGeneralResult(), err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
c.Set(config.Metadata, input.MetadataPath)
|
c.Set(config.Metadata, input.MetadataPath)
|
||||||
}
|
}
|
||||||
|
|
||||||
existingCachePath := c.GetCachePath()
|
existingCachePath := c.GetCachePath()
|
||||||
if input.CachePath != nil && existingCachePath != *input.CachePath {
|
if input.CachePath != nil && existingCachePath != *input.CachePath {
|
||||||
if err := checkConfigOverride(config.Metadata); err != nil {
|
if err := validateDir(config.Cache, *input.CachePath, true); err != nil {
|
||||||
return makeConfigGeneralResult(), err
|
return makeConfigGeneralResult(), err
|
||||||
}
|
}
|
||||||
|
|
||||||
if *input.CachePath != "" {
|
|
||||||
if err := utils.EnsureDir(*input.CachePath); err != nil {
|
|
||||||
return makeConfigGeneralResult(), err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
c.Set(config.Cache, input.CachePath)
|
c.Set(config.Cache, input.CachePath)
|
||||||
}
|
}
|
||||||
|
|
||||||
if !input.CalculateMd5 && input.VideoFileNamingAlgorithm == models.HashAlgorithmMd5 {
|
if input.VideoFileNamingAlgorithm != nil && *input.VideoFileNamingAlgorithm != c.GetVideoFileNamingAlgorithm() {
|
||||||
return makeConfigGeneralResult(), errors.New("calculateMD5 must be true if using MD5")
|
calculateMD5 := c.IsCalculateMD5()
|
||||||
}
|
if input.CalculateMd5 != nil {
|
||||||
|
calculateMD5 = *input.CalculateMd5
|
||||||
|
}
|
||||||
|
if !calculateMD5 && *input.VideoFileNamingAlgorithm == models.HashAlgorithmMd5 {
|
||||||
|
return makeConfigGeneralResult(), errors.New("calculateMD5 must be true if using MD5")
|
||||||
|
}
|
||||||
|
|
||||||
if input.VideoFileNamingAlgorithm != c.GetVideoFileNamingAlgorithm() {
|
|
||||||
// validate changing VideoFileNamingAlgorithm
|
// validate changing VideoFileNamingAlgorithm
|
||||||
if err := manager.ValidateVideoFileNamingAlgorithm(r.txnManager, input.VideoFileNamingAlgorithm); err != nil {
|
if err := manager.ValidateVideoFileNamingAlgorithm(r.txnManager, *input.VideoFileNamingAlgorithm); err != nil {
|
||||||
return makeConfigGeneralResult(), err
|
return makeConfigGeneralResult(), err
|
||||||
}
|
}
|
||||||
|
|
||||||
c.Set(config.VideoFileNamingAlgorithm, input.VideoFileNamingAlgorithm)
|
c.Set(config.VideoFileNamingAlgorithm, *input.VideoFileNamingAlgorithm)
|
||||||
}
|
}
|
||||||
|
|
||||||
c.Set(config.CalculateMD5, input.CalculateMd5)
|
if input.CalculateMd5 != nil {
|
||||||
|
c.Set(config.CalculateMD5, *input.CalculateMd5)
|
||||||
|
}
|
||||||
|
|
||||||
if input.ParallelTasks != nil {
|
if input.ParallelTasks != nil {
|
||||||
c.Set(config.ParallelTasks, *input.ParallelTasks)
|
c.Set(config.ParallelTasks, *input.ParallelTasks)
|
||||||
}
|
}
|
||||||
|
|
||||||
c.Set(config.PreviewAudio, input.PreviewAudio)
|
if input.PreviewAudio != nil {
|
||||||
|
c.Set(config.PreviewAudio, *input.PreviewAudio)
|
||||||
|
}
|
||||||
|
|
||||||
if input.PreviewSegments != nil {
|
if input.PreviewSegments != nil {
|
||||||
c.Set(config.PreviewSegments, *input.PreviewSegments)
|
c.Set(config.PreviewSegments, *input.PreviewSegments)
|
||||||
@@ -185,12 +205,17 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
|
|||||||
c.Set(config.LogFile, input.LogFile)
|
c.Set(config.LogFile, input.LogFile)
|
||||||
}
|
}
|
||||||
|
|
||||||
c.Set(config.LogOut, input.LogOut)
|
if input.LogOut != nil {
|
||||||
c.Set(config.LogAccess, input.LogAccess)
|
c.Set(config.LogOut, *input.LogOut)
|
||||||
|
}
|
||||||
|
|
||||||
if input.LogLevel != c.GetLogLevel() {
|
if input.LogAccess != nil {
|
||||||
|
c.Set(config.LogAccess, *input.LogAccess)
|
||||||
|
}
|
||||||
|
|
||||||
|
if input.LogLevel != nil && *input.LogLevel != c.GetLogLevel() {
|
||||||
c.Set(config.LogLevel, input.LogLevel)
|
c.Set(config.LogLevel, input.LogLevel)
|
||||||
logger.SetLogLevel(input.LogLevel)
|
logger.SetLogLevel(*input.LogLevel)
|
||||||
}
|
}
|
||||||
|
|
||||||
if input.Excludes != nil {
|
if input.Excludes != nil {
|
||||||
@@ -213,14 +238,15 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
|
|||||||
c.Set(config.GalleryExtensions, input.GalleryExtensions)
|
c.Set(config.GalleryExtensions, input.GalleryExtensions)
|
||||||
}
|
}
|
||||||
|
|
||||||
c.Set(config.CreateGalleriesFromFolders, input.CreateGalleriesFromFolders)
|
if input.CreateGalleriesFromFolders != nil {
|
||||||
|
c.Set(config.CreateGalleriesFromFolders, input.CreateGalleriesFromFolders)
|
||||||
|
}
|
||||||
|
|
||||||
if input.CustomPerformerImageLocation != nil {
|
if input.CustomPerformerImageLocation != nil {
|
||||||
c.Set(config.CustomPerformerImageLocation, *input.CustomPerformerImageLocation)
|
c.Set(config.CustomPerformerImageLocation, *input.CustomPerformerImageLocation)
|
||||||
initialiseCustomImages()
|
initialiseCustomImages()
|
||||||
}
|
}
|
||||||
|
|
||||||
refreshScraperCache := false
|
|
||||||
if input.ScraperUserAgent != nil {
|
if input.ScraperUserAgent != nil {
|
||||||
c.Set(config.ScraperUserAgent, input.ScraperUserAgent)
|
c.Set(config.ScraperUserAgent, input.ScraperUserAgent)
|
||||||
refreshScraperCache = true
|
refreshScraperCache = true
|
||||||
@@ -293,14 +319,10 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.
|
|||||||
c.Set(config.SlideshowDelay, *input.SlideshowDelay)
|
c.Set(config.SlideshowDelay, *input.SlideshowDelay)
|
||||||
}
|
}
|
||||||
|
|
||||||
css := ""
|
|
||||||
|
|
||||||
if input.CSS != nil {
|
if input.CSS != nil {
|
||||||
css = *input.CSS
|
c.SetCSS(*input.CSS)
|
||||||
}
|
}
|
||||||
|
|
||||||
c.SetCSS(css)
|
|
||||||
|
|
||||||
setBool(config.CSSEnabled, input.CSSEnabled)
|
setBool(config.CSSEnabled, input.CSSEnabled)
|
||||||
|
|
||||||
if input.DisableDropdownCreate != nil {
|
if input.DisableDropdownCreate != nil {
|
||||||
@@ -332,7 +354,9 @@ func (r *mutationResolver) ConfigureDlna(ctx context.Context, input models.Confi
|
|||||||
c.Set(config.DLNAServerName, *input.ServerName)
|
c.Set(config.DLNAServerName, *input.ServerName)
|
||||||
}
|
}
|
||||||
|
|
||||||
c.Set(config.DLNADefaultIPWhitelist, input.WhitelistedIPs)
|
if input.WhitelistedIPs != nil {
|
||||||
|
c.Set(config.DLNADefaultIPWhitelist, input.WhitelistedIPs)
|
||||||
|
}
|
||||||
|
|
||||||
currentDLNAEnabled := c.GetDLNADefaultEnabled()
|
currentDLNAEnabled := c.GetDLNADefaultEnabled()
|
||||||
if input.Enabled != nil && *input.Enabled != currentDLNAEnabled {
|
if input.Enabled != nil && *input.Enabled != currentDLNAEnabled {
|
||||||
@@ -349,7 +373,9 @@ func (r *mutationResolver) ConfigureDlna(ctx context.Context, input models.Confi
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
c.Set(config.DLNAInterfaces, input.Interfaces)
|
if input.Interfaces != nil {
|
||||||
|
c.Set(config.DLNAInterfaces, input.Interfaces)
|
||||||
|
}
|
||||||
|
|
||||||
if err := c.Write(); err != nil {
|
if err := c.Write(); err != nil {
|
||||||
return makeConfigDLNAResult(), err
|
return makeConfigDLNAResult(), err
|
||||||
@@ -376,7 +402,10 @@ func (r *mutationResolver) ConfigureScraping(ctx context.Context, input models.C
|
|||||||
c.Set(config.ScraperExcludeTagPatterns, input.ExcludeTagPatterns)
|
c.Set(config.ScraperExcludeTagPatterns, input.ExcludeTagPatterns)
|
||||||
}
|
}
|
||||||
|
|
||||||
c.Set(config.ScraperCertCheck, input.ScraperCertCheck)
|
if input.ScraperCertCheck != nil {
|
||||||
|
c.Set(config.ScraperCertCheck, input.ScraperCertCheck)
|
||||||
|
}
|
||||||
|
|
||||||
if refreshScraperCache {
|
if refreshScraperCache {
|
||||||
manager.GetInstance().RefreshScraperCache()
|
manager.GetInstance().RefreshScraperCache()
|
||||||
}
|
}
|
||||||
@@ -394,6 +423,18 @@ func (r *mutationResolver) ConfigureDefaults(ctx context.Context, input models.C
|
|||||||
c.Set(config.DefaultIdentifySettings, input.Identify)
|
c.Set(config.DefaultIdentifySettings, input.Identify)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if input.Scan != nil {
|
||||||
|
c.Set(config.DefaultScanSettings, input.Scan)
|
||||||
|
}
|
||||||
|
|
||||||
|
if input.AutoTag != nil {
|
||||||
|
c.Set(config.DefaultAutoTagSettings, input.AutoTag)
|
||||||
|
}
|
||||||
|
|
||||||
|
if input.Generate != nil {
|
||||||
|
c.Set(config.DefaultGenerateSettings, input.Generate)
|
||||||
|
}
|
||||||
|
|
||||||
if input.DeleteFile != nil {
|
if input.DeleteFile != nil {
|
||||||
c.Set(config.DeleteFileDefault, *input.DeleteFile)
|
c.Set(config.DeleteFileDefault, *input.DeleteFile)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,9 +5,12 @@ import (
|
|||||||
"database/sql"
|
"database/sql"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"os"
|
||||||
"strconv"
|
"strconv"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/file"
|
||||||
|
"github.com/stashapp/stash/pkg/image"
|
||||||
"github.com/stashapp/stash/pkg/manager"
|
"github.com/stashapp/stash/pkg/manager"
|
||||||
"github.com/stashapp/stash/pkg/models"
|
"github.com/stashapp/stash/pkg/models"
|
||||||
"github.com/stashapp/stash/pkg/plugin"
|
"github.com/stashapp/stash/pkg/plugin"
|
||||||
@@ -395,8 +398,14 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
|
|||||||
}
|
}
|
||||||
|
|
||||||
var galleries []*models.Gallery
|
var galleries []*models.Gallery
|
||||||
var imgsToPostProcess []*models.Image
|
var imgsDestroyed []*models.Image
|
||||||
var imgsToDelete []*models.Image
|
fileDeleter := &image.FileDeleter{
|
||||||
|
Deleter: *file.NewDeleter(),
|
||||||
|
Paths: manager.GetInstance().Paths,
|
||||||
|
}
|
||||||
|
|
||||||
|
deleteGenerated := utils.IsTrue(input.DeleteGenerated)
|
||||||
|
deleteFile := utils.IsTrue(input.DeleteFile)
|
||||||
|
|
||||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||||
qb := repo.Gallery()
|
qb := repo.Gallery()
|
||||||
@@ -422,13 +431,19 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
|
|||||||
}
|
}
|
||||||
|
|
||||||
for _, img := range imgs {
|
for _, img := range imgs {
|
||||||
if err := iqb.Destroy(img.ID); err != nil {
|
if err := image.Destroy(img, iqb, fileDeleter, deleteGenerated, false); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
imgsToPostProcess = append(imgsToPostProcess, img)
|
imgsDestroyed = append(imgsDestroyed, img)
|
||||||
}
|
}
|
||||||
} else if input.DeleteFile != nil && *input.DeleteFile {
|
|
||||||
|
if deleteFile {
|
||||||
|
if err := fileDeleter.Files([]string{gallery.Path.String}); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if deleteFile {
|
||||||
// Delete image if it is only attached to this gallery
|
// Delete image if it is only attached to this gallery
|
||||||
imgs, err := iqb.FindByGalleryID(id)
|
imgs, err := iqb.FindByGalleryID(id)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -442,14 +457,16 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
|
|||||||
}
|
}
|
||||||
|
|
||||||
if len(imgGalleries) == 1 {
|
if len(imgGalleries) == 1 {
|
||||||
if err := iqb.Destroy(img.ID); err != nil {
|
if err := image.Destroy(img, iqb, fileDeleter, deleteGenerated, deleteFile); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
imgsToDelete = append(imgsToDelete, img)
|
imgsDestroyed = append(imgsDestroyed, img)
|
||||||
imgsToPostProcess = append(imgsToPostProcess, img)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// we only want to delete a folder-based gallery if it is empty.
|
||||||
|
// don't do this with the file deleter
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := qb.Destroy(id); err != nil {
|
if err := qb.Destroy(id); err != nil {
|
||||||
@@ -459,44 +476,53 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
|
|||||||
|
|
||||||
return nil
|
return nil
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
|
fileDeleter.Rollback()
|
||||||
return false, err
|
return false, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// if delete file is true, then delete the file as well
|
// perform the post-commit actions
|
||||||
// if it fails, just log a message
|
fileDeleter.Commit()
|
||||||
if input.DeleteFile != nil && *input.DeleteFile {
|
|
||||||
// #1804 - delete the image files first, since they must be removed
|
|
||||||
// before deleting a folder
|
|
||||||
for _, img := range imgsToDelete {
|
|
||||||
manager.DeleteImageFile(img)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, gallery := range galleries {
|
for _, gallery := range galleries {
|
||||||
manager.DeleteGalleryFile(gallery)
|
// don't delete stash library paths
|
||||||
}
|
if utils.IsTrue(input.DeleteFile) && !gallery.Zip && gallery.Path.Valid && !isStashPath(gallery.Path.String) {
|
||||||
}
|
// try to remove the folder - it is possible that it is not empty
|
||||||
|
// so swallow the error if present
|
||||||
// if delete generated is true, then delete the generated files
|
_ = os.Remove(gallery.Path.String)
|
||||||
// for the gallery
|
|
||||||
if input.DeleteGenerated != nil && *input.DeleteGenerated {
|
|
||||||
for _, img := range imgsToPostProcess {
|
|
||||||
manager.DeleteGeneratedImageFiles(img)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// call post hook after performing the other actions
|
// call post hook after performing the other actions
|
||||||
for _, gallery := range galleries {
|
for _, gallery := range galleries {
|
||||||
r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryDestroyPost, input, nil)
|
r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
|
||||||
|
GalleryDestroyInput: input,
|
||||||
|
Checksum: gallery.Checksum,
|
||||||
|
Path: gallery.Path.String,
|
||||||
|
}, nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
// call image destroy post hook as well
|
// call image destroy post hook as well
|
||||||
for _, img := range imgsToDelete {
|
for _, img := range imgsDestroyed {
|
||||||
r.hookExecutor.ExecutePostHooks(ctx, img.ID, plugin.ImageDestroyPost, nil, nil)
|
r.hookExecutor.ExecutePostHooks(ctx, img.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
|
||||||
|
Checksum: img.Checksum,
|
||||||
|
Path: img.Path,
|
||||||
|
}, nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
return true, nil
|
return true, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func isStashPath(path string) bool {
|
||||||
|
stashConfigs := manager.GetInstance().Config.GetStashPaths()
|
||||||
|
for _, config := range stashConfigs {
|
||||||
|
if path == config.Path {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
func (r *mutationResolver) AddGalleryImages(ctx context.Context, input models.GalleryAddInput) (bool, error) {
|
func (r *mutationResolver) AddGalleryImages(ctx context.Context, input models.GalleryAddInput) (bool, error) {
|
||||||
galleryID, err := strconv.Atoi(input.GalleryID)
|
galleryID, err := strconv.Atoi(input.GalleryID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|||||||
@@ -6,6 +6,8 @@ import (
|
|||||||
"strconv"
|
"strconv"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/file"
|
||||||
|
"github.com/stashapp/stash/pkg/image"
|
||||||
"github.com/stashapp/stash/pkg/manager"
|
"github.com/stashapp/stash/pkg/manager"
|
||||||
"github.com/stashapp/stash/pkg/models"
|
"github.com/stashapp/stash/pkg/models"
|
||||||
"github.com/stashapp/stash/pkg/plugin"
|
"github.com/stashapp/stash/pkg/plugin"
|
||||||
@@ -281,38 +283,38 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
|
|||||||
return false, err
|
return false, err
|
||||||
}
|
}
|
||||||
|
|
||||||
var image *models.Image
|
var i *models.Image
|
||||||
|
fileDeleter := &image.FileDeleter{
|
||||||
|
Deleter: *file.NewDeleter(),
|
||||||
|
Paths: manager.GetInstance().Paths,
|
||||||
|
}
|
||||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||||
qb := repo.Image()
|
qb := repo.Image()
|
||||||
|
|
||||||
image, err = qb.Find(imageID)
|
i, err = qb.Find(imageID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if image == nil {
|
if i == nil {
|
||||||
return fmt.Errorf("image with id %d not found", imageID)
|
return fmt.Errorf("image with id %d not found", imageID)
|
||||||
}
|
}
|
||||||
|
|
||||||
return qb.Destroy(imageID)
|
return image.Destroy(i, qb, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile))
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
|
fileDeleter.Rollback()
|
||||||
return false, err
|
return false, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// if delete generated is true, then delete the generated files
|
// perform the post-commit actions
|
||||||
// for the image
|
fileDeleter.Commit()
|
||||||
if input.DeleteGenerated != nil && *input.DeleteGenerated {
|
|
||||||
manager.DeleteGeneratedImageFiles(image)
|
|
||||||
}
|
|
||||||
|
|
||||||
// if delete file is true, then delete the file as well
|
|
||||||
// if it fails, just log a message
|
|
||||||
if input.DeleteFile != nil && *input.DeleteFile {
|
|
||||||
manager.DeleteImageFile(image)
|
|
||||||
}
|
|
||||||
|
|
||||||
// call post hook after performing the other actions
|
// call post hook after performing the other actions
|
||||||
r.hookExecutor.ExecutePostHooks(ctx, image.ID, plugin.ImageDestroyPost, input, nil)
|
r.hookExecutor.ExecutePostHooks(ctx, i.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
|
||||||
|
ImageDestroyInput: input,
|
||||||
|
Checksum: i.Checksum,
|
||||||
|
Path: i.Path,
|
||||||
|
}, nil)
|
||||||
|
|
||||||
return true, nil
|
return true, nil
|
||||||
}
|
}
|
||||||
@@ -324,46 +326,47 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
|
|||||||
}
|
}
|
||||||
|
|
||||||
var images []*models.Image
|
var images []*models.Image
|
||||||
|
fileDeleter := &image.FileDeleter{
|
||||||
|
Deleter: *file.NewDeleter(),
|
||||||
|
Paths: manager.GetInstance().Paths,
|
||||||
|
}
|
||||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||||
qb := repo.Image()
|
qb := repo.Image()
|
||||||
|
|
||||||
for _, imageID := range imageIDs {
|
for _, imageID := range imageIDs {
|
||||||
|
|
||||||
image, err := qb.Find(imageID)
|
i, err := qb.Find(imageID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if image == nil {
|
if i == nil {
|
||||||
return fmt.Errorf("image with id %d not found", imageID)
|
return fmt.Errorf("image with id %d not found", imageID)
|
||||||
}
|
}
|
||||||
|
|
||||||
images = append(images, image)
|
images = append(images, i)
|
||||||
if err := qb.Destroy(imageID); err != nil {
|
|
||||||
|
if err := image.Destroy(i, qb, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
|
fileDeleter.Rollback()
|
||||||
return false, err
|
return false, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// perform the post-commit actions
|
||||||
|
fileDeleter.Commit()
|
||||||
|
|
||||||
for _, image := range images {
|
for _, image := range images {
|
||||||
// if delete generated is true, then delete the generated files
|
|
||||||
// for the image
|
|
||||||
if input.DeleteGenerated != nil && *input.DeleteGenerated {
|
|
||||||
manager.DeleteGeneratedImageFiles(image)
|
|
||||||
}
|
|
||||||
|
|
||||||
// if delete file is true, then delete the file as well
|
|
||||||
// if it fails, just log a message
|
|
||||||
if input.DeleteFile != nil && *input.DeleteFile {
|
|
||||||
manager.DeleteImageFile(image)
|
|
||||||
}
|
|
||||||
|
|
||||||
// call post hook after performing the other actions
|
// call post hook after performing the other actions
|
||||||
r.hookExecutor.ExecutePostHooks(ctx, image.ID, plugin.ImageDestroyPost, input, nil)
|
r.hookExecutor.ExecutePostHooks(ctx, image.ID, plugin.ImageDestroyPost, plugin.ImagesDestroyInput{
|
||||||
|
ImagesDestroyInput: input,
|
||||||
|
Checksum: image.Checksum,
|
||||||
|
Path: image.Path,
|
||||||
|
}, nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
return true, nil
|
return true, nil
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ import (
|
|||||||
"strconv"
|
"strconv"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/file"
|
||||||
"github.com/stashapp/stash/pkg/manager"
|
"github.com/stashapp/stash/pkg/manager"
|
||||||
"github.com/stashapp/stash/pkg/manager/config"
|
"github.com/stashapp/stash/pkg/manager/config"
|
||||||
"github.com/stashapp/stash/pkg/models"
|
"github.com/stashapp/stash/pkg/models"
|
||||||
@@ -456,96 +457,105 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
|
|||||||
return false, err
|
return false, err
|
||||||
}
|
}
|
||||||
|
|
||||||
var scene *models.Scene
|
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
|
||||||
var postCommitFunc func()
|
|
||||||
|
var s *models.Scene
|
||||||
|
fileDeleter := &scene.FileDeleter{
|
||||||
|
Deleter: *file.NewDeleter(),
|
||||||
|
FileNamingAlgo: fileNamingAlgo,
|
||||||
|
Paths: manager.GetInstance().Paths,
|
||||||
|
}
|
||||||
|
|
||||||
|
deleteGenerated := utils.IsTrue(input.DeleteGenerated)
|
||||||
|
deleteFile := utils.IsTrue(input.DeleteFile)
|
||||||
|
|
||||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||||
qb := repo.Scene()
|
qb := repo.Scene()
|
||||||
var err error
|
var err error
|
||||||
scene, err = qb.Find(sceneID)
|
s, err = qb.Find(sceneID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if scene == nil {
|
if s == nil {
|
||||||
return fmt.Errorf("scene with id %d not found", sceneID)
|
return fmt.Errorf("scene with id %d not found", sceneID)
|
||||||
}
|
}
|
||||||
|
|
||||||
postCommitFunc, err = manager.DestroyScene(scene, repo)
|
// kill any running encoders
|
||||||
return err
|
manager.KillRunningStreams(s, fileNamingAlgo)
|
||||||
|
|
||||||
|
return scene.Destroy(s, repo, fileDeleter, deleteGenerated, deleteFile)
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
|
fileDeleter.Rollback()
|
||||||
return false, err
|
return false, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// perform the post-commit actions
|
// perform the post-commit actions
|
||||||
postCommitFunc()
|
fileDeleter.Commit()
|
||||||
|
|
||||||
// if delete generated is true, then delete the generated files
|
|
||||||
// for the scene
|
|
||||||
if input.DeleteGenerated != nil && *input.DeleteGenerated {
|
|
||||||
manager.DeleteGeneratedSceneFiles(scene, config.GetInstance().GetVideoFileNamingAlgorithm())
|
|
||||||
}
|
|
||||||
|
|
||||||
// if delete file is true, then delete the file as well
|
|
||||||
// if it fails, just log a message
|
|
||||||
if input.DeleteFile != nil && *input.DeleteFile {
|
|
||||||
manager.DeleteSceneFile(scene)
|
|
||||||
}
|
|
||||||
|
|
||||||
// call post hook after performing the other actions
|
// call post hook after performing the other actions
|
||||||
r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneDestroyPost, input, nil)
|
r.hookExecutor.ExecutePostHooks(ctx, s.ID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{
|
||||||
|
SceneDestroyInput: input,
|
||||||
|
Checksum: s.Checksum.String,
|
||||||
|
OSHash: s.OSHash.String,
|
||||||
|
Path: s.Path,
|
||||||
|
}, nil)
|
||||||
|
|
||||||
return true, nil
|
return true, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.ScenesDestroyInput) (bool, error) {
|
func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.ScenesDestroyInput) (bool, error) {
|
||||||
var scenes []*models.Scene
|
var scenes []*models.Scene
|
||||||
var postCommitFuncs []func()
|
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
|
||||||
|
|
||||||
|
fileDeleter := &scene.FileDeleter{
|
||||||
|
Deleter: *file.NewDeleter(),
|
||||||
|
FileNamingAlgo: fileNamingAlgo,
|
||||||
|
Paths: manager.GetInstance().Paths,
|
||||||
|
}
|
||||||
|
|
||||||
|
deleteGenerated := utils.IsTrue(input.DeleteGenerated)
|
||||||
|
deleteFile := utils.IsTrue(input.DeleteFile)
|
||||||
|
|
||||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||||
qb := repo.Scene()
|
qb := repo.Scene()
|
||||||
|
|
||||||
for _, id := range input.Ids {
|
for _, id := range input.Ids {
|
||||||
sceneID, _ := strconv.Atoi(id)
|
sceneID, _ := strconv.Atoi(id)
|
||||||
|
|
||||||
scene, err := qb.Find(sceneID)
|
s, err := qb.Find(sceneID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if scene != nil {
|
if s != nil {
|
||||||
scenes = append(scenes, scene)
|
scenes = append(scenes, s)
|
||||||
}
|
|
||||||
f, err := manager.DestroyScene(scene, repo)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
}
|
||||||
|
|
||||||
postCommitFuncs = append(postCommitFuncs, f)
|
// kill any running encoders
|
||||||
|
manager.KillRunningStreams(s, fileNamingAlgo)
|
||||||
|
|
||||||
|
if err := scene.Destroy(s, repo, fileDeleter, deleteGenerated, deleteFile); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
|
fileDeleter.Rollback()
|
||||||
return false, err
|
return false, err
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, f := range postCommitFuncs {
|
// perform the post-commit actions
|
||||||
f()
|
fileDeleter.Commit()
|
||||||
}
|
|
||||||
|
|
||||||
fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm()
|
|
||||||
for _, scene := range scenes {
|
for _, scene := range scenes {
|
||||||
// if delete generated is true, then delete the generated files
|
|
||||||
// for the scene
|
|
||||||
if input.DeleteGenerated != nil && *input.DeleteGenerated {
|
|
||||||
manager.DeleteGeneratedSceneFiles(scene, fileNamingAlgo)
|
|
||||||
}
|
|
||||||
|
|
||||||
// if delete file is true, then delete the file as well
|
|
||||||
// if it fails, just log a message
|
|
||||||
if input.DeleteFile != nil && *input.DeleteFile {
|
|
||||||
manager.DeleteSceneFile(scene)
|
|
||||||
}
|
|
||||||
|
|
||||||
// call post hook after performing the other actions
|
// call post hook after performing the other actions
|
||||||
r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneDestroyPost, input, nil)
|
r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneDestroyPost, plugin.ScenesDestroyInput{
|
||||||
|
ScenesDestroyInput: input,
|
||||||
|
Checksum: scene.Checksum.String,
|
||||||
|
OSHash: scene.OSHash.String,
|
||||||
|
Path: scene.Path,
|
||||||
|
}, nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
return true, nil
|
return true, nil
|
||||||
@@ -646,7 +656,14 @@ func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (b
|
|||||||
return false, err
|
return false, err
|
||||||
}
|
}
|
||||||
|
|
||||||
var postCommitFunc func()
|
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
|
||||||
|
|
||||||
|
fileDeleter := &scene.FileDeleter{
|
||||||
|
Deleter: *file.NewDeleter(),
|
||||||
|
FileNamingAlgo: fileNamingAlgo,
|
||||||
|
Paths: manager.GetInstance().Paths,
|
||||||
|
}
|
||||||
|
|
||||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||||
qb := repo.SceneMarker()
|
qb := repo.SceneMarker()
|
||||||
sqb := repo.Scene()
|
sqb := repo.Scene()
|
||||||
@@ -661,18 +678,19 @@ func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (b
|
|||||||
return fmt.Errorf("scene marker with id %d not found", markerID)
|
return fmt.Errorf("scene marker with id %d not found", markerID)
|
||||||
}
|
}
|
||||||
|
|
||||||
scene, err := sqb.Find(int(marker.SceneID.Int64))
|
s, err := sqb.Find(int(marker.SceneID.Int64))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
postCommitFunc, err = manager.DestroySceneMarker(scene, marker, qb)
|
return scene.DestroyMarker(s, marker, qb, fileDeleter)
|
||||||
return err
|
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
|
fileDeleter.Rollback()
|
||||||
return false, err
|
return false, err
|
||||||
}
|
}
|
||||||
|
|
||||||
postCommitFunc()
|
// perform the post-commit actions
|
||||||
|
fileDeleter.Commit()
|
||||||
|
|
||||||
r.hookExecutor.ExecutePostHooks(ctx, markerID, plugin.SceneMarkerDestroyPost, id, nil)
|
r.hookExecutor.ExecutePostHooks(ctx, markerID, plugin.SceneMarkerDestroyPost, id, nil)
|
||||||
|
|
||||||
@@ -682,7 +700,15 @@ func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (b
|
|||||||
func (r *mutationResolver) changeMarker(ctx context.Context, changeType int, changedMarker models.SceneMarker, tagIDs []int) (*models.SceneMarker, error) {
|
func (r *mutationResolver) changeMarker(ctx context.Context, changeType int, changedMarker models.SceneMarker, tagIDs []int) (*models.SceneMarker, error) {
|
||||||
var existingMarker *models.SceneMarker
|
var existingMarker *models.SceneMarker
|
||||||
var sceneMarker *models.SceneMarker
|
var sceneMarker *models.SceneMarker
|
||||||
var scene *models.Scene
|
var s *models.Scene
|
||||||
|
|
||||||
|
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
|
||||||
|
|
||||||
|
fileDeleter := &scene.FileDeleter{
|
||||||
|
Deleter: *file.NewDeleter(),
|
||||||
|
FileNamingAlgo: fileNamingAlgo,
|
||||||
|
Paths: manager.GetInstance().Paths,
|
||||||
|
}
|
||||||
|
|
||||||
// Start the transaction and save the scene marker
|
// Start the transaction and save the scene marker
|
||||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||||
@@ -704,26 +730,31 @@ func (r *mutationResolver) changeMarker(ctx context.Context, changeType int, cha
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
scene, err = sqb.Find(int(existingMarker.SceneID.Int64))
|
s, err = sqb.Find(int(existingMarker.SceneID.Int64))
|
||||||
}
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// remove the marker preview if the timestamp was changed
|
||||||
|
if s != nil && existingMarker != nil && existingMarker.Seconds != changedMarker.Seconds {
|
||||||
|
seconds := int(existingMarker.Seconds)
|
||||||
|
if err := fileDeleter.MarkMarkerFiles(s, seconds); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Save the marker tags
|
// Save the marker tags
|
||||||
// If this tag is the primary tag, then let's not add it.
|
// If this tag is the primary tag, then let's not add it.
|
||||||
tagIDs = utils.IntExclude(tagIDs, []int{changedMarker.PrimaryTagID})
|
tagIDs = utils.IntExclude(tagIDs, []int{changedMarker.PrimaryTagID})
|
||||||
return qb.UpdateTags(sceneMarker.ID, tagIDs)
|
return qb.UpdateTags(sceneMarker.ID, tagIDs)
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
|
fileDeleter.Rollback()
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// remove the marker preview if the timestamp was changed
|
// perform the post-commit actions
|
||||||
if scene != nil && existingMarker != nil && existingMarker.Seconds != changedMarker.Seconds {
|
fileDeleter.Commit()
|
||||||
seconds := int(existingMarker.Seconds)
|
|
||||||
manager.DeleteSceneMarkerFiles(scene, seconds, config.GetInstance().GetVideoFileNamingAlgorithm())
|
|
||||||
}
|
|
||||||
|
|
||||||
return sceneMarker, nil
|
return sceneMarker, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -20,7 +20,7 @@ func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input
|
|||||||
|
|
||||||
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager)
|
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager)
|
||||||
|
|
||||||
return client.SubmitStashBoxFingerprints(input.SceneIds, boxes[input.StashBoxIndex].Endpoint)
|
return client.SubmitStashBoxFingerprints(ctx, input.SceneIds, boxes[input.StashBoxIndex].Endpoint)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *mutationResolver) StashBoxBatchPerformerTag(ctx context.Context, input models.StashBoxBatchPerformerTagInput) (string, error) {
|
func (r *mutationResolver) StashBoxBatchPerformerTag(ctx context.Context, input models.StashBoxBatchPerformerTagInput) (string, error) {
|
||||||
|
|||||||
@@ -347,5 +347,6 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input models.TagsMerge
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
r.hookExecutor.ExecutePostHooks(ctx, t.ID, plugin.TagMergePost, input, nil)
|
||||||
return t, nil
|
return t, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -121,6 +121,9 @@ func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
|
|||||||
handyKey := config.GetHandyKey()
|
handyKey := config.GetHandyKey()
|
||||||
scriptOffset := config.GetFunscriptOffset()
|
scriptOffset := config.GetFunscriptOffset()
|
||||||
|
|
||||||
|
// FIXME - misnamed output field means we have redundant fields
|
||||||
|
disableDropdownCreate := config.GetDisableDropdownCreate()
|
||||||
|
|
||||||
return &models.ConfigInterfaceResult{
|
return &models.ConfigInterfaceResult{
|
||||||
MenuItems: menuItems,
|
MenuItems: menuItems,
|
||||||
SoundOnPreview: &soundOnPreview,
|
SoundOnPreview: &soundOnPreview,
|
||||||
@@ -136,9 +139,13 @@ func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
|
|||||||
CSSEnabled: &cssEnabled,
|
CSSEnabled: &cssEnabled,
|
||||||
Language: &language,
|
Language: &language,
|
||||||
SlideshowDelay: &slideshowDelay,
|
SlideshowDelay: &slideshowDelay,
|
||||||
DisabledDropdownCreate: config.GetDisableDropdownCreate(),
|
|
||||||
HandyKey: &handyKey,
|
// FIXME - see above
|
||||||
FunscriptOffset: &scriptOffset,
|
DisabledDropdownCreate: disableDropdownCreate,
|
||||||
|
DisableDropdownCreate: disableDropdownCreate,
|
||||||
|
|
||||||
|
HandyKey: &handyKey,
|
||||||
|
FunscriptOffset: &scriptOffset,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -174,6 +181,9 @@ func makeConfigDefaultsResult() *models.ConfigDefaultSettingsResult {
|
|||||||
|
|
||||||
return &models.ConfigDefaultSettingsResult{
|
return &models.ConfigDefaultSettingsResult{
|
||||||
Identify: config.GetDefaultIdentifySettings(),
|
Identify: config.GetDefaultIdentifySettings(),
|
||||||
|
Scan: config.GetDefaultScanSettings(),
|
||||||
|
AutoTag: config.GetDefaultAutoTagSettings(),
|
||||||
|
Generate: config.GetDefaultGenerateSettings(),
|
||||||
DeleteFile: &deleteFileDefault,
|
DeleteFile: &deleteFileDefault,
|
||||||
DeleteGenerated: &deleteGeneratedDefault,
|
DeleteGenerated: &deleteGeneratedDefault,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,53 +6,57 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"strconv"
|
"strconv"
|
||||||
|
|
||||||
"github.com/stashapp/stash/pkg/manager"
|
|
||||||
"github.com/stashapp/stash/pkg/manager/config"
|
"github.com/stashapp/stash/pkg/manager/config"
|
||||||
"github.com/stashapp/stash/pkg/models"
|
"github.com/stashapp/stash/pkg/models"
|
||||||
"github.com/stashapp/stash/pkg/scraper"
|
"github.com/stashapp/stash/pkg/scraper"
|
||||||
"github.com/stashapp/stash/pkg/scraper/stashbox"
|
"github.com/stashapp/stash/pkg/scraper/stashbox"
|
||||||
)
|
)
|
||||||
|
|
||||||
// deprecated
|
func (r *queryResolver) ScrapeURL(ctx context.Context, url string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
|
||||||
func (r *queryResolver) ScrapeFreeones(ctx context.Context, performer_name string) (*models.ScrapedPerformer, error) {
|
return r.scraperCache().ScrapeURL(ctx, url, ty)
|
||||||
scrapedPerformer := models.ScrapedPerformerInput{
|
|
||||||
Name: &performer_name,
|
|
||||||
}
|
|
||||||
return manager.GetInstance().ScraperCache.ScrapePerformer(scraper.FreeonesScraperID, scrapedPerformer)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// deprecated
|
// deprecated
|
||||||
func (r *queryResolver) ScrapeFreeonesPerformerList(ctx context.Context, query string) ([]string, error) {
|
func (r *queryResolver) ScrapeFreeonesPerformerList(ctx context.Context, query string) ([]string, error) {
|
||||||
scrapedPerformers, err := manager.GetInstance().ScraperCache.ScrapePerformerList(scraper.FreeonesScraperID, query)
|
content, err := r.scraperCache().ScrapeName(ctx, scraper.FreeonesScraperID, query, models.ScrapeContentTypePerformer)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
performers, err := marshalScrapedPerformers(content)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
var ret []string
|
var ret []string
|
||||||
for _, v := range scrapedPerformers {
|
for _, p := range performers {
|
||||||
if v.Name != nil {
|
if p.Name != nil {
|
||||||
ret = append(ret, *v.Name)
|
ret = append(ret, *p.Name)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (r *queryResolver) ListScrapers(ctx context.Context, types []models.ScrapeContentType) ([]*models.Scraper, error) {
|
||||||
|
return r.scraperCache().ListScrapers(types), nil
|
||||||
|
}
|
||||||
|
|
||||||
func (r *queryResolver) ListPerformerScrapers(ctx context.Context) ([]*models.Scraper, error) {
|
func (r *queryResolver) ListPerformerScrapers(ctx context.Context) ([]*models.Scraper, error) {
|
||||||
return manager.GetInstance().ScraperCache.ListPerformerScrapers(), nil
|
return r.scraperCache().ListScrapers([]models.ScrapeContentType{models.ScrapeContentTypePerformer}), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *queryResolver) ListSceneScrapers(ctx context.Context) ([]*models.Scraper, error) {
|
func (r *queryResolver) ListSceneScrapers(ctx context.Context) ([]*models.Scraper, error) {
|
||||||
return manager.GetInstance().ScraperCache.ListSceneScrapers(), nil
|
return r.scraperCache().ListScrapers([]models.ScrapeContentType{models.ScrapeContentTypeScene}), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *queryResolver) ListGalleryScrapers(ctx context.Context) ([]*models.Scraper, error) {
|
func (r *queryResolver) ListGalleryScrapers(ctx context.Context) ([]*models.Scraper, error) {
|
||||||
return manager.GetInstance().ScraperCache.ListGalleryScrapers(), nil
|
return r.scraperCache().ListScrapers([]models.ScrapeContentType{models.ScrapeContentTypeGallery}), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *queryResolver) ListMovieScrapers(ctx context.Context) ([]*models.Scraper, error) {
|
func (r *queryResolver) ListMovieScrapers(ctx context.Context) ([]*models.Scraper, error) {
|
||||||
return manager.GetInstance().ScraperCache.ListMovieScrapers(), nil
|
return r.scraperCache().ListScrapers([]models.ScrapeContentType{models.ScrapeContentTypeMovie}), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *queryResolver) ScrapePerformerList(ctx context.Context, scraperID string, query string) ([]*models.ScrapedPerformer, error) {
|
func (r *queryResolver) ScrapePerformerList(ctx context.Context, scraperID string, query string) ([]*models.ScrapedPerformer, error) {
|
||||||
@@ -60,15 +64,29 @@ func (r *queryResolver) ScrapePerformerList(ctx context.Context, scraperID strin
|
|||||||
return nil, nil
|
return nil, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return manager.GetInstance().ScraperCache.ScrapePerformerList(scraperID, query)
|
content, err := r.scraperCache().ScrapeName(ctx, scraperID, query, models.ScrapeContentTypePerformer)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return marshalScrapedPerformers(content)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *queryResolver) ScrapePerformer(ctx context.Context, scraperID string, scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
|
func (r *queryResolver) ScrapePerformer(ctx context.Context, scraperID string, scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
|
||||||
return manager.GetInstance().ScraperCache.ScrapePerformer(scraperID, scrapedPerformer)
|
content, err := r.scraperCache().ScrapeFragment(ctx, scraperID, scraper.Input{Performer: &scrapedPerformer})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return marshalScrapedPerformer(content)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *queryResolver) ScrapePerformerURL(ctx context.Context, url string) (*models.ScrapedPerformer, error) {
|
func (r *queryResolver) ScrapePerformerURL(ctx context.Context, url string) (*models.ScrapedPerformer, error) {
|
||||||
return manager.GetInstance().ScraperCache.ScrapePerformerURL(url)
|
content, err := r.scraperCache().ScrapeURL(ctx, url, models.ScrapeContentTypePerformer)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return marshalScrapedPerformer(content)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *queryResolver) ScrapeSceneQuery(ctx context.Context, scraperID string, query string) ([]*models.ScrapedScene, error) {
|
func (r *queryResolver) ScrapeSceneQuery(ctx context.Context, scraperID string, query string) ([]*models.ScrapedScene, error) {
|
||||||
@@ -76,50 +94,80 @@ func (r *queryResolver) ScrapeSceneQuery(ctx context.Context, scraperID string,
|
|||||||
return nil, nil
|
return nil, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return manager.GetInstance().ScraperCache.ScrapeSceneQuery(scraperID, query)
|
content, err := r.scraperCache().ScrapeName(ctx, scraperID, query, models.ScrapeContentTypeScene)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return marshalScrapedScenes(content)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *queryResolver) ScrapeScene(ctx context.Context, scraperID string, scene models.SceneUpdateInput) (*models.ScrapedScene, error) {
|
func (r *queryResolver) ScrapeScene(ctx context.Context, scraperID string, scene models.SceneUpdateInput) (*models.ScrapedScene, error) {
|
||||||
id, err := strconv.Atoi(scene.ID)
|
id, err := strconv.Atoi(scene.ID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("%w: scene.ID is not an integer: '%s'", ErrInput, scene.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
content, err := r.scraperCache().ScrapeID(ctx, scraperID, id, models.ScrapeContentTypeScene)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return manager.GetInstance().ScraperCache.ScrapeScene(scraperID, id)
|
return marshalScrapedScene(content)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*models.ScrapedScene, error) {
|
func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*models.ScrapedScene, error) {
|
||||||
return manager.GetInstance().ScraperCache.ScrapeSceneURL(url)
|
content, err := r.scraperCache().ScrapeURL(ctx, url, models.ScrapeContentTypeScene)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return marshalScrapedScene(content)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *queryResolver) ScrapeGallery(ctx context.Context, scraperID string, gallery models.GalleryUpdateInput) (*models.ScrapedGallery, error) {
|
func (r *queryResolver) ScrapeGallery(ctx context.Context, scraperID string, gallery models.GalleryUpdateInput) (*models.ScrapedGallery, error) {
|
||||||
id, err := strconv.Atoi(gallery.ID)
|
id, err := strconv.Atoi(gallery.ID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("%w: gallery id is not an integer: '%s'", ErrInput, gallery.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
content, err := r.scraperCache().ScrapeID(ctx, scraperID, id, models.ScrapeContentTypeGallery)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return manager.GetInstance().ScraperCache.ScrapeGallery(scraperID, id)
|
return marshalScrapedGallery(content)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *queryResolver) ScrapeGalleryURL(ctx context.Context, url string) (*models.ScrapedGallery, error) {
|
func (r *queryResolver) ScrapeGalleryURL(ctx context.Context, url string) (*models.ScrapedGallery, error) {
|
||||||
return manager.GetInstance().ScraperCache.ScrapeGalleryURL(url)
|
content, err := r.scraperCache().ScrapeURL(ctx, url, models.ScrapeContentTypeGallery)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return marshalScrapedGallery(content)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *queryResolver) ScrapeMovieURL(ctx context.Context, url string) (*models.ScrapedMovie, error) {
|
func (r *queryResolver) ScrapeMovieURL(ctx context.Context, url string) (*models.ScrapedMovie, error) {
|
||||||
return manager.GetInstance().ScraperCache.ScrapeMovieURL(url)
|
content, err := r.scraperCache().ScrapeURL(ctx, url, models.ScrapeContentTypeMovie)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return marshalScrapedMovie(content)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *queryResolver) QueryStashBoxScene(ctx context.Context, input models.StashBoxSceneQueryInput) ([]*models.ScrapedScene, error) {
|
func (r *queryResolver) QueryStashBoxScene(ctx context.Context, input models.StashBoxSceneQueryInput) ([]*models.ScrapedScene, error) {
|
||||||
boxes := config.GetInstance().GetStashBoxes()
|
boxes := config.GetInstance().GetStashBoxes()
|
||||||
|
|
||||||
if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) {
|
if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) {
|
||||||
return nil, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex)
|
return nil, fmt.Errorf("%w: invalid stash_box_index %d", ErrInput, input.StashBoxIndex)
|
||||||
}
|
}
|
||||||
|
|
||||||
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager)
|
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager)
|
||||||
|
|
||||||
if len(input.SceneIds) > 0 {
|
if len(input.SceneIds) > 0 {
|
||||||
return client.FindStashBoxScenesByFingerprintsFlat(input.SceneIds)
|
return client.FindStashBoxScenesByFingerprintsFlat(ctx, input.SceneIds)
|
||||||
}
|
}
|
||||||
|
|
||||||
if input.Q != nil {
|
if input.Q != nil {
|
||||||
@@ -133,17 +181,17 @@ func (r *queryResolver) QueryStashBoxPerformer(ctx context.Context, input models
|
|||||||
boxes := config.GetInstance().GetStashBoxes()
|
boxes := config.GetInstance().GetStashBoxes()
|
||||||
|
|
||||||
if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) {
|
if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) {
|
||||||
return nil, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex)
|
return nil, fmt.Errorf("%w: invalid stash_box_index %d", ErrInput, input.StashBoxIndex)
|
||||||
}
|
}
|
||||||
|
|
||||||
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager)
|
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager)
|
||||||
|
|
||||||
if len(input.PerformerIds) > 0 {
|
if len(input.PerformerIds) > 0 {
|
||||||
return client.FindStashBoxPerformersByNames(input.PerformerIds)
|
return client.FindStashBoxPerformersByNames(ctx, input.PerformerIds)
|
||||||
}
|
}
|
||||||
|
|
||||||
if input.Q != nil {
|
if input.Q != nil {
|
||||||
return client.QueryStashBoxPerformer(*input.Q)
|
return client.QueryStashBoxPerformer(ctx, *input.Q)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, nil
|
return nil, nil
|
||||||
@@ -153,7 +201,7 @@ func (r *queryResolver) getStashBoxClient(index int) (*stashbox.Client, error) {
|
|||||||
boxes := config.GetInstance().GetStashBoxes()
|
boxes := config.GetInstance().GetStashBoxes()
|
||||||
|
|
||||||
if index < 0 || index >= len(boxes) {
|
if index < 0 || index >= len(boxes) {
|
||||||
return nil, fmt.Errorf("invalid stash_box_index %d", index)
|
return nil, fmt.Errorf("%w: invalid stash_box_index %d", ErrInput, index)
|
||||||
}
|
}
|
||||||
|
|
||||||
return stashbox.NewClient(*boxes[index], r.txnManager), nil
|
return stashbox.NewClient(*boxes[index], r.txnManager), nil
|
||||||
@@ -161,7 +209,8 @@ func (r *queryResolver) getStashBoxClient(index int) (*stashbox.Client, error) {
|
|||||||
|
|
||||||
func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleSceneInput) ([]*models.ScrapedScene, error) {
|
func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleSceneInput) ([]*models.ScrapedScene, error) {
|
||||||
if source.ScraperID != nil {
|
if source.ScraperID != nil {
|
||||||
var singleScene *models.ScrapedScene
|
var c models.ScrapedContent
|
||||||
|
var content []models.ScrapedContent
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
switch {
|
switch {
|
||||||
@@ -169,26 +218,24 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.Scr
|
|||||||
var sceneID int
|
var sceneID int
|
||||||
sceneID, err = strconv.Atoi(*input.SceneID)
|
sceneID, err = strconv.Atoi(*input.SceneID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, fmt.Errorf("%w: sceneID is not an integer: '%s'", ErrInput, *input.SceneID)
|
||||||
}
|
}
|
||||||
singleScene, err = manager.GetInstance().ScraperCache.ScrapeScene(*source.ScraperID, sceneID)
|
c, err = r.scraperCache().ScrapeID(ctx, *source.ScraperID, sceneID, models.ScrapeContentTypeScene)
|
||||||
|
content = []models.ScrapedContent{c}
|
||||||
case input.SceneInput != nil:
|
case input.SceneInput != nil:
|
||||||
singleScene, err = manager.GetInstance().ScraperCache.ScrapeSceneFragment(*source.ScraperID, *input.SceneInput)
|
c, err = r.scraperCache().ScrapeFragment(ctx, *source.ScraperID, scraper.Input{Scene: input.SceneInput})
|
||||||
|
content = []models.ScrapedContent{c}
|
||||||
case input.Query != nil:
|
case input.Query != nil:
|
||||||
return manager.GetInstance().ScraperCache.ScrapeSceneQuery(*source.ScraperID, *input.Query)
|
content, err = r.scraperCache().ScrapeName(ctx, *source.ScraperID, *input.Query, models.ScrapeContentTypeScene)
|
||||||
default:
|
default:
|
||||||
err = errors.New("scene_id, scene_input or query must be set")
|
err = fmt.Errorf("%w: scene_id, scene_input, or query must be set", ErrInput)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if singleScene != nil {
|
return marshalScrapedScenes(content)
|
||||||
return []*models.ScrapedScene{singleScene}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
} else if source.StashBoxIndex != nil {
|
} else if source.StashBoxIndex != nil {
|
||||||
client, err := r.getStashBoxClient(*source.StashBoxIndex)
|
client, err := r.getStashBoxClient(*source.StashBoxIndex)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -196,15 +243,15 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.Scr
|
|||||||
}
|
}
|
||||||
|
|
||||||
if input.SceneID != nil {
|
if input.SceneID != nil {
|
||||||
return client.FindStashBoxScenesByFingerprintsFlat([]string{*input.SceneID})
|
return client.FindStashBoxScenesByFingerprintsFlat(ctx, []string{*input.SceneID})
|
||||||
} else if input.Query != nil {
|
} else if input.Query != nil {
|
||||||
return client.QueryStashBoxScene(ctx, *input.Query)
|
return client.QueryStashBoxScene(ctx, *input.Query)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, errors.New("scene_id or query must be set")
|
return nil, fmt.Errorf("%w: scene_id or query must be set", ErrInput)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, errors.New("scraper_id or stash_box_index must be set")
|
return nil, fmt.Errorf("%w: scraper_id or stash_box_index must be set", ErrInput)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeMultiScenesInput) ([][]*models.ScrapedScene, error) {
|
func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeMultiScenesInput) ([][]*models.ScrapedScene, error) {
|
||||||
@@ -216,7 +263,7 @@ func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source models.Scr
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return client.FindStashBoxScenesByFingerprints(input.SceneIds)
|
return client.FindStashBoxScenesByFingerprints(ctx, input.SceneIds)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, errors.New("scraper_id or stash_box_index must be set")
|
return nil, errors.New("scraper_id or stash_box_index must be set")
|
||||||
@@ -225,20 +272,21 @@ func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source models.Scr
|
|||||||
func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSinglePerformerInput) ([]*models.ScrapedPerformer, error) {
|
func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSinglePerformerInput) ([]*models.ScrapedPerformer, error) {
|
||||||
if source.ScraperID != nil {
|
if source.ScraperID != nil {
|
||||||
if input.PerformerInput != nil {
|
if input.PerformerInput != nil {
|
||||||
singlePerformer, err := manager.GetInstance().ScraperCache.ScrapePerformer(*source.ScraperID, *input.PerformerInput)
|
performer, err := r.scraperCache().ScrapeFragment(ctx, *source.ScraperID, scraper.Input{Performer: input.PerformerInput})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if singlePerformer != nil {
|
return marshalScrapedPerformers([]models.ScrapedContent{performer})
|
||||||
return []*models.ScrapedPerformer{singlePerformer}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if input.Query != nil {
|
if input.Query != nil {
|
||||||
return manager.GetInstance().ScraperCache.ScrapePerformerList(*source.ScraperID, *input.Query)
|
content, err := r.scraperCache().ScrapeName(ctx, *source.ScraperID, *input.Query, models.ScrapeContentTypePerformer)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return marshalScrapedPerformers(content)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, ErrNotImplemented
|
return nil, ErrNotImplemented
|
||||||
@@ -251,9 +299,9 @@ func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models
|
|||||||
var ret []*models.StashBoxPerformerQueryResult
|
var ret []*models.StashBoxPerformerQueryResult
|
||||||
switch {
|
switch {
|
||||||
case input.PerformerID != nil:
|
case input.PerformerID != nil:
|
||||||
ret, err = client.FindStashBoxPerformersByNames([]string{*input.PerformerID})
|
ret, err = client.FindStashBoxPerformersByNames(ctx, []string{*input.PerformerID})
|
||||||
case input.Query != nil:
|
case input.Query != nil:
|
||||||
ret, err = client.QueryStashBoxPerformer(*input.Query)
|
ret, err = client.QueryStashBoxPerformer(ctx, *input.Query)
|
||||||
default:
|
default:
|
||||||
return nil, ErrNotImplemented
|
return nil, ErrNotImplemented
|
||||||
}
|
}
|
||||||
@@ -281,45 +329,43 @@ func (r *queryResolver) ScrapeMultiPerformers(ctx context.Context, source models
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return client.FindStashBoxPerformersByPerformerNames(input.PerformerIds)
|
return client.FindStashBoxPerformersByPerformerNames(ctx, input.PerformerIds)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, errors.New("scraper_id or stash_box_index must be set")
|
return nil, errors.New("scraper_id or stash_box_index must be set")
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleGalleryInput) ([]*models.ScrapedGallery, error) {
|
func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleGalleryInput) ([]*models.ScrapedGallery, error) {
|
||||||
if source.ScraperID != nil {
|
if source.StashBoxIndex != nil {
|
||||||
var singleGallery *models.ScrapedGallery
|
|
||||||
var err error
|
|
||||||
|
|
||||||
switch {
|
|
||||||
case input.GalleryID != nil:
|
|
||||||
var galleryID int
|
|
||||||
galleryID, err = strconv.Atoi(*input.GalleryID)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
singleGallery, err = manager.GetInstance().ScraperCache.ScrapeGallery(*source.ScraperID, galleryID)
|
|
||||||
case input.GalleryInput != nil:
|
|
||||||
singleGallery, err = manager.GetInstance().ScraperCache.ScrapeGalleryFragment(*source.ScraperID, *input.GalleryInput)
|
|
||||||
default:
|
|
||||||
return nil, ErrNotImplemented
|
|
||||||
}
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if singleGallery != nil {
|
|
||||||
return []*models.ScrapedGallery{singleGallery}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
} else if source.StashBoxIndex != nil {
|
|
||||||
return nil, ErrNotSupported
|
return nil, ErrNotSupported
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, errors.New("scraper_id must be set")
|
if source.ScraperID == nil {
|
||||||
|
return nil, fmt.Errorf("%w: scraper_id must be set", ErrInput)
|
||||||
|
}
|
||||||
|
|
||||||
|
var c models.ScrapedContent
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case input.GalleryID != nil:
|
||||||
|
galleryID, err := strconv.Atoi(*input.GalleryID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("%w: gallery id is not an integer: '%s'", ErrInput, *input.GalleryID)
|
||||||
|
}
|
||||||
|
c, err = r.scraperCache().ScrapeID(ctx, *source.ScraperID, galleryID, models.ScrapeContentTypeGallery)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return marshalScrapedGalleries([]models.ScrapedContent{c})
|
||||||
|
case input.GalleryInput != nil:
|
||||||
|
c, err := r.scraperCache().ScrapeFragment(ctx, *source.ScraperID, scraper.Input{Gallery: input.GalleryInput})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return marshalScrapedGalleries([]models.ScrapedContent{c})
|
||||||
|
default:
|
||||||
|
return nil, ErrNotImplemented
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *queryResolver) ScrapeSingleMovie(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleMovieInput) ([]*models.ScrapedMovie, error) {
|
func (r *queryResolver) ScrapeSingleMovie(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleMovieInput) ([]*models.ScrapedMovie, error) {
|
||||||
|
|||||||
@@ -38,6 +38,7 @@ func (rs sceneRoutes) Routes() chi.Router {
|
|||||||
r.Get("/webp", rs.Webp)
|
r.Get("/webp", rs.Webp)
|
||||||
r.Get("/vtt/chapter", rs.ChapterVtt)
|
r.Get("/vtt/chapter", rs.ChapterVtt)
|
||||||
r.Get("/funscript", rs.Funscript)
|
r.Get("/funscript", rs.Funscript)
|
||||||
|
r.Get("/interactive_heatmap", rs.InteractiveHeatmap)
|
||||||
|
|
||||||
r.Get("/scene_marker/{sceneMarkerId}/stream", rs.SceneMarkerStream)
|
r.Get("/scene_marker/{sceneMarkerId}/stream", rs.SceneMarkerStream)
|
||||||
r.Get("/scene_marker/{sceneMarkerId}/preview", rs.SceneMarkerPreview)
|
r.Get("/scene_marker/{sceneMarkerId}/preview", rs.SceneMarkerPreview)
|
||||||
@@ -273,6 +274,13 @@ func (rs sceneRoutes) Funscript(w http.ResponseWriter, r *http.Request) {
|
|||||||
utils.ServeFileNoCache(w, r, funscript)
|
utils.ServeFileNoCache(w, r, funscript)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (rs sceneRoutes) InteractiveHeatmap(w http.ResponseWriter, r *http.Request) {
|
||||||
|
scene := r.Context().Value(sceneKey).(*models.Scene)
|
||||||
|
w.Header().Set("Content-Type", "image/png")
|
||||||
|
filepath := manager.GetInstance().Paths.Scene.GetInteractiveHeatmapPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()))
|
||||||
|
http.ServeFile(w, r, filepath)
|
||||||
|
}
|
||||||
|
|
||||||
func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) {
|
func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) {
|
||||||
scene := r.Context().Value(sceneKey).(*models.Scene)
|
scene := r.Context().Value(sceneKey).(*models.Scene)
|
||||||
w.Header().Set("Content-Type", "text/vtt")
|
w.Header().Set("Content-Type", "text/vtt")
|
||||||
|
|||||||
139
pkg/api/scraped_content.go
Normal file
139
pkg/api/scraped_content.go
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// marshalScrapedScenes converts ScrapedContent into ScrapedScene. If conversion fails, an
|
||||||
|
// error is returned to the caller.
|
||||||
|
func marshalScrapedScenes(content []models.ScrapedContent) ([]*models.ScrapedScene, error) {
|
||||||
|
var ret []*models.ScrapedScene
|
||||||
|
for _, c := range content {
|
||||||
|
if c == nil {
|
||||||
|
ret = append(ret, nil)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
switch s := c.(type) {
|
||||||
|
case *models.ScrapedScene:
|
||||||
|
ret = append(ret, s)
|
||||||
|
case models.ScrapedScene:
|
||||||
|
ret = append(ret, &s)
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedScene", models.ErrConversion)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// marshalScrapedPerformers converts ScrapedContent into ScrapedPerformer. If conversion
|
||||||
|
// fails, an error is returned to the caller.
|
||||||
|
func marshalScrapedPerformers(content []models.ScrapedContent) ([]*models.ScrapedPerformer, error) {
|
||||||
|
var ret []*models.ScrapedPerformer
|
||||||
|
for _, c := range content {
|
||||||
|
if c == nil {
|
||||||
|
ret = append(ret, nil)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
switch p := c.(type) {
|
||||||
|
case *models.ScrapedPerformer:
|
||||||
|
ret = append(ret, p)
|
||||||
|
case models.ScrapedPerformer:
|
||||||
|
ret = append(ret, &p)
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedPerformer", models.ErrConversion)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// marshalScrapedGalleries converts ScrapedContent into ScrapedGallery. If
|
||||||
|
// conversion fails, an error is returned.
|
||||||
|
func marshalScrapedGalleries(content []models.ScrapedContent) ([]*models.ScrapedGallery, error) {
|
||||||
|
var ret []*models.ScrapedGallery
|
||||||
|
for _, c := range content {
|
||||||
|
if c == nil {
|
||||||
|
ret = append(ret, nil)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
switch g := c.(type) {
|
||||||
|
case *models.ScrapedGallery:
|
||||||
|
ret = append(ret, g)
|
||||||
|
case models.ScrapedGallery:
|
||||||
|
ret = append(ret, &g)
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedGallery", models.ErrConversion)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// marshalScrapedMovies converts ScrapedContent into ScrapedMovie. If conversion
|
||||||
|
// fails, an error is returned.
|
||||||
|
func marshalScrapedMovies(content []models.ScrapedContent) ([]*models.ScrapedMovie, error) {
|
||||||
|
var ret []*models.ScrapedMovie
|
||||||
|
for _, c := range content {
|
||||||
|
if c == nil {
|
||||||
|
ret = append(ret, nil)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
switch m := c.(type) {
|
||||||
|
case *models.ScrapedMovie:
|
||||||
|
ret = append(ret, m)
|
||||||
|
case models.ScrapedMovie:
|
||||||
|
ret = append(ret, &m)
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("%w: cannot turn ScrapedConetnt into ScrapedMovie", models.ErrConversion)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// marshalScrapedPerformer will marshal a single performer
|
||||||
|
func marshalScrapedPerformer(content models.ScrapedContent) (*models.ScrapedPerformer, error) {
|
||||||
|
p, err := marshalScrapedPerformers([]models.ScrapedContent{content})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return p[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// marshalScrapedScene will marshal a single scraped scene
|
||||||
|
func marshalScrapedScene(content models.ScrapedContent) (*models.ScrapedScene, error) {
|
||||||
|
s, err := marshalScrapedScenes([]models.ScrapedContent{content})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return s[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// marshalScrapedGallery will marshal a single scraped gallery
|
||||||
|
func marshalScrapedGallery(content models.ScrapedContent) (*models.ScrapedGallery, error) {
|
||||||
|
g, err := marshalScrapedGalleries([]models.ScrapedContent{content})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return g[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// marshalScrapedMovie will marshal a single scraped movie
|
||||||
|
func marshalScrapedMovie(content models.ScrapedContent) (*models.ScrapedMovie, error) {
|
||||||
|
m, err := marshalScrapedMovies([]models.ScrapedContent{content})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return m[0], nil
|
||||||
|
}
|
||||||
@@ -30,6 +30,7 @@ import (
|
|||||||
"github.com/stashapp/stash/pkg/manager/config"
|
"github.com/stashapp/stash/pkg/manager/config"
|
||||||
"github.com/stashapp/stash/pkg/models"
|
"github.com/stashapp/stash/pkg/models"
|
||||||
"github.com/stashapp/stash/pkg/utils"
|
"github.com/stashapp/stash/pkg/utils"
|
||||||
|
"github.com/vearutop/statigz"
|
||||||
)
|
)
|
||||||
|
|
||||||
var version string
|
var version string
|
||||||
@@ -53,6 +54,7 @@ func Start(uiBox embed.FS, loginUIBox embed.FS) {
|
|||||||
if c.GetLogAccess() {
|
if c.GetLogAccess() {
|
||||||
r.Use(middleware.Logger)
|
r.Use(middleware.Logger)
|
||||||
}
|
}
|
||||||
|
r.Use(SecurityHeadersMiddleware)
|
||||||
r.Use(middleware.DefaultCompress)
|
r.Use(middleware.DefaultCompress)
|
||||||
r.Use(middleware.StripSlashes)
|
r.Use(middleware.StripSlashes)
|
||||||
r.Use(cors.AllowAll().Handler)
|
r.Use(cors.AllowAll().Handler)
|
||||||
@@ -205,19 +207,22 @@ func Start(uiBox embed.FS, loginUIBox embed.FS) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
prefix := getProxyPrefix(r.Header)
|
prefix := getProxyPrefix(r.Header)
|
||||||
baseURLIndex := strings.Replace(string(data), "%BASE_URL%", prefix+"/", 2)
|
baseURLIndex := strings.ReplaceAll(string(data), "/%BASE_URL%", prefix)
|
||||||
baseURLIndex = strings.Replace(baseURLIndex, "base href=\"/\"", fmt.Sprintf("base href=\"%s\"", prefix+"/"), 2)
|
baseURLIndex = strings.Replace(baseURLIndex, "base href=\"/\"", fmt.Sprintf("base href=\"%s\"", prefix+"/"), 1)
|
||||||
_, _ = w.Write([]byte(baseURLIndex))
|
_, _ = w.Write([]byte(baseURLIndex))
|
||||||
} else {
|
} else {
|
||||||
isStatic, _ := path.Match("/static/*/*", r.URL.Path)
|
isStatic, _ := path.Match("/static/*/*", r.URL.Path)
|
||||||
if isStatic {
|
if isStatic {
|
||||||
w.Header().Add("Cache-Control", "max-age=604800000")
|
w.Header().Add("Cache-Control", "max-age=604800000")
|
||||||
}
|
}
|
||||||
uiRoot, err := fs.Sub(uiBox, uiRootDir)
|
|
||||||
if err != nil {
|
prefix := getProxyPrefix(r.Header)
|
||||||
panic(err)
|
if prefix != "" {
|
||||||
|
r.URL.Path = strings.Replace(r.URL.Path, prefix, "", 1)
|
||||||
}
|
}
|
||||||
http.FileServer(http.FS(uiRoot)).ServeHTTP(w, r)
|
r.URL.Path = uiRootDir + r.URL.Path
|
||||||
|
|
||||||
|
statigz.FileServer(uiBox).ServeHTTP(w, r)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -338,6 +343,36 @@ var (
|
|||||||
BaseURLCtxKey = &contextKey{"BaseURL"}
|
BaseURLCtxKey = &contextKey{"BaseURL"}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
func SecurityHeadersMiddleware(next http.Handler) http.Handler {
|
||||||
|
fn := func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
c := config.GetInstance()
|
||||||
|
connectableOrigins := "connect-src data: 'self'"
|
||||||
|
|
||||||
|
// Workaround Safari bug https://bugs.webkit.org/show_bug.cgi?id=201591
|
||||||
|
// Allows websocket requests to any origin
|
||||||
|
connectableOrigins += " ws: wss:"
|
||||||
|
|
||||||
|
// The graphql playground pulls its frontend from a cdn
|
||||||
|
connectableOrigins += " https://cdn.jsdelivr.net "
|
||||||
|
|
||||||
|
if !c.IsNewSystem() && c.GetHandyKey() != "" {
|
||||||
|
connectableOrigins += " https://www.handyfeeling.com"
|
||||||
|
}
|
||||||
|
connectableOrigins += "; "
|
||||||
|
|
||||||
|
cspDirectives := "default-src data: 'self' 'unsafe-inline';" + connectableOrigins + "img-src data: *; script-src 'self' https://cdn.jsdelivr.net 'unsafe-inline'; media-src 'self' blob:; child-src 'none'; object-src 'none'; form-action 'self'"
|
||||||
|
|
||||||
|
w.Header().Set("Referrer-Policy", "same-origin")
|
||||||
|
w.Header().Set("X-Content-Type-Options", "nosniff")
|
||||||
|
w.Header().Set("X-Frame-Options", "DENY")
|
||||||
|
w.Header().Set("X-XSS-Protection", "1")
|
||||||
|
w.Header().Set("Content-Security-Policy", cspDirectives)
|
||||||
|
|
||||||
|
next.ServeHTTP(w, r)
|
||||||
|
}
|
||||||
|
return http.HandlerFunc(fn)
|
||||||
|
}
|
||||||
|
|
||||||
func BaseURLMiddleware(next http.Handler) http.Handler {
|
func BaseURLMiddleware(next http.Handler) http.Handler {
|
||||||
fn := func(w http.ResponseWriter, r *http.Request) {
|
fn := func(w http.ResponseWriter, r *http.Request) {
|
||||||
ctx := r.Context()
|
ctx := r.Context()
|
||||||
|
|||||||
@@ -66,3 +66,7 @@ func (b SceneURLBuilder) GetSceneMarkerStreamScreenshotURL(sceneMarkerID int) st
|
|||||||
func (b SceneURLBuilder) GetFunscriptURL() string {
|
func (b SceneURLBuilder) GetFunscriptURL() string {
|
||||||
return b.BaseURL + "/scene/" + b.SceneID + "/funscript"
|
return b.BaseURL + "/scene/" + b.SceneID + "/funscript"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (b SceneURLBuilder) GetInteractiveHeatmapURL() string {
|
||||||
|
return b.BaseURL + "/scene/" + b.SceneID + "/interactive_heatmap"
|
||||||
|
}
|
||||||
|
|||||||
@@ -60,14 +60,12 @@ func (t *tagger) tagPerformers(performerReader models.PerformerReader, addFunc a
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (t *tagger) tagStudios(studioReader models.StudioReader, addFunc addLinkFunc) error {
|
func (t *tagger) tagStudios(studioReader models.StudioReader, addFunc addLinkFunc) error {
|
||||||
others, err := match.PathToStudios(t.Path, studioReader)
|
studio, err := match.PathToStudio(t.Path, studioReader)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
// only add first studio
|
if studio != nil {
|
||||||
if len(others) > 0 {
|
|
||||||
studio := others[0]
|
|
||||||
added, err := addFunc(t.ID, studio.ID)
|
added, err := addFunc(t.ID, studio.ID)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|||||||
@@ -23,7 +23,7 @@ import (
|
|||||||
var DB *sqlx.DB
|
var DB *sqlx.DB
|
||||||
var WriteMu sync.Mutex
|
var WriteMu sync.Mutex
|
||||||
var dbPath string
|
var dbPath string
|
||||||
var appSchemaVersion uint = 28
|
var appSchemaVersion uint = 29
|
||||||
var databaseSchemaVersion uint
|
var databaseSchemaVersion uint
|
||||||
|
|
||||||
//go:embed migrations/*.sql
|
//go:embed migrations/*.sql
|
||||||
|
|||||||
1
pkg/database/migrations/29_interactive_speed.up.sql
Normal file
1
pkg/database/migrations/29_interactive_speed.up.sql
Normal file
@@ -0,0 +1 @@
|
|||||||
|
ALTER TABLE `scenes` ADD COLUMN `interactive_speed` int
|
||||||
@@ -44,6 +44,15 @@ func Download(ctx context.Context, configDirectory string) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// validate that the urls contained what we needed
|
||||||
|
executables := []string{getFFMPEGFilename(), getFFProbeFilename()}
|
||||||
|
for _, executable := range executables {
|
||||||
|
_, err := os.Stat(filepath.Join(configDirectory, executable))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -76,7 +85,6 @@ func DownloadSingle(ctx context.Context, configDirectory, url string) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Configure where we want to download the archive
|
// Configure where we want to download the archive
|
||||||
urlExt := path.Ext(url)
|
|
||||||
urlBase := path.Base(url)
|
urlBase := path.Base(url)
|
||||||
archivePath := filepath.Join(configDirectory, urlBase)
|
archivePath := filepath.Join(configDirectory, urlBase)
|
||||||
_ = os.Remove(archivePath) // remove archive if it already exists
|
_ = os.Remove(archivePath) // remove archive if it already exists
|
||||||
@@ -118,7 +126,7 @@ func DownloadSingle(ctx context.Context, configDirectory, url string) error {
|
|||||||
|
|
||||||
logger.Info("Downloading complete")
|
logger.Info("Downloading complete")
|
||||||
|
|
||||||
if urlExt == ".zip" {
|
if resp.Header.Get("Content-Type") == "application/zip" {
|
||||||
logger.Infof("Unzipping %s...", archivePath)
|
logger.Infof("Unzipping %s...", archivePath)
|
||||||
if err := unzip(archivePath, configDirectory); err != nil {
|
if err := unzip(archivePath, configDirectory); err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -126,12 +134,18 @@ func DownloadSingle(ctx context.Context, configDirectory, url string) error {
|
|||||||
|
|
||||||
// On OSX or Linux set downloaded files permissions
|
// On OSX or Linux set downloaded files permissions
|
||||||
if runtime.GOOS == "darwin" || runtime.GOOS == "linux" {
|
if runtime.GOOS == "darwin" || runtime.GOOS == "linux" {
|
||||||
if err := os.Chmod(filepath.Join(configDirectory, "ffmpeg"), 0755); err != nil {
|
_, err = os.Stat(filepath.Join(configDirectory, "ffmpeg"))
|
||||||
return err
|
if !os.IsNotExist(err) {
|
||||||
|
if err = os.Chmod(filepath.Join(configDirectory, "ffmpeg"), 0755); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := os.Chmod(filepath.Join(configDirectory, "ffprobe"), 0755); err != nil {
|
_, err = os.Stat(filepath.Join(configDirectory, "ffprobe"))
|
||||||
return err
|
if !os.IsNotExist(err) {
|
||||||
|
if err := os.Chmod(filepath.Join(configDirectory, "ffprobe"), 0755); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: In future possible clear xattr to allow running on osx without user intervention
|
// TODO: In future possible clear xattr to allow running on osx without user intervention
|
||||||
@@ -139,8 +153,6 @@ func DownloadSingle(ctx context.Context, configDirectory, url string) error {
|
|||||||
// xattr -c /path/to/binary -- xattr.Remove(path, "com.apple.quarantine")
|
// xattr -c /path/to/binary -- xattr.Remove(path, "com.apple.quarantine")
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.Infof("ffmpeg and ffprobe successfully installed in %s", configDirectory)
|
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
return fmt.Errorf("ffmpeg was downloaded to %s", archivePath)
|
return fmt.Errorf("ffmpeg was downloaded to %s", archivePath)
|
||||||
}
|
}
|
||||||
@@ -152,7 +164,7 @@ func getFFMPEGURL() []string {
|
|||||||
var urls []string
|
var urls []string
|
||||||
switch runtime.GOOS {
|
switch runtime.GOOS {
|
||||||
case "darwin":
|
case "darwin":
|
||||||
urls = []string{"https://evermeet.cx/ffmpeg/ffmpeg-4.3.1.zip", "https://evermeet.cx/ffmpeg/ffprobe-4.3.1.zip"}
|
urls = []string{"https://evermeet.cx/ffmpeg/getrelease/zip", "https://evermeet.cx/ffmpeg/getrelease/ffprobe/zip"}
|
||||||
case "linux":
|
case "linux":
|
||||||
switch runtime.GOARCH {
|
switch runtime.GOARCH {
|
||||||
case "amd64":
|
case "amd64":
|
||||||
|
|||||||
@@ -10,9 +10,22 @@ type SceneMarkerOptions struct {
|
|||||||
Seconds int
|
Seconds int
|
||||||
Width int
|
Width int
|
||||||
OutputPath string
|
OutputPath string
|
||||||
|
Audio bool
|
||||||
}
|
}
|
||||||
|
|
||||||
func (e *Encoder) SceneMarkerVideo(probeResult VideoFile, options SceneMarkerOptions) error {
|
func (e *Encoder) SceneMarkerVideo(probeResult VideoFile, options SceneMarkerOptions) error {
|
||||||
|
|
||||||
|
argsAudio := []string{
|
||||||
|
"-c:a", "aac",
|
||||||
|
"-b:a", "64k",
|
||||||
|
}
|
||||||
|
|
||||||
|
if !options.Audio {
|
||||||
|
argsAudio = []string{
|
||||||
|
"-an",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
args := []string{
|
args := []string{
|
||||||
"-v", "error",
|
"-v", "error",
|
||||||
"-ss", strconv.Itoa(options.Seconds),
|
"-ss", strconv.Itoa(options.Seconds),
|
||||||
@@ -29,11 +42,10 @@ func (e *Encoder) SceneMarkerVideo(probeResult VideoFile, options SceneMarkerOpt
|
|||||||
"-threads", "4",
|
"-threads", "4",
|
||||||
"-vf", fmt.Sprintf("scale=%v:-2", options.Width),
|
"-vf", fmt.Sprintf("scale=%v:-2", options.Width),
|
||||||
"-sws_flags", "lanczos",
|
"-sws_flags", "lanczos",
|
||||||
"-c:a", "aac",
|
|
||||||
"-b:a", "64k",
|
|
||||||
"-strict", "-2",
|
"-strict", "-2",
|
||||||
options.OutputPath,
|
|
||||||
}
|
}
|
||||||
|
args = append(args, argsAudio...)
|
||||||
|
args = append(args, options.OutputPath)
|
||||||
_, err := e.run(probeResult.Path, args, nil)
|
_, err := e.run(probeResult.Path, args, nil)
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|||||||
161
pkg/file/delete.go
Normal file
161
pkg/file/delete.go
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
package file
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io/fs"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/logger"
|
||||||
|
)
|
||||||
|
|
||||||
|
const deleteFileSuffix = ".delete"
|
||||||
|
|
||||||
|
// RenamerRemover provides access to the Rename and Remove functions.
|
||||||
|
type RenamerRemover interface {
|
||||||
|
Rename(oldpath, newpath string) error
|
||||||
|
Remove(name string) error
|
||||||
|
RemoveAll(path string) error
|
||||||
|
Stat(name string) (fs.FileInfo, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
type renamerRemoverImpl struct {
|
||||||
|
RenameFn func(oldpath, newpath string) error
|
||||||
|
RemoveFn func(name string) error
|
||||||
|
RemoveAllFn func(path string) error
|
||||||
|
StatFn func(path string) (fs.FileInfo, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r renamerRemoverImpl) Rename(oldpath, newpath string) error {
|
||||||
|
return r.RenameFn(oldpath, newpath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r renamerRemoverImpl) Remove(name string) error {
|
||||||
|
return r.RemoveFn(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r renamerRemoverImpl) RemoveAll(path string) error {
|
||||||
|
return r.RemoveAllFn(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r renamerRemoverImpl) Stat(path string) (fs.FileInfo, error) {
|
||||||
|
return r.StatFn(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deleter is used to safely delete files and directories from the filesystem.
|
||||||
|
// During a transaction, files and directories are marked for deletion using
|
||||||
|
// the Files and Dirs methods. This will rename the files/directories to be
|
||||||
|
// deleted. If the transaction is rolled back, then the files/directories can
|
||||||
|
// be restored to their original state with the Abort method. If the
|
||||||
|
// transaction is committed, the marked files are then deleted from the
|
||||||
|
// filesystem using the Complete method.
|
||||||
|
type Deleter struct {
|
||||||
|
RenamerRemover RenamerRemover
|
||||||
|
files []string
|
||||||
|
dirs []string
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewDeleter() *Deleter {
|
||||||
|
return &Deleter{
|
||||||
|
RenamerRemover: renamerRemoverImpl{
|
||||||
|
RenameFn: os.Rename,
|
||||||
|
RemoveFn: os.Remove,
|
||||||
|
RemoveAllFn: os.RemoveAll,
|
||||||
|
StatFn: os.Stat,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Files designates files to be deleted. Each file marked will be renamed to add
|
||||||
|
// a `.delete` suffix. An error is returned if a file could not be renamed.
|
||||||
|
// Note that if an error is returned, then some files may be left renamed.
|
||||||
|
// Abort should be called to restore marked files if this function returns an
|
||||||
|
// error.
|
||||||
|
func (d *Deleter) Files(paths []string) error {
|
||||||
|
for _, p := range paths {
|
||||||
|
// fail silently if the file does not exist
|
||||||
|
if _, err := d.RenamerRemover.Stat(p); err != nil {
|
||||||
|
if errors.Is(err, fs.ErrNotExist) {
|
||||||
|
logger.Warnf("File %q does not exist and therefore cannot be deleted. Ignoring.", p)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
return fmt.Errorf("check file %q exists: %w", p, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := d.renameForDelete(p); err != nil {
|
||||||
|
return fmt.Errorf("marking file %q for deletion: %w", p, err)
|
||||||
|
}
|
||||||
|
d.files = append(d.files, p)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Dirs designates directories to be deleted. Each directory marked will be renamed to add
|
||||||
|
// a `.delete` suffix. An error is returned if a directory could not be renamed.
|
||||||
|
// Note that if an error is returned, then some directories may be left renamed.
|
||||||
|
// Abort should be called to restore marked files/directories if this function returns an
|
||||||
|
// error.
|
||||||
|
func (d *Deleter) Dirs(paths []string) error {
|
||||||
|
for _, p := range paths {
|
||||||
|
// fail silently if the file does not exist
|
||||||
|
if _, err := d.RenamerRemover.Stat(p); err != nil {
|
||||||
|
if errors.Is(err, fs.ErrNotExist) {
|
||||||
|
logger.Warnf("Directory %q does not exist and therefore cannot be deleted. Ignoring.", p)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
return fmt.Errorf("check directory %q exists: %w", p, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := d.renameForDelete(p); err != nil {
|
||||||
|
return fmt.Errorf("marking directory %q for deletion: %w", p, err)
|
||||||
|
}
|
||||||
|
d.dirs = append(d.dirs, p)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rollback tries to rename all marked files and directories back to their
|
||||||
|
// original names and clears the marked list. Any errors encountered are
|
||||||
|
// logged. All files will be attempted regardless of any errors occurred.
|
||||||
|
func (d *Deleter) Rollback() {
|
||||||
|
for _, f := range append(d.files, d.dirs...) {
|
||||||
|
if err := d.renameForRestore(f); err != nil {
|
||||||
|
logger.Warnf("Error restoring %q: %v", f, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
d.files = nil
|
||||||
|
d.dirs = nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Commit deletes all files marked for deletion and clears the marked list.
|
||||||
|
// Any errors encountered are logged. All files will be attempted, regardless
|
||||||
|
// of the errors encountered.
|
||||||
|
func (d *Deleter) Commit() {
|
||||||
|
for _, f := range d.files {
|
||||||
|
if err := d.RenamerRemover.Remove(f + deleteFileSuffix); err != nil {
|
||||||
|
logger.Warnf("Error deleting file %q: %v", f+deleteFileSuffix, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, f := range d.dirs {
|
||||||
|
if err := d.RenamerRemover.RemoveAll(f + deleteFileSuffix); err != nil {
|
||||||
|
logger.Warnf("Error deleting directory %q: %v", f+deleteFileSuffix, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
d.files = nil
|
||||||
|
d.dirs = nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Deleter) renameForDelete(path string) error {
|
||||||
|
return d.RenamerRemover.Rename(path, path+deleteFileSuffix)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Deleter) renameForRestore(path string) error {
|
||||||
|
return d.RenamerRemover.Rename(path+deleteFileSuffix, path)
|
||||||
|
}
|
||||||
40
pkg/gallery/filter.go
Normal file
40
pkg/gallery/filter.go
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
package gallery
|
||||||
|
|
||||||
|
import (
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
func PathsFilter(paths []string) *models.GalleryFilterType {
|
||||||
|
if paths == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
sep := string(filepath.Separator)
|
||||||
|
|
||||||
|
var ret *models.GalleryFilterType
|
||||||
|
var or *models.GalleryFilterType
|
||||||
|
for _, p := range paths {
|
||||||
|
newOr := &models.GalleryFilterType{}
|
||||||
|
if or != nil {
|
||||||
|
or.Or = newOr
|
||||||
|
} else {
|
||||||
|
ret = newOr
|
||||||
|
}
|
||||||
|
|
||||||
|
or = newOr
|
||||||
|
|
||||||
|
if !strings.HasSuffix(p, sep) {
|
||||||
|
p += sep
|
||||||
|
}
|
||||||
|
|
||||||
|
or.Path = &models.StringCriterionInput{
|
||||||
|
Modifier: models.CriterionModifierEquals,
|
||||||
|
Value: p + "%",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret
|
||||||
|
}
|
||||||
@@ -12,7 +12,7 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type SceneScraper interface {
|
type SceneScraper interface {
|
||||||
ScrapeScene(sceneID int) (*models.ScrapedScene, error)
|
ScrapeScene(ctx context.Context, sceneID int) (*models.ScrapedScene, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
type SceneUpdatePostHookExecutor interface {
|
type SceneUpdatePostHookExecutor interface {
|
||||||
@@ -34,7 +34,7 @@ type SceneIdentifier struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (t *SceneIdentifier) Identify(ctx context.Context, txnManager models.TransactionManager, scene *models.Scene) error {
|
func (t *SceneIdentifier) Identify(ctx context.Context, txnManager models.TransactionManager, scene *models.Scene) error {
|
||||||
result, err := t.scrapeScene(scene)
|
result, err := t.scrapeScene(ctx, scene)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -57,11 +57,11 @@ type scrapeResult struct {
|
|||||||
source ScraperSource
|
source ScraperSource
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *SceneIdentifier) scrapeScene(scene *models.Scene) (*scrapeResult, error) {
|
func (t *SceneIdentifier) scrapeScene(ctx context.Context, scene *models.Scene) (*scrapeResult, error) {
|
||||||
// iterate through the input sources
|
// iterate through the input sources
|
||||||
for _, source := range t.Sources {
|
for _, source := range t.Sources {
|
||||||
// scrape using the source
|
// scrape using the source
|
||||||
scraped, err := source.Scraper.ScrapeScene(scene.ID)
|
scraped, err := source.Scraper.ScrapeScene(ctx, scene.ID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("error scraping from %v: %v", source.Scraper, err)
|
return nil, fmt.Errorf("error scraping from %v: %v", source.Scraper, err)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ type mockSceneScraper struct {
|
|||||||
results map[int]*models.ScrapedScene
|
results map[int]*models.ScrapedScene
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s mockSceneScraper) ScrapeScene(sceneID int) (*models.ScrapedScene, error) {
|
func (s mockSceneScraper) ScrapeScene(ctx context.Context, sceneID int) (*models.ScrapedScene, error) {
|
||||||
if utils.IntInclude(s.errIDs, sceneID) {
|
if utils.IntInclude(s.errIDs, sceneID) {
|
||||||
return nil, errors.New("scrape scene error")
|
return nil, errors.New("scrape scene error")
|
||||||
}
|
}
|
||||||
|
|||||||
48
pkg/image/delete.go
Normal file
48
pkg/image/delete.go
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
package image
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/stashapp/stash/pkg/file"
|
||||||
|
"github.com/stashapp/stash/pkg/manager/paths"
|
||||||
|
"github.com/stashapp/stash/pkg/models"
|
||||||
|
"github.com/stashapp/stash/pkg/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Destroyer interface {
|
||||||
|
Destroy(id int) error
|
||||||
|
}
|
||||||
|
|
||||||
|
// FileDeleter is an extension of file.Deleter that handles deletion of image files.
|
||||||
|
type FileDeleter struct {
|
||||||
|
file.Deleter
|
||||||
|
|
||||||
|
Paths *paths.Paths
|
||||||
|
}
|
||||||
|
|
||||||
|
// MarkGeneratedFiles marks for deletion the generated files for the provided image.
|
||||||
|
func (d *FileDeleter) MarkGeneratedFiles(image *models.Image) error {
|
||||||
|
thumbPath := d.Paths.Generated.GetThumbnailPath(image.Checksum, models.DefaultGthumbWidth)
|
||||||
|
exists, _ := utils.FileExists(thumbPath)
|
||||||
|
if exists {
|
||||||
|
return d.Files([]string{thumbPath})
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Destroy destroys an image, optionally marking the file and generated files for deletion.
|
||||||
|
func Destroy(i *models.Image, destroyer Destroyer, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error {
|
||||||
|
// don't try to delete if the image is in a zip file
|
||||||
|
if deleteFile && !file.IsZipPath(i.Path) {
|
||||||
|
if err := fileDeleter.Files([]string{i.Path}); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if deleteGenerated {
|
||||||
|
if err := fileDeleter.MarkGeneratedFiles(i); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return destroyer.Destroy(i.ID)
|
||||||
|
}
|
||||||
40
pkg/image/filter.go
Normal file
40
pkg/image/filter.go
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
package image
|
||||||
|
|
||||||
|
import (
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
func PathsFilter(paths []string) *models.ImageFilterType {
|
||||||
|
if paths == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
sep := string(filepath.Separator)
|
||||||
|
|
||||||
|
var ret *models.ImageFilterType
|
||||||
|
var or *models.ImageFilterType
|
||||||
|
for _, p := range paths {
|
||||||
|
newOr := &models.ImageFilterType{}
|
||||||
|
if or != nil {
|
||||||
|
or.Or = newOr
|
||||||
|
} else {
|
||||||
|
ret = newOr
|
||||||
|
}
|
||||||
|
|
||||||
|
or = newOr
|
||||||
|
|
||||||
|
if !strings.HasSuffix(p, sep) {
|
||||||
|
p += sep
|
||||||
|
}
|
||||||
|
|
||||||
|
or.Path = &models.StringCriterionInput{
|
||||||
|
Modifier: models.CriterionModifierEquals,
|
||||||
|
Value: p + "%",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret
|
||||||
|
}
|
||||||
@@ -124,13 +124,14 @@ const (
|
|||||||
WallShowTitle = "wall_show_title"
|
WallShowTitle = "wall_show_title"
|
||||||
defaultWallShowTitle = true
|
defaultWallShowTitle = true
|
||||||
|
|
||||||
CustomPerformerImageLocation = "custom_performer_image_location"
|
CustomPerformerImageLocation = "custom_performer_image_location"
|
||||||
MaximumLoopDuration = "maximum_loop_duration"
|
MaximumLoopDuration = "maximum_loop_duration"
|
||||||
AutostartVideo = "autostart_video"
|
AutostartVideo = "autostart_video"
|
||||||
AutostartVideoOnPlaySelected = "autostart_video_on_play_selected"
|
AutostartVideoOnPlaySelected = "autostart_video_on_play_selected"
|
||||||
ContinuePlaylistDefault = "continue_playlist_default"
|
autostartVideoOnPlaySelectedDefault = true
|
||||||
ShowStudioAsText = "show_studio_as_text"
|
ContinuePlaylistDefault = "continue_playlist_default"
|
||||||
CSSEnabled = "cssEnabled"
|
ShowStudioAsText = "show_studio_as_text"
|
||||||
|
CSSEnabled = "cssEnabled"
|
||||||
|
|
||||||
WallPlayback = "wall_playback"
|
WallPlayback = "wall_playback"
|
||||||
defaultWallPlayback = "video"
|
defaultWallPlayback = "video"
|
||||||
@@ -167,7 +168,11 @@ const (
|
|||||||
LogAccess = "logAccess"
|
LogAccess = "logAccess"
|
||||||
defaultLogAccess = true
|
defaultLogAccess = true
|
||||||
|
|
||||||
|
// Default settings
|
||||||
|
DefaultScanSettings = "defaults.scan_task"
|
||||||
DefaultIdentifySettings = "defaults.identify_task"
|
DefaultIdentifySettings = "defaults.identify_task"
|
||||||
|
DefaultAutoTagSettings = "defaults.auto_tag_task"
|
||||||
|
DefaultGenerateSettings = "defaults.generate_task"
|
||||||
|
|
||||||
DeleteFileDefault = "defaults.delete_file"
|
DeleteFileDefault = "defaults.delete_file"
|
||||||
DeleteGeneratedDefault = "defaults.delete_generated"
|
DeleteGeneratedDefault = "defaults.delete_generated"
|
||||||
@@ -826,15 +831,18 @@ func (i *Instance) GetAutostartVideo() bool {
|
|||||||
func (i *Instance) GetAutostartVideoOnPlaySelected() bool {
|
func (i *Instance) GetAutostartVideoOnPlaySelected() bool {
|
||||||
i.Lock()
|
i.Lock()
|
||||||
defer i.Unlock()
|
defer i.Unlock()
|
||||||
viper.SetDefault(AutostartVideoOnPlaySelected, true)
|
|
||||||
return viper.GetBool(AutostartVideoOnPlaySelected)
|
ret := autostartVideoOnPlaySelectedDefault
|
||||||
|
v := i.viper(AutostartVideoOnPlaySelected)
|
||||||
|
if v.IsSet(AutostartVideoOnPlaySelected) {
|
||||||
|
ret = v.GetBool(AutostartVideoOnPlaySelected)
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret
|
||||||
}
|
}
|
||||||
|
|
||||||
func (i *Instance) GetContinuePlaylistDefault() bool {
|
func (i *Instance) GetContinuePlaylistDefault() bool {
|
||||||
i.Lock()
|
return i.getBool(ContinuePlaylistDefault)
|
||||||
defer i.Unlock()
|
|
||||||
viper.SetDefault(ContinuePlaylistDefault, false)
|
|
||||||
return viper.GetBool(ContinuePlaylistDefault)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (i *Instance) GetShowStudioAsText() bool {
|
func (i *Instance) GetShowStudioAsText() bool {
|
||||||
@@ -949,6 +957,63 @@ func (i *Instance) GetDefaultIdentifySettings() *models.IdentifyMetadataTaskOpti
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// GetDefaultScanSettings returns the default Scan task settings.
|
||||||
|
// Returns nil if the settings could not be unmarshalled, or if it
|
||||||
|
// has not been set.
|
||||||
|
func (i *Instance) GetDefaultScanSettings() *models.ScanMetadataOptions {
|
||||||
|
i.RLock()
|
||||||
|
defer i.RUnlock()
|
||||||
|
v := i.viper(DefaultScanSettings)
|
||||||
|
|
||||||
|
if v.IsSet(DefaultScanSettings) {
|
||||||
|
var ret models.ScanMetadataOptions
|
||||||
|
if err := v.UnmarshalKey(DefaultScanSettings, &ret); err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return &ret
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetDefaultAutoTagSettings returns the default Scan task settings.
|
||||||
|
// Returns nil if the settings could not be unmarshalled, or if it
|
||||||
|
// has not been set.
|
||||||
|
func (i *Instance) GetDefaultAutoTagSettings() *models.AutoTagMetadataOptions {
|
||||||
|
i.RLock()
|
||||||
|
defer i.RUnlock()
|
||||||
|
v := i.viper(DefaultAutoTagSettings)
|
||||||
|
|
||||||
|
if v.IsSet(DefaultAutoTagSettings) {
|
||||||
|
var ret models.AutoTagMetadataOptions
|
||||||
|
if err := v.UnmarshalKey(DefaultAutoTagSettings, &ret); err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return &ret
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetDefaultGenerateSettings returns the default Scan task settings.
|
||||||
|
// Returns nil if the settings could not be unmarshalled, or if it
|
||||||
|
// has not been set.
|
||||||
|
func (i *Instance) GetDefaultGenerateSettings() *models.GenerateMetadataOptions {
|
||||||
|
i.RLock()
|
||||||
|
defer i.RUnlock()
|
||||||
|
v := i.viper(DefaultGenerateSettings)
|
||||||
|
|
||||||
|
if v.IsSet(DefaultGenerateSettings) {
|
||||||
|
var ret models.GenerateMetadataOptions
|
||||||
|
if err := v.UnmarshalKey(DefaultGenerateSettings, &ret); err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return &ret
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
// GetTrustedProxies returns a comma separated list of ip addresses that should allow proxying.
|
// GetTrustedProxies returns a comma separated list of ip addresses that should allow proxying.
|
||||||
// When empty, allow from any private network
|
// When empty, allow from any private network
|
||||||
func (i *Instance) GetTrustedProxies() []string {
|
func (i *Instance) GetTrustedProxies() []string {
|
||||||
|
|||||||
270
pkg/manager/generator_interactive_heatmap_speed.go
Normal file
270
pkg/manager/generator_interactive_heatmap_speed.go
Normal file
@@ -0,0 +1,270 @@
|
|||||||
|
package manager
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"image"
|
||||||
|
"image/draw"
|
||||||
|
"image/png"
|
||||||
|
"io/ioutil"
|
||||||
|
"math"
|
||||||
|
"os"
|
||||||
|
"sort"
|
||||||
|
|
||||||
|
"github.com/lucasb-eyer/go-colorful"
|
||||||
|
)
|
||||||
|
|
||||||
|
type InteractiveHeatmapSpeedGenerator struct {
|
||||||
|
InteractiveSpeed int64
|
||||||
|
Funscript Script
|
||||||
|
FunscriptPath string
|
||||||
|
HeatmapPath string
|
||||||
|
Width int
|
||||||
|
Height int
|
||||||
|
NumSegments int
|
||||||
|
}
|
||||||
|
|
||||||
|
type Script struct {
|
||||||
|
// Version of Launchscript
|
||||||
|
Version string `json:"version"`
|
||||||
|
// Inverted causes up and down movement to be flipped.
|
||||||
|
Inverted bool `json:"inverted,omitempty"`
|
||||||
|
// Range is the percentage of a full stroke to use.
|
||||||
|
Range int `json:"range,omitempty"`
|
||||||
|
// Actions are the timed moves.
|
||||||
|
Actions []Action `json:"actions"`
|
||||||
|
AvarageSpeed int64
|
||||||
|
}
|
||||||
|
|
||||||
|
// Action is a move at a specific time.
|
||||||
|
type Action struct {
|
||||||
|
// At time in milliseconds the action should fire.
|
||||||
|
At int64 `json:"at"`
|
||||||
|
// Pos is the place in percent to move to.
|
||||||
|
Pos int `json:"pos"`
|
||||||
|
|
||||||
|
Slope float64
|
||||||
|
Intensity int64
|
||||||
|
Speed float64
|
||||||
|
}
|
||||||
|
|
||||||
|
type GradientTable []struct {
|
||||||
|
Col colorful.Color
|
||||||
|
Pos float64
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewInteractiveHeatmapSpeedGenerator(funscriptPath string, heatmapPath string) *InteractiveHeatmapSpeedGenerator {
|
||||||
|
return &InteractiveHeatmapSpeedGenerator{
|
||||||
|
FunscriptPath: funscriptPath,
|
||||||
|
HeatmapPath: heatmapPath,
|
||||||
|
Width: 320,
|
||||||
|
Height: 15,
|
||||||
|
NumSegments: 150,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g *InteractiveHeatmapSpeedGenerator) Generate() error {
|
||||||
|
funscript, err := g.LoadFunscriptData(g.FunscriptPath)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
g.Funscript = funscript
|
||||||
|
g.Funscript.UpdateIntensityAndSpeed()
|
||||||
|
|
||||||
|
err = g.RenderHeatmap()
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
g.InteractiveSpeed = g.Funscript.CalculateMedian()
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g *InteractiveHeatmapSpeedGenerator) LoadFunscriptData(path string) (Script, error) {
|
||||||
|
data, err := ioutil.ReadFile(path)
|
||||||
|
if err != nil {
|
||||||
|
return Script{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var funscript Script
|
||||||
|
err = json.Unmarshal(data, &funscript)
|
||||||
|
if err != nil {
|
||||||
|
return Script{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if funscript.Actions == nil {
|
||||||
|
return Script{}, fmt.Errorf("actions list missing in %s", path)
|
||||||
|
}
|
||||||
|
|
||||||
|
sort.SliceStable(funscript.Actions, func(i, j int) bool { return funscript.Actions[i].At < funscript.Actions[j].At })
|
||||||
|
|
||||||
|
// trim actions with negative timestamps to avoid index range errors when generating heatmap
|
||||||
|
|
||||||
|
isValid := func(x int64) bool { return x >= 0 }
|
||||||
|
|
||||||
|
i := 0
|
||||||
|
for _, x := range funscript.Actions {
|
||||||
|
if isValid(x.At) {
|
||||||
|
funscript.Actions[i] = x
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
funscript.Actions = funscript.Actions[:i]
|
||||||
|
|
||||||
|
return funscript, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (funscript *Script) UpdateIntensityAndSpeed() {
|
||||||
|
|
||||||
|
var t1, t2 int64
|
||||||
|
var p1, p2 int
|
||||||
|
var slope float64
|
||||||
|
var intensity int64
|
||||||
|
for i := range funscript.Actions {
|
||||||
|
if i == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
t1 = funscript.Actions[i].At
|
||||||
|
t2 = funscript.Actions[i-1].At
|
||||||
|
p1 = funscript.Actions[i].Pos
|
||||||
|
p2 = funscript.Actions[i-1].Pos
|
||||||
|
|
||||||
|
slope = math.Min(math.Max(1/(2*float64(t1-t2)/1000), 0), 20)
|
||||||
|
intensity = int64(slope * math.Abs((float64)(p1-p2)))
|
||||||
|
speed := math.Abs(float64(p1-p2)) / float64(t1-t2) * 1000
|
||||||
|
|
||||||
|
funscript.Actions[i].Slope = slope
|
||||||
|
funscript.Actions[i].Intensity = intensity
|
||||||
|
funscript.Actions[i].Speed = speed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// funscript needs to have intensity updated first
|
||||||
|
func (g *InteractiveHeatmapSpeedGenerator) RenderHeatmap() error {
|
||||||
|
|
||||||
|
gradient := g.Funscript.getGradientTable(g.NumSegments)
|
||||||
|
|
||||||
|
img := image.NewRGBA(image.Rect(0, 0, g.Width, g.Height))
|
||||||
|
for x := 0; x < g.Width; x++ {
|
||||||
|
c := gradient.GetInterpolatedColorFor(float64(x) / float64(g.Width))
|
||||||
|
draw.Draw(img, image.Rect(x, 0, x+1, g.Height), &image.Uniform{c}, image.Point{}, draw.Src)
|
||||||
|
}
|
||||||
|
|
||||||
|
// add 10 minute marks
|
||||||
|
maxts := g.Funscript.Actions[len(g.Funscript.Actions)-1].At
|
||||||
|
const tick = 600000
|
||||||
|
var ts int64 = tick
|
||||||
|
c, _ := colorful.Hex("#000000")
|
||||||
|
for ts < maxts {
|
||||||
|
x := int(float64(ts) / float64(maxts) * float64(g.Width))
|
||||||
|
draw.Draw(img, image.Rect(x-1, g.Height/2, x+1, g.Height), &image.Uniform{c}, image.Point{}, draw.Src)
|
||||||
|
ts += tick
|
||||||
|
}
|
||||||
|
|
||||||
|
outpng, err := os.Create(g.HeatmapPath)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer outpng.Close()
|
||||||
|
|
||||||
|
err = png.Encode(outpng, img)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (funscript *Script) CalculateMedian() int64 {
|
||||||
|
sort.Slice(funscript.Actions, func(i, j int) bool {
|
||||||
|
return funscript.Actions[i].Speed < funscript.Actions[j].Speed
|
||||||
|
})
|
||||||
|
|
||||||
|
mNumber := len(funscript.Actions) / 2
|
||||||
|
|
||||||
|
if len(funscript.Actions)%2 != 0 {
|
||||||
|
return int64(funscript.Actions[mNumber].Speed)
|
||||||
|
}
|
||||||
|
|
||||||
|
return int64((funscript.Actions[mNumber-1].Speed + funscript.Actions[mNumber].Speed) / 2)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (gt GradientTable) GetInterpolatedColorFor(t float64) colorful.Color {
|
||||||
|
for i := 0; i < len(gt)-1; i++ {
|
||||||
|
c1 := gt[i]
|
||||||
|
c2 := gt[i+1]
|
||||||
|
if c1.Pos <= t && t <= c2.Pos {
|
||||||
|
// We are in between c1 and c2. Go blend them!
|
||||||
|
t := (t - c1.Pos) / (c2.Pos - c1.Pos)
|
||||||
|
return c1.Col.BlendHcl(c2.Col, t).Clamped()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Nothing found? Means we're at (or past) the last gradient keypoint.
|
||||||
|
return gt[len(gt)-1].Col
|
||||||
|
}
|
||||||
|
|
||||||
|
func (funscript Script) getGradientTable(numSegments int) GradientTable {
|
||||||
|
segments := make([]struct {
|
||||||
|
count int
|
||||||
|
intensity int
|
||||||
|
}, numSegments)
|
||||||
|
gradient := make(GradientTable, numSegments)
|
||||||
|
|
||||||
|
maxts := funscript.Actions[len(funscript.Actions)-1].At
|
||||||
|
|
||||||
|
for _, a := range funscript.Actions {
|
||||||
|
segment := int(float64(a.At) / float64(maxts+1) * float64(numSegments))
|
||||||
|
segments[segment].count++
|
||||||
|
segments[segment].intensity += int(a.Intensity)
|
||||||
|
}
|
||||||
|
|
||||||
|
for i := 0; i < numSegments; i++ {
|
||||||
|
gradient[i].Pos = float64(i) / float64(numSegments-1)
|
||||||
|
if segments[i].count > 0 {
|
||||||
|
gradient[i].Col = getSegmentColor(float64(segments[i].intensity) / float64(segments[i].count))
|
||||||
|
} else {
|
||||||
|
gradient[i].Col = getSegmentColor(0.0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return gradient
|
||||||
|
}
|
||||||
|
|
||||||
|
func getSegmentColor(intensity float64) colorful.Color {
|
||||||
|
colorBlue, _ := colorful.Hex("#1e90ff") // DodgerBlue
|
||||||
|
colorGreen, _ := colorful.Hex("#228b22") // ForestGreen
|
||||||
|
colorYellow, _ := colorful.Hex("#ffd700") // Gold
|
||||||
|
colorRed, _ := colorful.Hex("#dc143c") // Crimson
|
||||||
|
colorPurple, _ := colorful.Hex("#800080") // Purple
|
||||||
|
colorBlack, _ := colorful.Hex("#0f001e")
|
||||||
|
colorBackground, _ := colorful.Hex("#30404d") // Same as GridCard bg
|
||||||
|
|
||||||
|
var stepSize = 60.0
|
||||||
|
var f float64
|
||||||
|
var c colorful.Color
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case intensity <= 0.001:
|
||||||
|
c = colorBackground
|
||||||
|
case intensity <= 1*stepSize:
|
||||||
|
f = (intensity - 0*stepSize) / stepSize
|
||||||
|
c = colorBlue.BlendLab(colorGreen, f)
|
||||||
|
case intensity <= 2*stepSize:
|
||||||
|
f = (intensity - 1*stepSize) / stepSize
|
||||||
|
c = colorGreen.BlendLab(colorYellow, f)
|
||||||
|
case intensity <= 3*stepSize:
|
||||||
|
f = (intensity - 2*stepSize) / stepSize
|
||||||
|
c = colorYellow.BlendLab(colorRed, f)
|
||||||
|
case intensity <= 4*stepSize:
|
||||||
|
f = (intensity - 3*stepSize) / stepSize
|
||||||
|
c = colorRed.BlendRgb(colorPurple, f)
|
||||||
|
default:
|
||||||
|
f = (intensity - 4*stepSize) / (5 * stepSize)
|
||||||
|
f = math.Min(f, 1.0)
|
||||||
|
c = colorPurple.BlendLab(colorBlack, f)
|
||||||
|
}
|
||||||
|
|
||||||
|
return c
|
||||||
|
}
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
package manager
|
package manager
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"image"
|
"image"
|
||||||
"image/color"
|
"image/color"
|
||||||
@@ -33,6 +34,12 @@ func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageO
|
|||||||
if !exists {
|
if !exists {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// FFMPEG bombs out if we try to request 89 snapshots from a 2 second video
|
||||||
|
if videoFile.Duration < 3 {
|
||||||
|
return nil, errors.New("video too short to create sprite")
|
||||||
|
}
|
||||||
|
|
||||||
generator, err := newGeneratorInfo(videoFile)
|
generator, err := newGeneratorInfo(videoFile)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
|
|||||||
@@ -2,34 +2,11 @@ package manager
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"archive/zip"
|
"archive/zip"
|
||||||
"os"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/stashapp/stash/pkg/logger"
|
"github.com/stashapp/stash/pkg/logger"
|
||||||
"github.com/stashapp/stash/pkg/models"
|
|
||||||
"github.com/stashapp/stash/pkg/utils"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// DeleteGeneratedImageFiles deletes generated files for the provided image.
|
|
||||||
func DeleteGeneratedImageFiles(image *models.Image) {
|
|
||||||
thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(image.Checksum, models.DefaultGthumbWidth)
|
|
||||||
exists, _ := utils.FileExists(thumbPath)
|
|
||||||
if exists {
|
|
||||||
err := os.Remove(thumbPath)
|
|
||||||
if err != nil {
|
|
||||||
logger.Warnf("Could not delete file %s: %s", thumbPath, err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// DeleteImageFile deletes the image file from the filesystem.
|
|
||||||
func DeleteImageFile(image *models.Image) {
|
|
||||||
err := os.Remove(image.Path)
|
|
||||||
if err != nil {
|
|
||||||
logger.Warnf("Could not delete file %s: %s", image.Path, err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func walkGalleryZip(path string, walkFunc func(file *zip.File) error) error {
|
func walkGalleryZip(path string, walkFunc func(file *zip.File) error) error {
|
||||||
readCloser, err := zip.OpenReader(path)
|
readCloser, err := zip.OpenReader(path)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|||||||
@@ -276,6 +276,9 @@ func (s *singleton) RefreshConfig() {
|
|||||||
if err := utils.EnsureDir(s.Paths.Generated.Downloads); err != nil {
|
if err := utils.EnsureDir(s.Paths.Generated.Downloads); err != nil {
|
||||||
logger.Warnf("could not create directory for Downloads: %v", err)
|
logger.Warnf("could not create directory for Downloads: %v", err)
|
||||||
}
|
}
|
||||||
|
if err := utils.EnsureDir(s.Paths.Generated.InteractiveHeatmap); err != nil {
|
||||||
|
logger.Warnf("could not create directory for Interactive Heatmaps: %v", err)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -13,13 +13,14 @@ const thumbDirDepth int = 2
|
|||||||
const thumbDirLength int = 2 // thumbDirDepth * thumbDirLength must be smaller than the length of checksum
|
const thumbDirLength int = 2 // thumbDirDepth * thumbDirLength must be smaller than the length of checksum
|
||||||
|
|
||||||
type generatedPaths struct {
|
type generatedPaths struct {
|
||||||
Screenshots string
|
Screenshots string
|
||||||
Thumbnails string
|
Thumbnails string
|
||||||
Vtt string
|
Vtt string
|
||||||
Markers string
|
Markers string
|
||||||
Transcodes string
|
Transcodes string
|
||||||
Downloads string
|
Downloads string
|
||||||
Tmp string
|
Tmp string
|
||||||
|
InteractiveHeatmap string
|
||||||
}
|
}
|
||||||
|
|
||||||
func newGeneratedPaths(path string) *generatedPaths {
|
func newGeneratedPaths(path string) *generatedPaths {
|
||||||
@@ -31,6 +32,7 @@ func newGeneratedPaths(path string) *generatedPaths {
|
|||||||
gp.Transcodes = filepath.Join(path, "transcodes")
|
gp.Transcodes = filepath.Join(path, "transcodes")
|
||||||
gp.Downloads = filepath.Join(path, "download_stage")
|
gp.Downloads = filepath.Join(path, "download_stage")
|
||||||
gp.Tmp = filepath.Join(path, "tmp")
|
gp.Tmp = filepath.Join(path, "tmp")
|
||||||
|
gp.InteractiveHeatmap = filepath.Join(path, "interactive_heatmaps")
|
||||||
return &gp
|
return &gp
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,8 +1,9 @@
|
|||||||
package paths
|
package paths
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/stashapp/stash/pkg/utils"
|
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
type scenePaths struct {
|
type scenePaths struct {
|
||||||
@@ -51,3 +52,7 @@ func (sp *scenePaths) GetSpriteImageFilePath(checksum string) string {
|
|||||||
func (sp *scenePaths) GetSpriteVttFilePath(checksum string) string {
|
func (sp *scenePaths) GetSpriteVttFilePath(checksum string) string {
|
||||||
return filepath.Join(sp.generated.Vtt, checksum+"_thumbs.vtt")
|
return filepath.Join(sp.generated.Vtt, checksum+"_thumbs.vtt")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (sp *scenePaths) GetInteractiveHeatmapPath(checksum string) string {
|
||||||
|
return filepath.Join(sp.generated.InteractiveHeatmap, checksum+".png")
|
||||||
|
}
|
||||||
|
|||||||
@@ -44,7 +44,20 @@ func WaitAndDeregisterStream(filepath string, w *http.ResponseWriter, r *http.Re
|
|||||||
}()
|
}()
|
||||||
}
|
}
|
||||||
|
|
||||||
func KillRunningStreams(path string) {
|
func KillRunningStreams(scene *models.Scene, fileNamingAlgo models.HashAlgorithm) {
|
||||||
|
killRunningStreams(scene.Path)
|
||||||
|
|
||||||
|
sceneHash := scene.GetHash(fileNamingAlgo)
|
||||||
|
|
||||||
|
if sceneHash == "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
transcodePath := GetInstance().Paths.Scene.GetTranscodePath(sceneHash)
|
||||||
|
killRunningStreams(transcodePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func killRunningStreams(path string) {
|
||||||
ffmpeg.KillRunningEncoders(path)
|
ffmpeg.KillRunningEncoders(path)
|
||||||
|
|
||||||
streamingFilesMutex.RLock()
|
streamingFilesMutex.RLock()
|
||||||
|
|||||||
@@ -2,190 +2,13 @@ package manager
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
|
|
||||||
"github.com/stashapp/stash/pkg/ffmpeg"
|
"github.com/stashapp/stash/pkg/ffmpeg"
|
||||||
"github.com/stashapp/stash/pkg/logger"
|
|
||||||
"github.com/stashapp/stash/pkg/manager/config"
|
"github.com/stashapp/stash/pkg/manager/config"
|
||||||
"github.com/stashapp/stash/pkg/models"
|
"github.com/stashapp/stash/pkg/models"
|
||||||
"github.com/stashapp/stash/pkg/utils"
|
"github.com/stashapp/stash/pkg/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
// DestroyScene deletes a scene and its associated relationships from the
|
|
||||||
// database. Returns a function to perform any post-commit actions.
|
|
||||||
func DestroyScene(scene *models.Scene, repo models.Repository) (func(), error) {
|
|
||||||
qb := repo.Scene()
|
|
||||||
mqb := repo.SceneMarker()
|
|
||||||
|
|
||||||
markers, err := mqb.FindBySceneID(scene.ID)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
var funcs []func()
|
|
||||||
for _, m := range markers {
|
|
||||||
f, err := DestroySceneMarker(scene, m, mqb)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
funcs = append(funcs, f)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := qb.Destroy(scene.ID); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return func() {
|
|
||||||
for _, f := range funcs {
|
|
||||||
f()
|
|
||||||
}
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// DestroySceneMarker deletes the scene marker from the database and returns a
|
|
||||||
// function that removes the generated files, to be executed after the
|
|
||||||
// transaction is successfully committed.
|
|
||||||
func DestroySceneMarker(scene *models.Scene, sceneMarker *models.SceneMarker, qb models.SceneMarkerWriter) (func(), error) {
|
|
||||||
if err := qb.Destroy(sceneMarker.ID); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// delete the preview for the marker
|
|
||||||
return func() {
|
|
||||||
seconds := int(sceneMarker.Seconds)
|
|
||||||
DeleteSceneMarkerFiles(scene, seconds, config.GetInstance().GetVideoFileNamingAlgorithm())
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// DeleteGeneratedSceneFiles deletes generated files for the provided scene.
|
|
||||||
func DeleteGeneratedSceneFiles(scene *models.Scene, fileNamingAlgo models.HashAlgorithm) {
|
|
||||||
sceneHash := scene.GetHash(fileNamingAlgo)
|
|
||||||
|
|
||||||
if sceneHash == "" {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
markersFolder := filepath.Join(GetInstance().Paths.Generated.Markers, sceneHash)
|
|
||||||
|
|
||||||
exists, _ := utils.FileExists(markersFolder)
|
|
||||||
if exists {
|
|
||||||
err := os.RemoveAll(markersFolder)
|
|
||||||
if err != nil {
|
|
||||||
logger.Warnf("Could not delete folder %s: %s", markersFolder, err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
thumbPath := GetInstance().Paths.Scene.GetThumbnailScreenshotPath(sceneHash)
|
|
||||||
exists, _ = utils.FileExists(thumbPath)
|
|
||||||
if exists {
|
|
||||||
err := os.Remove(thumbPath)
|
|
||||||
if err != nil {
|
|
||||||
logger.Warnf("Could not delete file %s: %s", thumbPath, err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
normalPath := GetInstance().Paths.Scene.GetScreenshotPath(sceneHash)
|
|
||||||
exists, _ = utils.FileExists(normalPath)
|
|
||||||
if exists {
|
|
||||||
err := os.Remove(normalPath)
|
|
||||||
if err != nil {
|
|
||||||
logger.Warnf("Could not delete file %s: %s", normalPath, err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
streamPreviewPath := GetInstance().Paths.Scene.GetStreamPreviewPath(sceneHash)
|
|
||||||
exists, _ = utils.FileExists(streamPreviewPath)
|
|
||||||
if exists {
|
|
||||||
err := os.Remove(streamPreviewPath)
|
|
||||||
if err != nil {
|
|
||||||
logger.Warnf("Could not delete file %s: %s", streamPreviewPath, err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
streamPreviewImagePath := GetInstance().Paths.Scene.GetStreamPreviewImagePath(sceneHash)
|
|
||||||
exists, _ = utils.FileExists(streamPreviewImagePath)
|
|
||||||
if exists {
|
|
||||||
err := os.Remove(streamPreviewImagePath)
|
|
||||||
if err != nil {
|
|
||||||
logger.Warnf("Could not delete file %s: %s", streamPreviewImagePath, err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
transcodePath := GetInstance().Paths.Scene.GetTranscodePath(sceneHash)
|
|
||||||
exists, _ = utils.FileExists(transcodePath)
|
|
||||||
if exists {
|
|
||||||
// kill any running streams
|
|
||||||
KillRunningStreams(transcodePath)
|
|
||||||
|
|
||||||
err := os.Remove(transcodePath)
|
|
||||||
if err != nil {
|
|
||||||
logger.Warnf("Could not delete file %s: %s", transcodePath, err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
spritePath := GetInstance().Paths.Scene.GetSpriteImageFilePath(sceneHash)
|
|
||||||
exists, _ = utils.FileExists(spritePath)
|
|
||||||
if exists {
|
|
||||||
err := os.Remove(spritePath)
|
|
||||||
if err != nil {
|
|
||||||
logger.Warnf("Could not delete file %s: %s", spritePath, err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
vttPath := GetInstance().Paths.Scene.GetSpriteVttFilePath(sceneHash)
|
|
||||||
exists, _ = utils.FileExists(vttPath)
|
|
||||||
if exists {
|
|
||||||
err := os.Remove(vttPath)
|
|
||||||
if err != nil {
|
|
||||||
logger.Warnf("Could not delete file %s: %s", vttPath, err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// DeleteSceneMarkerFiles deletes generated files for a scene marker with the
|
|
||||||
// provided scene and timestamp.
|
|
||||||
func DeleteSceneMarkerFiles(scene *models.Scene, seconds int, fileNamingAlgo models.HashAlgorithm) {
|
|
||||||
videoPath := GetInstance().Paths.SceneMarkers.GetStreamPath(scene.GetHash(fileNamingAlgo), seconds)
|
|
||||||
imagePath := GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.GetHash(fileNamingAlgo), seconds)
|
|
||||||
screenshotPath := GetInstance().Paths.SceneMarkers.GetStreamScreenshotPath(scene.GetHash(fileNamingAlgo), seconds)
|
|
||||||
|
|
||||||
exists, _ := utils.FileExists(videoPath)
|
|
||||||
if exists {
|
|
||||||
err := os.Remove(videoPath)
|
|
||||||
if err != nil {
|
|
||||||
logger.Warnf("Could not delete file %s: %s", videoPath, err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
exists, _ = utils.FileExists(imagePath)
|
|
||||||
if exists {
|
|
||||||
err := os.Remove(imagePath)
|
|
||||||
if err != nil {
|
|
||||||
logger.Warnf("Could not delete file %s: %s", imagePath, err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
exists, _ = utils.FileExists(screenshotPath)
|
|
||||||
if exists {
|
|
||||||
err := os.Remove(screenshotPath)
|
|
||||||
if err != nil {
|
|
||||||
logger.Warnf("Could not delete file %s: %s", screenshotPath, err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// DeleteSceneFile deletes the scene video file from the filesystem.
|
|
||||||
func DeleteSceneFile(scene *models.Scene) {
|
|
||||||
// kill any running encoders
|
|
||||||
KillRunningStreams(scene.Path)
|
|
||||||
|
|
||||||
err := os.Remove(scene.Path)
|
|
||||||
if err != nil {
|
|
||||||
logger.Warnf("Could not delete file %s: %s", scene.Path, err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func GetSceneFileContainer(scene *models.Scene) (ffmpeg.Container, error) {
|
func GetSceneFileContainer(scene *models.Scene) (ffmpeg.Container, error) {
|
||||||
var container ffmpeg.Container
|
var container ffmpeg.Container
|
||||||
if scene.Format.Valid {
|
if scene.Format.Valid {
|
||||||
|
|||||||
@@ -3,9 +3,10 @@ package manager
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"os"
|
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/file"
|
||||||
|
"github.com/stashapp/stash/pkg/gallery"
|
||||||
"github.com/stashapp/stash/pkg/image"
|
"github.com/stashapp/stash/pkg/image"
|
||||||
"github.com/stashapp/stash/pkg/job"
|
"github.com/stashapp/stash/pkg/job"
|
||||||
"github.com/stashapp/stash/pkg/logger"
|
"github.com/stashapp/stash/pkg/logger"
|
||||||
@@ -46,7 +47,7 @@ func (j *cleanJob) Execute(ctx context.Context, progress *job.Progress) {
|
|||||||
if err := j.processImages(ctx, progress, r.Image()); err != nil {
|
if err := j.processImages(ctx, progress, r.Image()); err != nil {
|
||||||
return fmt.Errorf("error cleaning images: %w", err)
|
return fmt.Errorf("error cleaning images: %w", err)
|
||||||
}
|
}
|
||||||
if err := j.processGalleries(ctx, progress, r.Gallery()); err != nil {
|
if err := j.processGalleries(ctx, progress, r.Gallery(), r.Image()); err != nil {
|
||||||
return fmt.Errorf("error cleaning galleries: %w", err)
|
return fmt.Errorf("error cleaning galleries: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -66,28 +67,35 @@ func (j *cleanJob) Execute(ctx context.Context, progress *job.Progress) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (j *cleanJob) getCount(r models.ReaderRepository) (int, error) {
|
func (j *cleanJob) getCount(r models.ReaderRepository) (int, error) {
|
||||||
sceneCount, err := r.Scene().Count()
|
sceneFilter := scene.PathsFilter(j.input.Paths)
|
||||||
|
sceneResult, err := r.Scene().Query(models.SceneQueryOptions{
|
||||||
|
QueryOptions: models.QueryOptions{
|
||||||
|
Count: true,
|
||||||
|
},
|
||||||
|
SceneFilter: sceneFilter,
|
||||||
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, err
|
return 0, err
|
||||||
}
|
}
|
||||||
|
|
||||||
imageCount, err := r.Image().Count()
|
imageCount, err := r.Image().QueryCount(image.PathsFilter(j.input.Paths), nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, err
|
return 0, err
|
||||||
}
|
}
|
||||||
|
|
||||||
galleryCount, err := r.Gallery().Count()
|
galleryCount, err := r.Gallery().QueryCount(gallery.PathsFilter(j.input.Paths), nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, err
|
return 0, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return sceneCount + imageCount + galleryCount, nil
|
return sceneResult.Count + imageCount + galleryCount, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (j *cleanJob) processScenes(ctx context.Context, progress *job.Progress, qb models.SceneReader) error {
|
func (j *cleanJob) processScenes(ctx context.Context, progress *job.Progress, qb models.SceneReader) error {
|
||||||
batchSize := 1000
|
batchSize := 1000
|
||||||
|
|
||||||
findFilter := models.BatchFindFilter(batchSize)
|
findFilter := models.BatchFindFilter(batchSize)
|
||||||
|
sceneFilter := scene.PathsFilter(j.input.Paths)
|
||||||
sort := "path"
|
sort := "path"
|
||||||
findFilter.Sort = &sort
|
findFilter.Sort = &sort
|
||||||
|
|
||||||
@@ -99,7 +107,7 @@ func (j *cleanJob) processScenes(ctx context.Context, progress *job.Progress, qb
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
scenes, err := scene.Query(qb, nil, findFilter)
|
scenes, err := scene.Query(qb, sceneFilter, findFilter)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("error querying for scenes: %w", err)
|
return fmt.Errorf("error querying for scenes: %w", err)
|
||||||
}
|
}
|
||||||
@@ -146,10 +154,11 @@ func (j *cleanJob) processScenes(ctx context.Context, progress *job.Progress, qb
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (j *cleanJob) processGalleries(ctx context.Context, progress *job.Progress, qb models.GalleryReader) error {
|
func (j *cleanJob) processGalleries(ctx context.Context, progress *job.Progress, qb models.GalleryReader, iqb models.ImageReader) error {
|
||||||
batchSize := 1000
|
batchSize := 1000
|
||||||
|
|
||||||
findFilter := models.BatchFindFilter(batchSize)
|
findFilter := models.BatchFindFilter(batchSize)
|
||||||
|
galleryFilter := gallery.PathsFilter(j.input.Paths)
|
||||||
sort := "path"
|
sort := "path"
|
||||||
findFilter.Sort = &sort
|
findFilter.Sort = &sort
|
||||||
|
|
||||||
@@ -161,14 +170,14 @@ func (j *cleanJob) processGalleries(ctx context.Context, progress *job.Progress,
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
galleries, _, err := qb.Query(nil, findFilter)
|
galleries, _, err := qb.Query(galleryFilter, findFilter)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("error querying for galleries: %w", err)
|
return fmt.Errorf("error querying for galleries: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, gallery := range galleries {
|
for _, gallery := range galleries {
|
||||||
progress.ExecuteTask(fmt.Sprintf("Assessing gallery %s for clean", gallery.GetTitle()), func() {
|
progress.ExecuteTask(fmt.Sprintf("Assessing gallery %s for clean", gallery.GetTitle()), func() {
|
||||||
if j.shouldCleanGallery(gallery) {
|
if j.shouldCleanGallery(gallery, iqb) {
|
||||||
toDelete = append(toDelete, gallery.ID)
|
toDelete = append(toDelete, gallery.ID)
|
||||||
} else {
|
} else {
|
||||||
// increment progress, no further processing
|
// increment progress, no further processing
|
||||||
@@ -210,6 +219,7 @@ func (j *cleanJob) processImages(ctx context.Context, progress *job.Progress, qb
|
|||||||
batchSize := 1000
|
batchSize := 1000
|
||||||
|
|
||||||
findFilter := models.BatchFindFilter(batchSize)
|
findFilter := models.BatchFindFilter(batchSize)
|
||||||
|
imageFilter := image.PathsFilter(j.input.Paths)
|
||||||
|
|
||||||
// performance consideration: order by path since default ordering by
|
// performance consideration: order by path since default ordering by
|
||||||
// title is slow
|
// title is slow
|
||||||
@@ -224,7 +234,7 @@ func (j *cleanJob) processImages(ctx context.Context, progress *job.Progress, qb
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
images, err := image.Query(qb, nil, findFilter)
|
images, err := image.Query(qb, imageFilter, findFilter)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("error querying for images: %w", err)
|
return fmt.Errorf("error querying for images: %w", err)
|
||||||
}
|
}
|
||||||
@@ -308,9 +318,9 @@ func (j *cleanJob) shouldCleanScene(s *models.Scene) bool {
|
|||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
func (j *cleanJob) shouldCleanGallery(g *models.Gallery) bool {
|
func (j *cleanJob) shouldCleanGallery(g *models.Gallery, qb models.ImageReader) bool {
|
||||||
// never clean manually created galleries
|
// never clean manually created galleries
|
||||||
if !g.Zip {
|
if !g.Path.Valid {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -326,9 +336,27 @@ func (j *cleanJob) shouldCleanGallery(g *models.Gallery) bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
config := config.GetInstance()
|
config := config.GetInstance()
|
||||||
if !utils.MatchExtension(path, config.GetGalleryExtensions()) {
|
if g.Zip {
|
||||||
logger.Infof("File extension does not match gallery extensions. Marking to clean: \"%s\"", path)
|
if !utils.MatchExtension(path, config.GetGalleryExtensions()) {
|
||||||
return true
|
logger.Infof("File extension does not match gallery extensions. Marking to clean: \"%s\"", path)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if countImagesInZip(path) == 0 {
|
||||||
|
logger.Infof("Gallery has 0 images. Marking to clean: \"%s\"", path)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// folder-based - delete if it has no images
|
||||||
|
count, err := qb.CountByGalleryID(g.ID)
|
||||||
|
if err != nil {
|
||||||
|
logger.Warnf("Error trying to count gallery images for %q: %v", path, err)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if count == 0 {
|
||||||
|
return true
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if matchFile(path, config.GetImageExcludes()) {
|
if matchFile(path, config.GetImageExcludes()) {
|
||||||
@@ -336,11 +364,6 @@ func (j *cleanJob) shouldCleanGallery(g *models.Gallery) bool {
|
|||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
if countImagesInZip(path) == 0 {
|
|
||||||
logger.Infof("Gallery has 0 images. Marking to clean: \"%s\"", path)
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -370,72 +393,99 @@ func (j *cleanJob) shouldCleanImage(s *models.Image) bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (j *cleanJob) deleteScene(ctx context.Context, fileNamingAlgorithm models.HashAlgorithm, sceneID int) {
|
func (j *cleanJob) deleteScene(ctx context.Context, fileNamingAlgorithm models.HashAlgorithm, sceneID int) {
|
||||||
var postCommitFunc func()
|
fileNamingAlgo := GetInstance().Config.GetVideoFileNamingAlgorithm()
|
||||||
var scene *models.Scene
|
|
||||||
|
fileDeleter := &scene.FileDeleter{
|
||||||
|
Deleter: *file.NewDeleter(),
|
||||||
|
FileNamingAlgo: fileNamingAlgo,
|
||||||
|
Paths: GetInstance().Paths,
|
||||||
|
}
|
||||||
|
var s *models.Scene
|
||||||
if err := j.txnManager.WithTxn(context.TODO(), func(repo models.Repository) error {
|
if err := j.txnManager.WithTxn(context.TODO(), func(repo models.Repository) error {
|
||||||
qb := repo.Scene()
|
qb := repo.Scene()
|
||||||
|
|
||||||
var err error
|
var err error
|
||||||
scene, err = qb.Find(sceneID)
|
s, err = qb.Find(sceneID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
postCommitFunc, err = DestroyScene(scene, repo)
|
|
||||||
return err
|
return scene.Destroy(s, repo, fileDeleter, true, false)
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
|
fileDeleter.Rollback()
|
||||||
|
|
||||||
logger.Errorf("Error deleting scene from database: %s", err.Error())
|
logger.Errorf("Error deleting scene from database: %s", err.Error())
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
postCommitFunc()
|
// perform the post-commit actions
|
||||||
|
fileDeleter.Commit()
|
||||||
|
|
||||||
DeleteGeneratedSceneFiles(scene, fileNamingAlgorithm)
|
GetInstance().PluginCache.ExecutePostHooks(ctx, sceneID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{
|
||||||
|
Checksum: s.Checksum.String,
|
||||||
GetInstance().PluginCache.ExecutePostHooks(ctx, sceneID, plugin.SceneDestroyPost, nil, nil)
|
OSHash: s.OSHash.String,
|
||||||
|
Path: s.Path,
|
||||||
|
}, nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (j *cleanJob) deleteGallery(ctx context.Context, galleryID int) {
|
func (j *cleanJob) deleteGallery(ctx context.Context, galleryID int) {
|
||||||
|
var g *models.Gallery
|
||||||
|
|
||||||
if err := j.txnManager.WithTxn(context.TODO(), func(repo models.Repository) error {
|
if err := j.txnManager.WithTxn(context.TODO(), func(repo models.Repository) error {
|
||||||
qb := repo.Gallery()
|
qb := repo.Gallery()
|
||||||
|
|
||||||
|
var err error
|
||||||
|
g, err = qb.Find(galleryID)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
return qb.Destroy(galleryID)
|
return qb.Destroy(galleryID)
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
logger.Errorf("Error deleting gallery from database: %s", err.Error())
|
logger.Errorf("Error deleting gallery from database: %s", err.Error())
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
GetInstance().PluginCache.ExecutePostHooks(ctx, galleryID, plugin.GalleryDestroyPost, nil, nil)
|
GetInstance().PluginCache.ExecutePostHooks(ctx, galleryID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
|
||||||
|
Checksum: g.Checksum,
|
||||||
|
Path: g.Path.String,
|
||||||
|
}, nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (j *cleanJob) deleteImage(ctx context.Context, imageID int) {
|
func (j *cleanJob) deleteImage(ctx context.Context, imageID int) {
|
||||||
var checksum string
|
fileDeleter := &image.FileDeleter{
|
||||||
|
Deleter: *file.NewDeleter(),
|
||||||
|
Paths: GetInstance().Paths,
|
||||||
|
}
|
||||||
|
|
||||||
|
var i *models.Image
|
||||||
if err := j.txnManager.WithTxn(context.TODO(), func(repo models.Repository) error {
|
if err := j.txnManager.WithTxn(context.TODO(), func(repo models.Repository) error {
|
||||||
qb := repo.Image()
|
qb := repo.Image()
|
||||||
|
|
||||||
image, err := qb.Find(imageID)
|
var err error
|
||||||
|
i, err = qb.Find(imageID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if image == nil {
|
if i == nil {
|
||||||
return fmt.Errorf("image not found: %d", imageID)
|
return fmt.Errorf("image not found: %d", imageID)
|
||||||
}
|
}
|
||||||
|
|
||||||
checksum = image.Checksum
|
return image.Destroy(i, qb, fileDeleter, true, false)
|
||||||
|
|
||||||
return qb.Destroy(imageID)
|
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
|
fileDeleter.Rollback()
|
||||||
|
|
||||||
logger.Errorf("Error deleting image from database: %s", err.Error())
|
logger.Errorf("Error deleting image from database: %s", err.Error())
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// remove cache image
|
// perform the post-commit actions
|
||||||
pathErr := os.Remove(GetInstance().Paths.Generated.GetThumbnailPath(checksum, models.DefaultGthumbWidth))
|
fileDeleter.Commit()
|
||||||
if pathErr != nil {
|
GetInstance().PluginCache.ExecutePostHooks(ctx, imageID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
|
||||||
logger.Errorf("Error deleting thumbnail image from cache: %s", pathErr)
|
Checksum: i.Checksum,
|
||||||
}
|
Path: i.Path,
|
||||||
|
}, nil)
|
||||||
GetInstance().PluginCache.ExecutePostHooks(ctx, imageID, plugin.ImageDestroyPost, nil, nil)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func getStashFromPath(pathToCheck string) *models.StashConfig {
|
func getStashFromPath(pathToCheck string) *models.StashConfig {
|
||||||
|
|||||||
@@ -26,12 +26,13 @@ type GenerateJob struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type totalsGenerate struct {
|
type totalsGenerate struct {
|
||||||
sprites int64
|
sprites int64
|
||||||
previews int64
|
previews int64
|
||||||
imagePreviews int64
|
imagePreviews int64
|
||||||
markers int64
|
markers int64
|
||||||
transcodes int64
|
transcodes int64
|
||||||
phashes int64
|
phashes int64
|
||||||
|
interactiveHeatmapSpeeds int64
|
||||||
|
|
||||||
tasks int
|
tasks int
|
||||||
}
|
}
|
||||||
@@ -94,7 +95,7 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.Infof("Generating %d sprites %d previews %d image previews %d markers %d transcodes %d phashes", totals.sprites, totals.previews, totals.imagePreviews, totals.markers, totals.transcodes, totals.phashes)
|
logger.Infof("Generating %d sprites %d previews %d image previews %d markers %d transcodes %d phashes %d heatmaps & speeds", totals.sprites, totals.previews, totals.imagePreviews, totals.markers, totals.transcodes, totals.phashes, totals.interactiveHeatmapSpeeds)
|
||||||
|
|
||||||
progress.SetTotal(int(totals.tasks))
|
progress.SetTotal(int(totals.tasks))
|
||||||
}()
|
}()
|
||||||
@@ -251,9 +252,11 @@ func (j *GenerateJob) queueSceneJobs(scene *models.Scene, queue chan<- Task, tot
|
|||||||
}
|
}
|
||||||
|
|
||||||
if utils.IsTrue(j.input.Transcodes) {
|
if utils.IsTrue(j.input.Transcodes) {
|
||||||
|
forceTranscode := utils.IsTrue(j.input.ForceTranscodes)
|
||||||
task := &GenerateTranscodeTask{
|
task := &GenerateTranscodeTask{
|
||||||
Scene: *scene,
|
Scene: *scene,
|
||||||
Overwrite: j.overwrite,
|
Overwrite: j.overwrite,
|
||||||
|
Force: forceTranscode,
|
||||||
fileNamingAlgorithm: j.fileNamingAlgo,
|
fileNamingAlgorithm: j.fileNamingAlgo,
|
||||||
}
|
}
|
||||||
if task.isTranscodeNeeded() {
|
if task.isTranscodeNeeded() {
|
||||||
@@ -277,6 +280,21 @@ func (j *GenerateJob) queueSceneJobs(scene *models.Scene, queue chan<- Task, tot
|
|||||||
queue <- task
|
queue <- task
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if utils.IsTrue(j.input.InteractiveHeatmapsSpeeds) {
|
||||||
|
task := &GenerateInteractiveHeatmapSpeedTask{
|
||||||
|
Scene: *scene,
|
||||||
|
Overwrite: j.overwrite,
|
||||||
|
fileNamingAlgorithm: j.fileNamingAlgo,
|
||||||
|
TxnManager: j.txnManager,
|
||||||
|
}
|
||||||
|
|
||||||
|
if task.shouldGenerate() {
|
||||||
|
totals.interactiveHeatmapSpeeds++
|
||||||
|
totals.tasks++
|
||||||
|
queue <- task
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (j *GenerateJob) queueMarkerJob(marker *models.SceneMarker, queue chan<- Task, totals *totalsGenerate) {
|
func (j *GenerateJob) queueMarkerJob(marker *models.SceneMarker, queue chan<- Task, totals *totalsGenerate) {
|
||||||
|
|||||||
87
pkg/manager/task_generate_interactive_heatmap_speed.go
Normal file
87
pkg/manager/task_generate_interactive_heatmap_speed.go
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
package manager
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/logger"
|
||||||
|
"github.com/stashapp/stash/pkg/models"
|
||||||
|
"github.com/stashapp/stash/pkg/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
type GenerateInteractiveHeatmapSpeedTask struct {
|
||||||
|
Scene models.Scene
|
||||||
|
Overwrite bool
|
||||||
|
fileNamingAlgorithm models.HashAlgorithm
|
||||||
|
TxnManager models.TransactionManager
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *GenerateInteractiveHeatmapSpeedTask) GetDescription() string {
|
||||||
|
return fmt.Sprintf("Generating heatmap and speed for %s", t.Scene.Path)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
|
||||||
|
if !t.shouldGenerate() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
videoChecksum := t.Scene.GetHash(t.fileNamingAlgorithm)
|
||||||
|
funscriptPath := utils.GetFunscriptPath(t.Scene.Path)
|
||||||
|
heatmapPath := instance.Paths.Scene.GetInteractiveHeatmapPath(videoChecksum)
|
||||||
|
|
||||||
|
generator := NewInteractiveHeatmapSpeedGenerator(funscriptPath, heatmapPath)
|
||||||
|
|
||||||
|
err := generator.Generate()
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("error generating heatmap: %s", err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
median := sql.NullInt64{
|
||||||
|
Int64: generator.InteractiveSpeed,
|
||||||
|
Valid: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
var s *models.Scene
|
||||||
|
|
||||||
|
if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||||
|
var err error
|
||||||
|
s, err = r.Scene().FindByPath(t.Scene.Path)
|
||||||
|
return err
|
||||||
|
}); err != nil {
|
||||||
|
logger.Error(err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||||
|
qb := r.Scene()
|
||||||
|
scenePartial := models.ScenePartial{
|
||||||
|
ID: s.ID,
|
||||||
|
InteractiveSpeed: &median,
|
||||||
|
}
|
||||||
|
_, err := qb.Update(scenePartial)
|
||||||
|
return err
|
||||||
|
}); err != nil {
|
||||||
|
logger.Error(err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *GenerateInteractiveHeatmapSpeedTask) shouldGenerate() bool {
|
||||||
|
if !t.Scene.Interactive {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)
|
||||||
|
return !t.doesHeatmapExist(sceneHash) || t.Overwrite
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *GenerateInteractiveHeatmapSpeedTask) doesHeatmapExist(sceneChecksum string) bool {
|
||||||
|
if sceneChecksum == "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
imageExists, _ := utils.FileExists(instance.Paths.Scene.GetInteractiveHeatmapPath(sceneChecksum))
|
||||||
|
return imageExists
|
||||||
|
}
|
||||||
@@ -117,6 +117,7 @@ func (t *GenerateMarkersTask) generateMarker(videoFile *ffmpeg.VideoFile, scene
|
|||||||
ScenePath: scene.Path,
|
ScenePath: scene.Path,
|
||||||
Seconds: seconds,
|
Seconds: seconds,
|
||||||
Width: 640,
|
Width: 640,
|
||||||
|
Audio: instance.Config.GetPreviewAudio(),
|
||||||
}
|
}
|
||||||
|
|
||||||
encoder := instance.FFMPEG
|
encoder := instance.FFMPEG
|
||||||
|
|||||||
@@ -211,8 +211,8 @@ type stashboxSource struct {
|
|||||||
endpoint string
|
endpoint string
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s stashboxSource) ScrapeScene(sceneID int) (*models.ScrapedScene, error) {
|
func (s stashboxSource) ScrapeScene(ctx context.Context, sceneID int) (*models.ScrapedScene, error) {
|
||||||
results, err := s.FindStashBoxScenesByFingerprintsFlat([]string{strconv.Itoa(sceneID)})
|
results, err := s.FindStashBoxScenesByFingerprintsFlat(ctx, []string{strconv.Itoa(sceneID)})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("error querying stash-box using scene ID %d: %w", sceneID, err)
|
return nil, fmt.Errorf("error querying stash-box using scene ID %d: %w", sceneID, err)
|
||||||
}
|
}
|
||||||
@@ -233,8 +233,17 @@ type scraperSource struct {
|
|||||||
scraperID string
|
scraperID string
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s scraperSource) ScrapeScene(sceneID int) (*models.ScrapedScene, error) {
|
func (s scraperSource) ScrapeScene(ctx context.Context, sceneID int) (*models.ScrapedScene, error) {
|
||||||
return s.cache.ScrapeScene(s.scraperID, sceneID)
|
content, err := s.cache.ScrapeID(ctx, s.scraperID, sceneID, models.ScrapeContentTypeScene)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if scene, ok := content.(models.ScrapedScene); ok {
|
||||||
|
return &scene, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, errors.New("could not convert content to scene")
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s scraperSource) String() string {
|
func (s scraperSource) String() string {
|
||||||
|
|||||||
@@ -146,6 +146,11 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) {
|
|||||||
func (j *ScanJob) queueFiles(ctx context.Context, paths []*models.StashConfig, scanQueue chan<- scanFile, parallelTasks int) (total int, newFiles int) {
|
func (j *ScanJob) queueFiles(ctx context.Context, paths []*models.StashConfig, scanQueue chan<- scanFile, parallelTasks int) (total int, newFiles int) {
|
||||||
defer close(scanQueue)
|
defer close(scanQueue)
|
||||||
|
|
||||||
|
var minModTime time.Time
|
||||||
|
if j.input.Filter != nil && j.input.Filter.MinModTime != nil {
|
||||||
|
minModTime = *j.input.Filter.MinModTime
|
||||||
|
}
|
||||||
|
|
||||||
wg := sizedwaitgroup.New(parallelTasks)
|
wg := sizedwaitgroup.New(parallelTasks)
|
||||||
|
|
||||||
for _, sp := range paths {
|
for _, sp := range paths {
|
||||||
@@ -160,6 +165,11 @@ func (j *ScanJob) queueFiles(ctx context.Context, paths []*models.StashConfig, s
|
|||||||
return context.Canceled
|
return context.Canceled
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// exit early on cutoff
|
||||||
|
if info.Mode().IsRegular() && info.ModTime().Before(minModTime) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
wg.Add()
|
wg.Add()
|
||||||
|
|
||||||
go func() {
|
go func() {
|
||||||
|
|||||||
@@ -38,6 +38,7 @@ func (t *ScanTask) scanScene() *models.Scene {
|
|||||||
VideoFileCreator: &instance.FFProbe,
|
VideoFileCreator: &instance.FFProbe,
|
||||||
PluginCache: instance.PluginCache,
|
PluginCache: instance.PluginCache,
|
||||||
MutexManager: t.mutexManager,
|
MutexManager: t.mutexManager,
|
||||||
|
UseFileMetadata: t.UseFileMetadata,
|
||||||
}
|
}
|
||||||
|
|
||||||
if s != nil {
|
if s != nil {
|
||||||
|
|||||||
@@ -44,7 +44,7 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag(ctx context.Context) {
|
|||||||
|
|
||||||
if t.refresh {
|
if t.refresh {
|
||||||
var performerID string
|
var performerID string
|
||||||
txnErr := t.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
txnErr := t.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
|
||||||
stashids, _ := r.Performer().GetStashIDs(t.performer.ID)
|
stashids, _ := r.Performer().GetStashIDs(t.performer.ID)
|
||||||
for _, id := range stashids {
|
for _, id := range stashids {
|
||||||
if id.Endpoint == t.box.Endpoint {
|
if id.Endpoint == t.box.Endpoint {
|
||||||
@@ -57,7 +57,7 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag(ctx context.Context) {
|
|||||||
logger.Warnf("error while executing read transaction: %v", err)
|
logger.Warnf("error while executing read transaction: %v", err)
|
||||||
}
|
}
|
||||||
if performerID != "" {
|
if performerID != "" {
|
||||||
performer, err = client.FindStashBoxPerformerByID(performerID)
|
performer, err = client.FindStashBoxPerformerByID(ctx, performerID)
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
var name string
|
var name string
|
||||||
@@ -66,7 +66,7 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag(ctx context.Context) {
|
|||||||
} else {
|
} else {
|
||||||
name = t.performer.Name.String
|
name = t.performer.Name.String
|
||||||
}
|
}
|
||||||
performer, err = client.FindStashBoxPerformerByName(name)
|
performer, err = client.FindStashBoxPerformerByName(ctx, name)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|||||||
@@ -15,6 +15,9 @@ type GenerateTranscodeTask struct {
|
|||||||
Scene models.Scene
|
Scene models.Scene
|
||||||
Overwrite bool
|
Overwrite bool
|
||||||
fileNamingAlgorithm models.HashAlgorithm
|
fileNamingAlgorithm models.HashAlgorithm
|
||||||
|
|
||||||
|
// is true, generate even if video is browser-supported
|
||||||
|
Force bool
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *GenerateTranscodeTask) GetDescription() string {
|
func (t *GenerateTranscodeTask) GetDescription() string {
|
||||||
@@ -49,7 +52,7 @@ func (t *GenerateTranscodeTask) Start(ctc context.Context) {
|
|||||||
audioCodec = ffmpeg.AudioCodec(t.Scene.AudioCodec.String)
|
audioCodec = ffmpeg.AudioCodec(t.Scene.AudioCodec.String)
|
||||||
}
|
}
|
||||||
|
|
||||||
if ffmpeg.IsStreamable(videoCodec, audioCodec, container) {
|
if !t.Force && ffmpeg.IsStreamable(videoCodec, audioCodec, container) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -95,6 +98,14 @@ func (t *GenerateTranscodeTask) Start(ctc context.Context) {
|
|||||||
// used only when counting files to generate, doesn't affect the actual transcode generation
|
// used only when counting files to generate, doesn't affect the actual transcode generation
|
||||||
// if container is missing from DB it is treated as non supported in order not to delay the user
|
// if container is missing from DB it is treated as non supported in order not to delay the user
|
||||||
func (t *GenerateTranscodeTask) isTranscodeNeeded() bool {
|
func (t *GenerateTranscodeTask) isTranscodeNeeded() bool {
|
||||||
|
hasTranscode := HasTranscode(&t.Scene, t.fileNamingAlgorithm)
|
||||||
|
if !t.Overwrite && hasTranscode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if t.Force {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
videoCodec := t.Scene.VideoCodec.String
|
videoCodec := t.Scene.VideoCodec.String
|
||||||
container := ""
|
container := ""
|
||||||
@@ -111,9 +122,5 @@ func (t *GenerateTranscodeTask) isTranscodeNeeded() bool {
|
|||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
hasTranscode := HasTranscode(&t.Scene, t.fileNamingAlgorithm)
|
|
||||||
if !t.Overwrite && hasTranscode {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ import (
|
|||||||
"regexp"
|
"regexp"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/gallery"
|
||||||
"github.com/stashapp/stash/pkg/image"
|
"github.com/stashapp/stash/pkg/image"
|
||||||
"github.com/stashapp/stash/pkg/models"
|
"github.com/stashapp/stash/pkg/models"
|
||||||
"github.com/stashapp/stash/pkg/scene"
|
"github.com/stashapp/stash/pkg/scene"
|
||||||
@@ -58,7 +59,9 @@ func getPathWords(path string) []string {
|
|||||||
return ret
|
return ret
|
||||||
}
|
}
|
||||||
|
|
||||||
func nameMatchesPath(name, path string) bool {
|
// nameMatchesPath returns the index in the path for the right-most match.
|
||||||
|
// Returns -1 if not found.
|
||||||
|
func nameMatchesPath(name, path string) int {
|
||||||
// escape specific regex characters
|
// escape specific regex characters
|
||||||
name = regexp.QuoteMeta(name)
|
name = regexp.QuoteMeta(name)
|
||||||
|
|
||||||
@@ -72,7 +75,13 @@ func nameMatchesPath(name, path string) bool {
|
|||||||
reStr = `(?:^|_|[^\w\d])` + reStr + `(?:$|_|[^\w\d])`
|
reStr = `(?:^|_|[^\w\d])` + reStr + `(?:$|_|[^\w\d])`
|
||||||
|
|
||||||
re := regexp.MustCompile(reStr)
|
re := regexp.MustCompile(reStr)
|
||||||
return re.MatchString(path)
|
found := re.FindAllStringIndex(path, -1)
|
||||||
|
|
||||||
|
if found == nil {
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
|
||||||
|
return found[len(found)-1][0]
|
||||||
}
|
}
|
||||||
|
|
||||||
func PathToPerformers(path string, performerReader models.PerformerReader) ([]*models.Performer, error) {
|
func PathToPerformers(path string, performerReader models.PerformerReader) ([]*models.Performer, error) {
|
||||||
@@ -86,7 +95,7 @@ func PathToPerformers(path string, performerReader models.PerformerReader) ([]*m
|
|||||||
var ret []*models.Performer
|
var ret []*models.Performer
|
||||||
for _, p := range performers {
|
for _, p := range performers {
|
||||||
// TODO - commenting out alias handling until both sides work correctly
|
// TODO - commenting out alias handling until both sides work correctly
|
||||||
if nameMatchesPath(p.Name.String, path) { // || nameMatchesPath(p.Aliases.String, path) {
|
if nameMatchesPath(p.Name.String, path) != -1 { // || nameMatchesPath(p.Aliases.String, path) {
|
||||||
ret = append(ret, p)
|
ret = append(ret, p)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -94,7 +103,10 @@ func PathToPerformers(path string, performerReader models.PerformerReader) ([]*m
|
|||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func PathToStudios(path string, reader models.StudioReader) ([]*models.Studio, error) {
|
// PathToStudio returns the Studio that matches the given path.
|
||||||
|
// Where multiple matching studios are found, the one that matches the latest
|
||||||
|
// position in the path is returned.
|
||||||
|
func PathToStudio(path string, reader models.StudioReader) (*models.Studio, error) {
|
||||||
words := getPathWords(path)
|
words := getPathWords(path)
|
||||||
candidates, err := reader.QueryForAutoTag(words)
|
candidates, err := reader.QueryForAutoTag(words)
|
||||||
|
|
||||||
@@ -102,29 +114,26 @@ func PathToStudios(path string, reader models.StudioReader) ([]*models.Studio, e
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
var ret []*models.Studio
|
var ret *models.Studio
|
||||||
|
index := -1
|
||||||
for _, c := range candidates {
|
for _, c := range candidates {
|
||||||
matches := false
|
matchIndex := nameMatchesPath(c.Name.String, path)
|
||||||
if nameMatchesPath(c.Name.String, path) {
|
if matchIndex != -1 && matchIndex > index {
|
||||||
matches = true
|
ret = c
|
||||||
|
index = matchIndex
|
||||||
}
|
}
|
||||||
|
|
||||||
if !matches {
|
aliases, err := reader.GetAliases(c.ID)
|
||||||
aliases, err := reader.GetAliases(c.ID)
|
if err != nil {
|
||||||
if err != nil {
|
return nil, err
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, alias := range aliases {
|
|
||||||
if nameMatchesPath(alias, path) {
|
|
||||||
matches = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if matches {
|
for _, alias := range aliases {
|
||||||
ret = append(ret, c)
|
matchIndex = nameMatchesPath(alias, path)
|
||||||
|
if matchIndex != -1 && matchIndex > index {
|
||||||
|
ret = c
|
||||||
|
index = matchIndex
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -142,7 +151,7 @@ func PathToTags(path string, tagReader models.TagReader) ([]*models.Tag, error)
|
|||||||
var ret []*models.Tag
|
var ret []*models.Tag
|
||||||
for _, t := range tags {
|
for _, t := range tags {
|
||||||
matches := false
|
matches := false
|
||||||
if nameMatchesPath(t.Name, path) {
|
if nameMatchesPath(t.Name, path) != -1 {
|
||||||
matches = true
|
matches = true
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -152,7 +161,7 @@ func PathToTags(path string, tagReader models.TagReader) ([]*models.Tag, error)
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
for _, alias := range aliases {
|
for _, alias := range aliases {
|
||||||
if nameMatchesPath(alias, path) {
|
if nameMatchesPath(alias, path) != -1 {
|
||||||
matches = true
|
matches = true
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
@@ -167,38 +176,6 @@ func PathToTags(path string, tagReader models.TagReader) ([]*models.Tag, error)
|
|||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func scenePathsFilter(paths []string) *models.SceneFilterType {
|
|
||||||
if paths == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
sep := string(filepath.Separator)
|
|
||||||
|
|
||||||
var ret *models.SceneFilterType
|
|
||||||
var or *models.SceneFilterType
|
|
||||||
for _, p := range paths {
|
|
||||||
newOr := &models.SceneFilterType{}
|
|
||||||
if or != nil {
|
|
||||||
or.Or = newOr
|
|
||||||
} else {
|
|
||||||
ret = newOr
|
|
||||||
}
|
|
||||||
|
|
||||||
or = newOr
|
|
||||||
|
|
||||||
if !strings.HasSuffix(p, sep) {
|
|
||||||
p += sep
|
|
||||||
}
|
|
||||||
|
|
||||||
or.Path = &models.StringCriterionInput{
|
|
||||||
Modifier: models.CriterionModifierEquals,
|
|
||||||
Value: p + "%",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret
|
|
||||||
}
|
|
||||||
|
|
||||||
func PathToScenes(name string, paths []string, sceneReader models.SceneReader) ([]*models.Scene, error) {
|
func PathToScenes(name string, paths []string, sceneReader models.SceneReader) ([]*models.Scene, error) {
|
||||||
regex := getPathQueryRegex(name)
|
regex := getPathQueryRegex(name)
|
||||||
organized := false
|
organized := false
|
||||||
@@ -210,7 +187,7 @@ func PathToScenes(name string, paths []string, sceneReader models.SceneReader) (
|
|||||||
Organized: &organized,
|
Organized: &organized,
|
||||||
}
|
}
|
||||||
|
|
||||||
filter.And = scenePathsFilter(paths)
|
filter.And = scene.PathsFilter(paths)
|
||||||
|
|
||||||
pp := models.PerPageAll
|
pp := models.PerPageAll
|
||||||
scenes, err := scene.Query(sceneReader, &filter, &models.FindFilterType{
|
scenes, err := scene.Query(sceneReader, &filter, &models.FindFilterType{
|
||||||
@@ -223,7 +200,7 @@ func PathToScenes(name string, paths []string, sceneReader models.SceneReader) (
|
|||||||
|
|
||||||
var ret []*models.Scene
|
var ret []*models.Scene
|
||||||
for _, p := range scenes {
|
for _, p := range scenes {
|
||||||
if nameMatchesPath(name, p.Path) {
|
if nameMatchesPath(name, p.Path) != -1 {
|
||||||
ret = append(ret, p)
|
ret = append(ret, p)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -231,38 +208,6 @@ func PathToScenes(name string, paths []string, sceneReader models.SceneReader) (
|
|||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func imagePathsFilter(paths []string) *models.ImageFilterType {
|
|
||||||
if paths == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
sep := string(filepath.Separator)
|
|
||||||
|
|
||||||
var ret *models.ImageFilterType
|
|
||||||
var or *models.ImageFilterType
|
|
||||||
for _, p := range paths {
|
|
||||||
newOr := &models.ImageFilterType{}
|
|
||||||
if or != nil {
|
|
||||||
or.Or = newOr
|
|
||||||
} else {
|
|
||||||
ret = newOr
|
|
||||||
}
|
|
||||||
|
|
||||||
or = newOr
|
|
||||||
|
|
||||||
if !strings.HasSuffix(p, sep) {
|
|
||||||
p += sep
|
|
||||||
}
|
|
||||||
|
|
||||||
or.Path = &models.StringCriterionInput{
|
|
||||||
Modifier: models.CriterionModifierEquals,
|
|
||||||
Value: p + "%",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret
|
|
||||||
}
|
|
||||||
|
|
||||||
func PathToImages(name string, paths []string, imageReader models.ImageReader) ([]*models.Image, error) {
|
func PathToImages(name string, paths []string, imageReader models.ImageReader) ([]*models.Image, error) {
|
||||||
regex := getPathQueryRegex(name)
|
regex := getPathQueryRegex(name)
|
||||||
organized := false
|
organized := false
|
||||||
@@ -274,7 +219,7 @@ func PathToImages(name string, paths []string, imageReader models.ImageReader) (
|
|||||||
Organized: &organized,
|
Organized: &organized,
|
||||||
}
|
}
|
||||||
|
|
||||||
filter.And = imagePathsFilter(paths)
|
filter.And = image.PathsFilter(paths)
|
||||||
|
|
||||||
pp := models.PerPageAll
|
pp := models.PerPageAll
|
||||||
images, err := image.Query(imageReader, &filter, &models.FindFilterType{
|
images, err := image.Query(imageReader, &filter, &models.FindFilterType{
|
||||||
@@ -287,7 +232,7 @@ func PathToImages(name string, paths []string, imageReader models.ImageReader) (
|
|||||||
|
|
||||||
var ret []*models.Image
|
var ret []*models.Image
|
||||||
for _, p := range images {
|
for _, p := range images {
|
||||||
if nameMatchesPath(name, p.Path) {
|
if nameMatchesPath(name, p.Path) != -1 {
|
||||||
ret = append(ret, p)
|
ret = append(ret, p)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -295,38 +240,6 @@ func PathToImages(name string, paths []string, imageReader models.ImageReader) (
|
|||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func galleryPathsFilter(paths []string) *models.GalleryFilterType {
|
|
||||||
if paths == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
sep := string(filepath.Separator)
|
|
||||||
|
|
||||||
var ret *models.GalleryFilterType
|
|
||||||
var or *models.GalleryFilterType
|
|
||||||
for _, p := range paths {
|
|
||||||
newOr := &models.GalleryFilterType{}
|
|
||||||
if or != nil {
|
|
||||||
or.Or = newOr
|
|
||||||
} else {
|
|
||||||
ret = newOr
|
|
||||||
}
|
|
||||||
|
|
||||||
or = newOr
|
|
||||||
|
|
||||||
if !strings.HasSuffix(p, sep) {
|
|
||||||
p += sep
|
|
||||||
}
|
|
||||||
|
|
||||||
or.Path = &models.StringCriterionInput{
|
|
||||||
Modifier: models.CriterionModifierEquals,
|
|
||||||
Value: p + "%",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret
|
|
||||||
}
|
|
||||||
|
|
||||||
func PathToGalleries(name string, paths []string, galleryReader models.GalleryReader) ([]*models.Gallery, error) {
|
func PathToGalleries(name string, paths []string, galleryReader models.GalleryReader) ([]*models.Gallery, error) {
|
||||||
regex := getPathQueryRegex(name)
|
regex := getPathQueryRegex(name)
|
||||||
organized := false
|
organized := false
|
||||||
@@ -338,7 +251,7 @@ func PathToGalleries(name string, paths []string, galleryReader models.GalleryRe
|
|||||||
Organized: &organized,
|
Organized: &organized,
|
||||||
}
|
}
|
||||||
|
|
||||||
filter.And = galleryPathsFilter(paths)
|
filter.And = gallery.PathsFilter(paths)
|
||||||
|
|
||||||
pp := models.PerPageAll
|
pp := models.PerPageAll
|
||||||
gallerys, _, err := galleryReader.Query(&filter, &models.FindFilterType{
|
gallerys, _, err := galleryReader.Query(&filter, &models.FindFilterType{
|
||||||
@@ -351,7 +264,7 @@ func PathToGalleries(name string, paths []string, galleryReader models.GalleryRe
|
|||||||
|
|
||||||
var ret []*models.Gallery
|
var ret []*models.Gallery
|
||||||
for _, p := range gallerys {
|
for _, p := range gallerys {
|
||||||
if nameMatchesPath(name, p.Path.String) {
|
if nameMatchesPath(name, p.Path.String) != -1 {
|
||||||
ret = append(ret, p)
|
ret = append(ret, p)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
76
pkg/match/path_test.go
Normal file
76
pkg/match/path_test.go
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
package match
|
||||||
|
|
||||||
|
import "testing"
|
||||||
|
|
||||||
|
func Test_nameMatchesPath(t *testing.T) {
|
||||||
|
const name = "first last"
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
path string
|
||||||
|
want int
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
"exact",
|
||||||
|
name,
|
||||||
|
0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"partial",
|
||||||
|
"first",
|
||||||
|
-1,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"separator",
|
||||||
|
"first.last",
|
||||||
|
0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"separator",
|
||||||
|
"first-last",
|
||||||
|
0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"separator",
|
||||||
|
"first_last",
|
||||||
|
0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"separators",
|
||||||
|
"first.-_ last",
|
||||||
|
0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"within string",
|
||||||
|
"before_first last/after",
|
||||||
|
6,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"not within string",
|
||||||
|
"beforefirst last/after",
|
||||||
|
-1,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"not within string",
|
||||||
|
"before/first lastafter",
|
||||||
|
-1,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"not within string",
|
||||||
|
"first last1",
|
||||||
|
-1,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"not within string",
|
||||||
|
"1first last",
|
||||||
|
-1,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
if got := nameMatchesPath(name, tt.path); got != tt.want {
|
||||||
|
t.Errorf("nameMatchesPath() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -2,4 +2,10 @@ package models
|
|||||||
|
|
||||||
import "errors"
|
import "errors"
|
||||||
|
|
||||||
var ErrNotFound = errors.New("not found")
|
var (
|
||||||
|
// ErrNotFound signifies entities which are not found
|
||||||
|
ErrNotFound = errors.New("not found")
|
||||||
|
|
||||||
|
// ErrConversion signifies conversion errors
|
||||||
|
ErrConversion = errors.New("conversion error")
|
||||||
|
)
|
||||||
|
|||||||
@@ -9,32 +9,33 @@ import (
|
|||||||
|
|
||||||
// Scene stores the metadata for a single video scene.
|
// Scene stores the metadata for a single video scene.
|
||||||
type Scene struct {
|
type Scene struct {
|
||||||
ID int `db:"id" json:"id"`
|
ID int `db:"id" json:"id"`
|
||||||
Checksum sql.NullString `db:"checksum" json:"checksum"`
|
Checksum sql.NullString `db:"checksum" json:"checksum"`
|
||||||
OSHash sql.NullString `db:"oshash" json:"oshash"`
|
OSHash sql.NullString `db:"oshash" json:"oshash"`
|
||||||
Path string `db:"path" json:"path"`
|
Path string `db:"path" json:"path"`
|
||||||
Title sql.NullString `db:"title" json:"title"`
|
Title sql.NullString `db:"title" json:"title"`
|
||||||
Details sql.NullString `db:"details" json:"details"`
|
Details sql.NullString `db:"details" json:"details"`
|
||||||
URL sql.NullString `db:"url" json:"url"`
|
URL sql.NullString `db:"url" json:"url"`
|
||||||
Date SQLiteDate `db:"date" json:"date"`
|
Date SQLiteDate `db:"date" json:"date"`
|
||||||
Rating sql.NullInt64 `db:"rating" json:"rating"`
|
Rating sql.NullInt64 `db:"rating" json:"rating"`
|
||||||
Organized bool `db:"organized" json:"organized"`
|
Organized bool `db:"organized" json:"organized"`
|
||||||
OCounter int `db:"o_counter" json:"o_counter"`
|
OCounter int `db:"o_counter" json:"o_counter"`
|
||||||
Size sql.NullString `db:"size" json:"size"`
|
Size sql.NullString `db:"size" json:"size"`
|
||||||
Duration sql.NullFloat64 `db:"duration" json:"duration"`
|
Duration sql.NullFloat64 `db:"duration" json:"duration"`
|
||||||
VideoCodec sql.NullString `db:"video_codec" json:"video_codec"`
|
VideoCodec sql.NullString `db:"video_codec" json:"video_codec"`
|
||||||
Format sql.NullString `db:"format" json:"format_name"`
|
Format sql.NullString `db:"format" json:"format_name"`
|
||||||
AudioCodec sql.NullString `db:"audio_codec" json:"audio_codec"`
|
AudioCodec sql.NullString `db:"audio_codec" json:"audio_codec"`
|
||||||
Width sql.NullInt64 `db:"width" json:"width"`
|
Width sql.NullInt64 `db:"width" json:"width"`
|
||||||
Height sql.NullInt64 `db:"height" json:"height"`
|
Height sql.NullInt64 `db:"height" json:"height"`
|
||||||
Framerate sql.NullFloat64 `db:"framerate" json:"framerate"`
|
Framerate sql.NullFloat64 `db:"framerate" json:"framerate"`
|
||||||
Bitrate sql.NullInt64 `db:"bitrate" json:"bitrate"`
|
Bitrate sql.NullInt64 `db:"bitrate" json:"bitrate"`
|
||||||
StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
|
StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
|
||||||
FileModTime NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"`
|
FileModTime NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"`
|
||||||
Phash sql.NullInt64 `db:"phash,omitempty" json:"phash"`
|
Phash sql.NullInt64 `db:"phash,omitempty" json:"phash"`
|
||||||
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||||
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||||
Interactive bool `db:"interactive" json:"interactive"`
|
Interactive bool `db:"interactive" json:"interactive"`
|
||||||
|
InteractiveSpeed sql.NullInt64 `db:"interactive_speed" json:"interactive_speed"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *Scene) File() File {
|
func (s *Scene) File() File {
|
||||||
@@ -92,32 +93,33 @@ func (s *Scene) SetFile(f File) {
|
|||||||
// ScenePartial represents part of a Scene object. It is used to update
|
// ScenePartial represents part of a Scene object. It is used to update
|
||||||
// the database entry. Only non-nil fields will be updated.
|
// the database entry. Only non-nil fields will be updated.
|
||||||
type ScenePartial struct {
|
type ScenePartial struct {
|
||||||
ID int `db:"id" json:"id"`
|
ID int `db:"id" json:"id"`
|
||||||
Checksum *sql.NullString `db:"checksum" json:"checksum"`
|
Checksum *sql.NullString `db:"checksum" json:"checksum"`
|
||||||
OSHash *sql.NullString `db:"oshash" json:"oshash"`
|
OSHash *sql.NullString `db:"oshash" json:"oshash"`
|
||||||
Path *string `db:"path" json:"path"`
|
Path *string `db:"path" json:"path"`
|
||||||
Title *sql.NullString `db:"title" json:"title"`
|
Title *sql.NullString `db:"title" json:"title"`
|
||||||
Details *sql.NullString `db:"details" json:"details"`
|
Details *sql.NullString `db:"details" json:"details"`
|
||||||
URL *sql.NullString `db:"url" json:"url"`
|
URL *sql.NullString `db:"url" json:"url"`
|
||||||
Date *SQLiteDate `db:"date" json:"date"`
|
Date *SQLiteDate `db:"date" json:"date"`
|
||||||
Rating *sql.NullInt64 `db:"rating" json:"rating"`
|
Rating *sql.NullInt64 `db:"rating" json:"rating"`
|
||||||
Organized *bool `db:"organized" json:"organized"`
|
Organized *bool `db:"organized" json:"organized"`
|
||||||
Size *sql.NullString `db:"size" json:"size"`
|
Size *sql.NullString `db:"size" json:"size"`
|
||||||
Duration *sql.NullFloat64 `db:"duration" json:"duration"`
|
Duration *sql.NullFloat64 `db:"duration" json:"duration"`
|
||||||
VideoCodec *sql.NullString `db:"video_codec" json:"video_codec"`
|
VideoCodec *sql.NullString `db:"video_codec" json:"video_codec"`
|
||||||
Format *sql.NullString `db:"format" json:"format_name"`
|
Format *sql.NullString `db:"format" json:"format_name"`
|
||||||
AudioCodec *sql.NullString `db:"audio_codec" json:"audio_codec"`
|
AudioCodec *sql.NullString `db:"audio_codec" json:"audio_codec"`
|
||||||
Width *sql.NullInt64 `db:"width" json:"width"`
|
Width *sql.NullInt64 `db:"width" json:"width"`
|
||||||
Height *sql.NullInt64 `db:"height" json:"height"`
|
Height *sql.NullInt64 `db:"height" json:"height"`
|
||||||
Framerate *sql.NullFloat64 `db:"framerate" json:"framerate"`
|
Framerate *sql.NullFloat64 `db:"framerate" json:"framerate"`
|
||||||
Bitrate *sql.NullInt64 `db:"bitrate" json:"bitrate"`
|
Bitrate *sql.NullInt64 `db:"bitrate" json:"bitrate"`
|
||||||
StudioID *sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
|
StudioID *sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
|
||||||
MovieID *sql.NullInt64 `db:"movie_id,omitempty" json:"movie_id"`
|
MovieID *sql.NullInt64 `db:"movie_id,omitempty" json:"movie_id"`
|
||||||
FileModTime *NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"`
|
FileModTime *NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"`
|
||||||
Phash *sql.NullInt64 `db:"phash,omitempty" json:"phash"`
|
Phash *sql.NullInt64 `db:"phash,omitempty" json:"phash"`
|
||||||
CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"`
|
CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||||
UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||||
Interactive *bool `db:"interactive" json:"interactive"`
|
Interactive *bool `db:"interactive" json:"interactive"`
|
||||||
|
InteractiveSpeed *sql.NullInt64 `db:"interactive_speed" json:"interactive_speed"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// UpdateInput constructs a SceneUpdateInput using the populated fields in the ScenePartial object.
|
// UpdateInput constructs a SceneUpdateInput using the populated fields in the ScenePartial object.
|
||||||
|
|||||||
167
pkg/models/search.go
Normal file
167
pkg/models/search.go
Normal file
@@ -0,0 +1,167 @@
|
|||||||
|
package models
|
||||||
|
|
||||||
|
import "strings"
|
||||||
|
|
||||||
|
// Keywords and prefixes recognised by ParseSearchString.
const (
	or       = "OR"
	orSymbol = "|"

	notPrefix  = '-'
	phraseChar = '"'
)

// SearchSpecs provides the specifications for text-based searches.
type SearchSpecs struct {
	// MustHave specifies all of the terms that must appear in the results.
	MustHave []string

	// AnySets specifies sets of terms where one of each set must appear in the results.
	AnySets [][]string

	// MustNot specifies all terms that must not appear in the results.
	MustNot []string
}

// combinePhrases detects quote characters at the start and end of
// words and combines the contents into a single word.
func combinePhrases(words []string) []string {
	var ret []string
	startIndex := -1

	for i, w := range words {
		if startIndex == -1 {
			// looking for start of phrase
			// this could either be " or -"
			ww := w
			if len(w) > 0 && w[0] == notPrefix {
				ww = w[1:]
			}

			// a phrase starts with a quote that is not also a closing quote
			if len(ww) > 0 && ww[0] == phraseChar && (len(ww) < 2 || ww[len(ww)-1] != phraseChar) {
				startIndex = i
				continue
			}

			ret = append(ret, w)
		} else if len(w) > 0 && w[len(w)-1] == phraseChar { // looking for end of phrase
			// combine words; Join re-inserts the single spaces consumed by Split
			phrase := strings.Join(words[startIndex:i+1], " ")

			// add to return value
			ret = append(ret, phrase)
			startIndex = -1
		}
	}

	// unterminated phrase: emit the remaining words unchanged
	if startIndex != -1 {
		ret = append(ret, words[startIndex:]...)
	}

	return ret
}

// isOrKeyword returns true if w is the OR keyword (case-insensitive) or the
// OR symbol.
func isOrKeyword(w string) bool {
	return strings.EqualFold(w, or) || w == orSymbol
}

// extractOrConditions finds runs of "a OR b [OR c ...]" in words, moves each
// run into searchSpec.AnySets, and returns the remaining words.
func extractOrConditions(words []string, searchSpec *SearchSpecs) []string {
	for foundOr := true; foundOr; {
		foundOr = false
		for i, w := range words {
			// OR is only an operator when it has an operand on both sides
			if i > 0 && i < len(words)-1 && isOrKeyword(w) {
				// found an OR keyword
				// first operand will be the last word
				startIndex := i - 1

				// find the last operand
				// this will be the last word not preceded by OR (or |):
				// previously only the OR keyword was recognised here, so
				// chains using the | symbol were not grouped consistently
				lastIndex := len(words) - 1
				for ii := i + 2; ii < len(words); ii += 2 {
					if !isOrKeyword(words[ii]) {
						lastIndex = ii - 1
						break
					}
				}

				foundOr = true

				// combine the words into an any set
				var set []string
				for ii := startIndex; ii <= lastIndex; ii += 2 {
					word := extractPhrase(words[ii])
					if word == "" {
						continue
					}
					set = append(set, word)
				}

				searchSpec.AnySets = append(searchSpec.AnySets, set)

				// take out the OR'd words
				words = append(words[0:startIndex], words[lastIndex+1:]...)

				// break and reparse
				break
			}
		}
	}

	return words
}

// extractNotConditions moves "-"-prefixed words into searchSpec.MustNot and
// returns the remaining words.
func extractNotConditions(words []string, searchSpec *SearchSpecs) []string {
	var ret []string

	for _, w := range words {
		if len(w) > 1 && w[0] == notPrefix {
			word := extractPhrase(w[1:])
			if word == "" {
				continue
			}
			searchSpec.MustNot = append(searchSpec.MustNot, word)
		} else {
			ret = append(ret, w)
		}
	}

	return ret
}

// extractPhrase strips a matched pair of surrounding quotes from w, if
// present; otherwise it returns w unchanged.
func extractPhrase(w string) string {
	if len(w) > 1 && w[0] == phraseChar && w[len(w)-1] == phraseChar {
		return w[1 : len(w)-1]
	}

	return w
}

// ParseSearchString parses the Q value and returns a SearchSpecs object.
//
// By default, any words in the search value must appear in the results.
// Words encompassed by quotes (") as treated as a single term.
// Where keyword "OR" (case-insensitive) appears (and is not part of a quoted phrase), one of the
// OR'd terms must appear in the results.
// Where a keyword is prefixed with "-", that keyword must not appear in the results.
// Where OR appears as the first or last term, or where one of the OR operands has a
// not prefix, then the OR is treated literally.
func ParseSearchString(s string) SearchSpecs {
	s = strings.TrimSpace(s)

	if s == "" {
		return SearchSpecs{}
	}

	// break into words
	words := strings.Split(s, " ")

	// combine phrases first, then extract OR conditions, then extract NOT conditions
	// and the leftovers will be AND'd
	ret := SearchSpecs{}
	words = combinePhrases(words)
	words = extractOrConditions(words, &ret)
	words = extractNotConditions(words, &ret)

	for _, w := range words {
		// ignore empty quotes
		word := extractPhrase(w)
		if word == "" {
			continue
		}
		ret.MustHave = append(ret.MustHave, word)
	}

	return ret
}
|
||||||
227
pkg/models/search_test.go
Normal file
227
pkg/models/search_test.go
Normal file
@@ -0,0 +1,227 @@
|
|||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"reflect"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestParseSearchString(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
q string
|
||||||
|
want SearchSpecs
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
"basic",
|
||||||
|
"a b c",
|
||||||
|
SearchSpecs{
|
||||||
|
MustHave: []string{"a", "b", "c"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"empty",
|
||||||
|
"",
|
||||||
|
SearchSpecs{},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"whitespace",
|
||||||
|
" ",
|
||||||
|
SearchSpecs{},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"single",
|
||||||
|
"a",
|
||||||
|
SearchSpecs{
|
||||||
|
MustHave: []string{"a"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"quoted",
|
||||||
|
`"a b" c`,
|
||||||
|
SearchSpecs{
|
||||||
|
MustHave: []string{"a b", "c"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"quoted double space",
|
||||||
|
`"a b" c`,
|
||||||
|
SearchSpecs{
|
||||||
|
MustHave: []string{"a b", "c"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"quoted end space",
|
||||||
|
`"a b " c`,
|
||||||
|
SearchSpecs{
|
||||||
|
MustHave: []string{"a b ", "c"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"no matching end quote",
|
||||||
|
`"a b c`,
|
||||||
|
SearchSpecs{
|
||||||
|
MustHave: []string{`"a`, "b", "c"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"no matching start quote",
|
||||||
|
`a b c"`,
|
||||||
|
SearchSpecs{
|
||||||
|
MustHave: []string{"a", "b", `c"`},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"or",
|
||||||
|
"a OR b",
|
||||||
|
SearchSpecs{
|
||||||
|
AnySets: [][]string{
|
||||||
|
{"a", "b"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"multi or",
|
||||||
|
"a OR b c OR d",
|
||||||
|
SearchSpecs{
|
||||||
|
AnySets: [][]string{
|
||||||
|
{"a", "b"},
|
||||||
|
{"c", "d"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"lowercase or",
|
||||||
|
"a or b",
|
||||||
|
SearchSpecs{
|
||||||
|
AnySets: [][]string{
|
||||||
|
{"a", "b"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"or symbol",
|
||||||
|
"a | b",
|
||||||
|
SearchSpecs{
|
||||||
|
AnySets: [][]string{
|
||||||
|
{"a", "b"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"quoted or",
|
||||||
|
`a "OR" b`,
|
||||||
|
SearchSpecs{
|
||||||
|
MustHave: []string{"a", "OR", "b"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"quoted or symbol",
|
||||||
|
`a "|" b`,
|
||||||
|
SearchSpecs{
|
||||||
|
MustHave: []string{"a", "|", "b"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"or phrases",
|
||||||
|
`"a b" OR "c d"`,
|
||||||
|
SearchSpecs{
|
||||||
|
AnySets: [][]string{
|
||||||
|
{"a b", "c d"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"or at start",
|
||||||
|
"OR a",
|
||||||
|
SearchSpecs{
|
||||||
|
MustHave: []string{"OR", "a"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"or at end",
|
||||||
|
"a OR",
|
||||||
|
SearchSpecs{
|
||||||
|
MustHave: []string{"a", "OR"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"or symbol at start",
|
||||||
|
"| a",
|
||||||
|
SearchSpecs{
|
||||||
|
MustHave: []string{"|", "a"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"or symbol at end",
|
||||||
|
"a |",
|
||||||
|
SearchSpecs{
|
||||||
|
MustHave: []string{"a", "|"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"nots",
|
||||||
|
"-a -b",
|
||||||
|
SearchSpecs{
|
||||||
|
MustNot: []string{"a", "b"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"not or",
|
||||||
|
"-a OR b",
|
||||||
|
SearchSpecs{
|
||||||
|
AnySets: [][]string{
|
||||||
|
{"-a", "b"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"not phrase",
|
||||||
|
`-"a b"`,
|
||||||
|
SearchSpecs{
|
||||||
|
MustNot: []string{"a b"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"not in phrase",
|
||||||
|
`"-a b"`,
|
||||||
|
SearchSpecs{
|
||||||
|
MustHave: []string{"-a b"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"double not",
|
||||||
|
"--a",
|
||||||
|
SearchSpecs{
|
||||||
|
MustNot: []string{"-a"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"empty quote",
|
||||||
|
`"" a`,
|
||||||
|
SearchSpecs{
|
||||||
|
MustHave: []string{"a"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"not empty quote",
|
||||||
|
`-"" a`,
|
||||||
|
SearchSpecs{
|
||||||
|
MustHave: []string{"a"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"quote in word",
|
||||||
|
`ab"cd"`,
|
||||||
|
SearchSpecs{
|
||||||
|
MustHave: []string{`ab"cd"`},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
if got := ParseSearchString(tt.q); !reflect.DeepEqual(got, tt.want) {
|
||||||
|
t.Errorf("FindFilterType.ParseSearchString() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -2,9 +2,10 @@ package models
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"database/sql/driver"
|
"database/sql/driver"
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/stashapp/stash/pkg/logger"
|
|
||||||
"github.com/stashapp/stash/pkg/utils"
|
"github.com/stashapp/stash/pkg/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -33,14 +34,19 @@ func (t *SQLiteDate) Scan(value interface{}) error {
|
|||||||
|
|
||||||
// Value implements the driver Valuer interface.
|
// Value implements the driver Valuer interface.
|
||||||
func (t SQLiteDate) Value() (driver.Value, error) {
|
func (t SQLiteDate) Value() (driver.Value, error) {
|
||||||
|
if !t.Valid {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
s := strings.TrimSpace(t.String)
|
||||||
// handle empty string
|
// handle empty string
|
||||||
if t.String == "" {
|
if s == "" {
|
||||||
return "", nil
|
return "", nil
|
||||||
}
|
}
|
||||||
|
|
||||||
result, err := utils.ParseDateStringAsFormat(t.String, "2006-01-02")
|
result, err := utils.ParseDateStringAsFormat(s, "2006-01-02")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Debugf("sqlite date conversion error: %s", err.Error())
|
return nil, fmt.Errorf("converting sqlite date %q: %w", s, err)
|
||||||
}
|
}
|
||||||
return result, nil
|
return result, nil
|
||||||
}
|
}
|
||||||
|
|||||||
84
pkg/models/sqlite_date_test.go
Normal file
84
pkg/models/sqlite_date_test.go
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql/driver"
|
||||||
|
"reflect"
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestSQLiteDate_Value(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
tr SQLiteDate
|
||||||
|
want driver.Value
|
||||||
|
wantErr bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
"empty string",
|
||||||
|
SQLiteDate{"", true},
|
||||||
|
"",
|
||||||
|
false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"whitespace",
|
||||||
|
SQLiteDate{" ", true},
|
||||||
|
"",
|
||||||
|
false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"RFC3339",
|
||||||
|
SQLiteDate{"2021-11-22T17:11:55+11:00", true},
|
||||||
|
"2021-11-22",
|
||||||
|
false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date",
|
||||||
|
SQLiteDate{"2021-11-22", true},
|
||||||
|
"2021-11-22",
|
||||||
|
false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date and time",
|
||||||
|
SQLiteDate{"2021-11-22 17:12:05", true},
|
||||||
|
"2021-11-22",
|
||||||
|
false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"date, time and zone",
|
||||||
|
SQLiteDate{"2021-11-22 17:33:05 AEST", true},
|
||||||
|
"2021-11-22",
|
||||||
|
false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"whitespaced date",
|
||||||
|
SQLiteDate{" 2021-11-22 ", true},
|
||||||
|
"2021-11-22",
|
||||||
|
false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"bad format",
|
||||||
|
SQLiteDate{"foo", true},
|
||||||
|
nil,
|
||||||
|
true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"invalid",
|
||||||
|
SQLiteDate{"null", false},
|
||||||
|
nil,
|
||||||
|
false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got, err := tt.tr.Value()
|
||||||
|
if (err != nil) != tt.wantErr {
|
||||||
|
t.Errorf("SQLiteDate.Value() error = %v, wantErr %v", err, tt.wantErr)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if !reflect.DeepEqual(got, tt.want) {
|
||||||
|
t.Errorf("SQLiteDate.Value() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
57
pkg/models/timestamp.go
Normal file
57
pkg/models/timestamp.go
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/99designs/gqlgen/graphql"
|
||||||
|
"github.com/stashapp/stash/pkg/logger"
|
||||||
|
"github.com/stashapp/stash/pkg/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
var ErrTimestamp = errors.New("cannot parse Timestamp")
|
||||||
|
|
||||||
|
func MarshalTimestamp(t time.Time) graphql.Marshaler {
|
||||||
|
if t.IsZero() {
|
||||||
|
return graphql.Null
|
||||||
|
}
|
||||||
|
|
||||||
|
return graphql.WriterFunc(func(w io.Writer) {
|
||||||
|
_, err := io.WriteString(w, strconv.Quote(t.Format(time.RFC3339Nano)))
|
||||||
|
if err != nil {
|
||||||
|
logger.Warnf("could not marshal timestamp: %v", err)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func UnmarshalTimestamp(v interface{}) (time.Time, error) {
|
||||||
|
if tmpStr, ok := v.(string); ok {
|
||||||
|
if len(tmpStr) == 0 {
|
||||||
|
return time.Time{}, fmt.Errorf("%w: empty string", ErrTimestamp)
|
||||||
|
}
|
||||||
|
|
||||||
|
switch tmpStr[0] {
|
||||||
|
case '>', '<':
|
||||||
|
d, err := time.ParseDuration(tmpStr[1:])
|
||||||
|
if err != nil {
|
||||||
|
return time.Time{}, fmt.Errorf("%w: cannot parse %v-duration: %v", ErrTimestamp, tmpStr[0], err)
|
||||||
|
}
|
||||||
|
t := time.Now()
|
||||||
|
// Compute point in time:
|
||||||
|
if tmpStr[0] == '<' {
|
||||||
|
t = t.Add(-d)
|
||||||
|
} else {
|
||||||
|
t = t.Add(d)
|
||||||
|
}
|
||||||
|
|
||||||
|
return t, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return utils.ParseDateStringAsTime(tmpStr)
|
||||||
|
}
|
||||||
|
|
||||||
|
return time.Time{}, fmt.Errorf("%w: not a string", ErrTimestamp)
|
||||||
|
}
|
||||||
90
pkg/models/timestamp_test.go
Normal file
90
pkg/models/timestamp_test.go
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"strconv"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestTimestampSymmetry(t *testing.T) {
|
||||||
|
n := time.Now()
|
||||||
|
buf := bytes.NewBuffer([]byte{})
|
||||||
|
MarshalTimestamp(n).MarshalGQL(buf)
|
||||||
|
|
||||||
|
str, err := strconv.Unquote(buf.String())
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal("could not unquote string")
|
||||||
|
}
|
||||||
|
got, err := UnmarshalTimestamp(str)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("could not unmarshal time: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !n.Equal(got) {
|
||||||
|
t.Fatalf("have %v, want %v", got, n)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTimestamp(t *testing.T) {
|
||||||
|
n := time.Now().In(time.UTC)
|
||||||
|
testCases := []struct {
|
||||||
|
name string
|
||||||
|
have string
|
||||||
|
want string
|
||||||
|
}{
|
||||||
|
{"reflexivity", n.Format(time.RFC3339Nano), n.Format(time.RFC3339Nano)},
|
||||||
|
{"rfc3339", "2021-11-04T01:02:03Z", "2021-11-04T01:02:03Z"},
|
||||||
|
{"date", "2021-04-05", "2021-04-05T00:00:00Z"},
|
||||||
|
{"datetime", "2021-04-05 14:45:36", "2021-04-05T14:45:36Z"},
|
||||||
|
{"datetime-tz", "2021-04-05 14:45:36 PDT", "2021-04-05T14:45:36Z"},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tc := range testCases {
|
||||||
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
|
p, err := UnmarshalTimestamp(tc.have)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("could not unmarshal time: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
buf := bytes.NewBuffer([]byte{})
|
||||||
|
MarshalTimestamp(p).MarshalGQL(buf)
|
||||||
|
|
||||||
|
got, err := strconv.Unquote(buf.String())
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("count not unquote string")
|
||||||
|
}
|
||||||
|
if got != tc.want {
|
||||||
|
t.Errorf("got %s; want %s", got, tc.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const epsilon = 10 * time.Second
|
||||||
|
|
||||||
|
func TestTimestampRelative(t *testing.T) {
|
||||||
|
n := time.Now()
|
||||||
|
testCases := []struct {
|
||||||
|
name string
|
||||||
|
have string
|
||||||
|
want time.Time
|
||||||
|
}{
|
||||||
|
{"past", "<4h", n.Add(-4 * time.Hour)},
|
||||||
|
{"future", ">5m", n.Add(5 * time.Minute)},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tc := range testCases {
|
||||||
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
|
got, err := UnmarshalTimestamp(tc.have)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("could not unmarshal time: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if got.Sub(tc.want) > epsilon {
|
||||||
|
t.Errorf("not within bound of %v; got %s; want %s", epsilon, got, tc.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
package plugin
|
package plugin
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"github.com/stashapp/stash/pkg/models"
|
||||||
"github.com/stashapp/stash/pkg/plugin/common"
|
"github.com/stashapp/stash/pkg/plugin/common"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -40,6 +41,7 @@ const (
|
|||||||
|
|
||||||
TagCreatePost HookTriggerEnum = "Tag.Create.Post"
|
TagCreatePost HookTriggerEnum = "Tag.Create.Post"
|
||||||
TagUpdatePost HookTriggerEnum = "Tag.Update.Post"
|
TagUpdatePost HookTriggerEnum = "Tag.Update.Post"
|
||||||
|
TagMergePost HookTriggerEnum = "Tag.Merge.Post"
|
||||||
TagDestroyPost HookTriggerEnum = "Tag.Destroy.Post"
|
TagDestroyPost HookTriggerEnum = "Tag.Destroy.Post"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -74,6 +76,7 @@ var AllHookTriggerEnum = []HookTriggerEnum{
|
|||||||
|
|
||||||
TagCreatePost,
|
TagCreatePost,
|
||||||
TagUpdatePost,
|
TagUpdatePost,
|
||||||
|
TagMergePost,
|
||||||
TagDestroyPost,
|
TagDestroyPost,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -123,3 +126,36 @@ func (e HookTriggerEnum) String() string {
|
|||||||
// addHookContext stores the given hook context in the plugin argument map
// under the shared common.HookContextKey, making it available to plugin tasks.
func addHookContext(argsMap common.ArgsMap, hookContext common.HookContext) {
	argsMap[common.HookContextKey] = hookContext
}

// types for destroy hooks, to provide a little more information

// SceneDestroyInput embeds models.SceneDestroyInput and adds the scene's
// identifying hashes and path, since the row itself is gone by hook time.
type SceneDestroyInput struct {
	models.SceneDestroyInput
	Checksum string `json:"checksum"`
	OSHash   string `json:"oshash"`
	Path     string `json:"path"`
}

// ScenesDestroyInput is the bulk counterpart of SceneDestroyInput.
type ScenesDestroyInput struct {
	models.ScenesDestroyInput
	Checksum string `json:"checksum"`
	OSHash   string `json:"oshash"`
	Path     string `json:"path"`
}

// GalleryDestroyInput embeds models.GalleryDestroyInput and adds the
// gallery's checksum and path.
type GalleryDestroyInput struct {
	models.GalleryDestroyInput
	Checksum string `json:"checksum"`
	Path     string `json:"path"`
}

// ImageDestroyInput embeds models.ImageDestroyInput and adds the image's
// checksum and path.
type ImageDestroyInput struct {
	models.ImageDestroyInput
	Checksum string `json:"checksum"`
	Path     string `json:"path"`
}

// ImagesDestroyInput is the bulk counterpart of ImageDestroyInput.
type ImagesDestroyInput struct {
	models.ImagesDestroyInput
	Checksum string `json:"checksum"`
	Path     string `json:"path"`
}
|
||||||
|
|||||||
164
pkg/scene/delete.go
Normal file
164
pkg/scene/delete.go
Normal file
@@ -0,0 +1,164 @@
|
|||||||
|
package scene
|
||||||
|
|
||||||
|
import (
|
||||||
|
"path/filepath"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/file"
|
||||||
|
"github.com/stashapp/stash/pkg/manager/paths"
|
||||||
|
"github.com/stashapp/stash/pkg/models"
|
||||||
|
"github.com/stashapp/stash/pkg/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
// FileDeleter is an extension of file.Deleter that handles deletion of scene files.
|
||||||
|
type FileDeleter struct {
|
||||||
|
file.Deleter
|
||||||
|
|
||||||
|
FileNamingAlgo models.HashAlgorithm
|
||||||
|
Paths *paths.Paths
|
||||||
|
}
|
||||||
|
|
||||||
|
// MarkGeneratedFiles marks for deletion the generated files for the provided scene.
|
||||||
|
func (d *FileDeleter) MarkGeneratedFiles(scene *models.Scene) error {
|
||||||
|
sceneHash := scene.GetHash(d.FileNamingAlgo)
|
||||||
|
|
||||||
|
if sceneHash == "" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
markersFolder := filepath.Join(d.Paths.Generated.Markers, sceneHash)
|
||||||
|
|
||||||
|
exists, _ := utils.FileExists(markersFolder)
|
||||||
|
if exists {
|
||||||
|
if err := d.Dirs([]string{markersFolder}); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var files []string
|
||||||
|
|
||||||
|
thumbPath := d.Paths.Scene.GetThumbnailScreenshotPath(sceneHash)
|
||||||
|
exists, _ = utils.FileExists(thumbPath)
|
||||||
|
if exists {
|
||||||
|
files = append(files, thumbPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
normalPath := d.Paths.Scene.GetScreenshotPath(sceneHash)
|
||||||
|
exists, _ = utils.FileExists(normalPath)
|
||||||
|
if exists {
|
||||||
|
files = append(files, normalPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
streamPreviewPath := d.Paths.Scene.GetStreamPreviewPath(sceneHash)
|
||||||
|
exists, _ = utils.FileExists(streamPreviewPath)
|
||||||
|
if exists {
|
||||||
|
files = append(files, streamPreviewPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
streamPreviewImagePath := d.Paths.Scene.GetStreamPreviewImagePath(sceneHash)
|
||||||
|
exists, _ = utils.FileExists(streamPreviewImagePath)
|
||||||
|
if exists {
|
||||||
|
files = append(files, streamPreviewImagePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
transcodePath := d.Paths.Scene.GetTranscodePath(sceneHash)
|
||||||
|
exists, _ = utils.FileExists(transcodePath)
|
||||||
|
if exists {
|
||||||
|
files = append(files, transcodePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
spritePath := d.Paths.Scene.GetSpriteImageFilePath(sceneHash)
|
||||||
|
exists, _ = utils.FileExists(spritePath)
|
||||||
|
if exists {
|
||||||
|
files = append(files, spritePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
vttPath := d.Paths.Scene.GetSpriteVttFilePath(sceneHash)
|
||||||
|
exists, _ = utils.FileExists(vttPath)
|
||||||
|
if exists {
|
||||||
|
files = append(files, vttPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
heatmapPath := d.Paths.Scene.GetInteractiveHeatmapPath(sceneHash)
|
||||||
|
exists, _ = utils.FileExists(heatmapPath)
|
||||||
|
if exists {
|
||||||
|
files = append(files, heatmapPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
return d.Files(files)
|
||||||
|
}
|
||||||
|
|
||||||
|
// MarkMarkerFiles deletes generated files for a scene marker with the
|
||||||
|
// provided scene and timestamp.
|
||||||
|
func (d *FileDeleter) MarkMarkerFiles(scene *models.Scene, seconds int) error {
|
||||||
|
videoPath := d.Paths.SceneMarkers.GetStreamPath(scene.GetHash(d.FileNamingAlgo), seconds)
|
||||||
|
imagePath := d.Paths.SceneMarkers.GetStreamPreviewImagePath(scene.GetHash(d.FileNamingAlgo), seconds)
|
||||||
|
screenshotPath := d.Paths.SceneMarkers.GetStreamScreenshotPath(scene.GetHash(d.FileNamingAlgo), seconds)
|
||||||
|
|
||||||
|
var files []string
|
||||||
|
|
||||||
|
exists, _ := utils.FileExists(videoPath)
|
||||||
|
if exists {
|
||||||
|
files = append(files, videoPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
exists, _ = utils.FileExists(imagePath)
|
||||||
|
if exists {
|
||||||
|
files = append(files, imagePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
exists, _ = utils.FileExists(screenshotPath)
|
||||||
|
if exists {
|
||||||
|
files = append(files, screenshotPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
return d.Files(files)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Destroy deletes a scene and its associated relationships from the
|
||||||
|
// database.
|
||||||
|
func Destroy(scene *models.Scene, repo models.Repository, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error {
|
||||||
|
qb := repo.Scene()
|
||||||
|
mqb := repo.SceneMarker()
|
||||||
|
|
||||||
|
markers, err := mqb.FindBySceneID(scene.ID)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, m := range markers {
|
||||||
|
if err := DestroyMarker(scene, m, mqb, fileDeleter); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if deleteFile {
|
||||||
|
if err := fileDeleter.Files([]string{scene.Path}); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if deleteGenerated {
|
||||||
|
if err := fileDeleter.MarkGeneratedFiles(scene); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := qb.Destroy(scene.ID); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// DestroyMarker deletes the scene marker from the database and returns a
|
||||||
|
// function that removes the generated files, to be executed after the
|
||||||
|
// transaction is successfully committed.
|
||||||
|
func DestroyMarker(scene *models.Scene, sceneMarker *models.SceneMarker, qb models.SceneMarkerWriter, fileDeleter *FileDeleter) error {
|
||||||
|
if err := qb.Destroy(sceneMarker.ID); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// delete the preview for the marker
|
||||||
|
seconds := int(sceneMarker.Seconds)
|
||||||
|
return fileDeleter.MarkMarkerFiles(scene, seconds)
|
||||||
|
}
|
||||||
40
pkg/scene/filter.go
Normal file
40
pkg/scene/filter.go
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
package scene
|
||||||
|
|
||||||
|
import (
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
func PathsFilter(paths []string) *models.SceneFilterType {
|
||||||
|
if paths == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
sep := string(filepath.Separator)
|
||||||
|
|
||||||
|
var ret *models.SceneFilterType
|
||||||
|
var or *models.SceneFilterType
|
||||||
|
for _, p := range paths {
|
||||||
|
newOr := &models.SceneFilterType{}
|
||||||
|
if or != nil {
|
||||||
|
or.Or = newOr
|
||||||
|
} else {
|
||||||
|
ret = newOr
|
||||||
|
}
|
||||||
|
|
||||||
|
or = newOr
|
||||||
|
|
||||||
|
if !strings.HasSuffix(p, sep) {
|
||||||
|
p += sep
|
||||||
|
}
|
||||||
|
|
||||||
|
or.Path = &models.StringCriterionInput{
|
||||||
|
Modifier: models.CriterionModifierEquals,
|
||||||
|
Value: p + "%",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret
|
||||||
|
}
|
||||||
@@ -42,6 +42,10 @@ func MigrateHash(p *paths.Paths, oldHash string, newHash string) {
|
|||||||
oldPath = scenePaths.GetSpriteImageFilePath(oldHash)
|
oldPath = scenePaths.GetSpriteImageFilePath(oldHash)
|
||||||
newPath = scenePaths.GetSpriteImageFilePath(newHash)
|
newPath = scenePaths.GetSpriteImageFilePath(newHash)
|
||||||
migrateSceneFiles(oldPath, newPath)
|
migrateSceneFiles(oldPath, newPath)
|
||||||
|
|
||||||
|
oldPath = scenePaths.GetInteractiveHeatmapPath(oldHash)
|
||||||
|
newPath = scenePaths.GetInteractiveHeatmapPath(newHash)
|
||||||
|
migrateSceneFiles(oldPath, newPath)
|
||||||
}
|
}
|
||||||
|
|
||||||
func migrateSceneFiles(oldName, newName string) {
|
func migrateSceneFiles(oldName, newName string) {
|
||||||
|
|||||||
@@ -130,6 +130,7 @@ func (scanner *Scanner) ScanExisting(existing file.FileBased, file file.SourceFi
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
s.Interactive = interactive
|
||||||
s.UpdatedAt = models.SQLiteTimestamp{Timestamp: time.Now()}
|
s.UpdatedAt = models.SQLiteTimestamp{Timestamp: time.Now()}
|
||||||
|
|
||||||
_, err := qb.UpdateFull(*s)
|
_, err := qb.UpdateFull(*s)
|
||||||
@@ -262,7 +263,7 @@ func (scanner *Scanner) ScanNew(file file.SourceFile) (retScene *models.Scene, e
|
|||||||
|
|
||||||
if scanner.UseFileMetadata {
|
if scanner.UseFileMetadata {
|
||||||
newScene.Details = sql.NullString{String: videoFile.Comment, Valid: true}
|
newScene.Details = sql.NullString{String: videoFile.Comment, Valid: true}
|
||||||
newScene.Date = models.SQLiteDate{String: videoFile.CreationTime.Format("2006-01-02")}
|
_ = newScene.Date.Scan(videoFile.CreationTime)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
package scraper
|
package scraper
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"github.com/stashapp/stash/pkg/models"
|
"github.com/stashapp/stash/pkg/models"
|
||||||
@@ -24,20 +25,12 @@ func (e scraperAction) IsValid() bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type scraperActionImpl interface {
|
type scraperActionImpl interface {
|
||||||
scrapePerformersByName(name string) ([]*models.ScrapedPerformer, error)
|
scrapeByURL(ctx context.Context, url string, ty models.ScrapeContentType) (models.ScrapedContent, error)
|
||||||
scrapePerformerByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error)
|
scrapeByName(ctx context.Context, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error)
|
||||||
scrapePerformerByURL(url string) (*models.ScrapedPerformer, error)
|
scrapeByFragment(ctx context.Context, input Input) (models.ScrapedContent, error)
|
||||||
|
|
||||||
scrapeScenesByName(name string) ([]*models.ScrapedScene, error)
|
scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error)
|
||||||
scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error)
|
scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error)
|
||||||
scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error)
|
|
||||||
scrapeSceneByURL(url string) (*models.ScrapedScene, error)
|
|
||||||
|
|
||||||
scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error)
|
|
||||||
scrapeGalleryByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error)
|
|
||||||
scrapeGalleryByURL(url string) (*models.ScrapedGallery, error)
|
|
||||||
|
|
||||||
scrapeMovieByURL(url string) (*models.ScrapedMovie, error)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c config) getScraper(scraper scraperTypeConfig, client *http.Client, txnManager models.TransactionManager, globalConfig GlobalConfig) scraperActionImpl {
|
func (c config) getScraper(scraper scraperTypeConfig, client *http.Client, txnManager models.TransactionManager, globalConfig GlobalConfig) scraperActionImpl {
|
||||||
|
|||||||
@@ -2,8 +2,8 @@ package scraper
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"errors"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"net/http"
|
||||||
"strconv"
|
"strconv"
|
||||||
|
|
||||||
"github.com/stashapp/stash/pkg/match"
|
"github.com/stashapp/stash/pkg/match"
|
||||||
@@ -16,14 +16,12 @@ const (
|
|||||||
autoTagScraperName = "Auto Tag"
|
autoTagScraperName = "Auto Tag"
|
||||||
)
|
)
|
||||||
|
|
||||||
var errNotSupported = errors.New("not supported")
|
|
||||||
|
|
||||||
type autotagScraper struct {
|
type autotagScraper struct {
|
||||||
txnManager models.TransactionManager
|
txnManager models.TransactionManager
|
||||||
globalConfig GlobalConfig
|
globalConfig GlobalConfig
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *autotagScraper) matchPerformers(path string, performerReader models.PerformerReader) ([]*models.ScrapedPerformer, error) {
|
func autotagMatchPerformers(path string, performerReader models.PerformerReader) ([]*models.ScrapedPerformer, error) {
|
||||||
p, err := match.PathToPerformers(path, performerReader)
|
p, err := match.PathToPerformers(path, performerReader)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("error matching performers: %w", err)
|
return nil, fmt.Errorf("error matching performers: %w", err)
|
||||||
@@ -47,16 +45,16 @@ func (s *autotagScraper) matchPerformers(path string, performerReader models.Per
|
|||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *autotagScraper) matchStudio(path string, studioReader models.StudioReader) (*models.ScrapedStudio, error) {
|
func autotagMatchStudio(path string, studioReader models.StudioReader) (*models.ScrapedStudio, error) {
|
||||||
st, err := match.PathToStudios(path, studioReader)
|
studio, err := match.PathToStudio(path, studioReader)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("error matching studios: %w", err)
|
return nil, fmt.Errorf("error matching studios: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(st) > 0 {
|
if studio != nil {
|
||||||
id := strconv.Itoa(st[0].ID)
|
id := strconv.Itoa(studio.ID)
|
||||||
return &models.ScrapedStudio{
|
return &models.ScrapedStudio{
|
||||||
Name: st[0].Name.String,
|
Name: studio.Name.String,
|
||||||
StoredID: &id,
|
StoredID: &id,
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
@@ -64,7 +62,7 @@ func (s *autotagScraper) matchStudio(path string, studioReader models.StudioRead
|
|||||||
return nil, nil
|
return nil, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *autotagScraper) matchTags(path string, tagReader models.TagReader) ([]*models.ScrapedTag, error) {
|
func autotagMatchTags(path string, tagReader models.TagReader) ([]*models.ScrapedTag, error) {
|
||||||
t, err := match.PathToTags(path, tagReader)
|
t, err := match.PathToTags(path, tagReader)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("error matching tags: %w", err)
|
return nil, fmt.Errorf("error matching tags: %w", err)
|
||||||
@@ -85,32 +83,24 @@ func (s *autotagScraper) matchTags(path string, tagReader models.TagReader) ([]*
|
|||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
type autotagSceneScraper struct {
|
func (s autotagScraper) viaScene(ctx context.Context, _client *http.Client, scene *models.Scene) (*models.ScrapedScene, error) {
|
||||||
*autotagScraper
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *autotagSceneScraper) scrapeByName(name string) ([]*models.ScrapedScene, error) {
|
|
||||||
return nil, errNotSupported
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *autotagSceneScraper) scrapeByScene(scene *models.Scene) (*models.ScrapedScene, error) {
|
|
||||||
var ret *models.ScrapedScene
|
var ret *models.ScrapedScene
|
||||||
|
|
||||||
// populate performers, studio and tags based on scene path
|
// populate performers, studio and tags based on scene path
|
||||||
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
if err := s.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
|
||||||
path := scene.Path
|
path := scene.Path
|
||||||
performers, err := c.matchPerformers(path, r.Performer())
|
performers, err := autotagMatchPerformers(path, r.Performer())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return fmt.Errorf("autotag scraper viaScene: %w", err)
|
||||||
}
|
}
|
||||||
studio, err := c.matchStudio(path, r.Studio())
|
studio, err := autotagMatchStudio(path, r.Studio())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return fmt.Errorf("autotag scraper viaScene: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
tags, err := c.matchTags(path, r.Tag())
|
tags, err := autotagMatchTags(path, r.Tag())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return fmt.Errorf("autotag scraper viaScene: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(performers) > 0 || studio != nil || len(tags) > 0 {
|
if len(performers) > 0 || studio != nil || len(tags) > 0 {
|
||||||
@@ -129,19 +119,7 @@ func (c *autotagSceneScraper) scrapeByScene(scene *models.Scene) (*models.Scrape
|
|||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *autotagSceneScraper) scrapeByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
|
func (s autotagScraper) viaGallery(ctx context.Context, _client *http.Client, gallery *models.Gallery) (*models.ScrapedGallery, error) {
|
||||||
return nil, errNotSupported
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *autotagSceneScraper) scrapeByURL(url string) (*models.ScrapedScene, error) {
|
|
||||||
return nil, errNotSupported
|
|
||||||
}
|
|
||||||
|
|
||||||
type autotagGalleryScraper struct {
|
|
||||||
*autotagScraper
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *autotagGalleryScraper) scrapeByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
|
|
||||||
if !gallery.Path.Valid {
|
if !gallery.Path.Valid {
|
||||||
// not valid for non-path-based galleries
|
// not valid for non-path-based galleries
|
||||||
return nil, nil
|
return nil, nil
|
||||||
@@ -150,20 +128,20 @@ func (c *autotagGalleryScraper) scrapeByGallery(gallery *models.Gallery) (*model
|
|||||||
var ret *models.ScrapedGallery
|
var ret *models.ScrapedGallery
|
||||||
|
|
||||||
// populate performers, studio and tags based on scene path
|
// populate performers, studio and tags based on scene path
|
||||||
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
if err := s.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
|
||||||
path := gallery.Path.String
|
path := gallery.Path.String
|
||||||
performers, err := c.matchPerformers(path, r.Performer())
|
performers, err := autotagMatchPerformers(path, r.Performer())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return fmt.Errorf("autotag scraper viaGallery: %w", err)
|
||||||
}
|
}
|
||||||
studio, err := c.matchStudio(path, r.Studio())
|
studio, err := autotagMatchStudio(path, r.Studio())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return fmt.Errorf("autotag scraper viaGallery: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
tags, err := c.matchTags(path, r.Tag())
|
tags, err := autotagMatchTags(path, r.Tag())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return fmt.Errorf("autotag scraper viaGallery: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(performers) > 0 || studio != nil || len(tags) > 0 {
|
if len(performers) > 0 || studio != nil || len(tags) > 0 {
|
||||||
@@ -182,12 +160,36 @@ func (c *autotagGalleryScraper) scrapeByGallery(gallery *models.Gallery) (*model
|
|||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *autotagGalleryScraper) scrapeByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
|
func (s autotagScraper) supports(ty models.ScrapeContentType) bool {
|
||||||
return nil, errNotSupported
|
switch ty {
|
||||||
|
case models.ScrapeContentTypeScene:
|
||||||
|
return true
|
||||||
|
case models.ScrapeContentTypeGallery:
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *autotagGalleryScraper) scrapeByURL(url string) (*models.ScrapedGallery, error) {
|
func (s autotagScraper) supportsURL(url string, ty models.ScrapeContentType) bool {
|
||||||
return nil, errNotSupported
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s autotagScraper) spec() models.Scraper {
|
||||||
|
supportedScrapes := []models.ScrapeType{
|
||||||
|
models.ScrapeTypeFragment,
|
||||||
|
}
|
||||||
|
|
||||||
|
return models.Scraper{
|
||||||
|
ID: autoTagScraperID,
|
||||||
|
Name: autoTagScraperName,
|
||||||
|
Scene: &models.ScraperSpec{
|
||||||
|
SupportedScrapes: supportedScrapes,
|
||||||
|
},
|
||||||
|
Gallery: &models.ScraperSpec{
|
||||||
|
SupportedScrapes: supportedScrapes,
|
||||||
|
},
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func getAutoTagScraper(txnManager models.TransactionManager, globalConfig GlobalConfig) scraper {
|
func getAutoTagScraper(txnManager models.TransactionManager, globalConfig GlobalConfig) scraper {
|
||||||
@@ -196,23 +198,5 @@ func getAutoTagScraper(txnManager models.TransactionManager, globalConfig Global
|
|||||||
globalConfig: globalConfig,
|
globalConfig: globalConfig,
|
||||||
}
|
}
|
||||||
|
|
||||||
supportedScrapes := []models.ScrapeType{
|
return base
|
||||||
models.ScrapeTypeFragment,
|
|
||||||
}
|
|
||||||
|
|
||||||
return scraper{
|
|
||||||
ID: autoTagScraperID,
|
|
||||||
Spec: &models.Scraper{
|
|
||||||
ID: autoTagScraperID,
|
|
||||||
Name: autoTagScraperName,
|
|
||||||
Scene: &models.ScraperSpec{
|
|
||||||
SupportedScrapes: supportedScrapes,
|
|
||||||
},
|
|
||||||
Gallery: &models.ScraperSpec{
|
|
||||||
SupportedScrapes: supportedScrapes,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Scene: &autotagSceneScraper{&base},
|
|
||||||
Gallery: &autotagGalleryScraper{&base},
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
298
pkg/scraper/cache.go
Normal file
298
pkg/scraper/cache.go
Normal file
@@ -0,0 +1,298 @@
|
|||||||
|
package scraper
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"crypto/tls"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/logger"
|
||||||
|
"github.com/stashapp/stash/pkg/models"
|
||||||
|
"github.com/stashapp/stash/pkg/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
// scrapeGetTimeout is the timeout for scraper HTTP requests. Includes transfer time.
|
||||||
|
// We may want to bump this at some point and use local context-timeouts if more granularity
|
||||||
|
// is needed.
|
||||||
|
scrapeGetTimeout = time.Second * 60
|
||||||
|
|
||||||
|
// maxIdleConnsPerHost is the maximum number of idle connections the HTTP client will
|
||||||
|
// keep on a per-host basis.
|
||||||
|
maxIdleConnsPerHost = 8
|
||||||
|
|
||||||
|
// maxRedirects defines the maximum number of redirects the HTTP client will follow
|
||||||
|
maxRedirects = 20
|
||||||
|
)
|
||||||
|
|
||||||
|
// GlobalConfig contains the global scraper options.
|
||||||
|
type GlobalConfig interface {
|
||||||
|
GetScraperUserAgent() string
|
||||||
|
GetScrapersPath() string
|
||||||
|
GetScraperCDPPath() string
|
||||||
|
GetScraperCertCheck() bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func isCDPPathHTTP(c GlobalConfig) bool {
|
||||||
|
return strings.HasPrefix(c.GetScraperCDPPath(), "http://") || strings.HasPrefix(c.GetScraperCDPPath(), "https://")
|
||||||
|
}
|
||||||
|
|
||||||
|
func isCDPPathWS(c GlobalConfig) bool {
|
||||||
|
return strings.HasPrefix(c.GetScraperCDPPath(), "ws://")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cache stores the database of scrapers
|
||||||
|
type Cache struct {
|
||||||
|
client *http.Client
|
||||||
|
scrapers map[string]scraper // Scraper ID -> Scraper
|
||||||
|
globalConfig GlobalConfig
|
||||||
|
txnManager models.TransactionManager
|
||||||
|
}
|
||||||
|
|
||||||
|
// newClient creates a scraper-local http client we use throughout the scraper subsystem.
|
||||||
|
func newClient(gc GlobalConfig) *http.Client {
|
||||||
|
client := &http.Client{
|
||||||
|
Transport: &http.Transport{ // ignore insecure certificates
|
||||||
|
TLSClientConfig: &tls.Config{InsecureSkipVerify: !gc.GetScraperCertCheck()},
|
||||||
|
MaxIdleConnsPerHost: maxIdleConnsPerHost,
|
||||||
|
},
|
||||||
|
Timeout: scrapeGetTimeout,
|
||||||
|
// defaultCheckRedirect code with max changed from 10 to maxRedirects
|
||||||
|
CheckRedirect: func(req *http.Request, via []*http.Request) error {
|
||||||
|
if len(via) >= maxRedirects {
|
||||||
|
return fmt.Errorf("%w: gave up after %d redirects", ErrMaxRedirects, maxRedirects)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
return client
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewCache returns a new Cache loading scraper configurations from the
|
||||||
|
// scraper path provided in the global config object. It returns a new
|
||||||
|
// instance and an error if the scraper directory could not be loaded.
|
||||||
|
//
|
||||||
|
// Scraper configurations are loaded from yml files in the provided scrapers
|
||||||
|
// directory and any subdirectories.
|
||||||
|
func NewCache(globalConfig GlobalConfig, txnManager models.TransactionManager) (*Cache, error) {
|
||||||
|
// HTTP Client setup
|
||||||
|
client := newClient(globalConfig)
|
||||||
|
|
||||||
|
scrapers, err := loadScrapers(globalConfig, txnManager)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &Cache{
|
||||||
|
client: client,
|
||||||
|
globalConfig: globalConfig,
|
||||||
|
scrapers: scrapers,
|
||||||
|
txnManager: txnManager,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func loadScrapers(globalConfig GlobalConfig, txnManager models.TransactionManager) (map[string]scraper, error) {
|
||||||
|
path := globalConfig.GetScrapersPath()
|
||||||
|
scrapers := make(map[string]scraper)
|
||||||
|
|
||||||
|
// Add built-in scrapers
|
||||||
|
freeOnes := getFreeonesScraper(txnManager, globalConfig)
|
||||||
|
autoTag := getAutoTagScraper(txnManager, globalConfig)
|
||||||
|
scrapers[freeOnes.spec().ID] = freeOnes
|
||||||
|
scrapers[autoTag.spec().ID] = autoTag
|
||||||
|
|
||||||
|
logger.Debugf("Reading scraper configs from %s", path)
|
||||||
|
|
||||||
|
scraperFiles := []string{}
|
||||||
|
err := utils.SymWalk(path, func(fp string, f os.FileInfo, err error) error {
|
||||||
|
if filepath.Ext(fp) == ".yml" {
|
||||||
|
c, err := loadConfigFromYAMLFile(fp)
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("Error loading scraper %s: %v", fp, err)
|
||||||
|
} else {
|
||||||
|
scraper := newGroupScraper(*c, txnManager, globalConfig)
|
||||||
|
scrapers[scraper.spec().ID] = scraper
|
||||||
|
}
|
||||||
|
scraperFiles = append(scraperFiles, fp)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("Error reading scraper configs: %v", err)
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return scrapers, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReloadScrapers clears the scraper cache and reloads from the scraper path.
|
||||||
|
// In the event of an error during loading, the cache will be left empty.
|
||||||
|
func (c *Cache) ReloadScrapers() error {
|
||||||
|
c.scrapers = nil
|
||||||
|
scrapers, err := loadScrapers(c.globalConfig, c.txnManager)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
c.scrapers = scrapers
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListScrapers lists scrapers matching one of the given types.
|
||||||
|
// Returns a list of scrapers, sorted by their ID.
|
||||||
|
func (c Cache) ListScrapers(tys []models.ScrapeContentType) []*models.Scraper {
|
||||||
|
var ret []*models.Scraper
|
||||||
|
for _, s := range c.scrapers {
|
||||||
|
for _, t := range tys {
|
||||||
|
if s.supports(t) {
|
||||||
|
spec := s.spec()
|
||||||
|
ret = append(ret, &spec)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
sort.Slice(ret, func(i, j int) bool {
|
||||||
|
return ret[i].ID < ret[j].ID
|
||||||
|
})
|
||||||
|
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetScraper returns the scraper matching the provided id.
|
||||||
|
func (c Cache) GetScraper(scraperID string) *models.Scraper {
|
||||||
|
s := c.findScraper(scraperID)
|
||||||
|
if s != nil {
|
||||||
|
spec := s.spec()
|
||||||
|
return &spec
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c Cache) findScraper(scraperID string) scraper {
|
||||||
|
s, ok := c.scrapers[scraperID]
|
||||||
|
if ok {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c Cache) ScrapeName(ctx context.Context, id, query string, ty models.ScrapeContentType) ([]models.ScrapedContent, error) {
|
||||||
|
// find scraper with the provided id
|
||||||
|
s := c.findScraper(id)
|
||||||
|
if s == nil {
|
||||||
|
return nil, fmt.Errorf("%w: id %s", ErrNotFound, id)
|
||||||
|
}
|
||||||
|
if !s.supports(ty) {
|
||||||
|
return nil, fmt.Errorf("%w: cannot use scraper %s as a %v scraper", ErrNotSupported, id, ty)
|
||||||
|
}
|
||||||
|
|
||||||
|
ns, ok := s.(nameScraper)
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("%w: cannot use scraper %s to scrape by name", ErrNotSupported, id)
|
||||||
|
}
|
||||||
|
|
||||||
|
return ns.viaName(ctx, c.client, query, ty)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ScrapeFragment uses the given fragment input to scrape
|
||||||
|
func (c Cache) ScrapeFragment(ctx context.Context, id string, input Input) (models.ScrapedContent, error) {
|
||||||
|
s := c.findScraper(id)
|
||||||
|
if s == nil {
|
||||||
|
return nil, fmt.Errorf("%w: id %s", ErrNotFound, id)
|
||||||
|
}
|
||||||
|
|
||||||
|
fs, ok := s.(fragmentScraper)
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("%w: cannot use scraper %s as a fragment scraper", ErrNotSupported, id)
|
||||||
|
}
|
||||||
|
|
||||||
|
content, err := fs.viaFragment(ctx, c.client, input)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("error while fragment scraping with scraper %s: %w", id, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return c.postScrape(ctx, content)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ScrapeURL scrapes a given url for the given content. Searches the scraper cache
|
||||||
|
// and picks the first scraper capable of scraping the given url into the desired
|
||||||
|
// content. Returns the scraped content or an error if the scrape fails.
|
||||||
|
func (c Cache) ScrapeURL(ctx context.Context, url string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
|
||||||
|
for _, s := range c.scrapers {
|
||||||
|
if s.supportsURL(url, ty) {
|
||||||
|
ul, ok := s.(urlScraper)
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("%w: cannot use scraper %s as an url scraper", ErrNotSupported, s.spec().ID)
|
||||||
|
}
|
||||||
|
ret, err := ul.viaURL(ctx, c.client, url, ty)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if ret == nil {
|
||||||
|
return ret, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return c.postScrape(ctx, ret)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c Cache) ScrapeID(ctx context.Context, scraperID string, id int, ty models.ScrapeContentType) (models.ScrapedContent, error) {
|
||||||
|
s := c.findScraper(scraperID)
|
||||||
|
if s == nil {
|
||||||
|
return nil, fmt.Errorf("%w: id %s", ErrNotFound, scraperID)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !s.supports(ty) {
|
||||||
|
return nil, fmt.Errorf("%w: cannot use scraper %s to scrape %v content", ErrNotSupported, scraperID, ty)
|
||||||
|
}
|
||||||
|
|
||||||
|
var ret models.ScrapedContent
|
||||||
|
switch ty {
|
||||||
|
case models.ScrapeContentTypeScene:
|
||||||
|
ss, ok := s.(sceneScraper)
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("%w: cannot use scraper %s as a scene scraper", ErrNotSupported, scraperID)
|
||||||
|
}
|
||||||
|
|
||||||
|
scene, err := getScene(ctx, id, c.txnManager)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("scraper %s: unable to load scene id %v: %w", scraperID, id, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
ret, err = ss.viaScene(ctx, c.client, scene)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("scraper %s: %w", scraperID, err)
|
||||||
|
}
|
||||||
|
case models.ScrapeContentTypeGallery:
|
||||||
|
gs, ok := s.(galleryScraper)
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("%w: cannot use scraper %s as a gallery scraper", ErrNotSupported, scraperID)
|
||||||
|
}
|
||||||
|
|
||||||
|
gallery, err := getGallery(ctx, id, c.txnManager)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("scraper %s: unable to load gallery id %v: %w", scraperID, id, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
ret, err = gs.viaGallery(ctx, c.client, gallery)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("scraper %s: %w", scraperID, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return c.postScrape(ctx, ret)
|
||||||
|
}
|
||||||
@@ -8,6 +8,7 @@ import (
|
|||||||
"path/filepath"
|
"path/filepath"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/models"
|
||||||
"gopkg.in/yaml.v2"
|
"gopkg.in/yaml.v2"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -232,55 +233,118 @@ func loadConfigFromYAMLFile(path string) (*config, error) {
|
|||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c config) supportsPerformers() bool {
|
func (c config) spec() models.Scraper {
|
||||||
return c.PerformerByName != nil || c.PerformerByFragment != nil || len(c.PerformerByURL) > 0
|
ret := models.Scraper{
|
||||||
|
ID: c.ID,
|
||||||
|
Name: c.Name,
|
||||||
|
}
|
||||||
|
|
||||||
|
performer := models.ScraperSpec{}
|
||||||
|
if c.PerformerByName != nil {
|
||||||
|
performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeName)
|
||||||
|
}
|
||||||
|
if c.PerformerByFragment != nil {
|
||||||
|
performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeFragment)
|
||||||
|
}
|
||||||
|
if len(c.PerformerByURL) > 0 {
|
||||||
|
performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeURL)
|
||||||
|
for _, v := range c.PerformerByURL {
|
||||||
|
performer.Urls = append(performer.Urls, v.URL...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(performer.SupportedScrapes) > 0 {
|
||||||
|
ret.Performer = &performer
|
||||||
|
}
|
||||||
|
|
||||||
|
scene := models.ScraperSpec{}
|
||||||
|
if c.SceneByFragment != nil {
|
||||||
|
scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeFragment)
|
||||||
|
}
|
||||||
|
if c.SceneByName != nil && c.SceneByQueryFragment != nil {
|
||||||
|
scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeName)
|
||||||
|
}
|
||||||
|
if len(c.SceneByURL) > 0 {
|
||||||
|
scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeURL)
|
||||||
|
for _, v := range c.SceneByURL {
|
||||||
|
scene.Urls = append(scene.Urls, v.URL...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(scene.SupportedScrapes) > 0 {
|
||||||
|
ret.Scene = &scene
|
||||||
|
}
|
||||||
|
|
||||||
|
gallery := models.ScraperSpec{}
|
||||||
|
if c.GalleryByFragment != nil {
|
||||||
|
gallery.SupportedScrapes = append(gallery.SupportedScrapes, models.ScrapeTypeFragment)
|
||||||
|
}
|
||||||
|
if len(c.GalleryByURL) > 0 {
|
||||||
|
gallery.SupportedScrapes = append(gallery.SupportedScrapes, models.ScrapeTypeURL)
|
||||||
|
for _, v := range c.GalleryByURL {
|
||||||
|
gallery.Urls = append(gallery.Urls, v.URL...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(gallery.SupportedScrapes) > 0 {
|
||||||
|
ret.Gallery = &gallery
|
||||||
|
}
|
||||||
|
|
||||||
|
movie := models.ScraperSpec{}
|
||||||
|
if len(c.MovieByURL) > 0 {
|
||||||
|
movie.SupportedScrapes = append(movie.SupportedScrapes, models.ScrapeTypeURL)
|
||||||
|
for _, v := range c.MovieByURL {
|
||||||
|
movie.Urls = append(movie.Urls, v.URL...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(movie.SupportedScrapes) > 0 {
|
||||||
|
ret.Movie = &movie
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c config) matchesPerformerURL(url string) bool {
|
func (c config) supports(ty models.ScrapeContentType) bool {
|
||||||
for _, scraper := range c.PerformerByURL {
|
switch ty {
|
||||||
if scraper.matchesURL(url) {
|
case models.ScrapeContentTypePerformer:
|
||||||
return true
|
return c.PerformerByName != nil || c.PerformerByFragment != nil || len(c.PerformerByURL) > 0
|
||||||
}
|
case models.ScrapeContentTypeScene:
|
||||||
}
|
return (c.SceneByName != nil && c.SceneByQueryFragment != nil) || c.SceneByFragment != nil || len(c.SceneByURL) > 0
|
||||||
|
case models.ScrapeContentTypeGallery:
|
||||||
return false
|
return c.GalleryByFragment != nil || len(c.GalleryByURL) > 0
|
||||||
}
|
case models.ScrapeContentTypeMovie:
|
||||||
|
return len(c.MovieByURL) > 0
|
||||||
func (c config) supportsScenes() bool {
|
}
|
||||||
return (c.SceneByName != nil && c.SceneByQueryFragment != nil) || c.SceneByFragment != nil || len(c.SceneByURL) > 0
|
|
||||||
}
|
panic("Unhandled ScrapeContentType")
|
||||||
|
}
|
||||||
func (c config) supportsGalleries() bool {
|
|
||||||
return c.GalleryByFragment != nil || len(c.GalleryByURL) > 0
|
func (c config) matchesURL(url string, ty models.ScrapeContentType) bool {
|
||||||
}
|
switch ty {
|
||||||
|
case models.ScrapeContentTypePerformer:
|
||||||
func (c config) matchesSceneURL(url string) bool {
|
for _, scraper := range c.PerformerByURL {
|
||||||
for _, scraper := range c.SceneByURL {
|
if scraper.matchesURL(url) {
|
||||||
if scraper.matchesURL(url) {
|
return true
|
||||||
return true
|
}
|
||||||
}
|
}
|
||||||
}
|
case models.ScrapeContentTypeScene:
|
||||||
|
for _, scraper := range c.SceneByURL {
|
||||||
return false
|
if scraper.matchesURL(url) {
|
||||||
}
|
return true
|
||||||
|
}
|
||||||
func (c config) matchesGalleryURL(url string) bool {
|
}
|
||||||
for _, scraper := range c.GalleryByURL {
|
case models.ScrapeContentTypeGallery:
|
||||||
if scraper.matchesURL(url) {
|
for _, scraper := range c.GalleryByURL {
|
||||||
return true
|
if scraper.matchesURL(url) {
|
||||||
}
|
return true
|
||||||
}
|
}
|
||||||
return false
|
}
|
||||||
}
|
case models.ScrapeContentTypeMovie:
|
||||||
|
for _, scraper := range c.MovieByURL {
|
||||||
func (c config) supportsMovies() bool {
|
if scraper.matchesURL(url) {
|
||||||
return len(c.MovieByURL) > 0
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c config) matchesMovieURL(url string) bool {
|
|
||||||
for _, scraper := range c.MovieByURL {
|
|
||||||
if scraper.matchesURL(url) {
|
|
||||||
return true
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,289 +0,0 @@
|
|||||||
package scraper
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net/http"
|
|
||||||
|
|
||||||
"github.com/stashapp/stash/pkg/models"
|
|
||||||
)
|
|
||||||
|
|
||||||
type configSceneScraper struct {
|
|
||||||
*configScraper
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configSceneScraper) matchesURL(url string) bool {
|
|
||||||
return c.config.matchesSceneURL(url)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configSceneScraper) scrapeByName(name string) ([]*models.ScrapedScene, error) {
|
|
||||||
if c.config.SceneByName != nil {
|
|
||||||
s := c.config.getScraper(*c.config.SceneByName, c.client, c.txnManager, c.globalConfig)
|
|
||||||
return s.scrapeScenesByName(name)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configSceneScraper) scrapeByScene(scene *models.Scene) (*models.ScrapedScene, error) {
|
|
||||||
if c.config.SceneByFragment != nil {
|
|
||||||
s := c.config.getScraper(*c.config.SceneByFragment, c.client, c.txnManager, c.globalConfig)
|
|
||||||
return s.scrapeSceneByScene(scene)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configSceneScraper) scrapeByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
|
|
||||||
if c.config.SceneByQueryFragment != nil {
|
|
||||||
s := c.config.getScraper(*c.config.SceneByQueryFragment, c.client, c.txnManager, c.globalConfig)
|
|
||||||
return s.scrapeSceneByFragment(scene)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configSceneScraper) scrapeByURL(url string) (*models.ScrapedScene, error) {
|
|
||||||
for _, scraper := range c.config.SceneByURL {
|
|
||||||
if scraper.matchesURL(url) {
|
|
||||||
s := c.config.getScraper(scraper.scraperTypeConfig, c.client, c.txnManager, c.globalConfig)
|
|
||||||
ret, err := s.scrapeSceneByURL(url)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if ret != nil {
|
|
||||||
return ret, nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type configPerformerScraper struct {
|
|
||||||
*configScraper
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configPerformerScraper) matchesURL(url string) bool {
|
|
||||||
return c.config.matchesPerformerURL(url)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configPerformerScraper) scrapeByName(name string) ([]*models.ScrapedPerformer, error) {
|
|
||||||
if c.config.PerformerByName != nil {
|
|
||||||
s := c.config.getScraper(*c.config.PerformerByName, c.client, c.txnManager, c.globalConfig)
|
|
||||||
return s.scrapePerformersByName(name)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configPerformerScraper) scrapeByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
|
|
||||||
if c.config.PerformerByFragment != nil {
|
|
||||||
s := c.config.getScraper(*c.config.PerformerByFragment, c.client, c.txnManager, c.globalConfig)
|
|
||||||
return s.scrapePerformerByFragment(scrapedPerformer)
|
|
||||||
}
|
|
||||||
|
|
||||||
// try to match against URL if present
|
|
||||||
if scrapedPerformer.URL != nil && *scrapedPerformer.URL != "" {
|
|
||||||
return c.scrapeByURL(*scrapedPerformer.URL)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configPerformerScraper) scrapeByURL(url string) (*models.ScrapedPerformer, error) {
|
|
||||||
for _, scraper := range c.config.PerformerByURL {
|
|
||||||
if scraper.matchesURL(url) {
|
|
||||||
s := c.config.getScraper(scraper.scraperTypeConfig, c.client, c.txnManager, c.globalConfig)
|
|
||||||
ret, err := s.scrapePerformerByURL(url)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if ret != nil {
|
|
||||||
return ret, nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type configGalleryScraper struct {
|
|
||||||
*configScraper
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configGalleryScraper) matchesURL(url string) bool {
|
|
||||||
return c.config.matchesGalleryURL(url)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configGalleryScraper) scrapeByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
|
|
||||||
if c.config.GalleryByFragment != nil {
|
|
||||||
s := c.config.getScraper(*c.config.GalleryByFragment, c.client, c.txnManager, c.globalConfig)
|
|
||||||
return s.scrapeGalleryByGallery(gallery)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configGalleryScraper) scrapeByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
|
|
||||||
if c.config.GalleryByFragment != nil {
|
|
||||||
// TODO - this should be galleryByQueryFragment
|
|
||||||
s := c.config.getScraper(*c.config.GalleryByFragment, c.client, c.txnManager, c.globalConfig)
|
|
||||||
return s.scrapeGalleryByFragment(gallery)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configGalleryScraper) scrapeByURL(url string) (*models.ScrapedGallery, error) {
|
|
||||||
for _, scraper := range c.config.GalleryByURL {
|
|
||||||
if scraper.matchesURL(url) {
|
|
||||||
s := c.config.getScraper(scraper.scraperTypeConfig, c.client, c.txnManager, c.globalConfig)
|
|
||||||
ret, err := s.scrapeGalleryByURL(url)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if ret != nil {
|
|
||||||
return ret, nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// configMovieScraper adapts a configScraper for movie scraping.
type configMovieScraper struct {
	*configScraper
}
// matchesURL reports whether any configured movie URL scraper matches url.
func (c *configMovieScraper) matchesURL(url string) bool {
	return c.config.matchesMovieURL(url)
}
func (c *configMovieScraper) scrapeByURL(url string) (*models.ScrapedMovie, error) {
|
|
||||||
for _, scraper := range c.config.MovieByURL {
|
|
||||||
if scraper.matchesURL(url) {
|
|
||||||
s := c.config.getScraper(scraper.scraperTypeConfig, c.client, c.txnManager, c.globalConfig)
|
|
||||||
ret, err := s.scrapeMovieByURL(url)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if ret != nil {
|
|
||||||
return ret, nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// configScraper bundles a scraper configuration with the dependencies
// needed to execute it: an HTTP client, a transaction manager, and the
// global scraper configuration. The per-type scraper adapters embed it.
type configScraper struct {
	config       config
	client       *http.Client
	txnManager   models.TransactionManager
	globalConfig GlobalConfig
}
func createScraperFromConfig(c config, client *http.Client, txnManager models.TransactionManager, globalConfig GlobalConfig) scraper {
|
|
||||||
base := configScraper{
|
|
||||||
client: client,
|
|
||||||
config: c,
|
|
||||||
txnManager: txnManager,
|
|
||||||
globalConfig: globalConfig,
|
|
||||||
}
|
|
||||||
|
|
||||||
ret := scraper{
|
|
||||||
ID: c.ID,
|
|
||||||
Spec: configScraperSpec(c),
|
|
||||||
}
|
|
||||||
|
|
||||||
// only set fields if supported
|
|
||||||
if c.supportsPerformers() {
|
|
||||||
ret.Performer = &configPerformerScraper{&base}
|
|
||||||
}
|
|
||||||
if c.supportsGalleries() {
|
|
||||||
ret.Gallery = &configGalleryScraper{&base}
|
|
||||||
}
|
|
||||||
if c.supportsMovies() {
|
|
||||||
ret.Movie = &configMovieScraper{&base}
|
|
||||||
}
|
|
||||||
if c.supportsScenes() {
|
|
||||||
ret.Scene = &configSceneScraper{&base}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret
|
|
||||||
}
|
|
||||||
|
|
||||||
func configScraperSpec(c config) *models.Scraper {
|
|
||||||
ret := models.Scraper{
|
|
||||||
ID: c.ID,
|
|
||||||
Name: c.Name,
|
|
||||||
}
|
|
||||||
|
|
||||||
performer := models.ScraperSpec{}
|
|
||||||
if c.PerformerByName != nil {
|
|
||||||
performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeName)
|
|
||||||
}
|
|
||||||
if c.PerformerByFragment != nil {
|
|
||||||
performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeFragment)
|
|
||||||
}
|
|
||||||
if len(c.PerformerByURL) > 0 {
|
|
||||||
performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeURL)
|
|
||||||
for _, v := range c.PerformerByURL {
|
|
||||||
performer.Urls = append(performer.Urls, v.URL...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(performer.SupportedScrapes) > 0 {
|
|
||||||
ret.Performer = &performer
|
|
||||||
}
|
|
||||||
|
|
||||||
scene := models.ScraperSpec{}
|
|
||||||
if c.SceneByFragment != nil {
|
|
||||||
scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeFragment)
|
|
||||||
}
|
|
||||||
if c.SceneByName != nil && c.SceneByQueryFragment != nil {
|
|
||||||
scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeName)
|
|
||||||
}
|
|
||||||
if len(c.SceneByURL) > 0 {
|
|
||||||
scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeURL)
|
|
||||||
for _, v := range c.SceneByURL {
|
|
||||||
scene.Urls = append(scene.Urls, v.URL...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(scene.SupportedScrapes) > 0 {
|
|
||||||
ret.Scene = &scene
|
|
||||||
}
|
|
||||||
|
|
||||||
gallery := models.ScraperSpec{}
|
|
||||||
if c.GalleryByFragment != nil {
|
|
||||||
gallery.SupportedScrapes = append(gallery.SupportedScrapes, models.ScrapeTypeFragment)
|
|
||||||
}
|
|
||||||
if len(c.GalleryByURL) > 0 {
|
|
||||||
gallery.SupportedScrapes = append(gallery.SupportedScrapes, models.ScrapeTypeURL)
|
|
||||||
for _, v := range c.GalleryByURL {
|
|
||||||
gallery.Urls = append(gallery.Urls, v.URL...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(gallery.SupportedScrapes) > 0 {
|
|
||||||
ret.Gallery = &gallery
|
|
||||||
}
|
|
||||||
|
|
||||||
movie := models.ScraperSpec{}
|
|
||||||
if len(c.MovieByURL) > 0 {
|
|
||||||
movie.SupportedScrapes = append(movie.SupportedScrapes, models.ScrapeTypeURL)
|
|
||||||
for _, v := range c.MovieByURL {
|
|
||||||
movie.Urls = append(movie.Urls, v.URL...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(movie.SupportedScrapes) > 0 {
|
|
||||||
ret.Movie = &movie
|
|
||||||
}
|
|
||||||
|
|
||||||
return &ret
|
|
||||||
}
|
|
||||||
@@ -1,7 +1,6 @@
|
|||||||
package scraper
|
package scraper
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"net/http"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/stashapp/stash/pkg/logger"
|
"github.com/stashapp/stash/pkg/logger"
|
||||||
@@ -47,7 +46,7 @@ xPathScrapers:
|
|||||||
- regex: \sBio\s*$
|
- regex: \sBio\s*$
|
||||||
with: ""
|
with: ""
|
||||||
URL: //link[@rel="alternate" and @hreflang="x-default"]/@href
|
URL: //link[@rel="alternate" and @hreflang="x-default"]/@href
|
||||||
Twitter: //a[not(starts-with(@href,'https://twitter.com/FreeOnes'))][contains(@href,'twitter.com/')]/@href
|
Twitter: //a[not(starts-with(@href,'https://twitter.com/FreeOnes'))][contains(@href,'twitter.com/')]/@href
|
||||||
Instagram: //a[contains(@href,'instagram.com/')]/@href
|
Instagram: //a[contains(@href,'instagram.com/')]/@href
|
||||||
Birthdate:
|
Birthdate:
|
||||||
selector: //span[contains(text(),'Born On')]
|
selector: //span[contains(text(),'Born On')]
|
||||||
@@ -124,7 +123,7 @@ xPathScrapers:
|
|||||||
# Last updated April 13, 2021
|
# Last updated April 13, 2021
|
||||||
`
|
`
|
||||||
|
|
||||||
func getFreeonesScraper(client *http.Client, txnManager models.TransactionManager, globalConfig GlobalConfig) scraper {
|
func getFreeonesScraper(txnManager models.TransactionManager, globalConfig GlobalConfig) scraper {
|
||||||
yml := freeonesScraperConfig
|
yml := freeonesScraperConfig
|
||||||
|
|
||||||
c, err := loadConfigFromYAML(FreeonesScraperID, strings.NewReader(yml))
|
c, err := loadConfigFromYAML(FreeonesScraperID, strings.NewReader(yml))
|
||||||
@@ -132,5 +131,5 @@ func getFreeonesScraper(client *http.Client, txnManager models.TransactionManage
|
|||||||
logger.Fatalf("Error loading builtin freeones scraper: %s", err.Error())
|
logger.Fatalf("Error loading builtin freeones scraper: %s", err.Error())
|
||||||
}
|
}
|
||||||
|
|
||||||
return createScraperFromConfig(*c, client, txnManager, globalConfig)
|
return newGroupScraper(*c, txnManager, globalConfig)
|
||||||
}
|
}
|
||||||
|
|||||||
141
pkg/scraper/group.go
Normal file
141
pkg/scraper/group.go
Normal file
@@ -0,0 +1,141 @@
|
|||||||
|
package scraper
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
type group struct {
|
||||||
|
config config
|
||||||
|
|
||||||
|
txnManager models.TransactionManager
|
||||||
|
globalConf GlobalConfig
|
||||||
|
}
|
||||||
|
|
||||||
|
func newGroupScraper(c config, txnManager models.TransactionManager, globalConfig GlobalConfig) scraper {
|
||||||
|
return group{
|
||||||
|
config: c,
|
||||||
|
txnManager: txnManager,
|
||||||
|
globalConf: globalConfig,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g group) spec() models.Scraper {
|
||||||
|
return g.config.spec()
|
||||||
|
}
|
||||||
|
|
||||||
|
// fragmentScraper finds an appropriate fragment scraper based on input.
|
||||||
|
func (g group) fragmentScraper(input Input) *scraperTypeConfig {
|
||||||
|
switch {
|
||||||
|
case input.Performer != nil:
|
||||||
|
return g.config.PerformerByFragment
|
||||||
|
case input.Gallery != nil:
|
||||||
|
// TODO - this should be galleryByQueryFragment
|
||||||
|
return g.config.GalleryByFragment
|
||||||
|
case input.Scene != nil:
|
||||||
|
return g.config.SceneByQueryFragment
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g group) viaFragment(ctx context.Context, client *http.Client, input Input) (models.ScrapedContent, error) {
|
||||||
|
stc := g.fragmentScraper(input)
|
||||||
|
if stc == nil {
|
||||||
|
// If there's no performer fragment scraper in the group, we try to use
|
||||||
|
// the URL scraper. Check if there's an URL in the input, and then shift
|
||||||
|
// to an URL scrape if it's present.
|
||||||
|
if input.Performer != nil && input.Performer.URL != nil && *input.Performer.URL != "" {
|
||||||
|
return g.viaURL(ctx, client, *input.Performer.URL, models.ScrapeContentTypePerformer)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, ErrNotSupported
|
||||||
|
}
|
||||||
|
|
||||||
|
s := g.config.getScraper(*stc, client, g.txnManager, g.globalConf)
|
||||||
|
return s.scrapeByFragment(ctx, input)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g group) viaScene(ctx context.Context, client *http.Client, scene *models.Scene) (*models.ScrapedScene, error) {
|
||||||
|
if g.config.SceneByFragment == nil {
|
||||||
|
return nil, ErrNotSupported
|
||||||
|
}
|
||||||
|
|
||||||
|
s := g.config.getScraper(*g.config.SceneByFragment, client, g.txnManager, g.globalConf)
|
||||||
|
return s.scrapeSceneByScene(ctx, scene)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g group) viaGallery(ctx context.Context, client *http.Client, gallery *models.Gallery) (*models.ScrapedGallery, error) {
|
||||||
|
if g.config.GalleryByFragment == nil {
|
||||||
|
return nil, ErrNotSupported
|
||||||
|
}
|
||||||
|
|
||||||
|
s := g.config.getScraper(*g.config.GalleryByFragment, client, g.txnManager, g.globalConf)
|
||||||
|
return s.scrapeGalleryByGallery(ctx, gallery)
|
||||||
|
}
|
||||||
|
|
||||||
|
func loadUrlCandidates(c config, ty models.ScrapeContentType) []*scrapeByURLConfig {
|
||||||
|
switch ty {
|
||||||
|
case models.ScrapeContentTypePerformer:
|
||||||
|
return c.PerformerByURL
|
||||||
|
case models.ScrapeContentTypeScene:
|
||||||
|
return c.SceneByURL
|
||||||
|
case models.ScrapeContentTypeMovie:
|
||||||
|
return c.MovieByURL
|
||||||
|
case models.ScrapeContentTypeGallery:
|
||||||
|
return c.GalleryByURL
|
||||||
|
}
|
||||||
|
|
||||||
|
panic("loadUrlCandidates: unreachable")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g group) viaURL(ctx context.Context, client *http.Client, url string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
|
||||||
|
candidates := loadUrlCandidates(g.config, ty)
|
||||||
|
for _, scraper := range candidates {
|
||||||
|
if scraper.matchesURL(url) {
|
||||||
|
s := g.config.getScraper(scraper.scraperTypeConfig, client, g.txnManager, g.globalConf)
|
||||||
|
ret, err := s.scrapeByURL(ctx, url, ty)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if ret != nil {
|
||||||
|
return ret, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g group) viaName(ctx context.Context, client *http.Client, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error) {
|
||||||
|
switch ty {
|
||||||
|
case models.ScrapeContentTypePerformer:
|
||||||
|
if g.config.PerformerByName == nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
s := g.config.getScraper(*g.config.PerformerByName, client, g.txnManager, g.globalConf)
|
||||||
|
return s.scrapeByName(ctx, name, ty)
|
||||||
|
case models.ScrapeContentTypeScene:
|
||||||
|
if g.config.SceneByName == nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
s := g.config.getScraper(*g.config.SceneByName, client, g.txnManager, g.globalConf)
|
||||||
|
return s.scrapeByName(ctx, name, ty)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, fmt.Errorf("%w: cannot load %v by name", ErrNotSupported, ty)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g group) supports(ty models.ScrapeContentType) bool {
|
||||||
|
return g.config.supports(ty)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g group) supportsURL(url string, ty models.ScrapeContentType) bool {
|
||||||
|
return g.config.matchesURL(url, ty)
|
||||||
|
}
|
||||||
@@ -12,7 +12,7 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func setPerformerImage(ctx context.Context, client *http.Client, p *models.ScrapedPerformer, globalConfig GlobalConfig) error {
|
func setPerformerImage(ctx context.Context, client *http.Client, p *models.ScrapedPerformer, globalConfig GlobalConfig) error {
|
||||||
if p == nil || p.Image == nil || !strings.HasPrefix(*p.Image, "http") {
|
if p.Image == nil || !strings.HasPrefix(*p.Image, "http") {
|
||||||
// nothing to do
|
// nothing to do
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@@ -31,7 +31,7 @@ func setPerformerImage(ctx context.Context, client *http.Client, p *models.Scrap
|
|||||||
|
|
||||||
func setSceneImage(ctx context.Context, client *http.Client, s *models.ScrapedScene, globalConfig GlobalConfig) error {
|
func setSceneImage(ctx context.Context, client *http.Client, s *models.ScrapedScene, globalConfig GlobalConfig) error {
|
||||||
// don't try to get the image if it doesn't appear to be a URL
|
// don't try to get the image if it doesn't appear to be a URL
|
||||||
if s == nil || s.Image == nil || !strings.HasPrefix(*s.Image, "http") {
|
if s.Image == nil || !strings.HasPrefix(*s.Image, "http") {
|
||||||
// nothing to do
|
// nothing to do
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@@ -48,7 +48,7 @@ func setSceneImage(ctx context.Context, client *http.Client, s *models.ScrapedSc
|
|||||||
|
|
||||||
func setMovieFrontImage(ctx context.Context, client *http.Client, m *models.ScrapedMovie, globalConfig GlobalConfig) error {
|
func setMovieFrontImage(ctx context.Context, client *http.Client, m *models.ScrapedMovie, globalConfig GlobalConfig) error {
|
||||||
// don't try to get the image if it doesn't appear to be a URL
|
// don't try to get the image if it doesn't appear to be a URL
|
||||||
if m == nil || m.FrontImage == nil || !strings.HasPrefix(*m.FrontImage, "http") {
|
if m.FrontImage == nil || !strings.HasPrefix(*m.FrontImage, "http") {
|
||||||
// nothing to do
|
// nothing to do
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@@ -65,7 +65,7 @@ func setMovieFrontImage(ctx context.Context, client *http.Client, m *models.Scra
|
|||||||
|
|
||||||
func setMovieBackImage(ctx context.Context, client *http.Client, m *models.ScrapedMovie, globalConfig GlobalConfig) error {
|
func setMovieBackImage(ctx context.Context, client *http.Client, m *models.ScrapedMovie, globalConfig GlobalConfig) error {
|
||||||
// don't try to get the image if it doesn't appear to be a URL
|
// don't try to get the image if it doesn't appear to be a URL
|
||||||
if m == nil || m.BackImage == nil || !strings.HasPrefix(*m.BackImage, "http") {
|
if m.BackImage == nil || !strings.HasPrefix(*m.BackImage, "http") {
|
||||||
// nothing to do
|
// nothing to do
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ package scraper
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"errors"
|
"errors"
|
||||||
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/url"
|
"net/url"
|
||||||
@@ -74,55 +75,33 @@ func (s *jsonScraper) loadURL(ctx context.Context, url string) (string, error) {
|
|||||||
return docStr, err
|
return docStr, err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *jsonScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) {
|
func (s *jsonScraper) scrapeByURL(ctx context.Context, url string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
|
||||||
u := replaceURL(url, s.scraper) // allow a URL Replace for performer by URL queries
|
u := replaceURL(url, s.scraper) // allow a URL Replace for url-queries
|
||||||
doc, scraper, err := s.scrapeURL(context.TODO(), u)
|
doc, scraper, err := s.scrapeURL(ctx, u)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
q := s.getJsonQuery(doc)
|
q := s.getJsonQuery(doc)
|
||||||
return scraper.scrapePerformer(q)
|
switch ty {
|
||||||
}
|
case models.ScrapeContentTypePerformer:
|
||||||
|
return scraper.scrapePerformer(ctx, q)
|
||||||
func (s *jsonScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error) {
|
case models.ScrapeContentTypeScene:
|
||||||
u := replaceURL(url, s.scraper) // allow a URL Replace for scene by URL queries
|
return scraper.scrapeScene(ctx, q)
|
||||||
doc, scraper, err := s.scrapeURL(context.TODO(), u)
|
case models.ScrapeContentTypeGallery:
|
||||||
if err != nil {
|
return scraper.scrapeGallery(ctx, q)
|
||||||
return nil, err
|
case models.ScrapeContentTypeMovie:
|
||||||
|
return scraper.scrapeMovie(ctx, q)
|
||||||
}
|
}
|
||||||
|
|
||||||
q := s.getJsonQuery(doc)
|
return nil, ErrNotSupported
|
||||||
return scraper.scrapeScene(q)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *jsonScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) {
|
func (s *jsonScraper) scrapeByName(ctx context.Context, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error) {
|
||||||
u := replaceURL(url, s.scraper) // allow a URL Replace for gallery by URL queries
|
|
||||||
doc, scraper, err := s.scrapeURL(context.TODO(), u)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
q := s.getJsonQuery(doc)
|
|
||||||
return scraper.scrapeGallery(q)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *jsonScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) {
|
|
||||||
u := replaceURL(url, s.scraper) // allow a URL Replace for movie by URL queries
|
|
||||||
doc, scraper, err := s.scrapeURL(context.TODO(), u)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
q := s.getJsonQuery(doc)
|
|
||||||
return scraper.scrapeMovie(q)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *jsonScraper) scrapePerformersByName(name string) ([]*models.ScrapedPerformer, error) {
|
|
||||||
scraper := s.getJsonScraper()
|
scraper := s.getJsonScraper()
|
||||||
|
|
||||||
if scraper == nil {
|
if scraper == nil {
|
||||||
return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
|
return nil, fmt.Errorf("%w: name %v", ErrNotFound, s.scraper.Scraper)
|
||||||
}
|
}
|
||||||
|
|
||||||
const placeholder = "{}"
|
const placeholder = "{}"
|
||||||
@@ -133,46 +112,45 @@ func (s *jsonScraper) scrapePerformersByName(name string) ([]*models.ScrapedPerf
|
|||||||
url := s.scraper.QueryURL
|
url := s.scraper.QueryURL
|
||||||
url = strings.ReplaceAll(url, placeholder, escapedName)
|
url = strings.ReplaceAll(url, placeholder, escapedName)
|
||||||
|
|
||||||
doc, err := s.loadURL(context.TODO(), url)
|
doc, err := s.loadURL(ctx, url)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
q := s.getJsonQuery(doc)
|
q := s.getJsonQuery(doc)
|
||||||
return scraper.scrapePerformers(q)
|
q.setType(SearchQuery)
|
||||||
}
|
|
||||||
|
|
||||||
func (s *jsonScraper) scrapePerformerByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
|
var content []models.ScrapedContent
|
||||||
return nil, errors.New("scrapePerformerByFragment not supported for json scraper")
|
switch ty {
|
||||||
}
|
case models.ScrapeContentTypePerformer:
|
||||||
|
performers, err := scraper.scrapePerformers(ctx, q)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
func (s *jsonScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, error) {
|
for _, p := range performers {
|
||||||
scraper := s.getJsonScraper()
|
content = append(content, p)
|
||||||
|
}
|
||||||
|
|
||||||
if scraper == nil {
|
return content, nil
|
||||||
return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
|
case models.ScrapeContentTypeScene:
|
||||||
|
scenes, err := scraper.scrapeScenes(ctx, q)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, s := range scenes {
|
||||||
|
content = append(content, s)
|
||||||
|
}
|
||||||
|
|
||||||
|
return content, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
const placeholder = "{}"
|
return nil, ErrNotSupported
|
||||||
|
|
||||||
// replace the placeholder string with the URL-escaped name
|
|
||||||
escapedName := url.QueryEscape(name)
|
|
||||||
|
|
||||||
url := s.scraper.QueryURL
|
|
||||||
url = strings.ReplaceAll(url, placeholder, escapedName)
|
|
||||||
|
|
||||||
doc, err := s.loadURL(context.TODO(), url)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
q := s.getJsonQuery(doc)
|
|
||||||
return scraper.scrapeScenes(q)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *jsonScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) {
|
func (s *jsonScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) {
|
||||||
// construct the URL
|
// construct the URL
|
||||||
queryURL := queryURLParametersFromScene(scene)
|
queryURL := queryURLParametersFromScene(scene)
|
||||||
if s.scraper.QueryURLReplacements != nil {
|
if s.scraper.QueryURLReplacements != nil {
|
||||||
@@ -186,17 +164,28 @@ func (s *jsonScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedSc
|
|||||||
return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
|
return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
|
||||||
}
|
}
|
||||||
|
|
||||||
doc, err := s.loadURL(context.TODO(), url)
|
doc, err := s.loadURL(ctx, url)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
q := s.getJsonQuery(doc)
|
q := s.getJsonQuery(doc)
|
||||||
return scraper.scrapeScene(q)
|
return scraper.scrapeScene(ctx, q)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *jsonScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
|
func (s *jsonScraper) scrapeByFragment(ctx context.Context, input Input) (models.ScrapedContent, error) {
|
||||||
|
switch {
|
||||||
|
case input.Gallery != nil:
|
||||||
|
return nil, fmt.Errorf("%w: cannot use a json scraper as a gallery fragment scraper", ErrNotSupported)
|
||||||
|
case input.Performer != nil:
|
||||||
|
return nil, fmt.Errorf("%w: cannot use a json scraper as a performer fragment scraper", ErrNotSupported)
|
||||||
|
case input.Scene == nil:
|
||||||
|
return nil, fmt.Errorf("%w: scene input is nil", ErrNotSupported)
|
||||||
|
}
|
||||||
|
|
||||||
|
scene := *input.Scene
|
||||||
|
|
||||||
// construct the URL
|
// construct the URL
|
||||||
queryURL := queryURLParametersFromScrapedScene(scene)
|
queryURL := queryURLParametersFromScrapedScene(scene)
|
||||||
if s.scraper.QueryURLReplacements != nil {
|
if s.scraper.QueryURLReplacements != nil {
|
||||||
@@ -210,17 +199,17 @@ func (s *jsonScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*mo
|
|||||||
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
|
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
|
||||||
}
|
}
|
||||||
|
|
||||||
doc, err := s.loadURL(context.TODO(), url)
|
doc, err := s.loadURL(ctx, url)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
q := s.getJsonQuery(doc)
|
q := s.getJsonQuery(doc)
|
||||||
return scraper.scrapeScene(q)
|
return scraper.scrapeScene(ctx, q)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *jsonScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
|
func (s *jsonScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) {
|
||||||
// construct the URL
|
// construct the URL
|
||||||
queryURL := queryURLParametersFromGallery(gallery)
|
queryURL := queryURLParametersFromGallery(gallery)
|
||||||
if s.scraper.QueryURLReplacements != nil {
|
if s.scraper.QueryURLReplacements != nil {
|
||||||
@@ -234,18 +223,14 @@ func (s *jsonScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.S
|
|||||||
return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
|
return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
|
||||||
}
|
}
|
||||||
|
|
||||||
doc, err := s.loadURL(context.TODO(), url)
|
doc, err := s.loadURL(ctx, url)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
q := s.getJsonQuery(doc)
|
q := s.getJsonQuery(doc)
|
||||||
return scraper.scrapeGallery(q)
|
return scraper.scrapeGallery(ctx, q)
|
||||||
}
|
|
||||||
|
|
||||||
func (s *jsonScraper) scrapeGalleryByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
|
|
||||||
return nil, errors.New("scrapeGalleryByFragment not supported for json scraper")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *jsonScraper) getJsonQuery(doc string) *jsonQuery {
|
func (s *jsonScraper) getJsonQuery(doc string) *jsonQuery {
|
||||||
@@ -256,16 +241,24 @@ func (s *jsonScraper) getJsonQuery(doc string) *jsonQuery {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type jsonQuery struct {
|
type jsonQuery struct {
|
||||||
doc string
|
doc string
|
||||||
scraper *jsonScraper
|
scraper *jsonScraper
|
||||||
|
queryType QueryType
|
||||||
}
|
}
|
||||||
|
|
||||||
func (q *jsonQuery) runQuery(selector string) []string {
|
func (q *jsonQuery) getType() QueryType {
|
||||||
|
return q.queryType
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *jsonQuery) setType(t QueryType) {
|
||||||
|
q.queryType = t
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *jsonQuery) runQuery(selector string) ([]string, error) {
|
||||||
value := gjson.Get(q.doc, selector)
|
value := gjson.Get(q.doc, selector)
|
||||||
|
|
||||||
if !value.Exists() {
|
if !value.Exists() {
|
||||||
logger.Warnf("Could not find json path '%s' in json object", selector)
|
return nil, fmt.Errorf("could not find json path '%s' in json object", selector)
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
var ret []string
|
var ret []string
|
||||||
@@ -278,11 +271,11 @@ func (q *jsonQuery) runQuery(selector string) []string {
|
|||||||
ret = append(ret, value.String())
|
ret = append(ret, value.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
return ret
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (q *jsonQuery) subScrape(value string) mappedQuery {
|
func (q *jsonQuery) subScrape(ctx context.Context, value string) mappedQuery {
|
||||||
doc, err := q.scraper.loadURL(context.TODO(), value)
|
doc, err := q.scraper.loadURL(ctx, value)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Warnf("Error getting URL '%s' for sub-scraper: %s", value, err.Error())
|
logger.Warnf("Error getting URL '%s' for sub-scraper: %s", value, err.Error())
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
package scraper
|
package scraper
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"gopkg.in/yaml.v2"
|
"gopkg.in/yaml.v2"
|
||||||
@@ -81,7 +82,7 @@ jsonScrapers:
|
|||||||
doc: json,
|
doc: json,
|
||||||
}
|
}
|
||||||
|
|
||||||
scrapedPerformer, err := performerScraper.scrapePerformer(q)
|
scrapedPerformer, err := performerScraper.scrapePerformer(context.Background(), q)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("Error scraping performer: %s", err.Error())
|
t.Fatalf("Error scraping performer: %s", err.Error())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
package scraper
|
package scraper
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"math"
|
"math"
|
||||||
@@ -17,8 +18,10 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type mappedQuery interface {
|
type mappedQuery interface {
|
||||||
runQuery(selector string) []string
|
runQuery(selector string) ([]string, error)
|
||||||
subScrape(value string) mappedQuery
|
getType() QueryType
|
||||||
|
setType(QueryType)
|
||||||
|
subScrape(ctx context.Context, value string) mappedQuery
|
||||||
}
|
}
|
||||||
|
|
||||||
type commonMappedConfig map[string]string
|
type commonMappedConfig map[string]string
|
||||||
@@ -38,7 +41,7 @@ func (s mappedConfig) applyCommon(c commonMappedConfig, src string) string {
|
|||||||
return ret
|
return ret
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s mappedConfig) process(q mappedQuery, common commonMappedConfig) mappedResults {
|
func (s mappedConfig) process(ctx context.Context, q mappedQuery, common commonMappedConfig) mappedResults {
|
||||||
var ret mappedResults
|
var ret mappedResults
|
||||||
|
|
||||||
for k, attrConfig := range s {
|
for k, attrConfig := range s {
|
||||||
@@ -51,10 +54,13 @@ func (s mappedConfig) process(q mappedQuery, common commonMappedConfig) mappedRe
|
|||||||
selector := attrConfig.Selector
|
selector := attrConfig.Selector
|
||||||
selector = s.applyCommon(common, selector)
|
selector = s.applyCommon(common, selector)
|
||||||
|
|
||||||
found := q.runQuery(selector)
|
found, err := q.runQuery(selector)
|
||||||
|
if err != nil {
|
||||||
|
logger.Warnf("key '%v': %v", k, err)
|
||||||
|
}
|
||||||
|
|
||||||
if len(found) > 0 {
|
if len(found) > 0 {
|
||||||
result := s.postProcess(q, attrConfig, found)
|
result := s.postProcess(ctx, q, attrConfig, found)
|
||||||
for i, text := range result {
|
for i, text := range result {
|
||||||
ret = ret.setKey(i, k, text)
|
ret = ret.setKey(i, k, text)
|
||||||
}
|
}
|
||||||
@@ -65,14 +71,18 @@ func (s mappedConfig) process(q mappedQuery, common commonMappedConfig) mappedRe
|
|||||||
return ret
|
return ret
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s mappedConfig) postProcess(q mappedQuery, attrConfig mappedScraperAttrConfig, found []string) []string {
|
func (s mappedConfig) postProcess(ctx context.Context, q mappedQuery, attrConfig mappedScraperAttrConfig, found []string) []string {
|
||||||
// check if we're concatenating the results into a single result
|
// check if we're concatenating the results into a single result
|
||||||
var ret []string
|
var ret []string
|
||||||
if attrConfig.hasConcat() {
|
if attrConfig.hasConcat() {
|
||||||
result := attrConfig.concatenateResults(found)
|
result := attrConfig.concatenateResults(found)
|
||||||
result = attrConfig.postProcess(result, q)
|
result = attrConfig.postProcess(ctx, result, q)
|
||||||
if attrConfig.hasSplit() {
|
if attrConfig.hasSplit() {
|
||||||
results := attrConfig.splitString(result)
|
results := attrConfig.splitString(result)
|
||||||
|
// skip cleaning when the query is used for searching
|
||||||
|
if q.getType() == SearchQuery {
|
||||||
|
return results
|
||||||
|
}
|
||||||
results = attrConfig.cleanResults(results)
|
results = attrConfig.cleanResults(results)
|
||||||
return results
|
return results
|
||||||
}
|
}
|
||||||
@@ -80,14 +90,19 @@ func (s mappedConfig) postProcess(q mappedQuery, attrConfig mappedScraperAttrCon
|
|||||||
ret = []string{result}
|
ret = []string{result}
|
||||||
} else {
|
} else {
|
||||||
for _, text := range found {
|
for _, text := range found {
|
||||||
text = attrConfig.postProcess(text, q)
|
text = attrConfig.postProcess(ctx, text, q)
|
||||||
if attrConfig.hasSplit() {
|
if attrConfig.hasSplit() {
|
||||||
return attrConfig.splitString(text)
|
return attrConfig.splitString(text)
|
||||||
}
|
}
|
||||||
|
|
||||||
ret = append(ret, text)
|
ret = append(ret, text)
|
||||||
}
|
}
|
||||||
|
// skip cleaning when the query is used for searching
|
||||||
|
if q.getType() == SearchQuery {
|
||||||
|
return ret
|
||||||
|
}
|
||||||
ret = attrConfig.cleanResults(ret)
|
ret = attrConfig.cleanResults(ret)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return ret
|
return ret
|
||||||
@@ -356,12 +371,12 @@ func (c mappedRegexConfigs) apply(value string) string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type postProcessAction interface {
|
type postProcessAction interface {
|
||||||
Apply(value string, q mappedQuery) string
|
Apply(ctx context.Context, value string, q mappedQuery) string
|
||||||
}
|
}
|
||||||
|
|
||||||
type postProcessParseDate string
|
type postProcessParseDate string
|
||||||
|
|
||||||
func (p *postProcessParseDate) Apply(value string, q mappedQuery) string {
|
func (p *postProcessParseDate) Apply(ctx context.Context, value string, q mappedQuery) string {
|
||||||
parseDate := string(*p)
|
parseDate := string(*p)
|
||||||
|
|
||||||
const internalDateFormat = "2006-01-02"
|
const internalDateFormat = "2006-01-02"
|
||||||
@@ -393,7 +408,7 @@ func (p *postProcessParseDate) Apply(value string, q mappedQuery) string {
|
|||||||
|
|
||||||
type postProcessSubtractDays bool
|
type postProcessSubtractDays bool
|
||||||
|
|
||||||
func (p *postProcessSubtractDays) Apply(value string, q mappedQuery) string {
|
func (p *postProcessSubtractDays) Apply(ctx context.Context, value string, q mappedQuery) string {
|
||||||
const internalDateFormat = "2006-01-02"
|
const internalDateFormat = "2006-01-02"
|
||||||
|
|
||||||
i, err := strconv.Atoi(value)
|
i, err := strconv.Atoi(value)
|
||||||
@@ -409,21 +424,24 @@ func (p *postProcessSubtractDays) Apply(value string, q mappedQuery) string {
|
|||||||
|
|
||||||
type postProcessReplace mappedRegexConfigs
|
type postProcessReplace mappedRegexConfigs
|
||||||
|
|
||||||
func (c *postProcessReplace) Apply(value string, q mappedQuery) string {
|
func (c *postProcessReplace) Apply(ctx context.Context, value string, q mappedQuery) string {
|
||||||
replace := mappedRegexConfigs(*c)
|
replace := mappedRegexConfigs(*c)
|
||||||
return replace.apply(value)
|
return replace.apply(value)
|
||||||
}
|
}
|
||||||
|
|
||||||
type postProcessSubScraper mappedScraperAttrConfig
|
type postProcessSubScraper mappedScraperAttrConfig
|
||||||
|
|
||||||
func (p *postProcessSubScraper) Apply(value string, q mappedQuery) string {
|
func (p *postProcessSubScraper) Apply(ctx context.Context, value string, q mappedQuery) string {
|
||||||
subScrapeConfig := mappedScraperAttrConfig(*p)
|
subScrapeConfig := mappedScraperAttrConfig(*p)
|
||||||
|
|
||||||
logger.Debugf("Sub-scraping for: %s", value)
|
logger.Debugf("Sub-scraping for: %s", value)
|
||||||
ss := q.subScrape(value)
|
ss := q.subScrape(ctx, value)
|
||||||
|
|
||||||
if ss != nil {
|
if ss != nil {
|
||||||
found := ss.runQuery(subScrapeConfig.Selector)
|
found, err := ss.runQuery(subScrapeConfig.Selector)
|
||||||
|
if err != nil {
|
||||||
|
logger.Warnf("subscrape for '%v': %v", value, err)
|
||||||
|
}
|
||||||
|
|
||||||
if len(found) > 0 {
|
if len(found) > 0 {
|
||||||
// check if we're concatenating the results into a single result
|
// check if we're concatenating the results into a single result
|
||||||
@@ -434,7 +452,7 @@ func (p *postProcessSubScraper) Apply(value string, q mappedQuery) string {
|
|||||||
result = found[0]
|
result = found[0]
|
||||||
}
|
}
|
||||||
|
|
||||||
result = subScrapeConfig.postProcess(result, ss)
|
result = subScrapeConfig.postProcess(ctx, result, ss)
|
||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -444,7 +462,7 @@ func (p *postProcessSubScraper) Apply(value string, q mappedQuery) string {
|
|||||||
|
|
||||||
type postProcessMap map[string]string
|
type postProcessMap map[string]string
|
||||||
|
|
||||||
func (p *postProcessMap) Apply(value string, q mappedQuery) string {
|
func (p *postProcessMap) Apply(ctx context.Context, value string, q mappedQuery) string {
|
||||||
// return the mapped value if present
|
// return the mapped value if present
|
||||||
m := *p
|
m := *p
|
||||||
mapped, ok := m[value]
|
mapped, ok := m[value]
|
||||||
@@ -458,7 +476,7 @@ func (p *postProcessMap) Apply(value string, q mappedQuery) string {
|
|||||||
|
|
||||||
type postProcessFeetToCm bool
|
type postProcessFeetToCm bool
|
||||||
|
|
||||||
func (p *postProcessFeetToCm) Apply(value string, q mappedQuery) string {
|
func (p *postProcessFeetToCm) Apply(ctx context.Context, value string, q mappedQuery) string {
|
||||||
const foot_in_cm = 30.48
|
const foot_in_cm = 30.48
|
||||||
const inch_in_cm = 2.54
|
const inch_in_cm = 2.54
|
||||||
|
|
||||||
@@ -482,7 +500,7 @@ func (p *postProcessFeetToCm) Apply(value string, q mappedQuery) string {
|
|||||||
|
|
||||||
type postProcessLbToKg bool
|
type postProcessLbToKg bool
|
||||||
|
|
||||||
func (p *postProcessLbToKg) Apply(value string, q mappedQuery) string {
|
func (p *postProcessLbToKg) Apply(ctx context.Context, value string, q mappedQuery) string {
|
||||||
const lb_in_kg = 0.45359237
|
const lb_in_kg = 0.45359237
|
||||||
w, err := strconv.ParseFloat(value, 64)
|
w, err := strconv.ParseFloat(value, 64)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
@@ -684,9 +702,9 @@ func (c mappedScraperAttrConfig) splitString(value string) []string {
|
|||||||
return res
|
return res
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c mappedScraperAttrConfig) postProcess(value string, q mappedQuery) string {
|
func (c mappedScraperAttrConfig) postProcess(ctx context.Context, value string, q mappedQuery) string {
|
||||||
for _, action := range c.postProcessActions {
|
for _, action := range c.postProcessActions {
|
||||||
value = action.Apply(value, q)
|
value = action.Apply(ctx, value, q)
|
||||||
}
|
}
|
||||||
|
|
||||||
return value
|
return value
|
||||||
@@ -742,7 +760,7 @@ func (r mappedResults) setKey(index int, key string, value string) mappedResults
|
|||||||
return r
|
return r
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s mappedScraper) scrapePerformer(q mappedQuery) (*models.ScrapedPerformer, error) {
|
func (s mappedScraper) scrapePerformer(ctx context.Context, q mappedQuery) (*models.ScrapedPerformer, error) {
|
||||||
var ret models.ScrapedPerformer
|
var ret models.ScrapedPerformer
|
||||||
|
|
||||||
performerMap := s.Performer
|
performerMap := s.Performer
|
||||||
@@ -752,14 +770,14 @@ func (s mappedScraper) scrapePerformer(q mappedQuery) (*models.ScrapedPerformer,
|
|||||||
|
|
||||||
performerTagsMap := performerMap.Tags
|
performerTagsMap := performerMap.Tags
|
||||||
|
|
||||||
results := performerMap.process(q, s.Common)
|
results := performerMap.process(ctx, q, s.Common)
|
||||||
if len(results) > 0 {
|
if len(results) > 0 {
|
||||||
results[0].apply(&ret)
|
results[0].apply(&ret)
|
||||||
|
|
||||||
// now apply the tags
|
// now apply the tags
|
||||||
if performerTagsMap != nil {
|
if performerTagsMap != nil {
|
||||||
logger.Debug(`Processing performer tags:`)
|
logger.Debug(`Processing performer tags:`)
|
||||||
tagResults := performerTagsMap.process(q, s.Common)
|
tagResults := performerTagsMap.process(ctx, q, s.Common)
|
||||||
|
|
||||||
for _, p := range tagResults {
|
for _, p := range tagResults {
|
||||||
tag := &models.ScrapedTag{}
|
tag := &models.ScrapedTag{}
|
||||||
@@ -772,7 +790,7 @@ func (s mappedScraper) scrapePerformer(q mappedQuery) (*models.ScrapedPerformer,
|
|||||||
return &ret, nil
|
return &ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s mappedScraper) scrapePerformers(q mappedQuery) ([]*models.ScrapedPerformer, error) {
|
func (s mappedScraper) scrapePerformers(ctx context.Context, q mappedQuery) ([]*models.ScrapedPerformer, error) {
|
||||||
var ret []*models.ScrapedPerformer
|
var ret []*models.ScrapedPerformer
|
||||||
|
|
||||||
performerMap := s.Performer
|
performerMap := s.Performer
|
||||||
@@ -780,7 +798,7 @@ func (s mappedScraper) scrapePerformers(q mappedQuery) ([]*models.ScrapedPerform
|
|||||||
return nil, nil
|
return nil, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
results := performerMap.process(q, s.Common)
|
results := performerMap.process(ctx, q, s.Common)
|
||||||
for _, r := range results {
|
for _, r := range results {
|
||||||
var p models.ScrapedPerformer
|
var p models.ScrapedPerformer
|
||||||
r.apply(&p)
|
r.apply(&p)
|
||||||
@@ -790,7 +808,7 @@ func (s mappedScraper) scrapePerformers(q mappedQuery) ([]*models.ScrapedPerform
|
|||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s mappedScraper) processScene(q mappedQuery, r mappedResult) *models.ScrapedScene {
|
func (s mappedScraper) processScene(ctx context.Context, q mappedQuery, r mappedResult) *models.ScrapedScene {
|
||||||
var ret models.ScrapedScene
|
var ret models.ScrapedScene
|
||||||
|
|
||||||
sceneScraperConfig := s.Scene
|
sceneScraperConfig := s.Scene
|
||||||
@@ -807,13 +825,13 @@ func (s mappedScraper) processScene(q mappedQuery, r mappedResult) *models.Scrap
|
|||||||
// process performer tags once
|
// process performer tags once
|
||||||
var performerTagResults mappedResults
|
var performerTagResults mappedResults
|
||||||
if scenePerformerTagsMap != nil {
|
if scenePerformerTagsMap != nil {
|
||||||
performerTagResults = scenePerformerTagsMap.process(q, s.Common)
|
performerTagResults = scenePerformerTagsMap.process(ctx, q, s.Common)
|
||||||
}
|
}
|
||||||
|
|
||||||
// now apply the performers and tags
|
// now apply the performers and tags
|
||||||
if scenePerformersMap.mappedConfig != nil {
|
if scenePerformersMap.mappedConfig != nil {
|
||||||
logger.Debug(`Processing scene performers:`)
|
logger.Debug(`Processing scene performers:`)
|
||||||
performerResults := scenePerformersMap.process(q, s.Common)
|
performerResults := scenePerformersMap.process(ctx, q, s.Common)
|
||||||
|
|
||||||
for _, p := range performerResults {
|
for _, p := range performerResults {
|
||||||
performer := &models.ScrapedPerformer{}
|
performer := &models.ScrapedPerformer{}
|
||||||
@@ -831,7 +849,7 @@ func (s mappedScraper) processScene(q mappedQuery, r mappedResult) *models.Scrap
|
|||||||
|
|
||||||
if sceneTagsMap != nil {
|
if sceneTagsMap != nil {
|
||||||
logger.Debug(`Processing scene tags:`)
|
logger.Debug(`Processing scene tags:`)
|
||||||
tagResults := sceneTagsMap.process(q, s.Common)
|
tagResults := sceneTagsMap.process(ctx, q, s.Common)
|
||||||
|
|
||||||
for _, p := range tagResults {
|
for _, p := range tagResults {
|
||||||
tag := &models.ScrapedTag{}
|
tag := &models.ScrapedTag{}
|
||||||
@@ -842,7 +860,7 @@ func (s mappedScraper) processScene(q mappedQuery, r mappedResult) *models.Scrap
|
|||||||
|
|
||||||
if sceneStudioMap != nil {
|
if sceneStudioMap != nil {
|
||||||
logger.Debug(`Processing scene studio:`)
|
logger.Debug(`Processing scene studio:`)
|
||||||
studioResults := sceneStudioMap.process(q, s.Common)
|
studioResults := sceneStudioMap.process(ctx, q, s.Common)
|
||||||
|
|
||||||
if len(studioResults) > 0 {
|
if len(studioResults) > 0 {
|
||||||
studio := &models.ScrapedStudio{}
|
studio := &models.ScrapedStudio{}
|
||||||
@@ -853,7 +871,7 @@ func (s mappedScraper) processScene(q mappedQuery, r mappedResult) *models.Scrap
|
|||||||
|
|
||||||
if sceneMoviesMap != nil {
|
if sceneMoviesMap != nil {
|
||||||
logger.Debug(`Processing scene movies:`)
|
logger.Debug(`Processing scene movies:`)
|
||||||
movieResults := sceneMoviesMap.process(q, s.Common)
|
movieResults := sceneMoviesMap.process(ctx, q, s.Common)
|
||||||
|
|
||||||
for _, p := range movieResults {
|
for _, p := range movieResults {
|
||||||
movie := &models.ScrapedMovie{}
|
movie := &models.ScrapedMovie{}
|
||||||
@@ -865,7 +883,7 @@ func (s mappedScraper) processScene(q mappedQuery, r mappedResult) *models.Scrap
|
|||||||
return &ret
|
return &ret
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s mappedScraper) scrapeScenes(q mappedQuery) ([]*models.ScrapedScene, error) {
|
func (s mappedScraper) scrapeScenes(ctx context.Context, q mappedQuery) ([]*models.ScrapedScene, error) {
|
||||||
var ret []*models.ScrapedScene
|
var ret []*models.ScrapedScene
|
||||||
|
|
||||||
sceneScraperConfig := s.Scene
|
sceneScraperConfig := s.Scene
|
||||||
@@ -875,16 +893,16 @@ func (s mappedScraper) scrapeScenes(q mappedQuery) ([]*models.ScrapedScene, erro
|
|||||||
}
|
}
|
||||||
|
|
||||||
logger.Debug(`Processing scenes:`)
|
logger.Debug(`Processing scenes:`)
|
||||||
results := sceneMap.process(q, s.Common)
|
results := sceneMap.process(ctx, q, s.Common)
|
||||||
for _, r := range results {
|
for _, r := range results {
|
||||||
logger.Debug(`Processing scene:`)
|
logger.Debug(`Processing scene:`)
|
||||||
ret = append(ret, s.processScene(q, r))
|
ret = append(ret, s.processScene(ctx, q, r))
|
||||||
}
|
}
|
||||||
|
|
||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s mappedScraper) scrapeScene(q mappedQuery) (*models.ScrapedScene, error) {
|
func (s mappedScraper) scrapeScene(ctx context.Context, q mappedQuery) (*models.ScrapedScene, error) {
|
||||||
var ret models.ScrapedScene
|
var ret models.ScrapedScene
|
||||||
|
|
||||||
sceneScraperConfig := s.Scene
|
sceneScraperConfig := s.Scene
|
||||||
@@ -894,16 +912,16 @@ func (s mappedScraper) scrapeScene(q mappedQuery) (*models.ScrapedScene, error)
|
|||||||
}
|
}
|
||||||
|
|
||||||
logger.Debug(`Processing scene:`)
|
logger.Debug(`Processing scene:`)
|
||||||
results := sceneMap.process(q, s.Common)
|
results := sceneMap.process(ctx, q, s.Common)
|
||||||
if len(results) > 0 {
|
if len(results) > 0 {
|
||||||
ss := s.processScene(q, results[0])
|
ss := s.processScene(ctx, q, results[0])
|
||||||
ret = *ss
|
ret = *ss
|
||||||
}
|
}
|
||||||
|
|
||||||
return &ret, nil
|
return &ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s mappedScraper) scrapeGallery(q mappedQuery) (*models.ScrapedGallery, error) {
|
func (s mappedScraper) scrapeGallery(ctx context.Context, q mappedQuery) (*models.ScrapedGallery, error) {
|
||||||
var ret models.ScrapedGallery
|
var ret models.ScrapedGallery
|
||||||
|
|
||||||
galleryScraperConfig := s.Gallery
|
galleryScraperConfig := s.Gallery
|
||||||
@@ -917,14 +935,14 @@ func (s mappedScraper) scrapeGallery(q mappedQuery) (*models.ScrapedGallery, err
|
|||||||
galleryStudioMap := galleryScraperConfig.Studio
|
galleryStudioMap := galleryScraperConfig.Studio
|
||||||
|
|
||||||
logger.Debug(`Processing gallery:`)
|
logger.Debug(`Processing gallery:`)
|
||||||
results := galleryMap.process(q, s.Common)
|
results := galleryMap.process(ctx, q, s.Common)
|
||||||
if len(results) > 0 {
|
if len(results) > 0 {
|
||||||
results[0].apply(&ret)
|
results[0].apply(&ret)
|
||||||
|
|
||||||
// now apply the performers and tags
|
// now apply the performers and tags
|
||||||
if galleryPerformersMap != nil {
|
if galleryPerformersMap != nil {
|
||||||
logger.Debug(`Processing gallery performers:`)
|
logger.Debug(`Processing gallery performers:`)
|
||||||
performerResults := galleryPerformersMap.process(q, s.Common)
|
performerResults := galleryPerformersMap.process(ctx, q, s.Common)
|
||||||
|
|
||||||
for _, p := range performerResults {
|
for _, p := range performerResults {
|
||||||
performer := &models.ScrapedPerformer{}
|
performer := &models.ScrapedPerformer{}
|
||||||
@@ -935,7 +953,7 @@ func (s mappedScraper) scrapeGallery(q mappedQuery) (*models.ScrapedGallery, err
|
|||||||
|
|
||||||
if galleryTagsMap != nil {
|
if galleryTagsMap != nil {
|
||||||
logger.Debug(`Processing gallery tags:`)
|
logger.Debug(`Processing gallery tags:`)
|
||||||
tagResults := galleryTagsMap.process(q, s.Common)
|
tagResults := galleryTagsMap.process(ctx, q, s.Common)
|
||||||
|
|
||||||
for _, p := range tagResults {
|
for _, p := range tagResults {
|
||||||
tag := &models.ScrapedTag{}
|
tag := &models.ScrapedTag{}
|
||||||
@@ -946,7 +964,7 @@ func (s mappedScraper) scrapeGallery(q mappedQuery) (*models.ScrapedGallery, err
|
|||||||
|
|
||||||
if galleryStudioMap != nil {
|
if galleryStudioMap != nil {
|
||||||
logger.Debug(`Processing gallery studio:`)
|
logger.Debug(`Processing gallery studio:`)
|
||||||
studioResults := galleryStudioMap.process(q, s.Common)
|
studioResults := galleryStudioMap.process(ctx, q, s.Common)
|
||||||
|
|
||||||
if len(studioResults) > 0 {
|
if len(studioResults) > 0 {
|
||||||
studio := &models.ScrapedStudio{}
|
studio := &models.ScrapedStudio{}
|
||||||
@@ -959,7 +977,7 @@ func (s mappedScraper) scrapeGallery(q mappedQuery) (*models.ScrapedGallery, err
|
|||||||
return &ret, nil
|
return &ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s mappedScraper) scrapeMovie(q mappedQuery) (*models.ScrapedMovie, error) {
|
func (s mappedScraper) scrapeMovie(ctx context.Context, q mappedQuery) (*models.ScrapedMovie, error) {
|
||||||
var ret models.ScrapedMovie
|
var ret models.ScrapedMovie
|
||||||
|
|
||||||
movieScraperConfig := s.Movie
|
movieScraperConfig := s.Movie
|
||||||
@@ -970,13 +988,13 @@ func (s mappedScraper) scrapeMovie(q mappedQuery) (*models.ScrapedMovie, error)
|
|||||||
|
|
||||||
movieStudioMap := movieScraperConfig.Studio
|
movieStudioMap := movieScraperConfig.Studio
|
||||||
|
|
||||||
results := movieMap.process(q, s.Common)
|
results := movieMap.process(ctx, q, s.Common)
|
||||||
if len(results) > 0 {
|
if len(results) > 0 {
|
||||||
results[0].apply(&ret)
|
results[0].apply(&ret)
|
||||||
|
|
||||||
if movieStudioMap != nil {
|
if movieStudioMap != nil {
|
||||||
logger.Debug(`Processing movie studio:`)
|
logger.Debug(`Processing movie studio:`)
|
||||||
studioResults := movieStudioMap.process(q, s.Common)
|
studioResults := movieStudioMap.process(ctx, q, s.Common)
|
||||||
|
|
||||||
if len(studioResults) > 0 {
|
if len(studioResults) > 0 {
|
||||||
studio := &models.ScrapedStudio{}
|
studio := &models.ScrapedStudio{}
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
package scraper
|
package scraper
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
@@ -15,7 +16,7 @@ performerByURL:
|
|||||||
xPathScrapers:
|
xPathScrapers:
|
||||||
performerScraper:
|
performerScraper:
|
||||||
performer:
|
performer:
|
||||||
Name:
|
Name:
|
||||||
selector: //div/a/@href
|
selector: //div/a/@href
|
||||||
postProcess:
|
postProcess:
|
||||||
- parseDate: Jan 2, 2006
|
- parseDate: Jan 2, 2006
|
||||||
@@ -55,6 +56,6 @@ func TestFeetToCM(t *testing.T) {
|
|||||||
q := &xpathQuery{}
|
q := &xpathQuery{}
|
||||||
|
|
||||||
for _, test := range feetToCMTests {
|
for _, test := range feetToCMTests {
|
||||||
assert.Equal(t, test.out, pp.Apply(test.in, q))
|
assert.Equal(t, test.out, pp.Apply(context.Background(), test.in, q))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
236
pkg/scraper/postprocessing.go
Normal file
236
pkg/scraper/postprocessing.go
Normal file
@@ -0,0 +1,236 @@
|
|||||||
|
package scraper
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/logger"
|
||||||
|
stash_config "github.com/stashapp/stash/pkg/manager/config"
|
||||||
|
"github.com/stashapp/stash/pkg/match"
|
||||||
|
"github.com/stashapp/stash/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// postScrape handles post-processing of scraped content. If the content
|
||||||
|
// requires post-processing, this function fans out to the given content
|
||||||
|
// type and post-processes it.
|
||||||
|
func (c Cache) postScrape(ctx context.Context, content models.ScrapedContent) (models.ScrapedContent, error) {
|
||||||
|
// Analyze the concrete type, call the right post-processing function
|
||||||
|
switch v := content.(type) {
|
||||||
|
case *models.ScrapedPerformer:
|
||||||
|
if v != nil {
|
||||||
|
return c.postScrapePerformer(ctx, *v)
|
||||||
|
}
|
||||||
|
case models.ScrapedPerformer:
|
||||||
|
return c.postScrapePerformer(ctx, v)
|
||||||
|
case *models.ScrapedScene:
|
||||||
|
if v != nil {
|
||||||
|
return c.postScrapeScene(ctx, *v)
|
||||||
|
}
|
||||||
|
case models.ScrapedScene:
|
||||||
|
return c.postScrapeScene(ctx, v)
|
||||||
|
case *models.ScrapedGallery:
|
||||||
|
if v != nil {
|
||||||
|
return c.postScrapeGallery(ctx, *v)
|
||||||
|
}
|
||||||
|
case models.ScrapedGallery:
|
||||||
|
return c.postScrapeGallery(ctx, v)
|
||||||
|
case *models.ScrapedMovie:
|
||||||
|
if v != nil {
|
||||||
|
return c.postScrapeMovie(ctx, *v)
|
||||||
|
}
|
||||||
|
case models.ScrapedMovie:
|
||||||
|
return c.postScrapeMovie(ctx, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
// If nothing matches, pass the content through
|
||||||
|
return content, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c Cache) postScrapePerformer(ctx context.Context, p models.ScrapedPerformer) (models.ScrapedContent, error) {
|
||||||
|
if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
|
||||||
|
tqb := r.Tag()
|
||||||
|
|
||||||
|
tags, err := postProcessTags(tqb, p.Tags)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
p.Tags = tags
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// post-process - set the image if applicable
|
||||||
|
if err := setPerformerImage(ctx, c.client, &p, c.globalConfig); err != nil {
|
||||||
|
logger.Warnf("Could not set image using URL %s: %s", *p.Image, err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
return p, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c Cache) postScrapeMovie(ctx context.Context, m models.ScrapedMovie) (models.ScrapedContent, error) {
|
||||||
|
if m.Studio != nil {
|
||||||
|
if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
|
||||||
|
return match.ScrapedStudio(r.Studio(), m.Studio, nil)
|
||||||
|
}); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// post-process - set the image if applicable
|
||||||
|
if err := setMovieFrontImage(ctx, c.client, &m, c.globalConfig); err != nil {
|
||||||
|
logger.Warnf("could not set front image using URL %s: %v", *m.FrontImage, err)
|
||||||
|
}
|
||||||
|
if err := setMovieBackImage(ctx, c.client, &m, c.globalConfig); err != nil {
|
||||||
|
logger.Warnf("could not set back image using URL %s: %v", *m.BackImage, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c Cache) postScrapeScenePerformer(ctx context.Context, p models.ScrapedPerformer) error {
|
||||||
|
if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
|
||||||
|
tqb := r.Tag()
|
||||||
|
|
||||||
|
tags, err := postProcessTags(tqb, p.Tags)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
p.Tags = tags
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c Cache) postScrapeScene(ctx context.Context, scene models.ScrapedScene) (models.ScrapedContent, error) {
|
||||||
|
if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
|
||||||
|
pqb := r.Performer()
|
||||||
|
mqb := r.Movie()
|
||||||
|
tqb := r.Tag()
|
||||||
|
sqb := r.Studio()
|
||||||
|
|
||||||
|
for _, p := range scene.Performers {
|
||||||
|
if p == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.postScrapeScenePerformer(ctx, *p); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := match.ScrapedPerformer(pqb, p, nil); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, p := range scene.Movies {
|
||||||
|
err := match.ScrapedMovie(mqb, p)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tags, err := postProcessTags(tqb, scene.Tags)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
scene.Tags = tags
|
||||||
|
|
||||||
|
if scene.Studio != nil {
|
||||||
|
err := match.ScrapedStudio(sqb, scene.Studio, nil)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// post-process - set the image if applicable
|
||||||
|
if err := setSceneImage(ctx, c.client, &scene, c.globalConfig); err != nil {
|
||||||
|
logger.Warnf("Could not set image using URL %s: %v", *scene.Image, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return scene, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c Cache) postScrapeGallery(ctx context.Context, g models.ScrapedGallery) (models.ScrapedContent, error) {
|
||||||
|
if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
|
||||||
|
pqb := r.Performer()
|
||||||
|
tqb := r.Tag()
|
||||||
|
sqb := r.Studio()
|
||||||
|
|
||||||
|
for _, p := range g.Performers {
|
||||||
|
err := match.ScrapedPerformer(pqb, p, nil)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tags, err := postProcessTags(tqb, g.Tags)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
g.Tags = tags
|
||||||
|
|
||||||
|
if g.Studio != nil {
|
||||||
|
err := match.ScrapedStudio(sqb, g.Studio, nil)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return g, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func postProcessTags(tqb models.TagReader, scrapedTags []*models.ScrapedTag) ([]*models.ScrapedTag, error) {
|
||||||
|
var ret []*models.ScrapedTag
|
||||||
|
|
||||||
|
excludePatterns := stash_config.GetInstance().GetScraperExcludeTagPatterns()
|
||||||
|
var excludeRegexps []*regexp.Regexp
|
||||||
|
|
||||||
|
for _, excludePattern := range excludePatterns {
|
||||||
|
reg, err := regexp.Compile(strings.ToLower(excludePattern))
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("Invalid tag exclusion pattern :%v", err)
|
||||||
|
} else {
|
||||||
|
excludeRegexps = append(excludeRegexps, reg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var ignoredTags []string
|
||||||
|
ScrapeTag:
|
||||||
|
for _, t := range scrapedTags {
|
||||||
|
for _, reg := range excludeRegexps {
|
||||||
|
if reg.MatchString(strings.ToLower(t.Name)) {
|
||||||
|
ignoredTags = append(ignoredTags, t.Name)
|
||||||
|
continue ScrapeTag
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
err := match.ScrapedTag(tqb, t)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
ret = append(ret, t)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(ignoredTags) > 0 {
|
||||||
|
logger.Infof("Scraping ignored tags: %s", strings.Join(ignoredTags, ", "))
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret, nil
|
||||||
|
}
|
||||||
@@ -1,51 +1,86 @@
|
|||||||
package scraper
|
package scraper
|
||||||
|
|
||||||
import "github.com/stashapp/stash/pkg/models"
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
type urlMatcher interface {
|
"github.com/stashapp/stash/pkg/models"
|
||||||
matchesURL(url string) bool
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
// ErrMaxRedirects is returned if the max number of HTTP redirects are reached.
|
||||||
|
ErrMaxRedirects = errors.New("maximum number of HTTP redirects reached")
|
||||||
|
|
||||||
|
// ErrNotFound is returned when an entity isn't found
|
||||||
|
ErrNotFound = errors.New("scraper not found")
|
||||||
|
|
||||||
|
// ErrNotSupported is returned when a given invocation isn't supported, and there
|
||||||
|
// is a guard function which should be able to guard against it.
|
||||||
|
ErrNotSupported = errors.New("scraper operation not supported")
|
||||||
|
)
|
||||||
|
|
||||||
|
// Input coalesces inputs of different types into a single structure.
|
||||||
|
// The system expects one of these to be set, and the remaining to be
|
||||||
|
// set to nil.
|
||||||
|
type Input struct {
|
||||||
|
Performer *models.ScrapedPerformerInput
|
||||||
|
Scene *models.ScrapedSceneInput
|
||||||
|
Gallery *models.ScrapedGalleryInput
|
||||||
}
|
}
|
||||||
|
|
||||||
type performerScraper interface {
|
// simple type definitions that can help customize
|
||||||
scrapeByName(name string) ([]*models.ScrapedPerformer, error)
|
// actions per query
|
||||||
scrapeByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error)
|
type QueryType int
|
||||||
scrapeByURL(url string) (*models.ScrapedPerformer, error)
|
|
||||||
|
const (
|
||||||
|
// for now only SearchQuery is needed
|
||||||
|
SearchQuery QueryType = iota + 1
|
||||||
|
)
|
||||||
|
|
||||||
|
// scraper is the generic interface to the scraper subsystems
|
||||||
|
type scraper interface {
|
||||||
|
// spec returns the scraper specification, suitable for graphql
|
||||||
|
spec() models.Scraper
|
||||||
|
// supports tests if the scraper supports a given content type
|
||||||
|
supports(models.ScrapeContentType) bool
|
||||||
|
// supportsURL tests if the scraper supports scrapes of a given url, producing a given content type
|
||||||
|
supportsURL(url string, ty models.ScrapeContentType) bool
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// urlScraper is the interface of scrapers supporting url loads
|
||||||
|
type urlScraper interface {
|
||||||
|
scraper
|
||||||
|
|
||||||
|
viaURL(ctx context.Context, client *http.Client, url string, ty models.ScrapeContentType) (models.ScrapedContent, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// nameScraper is the interface of scrapers supporting name loads
|
||||||
|
type nameScraper interface {
|
||||||
|
scraper
|
||||||
|
|
||||||
|
viaName(ctx context.Context, client *http.Client, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// fragmentScraper is the interface of scrapers supporting fragment loads
|
||||||
|
type fragmentScraper interface {
|
||||||
|
scraper
|
||||||
|
|
||||||
|
viaFragment(ctx context.Context, client *http.Client, input Input) (models.ScrapedContent, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// sceneScraper is a scraper which supports scene scrapes with
|
||||||
|
// scene data as the input.
|
||||||
type sceneScraper interface {
|
type sceneScraper interface {
|
||||||
scrapeByName(name string) ([]*models.ScrapedScene, error)
|
scraper
|
||||||
scrapeByScene(scene *models.Scene) (*models.ScrapedScene, error)
|
|
||||||
scrapeByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error)
|
viaScene(ctx context.Context, client *http.Client, scene *models.Scene) (*models.ScrapedScene, error)
|
||||||
scrapeByURL(url string) (*models.ScrapedScene, error)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// galleryScraper is a scraper which supports gallery scrapes with
|
||||||
|
// gallery data as the input.
|
||||||
type galleryScraper interface {
|
type galleryScraper interface {
|
||||||
scrapeByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error)
|
scraper
|
||||||
scrapeByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error)
|
|
||||||
scrapeByURL(url string) (*models.ScrapedGallery, error)
|
viaGallery(ctx context.Context, client *http.Client, gallery *models.Gallery) (*models.ScrapedGallery, error)
|
||||||
}
|
|
||||||
|
|
||||||
type movieScraper interface {
|
|
||||||
scrapeByURL(url string) (*models.ScrapedMovie, error)
|
|
||||||
}
|
|
||||||
|
|
||||||
type scraper struct {
|
|
||||||
ID string
|
|
||||||
Spec *models.Scraper
|
|
||||||
|
|
||||||
Performer performerScraper
|
|
||||||
Scene sceneScraper
|
|
||||||
Gallery galleryScraper
|
|
||||||
Movie movieScraper
|
|
||||||
}
|
|
||||||
|
|
||||||
func matchesURL(maybeURLMatcher interface{}, url string) bool {
|
|
||||||
if maybeURLMatcher != nil {
|
|
||||||
matcher, ok := maybeURLMatcher.(urlMatcher)
|
|
||||||
if ok {
|
|
||||||
return matcher.matchesURL(url)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return false
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,660 +0,0 @@
|
|||||||
package scraper
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"crypto/tls"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"net/http"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"regexp"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/stashapp/stash/pkg/logger"
|
|
||||||
stash_config "github.com/stashapp/stash/pkg/manager/config"
|
|
||||||
"github.com/stashapp/stash/pkg/match"
|
|
||||||
"github.com/stashapp/stash/pkg/models"
|
|
||||||
"github.com/stashapp/stash/pkg/utils"
|
|
||||||
)
|
|
||||||
|
|
||||||
var ErrMaxRedirects = errors.New("maximum number of HTTP redirects reached")
|
|
||||||
|
|
||||||
const (
|
|
||||||
// scrapeGetTimeout is the timeout for scraper HTTP requests. Includes transfer time.
|
|
||||||
// We may want to bump this at some point and use local context-timeouts if more granularity
|
|
||||||
// is needed.
|
|
||||||
scrapeGetTimeout = time.Second * 60
|
|
||||||
|
|
||||||
// maxIdleConnsPerHost is the maximum number of idle connections the HTTP client will
|
|
||||||
// keep on a per-host basis.
|
|
||||||
maxIdleConnsPerHost = 8
|
|
||||||
|
|
||||||
// maxRedirects defines the maximum number of redirects the HTTP client will follow
|
|
||||||
maxRedirects = 20
|
|
||||||
)
|
|
||||||
|
|
||||||
// GlobalConfig contains the global scraper options.
|
|
||||||
type GlobalConfig interface {
|
|
||||||
GetScraperUserAgent() string
|
|
||||||
GetScrapersPath() string
|
|
||||||
GetScraperCDPPath() string
|
|
||||||
GetScraperCertCheck() bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func isCDPPathHTTP(c GlobalConfig) bool {
|
|
||||||
return strings.HasPrefix(c.GetScraperCDPPath(), "http://") || strings.HasPrefix(c.GetScraperCDPPath(), "https://")
|
|
||||||
}
|
|
||||||
|
|
||||||
func isCDPPathWS(c GlobalConfig) bool {
|
|
||||||
return strings.HasPrefix(c.GetScraperCDPPath(), "ws://")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Cache stores scraper details.
|
|
||||||
type Cache struct {
|
|
||||||
client *http.Client
|
|
||||||
scrapers []scraper
|
|
||||||
globalConfig GlobalConfig
|
|
||||||
txnManager models.TransactionManager
|
|
||||||
}
|
|
||||||
|
|
||||||
// newClient creates a scraper-local http client we use throughout the scraper subsystem.
|
|
||||||
func newClient(gc GlobalConfig) *http.Client {
|
|
||||||
client := &http.Client{
|
|
||||||
Transport: &http.Transport{ // ignore insecure certificates
|
|
||||||
TLSClientConfig: &tls.Config{InsecureSkipVerify: !gc.GetScraperCertCheck()},
|
|
||||||
MaxIdleConnsPerHost: maxIdleConnsPerHost,
|
|
||||||
},
|
|
||||||
Timeout: scrapeGetTimeout,
|
|
||||||
// defaultCheckRedirect code with max changed from 10 to maxRedirects
|
|
||||||
CheckRedirect: func(req *http.Request, via []*http.Request) error {
|
|
||||||
if len(via) >= maxRedirects {
|
|
||||||
return fmt.Errorf("after %d redirects: %w", maxRedirects, ErrMaxRedirects)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
return client
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewCache returns a new Cache loading scraper configurations from the
|
|
||||||
// scraper path provided in the global config object. It returns a new
|
|
||||||
// instance and an error if the scraper directory could not be loaded.
|
|
||||||
//
|
|
||||||
// Scraper configurations are loaded from yml files in the provided scrapers
|
|
||||||
// directory and any subdirectories.
|
|
||||||
func NewCache(globalConfig GlobalConfig, txnManager models.TransactionManager) (*Cache, error) {
|
|
||||||
// HTTP Client setup
|
|
||||||
client := newClient(globalConfig)
|
|
||||||
|
|
||||||
scrapers, err := loadScrapers(globalConfig, client, txnManager)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return &Cache{
|
|
||||||
client: client,
|
|
||||||
globalConfig: globalConfig,
|
|
||||||
scrapers: scrapers,
|
|
||||||
txnManager: txnManager,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func loadScrapers(globalConfig GlobalConfig, client *http.Client, txnManager models.TransactionManager) ([]scraper, error) {
|
|
||||||
path := globalConfig.GetScrapersPath()
|
|
||||||
scrapers := make([]scraper, 0)
|
|
||||||
|
|
||||||
logger.Debugf("Reading scraper configs from %s", path)
|
|
||||||
scraperFiles := []string{}
|
|
||||||
err := utils.SymWalk(path, func(fp string, f os.FileInfo, err error) error {
|
|
||||||
if filepath.Ext(fp) == ".yml" {
|
|
||||||
scraperFiles = append(scraperFiles, fp)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
})
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
logger.Errorf("Error reading scraper configs: %s", err.Error())
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// add built-in freeones scraper
|
|
||||||
scrapers = append(scrapers, getFreeonesScraper(client, txnManager, globalConfig), getAutoTagScraper(txnManager, globalConfig))
|
|
||||||
|
|
||||||
for _, file := range scraperFiles {
|
|
||||||
c, err := loadConfigFromYAMLFile(file)
|
|
||||||
if err != nil {
|
|
||||||
logger.Errorf("Error loading scraper %s: %s", file, err.Error())
|
|
||||||
} else {
|
|
||||||
scraper := createScraperFromConfig(*c, client, txnManager, globalConfig)
|
|
||||||
scrapers = append(scrapers, scraper)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return scrapers, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// ReloadScrapers clears the scraper cache and reloads from the scraper path.
|
|
||||||
// In the event of an error during loading, the cache will be left empty.
|
|
||||||
func (c *Cache) ReloadScrapers() error {
|
|
||||||
c.scrapers = nil
|
|
||||||
scrapers, err := loadScrapers(c.globalConfig, c.client, c.txnManager)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
c.scrapers = scrapers
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO - don't think this is needed
|
|
||||||
// UpdateConfig updates the global config for the cache. If the scraper path
|
|
||||||
// has changed, ReloadScrapers will need to be called separately.
|
|
||||||
func (c *Cache) UpdateConfig(globalConfig GlobalConfig) {
|
|
||||||
c.globalConfig = globalConfig
|
|
||||||
}
|
|
||||||
|
|
||||||
// ListPerformerScrapers returns a list of scrapers that are capable of
|
|
||||||
// scraping performers.
|
|
||||||
func (c Cache) ListPerformerScrapers() []*models.Scraper {
|
|
||||||
var ret []*models.Scraper
|
|
||||||
for _, s := range c.scrapers {
|
|
||||||
// filter on type
|
|
||||||
if s.Performer != nil {
|
|
||||||
ret = append(ret, s.Spec)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret
|
|
||||||
}
|
|
||||||
|
|
||||||
// ListSceneScrapers returns a list of scrapers that are capable of
|
|
||||||
// scraping scenes.
|
|
||||||
func (c Cache) ListSceneScrapers() []*models.Scraper {
|
|
||||||
var ret []*models.Scraper
|
|
||||||
for _, s := range c.scrapers {
|
|
||||||
// filter on type
|
|
||||||
if s.Scene != nil {
|
|
||||||
ret = append(ret, s.Spec)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret
|
|
||||||
}
|
|
||||||
|
|
||||||
// ListGalleryScrapers returns a list of scrapers that are capable of
|
|
||||||
// scraping galleries.
|
|
||||||
func (c Cache) ListGalleryScrapers() []*models.Scraper {
|
|
||||||
var ret []*models.Scraper
|
|
||||||
for _, s := range c.scrapers {
|
|
||||||
// filter on type
|
|
||||||
if s.Gallery != nil {
|
|
||||||
ret = append(ret, s.Spec)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret
|
|
||||||
}
|
|
||||||
|
|
||||||
// ListMovieScrapers returns a list of scrapers that are capable of
|
|
||||||
// scraping scenes.
|
|
||||||
func (c Cache) ListMovieScrapers() []*models.Scraper {
|
|
||||||
var ret []*models.Scraper
|
|
||||||
for _, s := range c.scrapers {
|
|
||||||
// filter on type
|
|
||||||
if s.Movie != nil {
|
|
||||||
ret = append(ret, s.Spec)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetScraper returns the scraper matching the provided id.
|
|
||||||
func (c Cache) GetScraper(scraperID string) *models.Scraper {
|
|
||||||
ret := c.findScraper(scraperID)
|
|
||||||
if ret != nil {
|
|
||||||
return ret.Spec
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c Cache) findScraper(scraperID string) *scraper {
|
|
||||||
for _, s := range c.scrapers {
|
|
||||||
if s.ID == scraperID {
|
|
||||||
return &s
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// ScrapePerformerList uses the scraper with the provided ID to query for
|
|
||||||
// performers using the provided query string. It returns a list of
|
|
||||||
// scraped performer data.
|
|
||||||
func (c Cache) ScrapePerformerList(scraperID string, query string) ([]*models.ScrapedPerformer, error) {
|
|
||||||
// find scraper with the provided id
|
|
||||||
s := c.findScraper(scraperID)
|
|
||||||
if s != nil && s.Performer != nil {
|
|
||||||
return s.Performer.scrapeByName(query)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, errors.New("Scraper with ID " + scraperID + " not found")
|
|
||||||
}
|
|
||||||
|
|
||||||
// ScrapePerformer uses the scraper with the provided ID to scrape a
|
|
||||||
// performer using the provided performer fragment.
|
|
||||||
func (c Cache) ScrapePerformer(scraperID string, scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
|
|
||||||
// find scraper with the provided id
|
|
||||||
s := c.findScraper(scraperID)
|
|
||||||
if s != nil && s.Performer != nil {
|
|
||||||
ret, err := s.Performer.scrapeByFragment(scrapedPerformer)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if ret != nil {
|
|
||||||
err = c.postScrapePerformer(context.TODO(), ret)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, errors.New("Scraper with ID " + scraperID + " not found")
|
|
||||||
}
|
|
||||||
|
|
||||||
// ScrapePerformerURL uses the first scraper it finds that matches the URL
|
|
||||||
// provided to scrape a performer. If no scrapers are found that matches
|
|
||||||
// the URL, then nil is returned.
|
|
||||||
func (c Cache) ScrapePerformerURL(url string) (*models.ScrapedPerformer, error) {
|
|
||||||
for _, s := range c.scrapers {
|
|
||||||
if matchesURL(s.Performer, url) {
|
|
||||||
ret, err := s.Performer.scrapeByURL(url)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if ret != nil {
|
|
||||||
err = c.postScrapePerformer(context.TODO(), ret)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret, nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c Cache) postScrapePerformer(ctx context.Context, ret *models.ScrapedPerformer) error {
|
|
||||||
if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
|
|
||||||
tqb := r.Tag()
|
|
||||||
|
|
||||||
tags, err := postProcessTags(tqb, ret.Tags)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
ret.Tags = tags
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// post-process - set the image if applicable
|
|
||||||
if err := setPerformerImage(ctx, c.client, ret, c.globalConfig); err != nil {
|
|
||||||
logger.Warnf("Could not set image using URL %s: %s", *ret.Image, err.Error())
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c Cache) postScrapeScenePerformer(ret *models.ScrapedPerformer) error {
|
|
||||||
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
|
||||||
tqb := r.Tag()
|
|
||||||
|
|
||||||
tags, err := postProcessTags(tqb, ret.Tags)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
ret.Tags = tags
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c Cache) postScrapeScene(ctx context.Context, ret *models.ScrapedScene) error {
|
|
||||||
if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
|
|
||||||
pqb := r.Performer()
|
|
||||||
mqb := r.Movie()
|
|
||||||
tqb := r.Tag()
|
|
||||||
sqb := r.Studio()
|
|
||||||
|
|
||||||
for _, p := range ret.Performers {
|
|
||||||
if err := c.postScrapeScenePerformer(p); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := match.ScrapedPerformer(pqb, p, nil); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, p := range ret.Movies {
|
|
||||||
err := match.ScrapedMovie(mqb, p)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
tags, err := postProcessTags(tqb, ret.Tags)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
ret.Tags = tags
|
|
||||||
|
|
||||||
if ret.Studio != nil {
|
|
||||||
err := match.ScrapedStudio(sqb, ret.Studio, nil)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// post-process - set the image if applicable
|
|
||||||
if err := setSceneImage(ctx, c.client, ret, c.globalConfig); err != nil {
|
|
||||||
logger.Warnf("Could not set image using URL %s: %v", *ret.Image, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c Cache) postScrapeGallery(ret *models.ScrapedGallery) error {
|
|
||||||
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
|
||||||
pqb := r.Performer()
|
|
||||||
tqb := r.Tag()
|
|
||||||
sqb := r.Studio()
|
|
||||||
|
|
||||||
for _, p := range ret.Performers {
|
|
||||||
err := match.ScrapedPerformer(pqb, p, nil)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
tags, err := postProcessTags(tqb, ret.Tags)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
ret.Tags = tags
|
|
||||||
|
|
||||||
if ret.Studio != nil {
|
|
||||||
err := match.ScrapedStudio(sqb, ret.Studio, nil)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// ScrapeScene uses the scraper with the provided ID to scrape a scene using existing data.
|
|
||||||
func (c Cache) ScrapeScene(scraperID string, sceneID int) (*models.ScrapedScene, error) {
|
|
||||||
// find scraper with the provided id
|
|
||||||
s := c.findScraper(scraperID)
|
|
||||||
if s != nil && s.Scene != nil {
|
|
||||||
// get scene from id
|
|
||||||
scene, err := getScene(sceneID, c.txnManager)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
ret, err := s.Scene.scrapeByScene(scene)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if ret != nil {
|
|
||||||
err = c.postScrapeScene(context.TODO(), ret)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, errors.New("Scraper with ID " + scraperID + " not found")
|
|
||||||
}
|
|
||||||
|
|
||||||
// ScrapeSceneQuery uses the scraper with the provided ID to query for
|
|
||||||
// scenes using the provided query string. It returns a list of
|
|
||||||
// scraped scene data.
|
|
||||||
func (c Cache) ScrapeSceneQuery(scraperID string, query string) ([]*models.ScrapedScene, error) {
|
|
||||||
// find scraper with the provided id
|
|
||||||
s := c.findScraper(scraperID)
|
|
||||||
if s != nil && s.Scene != nil {
|
|
||||||
return s.Scene.scrapeByName(query)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, errors.New("Scraper with ID " + scraperID + " not found")
|
|
||||||
}
|
|
||||||
|
|
||||||
// ScrapeSceneFragment uses the scraper with the provided ID to scrape a scene.
|
|
||||||
func (c Cache) ScrapeSceneFragment(scraperID string, scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
|
|
||||||
// find scraper with the provided id
|
|
||||||
s := c.findScraper(scraperID)
|
|
||||||
if s != nil && s.Scene != nil {
|
|
||||||
ret, err := s.Scene.scrapeByFragment(scene)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if ret != nil {
|
|
||||||
err = c.postScrapeScene(context.TODO(), ret)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, errors.New("Scraper with ID " + scraperID + " not found")
|
|
||||||
}
|
|
||||||
|
|
||||||
// ScrapeSceneURL uses the first scraper it finds that matches the URL
|
|
||||||
// provided to scrape a scene. If no scrapers are found that matches
|
|
||||||
// the URL, then nil is returned.
|
|
||||||
func (c Cache) ScrapeSceneURL(url string) (*models.ScrapedScene, error) {
|
|
||||||
for _, s := range c.scrapers {
|
|
||||||
if matchesURL(s.Scene, url) {
|
|
||||||
ret, err := s.Scene.scrapeByURL(url)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
err = c.postScrapeScene(context.TODO(), ret)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret, nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// ScrapeGallery uses the scraper with the provided ID to scrape a gallery using existing data.
|
|
||||||
func (c Cache) ScrapeGallery(scraperID string, galleryID int) (*models.ScrapedGallery, error) {
|
|
||||||
s := c.findScraper(scraperID)
|
|
||||||
if s != nil && s.Gallery != nil {
|
|
||||||
// get gallery from id
|
|
||||||
gallery, err := getGallery(galleryID, c.txnManager)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
ret, err := s.Gallery.scrapeByGallery(gallery)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if ret != nil {
|
|
||||||
err = c.postScrapeGallery(ret)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, errors.New("Scraped with ID " + scraperID + " not found")
|
|
||||||
}
|
|
||||||
|
|
||||||
// ScrapeGalleryFragment uses the scraper with the provided ID to scrape a gallery.
|
|
||||||
func (c Cache) ScrapeGalleryFragment(scraperID string, gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
|
|
||||||
s := c.findScraper(scraperID)
|
|
||||||
if s != nil && s.Gallery != nil {
|
|
||||||
ret, err := s.Gallery.scrapeByFragment(gallery)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if ret != nil {
|
|
||||||
err = c.postScrapeGallery(ret)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, errors.New("Scraped with ID " + scraperID + " not found")
|
|
||||||
}
|
|
||||||
|
|
||||||
// ScrapeGalleryURL uses the first scraper it finds that matches the URL
|
|
||||||
// provided to scrape a scene. If no scrapers are found that matches
|
|
||||||
// the URL, then nil is returned.
|
|
||||||
func (c Cache) ScrapeGalleryURL(url string) (*models.ScrapedGallery, error) {
|
|
||||||
for _, s := range c.scrapers {
|
|
||||||
if matchesURL(s.Gallery, url) {
|
|
||||||
ret, err := s.Gallery.scrapeByURL(url)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
err = c.postScrapeGallery(ret)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret, nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// ScrapeMovieURL uses the first scraper it finds that matches the URL
|
|
||||||
// provided to scrape a movie. If no scrapers are found that matches
|
|
||||||
// the URL, then nil is returned.
|
|
||||||
func (c Cache) ScrapeMovieURL(url string) (*models.ScrapedMovie, error) {
|
|
||||||
for _, s := range c.scrapers {
|
|
||||||
if s.Movie != nil && matchesURL(s.Movie, url) {
|
|
||||||
ret, err := s.Movie.scrapeByURL(url)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if ret.Studio != nil {
|
|
||||||
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
|
||||||
return match.ScrapedStudio(r.Studio(), ret.Studio, nil)
|
|
||||||
}); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// post-process - set the image if applicable
|
|
||||||
if err := setMovieFrontImage(context.TODO(), c.client, ret, c.globalConfig); err != nil {
|
|
||||||
logger.Warnf("Could not set front image using URL %s: %s", *ret.FrontImage, err.Error())
|
|
||||||
}
|
|
||||||
if err := setMovieBackImage(context.TODO(), c.client, ret, c.globalConfig); err != nil {
|
|
||||||
logger.Warnf("Could not set back image using URL %s: %s", *ret.BackImage, err.Error())
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret, nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func postProcessTags(tqb models.TagReader, scrapedTags []*models.ScrapedTag) ([]*models.ScrapedTag, error) {
|
|
||||||
var ret []*models.ScrapedTag
|
|
||||||
|
|
||||||
excludePatterns := stash_config.GetInstance().GetScraperExcludeTagPatterns()
|
|
||||||
var excludeRegexps []*regexp.Regexp
|
|
||||||
|
|
||||||
for _, excludePattern := range excludePatterns {
|
|
||||||
reg, err := regexp.Compile(strings.ToLower(excludePattern))
|
|
||||||
if err != nil {
|
|
||||||
logger.Errorf("Invalid tag exclusion pattern :%v", err)
|
|
||||||
} else {
|
|
||||||
excludeRegexps = append(excludeRegexps, reg)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var ignoredTags []string
|
|
||||||
ScrapeTag:
|
|
||||||
for _, t := range scrapedTags {
|
|
||||||
for _, reg := range excludeRegexps {
|
|
||||||
if reg.MatchString(strings.ToLower(t.Name)) {
|
|
||||||
ignoredTags = append(ignoredTags, t.Name)
|
|
||||||
continue ScrapeTag
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
err := match.ScrapedTag(tqb, t)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
ret = append(ret, t)
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(ignoredTags) > 0 {
|
|
||||||
logger.Infof("Scraping ignored tags: %s", strings.Join(ignoredTags, ", "))
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret, nil
|
|
||||||
}
|
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
package scraper
|
package scraper
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
@@ -13,6 +14,8 @@ import (
|
|||||||
"github.com/stashapp/stash/pkg/models"
|
"github.com/stashapp/stash/pkg/models"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
var ErrScraperScript = errors.New("scraper script error")
|
||||||
|
|
||||||
type scriptScraper struct {
|
type scriptScraper struct {
|
||||||
scraper scraperTypeConfig
|
scraper scraperTypeConfig
|
||||||
config config
|
config config
|
||||||
@@ -73,65 +76,122 @@ func (s *scriptScraper) runScraperScript(inString string, out interface{}) error
|
|||||||
logger.Debugf("Scraper script <%s> started", strings.Join(cmd.Args, " "))
|
logger.Debugf("Scraper script <%s> started", strings.Join(cmd.Args, " "))
|
||||||
|
|
||||||
// TODO - add a timeout here
|
// TODO - add a timeout here
|
||||||
decodeErr := json.NewDecoder(stdout).Decode(out)
|
// Make a copy of stdout here. This allows us to decode it twice.
|
||||||
if decodeErr != nil {
|
var sb strings.Builder
|
||||||
logger.Error("could not unmarshal json: " + decodeErr.Error())
|
tr := io.TeeReader(stdout, &sb)
|
||||||
return errors.New("could not unmarshal json: " + decodeErr.Error())
|
|
||||||
|
// First, perform a decode where unknown fields are disallowed.
|
||||||
|
d := json.NewDecoder(tr)
|
||||||
|
d.DisallowUnknownFields()
|
||||||
|
strictErr := d.Decode(out)
|
||||||
|
|
||||||
|
if strictErr != nil {
|
||||||
|
// The decode failed for some reason, use the built string
|
||||||
|
// and allow unknown fields in the decode.
|
||||||
|
s := sb.String()
|
||||||
|
lenientErr := json.NewDecoder(strings.NewReader(s)).Decode(out)
|
||||||
|
if lenientErr != nil {
|
||||||
|
// The error is genuine, so return it
|
||||||
|
logger.Errorf("could not unmarshal json from script output: %v", lenientErr)
|
||||||
|
return fmt.Errorf("could not unmarshal json from script output: %w", lenientErr)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Lenient decode succeeded, print a warning, but use the decode
|
||||||
|
logger.Warnf("reading script result: %v", strictErr)
|
||||||
}
|
}
|
||||||
|
|
||||||
err = cmd.Wait()
|
err = cmd.Wait()
|
||||||
logger.Debugf("Scraper script finished")
|
logger.Debugf("Scraper script finished")
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.New("error running scraper script")
|
return fmt.Errorf("%w: %v", ErrScraperScript, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *scriptScraper) scrapePerformersByName(name string) ([]*models.ScrapedPerformer, error) {
|
func (s *scriptScraper) scrapeByName(ctx context.Context, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error) {
|
||||||
inString := `{"name": "` + name + `"}`
|
input := `{"name": "` + name + `"}`
|
||||||
|
|
||||||
var performers []models.ScrapedPerformer
|
var ret []models.ScrapedContent
|
||||||
|
var err error
|
||||||
err := s.runScraperScript(inString, &performers)
|
switch ty {
|
||||||
|
case models.ScrapeContentTypePerformer:
|
||||||
// convert to pointers
|
var performers []models.ScrapedPerformer
|
||||||
var ret []*models.ScrapedPerformer
|
err = s.runScraperScript(input, &performers)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
for i := 0; i < len(performers); i++ {
|
for _, p := range performers {
|
||||||
ret = append(ret, &performers[i])
|
v := p
|
||||||
|
ret = append(ret, &v)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
case models.ScrapeContentTypeScene:
|
||||||
|
var scenes []models.ScrapedScene
|
||||||
|
err = s.runScraperScript(input, &scenes)
|
||||||
|
if err == nil {
|
||||||
|
for _, s := range scenes {
|
||||||
|
v := s
|
||||||
|
ret = append(ret, &v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return nil, ErrNotSupported
|
||||||
}
|
}
|
||||||
|
|
||||||
return ret, err
|
return ret, err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *scriptScraper) scrapePerformerByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
|
func (s *scriptScraper) scrapeByFragment(ctx context.Context, input Input) (models.ScrapedContent, error) {
|
||||||
inString, err := json.Marshal(scrapedPerformer)
|
var inString []byte
|
||||||
|
var err error
|
||||||
|
var ty models.ScrapeContentType
|
||||||
|
switch {
|
||||||
|
case input.Performer != nil:
|
||||||
|
inString, err = json.Marshal(*input.Performer)
|
||||||
|
ty = models.ScrapeContentTypePerformer
|
||||||
|
case input.Gallery != nil:
|
||||||
|
inString, err = json.Marshal(*input.Gallery)
|
||||||
|
ty = models.ScrapeContentTypeGallery
|
||||||
|
case input.Scene != nil:
|
||||||
|
inString, err = json.Marshal(*input.Scene)
|
||||||
|
ty = models.ScrapeContentTypeScene
|
||||||
|
}
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
var ret models.ScrapedPerformer
|
return s.scrape(ctx, string(inString), ty)
|
||||||
|
|
||||||
err = s.runScraperScript(string(inString), &ret)
|
|
||||||
|
|
||||||
return &ret, err
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *scriptScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) {
|
func (s *scriptScraper) scrapeByURL(ctx context.Context, url string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
|
||||||
inString := `{"url": "` + url + `"}`
|
return s.scrape(ctx, `{"url": "`+url+`"}`, ty)
|
||||||
|
|
||||||
var ret models.ScrapedPerformer
|
|
||||||
|
|
||||||
err := s.runScraperScript(string(inString), &ret)
|
|
||||||
|
|
||||||
return &ret, err
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *scriptScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) {
|
func (s *scriptScraper) scrape(ctx context.Context, input string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
|
||||||
|
switch ty {
|
||||||
|
case models.ScrapeContentTypePerformer:
|
||||||
|
var performer models.ScrapedPerformer
|
||||||
|
err := s.runScraperScript(input, &performer)
|
||||||
|
return &performer, err
|
||||||
|
case models.ScrapeContentTypeGallery:
|
||||||
|
var gallery models.ScrapedGallery
|
||||||
|
err := s.runScraperScript(input, &gallery)
|
||||||
|
return &gallery, err
|
||||||
|
case models.ScrapeContentTypeScene:
|
||||||
|
var scene models.ScrapedScene
|
||||||
|
err := s.runScraperScript(input, &scene)
|
||||||
|
return &scene, err
|
||||||
|
case models.ScrapeContentTypeMovie:
|
||||||
|
var movie models.ScrapedMovie
|
||||||
|
err := s.runScraperScript(input, &movie)
|
||||||
|
return &movie, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, ErrNotSupported
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *scriptScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) {
|
||||||
inString, err := json.Marshal(sceneToUpdateInput(scene))
|
inString, err := json.Marshal(sceneToUpdateInput(scene))
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -145,39 +205,7 @@ func (s *scriptScraper) scrapeSceneByScene(scene *models.Scene) (*models.Scraped
|
|||||||
return &ret, err
|
return &ret, err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *scriptScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, error) {
|
func (s *scriptScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) {
|
||||||
inString := `{"name": "` + name + `"}`
|
|
||||||
|
|
||||||
var scenes []models.ScrapedScene
|
|
||||||
|
|
||||||
err := s.runScraperScript(inString, &scenes)
|
|
||||||
|
|
||||||
// convert to pointers
|
|
||||||
var ret []*models.ScrapedScene
|
|
||||||
if err == nil {
|
|
||||||
for i := 0; i < len(scenes); i++ {
|
|
||||||
ret = append(ret, &scenes[i])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret, err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *scriptScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
|
|
||||||
inString, err := json.Marshal(scene)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
var ret models.ScrapedScene
|
|
||||||
|
|
||||||
err = s.runScraperScript(string(inString), &ret)
|
|
||||||
|
|
||||||
return &ret, err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *scriptScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
|
|
||||||
inString, err := json.Marshal(galleryToUpdateInput(gallery))
|
inString, err := json.Marshal(galleryToUpdateInput(gallery))
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -191,50 +219,6 @@ func (s *scriptScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models
|
|||||||
return &ret, err
|
return &ret, err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *scriptScraper) scrapeGalleryByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
|
|
||||||
inString, err := json.Marshal(gallery)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
var ret models.ScrapedGallery
|
|
||||||
|
|
||||||
err = s.runScraperScript(string(inString), &ret)
|
|
||||||
|
|
||||||
return &ret, err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *scriptScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error) {
|
|
||||||
inString := `{"url": "` + url + `"}`
|
|
||||||
|
|
||||||
var ret models.ScrapedScene
|
|
||||||
|
|
||||||
err := s.runScraperScript(string(inString), &ret)
|
|
||||||
|
|
||||||
return &ret, err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *scriptScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) {
|
|
||||||
inString := `{"url": "` + url + `"}`
|
|
||||||
|
|
||||||
var ret models.ScrapedGallery
|
|
||||||
|
|
||||||
err := s.runScraperScript(string(inString), &ret)
|
|
||||||
|
|
||||||
return &ret, err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *scriptScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) {
|
|
||||||
inString := `{"url": "` + url + `"}`
|
|
||||||
|
|
||||||
var ret models.ScrapedMovie
|
|
||||||
|
|
||||||
err := s.runScraperScript(string(inString), &ret)
|
|
||||||
|
|
||||||
return &ret, err
|
|
||||||
}
|
|
||||||
|
|
||||||
func findPythonExecutable() (string, error) {
|
func findPythonExecutable() (string, error) {
|
||||||
_, err := exec.LookPath("python3")
|
_, err := exec.LookPath("python3")
|
||||||
|
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ package scraper
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"database/sql"
|
"database/sql"
|
||||||
"errors"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
"strconv"
|
"strconv"
|
||||||
|
|
||||||
@@ -54,37 +54,6 @@ type stashFindPerformerNamesResultType struct {
|
|||||||
Performers []*stashFindPerformerNamePerformer `graphql:"performers"`
|
Performers []*stashFindPerformerNamePerformer `graphql:"performers"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *stashScraper) scrapePerformersByName(name string) ([]*models.ScrapedPerformer, error) {
|
|
||||||
client := s.getStashClient()
|
|
||||||
|
|
||||||
var q struct {
|
|
||||||
FindPerformers stashFindPerformerNamesResultType `graphql:"findPerformers(filter: $f)"`
|
|
||||||
}
|
|
||||||
|
|
||||||
page := 1
|
|
||||||
perPage := 10
|
|
||||||
|
|
||||||
vars := map[string]interface{}{
|
|
||||||
"f": models.FindFilterType{
|
|
||||||
Q: &name,
|
|
||||||
Page: &page,
|
|
||||||
PerPage: &perPage,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
err := client.Query(context.TODO(), &q, vars)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
var ret []*models.ScrapedPerformer
|
|
||||||
for _, p := range q.FindPerformers.Performers {
|
|
||||||
ret = append(ret, p.toPerformer())
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// need a separate for scraped stash performers - does not include remote_site_id or image
|
// need a separate for scraped stash performers - does not include remote_site_id or image
|
||||||
type scrapedTagStash struct {
|
type scrapedTagStash struct {
|
||||||
Name string `graphql:"name" json:"name"`
|
Name string `graphql:"name" json:"name"`
|
||||||
@@ -114,7 +83,17 @@ type scrapedPerformerStash struct {
|
|||||||
Weight *string `graphql:"weight" json:"weight"`
|
Weight *string `graphql:"weight" json:"weight"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *stashScraper) scrapePerformerByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
|
func (s *stashScraper) scrapeByFragment(ctx context.Context, input Input) (models.ScrapedContent, error) {
|
||||||
|
if input.Gallery != nil || input.Scene != nil {
|
||||||
|
return nil, fmt.Errorf("%w: using stash scraper as a fragment scraper", ErrNotSupported)
|
||||||
|
}
|
||||||
|
|
||||||
|
if input.Performer == nil {
|
||||||
|
return nil, fmt.Errorf("%w: the given performer is nil", ErrNotSupported)
|
||||||
|
}
|
||||||
|
|
||||||
|
scrapedPerformer := input.Performer
|
||||||
|
|
||||||
client := s.getStashClient()
|
client := s.getStashClient()
|
||||||
|
|
||||||
var q struct {
|
var q struct {
|
||||||
@@ -128,7 +107,7 @@ func (s *stashScraper) scrapePerformerByFragment(scrapedPerformer models.Scraped
|
|||||||
"f": performerID,
|
"f": performerID,
|
||||||
}
|
}
|
||||||
|
|
||||||
err := client.Query(context.TODO(), &q, vars)
|
err := client.Query(ctx, &q, vars)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -141,7 +120,7 @@ func (s *stashScraper) scrapePerformerByFragment(scrapedPerformer models.Scraped
|
|||||||
}
|
}
|
||||||
|
|
||||||
// get the performer image directly
|
// get the performer image directly
|
||||||
ret.Image, err = getStashPerformerImage(context.TODO(), s.config.StashServer.URL, performerID, s.client, s.globalConfig)
|
ret.Image, err = getStashPerformerImage(ctx, s.config.StashServer.URL, performerID, s.client, s.globalConfig)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -159,7 +138,7 @@ type stashFindSceneNamesResultType struct {
|
|||||||
Scenes []*scrapedSceneStash `graphql:"scenes"`
|
Scenes []*scrapedSceneStash `graphql:"scenes"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *stashScraper) scrapedStashSceneToScrapedScene(scene *scrapedSceneStash) (*models.ScrapedScene, error) {
|
func (s *stashScraper) scrapedStashSceneToScrapedScene(ctx context.Context, scene *scrapedSceneStash) (*models.ScrapedScene, error) {
|
||||||
ret := models.ScrapedScene{}
|
ret := models.ScrapedScene{}
|
||||||
err := copier.Copy(&ret, scene)
|
err := copier.Copy(&ret, scene)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -167,7 +146,7 @@ func (s *stashScraper) scrapedStashSceneToScrapedScene(scene *scrapedSceneStash)
|
|||||||
}
|
}
|
||||||
|
|
||||||
// get the performer image directly
|
// get the performer image directly
|
||||||
ret.Image, err = getStashSceneImage(context.TODO(), s.config.StashServer.URL, scene.ID, s.client, s.globalConfig)
|
ret.Image, err = getStashSceneImage(ctx, s.config.StashServer.URL, scene.ID, s.client, s.globalConfig)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -175,13 +154,9 @@ func (s *stashScraper) scrapedStashSceneToScrapedScene(scene *scrapedSceneStash)
|
|||||||
return &ret, nil
|
return &ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *stashScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, error) {
|
func (s *stashScraper) scrapeByName(ctx context.Context, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error) {
|
||||||
client := s.getStashClient()
|
client := s.getStashClient()
|
||||||
|
|
||||||
var q struct {
|
|
||||||
FindScenes stashFindSceneNamesResultType `graphql:"findScenes(filter: $f)"`
|
|
||||||
}
|
|
||||||
|
|
||||||
page := 1
|
page := 1
|
||||||
perPage := 10
|
perPage := 10
|
||||||
|
|
||||||
@@ -193,21 +168,45 @@ func (s *stashScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene,
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
err := client.Query(context.TODO(), &q, vars)
|
var ret []models.ScrapedContent
|
||||||
if err != nil {
|
switch ty {
|
||||||
return nil, err
|
case models.ScrapeContentTypeScene:
|
||||||
}
|
var q struct {
|
||||||
|
FindScenes stashFindSceneNamesResultType `graphql:"findScenes(filter: $f)"`
|
||||||
|
}
|
||||||
|
|
||||||
var ret []*models.ScrapedScene
|
err := client.Query(ctx, &q, vars)
|
||||||
for _, scene := range q.FindScenes.Scenes {
|
|
||||||
converted, err := s.scrapedStashSceneToScrapedScene(scene)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
ret = append(ret, converted)
|
|
||||||
|
for _, scene := range q.FindScenes.Scenes {
|
||||||
|
converted, err := s.scrapedStashSceneToScrapedScene(ctx, scene)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
ret = append(ret, converted)
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret, nil
|
||||||
|
case models.ScrapeContentTypePerformer:
|
||||||
|
var q struct {
|
||||||
|
FindPerformers stashFindPerformerNamesResultType `graphql:"findPerformers(filter: $f)"`
|
||||||
|
}
|
||||||
|
|
||||||
|
err := client.Query(ctx, &q, vars)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, p := range q.FindPerformers.Performers {
|
||||||
|
ret = append(ret, p.toPerformer())
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return ret, nil
|
return nil, ErrNotSupported
|
||||||
}
|
}
|
||||||
|
|
||||||
type scrapedSceneStash struct {
|
type scrapedSceneStash struct {
|
||||||
@@ -222,7 +221,7 @@ type scrapedSceneStash struct {
|
|||||||
Performers []*scrapedPerformerStash `graphql:"performers" json:"performers"`
|
Performers []*scrapedPerformerStash `graphql:"performers" json:"performers"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *stashScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) {
|
func (s *stashScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) {
|
||||||
// query by MD5
|
// query by MD5
|
||||||
var q struct {
|
var q struct {
|
||||||
FindScene *scrapedSceneStash `graphql:"findSceneByHash(input: $c)"`
|
FindScene *scrapedSceneStash `graphql:"findSceneByHash(input: $c)"`
|
||||||
@@ -243,18 +242,18 @@ func (s *stashScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedS
|
|||||||
}
|
}
|
||||||
|
|
||||||
client := s.getStashClient()
|
client := s.getStashClient()
|
||||||
if err := client.Query(context.TODO(), &q, vars); err != nil {
|
if err := client.Query(ctx, &q, vars); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// need to copy back to a scraped scene
|
// need to copy back to a scraped scene
|
||||||
ret, err := s.scrapedStashSceneToScrapedScene(q.FindScene)
|
ret, err := s.scrapedStashSceneToScrapedScene(ctx, q.FindScene)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// get the performer image directly
|
// get the performer image directly
|
||||||
ret.Image, err = getStashSceneImage(context.TODO(), s.config.StashServer.URL, q.FindScene.ID, s.client, s.globalConfig)
|
ret.Image, err = getStashSceneImage(ctx, s.config.StashServer.URL, q.FindScene.ID, s.client, s.globalConfig)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -262,10 +261,6 @@ func (s *stashScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedS
|
|||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *stashScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
|
|
||||||
return nil, errors.New("scrapeSceneByFragment not supported for stash scraper")
|
|
||||||
}
|
|
||||||
|
|
||||||
type scrapedGalleryStash struct {
|
type scrapedGalleryStash struct {
|
||||||
ID string `graphql:"id" json:"id"`
|
ID string `graphql:"id" json:"id"`
|
||||||
Title *string `graphql:"title" json:"title"`
|
Title *string `graphql:"title" json:"title"`
|
||||||
@@ -278,7 +273,7 @@ type scrapedGalleryStash struct {
|
|||||||
Performers []*scrapedPerformerStash `graphql:"performers" json:"performers"`
|
Performers []*scrapedPerformerStash `graphql:"performers" json:"performers"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *stashScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
|
func (s *stashScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) {
|
||||||
var q struct {
|
var q struct {
|
||||||
FindGallery *scrapedGalleryStash `graphql:"findGalleryByHash(input: $c)"`
|
FindGallery *scrapedGalleryStash `graphql:"findGalleryByHash(input: $c)"`
|
||||||
}
|
}
|
||||||
@@ -296,7 +291,7 @@ func (s *stashScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.
|
|||||||
}
|
}
|
||||||
|
|
||||||
client := s.getStashClient()
|
client := s.getStashClient()
|
||||||
if err := client.Query(context.TODO(), &q, vars); err != nil {
|
if err := client.Query(ctx, &q, vars); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -309,29 +304,13 @@ func (s *stashScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.
|
|||||||
return &ret, nil
|
return &ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *stashScraper) scrapeGalleryByFragment(scene models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
|
func (s *stashScraper) scrapeByURL(_ context.Context, _ string, _ models.ScrapeContentType) (models.ScrapedContent, error) {
|
||||||
return nil, errors.New("scrapeGalleryByFragment not supported for stash scraper")
|
return nil, ErrNotSupported
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *stashScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) {
|
func getScene(ctx context.Context, sceneID int, txnManager models.TransactionManager) (*models.Scene, error) {
|
||||||
return nil, errors.New("scrapePerformerByURL not supported for stash scraper")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *stashScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error) {
|
|
||||||
return nil, errors.New("scrapeSceneByURL not supported for stash scraper")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *stashScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) {
|
|
||||||
return nil, errors.New("scrapeGalleryByURL not supported for stash scraper")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *stashScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) {
|
|
||||||
return nil, errors.New("scrapeMovieByURL not supported for stash scraper")
|
|
||||||
}
|
|
||||||
|
|
||||||
func getScene(sceneID int, txnManager models.TransactionManager) (*models.Scene, error) {
|
|
||||||
var ret *models.Scene
|
var ret *models.Scene
|
||||||
if err := txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
if err := txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
|
||||||
var err error
|
var err error
|
||||||
ret, err = r.Scene().Find(sceneID)
|
ret, err = r.Scene().Find(sceneID)
|
||||||
return err
|
return err
|
||||||
@@ -367,9 +346,9 @@ func sceneToUpdateInput(scene *models.Scene) models.SceneUpdateInput {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func getGallery(galleryID int, txnManager models.TransactionManager) (*models.Gallery, error) {
|
func getGallery(ctx context.Context, galleryID int, txnManager models.TransactionManager) (*models.Gallery, error) {
|
||||||
var ret *models.Gallery
|
var ret *models.Gallery
|
||||||
if err := txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
if err := txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
|
||||||
var err error
|
var err error
|
||||||
ret, err = r.Gallery().Find(galleryID)
|
ret, err = r.Gallery().Find(galleryID)
|
||||||
return err
|
return err
|
||||||
|
|||||||
@@ -56,7 +56,7 @@ func (c Client) QueryStashBoxScene(ctx context.Context, queryStr string) ([]*mod
|
|||||||
|
|
||||||
var ret []*models.ScrapedScene
|
var ret []*models.ScrapedScene
|
||||||
for _, s := range sceneFragments {
|
for _, s := range sceneFragments {
|
||||||
ss, err := c.sceneFragmentToScrapedScene(context.TODO(), s)
|
ss, err := c.sceneFragmentToScrapedScene(ctx, s)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -69,9 +69,7 @@ func (c Client) QueryStashBoxScene(ctx context.Context, queryStr string) ([]*mod
|
|||||||
// FindStashBoxScenesByFingerprints queries stash-box for scenes using every
|
// FindStashBoxScenesByFingerprints queries stash-box for scenes using every
|
||||||
// scene's MD5/OSHASH checksum, or PHash, and returns results in the same order
|
// scene's MD5/OSHASH checksum, or PHash, and returns results in the same order
|
||||||
// as the input slice.
|
// as the input slice.
|
||||||
func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([][]*models.ScrapedScene, error) {
|
func (c Client) FindStashBoxScenesByFingerprints(ctx context.Context, sceneIDs []string) ([][]*models.ScrapedScene, error) {
|
||||||
ctx := context.TODO()
|
|
||||||
|
|
||||||
ids, err := utils.StringSliceToIntSlice(sceneIDs)
|
ids, err := utils.StringSliceToIntSlice(sceneIDs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@@ -150,9 +148,7 @@ func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([][]*models
|
|||||||
|
|
||||||
// FindStashBoxScenesByFingerprintsFlat queries stash-box for scenes using every
|
// FindStashBoxScenesByFingerprintsFlat queries stash-box for scenes using every
|
||||||
// scene's MD5/OSHASH checksum, or PHash, and returns results a flat slice.
|
// scene's MD5/OSHASH checksum, or PHash, and returns results a flat slice.
|
||||||
func (c Client) FindStashBoxScenesByFingerprintsFlat(sceneIDs []string) ([]*models.ScrapedScene, error) {
|
func (c Client) FindStashBoxScenesByFingerprintsFlat(ctx context.Context, sceneIDs []string) ([]*models.ScrapedScene, error) {
|
||||||
ctx := context.TODO()
|
|
||||||
|
|
||||||
ids, err := utils.StringSliceToIntSlice(sceneIDs)
|
ids, err := utils.StringSliceToIntSlice(sceneIDs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@@ -230,7 +226,7 @@ func (c Client) findStashBoxScenesByFingerprints(ctx context.Context, fingerprin
|
|||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c Client) SubmitStashBoxFingerprints(sceneIDs []string, endpoint string) (bool, error) {
|
func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []string, endpoint string) (bool, error) {
|
||||||
ids, err := utils.StringSliceToIntSlice(sceneIDs)
|
ids, err := utils.StringSliceToIntSlice(sceneIDs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return false, err
|
return false, err
|
||||||
@@ -238,7 +234,7 @@ func (c Client) SubmitStashBoxFingerprints(sceneIDs []string, endpoint string) (
|
|||||||
|
|
||||||
var fingerprints []graphql.FingerprintSubmission
|
var fingerprints []graphql.FingerprintSubmission
|
||||||
|
|
||||||
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
|
||||||
qb := r.Scene()
|
qb := r.Scene()
|
||||||
|
|
||||||
for _, sceneID := range ids {
|
for _, sceneID := range ids {
|
||||||
@@ -307,12 +303,12 @@ func (c Client) SubmitStashBoxFingerprints(sceneIDs []string, endpoint string) (
|
|||||||
return false, err
|
return false, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return c.submitStashBoxFingerprints(fingerprints)
|
return c.submitStashBoxFingerprints(ctx, fingerprints)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c Client) submitStashBoxFingerprints(fingerprints []graphql.FingerprintSubmission) (bool, error) {
|
func (c Client) submitStashBoxFingerprints(ctx context.Context, fingerprints []graphql.FingerprintSubmission) (bool, error) {
|
||||||
for _, fingerprint := range fingerprints {
|
for _, fingerprint := range fingerprints {
|
||||||
_, err := c.client.SubmitFingerprint(context.TODO(), fingerprint)
|
_, err := c.client.SubmitFingerprint(ctx, fingerprint)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return false, err
|
return false, err
|
||||||
}
|
}
|
||||||
@@ -322,8 +318,8 @@ func (c Client) submitStashBoxFingerprints(fingerprints []graphql.FingerprintSub
|
|||||||
}
|
}
|
||||||
|
|
||||||
// QueryStashBoxPerformer queries stash-box for performers using a query string.
|
// QueryStashBoxPerformer queries stash-box for performers using a query string.
|
||||||
func (c Client) QueryStashBoxPerformer(queryStr string) ([]*models.StashBoxPerformerQueryResult, error) {
|
func (c Client) QueryStashBoxPerformer(ctx context.Context, queryStr string) ([]*models.StashBoxPerformerQueryResult, error) {
|
||||||
performers, err := c.queryStashBoxPerformer(queryStr)
|
performers, err := c.queryStashBoxPerformer(ctx, queryStr)
|
||||||
|
|
||||||
res := []*models.StashBoxPerformerQueryResult{
|
res := []*models.StashBoxPerformerQueryResult{
|
||||||
{
|
{
|
||||||
@@ -342,8 +338,8 @@ func (c Client) QueryStashBoxPerformer(queryStr string) ([]*models.StashBoxPerfo
|
|||||||
return res, err
|
return res, err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c Client) queryStashBoxPerformer(queryStr string) ([]*models.ScrapedPerformer, error) {
|
func (c Client) queryStashBoxPerformer(ctx context.Context, queryStr string) ([]*models.ScrapedPerformer, error) {
|
||||||
performers, err := c.client.SearchPerformer(context.TODO(), queryStr)
|
performers, err := c.client.SearchPerformer(ctx, queryStr)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -360,7 +356,7 @@ func (c Client) queryStashBoxPerformer(queryStr string) ([]*models.ScrapedPerfor
|
|||||||
}
|
}
|
||||||
|
|
||||||
// FindStashBoxPerformersByNames queries stash-box for performers by name
|
// FindStashBoxPerformersByNames queries stash-box for performers by name
|
||||||
func (c Client) FindStashBoxPerformersByNames(performerIDs []string) ([]*models.StashBoxPerformerQueryResult, error) {
|
func (c Client) FindStashBoxPerformersByNames(ctx context.Context, performerIDs []string) ([]*models.StashBoxPerformerQueryResult, error) {
|
||||||
ids, err := utils.StringSliceToIntSlice(performerIDs)
|
ids, err := utils.StringSliceToIntSlice(performerIDs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@@ -368,7 +364,7 @@ func (c Client) FindStashBoxPerformersByNames(performerIDs []string) ([]*models.
|
|||||||
|
|
||||||
var performers []*models.Performer
|
var performers []*models.Performer
|
||||||
|
|
||||||
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
|
||||||
qb := r.Performer()
|
qb := r.Performer()
|
||||||
|
|
||||||
for _, performerID := range ids {
|
for _, performerID := range ids {
|
||||||
@@ -391,10 +387,10 @@ func (c Client) FindStashBoxPerformersByNames(performerIDs []string) ([]*models.
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return c.findStashBoxPerformersByNames(performers)
|
return c.findStashBoxPerformersByNames(ctx, performers)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c Client) FindStashBoxPerformersByPerformerNames(performerIDs []string) ([][]*models.ScrapedPerformer, error) {
|
func (c Client) FindStashBoxPerformersByPerformerNames(ctx context.Context, performerIDs []string) ([][]*models.ScrapedPerformer, error) {
|
||||||
ids, err := utils.StringSliceToIntSlice(performerIDs)
|
ids, err := utils.StringSliceToIntSlice(performerIDs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@@ -402,7 +398,7 @@ func (c Client) FindStashBoxPerformersByPerformerNames(performerIDs []string) ([
|
|||||||
|
|
||||||
var performers []*models.Performer
|
var performers []*models.Performer
|
||||||
|
|
||||||
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
|
||||||
qb := r.Performer()
|
qb := r.Performer()
|
||||||
|
|
||||||
for _, performerID := range ids {
|
for _, performerID := range ids {
|
||||||
@@ -425,7 +421,7 @@ func (c Client) FindStashBoxPerformersByPerformerNames(performerIDs []string) ([
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
results, err := c.findStashBoxPerformersByNames(performers)
|
results, err := c.findStashBoxPerformersByNames(ctx, performers)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -438,11 +434,11 @@ func (c Client) FindStashBoxPerformersByPerformerNames(performerIDs []string) ([
|
|||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c Client) findStashBoxPerformersByNames(performers []*models.Performer) ([]*models.StashBoxPerformerQueryResult, error) {
|
func (c Client) findStashBoxPerformersByNames(ctx context.Context, performers []*models.Performer) ([]*models.StashBoxPerformerQueryResult, error) {
|
||||||
var ret []*models.StashBoxPerformerQueryResult
|
var ret []*models.StashBoxPerformerQueryResult
|
||||||
for _, performer := range performers {
|
for _, performer := range performers {
|
||||||
if performer.Name.Valid {
|
if performer.Name.Valid {
|
||||||
performerResults, err := c.queryStashBoxPerformer(performer.Name.String)
|
performerResults, err := c.queryStashBoxPerformer(ctx, performer.Name.String)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -610,6 +606,11 @@ func performerFragmentToScrapedScenePerformer(p graphql.PerformerFragment) *mode
|
|||||||
sp.FakeTits = enumToStringPtr(p.BreastType, true)
|
sp.FakeTits = enumToStringPtr(p.BreastType, true)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if len(p.Aliases) > 0 {
|
||||||
|
alias := strings.Join(p.Aliases, ", ")
|
||||||
|
sp.Aliases = &alias
|
||||||
|
}
|
||||||
|
|
||||||
return sp
|
return sp
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -705,8 +706,8 @@ func (c Client) sceneFragmentToScrapedScene(ctx context.Context, s *graphql.Scen
|
|||||||
return ss, nil
|
return ss, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c Client) FindStashBoxPerformerByID(id string) (*models.ScrapedPerformer, error) {
|
func (c Client) FindStashBoxPerformerByID(ctx context.Context, id string) (*models.ScrapedPerformer, error) {
|
||||||
performer, err := c.client.FindPerformerByID(context.TODO(), id)
|
performer, err := c.client.FindPerformerByID(ctx, id)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -715,8 +716,8 @@ func (c Client) FindStashBoxPerformerByID(id string) (*models.ScrapedPerformer,
|
|||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c Client) FindStashBoxPerformerByName(name string) (*models.ScrapedPerformer, error) {
|
func (c Client) FindStashBoxPerformerByName(ctx context.Context, name string) (*models.ScrapedPerformer, error) {
|
||||||
performers, err := c.client.SearchPerformer(context.TODO(), name)
|
performers, err := c.client.SearchPerformer(ctx, name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -99,8 +99,6 @@ func urlFromCDP(ctx context.Context, url string, driverOptions scraperDriverOpti
|
|||||||
sleepDuration = time.Duration(driverOptions.Sleep) * time.Second
|
sleepDuration = time.Duration(driverOptions.Sleep) * time.Second
|
||||||
}
|
}
|
||||||
|
|
||||||
act := context.TODO()
|
|
||||||
|
|
||||||
// if scraperCDPPath is a remote address, then allocate accordingly
|
// if scraperCDPPath is a remote address, then allocate accordingly
|
||||||
cdpPath := globalConfig.GetScraperCDPPath()
|
cdpPath := globalConfig.GetScraperCDPPath()
|
||||||
if cdpPath != "" {
|
if cdpPath != "" {
|
||||||
@@ -118,7 +116,7 @@ func urlFromCDP(ctx context.Context, url string, driverOptions scraperDriverOpti
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
act, cancelAct = chromedp.NewRemoteAllocator(act, remote)
|
ctx, cancelAct = chromedp.NewRemoteAllocator(ctx, remote)
|
||||||
} else {
|
} else {
|
||||||
// use a temporary user directory for chrome
|
// use a temporary user directory for chrome
|
||||||
dir, err := os.MkdirTemp("", "stash-chromedp")
|
dir, err := os.MkdirTemp("", "stash-chromedp")
|
||||||
@@ -131,13 +129,13 @@ func urlFromCDP(ctx context.Context, url string, driverOptions scraperDriverOpti
|
|||||||
chromedp.UserDataDir(dir),
|
chromedp.UserDataDir(dir),
|
||||||
chromedp.ExecPath(cdpPath),
|
chromedp.ExecPath(cdpPath),
|
||||||
)
|
)
|
||||||
act, cancelAct = chromedp.NewExecAllocator(act, opts...)
|
ctx, cancelAct = chromedp.NewExecAllocator(ctx, opts...)
|
||||||
}
|
}
|
||||||
|
|
||||||
defer cancelAct()
|
defer cancelAct()
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx, cancel := chromedp.NewContext(act)
|
ctx, cancel := chromedp.NewContext(ctx)
|
||||||
defer cancel()
|
defer cancel()
|
||||||
|
|
||||||
// add a fixed timeout for the http request
|
// add a fixed timeout for the http request
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import (
|
|||||||
"bytes"
|
"bytes"
|
||||||
"context"
|
"context"
|
||||||
"errors"
|
"errors"
|
||||||
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/url"
|
"net/url"
|
||||||
"regexp"
|
"regexp"
|
||||||
@@ -39,14 +40,14 @@ func (s *xpathScraper) getXpathScraper() *mappedScraper {
|
|||||||
return s.config.XPathScrapers[s.scraper.Scraper]
|
return s.config.XPathScrapers[s.scraper.Scraper]
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *xpathScraper) scrapeURL(url string) (*html.Node, *mappedScraper, error) {
|
func (s *xpathScraper) scrapeURL(ctx context.Context, url string) (*html.Node, *mappedScraper, error) {
|
||||||
scraper := s.getXpathScraper()
|
scraper := s.getXpathScraper()
|
||||||
|
|
||||||
if scraper == nil {
|
if scraper == nil {
|
||||||
return nil, nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
|
return nil, nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
|
||||||
}
|
}
|
||||||
|
|
||||||
doc, err := s.loadURL(context.TODO(), url)
|
doc, err := s.loadURL(ctx, url)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
@@ -55,55 +56,33 @@ func (s *xpathScraper) scrapeURL(url string) (*html.Node, *mappedScraper, error)
|
|||||||
return doc, scraper, nil
|
return doc, scraper, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *xpathScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) {
|
func (s *xpathScraper) scrapeByURL(ctx context.Context, url string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
|
||||||
u := replaceURL(url, s.scraper) // allow a URL Replace for performer by URL queries
|
u := replaceURL(url, s.scraper) // allow a URL Replace for performer by URL queries
|
||||||
doc, scraper, err := s.scrapeURL(u)
|
doc, scraper, err := s.scrapeURL(ctx, u)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
q := s.getXPathQuery(doc)
|
q := s.getXPathQuery(doc)
|
||||||
return scraper.scrapePerformer(q)
|
switch ty {
|
||||||
}
|
case models.ScrapeContentTypePerformer:
|
||||||
|
return scraper.scrapePerformer(ctx, q)
|
||||||
func (s *xpathScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error) {
|
case models.ScrapeContentTypeScene:
|
||||||
u := replaceURL(url, s.scraper) // allow a URL Replace for scene by URL queries
|
return scraper.scrapeScene(ctx, q)
|
||||||
doc, scraper, err := s.scrapeURL(u)
|
case models.ScrapeContentTypeGallery:
|
||||||
if err != nil {
|
return scraper.scrapeGallery(ctx, q)
|
||||||
return nil, err
|
case models.ScrapeContentTypeMovie:
|
||||||
|
return scraper.scrapeMovie(ctx, q)
|
||||||
}
|
}
|
||||||
|
|
||||||
q := s.getXPathQuery(doc)
|
return nil, ErrNotSupported
|
||||||
return scraper.scrapeScene(q)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *xpathScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) {
|
func (s *xpathScraper) scrapeByName(ctx context.Context, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error) {
|
||||||
u := replaceURL(url, s.scraper) // allow a URL Replace for gallery by URL queries
|
|
||||||
doc, scraper, err := s.scrapeURL(u)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
q := s.getXPathQuery(doc)
|
|
||||||
return scraper.scrapeGallery(q)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *xpathScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) {
|
|
||||||
u := replaceURL(url, s.scraper) // allow a URL Replace for movie by URL queries
|
|
||||||
doc, scraper, err := s.scrapeURL(u)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
q := s.getXPathQuery(doc)
|
|
||||||
return scraper.scrapeMovie(q)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *xpathScraper) scrapePerformersByName(name string) ([]*models.ScrapedPerformer, error) {
|
|
||||||
scraper := s.getXpathScraper()
|
scraper := s.getXpathScraper()
|
||||||
|
|
||||||
if scraper == nil {
|
if scraper == nil {
|
||||||
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
|
return nil, fmt.Errorf("%w: name %v", ErrNotFound, s.scraper.Scraper)
|
||||||
}
|
}
|
||||||
|
|
||||||
const placeholder = "{}"
|
const placeholder = "{}"
|
||||||
@@ -114,46 +93,43 @@ func (s *xpathScraper) scrapePerformersByName(name string) ([]*models.ScrapedPer
|
|||||||
url := s.scraper.QueryURL
|
url := s.scraper.QueryURL
|
||||||
url = strings.ReplaceAll(url, placeholder, escapedName)
|
url = strings.ReplaceAll(url, placeholder, escapedName)
|
||||||
|
|
||||||
doc, err := s.loadURL(context.TODO(), url)
|
doc, err := s.loadURL(ctx, url)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
q := s.getXPathQuery(doc)
|
q := s.getXPathQuery(doc)
|
||||||
return scraper.scrapePerformers(q)
|
q.setType(SearchQuery)
|
||||||
}
|
|
||||||
|
|
||||||
func (s *xpathScraper) scrapePerformerByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
|
var content []models.ScrapedContent
|
||||||
return nil, errors.New("scrapePerformerByFragment not supported for xpath scraper")
|
switch ty {
|
||||||
}
|
case models.ScrapeContentTypePerformer:
|
||||||
|
performers, err := scraper.scrapePerformers(ctx, q)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
for _, p := range performers {
|
||||||
|
content = append(content, p)
|
||||||
|
}
|
||||||
|
|
||||||
func (s *xpathScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, error) {
|
return content, nil
|
||||||
scraper := s.getXpathScraper()
|
case models.ScrapeContentTypeScene:
|
||||||
|
scenes, err := scraper.scrapeScenes(ctx, q)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
for _, s := range scenes {
|
||||||
|
content = append(content, s)
|
||||||
|
}
|
||||||
|
|
||||||
if scraper == nil {
|
return content, nil
|
||||||
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const placeholder = "{}"
|
return nil, ErrNotSupported
|
||||||
|
|
||||||
// replace the placeholder string with the URL-escaped name
|
|
||||||
escapedName := url.QueryEscape(name)
|
|
||||||
|
|
||||||
url := s.scraper.QueryURL
|
|
||||||
url = strings.ReplaceAll(url, placeholder, escapedName)
|
|
||||||
|
|
||||||
doc, err := s.loadURL(context.TODO(), url)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
q := s.getXPathQuery(doc)
|
|
||||||
return scraper.scrapeScenes(q)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *xpathScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) {
|
func (s *xpathScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) {
|
||||||
// construct the URL
|
// construct the URL
|
||||||
queryURL := queryURLParametersFromScene(scene)
|
queryURL := queryURLParametersFromScene(scene)
|
||||||
if s.scraper.QueryURLReplacements != nil {
|
if s.scraper.QueryURLReplacements != nil {
|
||||||
@@ -167,17 +143,28 @@ func (s *xpathScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedS
|
|||||||
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
|
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
|
||||||
}
|
}
|
||||||
|
|
||||||
doc, err := s.loadURL(context.TODO(), url)
|
doc, err := s.loadURL(ctx, url)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
q := s.getXPathQuery(doc)
|
q := s.getXPathQuery(doc)
|
||||||
return scraper.scrapeScene(q)
|
return scraper.scrapeScene(ctx, q)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *xpathScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
|
func (s *xpathScraper) scrapeByFragment(ctx context.Context, input Input) (models.ScrapedContent, error) {
|
||||||
|
switch {
|
||||||
|
case input.Gallery != nil:
|
||||||
|
return nil, fmt.Errorf("%w: cannot use an xpath scraper as a gallery fragment scraper", ErrNotSupported)
|
||||||
|
case input.Performer != nil:
|
||||||
|
return nil, fmt.Errorf("%w: cannot use an xpath scraper as a performer fragment scraper", ErrNotSupported)
|
||||||
|
case input.Scene == nil:
|
||||||
|
return nil, fmt.Errorf("%w: scene input is nil", ErrNotSupported)
|
||||||
|
}
|
||||||
|
|
||||||
|
scene := *input.Scene
|
||||||
|
|
||||||
// construct the URL
|
// construct the URL
|
||||||
queryURL := queryURLParametersFromScrapedScene(scene)
|
queryURL := queryURLParametersFromScrapedScene(scene)
|
||||||
if s.scraper.QueryURLReplacements != nil {
|
if s.scraper.QueryURLReplacements != nil {
|
||||||
@@ -191,17 +178,17 @@ func (s *xpathScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*m
|
|||||||
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
|
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
|
||||||
}
|
}
|
||||||
|
|
||||||
doc, err := s.loadURL(context.TODO(), url)
|
doc, err := s.loadURL(ctx, url)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
q := s.getXPathQuery(doc)
|
q := s.getXPathQuery(doc)
|
||||||
return scraper.scrapeScene(q)
|
return scraper.scrapeScene(ctx, q)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *xpathScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
|
func (s *xpathScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) {
|
||||||
// construct the URL
|
// construct the URL
|
||||||
queryURL := queryURLParametersFromGallery(gallery)
|
queryURL := queryURLParametersFromGallery(gallery)
|
||||||
if s.scraper.QueryURLReplacements != nil {
|
if s.scraper.QueryURLReplacements != nil {
|
||||||
@@ -215,18 +202,14 @@ func (s *xpathScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.
|
|||||||
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
|
return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
|
||||||
}
|
}
|
||||||
|
|
||||||
doc, err := s.loadURL(context.TODO(), url)
|
doc, err := s.loadURL(ctx, url)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
q := s.getXPathQuery(doc)
|
q := s.getXPathQuery(doc)
|
||||||
return scraper.scrapeGallery(q)
|
return scraper.scrapeGallery(ctx, q)
|
||||||
}
|
|
||||||
|
|
||||||
func (s *xpathScraper) scrapeGalleryByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
|
|
||||||
return nil, errors.New("scrapeGalleryByFragment not supported for xpath scraper")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *xpathScraper) loadURL(ctx context.Context, url string) (*html.Node, error) {
|
func (s *xpathScraper) loadURL(ctx context.Context, url string) (*html.Node, error) {
|
||||||
@@ -256,15 +239,23 @@ func (s *xpathScraper) getXPathQuery(doc *html.Node) *xpathQuery {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type xpathQuery struct {
|
type xpathQuery struct {
|
||||||
doc *html.Node
|
doc *html.Node
|
||||||
scraper *xpathScraper
|
scraper *xpathScraper
|
||||||
|
queryType QueryType
|
||||||
}
|
}
|
||||||
|
|
||||||
func (q *xpathQuery) runQuery(selector string) []string {
|
func (q *xpathQuery) getType() QueryType {
|
||||||
|
return q.queryType
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *xpathQuery) setType(t QueryType) {
|
||||||
|
q.queryType = t
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *xpathQuery) runQuery(selector string) ([]string, error) {
|
||||||
found, err := htmlquery.QueryAll(q.doc, selector)
|
found, err := htmlquery.QueryAll(q.doc, selector)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Warnf("Error parsing xpath expression '%s': %s", selector, err.Error())
|
return nil, fmt.Errorf("selector '%s': parse error: %v", selector, err)
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
var ret []string
|
var ret []string
|
||||||
@@ -276,7 +267,7 @@ func (q *xpathQuery) runQuery(selector string) []string {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return ret
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (q *xpathQuery) nodeText(n *html.Node) string {
|
func (q *xpathQuery) nodeText(n *html.Node) string {
|
||||||
@@ -301,8 +292,8 @@ func (q *xpathQuery) nodeText(n *html.Node) string {
|
|||||||
return ret
|
return ret
|
||||||
}
|
}
|
||||||
|
|
||||||
func (q *xpathQuery) subScrape(value string) mappedQuery {
|
func (q *xpathQuery) subScrape(ctx context.Context, value string) mappedQuery {
|
||||||
doc, err := q.scraper.loadURL(context.TODO(), value)
|
doc, err := q.scraper.loadURL(ctx, value)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Warnf("Error getting URL '%s' for sub-scraper: %s", value, err.Error())
|
logger.Warnf("Error getting URL '%s' for sub-scraper: %s", value, err.Error())
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user