Merge pull request #597 from stashapp/develop

Merge 0.2 to master
This commit is contained in:
WithoutPants
2020-06-06 14:59:07 +10:00
committed by GitHub
876 changed files with 106938 additions and 105659 deletions

4
.gitattributes vendored
View File

@@ -1,2 +1,6 @@
go.mod text eol=lf go.mod text eol=lf
go.sum text eol=lf go.sum text eol=lf
*.go text eol=lf
vendor/** -text
ui/v2.5/**/*.ts* text eol=lf
ui/v2.5/**/*.scss text eol=lf

View File

@@ -23,6 +23,8 @@ A clear and concise description of what you expected to happen.
**Screenshots** **Screenshots**
If applicable, add screenshots to help explain your problem. Please ensure that your screenshots are SFW or at least appropriately censored. If applicable, add screenshots to help explain your problem. Please ensure that your screenshots are SFW or at least appropriately censored.
**Stash Version: (from Settings -> About):**
**Desktop (please complete the following information):** **Desktop (please complete the following information):**
- OS: [e.g. iOS] - OS: [e.g. iOS]
- Browser [e.g. chrome, safari] - Browser [e.g. chrome, safari]

1
.gitignore vendored
View File

@@ -21,6 +21,7 @@
# GraphQL generated output # GraphQL generated output
pkg/models/generated_*.go pkg/models/generated_*.go
ui/v2/src/core/generated-*.tsx ui/v2/src/core/generated-*.tsx
ui/v2.5/src/core/generated-*.tsx
# packr generated files # packr generated files
*-packr.go *-packr.go

5
.idea/go.iml generated
View File

@@ -4,9 +4,8 @@
<content url="file://$MODULE_DIR$"> <content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/certs" /> <excludeFolder url="file://$MODULE_DIR$/certs" />
<excludeFolder url="file://$MODULE_DIR$/dist" /> <excludeFolder url="file://$MODULE_DIR$/dist" />
<excludeFolder url="file://$MODULE_DIR$/ui/v1/dist" /> <excludeFolder url="file://$MODULE_DIR$/ui/v2.5/build" />
<excludeFolder url="file://$MODULE_DIR$/ui/v2/build" /> <excludeFolder url="file://$MODULE_DIR$/ui/v2.5/node_modules" />
<excludeFolder url="file://$MODULE_DIR$/ui/v2/node_modules" />
</content> </content>
<orderEntry type="inheritedJdk" /> <orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" /> <orderEntry type="sourceFolder" forTests="false" />

View File

@@ -1,4 +1,8 @@
if: tag != latest_develop # don't build for the latest_develop tagged version
dist: xenial dist: xenial
git:
depth: false
language: go language: go
go: go:
- 1.11.x - 1.11.x
@@ -9,14 +13,15 @@ env:
- GO111MODULE=on - GO111MODULE=on
before_install: before_install:
- echo -e "machine github.com\n login $CI_USER_TOKEN" > ~/.netrc - echo -e "machine github.com\n login $CI_USER_TOKEN" > ~/.netrc
- travis_retry yarn --cwd ui/v2 install --frozen-lockfile - nvm install 12
- travis_retry yarn --cwd ui/v2.5 install --frozen-lockfile
- make generate - make generate
- CI=false yarn --cwd ui/v2 build # TODO: Fix warnings - CI=false yarn --cwd ui/v2.5 build-ci
#- go get -v github.com/mgechev/revive #- go get -v github.com/mgechev/revive
script: script:
# left lint off to avoid getting extra dependency
#- make lint #- make lint
#- make vet - make fmt-check vet it
- make it
after_success: after_success:
- docker pull stashapp/compiler:develop - docker pull stashapp/compiler:develop
- sh ./scripts/cross-compile.sh - sh ./scripts/cross-compile.sh
@@ -31,6 +36,8 @@ before_deploy:
deploy: deploy:
# latest develop release # latest develop release
- provider: releases - provider: releases
# use the v2 release provider for proper release note setting
edge: true
api_key: api_key:
secure: tGJ2q62CfPdayid2qEtW2aGRhMgCl3lBXYYQqp3eH0vFgIIf6cs7IDX7YC/x3XKMEQ/iMLZmtCXZvSTqNrD6Sk7MSnt30GIs+4uxIZDnnd8mV5X3K4n4gjD+NAORc4DrQBvUGrYMKJsR5gtkH0nu6diWb1o1If7OiJEuCPRhrmQYcza7NUdABnA9Z2wn2RNUV9Ga33WUCqLMEU5GtNBlfQPiP/khCQrqn/ocR6wUjYut3J6YagzqH4wsfJi3glHyWtowcNIw1LZi5zFxHD/bRBT4Tln7yypkjWNq9eQILA6i6kRUGf7ggyTx26/k8n4tnu+QD0vVh4EcjlThpU/LGyUXzKrrxjRwaDZnM0oYxg5AfHcBuAiAdo0eWnV3lEWRfTJMIVb9MPf4qDmzR4RREfB5OXOxwq3ODeCcJE8sTIMD/wBPZrlqS/QrRpND2gn2X4snkVukN9t9F4CMTFMtVSzFV7TDJW5E5Lq6VEExulteQhs6kcK9NRPNAaLgRQAw7X9kVWfDtiGUP+fE2i8F9Bo8bm7sOT5O5VPMPykx3EgeNg1IqIgMTCsMlhMJT4xBJoQUgmd2wWyf3Ryw+P+sFgdb5Sd7+lFgJBjMUUoOxMxAOiEgdFvCXcr+/Udyz2RdtetU1/6VzXzLPcKOw0wubZeBkISqu7o9gpfdMP9Eq00= secure: tGJ2q62CfPdayid2qEtW2aGRhMgCl3lBXYYQqp3eH0vFgIIf6cs7IDX7YC/x3XKMEQ/iMLZmtCXZvSTqNrD6Sk7MSnt30GIs+4uxIZDnnd8mV5X3K4n4gjD+NAORc4DrQBvUGrYMKJsR5gtkH0nu6diWb1o1If7OiJEuCPRhrmQYcza7NUdABnA9Z2wn2RNUV9Ga33WUCqLMEU5GtNBlfQPiP/khCQrqn/ocR6wUjYut3J6YagzqH4wsfJi3glHyWtowcNIw1LZi5zFxHD/bRBT4Tln7yypkjWNq9eQILA6i6kRUGf7ggyTx26/k8n4tnu+QD0vVh4EcjlThpU/LGyUXzKrrxjRwaDZnM0oYxg5AfHcBuAiAdo0eWnV3lEWRfTJMIVb9MPf4qDmzR4RREfB5OXOxwq3ODeCcJE8sTIMD/wBPZrlqS/QrRpND2gn2X4snkVukN9t9F4CMTFMtVSzFV7TDJW5E5Lq6VEExulteQhs6kcK9NRPNAaLgRQAw7X9kVWfDtiGUP+fE2i8F9Bo8bm7sOT5O5VPMPykx3EgeNg1IqIgMTCsMlhMJT4xBJoQUgmd2wWyf3Ryw+P+sFgdb5Sd7+lFgJBjMUUoOxMxAOiEgdFvCXcr+/Udyz2RdtetU1/6VzXzLPcKOw0wubZeBkISqu7o9gpfdMP9Eq00=
file: file:
@@ -41,7 +48,7 @@ deploy:
skip_cleanup: true skip_cleanup: true
overwrite: true overwrite: true
name: "${STASH_VERSION}: Latest development build" name: "${STASH_VERSION}: Latest development build"
body: ${RELEASE_DATE}\n This is always the latest committed version on the develop branch. Use at your own risk! release_notes: "**${RELEASE_DATE}**\n This is always the latest committed version on the develop branch. Use at your own risk!"
prerelease: true prerelease: true
on: on:
repo: stashapp/stash repo: stashapp/stash

View File

@@ -21,13 +21,18 @@ clean:
.PHONY: generate .PHONY: generate
generate: generate:
go generate -mod=vendor go generate -mod=vendor
cd ui/v2 && yarn run gqlgen cd ui/v2.5 && yarn run gqlgen
# Runs gofmt -w on the project's source code, modifying any files that do not match its style. # Runs gofmt -w on the project's source code, modifying any files that do not match its style.
.PHONY: fmt .PHONY: fmt
fmt: fmt:
go fmt ./... go fmt ./...
# Ensures that changed files have had gofmt run on them
.PHONY: fmt-check
fmt-check:
sh ./scripts/check-gofmt.sh
# Runs go vet on the project's source code. # Runs go vet on the project's source code.
.PHONY: vet .PHONY: vet
vet: vet:
@@ -47,7 +52,31 @@ test:
it: it:
go test -mod=vendor -tags=integration ./... go test -mod=vendor -tags=integration ./...
# installs UI dependencies. Run when first cloning repository, or if UI
# dependencies have changed
.PHONY: pre-ui
pre-ui:
cd ui/v2.5 && yarn install --frozen-lockfile
.PHONY: ui .PHONY: ui
ui: ui:
cd ui/v2 && yarn build cd ui/v2.5 && yarn build
packr2 packr2
fmt-ui:
cd ui/v2.5 && yarn format
# runs tests and checks on the UI and builds it
.PHONY: ui-validate
ui-validate:
cd ui/v2.5 && yarn run validate
# just repacks the packr files - use when updating migrations and packed files without
# rebuilding the UI
.PHONY: packr
packr:
packr2
# runs all of the tests and checks required for a PR to be accepted
.PHONY: validate
validate: ui-validate fmt-check vet lint it

View File

@@ -69,7 +69,7 @@ Join the [Discord server](https://discord.gg/2TsNFKt).
* Go Install: `go get github.com/gobuffalo/packr/v2/packr2@v2.0.2` * Go Install: `go get github.com/gobuffalo/packr/v2/packr2@v2.0.2`
* [Binary Download](https://github.com/gobuffalo/packr/releases) * [Binary Download](https://github.com/gobuffalo/packr/releases)
* [Yarn](https://yarnpkg.com/en/docs/install) - Yarn package manager * [Yarn](https://yarnpkg.com/en/docs/install) - Yarn package manager
* Run `yarn install --frozen-lockfile` in the `stash/ui/v2` folder (before running make generate for first time). * Run `yarn install --frozen-lockfile` in the `stash/ui/v2.5` folder (before running make generate for first time).
NOTE: You may need to run the `go get` commands outside the project directory to avoid modifying the project's module file. NOTE: You may need to run the `go get` commands outside the project directory to avoid modifying the project's module file.
@@ -92,11 +92,18 @@ NOTE: The `make` command in Windows will be `mingw32-make` with MingW.
## Commands ## Commands
* `make generate` - Generate Go GraphQL and packr2 files * `make generate` - Generate Go and UI GraphQL files
* `make build` - Builds the binary (make sure to build the UI as well... see below) * `make build` - Builds the binary (make sure to build the UI as well... see below)
* `make ui` - Builds the frontend * `make pre-ui` - Installs the UI dependencies. Only needs to be run once before building the UI for the first time, or if the dependencies are updated
* `make fmt-ui` - Formats the UI source code.
* `make ui` - Builds the frontend and the packr2 files
* `make packr` - Generate packr2 files (sub-target of `ui`. Use to regenerate packr2 files without rebuilding UI)
* `make vet` - Run `go vet` * `make vet` - Run `go vet`
* `make lint` - Run the linter * `make lint` - Run the linter
* `make fmt` - Run `go fmt`
* `make fmt-check` - Ensure changed files are formatted correctly
* `make it` - Run the unit and integration tests
* `make validate` - Run all of the tests and checks required to submit a PR
## Building a release ## Building a release
@@ -111,3 +118,10 @@ where the app can be cross-compiled. This process is kicked off by CI via the `
command to open a bash shell to the container to poke around: command to open a bash shell to the container to poke around:
`docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -i -t stashappdev/compiler:latest /bin/bash` `docker run --rm --mount type=bind,source="$(pwd)",target=/stash -w /stash -i -t stashappdev/compiler:latest /bin/bash`
## Customization
You can make the Stash interface fit your desired style with [Custom CSS snippets](https://github.com/stashapp/stash/wiki/Custom-CSS-snippets) and [CSS Tweaks](https://github.com/stashapp/stash/wiki/CSS-Tweaks).
[Stash Plex Theme](https://github.com/stashapp/stash/wiki/Stash-Plex-Theme) is a community-created theme inspired by the popular Plex interface.

View File

@@ -32,10 +32,10 @@ RUN wget -O /ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/releases/ffmpeg-rele
mv /ffmpeg*/ /ffmpeg/ mv /ffmpeg*/ /ffmpeg/
# copy the ui yarn stuff so that it doesn't get rebuilt every time # copy the ui yarn stuff so that it doesn't get rebuilt every time
COPY ./ui/v2/package.json ./ui/v2/yarn.lock /stash/ui/v2/ COPY ./ui/v2.5/package.json ./ui/v2.5/yarn.lock /stash/ui/v2.5/
WORKDIR /stash WORKDIR /stash
RUN yarn --cwd ui/v2 install --frozen-lockfile RUN yarn --cwd ui/v2.5 install --frozen-lockfile
COPY . /stash/ COPY . /stash/
ENV GO111MODULE=on ENV GO111MODULE=on

17
go.mod
View File

@@ -2,29 +2,30 @@ module github.com/stashapp/stash
require ( require (
github.com/99designs/gqlgen v0.9.0 github.com/99designs/gqlgen v0.9.0
github.com/PuerkitoBio/goquery v1.5.0 github.com/antchfx/htmlquery v1.2.3
github.com/antchfx/htmlquery v1.2.0 github.com/bmatcuk/doublestar v1.3.1
github.com/antchfx/xpath v1.1.2 // indirect
github.com/bmatcuk/doublestar v1.1.5
github.com/disintegration/imaging v1.6.0 github.com/disintegration/imaging v1.6.0
github.com/go-chi/chi v4.0.2+incompatible github.com/go-chi/chi v4.0.2+incompatible
github.com/gobuffalo/packr/v2 v2.0.2 github.com/gobuffalo/packr/v2 v2.0.2
github.com/golang-migrate/migrate/v4 v4.3.1 github.com/golang-migrate/migrate/v4 v4.3.1
github.com/gorilla/sessions v1.2.0
github.com/gorilla/websocket v1.4.0 github.com/gorilla/websocket v1.4.0
github.com/h2non/filetype v1.0.8 github.com/h2non/filetype v1.0.8
// this is required for generate
github.com/inconshreveable/mousetrap v1.0.0 // indirect github.com/inconshreveable/mousetrap v1.0.0 // indirect
github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a
github.com/jmoiron/sqlx v1.2.0 github.com/jmoiron/sqlx v1.2.0
github.com/mattn/go-sqlite3 v1.10.0 github.com/json-iterator/go v1.1.9
github.com/mattn/go-sqlite3 v1.13.0
github.com/rs/cors v1.6.0 github.com/rs/cors v1.6.0
github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f
github.com/sirupsen/logrus v1.4.2 github.com/sirupsen/logrus v1.4.2
github.com/spf13/pflag v1.0.3 github.com/spf13/pflag v1.0.3
github.com/spf13/viper v1.4.0 github.com/spf13/viper v1.4.0
github.com/stretchr/testify v1.5.1
github.com/vektah/gqlparser v1.1.2 github.com/vektah/gqlparser v1.1.2
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4 golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4
golang.org/x/image v0.0.0-20190118043309-183bebdce1b2 // indirect golang.org/x/image v0.0.0-20190118043309-183bebdce1b2
golang.org/x/net v0.0.0-20190522155817-f3200d17e092 golang.org/x/net v0.0.0-20200421231249-e086a090c8fd
gopkg.in/yaml.v2 v2.2.2 gopkg.in/yaml.v2 v2.2.2
) )

40
go.sum
View File

@@ -16,8 +16,6 @@ github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF0
github.com/Microsoft/go-winio v0.4.11/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA= github.com/Microsoft/go-winio v0.4.11/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA=
github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk= github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/PuerkitoBio/goquery v1.5.0 h1:uGvmFXOA73IKluu/F84Xd1tt/z07GYm8X49XKHP7EJk=
github.com/PuerkitoBio/goquery v1.5.0/go.mod h1:qD2PgZ9lccMbQlc7eEOjaeRlFQON7xY8kdmcsrnKqMg=
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
github.com/agnivade/levenshtein v1.0.1 h1:3oJU7J3FGFmyhn8KHjmVaZCN5hxTr7GxgRue+sxIXdQ= github.com/agnivade/levenshtein v1.0.1 h1:3oJU7J3FGFmyhn8KHjmVaZCN5hxTr7GxgRue+sxIXdQ=
@@ -27,13 +25,11 @@ github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuy
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
github.com/andybalholm/cascadia v1.0.0 h1:hOCXnnZ5A+3eVDX8pvgl4kofXv2ELss0bKcqRySc45o=
github.com/andybalholm/cascadia v1.0.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c= github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
github.com/antchfx/htmlquery v1.2.0 h1:oKShnsGlnOHX6t4uj5OHgLKkABcJoqnXpqnscoi9Lpw= github.com/antchfx/htmlquery v1.2.3 h1:sP3NFDneHx2stfNXCKbhHFo8XgNjCACnU/4AO5gWz6M=
github.com/antchfx/htmlquery v1.2.0/go.mod h1:MS9yksVSQXls00iXkiMqXr0J+umL/AmxXKuP28SUJM8= github.com/antchfx/htmlquery v1.2.3/go.mod h1:B0ABL+F5irhhMWg54ymEZinzMSi0Kt3I2if0BLYa3V0=
github.com/antchfx/xpath v1.1.2 h1:YziPrtM0gEJBnhdUGxYcIVYXZ8FXbtbovxOi+UW/yWQ= github.com/antchfx/xpath v1.1.6 h1:6sVh6hB5T6phw1pFpHRQ+C4bd8sNI+O58flqtg7h0R0=
github.com/antchfx/xpath v1.1.2/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk= github.com/antchfx/xpath v1.1.6/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk=
github.com/apache/thrift v0.0.0-20180902110319-2566ecd5d999/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= github.com/apache/thrift v0.0.0-20180902110319-2566ecd5d999/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
@@ -41,8 +37,8 @@ github.com/aws/aws-sdk-go v1.17.7/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCSz6Q9T7+igc/hlvDOUdtWKryOrtFyIVABv/p7k= github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCSz6Q9T7+igc/hlvDOUdtWKryOrtFyIVABv/p7k=
github.com/bmatcuk/doublestar v1.1.5 h1:2bNwBOmhyFEFcoB3tGvTD5xanq+4kyOZlB8wFYbMjkk= github.com/bmatcuk/doublestar v1.3.1 h1:rT8rxDPsavp9G+4ZULzqhhUSaI/OPsTZNG88Z3i0xvY=
github.com/bmatcuk/doublestar v1.1.5/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE= github.com/bmatcuk/doublestar v1.3.1/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g= github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
@@ -81,6 +77,7 @@ github.com/docker/docker v0.7.3-0.20190108045446-77df18c24acf/go.mod h1:eEKB0N0r
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/dustin/go-humanize v0.0.0-20180713052910-9f541cc9db5d/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v0.0.0-20180713052910-9f541cc9db5d/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
@@ -325,6 +322,8 @@ github.com/golang-migrate/migrate/v4 v4.3.1/go.mod h1:mJ89KBgbXmM3P49BqOxRL3riNF
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef h1:veQD95Isof8w9/WXiA+pa3tz3fJXkt5B7QaRBrM62gk= github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef h1:veQD95Isof8w9/WXiA+pa3tz3fJXkt5B7QaRBrM62gk=
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E= github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
@@ -340,6 +339,7 @@ github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ= github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY= github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY=
@@ -352,9 +352,12 @@ github.com/gorilla/mux v1.6.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2z
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/mux v1.7.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= github.com/gorilla/mux v1.7.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/pat v0.0.0-20180118222023-199c85a7f6d1/go.mod h1:YeAe0gNeiNT5hoiZRI4yiOky6jVdNvfO2N6Kav/HmxY= github.com/gorilla/pat v0.0.0-20180118222023-199c85a7f6d1/go.mod h1:YeAe0gNeiNT5hoiZRI4yiOky6jVdNvfO2N6Kav/HmxY=
github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ=
github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
github.com/gorilla/sessions v1.1.2/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE8ovaJD0w= github.com/gorilla/sessions v1.1.2/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE8ovaJD0w=
github.com/gorilla/sessions v1.1.3/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE8ovaJD0w= github.com/gorilla/sessions v1.1.3/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE8ovaJD0w=
github.com/gorilla/sessions v1.2.0 h1:S7P+1Hm5V/AT9cjEcUD5uDaQSX0OE577aCXgoaKpYbQ=
github.com/gorilla/sessions v1.2.0/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
github.com/gorilla/websocket v1.2.0 h1:VJtLvh6VQym50czpZzx07z/kw9EgAxI3x1ZB8taTMQQ= github.com/gorilla/websocket v1.2.0 h1:VJtLvh6VQym50czpZzx07z/kw9EgAxI3x1ZB8taTMQQ=
github.com/gorilla/websocket v1.2.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/gorilla/websocket v1.2.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q= github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
@@ -383,6 +386,8 @@ github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANyt
github.com/jackc/fake v0.0.0-20150926172116-812a484cc733/go.mod h1:WrMFNQdiFJ80sQsxDoMokWK1W5TQtxBFNpzWTD84ibQ= github.com/jackc/fake v0.0.0-20150926172116-812a484cc733/go.mod h1:WrMFNQdiFJ80sQsxDoMokWK1W5TQtxBFNpzWTD84ibQ=
github.com/jackc/pgx v3.2.0+incompatible/go.mod h1:0ZGrqGqkRlliWnWB4zKnWtjbSWbGkVEFm4TeybAXq+I= github.com/jackc/pgx v3.2.0+incompatible/go.mod h1:0ZGrqGqkRlliWnWB4zKnWtjbSWbGkVEFm4TeybAXq+I=
github.com/jellevandenhooff/dkim v0.0.0-20150330215556-f50fe3d243e1/go.mod h1:E0B/fFc00Y+Rasa88328GlI/XbtyysCtTHZS8h7IrBU= github.com/jellevandenhooff/dkim v0.0.0-20150330215556-f50fe3d243e1/go.mod h1:E0B/fFc00Y+Rasa88328GlI/XbtyysCtTHZS8h7IrBU=
github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a h1:zPPuIq2jAWWPTrGt70eK/BSch+gFAGrNzecsoENgu2o=
github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a/go.mod h1:yL958EeXv8Ylng6IfnvG4oflryUi3vgA3xPs9hmII1s=
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jmoiron/sqlx v0.0.0-20180614180643-0dae4fefe7c0/go.mod h1:IiEW3SEiiErVyFdH8NTuWjSifiEQKUoyK3LNqr2kCHU= github.com/jmoiron/sqlx v0.0.0-20180614180643-0dae4fefe7c0/go.mod h1:IiEW3SEiiErVyFdH8NTuWjSifiEQKUoyK3LNqr2kCHU=
github.com/jmoiron/sqlx v1.2.0 h1:41Ip0zITnmWNR/vHV+S4m+VoUivnWY5E4OJfLZjCJMA= github.com/jmoiron/sqlx v1.2.0 h1:41Ip0zITnmWNR/vHV+S4m+VoUivnWY5E4OJfLZjCJMA=
@@ -391,6 +396,8 @@ github.com/joho/godotenv v1.2.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqx
github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
github.com/json-iterator/go v1.1.9 h1:9yzud/Ht36ygwatGx56VwCZtlI/2AD15T1X2sjSuGns=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8= github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8=
@@ -445,6 +452,8 @@ github.com/mattn/go-sqlite3 v1.9.0 h1:pDRiWfl+++eC2FEFRy6jXmQlvp4Yh3z1MJKg4UeYM/
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.10.0 h1:jbhqpg7tQe4SupckyijYiy0mJJ/pRyHvXf7JdWK860o= github.com/mattn/go-sqlite3 v1.10.0 h1:jbhqpg7tQe4SupckyijYiy0mJJ/pRyHvXf7JdWK860o=
github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.13.0 h1:LnJI81JidiW9r7pS/hXe6cFeO5EXNq7KbfvoJLRI69c=
github.com/mattn/go-sqlite3 v1.13.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/microcosm-cc/bluemonday v1.0.1/go.mod h1:hsXNsILzKxV+sX77C5b8FSuKF00vh2OMYv+xgHpAMF4= github.com/microcosm-cc/bluemonday v1.0.1/go.mod h1:hsXNsILzKxV+sX77C5b8FSuKF00vh2OMYv+xgHpAMF4=
github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc= github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc=
@@ -453,6 +462,10 @@ github.com/mitchellh/mapstructure v0.0.0-20180203102830-a4e142e9c047/go.mod h1:F
github.com/mitchellh/mapstructure v1.0.0/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.0.0/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE= github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/mongodb/mongo-go-driver v0.3.0/go.mod h1:NK/HWDIIZkaYsnYa0hmtP443T5ELr0KDecmIioVuuyU= github.com/mongodb/mongo-go-driver v0.3.0/go.mod h1:NK/HWDIIZkaYsnYa0hmtP443T5ELr0KDecmIioVuuyU=
github.com/monoculum/formam v0.0.0-20180901015400-4e68be1d79ba/go.mod h1:RKgILGEJq24YyJ2ban8EO0RUVSJlF1pGsEvoLEACr/Q= github.com/monoculum/formam v0.0.0-20180901015400-4e68be1d79ba/go.mod h1:RKgILGEJq24YyJ2ban8EO0RUVSJlF1pGsEvoLEACr/Q=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
@@ -574,6 +587,8 @@ github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXf
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/tarm/serial v0.0.0-20180830185346-98f6abe2eb07/go.mod h1:kDXzergiv9cbyO7IOYJZWg1U88JhDg3PB6klq9Hg2pA= github.com/tarm/serial v0.0.0-20180830185346-98f6abe2eb07/go.mod h1:kDXzergiv9cbyO7IOYJZWg1U88JhDg3PB6klq9Hg2pA=
github.com/tidwall/pretty v0.0.0-20180105212114-65a9db5fad51/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/tidwall/pretty v0.0.0-20180105212114-65a9db5fad51/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
@@ -633,7 +648,6 @@ golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTk
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180816102801-aaf60122140d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180816102801-aaf60122140d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -660,6 +674,8 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn
golang.org/x/net v0.0.0-20190424112056-4829fb13d2c6/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190424112056-4829fb13d2c6/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190522155817-f3200d17e092 h1:4QSRKanuywn15aTZvI/mIDEgPQpswuFndXpOj3rKEco= golang.org/x/net v0.0.0-20190522155817-f3200d17e092 h1:4QSRKanuywn15aTZvI/mIDEgPQpswuFndXpOj3rKEco=
golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20200421231249-e086a090c8fd h1:QPwSajcTUrFriMF1nJ3XzgoqakqQEsnZf9LdXdi2nkI=
golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@@ -703,6 +719,8 @@ golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190426135247-a129542de9ae h1:mQLHiymj/JXKnnjc62tb7nD5pZLs940/sXJu+Xp3DBA= golang.org/x/sys v0.0.0-20190426135247-a129542de9ae h1:mQLHiymj/JXKnnjc62tb7nD5pZLs940/sXJu+Xp3DBA=
golang.org/x/sys v0.0.0-20190426135247-a129542de9ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190426135247-a129542de9ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd h1:xhmwyvizuTgC2qz7ZlMluP20uW+C3Rm0FD/WLDX8884=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=

View File

@@ -26,6 +26,8 @@ models:
model: github.com/stashapp/stash/pkg/models.ScrapedItem model: github.com/stashapp/stash/pkg/models.ScrapedItem
Studio: Studio:
model: github.com/stashapp/stash/pkg/models.Studio model: github.com/stashapp/stash/pkg/models.Studio
Movie:
model: github.com/stashapp/stash/pkg/models.Movie
Tag: Tag:
model: github.com/stashapp/stash/pkg/models.Tag model: github.com/stashapp/stash/pkg/models.Tag
ScrapedPerformer: ScrapedPerformer:
@@ -36,6 +38,8 @@ models:
model: github.com/stashapp/stash/pkg/models.ScrapedScenePerformer model: github.com/stashapp/stash/pkg/models.ScrapedScenePerformer
ScrapedSceneStudio: ScrapedSceneStudio:
model: github.com/stashapp/stash/pkg/models.ScrapedSceneStudio model: github.com/stashapp/stash/pkg/models.ScrapedSceneStudio
ScrapedSceneMovie:
model: github.com/stashapp/stash/pkg/models.ScrapedSceneMovie
ScrapedSceneTag: ScrapedSceneTag:
model: github.com/stashapp/stash/pkg/models.ScrapedSceneTag model: github.com/stashapp/stash/pkg/models.ScrapedSceneTag
SceneFileType: SceneFileType:

View File

@@ -2,25 +2,32 @@ fragment ConfigGeneralData on ConfigGeneralResult {
stashes stashes
databasePath databasePath
generatedPath generatedPath
cachePath
maxTranscodeSize maxTranscodeSize
maxStreamingTranscodeSize maxStreamingTranscodeSize
forceMkv
forceHevc
username username
password password
maxSessionAge
logFile logFile
logOut logOut
logLevel logLevel
logAccess logAccess
excludes excludes
scraperUserAgent
} }
fragment ConfigInterfaceData on ConfigInterfaceResult { fragment ConfigInterfaceData on ConfigInterfaceResult {
soundOnPreview soundOnPreview
wallShowTitle wallShowTitle
wallPlayback
maximumLoopDuration maximumLoopDuration
autostartVideo autostartVideo
showStudioAsText showStudioAsText
css css
cssEnabled cssEnabled
language
} }
fragment ConfigData on ConfigResult { fragment ConfigData on ConfigResult {

View File

@@ -0,0 +1,5 @@
# Minimal movie projection used where only an identifier, display name
# and cover image are needed (e.g. filter/select lists).
fragment SlimMovieData on Movie {
id
name
front_image_path
}

View File

@@ -0,0 +1,20 @@
# Full movie detail fragment; embeds StudioData for the linked studio.
# front_image_path, back_image_path and scene_count are server-side
# resolvers (see the Movie type in the schema).
fragment MovieData on Movie {
id
checksum
name
aliases
# duration in seconds (per the Movie schema type)
duration
date
rating
director
studio {
...StudioData
}
synopsis
url
front_image_path
back_image_path
scene_count
}

View File

@@ -1,5 +1,6 @@
fragment SlimPerformerData on Performer { fragment SlimPerformerData on Performer {
id id
name name
gender
image_path image_path
} }

View File

@@ -3,6 +3,7 @@ fragment PerformerData on Performer {
checksum checksum
name name
url url
gender
twitter twitter
instagram instagram
birthdate birthdate

View File

@@ -47,6 +47,15 @@ fragment SlimSceneData on Scene {
image_path image_path
} }
movies {
movie {
id
name
front_image_path
}
scene_index
}
tags { tags {
id id
name name

View File

@@ -43,6 +43,13 @@ fragment SceneData on Scene {
...StudioData ...StudioData
} }
movies {
movie {
...MovieData
}
scene_index
}
tags { tags {
...TagData ...TagData
} }

View File

@@ -1,5 +1,27 @@
fragment ScrapedPerformerData on ScrapedPerformer { fragment ScrapedPerformerData on ScrapedPerformer {
name name
gender
url
twitter
instagram
birthdate
ethnicity
country
eye_color
height
measurements
fake_tits
career_length
tattoos
piercings
aliases
image
}
fragment ScrapedScenePerformerData on ScrapedScenePerformer {
id
name
gender
url url
twitter twitter
instagram instagram
@@ -16,23 +38,27 @@ fragment ScrapedPerformerData on ScrapedPerformer {
aliases aliases
} }
fragment ScrapedScenePerformerData on ScrapedScenePerformer { fragment ScrapedMovieData on ScrapedMovie {
name
aliases
duration
date
rating
director
url
synopsis
}
fragment ScrapedSceneMovieData on ScrapedSceneMovie {
id id
name name
url
twitter
instagram
birthdate
ethnicity
country
eye_color
height
measurements
fake_tits
career_length
tattoos
piercings
aliases aliases
duration
date
rating
director
url
synopsis
} }
fragment ScrapedSceneStudioData on ScrapedSceneStudio { fragment ScrapedSceneStudioData on ScrapedSceneStudio {
@@ -51,6 +77,7 @@ fragment ScrapedSceneData on ScrapedScene {
details details
url url
date date
image
file { file {
size size
@@ -74,4 +101,8 @@ fragment ScrapedSceneData on ScrapedScene {
performers { performers {
...ScrapedScenePerformerData ...ScrapedScenePerformerData
} }
movies {
...ScrapedSceneMovieData
}
} }

View File

@@ -0,0 +1,27 @@
# Metadata job mutations. Each metadata* operation starts a background
# job and returns the job ID (see the Mutation type in the schema);
# progress is observed via the jobStatus query.
mutation MetadataImport {
metadataImport
}
mutation MetadataExport {
metadataExport
}
mutation MetadataScan($input: ScanMetadataInput!) {
metadataScan(input: $input)
}
mutation MetadataGenerate($input: GenerateMetadataInput!) {
metadataGenerate(input: $input)
}
mutation MetadataAutoTag($input: AutoTagMetadataInput!) {
metadataAutoTag(input: $input)
}
mutation MetadataClean {
metadataClean
}
# Stops the currently running metadata job.
mutation StopJob {
stopJob
}

View File

@@ -0,0 +1,40 @@
# Movie CRUD mutations. front_image / back_image carry base64-encoded
# image data (per the MovieCreateInput schema docs); duration is in
# seconds. Create and Update both return the full MovieData fragment.
mutation MovieCreate(
$name: String!,
$aliases: String,
$duration: Int,
$date: String,
$rating: Int,
$studio_id: ID,
$director: String,
$synopsis: String,
$url: String,
$front_image: String,
$back_image: String) {
movieCreate(input: { name: $name, aliases: $aliases, duration: $duration, date: $date, rating: $rating, studio_id: $studio_id, director: $director, synopsis: $synopsis, url: $url, front_image: $front_image, back_image: $back_image }) {
...MovieData
}
}
# Updates an existing movie; all fields other than id are optional.
mutation MovieUpdate(
$id: ID!
$name: String,
$aliases: String,
$duration: Int,
$date: String,
$rating: Int,
$studio_id: ID,
$director: String,
$synopsis: String,
$url: String,
$front_image: String,
$back_image: String) {
movieUpdate(input: { id: $id, name: $name, aliases: $aliases, duration: $duration, date: $date, rating: $rating, studio_id: $studio_id, director: $director, synopsis: $synopsis, url: $url, front_image: $front_image, back_image: $back_image }) {
...MovieData
}
}
# Deletes a movie by ID; returns a boolean success flag.
mutation MovieDestroy($id: ID!) {
movieDestroy(input: { id: $id })
}

View File

@@ -1,6 +1,7 @@
mutation PerformerCreate( mutation PerformerCreate(
$name: String, $name: String,
$url: String, $url: String,
$gender: GenderEnum,
$birthdate: String, $birthdate: String,
$ethnicity: String, $ethnicity: String,
$country: String, $country: String,
@@ -20,6 +21,7 @@ mutation PerformerCreate(
performerCreate(input: { performerCreate(input: {
name: $name, name: $name,
url: $url, url: $url,
gender: $gender,
birthdate: $birthdate, birthdate: $birthdate,
ethnicity: $ethnicity, ethnicity: $ethnicity,
country: $country, country: $country,
@@ -44,6 +46,7 @@ mutation PerformerUpdate(
$id: ID!, $id: ID!,
$name: String, $name: String,
$url: String, $url: String,
$gender: GenderEnum,
$birthdate: String, $birthdate: String,
$ethnicity: String, $ethnicity: String,
$country: String, $country: String,
@@ -64,6 +67,7 @@ mutation PerformerUpdate(
id: $id, id: $id,
name: $name, name: $name,
url: $url, url: $url,
gender: $gender,
birthdate: $birthdate, birthdate: $birthdate,
ethnicity: $ethnicity, ethnicity: $ethnicity,
country: $country, country: $country,

View File

@@ -8,6 +8,7 @@ mutation SceneUpdate(
$studio_id: ID, $studio_id: ID,
$gallery_id: ID, $gallery_id: ID,
$performer_ids: [ID!] = [], $performer_ids: [ID!] = [],
$movies: [SceneMovieInput!] = [],
$tag_ids: [ID!] = [], $tag_ids: [ID!] = [],
$cover_image: String) { $cover_image: String) {
@@ -21,6 +22,7 @@ mutation SceneUpdate(
studio_id: $studio_id, studio_id: $studio_id,
gallery_id: $gallery_id, gallery_id: $gallery_id,
performer_ids: $performer_ids, performer_ids: $performer_ids,
movies: $movies,
tag_ids: $tag_ids, tag_ids: $tag_ids,
cover_image: $cover_image cover_image: $cover_image
}) { }) {
@@ -37,8 +39,8 @@ mutation BulkSceneUpdate(
$rating: Int, $rating: Int,
$studio_id: ID, $studio_id: ID,
$gallery_id: ID, $gallery_id: ID,
$performer_ids: [ID!], $performer_ids: BulkUpdateIds,
$tag_ids: [ID!]) { $tag_ids: BulkUpdateIds) {
bulkSceneUpdate(input: { bulkSceneUpdate(input: {
ids: $ids, ids: $ids,
@@ -77,3 +79,7 @@ mutation SceneResetO($id: ID!) {
mutation SceneDestroy($id: ID!, $delete_file: Boolean, $delete_generated : Boolean) { mutation SceneDestroy($id: ID!, $delete_file: Boolean, $delete_generated : Boolean) {
sceneDestroy(input: {id: $id, delete_file: $delete_file, delete_generated: $delete_generated}) sceneDestroy(input: {id: $id, delete_file: $delete_file, delete_generated: $delete_generated})
} }
mutation SceneGenerateScreenshot($id: ID!, $at: Float) {
sceneGenerateScreenshot(id: $id, at: $at)
}

View File

@@ -19,19 +19,24 @@ query AllTags {
} }
query AllPerformersForFilter { query AllPerformersForFilter {
allPerformers { allPerformersSlim {
...SlimPerformerData ...SlimPerformerData
} }
} }
query AllStudiosForFilter { query AllStudiosForFilter {
allStudios { allStudiosSlim {
...SlimStudioData ...SlimStudioData
} }
} }
query AllMoviesForFilter {
allMoviesSlim {
...SlimMovieData
}
}
query AllTagsForFilter { query AllTagsForFilter {
allTags { allTagsSlim {
id id
name name
} }
@@ -47,9 +52,11 @@ query ValidGalleriesForScene($scene_id: ID!) {
query Stats { query Stats {
stats { stats {
scene_count, scene_count,
scene_size_count,
gallery_count, gallery_count,
performer_count, performer_count,
studio_count, studio_count,
movie_count,
tag_count tag_count
} }
} }

View File

@@ -0,0 +1,14 @@
# Paged movie search: filter carries the generic paging/sort options,
# movie_filter the movie-specific criteria (studios). Returns the total
# count plus the page of full MovieData records.
query FindMovies($filter: FindFilterType, $movie_filter: MovieFilterType) {
findMovies(filter: $filter, movie_filter: $movie_filter) {
count
movies {
...MovieData
}
}
}
# Single-movie lookup by ID; returns null when not found (Movie is
# nullable on Query.findMovie in the schema).
query FindMovie($id: ID!) {
findMovie(id: $id) {
...MovieData
}
}

View File

@@ -46,6 +46,9 @@ query ParseSceneFilenames($filter: FindFilterType!, $config: SceneParserInput!)
rating rating
studio_id studio_id
gallery_id gallery_id
movies {
movie_id
}
performer_ids performer_ids
tag_ids tag_ids
} }

View File

@@ -4,6 +4,10 @@ query Configuration {
} }
} }
query Directories($path: String) { query Directory($path: String) {
directories(path: $path) directory(path: $path) {
path
parent
directories
}
} }

View File

@@ -1,27 +1,3 @@
query MetadataImport {
metadataImport
}
query MetadataExport {
metadataExport
}
query MetadataScan($input: ScanMetadataInput!) {
metadataScan(input: $input)
}
query MetadataGenerate($input: GenerateMetadataInput!) {
metadataGenerate(input: $input)
}
query MetadataAutoTag($input: AutoTagMetadataInput!) {
metadataAutoTag(input: $input)
}
query MetadataClean {
metadataClean
}
query JobStatus { query JobStatus {
jobStatus { jobStatus {
progress progress
@@ -29,7 +5,3 @@ query JobStatus {
message message
} }
} }
query StopJob {
stopJob
}

View File

@@ -22,6 +22,11 @@ type Query {
"""A function which queries Studio objects""" """A function which queries Studio objects"""
findStudios(filter: FindFilterType): FindStudiosResultType! findStudios(filter: FindFilterType): FindStudiosResultType!
"""Find a movie by ID"""
findMovie(id: ID!): Movie
"""A function which queries Movie objects"""
findMovies(movie_filter: MovieFilterType, filter: FindFilterType): FindMoviesResultType!
findGallery(id: ID!): Gallery findGallery(id: ID!): Gallery
findGalleries(filter: FindFilterType): FindGalleriesResultType! findGalleries(filter: FindFilterType): FindGalleriesResultType!
@@ -68,32 +73,26 @@ type Query {
"""Returns the current, complete configuration""" """Returns the current, complete configuration"""
configuration: ConfigResult! configuration: ConfigResult!
"""Returns an array of paths for the given path""" """Returns an array of paths for the given path"""
directories(path: String): [String!]! directory(path: String): Directory!
# Metadata # Metadata
"""Start an import. Returns the job ID"""
metadataImport: String!
"""Start an export. Returns the job ID"""
metadataExport: String!
"""Start a scan. Returns the job ID"""
metadataScan(input: ScanMetadataInput!): String!
"""Start generating content. Returns the job ID"""
metadataGenerate(input: GenerateMetadataInput!): String!
"""Start auto-tagging. Returns the job ID"""
metadataAutoTag(input: AutoTagMetadataInput!): String!
"""Clean metadata. Returns the job ID"""
metadataClean: String!
jobStatus: MetadataUpdateStatus! jobStatus: MetadataUpdateStatus!
stopJob: Boolean!
# Get everything # Get everything
allPerformers: [Performer!]! allPerformers: [Performer!]!
allStudios: [Studio!]! allStudios: [Studio!]!
allMovies: [Movie!]!
allTags: [Tag!]! allTags: [Tag!]!
# Get everything with minimal metadata
allPerformersSlim: [Performer!]!
allStudiosSlim: [Studio!]!
allMoviesSlim: [Movie!]!
allTagsSlim: [Tag!]!
# Version # Version
version: Version! version: Version!
@@ -114,6 +113,9 @@ type Mutation {
"""Resets the o-counter for a scene to 0. Returns the new value""" """Resets the o-counter for a scene to 0. Returns the new value"""
sceneResetO(id: ID!): Int! sceneResetO(id: ID!): Int!
"""Generates screenshot at specified time in seconds. Leave empty to generate default screenshot"""
sceneGenerateScreenshot(id: ID!, at: Float): String!
sceneMarkerCreate(input: SceneMarkerCreateInput!): SceneMarker sceneMarkerCreate(input: SceneMarkerCreateInput!): SceneMarker
sceneMarkerUpdate(input: SceneMarkerUpdateInput!): SceneMarker sceneMarkerUpdate(input: SceneMarkerUpdateInput!): SceneMarker
sceneMarkerDestroy(id: ID!): Boolean! sceneMarkerDestroy(id: ID!): Boolean!
@@ -126,6 +128,10 @@ type Mutation {
studioUpdate(input: StudioUpdateInput!): Studio studioUpdate(input: StudioUpdateInput!): Studio
studioDestroy(input: StudioDestroyInput!): Boolean! studioDestroy(input: StudioDestroyInput!): Boolean!
movieCreate(input: MovieCreateInput!): Movie
movieUpdate(input: MovieUpdateInput!): Movie
movieDestroy(input: MovieDestroyInput!): Boolean!
tagCreate(input: TagCreateInput!): Tag tagCreate(input: TagCreateInput!): Tag
tagUpdate(input: TagUpdateInput!): Tag tagUpdate(input: TagUpdateInput!): Tag
tagDestroy(input: TagDestroyInput!): Boolean! tagDestroy(input: TagDestroyInput!): Boolean!
@@ -133,6 +139,21 @@ type Mutation {
"""Change general configuration options""" """Change general configuration options"""
configureGeneral(input: ConfigGeneralInput!): ConfigGeneralResult! configureGeneral(input: ConfigGeneralInput!): ConfigGeneralResult!
configureInterface(input: ConfigInterfaceInput!): ConfigInterfaceResult! configureInterface(input: ConfigInterfaceInput!): ConfigInterfaceResult!
"""Start an import. Returns the job ID"""
metadataImport: String!
"""Start an export. Returns the job ID"""
metadataExport: String!
"""Start a scan. Returns the job ID"""
metadataScan(input: ScanMetadataInput!): String!
"""Start generating content. Returns the job ID"""
metadataGenerate(input: GenerateMetadataInput!): String!
"""Start auto-tagging. Returns the job ID"""
metadataAutoTag(input: AutoTagMetadataInput!): String!
"""Clean metadata. Returns the job ID"""
metadataClean: String!
stopJob: Boolean!
} }
type Subscription { type Subscription {

View File

@@ -14,14 +14,22 @@ input ConfigGeneralInput {
databasePath: String databasePath: String
"""Path to generated files""" """Path to generated files"""
generatedPath: String generatedPath: String
"""Path to cache"""
cachePath: String
"""Max generated transcode size""" """Max generated transcode size"""
maxTranscodeSize: StreamingResolutionEnum maxTranscodeSize: StreamingResolutionEnum
"""Max streaming transcode size""" """Max streaming transcode size"""
maxStreamingTranscodeSize: StreamingResolutionEnum maxStreamingTranscodeSize: StreamingResolutionEnum
"""Force MKV as supported format"""
forceMkv: Boolean!
"""Force HEVC as a supported codec"""
forceHevc: Boolean!
"""Username""" """Username"""
username: String username: String
"""Password""" """Password"""
password: String password: String
"""Maximum session cookie age"""
maxSessionAge: Int
"""Name of the log file""" """Name of the log file"""
logFile: String logFile: String
"""Whether to also output to stderr""" """Whether to also output to stderr"""
@@ -32,6 +40,8 @@ input ConfigGeneralInput {
logAccess: Boolean! logAccess: Boolean!
"""Array of file regexp to exclude from Scan""" """Array of file regexp to exclude from Scan"""
excludes: [String!] excludes: [String!]
"""Scraper user agent string"""
scraperUserAgent: String
} }
type ConfigGeneralResult { type ConfigGeneralResult {
@@ -41,14 +51,22 @@ type ConfigGeneralResult {
databasePath: String! databasePath: String!
"""Path to generated files""" """Path to generated files"""
generatedPath: String! generatedPath: String!
"""Path to cache"""
cachePath: String!
"""Max generated transcode size""" """Max generated transcode size"""
maxTranscodeSize: StreamingResolutionEnum maxTranscodeSize: StreamingResolutionEnum
"""Max streaming transcode size""" """Max streaming transcode size"""
maxStreamingTranscodeSize: StreamingResolutionEnum maxStreamingTranscodeSize: StreamingResolutionEnum
"""Force MKV as supported format"""
forceMkv: Boolean!
"""Force HEVC as a supported codec"""
forceHevc: Boolean!
"""Username""" """Username"""
username: String! username: String!
"""Password""" """Password"""
password: String! password: String!
"""Maximum session cookie age"""
maxSessionAge: Int!
"""Name of the log file""" """Name of the log file"""
logFile: String logFile: String
"""Whether to also output to stderr""" """Whether to also output to stderr"""
@@ -59,6 +77,8 @@ type ConfigGeneralResult {
logAccess: Boolean! logAccess: Boolean!
"""Array of file regexp to exclude from Scan""" """Array of file regexp to exclude from Scan"""
excludes: [String!]! excludes: [String!]!
"""Scraper user agent string"""
scraperUserAgent: String
} }
input ConfigInterfaceInput { input ConfigInterfaceInput {
@@ -66,6 +86,8 @@ input ConfigInterfaceInput {
soundOnPreview: Boolean soundOnPreview: Boolean
"""Show title and tags in wall view""" """Show title and tags in wall view"""
wallShowTitle: Boolean wallShowTitle: Boolean
"""Wall playback type"""
wallPlayback: String
"""Maximum duration (in seconds) in which a scene video will loop in the scene player""" """Maximum duration (in seconds) in which a scene video will loop in the scene player"""
maximumLoopDuration: Int maximumLoopDuration: Int
"""If true, video will autostart on load in the scene player""" """If true, video will autostart on load in the scene player"""
@@ -75,6 +97,7 @@ input ConfigInterfaceInput {
"""Custom CSS""" """Custom CSS"""
css: String css: String
cssEnabled: Boolean cssEnabled: Boolean
language: String
} }
type ConfigInterfaceResult { type ConfigInterfaceResult {
@@ -82,6 +105,8 @@ type ConfigInterfaceResult {
soundOnPreview: Boolean soundOnPreview: Boolean
"""Show title and tags in wall view""" """Show title and tags in wall view"""
wallShowTitle: Boolean wallShowTitle: Boolean
"""Wall playback type"""
wallPlayback: String
"""Maximum duration (in seconds) in which a scene video will loop in the scene player""" """Maximum duration (in seconds) in which a scene video will loop in the scene player"""
maximumLoopDuration: Int maximumLoopDuration: Int
"""If true, video will autostart on load in the scene player""" """If true, video will autostart on load in the scene player"""
@@ -91,6 +116,8 @@ type ConfigInterfaceResult {
"""Custom CSS""" """Custom CSS"""
css: String css: String
cssEnabled: Boolean cssEnabled: Boolean
"""Interface language"""
language: String
} }
"""All configuration settings""" """All configuration settings"""
@@ -98,3 +125,10 @@ type ConfigResult {
general: ConfigGeneralResult! general: ConfigGeneralResult!
interface: ConfigInterfaceResult! interface: ConfigInterfaceResult!
} }
"""Directory structure of a path"""
type Directory {
path: String!
parent: String
directories: [String!]!
}

View File

@@ -46,6 +46,10 @@ input PerformerFilterType {
piercings: StringCriterionInput piercings: StringCriterionInput
"""Filter by aliases""" """Filter by aliases"""
aliases: StringCriterionInput aliases: StringCriterionInput
"""Filter by gender"""
gender: GenderCriterionInput
"""Filter to only include performers missing this property"""
is_missing: String
} }
input SceneMarkerFilterType { input SceneMarkerFilterType {
@@ -74,12 +78,19 @@ input SceneFilterType {
is_missing: String is_missing: String
"""Filter to only include scenes with this studio""" """Filter to only include scenes with this studio"""
studios: MultiCriterionInput studios: MultiCriterionInput
"""Filter to only include scenes with this movie"""
movies: MultiCriterionInput
"""Filter to only include scenes with these tags""" """Filter to only include scenes with these tags"""
tags: MultiCriterionInput tags: MultiCriterionInput
"""Filter to only include scenes with these performers""" """Filter to only include scenes with these performers"""
performers: MultiCriterionInput performers: MultiCriterionInput
} }
input MovieFilterType {
"""Filter to only include movies with this studio"""
studios: MultiCriterionInput
}
enum CriterionModifier { enum CriterionModifier {
"""=""" """="""
EQUALS, EQUALS,
@@ -113,3 +124,8 @@ input MultiCriterionInput {
value: [ID!] value: [ID!]
modifier: CriterionModifier! modifier: CriterionModifier!
} }
input GenderCriterionInput {
value: GenderEnum
modifier: CriterionModifier!
}

View File

@@ -1,8 +1,12 @@
input GenerateMetadataInput { input GenerateMetadataInput {
sprites: Boolean! sprites: Boolean!
previews: Boolean! previews: Boolean!
previewPreset: PreviewPreset
imagePreviews: Boolean!
markers: Boolean! markers: Boolean!
transcodes: Boolean! transcodes: Boolean!
"""gallery thumbnails for cache usage"""
thumbnails: Boolean!
} }
input ScanMetadataInput { input ScanMetadataInput {
@@ -23,3 +27,13 @@ type MetadataUpdateStatus {
status: String! status: String!
message: String! message: String!
} }
enum PreviewPreset {
"X264_ULTRAFAST"
ultrafast
"X264_VERYFAST"
veryfast
"X264_FAST"
fast
"X264_MEDIUM"
medium
"X264_SLOW"
slow
"X264_SLOWER"
slower
"X264_VERYSLOW"
veryslow
}

View File

@@ -0,0 +1,59 @@
# Movie entity and its CRUD input types. Comments use '#' (ignored by
# introspection); '"""..."""' strings are schema descriptions.
type Movie {
id: ID!
checksum: String!
name: String!
aliases: String
"""Duration in seconds"""
duration: Int
date: String
rating: Int
studio: Studio
director: String
synopsis: String
url: String
front_image_path: String # Resolver
back_image_path: String # Resolver
scene_count: Int # Resolver
}

# Input for movieCreate; name is the only required field.
input MovieCreateInput {
name: String!
aliases: String
"""Duration in seconds"""
duration: Int
date: String
rating: Int
studio_id: ID
director: String
synopsis: String
url: String
"""This should be base64 encoded"""
front_image: String
back_image: String
}

# Input for movieUpdate; every field except id is optional.
input MovieUpdateInput {
id: ID!
name: String
aliases: String
duration: Int
date: String
rating: Int
studio_id: ID
director: String
synopsis: String
url: String
"""This should be base64 encoded"""
front_image: String
back_image: String
}

# Input for movieDestroy.
input MovieDestroyInput {
id: ID!
}

# Result wrapper for findMovies: total match count plus one page.
type FindMoviesResultType {
count: Int!
movies: [Movie!]!
}

View File

@@ -1,8 +1,17 @@
enum GenderEnum {
MALE
FEMALE
TRANSGENDER_MALE
TRANSGENDER_FEMALE
INTERSEX
}
type Performer { type Performer {
id: ID! id: ID!
checksum: String! checksum: String!
name: String name: String
url: String url: String
gender: GenderEnum
twitter: String twitter: String
instagram: String instagram: String
birthdate: String birthdate: String
@@ -26,6 +35,7 @@ type Performer {
input PerformerCreateInput { input PerformerCreateInput {
name: String name: String
url: String url: String
gender: GenderEnum
birthdate: String birthdate: String
ethnicity: String ethnicity: String
country: String country: String
@@ -48,6 +58,7 @@ input PerformerUpdateInput {
id: ID! id: ID!
name: String name: String
url: String url: String
gender: GenderEnum
birthdate: String birthdate: String
ethnicity: String ethnicity: String
country: String country: String

View File

@@ -18,6 +18,11 @@ type ScenePathsType {
chapters_vtt: String # Resolver chapters_vtt: String # Resolver
} }
type SceneMovie {
movie: Movie!
scene_index: Int
}
type Scene { type Scene {
id: ID! id: ID!
checksum: String! checksum: String!
@@ -36,10 +41,16 @@ type Scene {
scene_markers: [SceneMarker!]! scene_markers: [SceneMarker!]!
gallery: Gallery gallery: Gallery
studio: Studio studio: Studio
movies: [SceneMovie!]!
tags: [Tag!]! tags: [Tag!]!
performers: [Performer!]! performers: [Performer!]!
} }
input SceneMovieInput {
movie_id: ID!
scene_index: Int
}
input SceneUpdateInput { input SceneUpdateInput {
clientMutationId: String clientMutationId: String
id: ID! id: ID!
@@ -51,11 +62,23 @@ input SceneUpdateInput {
studio_id: ID studio_id: ID
gallery_id: ID gallery_id: ID
performer_ids: [ID!] performer_ids: [ID!]
movies: [SceneMovieInput!]
tag_ids: [ID!] tag_ids: [ID!]
"""This should be base64 encoded""" """This should be base64 encoded"""
cover_image: String cover_image: String
} }
enum BulkUpdateIdMode {
SET
ADD
REMOVE
}
input BulkUpdateIds {
ids: [ID!]
mode: BulkUpdateIdMode!
}
input BulkSceneUpdateInput { input BulkSceneUpdateInput {
clientMutationId: String clientMutationId: String
ids: [ID!] ids: [ID!]
@@ -66,8 +89,8 @@ input BulkSceneUpdateInput {
rating: Int rating: Int
studio_id: ID studio_id: ID
gallery_id: ID gallery_id: ID
performer_ids: [ID!] performer_ids: BulkUpdateIds
tag_ids: [ID!] tag_ids: BulkUpdateIds
} }
input SceneDestroyInput { input SceneDestroyInput {
@@ -87,6 +110,11 @@ input SceneParserInput {
capitalizeTitle: Boolean capitalizeTitle: Boolean
} }
type SceneMovieID {
movie_id: ID!
scene_index: String
}
type SceneParserResult { type SceneParserResult {
scene: Scene! scene: Scene!
title: String title: String
@@ -97,6 +125,7 @@ type SceneParserResult {
studio_id: ID studio_id: ID
gallery_id: ID gallery_id: ID
performer_ids: [ID!] performer_ids: [ID!]
movies: [SceneMovieID!]
tag_ids: [ID!] tag_ids: [ID!]
} }

View File

@@ -0,0 +1,22 @@
"""A movie from a scraping operation..."""
type ScrapedMovie {
name: String
aliases: String
duration: String
date: String
rating: String
director: String
url: String
synopsis: String
}
input ScrapedMovieInput {
name: String
aliases: String
duration: String
date: String
rating: String
director: String
url: String
synopsis: String
}

View File

@@ -1,6 +1,7 @@
"""A performer from a scraping operation...""" """A performer from a scraping operation..."""
type ScrapedPerformer { type ScrapedPerformer {
name: String name: String
gender: String
url: String url: String
twitter: String twitter: String
instagram: String instagram: String
@@ -15,10 +16,14 @@ type ScrapedPerformer {
tattoos: String tattoos: String
piercings: String piercings: String
aliases: String aliases: String
"""This should be base64 encoded"""
image: String
} }
input ScrapedPerformerInput { input ScrapedPerformerInput {
name: String name: String
gender: String
url: String url: String
twitter: String twitter: String
instagram: String instagram: String
@@ -33,4 +38,6 @@ input ScrapedPerformerInput {
tattoos: String tattoos: String
piercings: String piercings: String
aliases: String aliases: String
# not including image for the input
} }

View File

@@ -27,6 +27,7 @@ type ScrapedScenePerformer {
"""Set if performer matched""" """Set if performer matched"""
id: ID id: ID
name: String! name: String!
gender: String
url: String url: String
twitter: String twitter: String
instagram: String instagram: String
@@ -43,6 +44,19 @@ type ScrapedScenePerformer {
aliases: String aliases: String
} }
type ScrapedSceneMovie {
"""Set if movie matched"""
id: ID
name: String!
aliases: String
duration: String
date: String
rating: String
director: String
synopsis: String
url: String
}
type ScrapedSceneStudio { type ScrapedSceneStudio {
"""Set if studio matched""" """Set if studio matched"""
id: ID id: ID
@@ -62,9 +76,13 @@ type ScrapedScene {
url: String url: String
date: String date: String
"""This should be base64 encoded"""
image: String
file: SceneFileType # Resolver file: SceneFileType # Resolver
studio: ScrapedSceneStudio studio: ScrapedSceneStudio
tags: [ScrapedSceneTag!] tags: [ScrapedSceneTag!]
performers: [ScrapedScenePerformer!] performers: [ScrapedScenePerformer!]
movies: [ScrapedSceneMovie!]
} }

View File

@@ -1,7 +1,9 @@
type StatsResultType { type StatsResultType {
scene_count: Int! scene_count: Int!
scene_size_count: String!
gallery_count: Int! gallery_count: Int!
performer_count: Int! performer_count: Int!
studio_count: Int! studio_count: Int!
movie_count: Int!
tag_count: Int! tag_count: Int!
} }

72
pkg/api/cache_thumbs.go Normal file
View File

@@ -0,0 +1,72 @@
package api
import (
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/paths"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
"io/ioutil"
)
// thumbBuffer holds one generated thumbnail image together with the cache
// location it should be written to.
type thumbBuffer struct {
	path string // full path of the thumbnail file within the cache
	dir  string // cache directory for the owning gallery
	data []byte // raw thumbnail image bytes
}

// newCacheThumb wraps thumbnail data and its target cache location in a
// thumbBuffer, ready to be queued on writeChan.
func newCacheThumb(dir string, path string, data []byte) *thumbBuffer {
	t := thumbBuffer{dir: dir, path: path, data: data}
	return &t
}
// writeChan carries generated thumbnails to the single writer goroutine so
// that cache-file writes are serialized.
var writeChan chan *thumbBuffer

// touchChan is not used anywhere in this file.
// NOTE(review): presumably reserved for cache-touch bookkeeping elsewhere in
// the package — confirm before removing.
var touchChan chan *string

// startThumbCache creates the (buffered) write channel and launches the
// single cache-writer goroutine.
func startThumbCache() { // TODO add extra wait, close chan code if/when stash gets a stop mode
	writeChan = make(chan *thumbBuffer, 20)
	go thumbnailCacheWriter()
}
// thumbnailCacheWriter drains writeChan and persists each queued thumbnail
// to disk. Running on a single goroutine serializes file writes, avoiding
// race conditions between concurrent requests for the same thumbnail.
func thumbnailCacheWriter() {
	for t := range writeChan {
		// skip files that another request already wrote
		if exists, _ := utils.FileExists(t.path); exists {
			continue
		}
		if err := utils.WriteFile(t.path, t.data); err != nil {
			logger.Errorf("Write error for thumbnail %s: %s ", t.path, err)
		}
	}
}
// cacheGthumb returns the thumbnail for the given gallery image at the given
// width. It serves the image from the on-disk cache when present; otherwise
// it generates the thumbnail, queues it for an asynchronous cache write, and
// returns the freshly generated data.
func cacheGthumb(gallery *models.Gallery, index int, width int) []byte {
	thumbPath := paths.GetGthumbPath(gallery.Checksum, index, width)

	if exists, _ := utils.FileExists(thumbPath); exists {
		// cache hit: serve the stored file
		content, err := ioutil.ReadFile(thumbPath)
		if err == nil {
			return content
		}
		// unreadable cache file: log and fall through to regenerate
		logger.Errorf("Read Error for file %s : %s", thumbPath, err)
	}

	// cache miss: generate now and queue the write
	data := gallery.GetThumbnail(index, width)
	thumbDir := paths.GetGthumbDir(gallery.Checksum)
	writeChan <- newCacheThumb(thumbDir, thumbPath, data)
	return data
}
// CreateGthumbs generates (and caches) a thumbnail at the default width for
// every image in the given gallery.
func CreateGthumbs(gallery *models.Gallery) {
	for i, n := 0, gallery.ImageCount(); i < n; i++ {
		cacheGthumb(gallery, i, models.DefaultGthumbWidth)
	}
}

View File

@@ -2,6 +2,7 @@ package api
import ( import (
"encoding/json" "encoding/json"
"errors"
"fmt" "fmt"
"io/ioutil" "io/ioutil"
"net/http" "net/http"
@@ -18,6 +19,10 @@ const apiTags string = "https://api.github.com/repos/stashapp/stash/tags"
const apiAcceptHeader string = "application/vnd.github.v3+json" const apiAcceptHeader string = "application/vnd.github.v3+json"
const developmentTag string = "latest_develop" const developmentTag string = "latest_develop"
// ErrNoVersion indicates that no version information has been embedded in the
// stash binary
var ErrNoVersion = errors.New("no stash version")
var stashReleases = func() map[string]string { var stashReleases = func() map[string]string {
return map[string]string{ return map[string]string{
"windows/amd64": "stash-win.exe", "windows/amd64": "stash-win.exe",
@@ -140,7 +145,7 @@ func GetLatestVersion(shortHash bool) (latestVersion string, latestRelease strin
version, _, _ := GetVersion() version, _, _ := GetVersion()
if version == "" { if version == "" {
return "", "", fmt.Errorf("Stash doesn't have a version. Version check not supported.") return "", "", ErrNoVersion
} }
// if the version is suffixed with -x-xxxx, then we are running a development build // if the version is suffixed with -x-xxxx, then we are running a development build

View File

@@ -9,4 +9,6 @@ const (
performerKey key = 1 performerKey key = 1
sceneKey key = 2 sceneKey key = 2
studioKey key = 3 studioKey key = 3
movieKey key = 4
ContextUser key = 5
) )

View File

@@ -2,18 +2,31 @@ package api
import ( import (
"math/rand" "math/rand"
"strings"
"github.com/gobuffalo/packr/v2" "github.com/gobuffalo/packr/v2"
) )
var performerBox *packr.Box var performerBox *packr.Box
var performerBoxMale *packr.Box
func initialiseImages() { func initialiseImages() {
performerBox = packr.New("Performer Box", "../../static/performer") performerBox = packr.New("Performer Box", "../../static/performer")
performerBoxMale = packr.New("Male Performer Box", "../../static/performer_male")
} }
func getRandomPerformerImage() ([]byte, error) { func getRandomPerformerImage(gender string) ([]byte, error) {
imageFiles := performerBox.List() var box *packr.Box
index := rand.Intn(len(imageFiles)) switch strings.ToUpper(gender) {
return performerBox.Find(imageFiles[index]) case "FEMALE":
box = performerBox
case "MALE":
box = performerBoxMale
default:
box = performerBox
}
imageFiles := box.List()
index := rand.Intn(len(imageFiles))
return box.Find(imageFiles[index])
} }

92
pkg/api/migrate.go Normal file
View File

@@ -0,0 +1,92 @@
package api
import (
"fmt"
"html/template"
"net/http"
"os"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/logger"
)
// migrateData is the template payload rendered by the migration
// confirmation page.
type migrateData struct {
	ExistingVersion uint   // schema version currently stored in the database
	MigrateVersion  uint   // schema version this build of the application expects
	BackupPath      string // default path for the pre-migration database backup
}

// getMigrateData builds the migration page payload from the current
// database state.
func getMigrateData() migrateData {
	return migrateData{
		ExistingVersion: database.Version(),
		MigrateVersion:  database.AppSchemaVersion(),
		BackupPath:      database.DatabaseBackupPath(),
	}
}
// getMigrateHandler renders the migration confirmation page. If the database
// is already at the expected schema version it redirects to the root page
// instead.
func getMigrateHandler(w http.ResponseWriter, r *http.Request) {
	if !database.NeedsMigration() {
		http.Redirect(w, r, "/", http.StatusMovedPermanently)
		return
	}

	// NOTE(review): a Find error would yield an empty template; presumably
	// migrate.html is always embedded — confirm.
	data, _ := setupUIBox.Find("migrate.html")
	templ, err := template.New("Migrate").Parse(string(data))
	if err != nil {
		http.Error(w, fmt.Sprintf("error: %s", err), http.StatusInternalServerError)
		return
	}

	err = templ.Execute(w, getMigrateData())
	if err != nil {
		http.Error(w, fmt.Sprintf("error: %s", err), http.StatusInternalServerError)
	}
}
// doMigrateHandler performs the schema migration requested from the migrate
// page. It always backs the database up first; if the migration fails it
// restores the backup and reports the error. On success it redirects to the
// root page, deleting the backup when the user did not request a custom
// backup path.
func doMigrateHandler(w http.ResponseWriter, r *http.Request) {
	err := r.ParseForm()
	if err != nil {
		http.Error(w, fmt.Sprintf("error: %s", err), http.StatusInternalServerError)
		// fix: previously fell through and kept processing with an
		// unparsed form after reporting the error
		return
	}

	formBackupPath := r.Form.Get("backuppath")

	// always backup so that we can roll back to the previous version if
	// migration fails
	backupPath := formBackupPath
	if formBackupPath == "" {
		backupPath = database.DatabaseBackupPath()
	}

	// perform database backup
	if err = database.Backup(backupPath); err != nil {
		http.Error(w, fmt.Sprintf("error backing up database: %s", err), http.StatusInternalServerError)
		return
	}

	err = database.RunMigrations()
	if err != nil {
		errStr := fmt.Sprintf("error performing migration: %s", err)

		// roll back to the backed up version
		restoreErr := database.RestoreFromBackup(backupPath)
		if restoreErr != nil {
			errStr = fmt.Sprintf("ERROR: unable to restore database from backup after migration failure: %s\n%s", restoreErr.Error(), errStr)
		} else {
			errStr = "An error occurred migrating the database to the latest schema version. The backup database file was automatically renamed to restore the database.\n" + errStr
		}

		http.Error(w, errStr, http.StatusInternalServerError)
		return
	}

	// if no backup path was provided, then delete the created backup
	if formBackupPath == "" {
		err = os.Remove(backupPath)
		if err != nil {
			logger.Warnf("error removing unwanted database backup (%s): %s", backupPath, err.Error())
		}
	}

	http.Redirect(w, r, "/", http.StatusMovedPermanently)
}

View File

@@ -33,6 +33,9 @@ func (r *Resolver) SceneMarker() models.SceneMarkerResolver {
func (r *Resolver) Studio() models.StudioResolver { func (r *Resolver) Studio() models.StudioResolver {
return &studioResolver{r} return &studioResolver{r}
} }
func (r *Resolver) Movie() models.MovieResolver {
return &movieResolver{r}
}
func (r *Resolver) Subscription() models.SubscriptionResolver { func (r *Resolver) Subscription() models.SubscriptionResolver {
return &subscriptionResolver{r} return &subscriptionResolver{r}
} }
@@ -49,6 +52,7 @@ type performerResolver struct{ *Resolver }
type sceneResolver struct{ *Resolver } type sceneResolver struct{ *Resolver }
type sceneMarkerResolver struct{ *Resolver } type sceneMarkerResolver struct{ *Resolver }
type studioResolver struct{ *Resolver } type studioResolver struct{ *Resolver }
type movieResolver struct{ *Resolver }
type tagResolver struct{ *Resolver } type tagResolver struct{ *Resolver }
func (r *queryResolver) MarkerWall(ctx context.Context, q *string) ([]*models.SceneMarker, error) { func (r *queryResolver) MarkerWall(ctx context.Context, q *string) ([]*models.SceneMarker, error) {
@@ -89,19 +93,24 @@ func (r *queryResolver) ValidGalleriesForScene(ctx context.Context, scene_id *st
func (r *queryResolver) Stats(ctx context.Context) (*models.StatsResultType, error) { func (r *queryResolver) Stats(ctx context.Context) (*models.StatsResultType, error) {
scenesQB := models.NewSceneQueryBuilder() scenesQB := models.NewSceneQueryBuilder()
scenesCount, _ := scenesQB.Count() scenesCount, _ := scenesQB.Count()
scenesSizeCount, _ := scenesQB.SizeCount()
galleryQB := models.NewGalleryQueryBuilder() galleryQB := models.NewGalleryQueryBuilder()
galleryCount, _ := galleryQB.Count() galleryCount, _ := galleryQB.Count()
performersQB := models.NewPerformerQueryBuilder() performersQB := models.NewPerformerQueryBuilder()
performersCount, _ := performersQB.Count() performersCount, _ := performersQB.Count()
studiosQB := models.NewStudioQueryBuilder() studiosQB := models.NewStudioQueryBuilder()
studiosCount, _ := studiosQB.Count() studiosCount, _ := studiosQB.Count()
moviesQB := models.NewMovieQueryBuilder()
moviesCount, _ := moviesQB.Count()
tagsQB := models.NewTagQueryBuilder() tagsQB := models.NewTagQueryBuilder()
tagsCount, _ := tagsQB.Count() tagsCount, _ := tagsQB.Count()
return &models.StatsResultType{ return &models.StatsResultType{
SceneCount: scenesCount, SceneCount: scenesCount,
SceneSizeCount: scenesSizeCount,
GalleryCount: galleryCount, GalleryCount: galleryCount,
PerformerCount: performersCount, PerformerCount: performersCount,
StudioCount: studiosCount, StudioCount: studiosCount,
MovieCount: moviesCount,
TagCount: tagsCount, TagCount: tagsCount,
}, nil }, nil
} }

View File

@@ -0,0 +1,95 @@
package api
import (
"context"
"github.com/stashapp/stash/pkg/api/urlbuilders"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
// Name resolves the movie's name, returning the empty string when unset.
func (r *movieResolver) Name(ctx context.Context, obj *models.Movie) (string, error) {
	if !obj.Name.Valid {
		return "", nil
	}
	return obj.Name.String, nil
}

// URL resolves the movie's URL, or nil when unset.
func (r *movieResolver) URL(ctx context.Context, obj *models.Movie) (*string, error) {
	if !obj.URL.Valid {
		return nil, nil
	}
	return &obj.URL.String, nil
}

// Aliases resolves the movie's aliases, or nil when unset.
func (r *movieResolver) Aliases(ctx context.Context, obj *models.Movie) (*string, error) {
	if !obj.Aliases.Valid {
		return nil, nil
	}
	return &obj.Aliases.String, nil
}
// Duration resolves the movie's duration as an int, or nil when unset.
func (r *movieResolver) Duration(ctx context.Context, obj *models.Movie) (*int, error) {
	if !obj.Duration.Valid {
		return nil, nil
	}
	// local was previously (mis)named "rating"
	duration := int(obj.Duration.Int64)
	return &duration, nil
}
// Date resolves the movie's date as a YMD string, or nil when unset.
func (r *movieResolver) Date(ctx context.Context, obj *models.Movie) (*string, error) {
	if !obj.Date.Valid {
		return nil, nil
	}
	ymd := utils.GetYMDFromDatabaseDate(obj.Date.String)
	return &ymd, nil
}

// Rating resolves the movie's rating as an int, or nil when unset.
func (r *movieResolver) Rating(ctx context.Context, obj *models.Movie) (*int, error) {
	if !obj.Rating.Valid {
		return nil, nil
	}
	rating := int(obj.Rating.Int64)
	return &rating, nil
}
// Studio resolves the movie's studio, or nil when the movie has none.
func (r *movieResolver) Studio(ctx context.Context, obj *models.Movie) (*models.Studio, error) {
	if !obj.StudioID.Valid {
		return nil, nil
	}
	qb := models.NewStudioQueryBuilder()
	return qb.Find(int(obj.StudioID.Int64), nil)
}
// Director resolves the movie's director, or nil when unset.
func (r *movieResolver) Director(ctx context.Context, obj *models.Movie) (*string, error) {
	if !obj.Director.Valid {
		return nil, nil
	}
	return &obj.Director.String, nil
}

// Synopsis resolves the movie's synopsis, or nil when unset.
func (r *movieResolver) Synopsis(ctx context.Context, obj *models.Movie) (*string, error) {
	if !obj.Synopsis.Valid {
		return nil, nil
	}
	return &obj.Synopsis.String, nil
}
// FrontImagePath builds the URL for the movie's front image using the base
// URL carried on the request context.
func (r *movieResolver) FrontImagePath(ctx context.Context, obj *models.Movie) (*string, error) {
	baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
	builder := urlbuilders.NewMovieURLBuilder(baseURL, obj.ID)
	url := builder.GetMovieFrontImageURL()
	return &url, nil
}

// BackImagePath builds the URL for the movie's back image using the base
// URL carried on the request context.
func (r *movieResolver) BackImagePath(ctx context.Context, obj *models.Movie) (*string, error) {
	baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
	builder := urlbuilders.NewMovieURLBuilder(baseURL, obj.ID)
	url := builder.GetMovieBackImageURL()
	return &url, nil
}
// SceneCount resolves the number of scenes attached to this movie.
func (r *movieResolver) SceneCount(ctx context.Context, obj *models.Movie) (*int, error) {
	qb := models.NewSceneQueryBuilder()
	res, err := qb.CountByMovieID(obj.ID)
	if err != nil {
		// fix: don't return a pointer to a meaningless count alongside a
		// non-nil error
		return nil, err
	}
	return &res, nil
}

View File

@@ -2,6 +2,7 @@ package api
import ( import (
"context" "context"
"github.com/stashapp/stash/pkg/api/urlbuilders" "github.com/stashapp/stash/pkg/api/urlbuilders"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
) )
@@ -20,6 +21,19 @@ func (r *performerResolver) URL(ctx context.Context, obj *models.Performer) (*st
return nil, nil return nil, nil
} }
func (r *performerResolver) Gender(ctx context.Context, obj *models.Performer) (*models.GenderEnum, error) {
var ret models.GenderEnum
if obj.Gender.Valid {
ret = models.GenderEnum(obj.Gender.String)
if ret.IsValid() {
return &ret, nil
}
}
return nil, nil
}
func (r *performerResolver) Twitter(ctx context.Context, obj *models.Performer) (*string, error) { func (r *performerResolver) Twitter(ctx context.Context, obj *models.Performer) (*string, error) {
if obj.Twitter.Valid { if obj.Twitter.Valid {
return &obj.Twitter.String, nil return &obj.Twitter.String, nil

View File

@@ -82,7 +82,9 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S
} }
func (r *sceneResolver) IsStreamable(ctx context.Context, obj *models.Scene) (bool, error) { func (r *sceneResolver) IsStreamable(ctx context.Context, obj *models.Scene) (bool, error) {
return manager.IsStreamable(obj) // ignore error
ret, _ := manager.IsStreamable(obj)
return ret, nil
} }
func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) ([]*models.SceneMarker, error) { func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) ([]*models.SceneMarker, error) {
@@ -100,6 +102,38 @@ func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (*models.
return qb.FindBySceneID(obj.ID) return qb.FindBySceneID(obj.ID)
} }
func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) ([]*models.SceneMovie, error) {
joinQB := models.NewJoinsQueryBuilder()
qb := models.NewMovieQueryBuilder()
sceneMovies, err := joinQB.GetSceneMovies(obj.ID, nil)
if err != nil {
return nil, err
}
var ret []*models.SceneMovie
for _, sm := range sceneMovies {
movie, err := qb.Find(sm.MovieID, nil)
if err != nil {
return nil, err
}
sceneIdx := sm.SceneIndex
sceneMovie := &models.SceneMovie{
Movie: movie,
}
if sceneIdx.Valid {
var idx int
idx = int(sceneIdx.Int64)
sceneMovie.SceneIndex = &idx
}
ret = append(ret, sceneMovie)
}
return ret, nil
}
func (r *sceneResolver) Tags(ctx context.Context, obj *models.Scene) ([]*models.Tag, error) { func (r *sceneResolver) Tags(ctx context.Context, obj *models.Scene) ([]*models.Tag, error) {
qb := models.NewTagQueryBuilder() qb := models.NewTagQueryBuilder()
return qb.FindBySceneID(obj.ID, nil) return qb.FindBySceneID(obj.ID, nil)

View File

@@ -38,6 +38,13 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
config.Set(config.Generated, input.GeneratedPath) config.Set(config.Generated, input.GeneratedPath)
} }
if input.CachePath != nil {
if err := utils.EnsureDir(*input.CachePath); err != nil {
return makeConfigGeneralResult(), err
}
config.Set(config.Cache, input.CachePath)
}
if input.MaxTranscodeSize != nil { if input.MaxTranscodeSize != nil {
config.Set(config.MaxTranscodeSize, input.MaxTranscodeSize.String()) config.Set(config.MaxTranscodeSize, input.MaxTranscodeSize.String())
} }
@@ -45,6 +52,8 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
if input.MaxStreamingTranscodeSize != nil { if input.MaxStreamingTranscodeSize != nil {
config.Set(config.MaxStreamingTranscodeSize, input.MaxStreamingTranscodeSize.String()) config.Set(config.MaxStreamingTranscodeSize, input.MaxStreamingTranscodeSize.String())
} }
config.Set(config.ForceMKV, input.ForceMkv)
config.Set(config.ForceHEVC, input.ForceHevc)
if input.Username != nil { if input.Username != nil {
config.Set(config.Username, input.Username) config.Set(config.Username, input.Username)
@@ -60,6 +69,10 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
} }
} }
if input.MaxSessionAge != nil {
config.Set(config.MaxSessionAge, *input.MaxSessionAge)
}
if input.LogFile != nil { if input.LogFile != nil {
config.Set(config.LogFile, input.LogFile) config.Set(config.LogFile, input.LogFile)
} }
@@ -76,6 +89,10 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
config.Set(config.Exclude, input.Excludes) config.Set(config.Exclude, input.Excludes)
} }
if input.ScraperUserAgent != nil {
config.Set(config.ScraperUserAgent, input.ScraperUserAgent)
}
if err := config.Write(); err != nil { if err := config.Write(); err != nil {
return makeConfigGeneralResult(), err return makeConfigGeneralResult(), err
} }
@@ -94,6 +111,10 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.
config.Set(config.WallShowTitle, *input.WallShowTitle) config.Set(config.WallShowTitle, *input.WallShowTitle)
} }
if input.WallPlayback != nil {
config.Set(config.WallPlayback, *input.WallPlayback)
}
if input.MaximumLoopDuration != nil { if input.MaximumLoopDuration != nil {
config.Set(config.MaximumLoopDuration, *input.MaximumLoopDuration) config.Set(config.MaximumLoopDuration, *input.MaximumLoopDuration)
} }
@@ -106,6 +127,10 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.
config.Set(config.ShowStudioAsText, *input.ShowStudioAsText) config.Set(config.ShowStudioAsText, *input.ShowStudioAsText)
} }
if input.Language != nil {
config.Set(config.Language, *input.Language)
}
css := "" css := ""
if input.CSS != nil { if input.CSS != nil {

View File

@@ -0,0 +1,53 @@
package api
import (
"context"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
)
// MetadataScan starts a library scan via the singleton manager.
// NOTE(review): the handler returns the placeholder "todo" immediately, so
// the manager call presumably runs the job in the background — confirm.
func (r *mutationResolver) MetadataScan(ctx context.Context, input models.ScanMetadataInput) (string, error) {
	manager.GetInstance().Scan(input.UseFileMetadata)
	return "todo", nil
}

// MetadataImport starts a metadata import job via the manager.
func (r *mutationResolver) MetadataImport(ctx context.Context) (string, error) {
	manager.GetInstance().Import()
	return "todo", nil
}

// MetadataExport starts a metadata export job via the manager.
func (r *mutationResolver) MetadataExport(ctx context.Context) (string, error) {
	manager.GetInstance().Export()
	return "todo", nil
}

// MetadataGenerate starts generation of the requested artifact types
// (sprites, previews, markers, transcodes, thumbnails) via the manager.
func (r *mutationResolver) MetadataGenerate(ctx context.Context, input models.GenerateMetadataInput) (string, error) {
	manager.GetInstance().Generate(input.Sprites, input.Previews, input.PreviewPreset, input.ImagePreviews, input.Markers, input.Transcodes, input.Thumbnails)
	return "todo", nil
}

// MetadataAutoTag starts an auto-tag job for the selected performers,
// studios and tags via the manager.
func (r *mutationResolver) MetadataAutoTag(ctx context.Context, input models.AutoTagMetadataInput) (string, error) {
	manager.GetInstance().AutoTag(input.Performers, input.Studios, input.Tags)
	return "todo", nil
}

// MetadataClean starts a clean job via the manager.
func (r *mutationResolver) MetadataClean(ctx context.Context) (string, error) {
	manager.GetInstance().Clean()
	return "todo", nil
}
// JobStatus reports the progress and state of the manager's current job.
// The Message field is always empty in this implementation.
func (r *mutationResolver) JobStatus(ctx context.Context) (*models.MetadataUpdateStatus, error) {
	status := manager.GetInstance().Status
	ret := models.MetadataUpdateStatus{
		Progress: status.Progress,
		Status:   status.Status.String(),
		Message:  "",
	}
	return &ret, nil
}

// StopJob requests that the manager's current job stop, returning whether
// the stop was accepted.
func (r *mutationResolver) StopJob(ctx context.Context) (bool, error) {
	return manager.GetInstance().Status.Stop(), nil
}

View File

@@ -0,0 +1,199 @@
package api
import (
"context"
"database/sql"
"strconv"
"time"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
// MovieCreate creates a new movie from the GraphQL input inside a database
// transaction. Missing front/back images fall back to the default movie
// image, and the movie's checksum is derived from its name.
func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCreateInput) (*models.Movie, error) {
	// generate checksum from movie name rather than image
	checksum := utils.MD5FromString(input.Name)

	var frontimageData []byte
	var backimageData []byte
	var err error

	// substitute the default image when either side was not supplied
	if input.FrontImage == nil {
		input.FrontImage = &models.DefaultMovieImage
	}
	if input.BackImage == nil {
		input.BackImage = &models.DefaultMovieImage
	}

	// Process the base 64 encoded image string
	_, frontimageData, err = utils.ProcessBase64Image(*input.FrontImage)
	if err != nil {
		return nil, err
	}
	// Process the base 64 encoded image string
	_, backimageData, err = utils.ProcessBase64Image(*input.BackImage)
	if err != nil {
		return nil, err
	}

	// Populate a new movie from the input
	currentTime := time.Now()
	newMovie := models.Movie{
		BackImage:  backimageData,
		FrontImage: frontimageData,
		Checksum:   checksum,
		Name:       sql.NullString{String: input.Name, Valid: true},
		CreatedAt:  models.SQLiteTimestamp{Timestamp: currentTime},
		UpdatedAt:  models.SQLiteTimestamp{Timestamp: currentTime},
	}

	// copy each optional input field into the corresponding nullable column
	if input.Aliases != nil {
		newMovie.Aliases = sql.NullString{String: *input.Aliases, Valid: true}
	}
	if input.Duration != nil {
		duration := int64(*input.Duration)
		newMovie.Duration = sql.NullInt64{Int64: duration, Valid: true}
	}
	if input.Date != nil {
		newMovie.Date = models.SQLiteDate{String: *input.Date, Valid: true}
	}
	if input.Rating != nil {
		rating := int64(*input.Rating)
		newMovie.Rating = sql.NullInt64{Int64: rating, Valid: true}
	}
	if input.StudioID != nil {
		// NOTE(review): a non-numeric StudioID silently becomes 0 here —
		// confirm IDs are validated upstream
		studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64)
		newMovie.StudioID = sql.NullInt64{Int64: studioID, Valid: true}
	}
	if input.Director != nil {
		newMovie.Director = sql.NullString{String: *input.Director, Valid: true}
	}
	if input.Synopsis != nil {
		newMovie.Synopsis = sql.NullString{String: *input.Synopsis, Valid: true}
	}
	if input.URL != nil {
		newMovie.URL = sql.NullString{String: *input.URL, Valid: true}
	}

	// Start the transaction and save the movie
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewMovieQueryBuilder()
	movie, err := qb.Create(newMovie, tx)
	if err != nil {
		_ = tx.Rollback()
		return nil, err
	}

	// Commit
	if err := tx.Commit(); err != nil {
		return nil, err
	}

	return movie, nil
}
// MovieUpdate applies a partial update to an existing movie inside a
// database transaction. Only fields present in the input are changed,
// except Rating and StudioID which are explicitly cleared when absent
// (they are nullable in the schema).
func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUpdateInput) (*models.Movie, error) {
	// Populate movie from the input
	// NOTE(review): an unparsable ID silently becomes 0 — confirm IDs are
	// validated upstream
	movieID, _ := strconv.Atoi(input.ID)

	updatedMovie := models.MoviePartial{
		ID:        movieID,
		UpdatedAt: &models.SQLiteTimestamp{Timestamp: time.Now()},
	}

	if input.FrontImage != nil {
		_, frontimageData, err := utils.ProcessBase64Image(*input.FrontImage)
		if err != nil {
			return nil, err
		}
		updatedMovie.FrontImage = &frontimageData
	}

	if input.BackImage != nil {
		_, backimageData, err := utils.ProcessBase64Image(*input.BackImage)
		if err != nil {
			return nil, err
		}
		updatedMovie.BackImage = &backimageData
	}

	if input.Name != nil {
		// generate checksum from movie name rather than image
		checksum := utils.MD5FromString(*input.Name)
		updatedMovie.Name = &sql.NullString{String: *input.Name, Valid: true}
		updatedMovie.Checksum = &checksum
	}

	if input.Aliases != nil {
		updatedMovie.Aliases = &sql.NullString{String: *input.Aliases, Valid: true}
	}
	if input.Duration != nil {
		duration := int64(*input.Duration)
		updatedMovie.Duration = &sql.NullInt64{Int64: duration, Valid: true}
	}
	if input.Date != nil {
		updatedMovie.Date = &models.SQLiteDate{String: *input.Date, Valid: true}
	}
	if input.Rating != nil {
		rating := int64(*input.Rating)
		updatedMovie.Rating = &sql.NullInt64{Int64: rating, Valid: true}
	} else {
		// rating must be nullable
		updatedMovie.Rating = &sql.NullInt64{Valid: false}
	}
	if input.StudioID != nil {
		studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64)
		updatedMovie.StudioID = &sql.NullInt64{Int64: studioID, Valid: true}
	} else {
		// studio must be nullable
		updatedMovie.StudioID = &sql.NullInt64{Valid: false}
	}
	if input.Director != nil {
		updatedMovie.Director = &sql.NullString{String: *input.Director, Valid: true}
	}
	if input.Synopsis != nil {
		updatedMovie.Synopsis = &sql.NullString{String: *input.Synopsis, Valid: true}
	}
	if input.URL != nil {
		updatedMovie.URL = &sql.NullString{String: *input.URL, Valid: true}
	}

	// Start the transaction and save the movie
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewMovieQueryBuilder()
	movie, err := qb.Update(updatedMovie, tx)
	if err != nil {
		_ = tx.Rollback()
		return nil, err
	}

	// Commit
	if err := tx.Commit(); err != nil {
		return nil, err
	}

	return movie, nil
}
// MovieDestroy deletes the movie with the given ID inside a database
// transaction, rolling back on failure.
func (r *mutationResolver) MovieDestroy(ctx context.Context, input models.MovieDestroyInput) (bool, error) {
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewMovieQueryBuilder()

	if err := qb.Destroy(input.ID, tx); err != nil {
		_ = tx.Rollback()
		return false, err
	}

	if err := tx.Commit(); err != nil {
		return false, err
	}

	return true, nil
}

View File

@@ -19,7 +19,11 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
var err error var err error
if input.Image == nil { if input.Image == nil {
imageData, err = getRandomPerformerImage() gender := ""
if input.Gender != nil {
gender = input.Gender.String()
}
imageData, err = getRandomPerformerImage(gender)
} else { } else {
_, imageData, err = utils.ProcessBase64Image(*input.Image) _, imageData, err = utils.ProcessBase64Image(*input.Image)
} }
@@ -42,6 +46,9 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
if input.URL != nil { if input.URL != nil {
newPerformer.URL = sql.NullString{String: *input.URL, Valid: true} newPerformer.URL = sql.NullString{String: *input.URL, Valid: true}
} }
if input.Gender != nil {
newPerformer.Gender = sql.NullString{String: input.Gender.String(), Valid: true}
}
if input.Birthdate != nil { if input.Birthdate != nil {
newPerformer.Birthdate = models.SQLiteDate{String: *input.Birthdate, Valid: true} newPerformer.Birthdate = models.SQLiteDate{String: *input.Birthdate, Valid: true}
} }
@@ -128,6 +135,9 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
if input.URL != nil { if input.URL != nil {
updatedPerformer.URL = sql.NullString{String: *input.URL, Valid: true} updatedPerformer.URL = sql.NullString{String: *input.URL, Valid: true}
} }
if input.Gender != nil {
updatedPerformer.Gender = sql.NullString{String: input.Gender.String(), Valid: true}
}
if input.Birthdate != nil { if input.Birthdate != nil {
updatedPerformer.Birthdate = models.SQLiteDate{String: *input.Birthdate, Valid: true} updatedPerformer.Birthdate = models.SQLiteDate{String: *input.Birthdate, Valid: true}
} }

View File

@@ -147,6 +147,31 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, tx *sqlx.T
return nil, err return nil, err
} }
// Save the movies
var movieJoins []models.MoviesScenes
for _, movie := range input.Movies {
movieID, _ := strconv.Atoi(movie.MovieID)
movieJoin := models.MoviesScenes{
MovieID: movieID,
SceneID: sceneID,
}
if movie.SceneIndex != nil {
movieJoin.SceneIndex = sql.NullInt64{
Int64: int64(*movie.SceneIndex),
Valid: true,
}
}
movieJoins = append(movieJoins, movieJoin)
}
if err := jqb.UpdateMoviesScenes(sceneID, movieJoins, tx); err != nil {
return nil, err
}
// Save the tags // Save the tags
var tagJoins []models.ScenesTags var tagJoins []models.ScenesTags
for _, tid := range input.TagIds { for _, tid := range input.TagIds {
@@ -247,9 +272,14 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.Bul
// Save the performers // Save the performers
if wasFieldIncluded(ctx, "performer_ids") { if wasFieldIncluded(ctx, "performer_ids") {
performerIDs, err := adjustScenePerformerIDs(tx, sceneID, *input.PerformerIds)
if err != nil {
_ = tx.Rollback()
return nil, err
}
var performerJoins []models.PerformersScenes var performerJoins []models.PerformersScenes
for _, pid := range input.PerformerIds { for _, performerID := range performerIDs {
performerID, _ := strconv.Atoi(pid)
performerJoin := models.PerformersScenes{ performerJoin := models.PerformersScenes{
PerformerID: performerID, PerformerID: performerID,
SceneID: sceneID, SceneID: sceneID,
@@ -264,9 +294,14 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.Bul
// Save the tags // Save the tags
if wasFieldIncluded(ctx, "tag_ids") { if wasFieldIncluded(ctx, "tag_ids") {
tagIDs, err := adjustSceneTagIDs(tx, sceneID, *input.TagIds)
if err != nil {
_ = tx.Rollback()
return nil, err
}
var tagJoins []models.ScenesTags var tagJoins []models.ScenesTags
for _, tid := range input.TagIds { for _, tagID := range tagIDs {
tagID, _ := strconv.Atoi(tid)
tagJoin := models.ScenesTags{ tagJoin := models.ScenesTags{
SceneID: sceneID, SceneID: sceneID,
TagID: tagID, TagID: tagID,
@@ -288,6 +323,72 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.Bul
return ret, nil return ret, nil
} }
func adjustIDs(existingIDs []int, updateIDs models.BulkUpdateIds) []int {
for _, idStr := range updateIDs.Ids {
id, _ := strconv.Atoi(idStr)
// look for the id in the list
foundExisting := false
for idx, existingID := range existingIDs {
if existingID == id {
if updateIDs.Mode == models.BulkUpdateIDModeRemove {
// remove from the list
existingIDs = append(existingIDs[:idx], existingIDs[idx+1:]...)
}
foundExisting = true
break
}
}
if !foundExisting && updateIDs.Mode != models.BulkUpdateIDModeRemove {
existingIDs = append(existingIDs, id)
}
}
return existingIDs
}
func adjustScenePerformerIDs(tx *sqlx.Tx, sceneID int, ids models.BulkUpdateIds) ([]int, error) {
var ret []int
jqb := models.NewJoinsQueryBuilder()
if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
// adding to the joins
performerJoins, err := jqb.GetScenePerformers(sceneID, tx)
if err != nil {
return nil, err
}
for _, join := range performerJoins {
ret = append(ret, join.PerformerID)
}
}
return adjustIDs(ret, ids), nil
}
func adjustSceneTagIDs(tx *sqlx.Tx, sceneID int, ids models.BulkUpdateIds) ([]int, error) {
var ret []int
jqb := models.NewJoinsQueryBuilder()
if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
// adding to the joins
tagJoins, err := jqb.GetSceneTags(sceneID, tx)
if err != nil {
return nil, err
}
for _, join := range tagJoins {
ret = append(ret, join.TagID)
}
}
return adjustIDs(ret, ids), nil
}
func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneDestroyInput) (bool, error) { func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneDestroyInput) (bool, error) {
qb := models.NewSceneQueryBuilder() qb := models.NewSceneQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil) tx := database.DB.MustBeginTx(ctx, nil)
@@ -356,6 +457,14 @@ func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input models.S
func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (bool, error) { func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (bool, error) {
qb := models.NewSceneMarkerQueryBuilder() qb := models.NewSceneMarkerQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil) tx := database.DB.MustBeginTx(ctx, nil)
markerID, _ := strconv.Atoi(id)
marker, err := qb.Find(markerID)
if err != nil {
return false, err
}
if err := qb.Destroy(id, tx); err != nil { if err := qb.Destroy(id, tx); err != nil {
_ = tx.Rollback() _ = tx.Rollback()
return false, err return false, err
@@ -363,6 +472,16 @@ func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (b
if err := tx.Commit(); err != nil { if err := tx.Commit(); err != nil {
return false, err return false, err
} }
// delete the preview for the marker
sqb := models.NewSceneQueryBuilder()
scene, _ := sqb.Find(int(marker.SceneID.Int64))
if scene != nil {
seconds := int(marker.Seconds)
manager.DeleteSceneMarkerFiles(scene, seconds)
}
return true, nil return true, nil
} }
@@ -372,14 +491,19 @@ func changeMarker(ctx context.Context, changeType int, changedMarker models.Scen
qb := models.NewSceneMarkerQueryBuilder() qb := models.NewSceneMarkerQueryBuilder()
jqb := models.NewJoinsQueryBuilder() jqb := models.NewJoinsQueryBuilder()
var existingMarker *models.SceneMarker
var sceneMarker *models.SceneMarker var sceneMarker *models.SceneMarker
var err error var err error
switch changeType { switch changeType {
case create: case create:
sceneMarker, err = qb.Create(changedMarker, tx) sceneMarker, err = qb.Create(changedMarker, tx)
case update: case update:
// check to see if timestamp was changed
existingMarker, err = qb.Find(changedMarker.ID)
if err == nil {
sceneMarker, err = qb.Update(changedMarker, tx) sceneMarker, err = qb.Update(changedMarker, tx)
} }
}
if err != nil { if err != nil {
_ = tx.Rollback() _ = tx.Rollback()
return nil, err return nil, err
@@ -416,6 +540,18 @@ func changeMarker(ctx context.Context, changeType int, changedMarker models.Scen
return nil, err return nil, err
} }
// remove the marker preview if the timestamp was changed
if existingMarker != nil && existingMarker.Seconds != changedMarker.Seconds {
sqb := models.NewSceneQueryBuilder()
scene, _ := sqb.Find(int(existingMarker.SceneID.Int64))
if scene != nil {
seconds := int(existingMarker.Seconds)
manager.DeleteSceneMarkerFiles(scene, seconds)
}
}
return sceneMarker, nil return sceneMarker, nil
} }
@@ -478,3 +614,13 @@ func (r *mutationResolver) SceneResetO(ctx context.Context, id string) (int, err
return newVal, nil return newVal, nil
} }
func (r *mutationResolver) SceneGenerateScreenshot(ctx context.Context, id string, at *float64) (string, error) {
if at != nil {
manager.GetInstance().GenerateScreenshot(id, *at)
} else {
manager.GetInstance().GenerateDefaultScreenshot(id)
}
return "todo", nil
}

View File

@@ -12,12 +12,18 @@ func (r *queryResolver) Configuration(ctx context.Context) (*models.ConfigResult
return makeConfigResult(), nil return makeConfigResult(), nil
} }
func (r *queryResolver) Directories(ctx context.Context, path *string) ([]string, error) { func (r *queryResolver) Directory(ctx context.Context, path *string) (*models.Directory, error) {
var dirPath = "" var dirPath = ""
if path != nil { if path != nil {
dirPath = *path dirPath = *path
} }
return utils.ListDir(dirPath), nil currentDir := utils.GetDir(dirPath)
return &models.Directory{
Path: currentDir,
Parent: utils.GetParent(currentDir),
Directories: utils.ListDir(currentDir),
}, nil
} }
func makeConfigResult() *models.ConfigResult { func makeConfigResult() *models.ConfigResult {
@@ -33,38 +39,49 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
maxTranscodeSize := config.GetMaxTranscodeSize() maxTranscodeSize := config.GetMaxTranscodeSize()
maxStreamingTranscodeSize := config.GetMaxStreamingTranscodeSize() maxStreamingTranscodeSize := config.GetMaxStreamingTranscodeSize()
scraperUserAgent := config.GetScraperUserAgent()
return &models.ConfigGeneralResult{ return &models.ConfigGeneralResult{
Stashes: config.GetStashPaths(), Stashes: config.GetStashPaths(),
DatabasePath: config.GetDatabasePath(), DatabasePath: config.GetDatabasePath(),
GeneratedPath: config.GetGeneratedPath(), GeneratedPath: config.GetGeneratedPath(),
CachePath: config.GetCachePath(),
MaxTranscodeSize: &maxTranscodeSize, MaxTranscodeSize: &maxTranscodeSize,
MaxStreamingTranscodeSize: &maxStreamingTranscodeSize, MaxStreamingTranscodeSize: &maxStreamingTranscodeSize,
ForceMkv: config.GetForceMKV(),
ForceHevc: config.GetForceHEVC(),
Username: config.GetUsername(), Username: config.GetUsername(),
Password: config.GetPasswordHash(), Password: config.GetPasswordHash(),
MaxSessionAge: config.GetMaxSessionAge(),
LogFile: &logFile, LogFile: &logFile,
LogOut: config.GetLogOut(), LogOut: config.GetLogOut(),
LogLevel: config.GetLogLevel(), LogLevel: config.GetLogLevel(),
LogAccess: config.GetLogAccess(), LogAccess: config.GetLogAccess(),
Excludes: config.GetExcludes(), Excludes: config.GetExcludes(),
ScraperUserAgent: &scraperUserAgent,
} }
} }
func makeConfigInterfaceResult() *models.ConfigInterfaceResult { func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
soundOnPreview := config.GetSoundOnPreview() soundOnPreview := config.GetSoundOnPreview()
wallShowTitle := config.GetWallShowTitle() wallShowTitle := config.GetWallShowTitle()
wallPlayback := config.GetWallPlayback()
maximumLoopDuration := config.GetMaximumLoopDuration() maximumLoopDuration := config.GetMaximumLoopDuration()
autostartVideo := config.GetAutostartVideo() autostartVideo := config.GetAutostartVideo()
showStudioAsText := config.GetShowStudioAsText() showStudioAsText := config.GetShowStudioAsText()
css := config.GetCSS() css := config.GetCSS()
cssEnabled := config.GetCSSEnabled() cssEnabled := config.GetCSSEnabled()
language := config.GetLanguage()
return &models.ConfigInterfaceResult{ return &models.ConfigInterfaceResult{
SoundOnPreview: &soundOnPreview, SoundOnPreview: &soundOnPreview,
WallShowTitle: &wallShowTitle, WallShowTitle: &wallShowTitle,
WallPlayback: &wallPlayback,
MaximumLoopDuration: &maximumLoopDuration, MaximumLoopDuration: &maximumLoopDuration,
AutostartVideo: &autostartVideo, AutostartVideo: &autostartVideo,
ShowStudioAsText: &showStudioAsText, ShowStudioAsText: &showStudioAsText,
CSS: &css, CSS: &css,
CSSEnabled: &cssEnabled, CSSEnabled: &cssEnabled,
Language: &language,
} }
} }

View File

@@ -0,0 +1,33 @@
package api
import (
"context"
"strconv"
"github.com/stashapp/stash/pkg/models"
)
func (r *queryResolver) FindMovie(ctx context.Context, id string) (*models.Movie, error) {
qb := models.NewMovieQueryBuilder()
idInt, _ := strconv.Atoi(id)
return qb.Find(idInt, nil)
}
func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.MovieFilterType, filter *models.FindFilterType) (*models.FindMoviesResultType, error) {
qb := models.NewMovieQueryBuilder()
movies, total := qb.Query(movieFilter, filter)
return &models.FindMoviesResultType{
Count: total,
Movies: movies,
}, nil
}
func (r *queryResolver) AllMovies(ctx context.Context) ([]*models.Movie, error) {
qb := models.NewMovieQueryBuilder()
return qb.All()
}
func (r *queryResolver) AllMoviesSlim(ctx context.Context) ([]*models.Movie, error) {
qb := models.NewMovieQueryBuilder()
return qb.AllSlim()
}

View File

@@ -25,3 +25,8 @@ func (r *queryResolver) AllPerformers(ctx context.Context) ([]*models.Performer,
qb := models.NewPerformerQueryBuilder() qb := models.NewPerformerQueryBuilder()
return qb.All() return qb.All()
} }
func (r *queryResolver) AllPerformersSlim(ctx context.Context) ([]*models.Performer, error) {
qb := models.NewPerformerQueryBuilder()
return qb.AllSlim()
}

View File

@@ -25,3 +25,8 @@ func (r *queryResolver) AllStudios(ctx context.Context) ([]*models.Studio, error
qb := models.NewStudioQueryBuilder() qb := models.NewStudioQueryBuilder()
return qb.All() return qb.All()
} }
func (r *queryResolver) AllStudiosSlim(ctx context.Context) ([]*models.Studio, error) {
qb := models.NewStudioQueryBuilder()
return qb.AllSlim()
}

View File

@@ -16,3 +16,8 @@ func (r *queryResolver) AllTags(ctx context.Context) ([]*models.Tag, error) {
qb := models.NewTagQueryBuilder() qb := models.NewTagQueryBuilder()
return qb.All() return qb.All()
} }
func (r *queryResolver) AllTagsSlim(ctx context.Context) ([]*models.Tag, error) {
qb := models.NewTagQueryBuilder()
return qb.AllSlim()
}

View File

@@ -7,36 +7,6 @@ import (
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
) )
func (r *queryResolver) MetadataScan(ctx context.Context, input models.ScanMetadataInput) (string, error) {
manager.GetInstance().Scan(input.UseFileMetadata)
return "todo", nil
}
func (r *queryResolver) MetadataImport(ctx context.Context) (string, error) {
manager.GetInstance().Import()
return "todo", nil
}
func (r *queryResolver) MetadataExport(ctx context.Context) (string, error) {
manager.GetInstance().Export()
return "todo", nil
}
func (r *queryResolver) MetadataGenerate(ctx context.Context, input models.GenerateMetadataInput) (string, error) {
manager.GetInstance().Generate(input.Sprites, input.Previews, input.Markers, input.Transcodes)
return "todo", nil
}
func (r *queryResolver) MetadataAutoTag(ctx context.Context, input models.AutoTagMetadataInput) (string, error) {
manager.GetInstance().AutoTag(input.Performers, input.Studios, input.Tags)
return "todo", nil
}
func (r *queryResolver) MetadataClean(ctx context.Context) (string, error) {
manager.GetInstance().Clean()
return "todo", nil
}
func (r *queryResolver) JobStatus(ctx context.Context) (*models.MetadataUpdateStatus, error) { func (r *queryResolver) JobStatus(ctx context.Context) (*models.MetadataUpdateStatus, error) {
status := manager.GetInstance().Status status := manager.GetInstance().Status
ret := models.MetadataUpdateStatus{ ret := models.MetadataUpdateStatus{
@@ -47,7 +17,3 @@ func (r *queryResolver) JobStatus(ctx context.Context) (*models.MetadataUpdateSt
return &ret, nil return &ret, nil
} }
func (r *queryResolver) StopJob(ctx context.Context) (bool, error) {
return manager.GetInstance().Status.Stop(), nil
}

View File

@@ -23,11 +23,15 @@ func (rs galleryRoutes) Routes() chi.Router {
func (rs galleryRoutes) File(w http.ResponseWriter, r *http.Request) { func (rs galleryRoutes) File(w http.ResponseWriter, r *http.Request) {
gallery := r.Context().Value(galleryKey).(*models.Gallery) gallery := r.Context().Value(galleryKey).(*models.Gallery)
if gallery == nil {
http.Error(w, http.StatusText(404), 404)
return
}
fileIndex, _ := strconv.Atoi(chi.URLParam(r, "fileIndex")) fileIndex, _ := strconv.Atoi(chi.URLParam(r, "fileIndex"))
thumb := r.URL.Query().Get("thumb") thumb := r.URL.Query().Get("thumb")
w.Header().Add("Cache-Control", "max-age=604800000") // 1 Week w.Header().Add("Cache-Control", "max-age=604800000") // 1 Week
if thumb == "true" { if thumb == "true" {
_, _ = w.Write(gallery.GetThumbnail(fileIndex, 200)) _, _ = w.Write(cacheGthumb(gallery, fileIndex, models.DefaultGthumbWidth))
} else if thumb == "" { } else if thumb == "" {
_, _ = w.Write(gallery.GetImage(fileIndex)) _, _ = w.Write(gallery.GetImage(fileIndex))
} else { } else {
@@ -36,7 +40,7 @@ func (rs galleryRoutes) File(w http.ResponseWriter, r *http.Request) {
http.Error(w, http.StatusText(400), 400) http.Error(w, http.StatusText(400), 400)
return return
} }
_, _ = w.Write(gallery.GetThumbnail(fileIndex, int(width))) _, _ = w.Write(cacheGthumb(gallery, fileIndex, int(width)))
} }
} }

54
pkg/api/routes_movie.go Normal file
View File

@@ -0,0 +1,54 @@
package api
import (
"context"
"net/http"
"strconv"
"github.com/go-chi/chi"
"github.com/stashapp/stash/pkg/models"
)
type movieRoutes struct{}
func (rs movieRoutes) Routes() chi.Router {
r := chi.NewRouter()
r.Route("/{movieId}", func(r chi.Router) {
r.Use(MovieCtx)
r.Get("/frontimage", rs.FrontImage)
r.Get("/backimage", rs.BackImage)
})
return r
}
func (rs movieRoutes) FrontImage(w http.ResponseWriter, r *http.Request) {
movie := r.Context().Value(movieKey).(*models.Movie)
_, _ = w.Write(movie.FrontImage)
}
func (rs movieRoutes) BackImage(w http.ResponseWriter, r *http.Request) {
movie := r.Context().Value(movieKey).(*models.Movie)
_, _ = w.Write(movie.BackImage)
}
func MovieCtx(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
movieID, err := strconv.Atoi(chi.URLParam(r, "movieId"))
if err != nil {
http.Error(w, http.StatusText(404), 404)
return
}
qb := models.NewMovieQueryBuilder()
movie, err := qb.Find(movieID, nil)
if err != nil {
http.Error(w, http.StatusText(404), 404)
return
}
ctx := context.WithValue(r.Context(), movieKey, movie)
next.ServeHTTP(w, r.WithContext(ctx))
})
}

View File

@@ -4,6 +4,7 @@ import (
"context" "context"
"io" "io"
"net/http" "net/http"
"os"
"strconv" "strconv"
"strings" "strings"
@@ -42,13 +43,32 @@ func (rs sceneRoutes) Routes() chi.Router {
// region Handlers // region Handlers
func (rs sceneRoutes) Stream(w http.ResponseWriter, r *http.Request) { func (rs sceneRoutes) Stream(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
container := ""
if scene.Format.Valid {
container = scene.Format.String
} else { // container isn't in the DB
// shouldn't happen, fallback to ffprobe
tmpVideoFile, err := ffmpeg.NewVideoFile(manager.GetInstance().FFProbePath, scene.Path)
if err != nil {
logger.Errorf("[transcode] error reading video file: %s", err.Error())
return
}
container = string(ffmpeg.MatchContainer(tmpVideoFile.Container, scene.Path))
}
// detect if not a streamable file and try to transcode it instead // detect if not a streamable file and try to transcode it instead
filepath := manager.GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.Checksum) filepath := manager.GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.Checksum)
videoCodec := scene.VideoCodec.String videoCodec := scene.VideoCodec.String
audioCodec := ffmpeg.MissingUnsupported
if scene.AudioCodec.Valid {
audioCodec = ffmpeg.AudioCodec(scene.AudioCodec.String)
}
hasTranscode, _ := manager.HasTranscode(scene) hasTranscode, _ := manager.HasTranscode(scene)
if ffmpeg.IsValidCodec(videoCodec) || hasTranscode { if ffmpeg.IsValidCodec(videoCodec) && ffmpeg.IsValidCombo(videoCodec, ffmpeg.Container(container)) && ffmpeg.IsValidAudioForContainer(audioCodec, ffmpeg.Container(container)) || hasTranscode {
manager.RegisterStream(filepath, &w) manager.RegisterStream(filepath, &w)
http.ServeFile(w, r, filepath) http.ServeFile(w, r, filepath)
manager.WaitAndDeregisterStream(filepath, &w, r) manager.WaitAndDeregisterStream(filepath, &w, r)
@@ -69,16 +89,50 @@ func (rs sceneRoutes) Stream(w http.ResponseWriter, r *http.Request) {
encoder := ffmpeg.NewEncoder(manager.GetInstance().FFMPEGPath) encoder := ffmpeg.NewEncoder(manager.GetInstance().FFMPEGPath)
stream, process, err := encoder.StreamTranscode(*videoFile, startTime, config.GetMaxStreamingTranscodeSize()) var stream io.ReadCloser
var process *os.Process
mimeType := ffmpeg.MimeWebm
if audioCodec == ffmpeg.MissingUnsupported {
//ffmpeg fails if it trys to transcode a non supported audio codec
stream, process, err = encoder.StreamTranscodeVideo(*videoFile, startTime, config.GetMaxStreamingTranscodeSize())
} else {
copyVideo := false // try to be smart if the video to be transcoded is in a Matroska container
// mp4 has always supported audio so it doesn't need to be checked
// while mpeg_ts has seeking issues if we don't reencode the video
if config.GetForceMKV() { // If MKV is forced as supported and video codec is also supported then only transcode audio
if ffmpeg.Container(container) == ffmpeg.Matroska {
switch videoCodec {
case ffmpeg.H264, ffmpeg.Vp9, ffmpeg.Vp8:
copyVideo = true
case ffmpeg.Hevc:
if config.GetForceHEVC() {
copyVideo = true
}
}
}
}
if copyVideo { // copy video stream instead of transcoding it
stream, process, err = encoder.StreamMkvTranscodeAudio(*videoFile, startTime, config.GetMaxStreamingTranscodeSize())
mimeType = ffmpeg.MimeMkv
} else {
stream, process, err = encoder.StreamTranscode(*videoFile, startTime, config.GetMaxStreamingTranscodeSize())
}
}
if err != nil { if err != nil {
logger.Errorf("[stream] error transcoding video file: %s", err.Error()) logger.Errorf("[stream] error transcoding video file: %s", err.Error())
return return
} }
w.WriteHeader(http.StatusOK) w.WriteHeader(http.StatusOK)
w.Header().Set("Content-Type", "video/webm") w.Header().Set("Content-Type", mimeType)
logger.Info("[stream] transcoding video file") logger.Infof("[stream] transcoding video file to %s", mimeType)
// handle if client closes the connection // handle if client closes the connection
notify := r.Context().Done() notify := r.Context().Done()

View File

@@ -2,10 +2,13 @@ package api
import ( import (
"context" "context"
"crypto/md5"
"fmt"
"github.com/go-chi/chi" "github.com/go-chi/chi"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"net/http" "net/http"
"strconv" "strconv"
"strings"
) )
type studioRoutes struct{} type studioRoutes struct{}
@@ -23,6 +26,21 @@ func (rs studioRoutes) Routes() chi.Router {
func (rs studioRoutes) Image(w http.ResponseWriter, r *http.Request) { func (rs studioRoutes) Image(w http.ResponseWriter, r *http.Request) {
studio := r.Context().Value(studioKey).(*models.Studio) studio := r.Context().Value(studioKey).(*models.Studio)
etag := fmt.Sprintf("%x", md5.Sum(studio.Image))
if match := r.Header.Get("If-None-Match"); match != "" {
if strings.Contains(match, etag) {
w.WriteHeader(http.StatusNotModified)
return
}
}
contentType := http.DetectContentType(studio.Image)
if contentType == "text/xml; charset=utf-8" || contentType == "text/plain; charset=utf-8" {
contentType = "image/svg+xml"
}
w.Header().Set("Content-Type", contentType)
w.Header().Add("Etag", etag)
_, _ = w.Write(studio.Image) _, _ = w.Write(studio.Image)
} }

View File

@@ -7,6 +7,7 @@ import (
"fmt" "fmt"
"io/ioutil" "io/ioutil"
"net/http" "net/http"
"net/url"
"os" "os"
"path" "path"
"path/filepath" "path/filepath"
@@ -20,6 +21,7 @@ import (
"github.com/gobuffalo/packr/v2" "github.com/gobuffalo/packr/v2"
"github.com/gorilla/websocket" "github.com/gorilla/websocket"
"github.com/rs/cors" "github.com/rs/cors"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager" "github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/manager/config"
@@ -28,46 +30,81 @@ import (
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
) )
var version string = "" var version string
var buildstamp string = "" var buildstamp string
var githash string = "" var githash string
var uiBox *packr.Box var uiBox *packr.Box
//var legacyUiBox *packr.Box //var legacyUiBox *packr.Box
var setupUIBox *packr.Box var setupUIBox *packr.Box
var loginUIBox *packr.Box
func allowUnauthenticated(r *http.Request) bool {
return strings.HasPrefix(r.URL.Path, "/login") || r.URL.Path == "/css"
}
func authenticateHandler() func(http.Handler) http.Handler { func authenticateHandler() func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler { return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
// only do this if credentials have been configured ctx := r.Context()
if !config.HasCredentials() {
next.ServeHTTP(w, r) // translate api key into current user, if present
userID := ""
var err error
// handle session
userID, err = getSessionUserID(w, r)
if err != nil {
w.WriteHeader(http.StatusInternalServerError)
w.Write([]byte(err.Error()))
return return
} }
authUser, authPW, ok := r.BasicAuth() // handle redirect if no user and user is required
if userID == "" && config.HasCredentials() && !allowUnauthenticated(r) {
// always allow
if !ok || !config.ValidateCredentials(authUser, authPW) { // if we don't have a userID, then redirect
unauthorized(w) // if graphql was requested, we just return a forbidden error
if r.URL.Path == "/graphql" {
w.Header().Add("WWW-Authenticate", `FormBased`)
w.WriteHeader(http.StatusUnauthorized)
return return
} }
// otherwise redirect to the login page
u := url.URL{
Path: "/login",
}
q := u.Query()
q.Set(returnURLParam, r.URL.Path)
u.RawQuery = q.Encode()
http.Redirect(w, r, u.String(), http.StatusFound)
return
}
ctx = context.WithValue(ctx, ContextUser, userID)
r = r.WithContext(ctx)
next.ServeHTTP(w, r) next.ServeHTTP(w, r)
}) })
} }
} }
func unauthorized(w http.ResponseWriter) { const setupEndPoint = "/setup"
w.Header().Add("WWW-Authenticate", `Basic realm=\"Stash\"`) const migrateEndPoint = "/migrate"
w.WriteHeader(http.StatusUnauthorized) const loginEndPoint = "/login"
}
func Start() { func Start() {
uiBox = packr.New("UI Box", "../../ui/v2/build") uiBox = packr.New("UI Box", "../../ui/v2.5/build")
//legacyUiBox = packr.New("UI Box", "../../ui/v1/dist/stash-frontend") //legacyUiBox = packr.New("UI Box", "../../ui/v1/dist/stash-frontend")
setupUIBox = packr.New("Setup UI Box", "../../ui/setup") setupUIBox = packr.New("Setup UI Box", "../../ui/setup")
loginUIBox = packr.New("Login UI Box", "../../ui/login")
initSessionStore()
initialiseImages() initialiseImages()
r := chi.NewRouter() r := chi.NewRouter()
@@ -83,6 +120,7 @@ func Start() {
r.Use(cors.AllowAll().Handler) r.Use(cors.AllowAll().Handler)
r.Use(BaseURLMiddleware) r.Use(BaseURLMiddleware)
r.Use(ConfigCheckMiddleware) r.Use(ConfigCheckMiddleware)
r.Use(DatabaseCheckMiddleware)
recoverFunc := handler.RecoverFunc(func(ctx context.Context, err interface{}) error { recoverFunc := handler.RecoverFunc(func(ctx context.Context, err interface{}) error {
logger.Error(err) logger.Error(err)
@@ -105,12 +143,20 @@ func Start() {
r.Handle("/graphql", gqlHandler) r.Handle("/graphql", gqlHandler)
r.Handle("/playground", handler.Playground("GraphQL playground", "/graphql")) r.Handle("/playground", handler.Playground("GraphQL playground", "/graphql"))
// session handlers
r.Post(loginEndPoint, handleLogin)
r.Get("/logout", handleLogout)
r.Get(loginEndPoint, getLoginHandler)
r.Mount("/gallery", galleryRoutes{}.Routes()) r.Mount("/gallery", galleryRoutes{}.Routes())
r.Mount("/performer", performerRoutes{}.Routes()) r.Mount("/performer", performerRoutes{}.Routes())
r.Mount("/scene", sceneRoutes{}.Routes()) r.Mount("/scene", sceneRoutes{}.Routes())
r.Mount("/studio", studioRoutes{}.Routes()) r.Mount("/studio", studioRoutes{}.Routes())
r.Mount("/movie", movieRoutes{}.Routes())
r.HandleFunc("/css", func(w http.ResponseWriter, r *http.Request) { r.HandleFunc("/css", func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/css")
if !config.GetCSSEnabled() { if !config.GetCSSEnabled() {
return return
} }
@@ -125,6 +171,10 @@ func Start() {
http.ServeFile(w, r, fn) http.ServeFile(w, r, fn)
}) })
// Serve the migration UI
r.Get("/migrate", getMigrateHandler)
r.Post("/migrate", doMigrateHandler)
// Serve the setup UI // Serve the setup UI
r.HandleFunc("/setup*", func(w http.ResponseWriter, r *http.Request) { r.HandleFunc("/setup*", func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path) ext := path.Ext(r.URL.Path)
@@ -136,6 +186,16 @@ func Start() {
http.FileServer(setupUIBox).ServeHTTP(w, r) http.FileServer(setupUIBox).ServeHTTP(w, r)
} }
}) })
r.HandleFunc("/login*", func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path)
if ext == ".html" || ext == "" {
data, _ := loginUIBox.Find("login.html")
_, _ = w.Write(data)
} else {
r.URL.Path = strings.Replace(r.URL.Path, loginEndPoint, "", 1)
http.FileServer(loginUIBox).ServeHTTP(w, r)
}
})
r.Post("/init", func(w http.ResponseWriter, r *http.Request) { r.Post("/init", func(w http.ResponseWriter, r *http.Request) {
err := r.ParseForm() err := r.ParseForm()
if err != nil { if err != nil {
@@ -174,7 +234,8 @@ func Start() {
_ = os.Mkdir(downloads, 0755) _ = os.Mkdir(downloads, 0755)
config.Set(config.Stash, stash) // #536 - set stash as slice of strings
config.Set(config.Stash, []string{stash})
config.Set(config.Generated, generated) config.Set(config.Generated, generated)
config.Set(config.Metadata, metadata) config.Set(config.Metadata, metadata)
config.Set(config.Cache, cache) config.Set(config.Cache, cache)
@@ -189,6 +250,7 @@ func Start() {
http.Redirect(w, r, "/", 301) http.Redirect(w, r, "/", 301)
}) })
startThumbCache()
// Serve the web app // Serve the web app
r.HandleFunc("/*", func(w http.ResponseWriter, r *http.Request) { r.HandleFunc("/*", func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path) ext := path.Ext(r.URL.Path)
@@ -311,10 +373,27 @@ func BaseURLMiddleware(next http.Handler) http.Handler {
func ConfigCheckMiddleware(next http.Handler) http.Handler { func ConfigCheckMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path) ext := path.Ext(r.URL.Path)
shouldRedirect := ext == "" && r.Method == "GET" && r.URL.Path != "/init" shouldRedirect := ext == "" && r.Method == "GET"
if !config.IsValid() && shouldRedirect { if !config.IsValid() && shouldRedirect {
if !strings.HasPrefix(r.URL.Path, "/setup") { // #539 - don't redirect if loading login page
http.Redirect(w, r, "/setup", 301) if !strings.HasPrefix(r.URL.Path, setupEndPoint) && !strings.HasPrefix(r.URL.Path, loginEndPoint) {
http.Redirect(w, r, setupEndPoint, 301)
return
}
}
next.ServeHTTP(w, r)
})
}
func DatabaseCheckMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ext := path.Ext(r.URL.Path)
shouldRedirect := ext == "" && r.Method == "GET"
if shouldRedirect && database.NeedsMigration() {
// #451 - don't redirect if loading login page
// #539 - or setup page
if !strings.HasPrefix(r.URL.Path, migrateEndPoint) && !strings.HasPrefix(r.URL.Path, loginEndPoint) && !strings.HasPrefix(r.URL.Path, setupEndPoint) {
http.Redirect(w, r, migrateEndPoint, 301)
return return
} }
} }

127
pkg/api/session.go Normal file
View File

@@ -0,0 +1,127 @@
package api
import (
"fmt"
"html/template"
"net/http"
"github.com/stashapp/stash/pkg/manager/config"
"github.com/gorilla/sessions"
)
const cookieName = "session"
const usernameFormKey = "username"
const passwordFormKey = "password"
const userIDKey = "userID"
const returnURLParam = "returnURL"
var sessionStore = sessions.NewCookieStore(config.GetSessionStoreKey())
type loginTemplateData struct {
URL string
Error string
}
func initSessionStore() {
sessionStore.MaxAge(config.GetMaxSessionAge())
}
func redirectToLogin(w http.ResponseWriter, returnURL string, loginError string) {
data, _ := loginUIBox.Find("login.html")
templ, err := template.New("Login").Parse(string(data))
if err != nil {
http.Error(w, fmt.Sprintf("error: %s", err), http.StatusInternalServerError)
return
}
err = templ.Execute(w, loginTemplateData{URL: returnURL, Error: loginError})
if err != nil {
http.Error(w, fmt.Sprintf("error: %s", err), http.StatusInternalServerError)
}
}
func getLoginHandler(w http.ResponseWriter, r *http.Request) {
if !config.HasCredentials() {
http.Redirect(w, r, "/", http.StatusFound)
return
}
redirectToLogin(w, r.URL.Query().Get(returnURLParam), "")
}
func handleLogin(w http.ResponseWriter, r *http.Request) {
url := r.FormValue(returnURLParam)
if url == "" {
url = "/"
}
// ignore error - we want a new session regardless
newSession, _ := sessionStore.Get(r, cookieName)
username := r.FormValue("username")
password := r.FormValue("password")
// authenticate the user
if !config.ValidateCredentials(username, password) {
// redirect back to the login page with an error
redirectToLogin(w, url, "Username or password is invalid")
return
}
newSession.Values[userIDKey] = username
err := newSession.Save(r, w)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
http.Redirect(w, r, url, http.StatusFound)
}
func handleLogout(w http.ResponseWriter, r *http.Request) {
session, err := sessionStore.Get(r, cookieName)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
delete(session.Values, userIDKey)
session.Options.MaxAge = -1
err = session.Save(r, w)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
// redirect to the login page if credentials are required
getLoginHandler(w, r)
}
func getSessionUserID(w http.ResponseWriter, r *http.Request) (string, error) {
session, err := sessionStore.Get(r, cookieName)
// ignore errors and treat as an empty user id, so that we handle expired
// cookie
if err != nil {
return "", nil
}
if !session.IsNew {
val := session.Values[userIDKey]
// refresh the cookie
err = session.Save(r, w)
if err != nil {
return "", err
}
ret, _ := val.(string)
return ret, nil
}
return "", nil
}

View File

@@ -0,0 +1,24 @@
package urlbuilders
import "strconv"
type MovieURLBuilder struct {
BaseURL string
MovieID string
}
func NewMovieURLBuilder(baseURL string, movieID int) MovieURLBuilder {
return MovieURLBuilder{
BaseURL: baseURL,
MovieID: strconv.Itoa(movieID),
}
}
func (b MovieURLBuilder) GetMovieFrontImageURL() string {
return b.BaseURL + "/movie/" + b.MovieID + "/frontimage"
}
func (b MovieURLBuilder) GetMovieBackImageURL() string {
return b.BaseURL + "/movie/" + b.MovieID + "/backimage"
}

View File

@@ -5,10 +5,11 @@ import (
"errors" "errors"
"fmt" "fmt"
"os" "os"
"regexp" "time"
"github.com/gobuffalo/packr/v2" "github.com/gobuffalo/packr/v2"
"github.com/golang-migrate/migrate/v4" "github.com/golang-migrate/migrate/v4"
sqlite3mig "github.com/golang-migrate/migrate/v4/database/sqlite3"
"github.com/golang-migrate/migrate/v4/source" "github.com/golang-migrate/migrate/v4/source"
"github.com/jmoiron/sqlx" "github.com/jmoiron/sqlx"
sqlite3 "github.com/mattn/go-sqlite3" sqlite3 "github.com/mattn/go-sqlite3"
@@ -17,26 +18,62 @@ import (
) )
var DB *sqlx.DB var DB *sqlx.DB
var appSchemaVersion uint = 3 var dbPath string
var appSchemaVersion uint = 8
var databaseSchemaVersion uint
const sqlite3Driver = "sqlite3_regexp" const sqlite3Driver = "sqlite3ex"
func init() { func init() {
// register custom driver with regexp function // register custom driver with regexp function
registerRegexpFunc() registerCustomDriver()
} }
func Initialize(databasePath string) { func Initialize(databasePath string) {
runMigrations(databasePath) dbPath = databasePath
if err := getDatabaseSchemaVersion(); err != nil {
panic(err)
}
if databaseSchemaVersion == 0 {
// new database, just run the migrations
if err := RunMigrations(); err != nil {
panic(err)
}
// RunMigrations calls Initialise. Just return
return
} else {
if databaseSchemaVersion > appSchemaVersion {
panic(fmt.Sprintf("Database schema version %d is incompatible with required schema version %d", databaseSchemaVersion, appSchemaVersion))
}
// if migration is needed, then don't open the connection
if NeedsMigration() {
logger.Warnf("Database schema version %d does not match required schema version %d.", databaseSchemaVersion, appSchemaVersion)
return
}
}
const disableForeignKeys = false
DB = open(databasePath, disableForeignKeys)
}
func open(databasePath string, disableForeignKeys bool) *sqlx.DB {
// https://github.com/mattn/go-sqlite3 // https://github.com/mattn/go-sqlite3
conn, err := sqlx.Open(sqlite3Driver, "file:"+databasePath+"?_fk=true") url := "file:" + databasePath
if !disableForeignKeys {
url += "?_fk=true"
}
conn, err := sqlx.Open(sqlite3Driver, url)
conn.SetMaxOpenConns(25) conn.SetMaxOpenConns(25)
conn.SetMaxIdleConns(4) conn.SetMaxIdleConns(4)
if err != nil { if err != nil {
logger.Fatalf("db.Open(): %q\n", err) logger.Fatalf("db.Open(): %q\n", err)
} }
DB = conn
return conn
} }
func Reset(databasePath string) error { func Reset(databasePath string) error {
@@ -55,45 +92,123 @@ func Reset(databasePath string) error {
return nil return nil
} }
// Backup the database
func Backup(backupPath string) error {
db, err := sqlx.Connect(sqlite3Driver, "file:"+dbPath+"?_fk=true")
if err != nil {
return fmt.Errorf("Open database %s failed:%s", dbPath, err)
}
defer db.Close()
_, err = db.Exec(`VACUUM INTO "` + backupPath + `"`)
if err != nil {
return fmt.Errorf("Vacuum failed: %s", err)
}
return nil
}
func RestoreFromBackup(backupPath string) error {
return os.Rename(backupPath, dbPath)
}
// Migrate the database // Migrate the database
func runMigrations(databasePath string) { func NeedsMigration() bool {
return databaseSchemaVersion != appSchemaVersion
}
func AppSchemaVersion() uint {
return appSchemaVersion
}
func DatabaseBackupPath() string {
return fmt.Sprintf("%s.%d.%s", dbPath, databaseSchemaVersion, time.Now().Format("20060102_150405"))
}
func Version() uint {
return databaseSchemaVersion
}
func getMigrate() (*migrate.Migrate, error) {
migrationsBox := packr.New("Migrations Box", "./migrations") migrationsBox := packr.New("Migrations Box", "./migrations")
packrSource := &Packr2Source{ packrSource := &Packr2Source{
Box: migrationsBox, Box: migrationsBox,
Migrations: source.NewMigrations(), Migrations: source.NewMigrations(),
} }
databasePath = utils.FixWindowsPath(databasePath) databasePath := utils.FixWindowsPath(dbPath)
s, _ := WithInstance(packrSource) s, _ := WithInstance(packrSource)
m, err := migrate.NewWithSourceInstance(
const disableForeignKeys = true
conn := open(databasePath, disableForeignKeys)
driver, err := sqlite3mig.WithInstance(conn.DB, &sqlite3mig.Config{})
if err != nil {
return nil, err
}
// use sqlite3Driver so that migration has access to durationToTinyInt
return migrate.NewWithInstance(
"packr2", "packr2",
s, s,
fmt.Sprintf("sqlite3://%s", "file:"+databasePath), databasePath,
driver,
) )
}
func getDatabaseSchemaVersion() error {
m, err := getMigrate()
if err != nil {
return err
}
databaseSchemaVersion, _, _ = m.Version()
m.Close()
return nil
}
// Migrate the database
func RunMigrations() error {
m, err := getMigrate()
if err != nil { if err != nil {
panic(err.Error()) panic(err.Error())
} }
databaseSchemaVersion, _, _ := m.Version() databaseSchemaVersion, _, _ = m.Version()
stepNumber := appSchemaVersion - databaseSchemaVersion stepNumber := appSchemaVersion - databaseSchemaVersion
if stepNumber != 0 { if stepNumber != 0 {
err = m.Steps(int(stepNumber)) err = m.Steps(int(stepNumber))
if err != nil { if err != nil {
panic(err.Error()) // migration failed
m.Close()
return err
} }
} }
m.Close() m.Close()
// re-initialise the database
Initialize(dbPath)
return nil
} }
func registerRegexpFunc() { func registerCustomDriver() {
regexFn := func(re, s string) (bool, error) {
return regexp.MatchString(re, s)
}
sql.Register(sqlite3Driver, sql.Register(sqlite3Driver,
&sqlite3.SQLiteDriver{ &sqlite3.SQLiteDriver{
ConnectHook: func(conn *sqlite3.SQLiteConn) error { ConnectHook: func(conn *sqlite3.SQLiteConn) error {
return conn.RegisterFunc("regexp", regexFn, true) funcs := map[string]interface{}{
}, "regexp": regexFn,
}) "durationToTinyInt": durationToTinyIntFn,
}
for name, fn := range funcs {
if err := conn.RegisterFunc(name, fn, true); err != nil {
return fmt.Errorf("Error registering function %s: %s", name, err.Error())
}
}
return nil
},
},
)
} }

37
pkg/database/functions.go Normal file
View File

@@ -0,0 +1,37 @@
package database
import (
"regexp"
"strconv"
"strings"
)
// regexFn backs the custom SQLite "regexp" function: it reports whether s
// matches the regular expression re. A malformed pattern is reported back to
// the caller as an error.
func regexFn(re, s string) (bool, error) {
	pattern, err := regexp.Compile(re)
	if err != nil {
		return false, err
	}
	return pattern.MatchString(s), nil
}
// durationToTinyIntFn converts a colon-separated duration string (seconds,
// minutes:seconds, or hours:minutes:seconds) into a total number of seconds.
// Malformed input — more than three segments, or a segment that is not an
// integer — yields (0, nil) rather than an error, so that the SQL migration
// registering this function can post-process zero values instead of failing.
func durationToTinyIntFn(str string) (int64, error) {
	parts := strings.Split(str, ":")
	if len(parts) > 3 {
		return 0, nil
	}

	var total int64
	for i, part := range parts {
		n, err := strconv.Atoi(part)
		if err != nil {
			return 0, nil
		}

		// weight of this segment: 1 for seconds, 60 for minutes, 3600 for hours
		weight := int64(1)
		for j := 0; j < len(parts)-1-i; j++ {
			weight *= 60
		}

		total += weight * int64(n)
	}

	return total, nil
}

View File

@@ -0,0 +1,32 @@
-- Migration: introduce movies.
-- Creates the `movies` table, the `movies_scenes` join table, and adds a
-- movie reference to `scraped_items`.
CREATE TABLE `movies` (
`id` integer not null primary key autoincrement,
`name` varchar(255),
`aliases` varchar(255),
-- NOTE: duration and rating are stored as strings here; a later migration
-- rebuilds the table with integer/tinyint columns for them
`duration` varchar(6),
`date` date,
`rating` varchar(1),
`director` varchar(255),
`synopsis` text,
`front_image` blob not null,
`back_image` blob,
`checksum` varchar(255) not null,
`url` varchar(255),
`created_at` datetime not null,
`updated_at` datetime not null
);
-- join table associating scenes with movies
CREATE TABLE `movies_scenes` (
`movie_id` integer,
`scene_id` integer,
`scene_index` varchar(2),
foreign key(`movie_id`) references `movies`(`id`),
foreign key(`scene_id`) references `scenes`(`id`)
);
ALTER TABLE `scraped_items` ADD COLUMN `movie_id` integer;
CREATE UNIQUE INDEX `movies_checksum_unique` on `movies` (`checksum`);
-- a given scene_index may appear at most once per movie
CREATE UNIQUE INDEX `index_movie_id_scene_index_unique` ON `movies_scenes` ( `movie_id`, `scene_index` );
CREATE INDEX `index_movies_scenes_on_movie_id` on `movies_scenes` (`movie_id`);
CREATE INDEX `index_movies_scenes_on_scene_id` on `movies_scenes` (`scene_id`);

View File

@@ -0,0 +1,89 @@
-- Down migration: rebuild the performers table without the previously added
-- column. SQLite cannot drop a column directly, so the table is re-created
-- and repopulated; performers_scenes must be rebuilt too because of its
-- foreign key into performers.
PRAGMA foreign_keys=off;
-- need to re-create the performers table without the added column.
-- also need re-create the performers_scenes table due to the foreign key
-- rename existing performers table
ALTER TABLE `performers` RENAME TO `performers_old`;
ALTER TABLE `performers_scenes` RENAME TO `performers_scenes_old`;
-- drop the indexes
DROP INDEX IF EXISTS `index_performers_on_name`;
DROP INDEX IF EXISTS `index_performers_on_checksum`;
DROP INDEX IF EXISTS `index_performers_scenes_on_scene_id`;
DROP INDEX IF EXISTS `index_performers_scenes_on_performer_id`;
-- recreate the tables
CREATE TABLE `performers` (
`id` integer not null primary key autoincrement,
`image` blob not null,
`checksum` varchar(255) not null,
`name` varchar(255),
`url` varchar(255),
`twitter` varchar(255),
`instagram` varchar(255),
`birthdate` date,
`ethnicity` varchar(255),
`country` varchar(255),
`eye_color` varchar(255),
`height` varchar(255),
`measurements` varchar(255),
`fake_tits` varchar(255),
`career_length` varchar(255),
`tattoos` varchar(255),
`piercings` varchar(255),
`aliases` varchar(255),
`favorite` boolean not null default '0',
`created_at` datetime not null,
`updated_at` datetime not null
);
CREATE TABLE `performers_scenes` (
`performer_id` integer,
`scene_id` integer,
foreign key(`performer_id`) references `performers`(`id`),
foreign key(`scene_id`) references `scenes`(`id`)
);
-- copy the surviving columns across from the renamed tables
INSERT INTO `performers`
SELECT
`id`,
`image`,
`checksum`,
`name`,
`url`,
`twitter`,
`instagram`,
`birthdate`,
`ethnicity`,
`country`,
`eye_color`,
`height`,
`measurements`,
`fake_tits`,
`career_length`,
`tattoos`,
`piercings`,
`aliases`,
`favorite`,
`created_at`,
`updated_at`
FROM `performers_old`;
INSERT INTO `performers_scenes`
SELECT
`performer_id`,
`scene_id`
FROM `performers_scenes_old`;
DROP TABLE `performers_scenes_old`;
DROP TABLE `performers_old`;
-- re-create the indexes after removing the old tables
CREATE INDEX `index_performers_on_name` on `performers` (`name`);
CREATE INDEX `index_performers_on_checksum` on `performers` (`checksum`);
CREATE INDEX `index_performers_scenes_on_scene_id` on `performers_scenes` (`scene_id`);
CREATE INDEX `index_performers_scenes_on_performer_id` on `performers_scenes` (`performer_id`);
PRAGMA foreign_keys=on;

View File

@@ -0,0 +1 @@
-- Migration: add a free-form gender string (up to 20 chars) to performers.
ALTER TABLE `performers` ADD COLUMN `gender` varchar(20);

View File

@@ -0,0 +1 @@
-- Migration: add a format column to scenes.
ALTER TABLE `scenes` ADD COLUMN `format` varchar(255);

View File

@@ -0,0 +1,101 @@
-- Migration: rebuild the performers table so the `image` blob becomes the
-- last column (it was previously declared second — compare the table created
-- by the earlier down migration). performers_scenes is rebuilt as well
-- because of its foreign key into performers.
DROP INDEX `performers_checksum_unique`;
DROP INDEX `index_performers_on_name`;
DROP INDEX `index_performers_on_checksum`;
ALTER TABLE `performers` RENAME TO `temp_old_performers`;
CREATE TABLE `performers` (
`id` integer not null primary key autoincrement,
`checksum` varchar(255) not null,
`name` varchar(255),
`gender` varchar(20),
`url` varchar(255),
`twitter` varchar(255),
`instagram` varchar(255),
`birthdate` date,
`ethnicity` varchar(255),
`country` varchar(255),
`eye_color` varchar(255),
`height` varchar(255),
`measurements` varchar(255),
`fake_tits` varchar(255),
`career_length` varchar(255),
`tattoos` varchar(255),
`piercings` varchar(255),
`aliases` varchar(255),
`favorite` boolean not null default '0',
`created_at` datetime not null,
`updated_at` datetime not null,
-- image moved to the end of the column list
`image` blob not null
);
CREATE UNIQUE INDEX `performers_checksum_unique` on `performers` (`checksum`);
CREATE INDEX `index_performers_on_name` on `performers` (`name`);
-- copy all rows across from the renamed table
INSERT INTO `performers` (
`id`,
`checksum`,
`name`,
`gender`,
`url`,
`twitter`,
`instagram`,
`birthdate`,
`ethnicity`,
`country`,
`eye_color`,
`height`,
`measurements`,
`fake_tits`,
`career_length`,
`tattoos`,
`piercings`,
`aliases`,
`favorite`,
`created_at`,
`updated_at`,
`image`
)
SELECT
`id`,
`checksum`,
`name`,
`gender`,
`url`,
`twitter`,
`instagram`,
`birthdate`,
`ethnicity`,
`country`,
`eye_color`,
`height`,
`measurements`,
`fake_tits`,
`career_length`,
`tattoos`,
`piercings`,
`aliases`,
`favorite`,
`created_at`,
`updated_at`,
`image`
FROM `temp_old_performers`;
-- rebuild performers_scenes against the new performers table
DROP INDEX `index_performers_scenes_on_scene_id`;
DROP INDEX `index_performers_scenes_on_performer_id`;
ALTER TABLE performers_scenes RENAME TO temp_old_performers_scenes;
CREATE TABLE `performers_scenes` (
`performer_id` integer,
`scene_id` integer,
foreign key(`performer_id`) references `performers`(`id`),
foreign key(`scene_id`) references `scenes`(`id`)
);
CREATE INDEX `index_performers_scenes_on_scene_id` on `performers_scenes` (`scene_id`);
CREATE INDEX `index_performers_scenes_on_performer_id` on `performers_scenes` (`performer_id`);
INSERT INTO `performers_scenes` (
`performer_id`,
`scene_id`
)
SELECT
`performer_id`,
`scene_id`
FROM `temp_old_performers_scenes`;
DROP TABLE `temp_old_performers`;
DROP TABLE `temp_old_performers_scenes`;

View File

@@ -0,0 +1,106 @@
-- Migration: rebuild movies/movies_scenes with corrected column types and
-- constraints (string duration -> integer via the custom durationToTinyInt
-- SQL function, string rating -> tinyint, new studio link, cascading
-- foreign keys) and migrate the existing rows across.
ALTER TABLE `movies` rename to `_movies_old`;
ALTER TABLE `movies_scenes` rename to `_movies_scenes_old`;
DROP INDEX IF EXISTS `movies_checksum_unique`;
DROP INDEX IF EXISTS `index_movie_id_scene_index_unique`;
DROP INDEX IF EXISTS `index_movies_scenes_on_movie_id`;
DROP INDEX IF EXISTS `index_movies_scenes_on_scene_id`;
-- recreate the movies table with fixed column types and constraints
CREATE TABLE `movies` (
`id` integer not null primary key autoincrement,
-- add not null
`name` varchar(255) not null,
`aliases` varchar(255),
-- varchar(6) -> integer
`duration` integer,
`date` date,
-- varchar(1) -> tinyint
`rating` tinyint,
`studio_id` integer,
`director` varchar(255),
`synopsis` text,
`checksum` varchar(255) not null,
`url` varchar(255),
`created_at` datetime not null,
`updated_at` datetime not null,
`front_image` blob not null,
`back_image` blob,
foreign key(`studio_id`) references `studios`(`id`) on delete set null
);
CREATE TABLE `movies_scenes` (
`movie_id` integer,
`scene_id` integer,
-- varchar(2) -> tinyint
`scene_index` tinyint,
foreign key(`movie_id`) references `movies`(`id`) on delete cascade,
foreign key(`scene_id`) references `scenes`(`id`) on delete cascade
);
-- add unique index on movie name
CREATE UNIQUE INDEX `movies_name_unique` on `movies` (`name`);
CREATE UNIQUE INDEX `movies_checksum_unique` on `movies` (`checksum`);
-- remove unique index on movies_scenes
CREATE INDEX `index_movies_scenes_on_movie_id` on `movies_scenes` (`movie_id`);
CREATE INDEX `index_movies_scenes_on_scene_id` on `movies_scenes` (`scene_id`);
CREATE INDEX `index_movies_on_studio_id` on `movies` (`studio_id`);
-- custom functions cannot accept NULL values, so massage the old data
UPDATE `_movies_old` set `duration` = 0 WHERE `duration` IS NULL;
-- now populate from the old tables
INSERT INTO `movies`
(
`id`,
`name`,
`aliases`,
`duration`,
`date`,
`rating`,
`director`,
`synopsis`,
`front_image`,
`back_image`,
`checksum`,
`url`,
`created_at`,
`updated_at`
)
SELECT
`id`,
`name`,
`aliases`,
durationToTinyInt(`duration`),
`date`,
CAST(`rating` as tinyint),
`director`,
`synopsis`,
`front_image`,
`back_image`,
`checksum`,
`url`,
`created_at`,
`updated_at`
FROM `_movies_old`
-- ignore null named movies
WHERE `name` is not null;
-- durationToTinyInt returns 0 if it cannot parse the string
-- set these values to null instead
UPDATE `movies` SET `duration` = NULL WHERE `duration` = 0;
INSERT INTO `movies_scenes`
(
`movie_id`,
`scene_id`,
`scene_index`
)
SELECT
`movie_id`,
`scene_id`,
CAST(`scene_index` as tinyint)
FROM `_movies_scenes_old`;
-- drop old tables
DROP TABLE `_movies_scenes_old`;
DROP TABLE `_movies_old`;

View File

@@ -18,7 +18,7 @@ type Encoder struct {
} }
var ( var (
runningEncoders map[string][]*os.Process = make(map[string][]*os.Process) runningEncoders = make(map[string][]*os.Process)
runningEncodersMutex = sync.RWMutex{} runningEncodersMutex = sync.RWMutex{}
) )

View File

@@ -13,9 +13,10 @@ type ScenePreviewChunkOptions struct {
OutputPath string OutputPath string
} }
func (e *Encoder) ScenePreviewVideoChunk(probeResult VideoFile, options ScenePreviewChunkOptions) { func (e *Encoder) ScenePreviewVideoChunk(probeResult VideoFile, options ScenePreviewChunkOptions, preset string) {
args := []string{ args := []string{
"-v", "error", "-v", "error",
"-xerror",
"-ss", strconv.Itoa(options.Time), "-ss", strconv.Itoa(options.Time),
"-i", probeResult.Path, "-i", probeResult.Path,
"-t", "0.75", "-t", "0.75",
@@ -25,7 +26,7 @@ func (e *Encoder) ScenePreviewVideoChunk(probeResult VideoFile, options ScenePre
"-pix_fmt", "yuv420p", "-pix_fmt", "yuv420p",
"-profile:v", "high", "-profile:v", "high",
"-level", "4.2", "-level", "4.2",
"-preset", "veryslow", "-preset", preset,
"-crf", "21", "-crf", "21",
"-threads", "4", "-threads", "4",
"-vf", fmt.Sprintf("scale=%v:-2", options.Width), "-vf", fmt.Sprintf("scale=%v:-2", options.Width),

View File

@@ -10,7 +10,7 @@ type ScreenshotOptions struct {
Verbosity string Verbosity string
} }
func (e *Encoder) Screenshot(probeResult VideoFile, options ScreenshotOptions) { func (e *Encoder) Screenshot(probeResult VideoFile, options ScreenshotOptions) error {
if options.Verbosity == "" { if options.Verbosity == "" {
options.Verbosity = "error" options.Verbosity = "error"
} }
@@ -28,5 +28,7 @@ func (e *Encoder) Screenshot(probeResult VideoFile, options ScreenshotOptions) {
"-f", "image2", "-f", "image2",
options.OutputPath, options.OutputPath,
} }
_, _ = e.run(probeResult, args) _, err := e.run(probeResult, args)
return err
} }

View File

@@ -69,6 +69,49 @@ func (e *Encoder) Transcode(probeResult VideoFile, options TranscodeOptions) {
_, _ = e.run(probeResult, args) _, _ = e.run(probeResult, args)
} }
//transcode the video, remove the audio
//in some videos where the audio codec is not supported by ffmpeg
//ffmpeg fails if you try to transcode the audio
func (e *Encoder) TranscodeVideo(probeResult VideoFile, options TranscodeOptions) {
scale := calculateTranscodeScale(probeResult, options.MaxTranscodeSize)
args := []string{
"-i", probeResult.Path,
"-an",
"-c:v", "libx264",
"-pix_fmt", "yuv420p",
"-profile:v", "high",
"-level", "4.2",
"-preset", "superfast",
"-crf", "23",
"-vf", "scale=" + scale,
options.OutputPath,
}
_, _ = e.run(probeResult, args)
}
//copy the video stream as is, transcode audio
func (e *Encoder) TranscodeAudio(probeResult VideoFile, options TranscodeOptions) {
args := []string{
"-i", probeResult.Path,
"-c:v", "copy",
"-c:a", "aac",
"-strict", "-2",
options.OutputPath,
}
_, _ = e.run(probeResult, args)
}
//copy the video stream as is, drop audio
func (e *Encoder) CopyVideo(probeResult VideoFile, options TranscodeOptions) {
args := []string{
"-i", probeResult.Path,
"-an",
"-c:v", "copy",
options.OutputPath,
}
_, _ = e.run(probeResult, args)
}
func (e *Encoder) StreamTranscode(probeResult VideoFile, startTime string, maxTranscodeSize models.StreamingResolutionEnum) (io.ReadCloser, *os.Process, error) { func (e *Encoder) StreamTranscode(probeResult VideoFile, startTime string, maxTranscodeSize models.StreamingResolutionEnum) (io.ReadCloser, *os.Process, error) {
scale := calculateTranscodeScale(probeResult, maxTranscodeSize) scale := calculateTranscodeScale(probeResult, maxTranscodeSize)
args := []string{} args := []string{}
@@ -92,3 +135,53 @@ func (e *Encoder) StreamTranscode(probeResult VideoFile, startTime string, maxTr
return e.stream(probeResult, args) return e.stream(probeResult, args)
} }
//transcode the video, remove the audio
//in some videos where the audio codec is not supported by ffmpeg
//ffmpeg fails if you try to transcode the audio
func (e *Encoder) StreamTranscodeVideo(probeResult VideoFile, startTime string, maxTranscodeSize models.StreamingResolutionEnum) (io.ReadCloser, *os.Process, error) {
scale := calculateTranscodeScale(probeResult, maxTranscodeSize)
args := []string{}
if startTime != "" {
args = append(args, "-ss", startTime)
}
args = append(args,
"-i", probeResult.Path,
"-an",
"-c:v", "libvpx-vp9",
"-vf", "scale="+scale,
"-deadline", "realtime",
"-cpu-used", "5",
"-row-mt", "1",
"-crf", "30",
"-b:v", "0",
"-f", "webm",
"pipe:",
)
return e.stream(probeResult, args)
}
//it is very common in MKVs to have just the audio codec unsupported
//copy the video stream, transcode the audio and serve as Matroska
func (e *Encoder) StreamMkvTranscodeAudio(probeResult VideoFile, startTime string, maxTranscodeSize models.StreamingResolutionEnum) (io.ReadCloser, *os.Process, error) {
args := []string{}
if startTime != "" {
args = append(args, "-ss", startTime)
}
args = append(args,
"-i", probeResult.Path,
"-c:v", "copy",
"-c:a", "libopus",
"-b:a", "96k",
"-vbr", "on",
"-f", "matroska",
"pipe:",
)
return e.stream(probeResult, args)
}

View File

@@ -10,11 +10,106 @@ import (
"strconv" "strconv"
"strings" "strings"
"time" "time"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/config"
) )
var ValidCodecs = []string{"h264", "h265", "vp8", "vp9"} type Container string
type AudioCodec string
const (
Mp4 Container = "mp4"
M4v Container = "m4v"
Mov Container = "mov"
Wmv Container = "wmv"
Webm Container = "webm"
Matroska Container = "matroska"
Avi Container = "avi"
Flv Container = "flv"
Mpegts Container = "mpegts"
Aac AudioCodec = "aac"
Mp3 AudioCodec = "mp3"
Opus AudioCodec = "opus"
Vorbis AudioCodec = "vorbis"
MissingUnsupported AudioCodec = ""
Mp4Ffmpeg string = "mov,mp4,m4a,3gp,3g2,mj2" // browsers support all of them
M4vFfmpeg string = "mov,mp4,m4a,3gp,3g2,mj2" // so we don't care that ffmpeg
MovFfmpeg string = "mov,mp4,m4a,3gp,3g2,mj2" // can't differentiate between them
WmvFfmpeg string = "asf"
WebmFfmpeg string = "matroska,webm"
MatroskaFfmpeg string = "matroska,webm"
AviFfmpeg string = "avi"
FlvFfmpeg string = "flv"
MpegtsFfmpeg string = "mpegts"
H264 string = "h264"
H265 string = "h265" // found in rare cases from a faulty encoder
Hevc string = "hevc"
Vp8 string = "vp8"
Vp9 string = "vp9"
MimeWebm string = "video/webm"
MimeMkv string = "video/x-matroska"
)
var ValidCodecs = []string{H264, H265, Vp8, Vp9}
var validForH264Mkv = []Container{Mp4, Matroska}
var validForH264 = []Container{Mp4}
var validForH265Mkv = []Container{Mp4, Matroska}
var validForH265 = []Container{Mp4}
var validForVp8 = []Container{Webm}
var validForVp9Mkv = []Container{Webm, Matroska}
var validForVp9 = []Container{Webm}
var validForHevcMkv = []Container{Mp4, Matroska}
var validForHevc = []Container{Mp4}
var validAudioForMkv = []AudioCodec{Aac, Mp3, Vorbis, Opus}
var validAudioForWebm = []AudioCodec{Vorbis, Opus}
var validAudioForMp4 = []AudioCodec{Aac, Mp3}
//maps user readable container strings to ffprobe's format_name
//on some formats ffprobe can't differentiate
var ContainerToFfprobe = map[Container]string{
Mp4: Mp4Ffmpeg,
M4v: M4vFfmpeg,
Mov: MovFfmpeg,
Wmv: WmvFfmpeg,
Webm: WebmFfmpeg,
Matroska: MatroskaFfmpeg,
Avi: AviFfmpeg,
Flv: FlvFfmpeg,
Mpegts: MpegtsFfmpeg,
}
var FfprobeToContainer = map[string]Container{
Mp4Ffmpeg: Mp4,
WmvFfmpeg: Wmv,
AviFfmpeg: Avi,
FlvFfmpeg: Flv,
MpegtsFfmpeg: Mpegts,
MatroskaFfmpeg: Matroska,
}
func MatchContainer(format string, filePath string) Container { // match ffprobe string to our Container
container := FfprobeToContainer[format]
if container == Matroska {
container = MagicContainer(filePath) // use magic number instead of ffprobe for matroska,webm
}
if container == "" { // if format is not in our Container list leave it as ffprobes reported format_name
container = Container(format)
}
return container
}
func IsValidCodec(codecName string) bool { func IsValidCodec(codecName string) bool {
forceHEVC := config.GetForceHEVC()
if forceHEVC {
if codecName == Hevc {
return true
}
}
for _, c := range ValidCodecs { for _, c := range ValidCodecs {
if c == codecName { if c == codecName {
return true return true
@@ -23,6 +118,78 @@ func IsValidCodec(codecName string) bool {
return false return false
} }
func IsValidAudio(audio AudioCodec, ValidCodecs []AudioCodec) bool {
// if audio codec is missing or unsupported by ffmpeg we can't do anything about it
// report it as valid so that the file can at least be streamed directly if the video codec is supported
if audio == MissingUnsupported {
return true
}
for _, c := range ValidCodecs {
if c == audio {
return true
}
}
return false
}
func IsValidAudioForContainer(audio AudioCodec, format Container) bool {
switch format {
case Matroska:
return IsValidAudio(audio, validAudioForMkv)
case Webm:
return IsValidAudio(audio, validAudioForWebm)
case Mp4:
return IsValidAudio(audio, validAudioForMp4)
}
return false
}
func IsValidForContainer(format Container, validContainers []Container) bool {
for _, fmt := range validContainers {
if fmt == format {
return true
}
}
return false
}
//extend stream validation check to take into account container
func IsValidCombo(codecName string, format Container) bool {
forceMKV := config.GetForceMKV()
forceHEVC := config.GetForceHEVC()
switch codecName {
case H264:
if forceMKV {
return IsValidForContainer(format, validForH264Mkv)
}
return IsValidForContainer(format, validForH264)
case H265:
if forceMKV {
return IsValidForContainer(format, validForH265Mkv)
}
return IsValidForContainer(format, validForH265)
case Vp8:
return IsValidForContainer(format, validForVp8)
case Vp9:
if forceMKV {
return IsValidForContainer(format, validForVp9Mkv)
}
return IsValidForContainer(format, validForVp9)
case Hevc:
if forceHEVC {
if forceMKV {
return IsValidForContainer(format, validForHevcMkv)
}
return IsValidForContainer(format, validForHevc)
}
}
return false
}
type VideoFile struct { type VideoFile struct {
JSON FFProbeJSON JSON FFProbeJSON
AudioStream *FFProbeStream AudioStream *FFProbeStream
@@ -98,7 +265,11 @@ func parse(filePath string, probeJSON *FFProbeJSON) (*VideoFile, error) {
result.Container = probeJSON.Format.FormatName result.Container = probeJSON.Format.FormatName
duration, _ := strconv.ParseFloat(probeJSON.Format.Duration, 64) duration, _ := strconv.ParseFloat(probeJSON.Format.Duration, 64)
result.Duration = math.Round(duration*100) / 100 result.Duration = math.Round(duration*100) / 100
fileStat, _ := os.Stat(filePath) fileStat, err := os.Stat(filePath)
if err != nil {
logger.Errorf("Error statting file: %v", err)
return nil, err
}
result.Size = fileStat.Size() result.Size = fileStat.Size()
result.StartTime, _ = strconv.ParseFloat(probeJSON.Format.StartTime, 64) result.StartTime, _ = strconv.ParseFloat(probeJSON.Format.StartTime, 64)
result.CreationTime = probeJSON.Format.Tags.CreationTime.Time result.CreationTime = probeJSON.Format.Tags.CreationTime.Time

View File

@@ -0,0 +1,66 @@
package ffmpeg
import (
"bytes"
"github.com/stashapp/stash/pkg/logger"
"os"
)
// detect file format from magic file number
// https://github.com/lex-r/filetype/blob/73c10ad714e3b8ecf5cd1564c882ed6d440d5c2d/matchers/video.go
// mkv reports whether buf starts with the EBML magic number and carries a
// "matroska" doctype signature, identifying a Matroska container.
func mkv(buf []byte) bool {
	if len(buf) <= 3 {
		return false
	}
	if buf[0] != 0x1A || buf[1] != 0x45 || buf[2] != 0xDF || buf[3] != 0xA3 {
		return false
	}
	return containsMatroskaSignature(buf, []byte("matroska"))
}
// webm reports whether buf starts with the EBML magic number and carries a
// "webm" doctype signature, identifying a WebM container.
func webm(buf []byte) bool {
	if len(buf) <= 3 {
		return false
	}
	if buf[0] != 0x1A || buf[1] != 0x45 || buf[2] != 0xDF || buf[3] != 0xA3 {
		return false
	}
	return containsMatroskaSignature(buf, []byte("webm"))
}
// containsMatroskaSignature searches the first 4096 bytes of buf for subType
// and checks that it is preceded by the EBML DocType element ID bytes
// (0x42 0x82), with one length byte in between.
func containsMatroskaSignature(buf, subType []byte) bool {
	window := buf
	if len(window) > 4096 {
		window = window[:4096]
	}

	idx := bytes.Index(window, subType)
	if idx < 3 {
		// not found, or too close to the start for the element ID to fit
		return false
	}

	return buf[idx-3] == 0x42 && buf[idx-2] == 0x82
}
//returns container as string ("" on error or no match)
//implements only mkv or webm as ffprobe can't distinguish between them
//and not all browsers support mkv
// MagicContainer inspects the leading bytes of the file at filePath and
// returns its container type, or "" on read error or when no signature
// matches. Only webm and matroska detection is implemented, as ffprobe
// cannot distinguish between them and not all browsers support mkv.
func MagicContainer(filePath string) Container {
	f, err := os.Open(filePath)
	if err != nil {
		logger.Errorf("[magicfile] %v", err)
		return ""
	}
	defer f.Close()

	header := make([]byte, 4096)
	if _, err := f.Read(header); err != nil {
		logger.Errorf("[magicfile] %v", err)
		return ""
	}

	switch {
	case webm(header):
		return Webm
	case mkv(header):
		return Matroska
	default:
		return ""
	}
}

View File

@@ -19,10 +19,12 @@ const Metadata = "metadata"
const Downloads = "downloads" const Downloads = "downloads"
const Username = "username" const Username = "username"
const Password = "password" const Password = "password"
const MaxSessionAge = "max_session_age"
const DefaultMaxSessionAge = 60 * 60 * 1 // 1 hours
const Database = "database" const Database = "database"
const ScrapersPath = "scrapers_path"
const Exclude = "exclude" const Exclude = "exclude"
const MaxTranscodeSize = "max_transcode_size" const MaxTranscodeSize = "max_transcode_size"
@@ -32,6 +34,19 @@ const Host = "host"
const Port = "port" const Port = "port"
const ExternalHost = "external_host" const ExternalHost = "external_host"
// key used to sign JWT tokens
const JWTSignKey = "jwt_secret_key"
// key used for session store
const SessionStoreKey = "session_store_key"
// scraping options
const ScrapersPath = "scrapers_path"
const ScraperUserAgent = "scraper_user_agent"
// i18n
const Language = "language"
// Interface options // Interface options
const SoundOnPreview = "sound_on_preview" const SoundOnPreview = "sound_on_preview"
const WallShowTitle = "wall_show_title" const WallShowTitle = "wall_show_title"
@@ -39,6 +54,11 @@ const MaximumLoopDuration = "maximum_loop_duration"
const AutostartVideo = "autostart_video" const AutostartVideo = "autostart_video"
const ShowStudioAsText = "show_studio_as_text" const ShowStudioAsText = "show_studio_as_text"
const CSSEnabled = "cssEnabled" const CSSEnabled = "cssEnabled"
const WallPlayback = "wall_playback"
// Playback force codec,container
const ForceMKV = "forceMKV"
const ForceHEVC = "forceHEVC"
// Logging options // Logging options
const LogFile = "logFile" const LogFile = "logFile"
@@ -83,6 +103,14 @@ func GetDatabasePath() string {
return viper.GetString(Database) return viper.GetString(Database)
} }
func GetJWTSignKey() []byte {
return []byte(viper.GetString(JWTSignKey))
}
func GetSessionStoreKey() []byte {
return []byte(viper.GetString(SessionStoreKey))
}
func GetDefaultScrapersPath() string { func GetDefaultScrapersPath() string {
// default to the same directory as the config file // default to the same directory as the config file
configFileUsed := viper.ConfigFileUsed() configFileUsed := viper.ConfigFileUsed()
@@ -97,10 +125,25 @@ func GetExcludes() []string {
return viper.GetStringSlice(Exclude) return viper.GetStringSlice(Exclude)
} }
func GetLanguage() string {
ret := viper.GetString(Language)
// default to English
if ret == "" {
return "en-US"
}
return ret
}
func GetScrapersPath() string { func GetScrapersPath() string {
return viper.GetString(ScrapersPath) return viper.GetString(ScrapersPath)
} }
func GetScraperUserAgent() string {
return viper.GetString(ScraperUserAgent)
}
func GetHost() string { func GetHost() string {
return viper.GetString(Host) return viper.GetString(Host)
} }
@@ -181,6 +224,13 @@ func ValidateCredentials(username string, password string) bool {
return username == authUser && err == nil return username == authUser && err == nil
} }
// GetMaxSessionAge gets the maximum age for session cookies, in seconds.
// Session cookie expiry times are refreshed every request.
func GetMaxSessionAge() int {
viper.SetDefault(MaxSessionAge, DefaultMaxSessionAge)
return viper.GetInt(MaxSessionAge)
}
// Interface options // Interface options
func GetSoundOnPreview() bool { func GetSoundOnPreview() bool {
viper.SetDefault(SoundOnPreview, true) viper.SetDefault(SoundOnPreview, true)
@@ -192,6 +242,11 @@ func GetWallShowTitle() bool {
return viper.GetBool(WallShowTitle) return viper.GetBool(WallShowTitle)
} }
func GetWallPlayback() string {
viper.SetDefault(WallPlayback, "video")
return viper.GetString(WallPlayback)
}
func GetMaximumLoopDuration() int { func GetMaximumLoopDuration() int {
viper.SetDefault(MaximumLoopDuration, 0) viper.SetDefault(MaximumLoopDuration, 0)
return viper.GetInt(MaximumLoopDuration) return viper.GetInt(MaximumLoopDuration)
@@ -246,6 +301,15 @@ func GetCSSEnabled() bool {
return viper.GetBool(CSSEnabled) return viper.GetBool(CSSEnabled)
} }
// force codec,container
func GetForceMKV() bool {
return viper.GetBool(ForceMKV)
}
func GetForceHEVC() bool {
return viper.GetBool(ForceHEVC)
}
// GetLogFile returns the filename of the file to output logs to. // GetLogFile returns the filename of the file to output logs to.
// An empty string means that file logging will be disabled. // An empty string means that file logging will be disabled.
func GetLogFile() string { func GetLogFile() string {
@@ -294,3 +358,21 @@ func IsValid() bool {
// TODO: check valid paths // TODO: check valid paths
return setPaths return setPaths
} }
// SetInitialConfig fills in missing required config fields
func SetInitialConfig() error {
// generate some api keys
const apiKeyLength = 32
if string(GetJWTSignKey()) == "" {
signKey := utils.GenerateRandomKey(apiKeyLength)
Set(JWTSignKey, signKey)
}
if string(GetSessionStoreKey()) == "" {
sessionStoreKey := utils.GenerateRandomKey(apiKeyLength)
Set(SessionStoreKey, sessionStoreKey)
}
return Write()
}

View File

@@ -1,9 +1,10 @@
package manager package manager
import ( import (
"github.com/stashapp/stash/pkg/logger"
"regexp" "regexp"
"strings" "strings"
"github.com/stashapp/stash/pkg/logger"
) )
func excludeFiles(files []string, patterns []string) ([]string, int) { func excludeFiles(files []string, patterns []string) ([]string, int) {
@@ -37,21 +38,13 @@ func excludeFiles(files []string, patterns []string) ([]string, int) {
} }
func matchFile(file string, patterns []string) bool { func matchFile(file string, patterns []string) bool {
if patterns == nil { if patterns != nil {
logger.Infof("No exclude patterns in config.")
} else {
fileRegexps := generateRegexps(patterns) fileRegexps := generateRegexps(patterns)
if len(fileRegexps) == 0 {
return false
}
for _, regPattern := range fileRegexps { for _, regPattern := range fileRegexps {
if regPattern.MatchString(strings.ToLower(file)) { if regPattern.MatchString(strings.ToLower(file)) {
return true return true
} }
} }
} }

View File

@@ -7,6 +7,7 @@ import (
"regexp" "regexp"
"strconv" "strconv"
"strings" "strings"
"time"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
@@ -87,8 +88,10 @@ func initParserFields() {
//I = new ParserField("i", undefined, "Matches any ignored word", false); //I = new ParserField("i", undefined, "Matches any ignored word", false);
ret["d"] = newParserField("d", `(?:\.|-|_)`, false) ret["d"] = newParserField("d", `(?:\.|-|_)`, false)
ret["rating"] = newParserField("rating", `\d`, true)
ret["performer"] = newParserField("performer", ".*", true) ret["performer"] = newParserField("performer", ".*", true)
ret["studio"] = newParserField("studio", ".*", true) ret["studio"] = newParserField("studio", ".*", true)
ret["movie"] = newParserField("movie", ".*", true)
ret["tag"] = newParserField("tag", ".*", true) ret["tag"] = newParserField("tag", ".*", true)
// date fields // date fields
@@ -96,6 +99,7 @@ func initParserFields() {
ret["yyyy"] = newParserField("yyyy", `\d{4}`, true) ret["yyyy"] = newParserField("yyyy", `\d{4}`, true)
ret["yy"] = newParserField("yy", `\d{2}`, true) ret["yy"] = newParserField("yy", `\d{2}`, true)
ret["mm"] = newParserField("mm", `\d{2}`, true) ret["mm"] = newParserField("mm", `\d{2}`, true)
ret["mmm"] = newParserField("mmm", `\w{3}`, true)
ret["dd"] = newParserField("dd", `\d{2}`, true) ret["dd"] = newParserField("dd", `\d{2}`, true)
ret["yyyymmdd"] = newFullDateParserField("yyyymmdd", `\d{8}`) ret["yyyymmdd"] = newFullDateParserField("yyyymmdd", `\d{8}`)
ret["yymmdd"] = newFullDateParserField("yymmdd", `\d{6}`) ret["yymmdd"] = newFullDateParserField("yymmdd", `\d{6}`)
@@ -204,6 +208,7 @@ type sceneHolder struct {
mm string mm string
dd string dd string
performers []string performers []string
movies []string
studio string studio string
tags []string tags []string
} }
@@ -222,6 +227,10 @@ func newSceneHolder(scene *models.Scene) *sceneHolder {
return &ret return &ret
} }
func validateRating(rating int) bool {
return rating >= 1 && rating <= 5
}
func validateDate(dateStr string) bool { func validateDate(dateStr string) bool {
splits := strings.Split(dateStr, "-") splits := strings.Split(dateStr, "-")
if len(splits) != 3 { if len(splits) != 3 {
@@ -283,6 +292,20 @@ func (h *sceneHolder) setDate(field *parserField, value string) {
} }
} }
func mmmToMonth(mmm string) string {
format := "02-Jan-2006"
dateStr := "01-" + mmm + "-2000"
t, err := time.Parse(format, dateStr)
if err != nil {
return ""
}
// expect month in two-digit format
format = "01-02-2006"
return t.Format(format)[0:2]
}
func (h *sceneHolder) setField(field parserField, value interface{}) { func (h *sceneHolder) setField(field parserField, value interface{}) {
if field.isFullDateField { if field.isFullDateField {
h.setDate(&field, value.(string)) h.setDate(&field, value.(string))
@@ -302,27 +325,35 @@ func (h *sceneHolder) setField(field parserField, value interface{}) {
Valid: true, Valid: true,
} }
} }
case "rating":
rating, _ := strconv.Atoi(value.(string))
if validateRating(rating) {
h.result.Rating = sql.NullInt64{
Int64: int64(rating),
Valid: true,
}
}
case "performer": case "performer":
// add performer to list // add performer to list
h.performers = append(h.performers, value.(string)) h.performers = append(h.performers, value.(string))
case "studio": case "studio":
h.studio = value.(string) h.studio = value.(string)
case "movie":
h.movies = append(h.movies, value.(string))
case "tag": case "tag":
h.tags = append(h.tags, value.(string)) h.tags = append(h.tags, value.(string))
case "yyyy": case "yyyy":
h.yyyy = value.(string) h.yyyy = value.(string)
break
case "yy": case "yy":
v := value.(string) v := value.(string)
v = "20" + v v = "20" + v
h.yyyy = v h.yyyy = v
break case "mmm":
h.mm = mmmToMonth(value.(string))
case "mm": case "mm":
h.mm = value.(string) h.mm = value.(string)
break
case "dd": case "dd":
h.dd = value.(string) h.dd = value.(string)
break
} }
} }
@@ -374,7 +405,7 @@ func (m parseMapper) parse(scene *models.Scene) *sceneHolder {
} }
type performerQueryer interface { type performerQueryer interface {
FindByNames(names []string, tx *sqlx.Tx) ([]*models.Performer, error) FindByNames(names []string, tx *sqlx.Tx, nocase bool) ([]*models.Performer, error)
} }
type sceneQueryer interface { type sceneQueryer interface {
@@ -382,11 +413,15 @@ type sceneQueryer interface {
} }
type tagQueryer interface { type tagQueryer interface {
FindByName(name string, tx *sqlx.Tx) (*models.Tag, error) FindByName(name string, tx *sqlx.Tx, nocase bool) (*models.Tag, error)
} }
type studioQueryer interface { type studioQueryer interface {
FindByName(name string, tx *sqlx.Tx) (*models.Studio, error) FindByName(name string, tx *sqlx.Tx, nocase bool) (*models.Studio, error)
}
type movieQueryer interface {
FindByName(name string, tx *sqlx.Tx, nocase bool) (*models.Movie, error)
} }
type SceneFilenameParser struct { type SceneFilenameParser struct {
@@ -396,12 +431,14 @@ type SceneFilenameParser struct {
whitespaceRE *regexp.Regexp whitespaceRE *regexp.Regexp
performerCache map[string]*models.Performer performerCache map[string]*models.Performer
studioCache map[string]*models.Studio studioCache map[string]*models.Studio
movieCache map[string]*models.Movie
tagCache map[string]*models.Tag tagCache map[string]*models.Tag
performerQuery performerQueryer performerQuery performerQueryer
sceneQuery sceneQueryer sceneQuery sceneQueryer
tagQuery tagQueryer tagQuery tagQueryer
studioQuery studioQueryer studioQuery studioQueryer
movieQuery movieQueryer
} }
func NewSceneFilenameParser(filter *models.FindFilterType, config models.SceneParserInput) *SceneFilenameParser { func NewSceneFilenameParser(filter *models.FindFilterType, config models.SceneParserInput) *SceneFilenameParser {
@@ -413,6 +450,7 @@ func NewSceneFilenameParser(filter *models.FindFilterType, config models.ScenePa
p.performerCache = make(map[string]*models.Performer) p.performerCache = make(map[string]*models.Performer)
p.studioCache = make(map[string]*models.Studio) p.studioCache = make(map[string]*models.Studio)
p.movieCache = make(map[string]*models.Movie)
p.tagCache = make(map[string]*models.Tag) p.tagCache = make(map[string]*models.Tag)
p.initWhiteSpaceRegex() p.initWhiteSpaceRegex()
@@ -429,6 +467,9 @@ func NewSceneFilenameParser(filter *models.FindFilterType, config models.ScenePa
studioQuery := models.NewStudioQueryBuilder() studioQuery := models.NewStudioQueryBuilder()
p.studioQuery = &studioQuery p.studioQuery = &studioQuery
movieQuery := models.NewMovieQueryBuilder()
p.movieQuery = &movieQuery
return p return p
} }
@@ -505,7 +546,7 @@ func (p *SceneFilenameParser) queryPerformer(performerName string) *models.Perfo
} }
// perform an exact match and grab the first // perform an exact match and grab the first
performers, _ := p.performerQuery.FindByNames([]string{performerName}, nil) performers, _ := p.performerQuery.FindByNames([]string{performerName}, nil, true)
var ret *models.Performer var ret *models.Performer
if len(performers) > 0 { if len(performers) > 0 {
@@ -527,7 +568,7 @@ func (p *SceneFilenameParser) queryStudio(studioName string) *models.Studio {
return ret return ret
} }
ret, _ := p.studioQuery.FindByName(studioName, nil) ret, _ := p.studioQuery.FindByName(studioName, nil, true)
// add result to cache // add result to cache
p.studioCache[studioName] = ret p.studioCache[studioName] = ret
@@ -535,6 +576,23 @@ func (p *SceneFilenameParser) queryStudio(studioName string) *models.Studio {
return ret return ret
} }
func (p *SceneFilenameParser) queryMovie(movieName string) *models.Movie {
// massage the movie name
movieName = delimiterRE.ReplaceAllString(movieName, " ")
// check cache first
if ret, found := p.movieCache[movieName]; found {
return ret
}
ret, _ := p.movieQuery.FindByName(movieName, nil, true)
// add result to cache
p.movieCache[movieName] = ret
return ret
}
func (p *SceneFilenameParser) queryTag(tagName string) *models.Tag { func (p *SceneFilenameParser) queryTag(tagName string) *models.Tag {
// massage the performer name // massage the performer name
tagName = delimiterRE.ReplaceAllString(tagName, " ") tagName = delimiterRE.ReplaceAllString(tagName, " ")
@@ -545,7 +603,7 @@ func (p *SceneFilenameParser) queryTag(tagName string) *models.Tag {
} }
// match tag name exactly // match tag name exactly
ret, _ := p.tagQuery.FindByName(tagName, nil) ret, _ := p.tagQuery.FindByName(tagName, nil, true)
// add result to cache // add result to cache
p.tagCache[tagName] = ret p.tagCache[tagName] = ret
@@ -596,6 +654,24 @@ func (p *SceneFilenameParser) setStudio(h sceneHolder, result *models.SceneParse
} }
} }
func (p *SceneFilenameParser) setMovies(h sceneHolder, result *models.SceneParserResult) {
// query for each movie
moviesSet := make(map[int]bool)
for _, movieName := range h.movies {
if movieName != "" {
movie := p.queryMovie(movieName)
if movie != nil {
if _, found := moviesSet[movie.ID]; !found {
result.Movies = append(result.Movies, &models.SceneMovieID{
MovieID: strconv.Itoa(movie.ID),
})
moviesSet[movie.ID] = true
}
}
}
}
}
func (p *SceneFilenameParser) setParserResult(h sceneHolder, result *models.SceneParserResult) { func (p *SceneFilenameParser) setParserResult(h sceneHolder, result *models.SceneParserResult) {
if h.result.Title.Valid { if h.result.Title.Valid {
title := h.result.Title.String title := h.result.Title.String
@@ -612,6 +688,11 @@ func (p *SceneFilenameParser) setParserResult(h sceneHolder, result *models.Scen
result.Date = &h.result.Date.String result.Date = &h.result.Date.String
} }
if h.result.Rating.Valid {
rating := int(h.result.Rating.Int64)
result.Rating = &rating
}
if len(h.performers) > 0 { if len(h.performers) > 0 {
p.setPerformers(h, result) p.setPerformers(h, result)
} }
@@ -619,4 +700,9 @@ func (p *SceneFilenameParser) setParserResult(h sceneHolder, result *models.Scen
p.setTags(h, result) p.setTags(h, result)
} }
p.setStudio(h, result) p.setStudio(h, result)
if len(h.movies) > 0 {
p.setMovies(h, result)
}
} }

View File

@@ -16,9 +16,14 @@ type PreviewGenerator struct {
VideoFilename string VideoFilename string
ImageFilename string ImageFilename string
OutputDirectory string OutputDirectory string
GenerateVideo bool
GenerateImage bool
PreviewPreset string
} }
func NewPreviewGenerator(videoFile ffmpeg.VideoFile, videoFilename string, imageFilename string, outputDirectory string) (*PreviewGenerator, error) { func NewPreviewGenerator(videoFile ffmpeg.VideoFile, videoFilename string, imageFilename string, outputDirectory string, generateVideo bool, generateImage bool, previewPreset string) (*PreviewGenerator, error) {
exists, err := utils.FileExists(videoFile.Path) exists, err := utils.FileExists(videoFile.Path)
if !exists { if !exists {
return nil, err return nil, err
@@ -37,6 +42,9 @@ func NewPreviewGenerator(videoFile ffmpeg.VideoFile, videoFilename string, image
VideoFilename: videoFilename, VideoFilename: videoFilename,
ImageFilename: imageFilename, ImageFilename: imageFilename,
OutputDirectory: outputDirectory, OutputDirectory: outputDirectory,
GenerateVideo: generateVideo,
GenerateImage: generateImage,
PreviewPreset: previewPreset,
}, nil }, nil
} }
@@ -47,12 +55,17 @@ func (g *PreviewGenerator) Generate() error {
if err := g.generateConcatFile(); err != nil { if err := g.generateConcatFile(); err != nil {
return err return err
} }
if g.GenerateVideo {
if err := g.generateVideo(&encoder); err != nil { if err := g.generateVideo(&encoder); err != nil {
return err return err
} }
}
if g.GenerateImage {
if err := g.generateImage(&encoder); err != nil { if err := g.generateImage(&encoder); err != nil {
return err return err
} }
}
return nil return nil
} }
@@ -91,7 +104,7 @@ func (g *PreviewGenerator) generateVideo(encoder *ffmpeg.Encoder) error {
Width: 640, Width: 640,
OutputPath: chunkOutputPath, OutputPath: chunkOutputPath,
} }
encoder.ScenePreviewVideoChunk(g.Info.VideoFile, options) encoder.ScenePreviewVideoChunk(g.Info.VideoFile, options, g.PreviewPreset)
} }
videoOutputPath := filepath.Join(g.OutputDirectory, g.VideoFilename) videoOutputPath := filepath.Join(g.OutputDirectory, g.VideoFilename)

View File

@@ -38,6 +38,14 @@ func (jp *jsonUtils) saveStudio(checksum string, studio *jsonschema.Studio) erro
return jsonschema.SaveStudioFile(instance.Paths.JSON.StudioJSONPath(checksum), studio) return jsonschema.SaveStudioFile(instance.Paths.JSON.StudioJSONPath(checksum), studio)
} }
func (jp *jsonUtils) getMovie(checksum string) (*jsonschema.Movie, error) {
return jsonschema.LoadMovieFile(instance.Paths.JSON.MovieJSONPath(checksum))
}
func (jp *jsonUtils) saveMovie(checksum string, movie *jsonschema.Movie) error {
return jsonschema.SaveMovieFile(instance.Paths.JSON.MovieJSONPath(checksum), movie)
}
func (jp *jsonUtils) getScene(checksum string) (*jsonschema.Scene, error) { func (jp *jsonUtils) getScene(checksum string) (*jsonschema.Scene, error) {
return jsonschema.LoadSceneFile(instance.Paths.JSON.SceneJSONPath(checksum)) return jsonschema.LoadSceneFile(instance.Paths.JSON.SceneJSONPath(checksum))
} }

View File

@@ -1,8 +1,8 @@
package jsonschema package jsonschema
import ( import (
"encoding/json"
"fmt" "fmt"
"github.com/json-iterator/go"
"os" "os"
) )
@@ -19,6 +19,7 @@ type PathMapping struct {
type Mappings struct { type Mappings struct {
Performers []NameMapping `json:"performers"` Performers []NameMapping `json:"performers"`
Studios []NameMapping `json:"studios"` Studios []NameMapping `json:"studios"`
Movies []NameMapping `json:"movies"`
Galleries []PathMapping `json:"galleries"` Galleries []PathMapping `json:"galleries"`
Scenes []PathMapping `json:"scenes"` Scenes []PathMapping `json:"scenes"`
} }
@@ -30,6 +31,7 @@ func LoadMappingsFile(filePath string) (*Mappings, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
var json = jsoniter.ConfigCompatibleWithStandardLibrary
jsonParser := json.NewDecoder(file) jsonParser := json.NewDecoder(file)
err = jsonParser.Decode(&mappings) err = jsonParser.Decode(&mappings)
if err != nil { if err != nil {

View File

@@ -0,0 +1,47 @@
package jsonschema
import (
"fmt"
"github.com/json-iterator/go"
"os"
"github.com/stashapp/stash/pkg/models"
)
type Movie struct {
Name string `json:"name,omitempty"`
Aliases string `json:"aliases,omitempty"`
Duration int `json:"duration,omitempty"`
Date string `json:"date,omitempty"`
Rating int `json:"rating,omitempty"`
Director string `json:"director,omitempty"`
Synopsis string `json:"sypnopsis,omitempty"`
FrontImage string `json:"front_image,omitempty"`
BackImage string `json:"back_image,omitempty"`
URL string `json:"url,omitempty"`
CreatedAt models.JSONTime `json:"created_at,omitempty"`
UpdatedAt models.JSONTime `json:"updated_at,omitempty"`
}
func LoadMovieFile(filePath string) (*Movie, error) {
var movie Movie
file, err := os.Open(filePath)
defer file.Close()
if err != nil {
return nil, err
}
var json = jsoniter.ConfigCompatibleWithStandardLibrary
jsonParser := json.NewDecoder(file)
err = jsonParser.Decode(&movie)
if err != nil {
return nil, err
}
return &movie, nil
}
func SaveMovieFile(filePath string, movie *Movie) error {
if movie == nil {
return fmt.Errorf("movie must not be nil")
}
return marshalToFile(filePath, movie)
}

View File

@@ -1,14 +1,16 @@
package jsonschema package jsonschema
import ( import (
"encoding/json"
"fmt" "fmt"
"github.com/stashapp/stash/pkg/models" "github.com/json-iterator/go"
"os" "os"
"github.com/stashapp/stash/pkg/models"
) )
type Performer struct { type Performer struct {
Name string `json:"name,omitempty"` Name string `json:"name,omitempty"`
Gender string `json:"gender,omitempty"`
URL string `json:"url,omitempty"` URL string `json:"url,omitempty"`
Twitter string `json:"twitter,omitempty"` Twitter string `json:"twitter,omitempty"`
Instagram string `json:"instagram,omitempty"` Instagram string `json:"instagram,omitempty"`
@@ -36,6 +38,7 @@ func LoadPerformerFile(filePath string) (*Performer, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
var json = jsoniter.ConfigCompatibleWithStandardLibrary
jsonParser := json.NewDecoder(file) jsonParser := json.NewDecoder(file)
err = jsonParser.Decode(&performer) err = jsonParser.Decode(&performer)
if err != nil { if err != nil {

View File

@@ -1,10 +1,11 @@
package jsonschema package jsonschema
import ( import (
"encoding/json"
"fmt" "fmt"
"github.com/stashapp/stash/pkg/models" "github.com/json-iterator/go"
"os" "os"
"github.com/stashapp/stash/pkg/models"
) )
type SceneMarker struct { type SceneMarker struct {
@@ -21,21 +22,29 @@ type SceneFile struct {
Duration string `json:"duration"` Duration string `json:"duration"`
VideoCodec string `json:"video_codec"` VideoCodec string `json:"video_codec"`
AudioCodec string `json:"audio_codec"` AudioCodec string `json:"audio_codec"`
Format string `json:"format"`
Width int `json:"width"` Width int `json:"width"`
Height int `json:"height"` Height int `json:"height"`
Framerate string `json:"framerate"` Framerate string `json:"framerate"`
Bitrate int `json:"bitrate"` Bitrate int `json:"bitrate"`
} }
type SceneMovie struct {
MovieName string `json:"movieName,omitempty"`
SceneIndex int `json:"scene_index,omitempty"`
}
type Scene struct { type Scene struct {
Title string `json:"title,omitempty"` Title string `json:"title,omitempty"`
Studio string `json:"studio,omitempty"` Studio string `json:"studio,omitempty"`
URL string `json:"url,omitempty"` URL string `json:"url,omitempty"`
Date string `json:"date,omitempty"` Date string `json:"date,omitempty"`
Rating int `json:"rating,omitempty"` Rating int `json:"rating,omitempty"`
OCounter int `json:"o_counter,omitempty"`
Details string `json:"details,omitempty"` Details string `json:"details,omitempty"`
Gallery string `json:"gallery,omitempty"` Gallery string `json:"gallery,omitempty"`
Performers []string `json:"performers,omitempty"` Performers []string `json:"performers,omitempty"`
Movies []SceneMovie `json:"movies,omitempty"`
Tags []string `json:"tags,omitempty"` Tags []string `json:"tags,omitempty"`
Markers []SceneMarker `json:"markers,omitempty"` Markers []SceneMarker `json:"markers,omitempty"`
File *SceneFile `json:"file,omitempty"` File *SceneFile `json:"file,omitempty"`
@@ -51,6 +60,7 @@ func LoadSceneFile(filePath string) (*Scene, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
var json = jsoniter.ConfigCompatibleWithStandardLibrary
jsonParser := json.NewDecoder(file) jsonParser := json.NewDecoder(file)
err = jsonParser.Decode(&scene) err = jsonParser.Decode(&scene)
if err != nil { if err != nil {

View File

@@ -1,8 +1,8 @@
package jsonschema package jsonschema
import ( import (
"encoding/json"
"fmt" "fmt"
"github.com/json-iterator/go"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"os" "os"
) )
@@ -31,6 +31,7 @@ func LoadScrapedFile(filePath string) ([]ScrapedItem, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
var json = jsoniter.ConfigCompatibleWithStandardLibrary
jsonParser := json.NewDecoder(file) jsonParser := json.NewDecoder(file)
err = jsonParser.Decode(&scraped) err = jsonParser.Decode(&scraped)
if err != nil { if err != nil {

View File

@@ -1,8 +1,8 @@
package jsonschema package jsonschema
import ( import (
"encoding/json"
"fmt" "fmt"
"github.com/json-iterator/go"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"os" "os"
) )
@@ -22,6 +22,7 @@ func LoadStudioFile(filePath string) (*Studio, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
var json = jsoniter.ConfigCompatibleWithStandardLibrary
jsonParser := json.NewDecoder(file) jsonParser := json.NewDecoder(file)
err = jsonParser.Decode(&studio) err = jsonParser.Decode(&studio)
if err != nil { if err != nil {

View File

@@ -2,7 +2,8 @@ package jsonschema
import ( import (
"bytes" "bytes"
"encoding/json" "github.com/json-iterator/go"
"io/ioutil" "io/ioutil"
"time" "time"
) )
@@ -25,6 +26,7 @@ func marshalToFile(filePath string, j interface{}) error {
func encode(j interface{}) ([]byte, error) { func encode(j interface{}) ([]byte, error) {
buffer := &bytes.Buffer{} buffer := &bytes.Buffer{}
var json = jsoniter.ConfigCompatibleWithStandardLibrary
encoder := json.NewEncoder(buffer) encoder := json.NewEncoder(buffer)
encoder.SetEscapeHTML(false) encoder.SetEscapeHTML(false)
encoder.SetIndent("", " ") encoder.SetIndent("", " ")

View File

@@ -154,9 +154,6 @@ func (s *singleton) RefreshConfig() {
_ = utils.EnsureDir(s.Paths.Generated.Markers) _ = utils.EnsureDir(s.Paths.Generated.Markers)
_ = utils.EnsureDir(s.Paths.Generated.Transcodes) _ = utils.EnsureDir(s.Paths.Generated.Transcodes)
_ = utils.EnsureDir(s.Paths.JSON.Performers) paths.EnsureJSONDirs()
_ = utils.EnsureDir(s.Paths.JSON.Scenes)
_ = utils.EnsureDir(s.Paths.JSON.Galleries)
_ = utils.EnsureDir(s.Paths.JSON.Studios)
} }
} }

View File

@@ -1,17 +1,44 @@
package manager package manager
import ( import (
"path/filepath"
"strconv"
"sync"
"time"
"github.com/bmatcuk/doublestar" "github.com/bmatcuk/doublestar"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
"path/filepath"
"strconv"
"sync"
"time"
) )
var extensionsToScan = []string{"zip", "m4v", "mp4", "mov", "wmv", "avi", "mpg", "mpeg", "rmvb", "rm", "flv", "asf", "mkv", "webm"}
var extensionsGallery = []string{"zip"}
func constructGlob() string { // create a sequence for glob doublestar from our extensions
extLen := len(extensionsToScan)
glb := "{"
for i := 0; i < extLen-1; i++ { // append extensions and commas
glb += extensionsToScan[i] + ","
}
if extLen >= 1 { // append last extension without comma
glb += extensionsToScan[extLen-1]
}
glb += "}"
return glb
}
func isGallery(pathname string) bool {
for _, ext := range extensionsGallery {
if filepath.Ext(pathname) == "."+ext {
return true
}
}
return false
}
type TaskStatus struct { type TaskStatus struct {
Status JobStatus Status JobStatus
Progress float64 Progress float64
@@ -67,7 +94,7 @@ func (s *singleton) Scan(useFileMetadata bool) {
var results []string var results []string
for _, path := range config.GetStashPaths() { for _, path := range config.GetStashPaths() {
globPath := filepath.Join(path, "**/*.{zip,m4v,mp4,mov,wmv,avi,mpg,mpeg,rmvb,rm,flv,asf,mkv,webm}") // TODO: Make this configurable globPath := filepath.Join(path, "**/*."+constructGlob())
globResults, _ := doublestar.Glob(globPath) globResults, _ := doublestar.Glob(globPath)
results = append(results, globResults...) results = append(results, globResults...)
} }
@@ -96,6 +123,15 @@ func (s *singleton) Scan(useFileMetadata bool) {
} }
logger.Info("Finished scan") logger.Info("Finished scan")
for _, path := range results {
if isGallery(path) {
wg.Add(1)
task := ScanTask{FilePath: path, UseFileMetadata: false}
go task.associateGallery(&wg)
wg.Wait()
}
}
logger.Info("Finished gallery association")
}() }()
} }
@@ -135,7 +171,7 @@ func (s *singleton) Export() {
}() }()
} }
func (s *singleton) Generate(sprites bool, previews bool, markers bool, transcodes bool) { func (s *singleton) Generate(sprites bool, previews bool, previewPreset *models.PreviewPreset, imagePreviews bool, markers bool, transcodes bool, thumbnails bool) {
if s.Status.Status != Idle { if s.Status.Status != Idle {
return return
} }
@@ -143,13 +179,21 @@ func (s *singleton) Generate(sprites bool, previews bool, markers bool, transcod
s.Status.indefiniteProgress() s.Status.indefiniteProgress()
qb := models.NewSceneQueryBuilder() qb := models.NewSceneQueryBuilder()
qg := models.NewGalleryQueryBuilder()
//this.job.total = await ObjectionUtils.getCount(Scene); //this.job.total = await ObjectionUtils.getCount(Scene);
instance.Paths.Generated.EnsureTmpDir() instance.Paths.Generated.EnsureTmpDir()
preset := string(models.PreviewPresetSlow)
if previewPreset != nil && previewPreset.IsValid() {
preset = string(*previewPreset)
}
go func() { go func() {
defer s.returnToIdleState() defer s.returnToIdleState()
scenes, err := qb.All() scenes, err := qb.All()
var galleries []*models.Gallery
if err != nil { if err != nil {
logger.Errorf("failed to get scenes for generate") logger.Errorf("failed to get scenes for generate")
return return
@@ -158,18 +202,27 @@ func (s *singleton) Generate(sprites bool, previews bool, markers bool, transcod
delta := utils.Btoi(sprites) + utils.Btoi(previews) + utils.Btoi(markers) + utils.Btoi(transcodes) delta := utils.Btoi(sprites) + utils.Btoi(previews) + utils.Btoi(markers) + utils.Btoi(transcodes)
var wg sync.WaitGroup var wg sync.WaitGroup
s.Status.Progress = 0 s.Status.Progress = 0
total := len(scenes) lenScenes := len(scenes)
total := lenScenes
if thumbnails {
galleries, err = qg.All()
if err != nil {
logger.Errorf("failed to get galleries for generate")
return
}
total += len(galleries)
}
if s.Status.stopping { if s.Status.stopping {
logger.Info("Stopping due to user request") logger.Info("Stopping due to user request")
return return
} }
totalsNeeded := s.neededGenerate(scenes, sprites, previews, markers, transcodes) totalsNeeded := s.neededGenerate(scenes, sprites, previews, imagePreviews, markers, transcodes)
if totalsNeeded == nil { if totalsNeeded == nil {
logger.Infof("Taking too long to count content. Skipping...") logger.Infof("Taking too long to count content. Skipping...")
logger.Infof("Generating content") logger.Infof("Generating content")
} else { } else {
logger.Infof("Generating %d sprites %d previews %d markers %d transcodes", totalsNeeded.sprites, totalsNeeded.previews, totalsNeeded.markers, totalsNeeded.transcodes) logger.Infof("Generating %d sprites %d previews %d image previews %d markers %d transcodes", totalsNeeded.sprites, totalsNeeded.previews, totalsNeeded.imagePreviews, totalsNeeded.markers, totalsNeeded.transcodes)
} }
for i, scene := range scenes { for i, scene := range scenes {
s.Status.setProgress(i, total) s.Status.setProgress(i, total)
@@ -196,7 +249,7 @@ func (s *singleton) Generate(sprites bool, previews bool, markers bool, transcod
} }
if previews { if previews {
task := GeneratePreviewTask{Scene: *scene} task := GeneratePreviewTask{Scene: *scene, ImagePreview: imagePreviews, PreviewPreset: preset}
go task.Start(&wg) go task.Start(&wg)
} }
@@ -212,6 +265,77 @@ func (s *singleton) Generate(sprites bool, previews bool, markers bool, transcod
wg.Wait() wg.Wait()
} }
if thumbnails {
logger.Infof("Generating thumbnails for the galleries")
for i, gallery := range galleries {
s.Status.setProgress(lenScenes+i, total)
if s.Status.stopping {
logger.Info("Stopping due to user request")
return
}
if gallery == nil {
logger.Errorf("nil gallery, skipping generate")
continue
}
wg.Add(1)
task := GenerateGthumbsTask{Gallery: *gallery}
go task.Start(&wg)
wg.Wait()
}
}
logger.Infof("Generate finished")
}()
}
func (s *singleton) GenerateDefaultScreenshot(sceneId string) {
s.generateScreenshot(sceneId, nil)
}
func (s *singleton) GenerateScreenshot(sceneId string, at float64) {
s.generateScreenshot(sceneId, &at)
}
// generate default screenshot if at is nil
func (s *singleton) generateScreenshot(sceneId string, at *float64) {
if s.Status.Status != Idle {
return
}
s.Status.SetStatus(Generate)
s.Status.indefiniteProgress()
qb := models.NewSceneQueryBuilder()
instance.Paths.Generated.EnsureTmpDir()
go func() {
defer s.returnToIdleState()
sceneIdInt, err := strconv.Atoi(sceneId)
if err != nil {
logger.Errorf("Error parsing scene id %s: %s", sceneId, err.Error())
return
}
scene, err := qb.Find(sceneIdInt)
if err != nil || scene == nil {
logger.Errorf("failed to get scene for generate")
return
}
task := GenerateScreenshotTask{
Scene: *scene,
ScreenshotAt: at,
}
var wg sync.WaitGroup
wg.Add(1)
go task.Start(&wg)
wg.Wait()
logger.Infof("Generate finished") logger.Infof("Generate finished")
}() }()
} }
@@ -390,6 +514,7 @@ func (s *singleton) Clean() {
s.Status.indefiniteProgress() s.Status.indefiniteProgress()
qb := models.NewSceneQueryBuilder() qb := models.NewSceneQueryBuilder()
gqb := models.NewGalleryQueryBuilder()
go func() { go func() {
defer s.returnToIdleState() defer s.returnToIdleState()
@@ -400,6 +525,12 @@ func (s *singleton) Clean() {
return return
} }
galleries, err := gqb.All()
if err != nil {
logger.Errorf("failed to fetch list of galleries for cleaning")
return
}
if s.Status.stopping { if s.Status.stopping {
logger.Info("Stopping due to user request") logger.Info("Stopping due to user request")
return return
@@ -407,7 +538,7 @@ func (s *singleton) Clean() {
var wg sync.WaitGroup var wg sync.WaitGroup
s.Status.Progress = 0 s.Status.Progress = 0
total := len(scenes) total := len(scenes) + len(galleries)
for i, scene := range scenes { for i, scene := range scenes {
s.Status.setProgress(i, total) s.Status.setProgress(i, total)
if s.Status.stopping { if s.Status.stopping {
@@ -422,7 +553,26 @@ func (s *singleton) Clean() {
wg.Add(1) wg.Add(1)
task := CleanTask{Scene: *scene} task := CleanTask{Scene: scene}
go task.Start(&wg)
wg.Wait()
}
for i, gallery := range galleries {
s.Status.setProgress(len(scenes)+i, total)
if s.Status.stopping {
logger.Info("Stopping due to user request")
return
}
if gallery == nil {
logger.Errorf("nil gallery, skipping Clean")
continue
}
wg.Add(1)
task := CleanTask{Gallery: gallery}
go task.Start(&wg) go task.Start(&wg)
wg.Wait() wg.Wait()
} }
@@ -445,7 +595,7 @@ func (s *singleton) returnToIdleState() {
} }
func (s *singleton) neededScan(paths []string) int64 { func (s *singleton) neededScan(paths []string) int64 {
var neededScans int64 = 0 var neededScans int64
for _, path := range paths { for _, path := range paths {
task := ScanTask{FilePath: path} task := ScanTask{FilePath: path}
@@ -459,21 +609,22 @@ func (s *singleton) neededScan(paths []string) int64 {
type totalsGenerate struct { type totalsGenerate struct {
sprites int64 sprites int64
previews int64 previews int64
imagePreviews int64
markers int64 markers int64
transcodes int64 transcodes int64
} }
func (s *singleton) neededGenerate(scenes []*models.Scene, sprites, previews, markers, transcodes bool) *totalsGenerate { func (s *singleton) neededGenerate(scenes []*models.Scene, sprites, previews, imagePreviews, markers, transcodes bool) *totalsGenerate {
var totals totalsGenerate var totals totalsGenerate
const timeoutSecs = 90 * time.Second const timeout = 90 * time.Second
// create a control channel through which to signal the counting loop when the timeout is reached // create a control channel through which to signal the counting loop when the timeout is reached
chTimeout := make(chan struct{}) chTimeout := make(chan struct{})
//run the timeout function in a separate thread //run the timeout function in a separate thread
go func() { go func() {
time.Sleep(timeoutSecs) time.Sleep(timeout)
chTimeout <- struct{}{} chTimeout <- struct{}{}
}() }()
@@ -488,10 +639,13 @@ func (s *singleton) neededGenerate(scenes []*models.Scene, sprites, previews, ma
} }
if previews { if previews {
task := GeneratePreviewTask{Scene: *scene} task := GeneratePreviewTask{Scene: *scene, ImagePreview: imagePreviews}
if !task.doesPreviewExist(task.Scene.Checksum) { if !task.doesVideoPreviewExist(task.Scene.Checksum) {
totals.previews++ totals.previews++
} }
if imagePreviews && !task.doesImagePreviewExist(task.Scene.Checksum) {
totals.imagePreviews++
}
} }
if markers { if markers {

View File

@@ -1,12 +1,18 @@
package paths package paths
import ( import (
"fmt"
"github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/utils"
"path/filepath" "path/filepath"
) )
type galleryPaths struct{} type galleryPaths struct{}
const thumbDir = "gthumbs"
const thumbDirDepth int = 2
const thumbDirLength int = 2 // thumbDirDepth * thumbDirLength must be smaller than the length of checksum
func newGalleryPaths() *galleryPaths { func newGalleryPaths() *galleryPaths {
return &galleryPaths{} return &galleryPaths{}
} }
@@ -15,6 +21,19 @@ func (gp *galleryPaths) GetExtractedPath(checksum string) string {
return filepath.Join(config.GetCachePath(), checksum) return filepath.Join(config.GetCachePath(), checksum)
} }
func GetGthumbCache() string {
return filepath.Join(config.GetCachePath(), thumbDir)
}
func GetGthumbDir(checksum string) string {
return filepath.Join(config.GetCachePath(), thumbDir, utils.GetIntraDir(checksum, thumbDirDepth, thumbDirLength), checksum)
}
func GetGthumbPath(checksum string, index int, width int) string {
fname := fmt.Sprintf("%s_%d_%d.jpg", checksum, index, width)
return filepath.Join(config.GetCachePath(), thumbDir, utils.GetIntraDir(checksum, thumbDirDepth, thumbDirLength), checksum, fname)
}
func (gp *galleryPaths) GetExtractedFilePath(checksum string, fileName string) string { func (gp *galleryPaths) GetExtractedFilePath(checksum string, fileName string) string {
return filepath.Join(config.GetCachePath(), checksum, fileName) return filepath.Join(config.GetCachePath(), checksum, fileName)
} }

View File

@@ -2,10 +2,13 @@ package paths
import ( import (
"github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/manager/config"
"github.com/stashapp/stash/pkg/utils"
"path/filepath" "path/filepath"
) )
type jsonPaths struct { type jsonPaths struct {
Metadata string
MappingsFile string MappingsFile string
ScrapedFile string ScrapedFile string
@@ -13,19 +16,37 @@ type jsonPaths struct {
Scenes string Scenes string
Galleries string Galleries string
Studios string Studios string
Movies string
} }
func newJSONPaths() *jsonPaths { func newJSONPaths() *jsonPaths {
jp := jsonPaths{} jp := jsonPaths{}
jp.Metadata = config.GetMetadataPath()
jp.MappingsFile = filepath.Join(config.GetMetadataPath(), "mappings.json") jp.MappingsFile = filepath.Join(config.GetMetadataPath(), "mappings.json")
jp.ScrapedFile = filepath.Join(config.GetMetadataPath(), "scraped.json") jp.ScrapedFile = filepath.Join(config.GetMetadataPath(), "scraped.json")
jp.Performers = filepath.Join(config.GetMetadataPath(), "performers") jp.Performers = filepath.Join(config.GetMetadataPath(), "performers")
jp.Scenes = filepath.Join(config.GetMetadataPath(), "scenes") jp.Scenes = filepath.Join(config.GetMetadataPath(), "scenes")
jp.Galleries = filepath.Join(config.GetMetadataPath(), "galleries") jp.Galleries = filepath.Join(config.GetMetadataPath(), "galleries")
jp.Studios = filepath.Join(config.GetMetadataPath(), "studios") jp.Studios = filepath.Join(config.GetMetadataPath(), "studios")
jp.Movies = filepath.Join(config.GetMetadataPath(), "movies")
return &jp return &jp
} }
// GetJSONPaths returns a freshly constructed set of metadata JSON export
// paths derived from the current configuration's metadata directory.
func GetJSONPaths() *jsonPaths {
	return newJSONPaths()
}
// EnsureJSONDirs creates the metadata export directory tree (metadata root
// plus the per-entity subdirectories) if any part of it is missing.
func EnsureJSONDirs() {
	jp := GetJSONPaths()
	// Same creation order as before: root first, then entity directories.
	for _, dir := range []string{
		jp.Metadata,
		jp.Scenes,
		jp.Galleries,
		jp.Performers,
		jp.Studios,
		jp.Movies,
	} {
		utils.EnsureDir(dir)
	}
}
func (jp *jsonPaths) PerformerJSONPath(checksum string) string { func (jp *jsonPaths) PerformerJSONPath(checksum string) string {
return filepath.Join(jp.Performers, checksum+".json") return filepath.Join(jp.Performers, checksum+".json")
} }
@@ -37,3 +58,7 @@ func (jp *jsonPaths) SceneJSONPath(checksum string) string {
func (jp *jsonPaths) StudioJSONPath(checksum string) string { func (jp *jsonPaths) StudioJSONPath(checksum string) string {
return filepath.Join(jp.Studios, checksum+".json") return filepath.Join(jp.Studios, checksum+".json")
} }
// MovieJSONPath returns the location of the exported JSON file for the
// movie identified by the given checksum.
func (jp *jsonPaths) MovieJSONPath(checksum string) string {
	fn := checksum + ".json"
	return filepath.Join(jp.Movies, fn)
}

View File

@@ -122,6 +122,27 @@ func DeleteGeneratedSceneFiles(scene *models.Scene) {
} }
} }
// DeleteSceneMarkerFiles removes the generated stream video and preview
// image for the scene marker at the given offset (in seconds). Files that
// do not exist are skipped; removal failures are logged as warnings and
// do not abort the remaining deletions.
func DeleteSceneMarkerFiles(scene *models.Scene, seconds int) {
	videoPath := GetInstance().Paths.SceneMarkers.GetStreamPath(scene.Checksum, seconds)
	imagePath := GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.Checksum, seconds)

	exists, _ := utils.FileExists(videoPath)
	if exists {
		if err := os.Remove(videoPath); err != nil {
			logger.Warnf("Could not delete file %s: %s", videoPath, err.Error())
		}
	}

	exists, _ = utils.FileExists(imagePath)
	if exists {
		if err := os.Remove(imagePath); err != nil {
			// Bug fix: this warning previously reported videoPath even
			// though it is imagePath that failed to delete.
			logger.Warnf("Could not delete file %s: %s", imagePath, err.Error())
		}
	}
}
func DeleteSceneFile(scene *models.Scene) { func DeleteSceneFile(scene *models.Scene) {
// kill any running encoders // kill any running encoders
KillRunningStreams(scene.Path) KillRunningStreams(scene.Path)

16
pkg/manager/screenshot.go Normal file
View File

@@ -0,0 +1,16 @@
package manager
import (
"github.com/stashapp/stash/pkg/ffmpeg"
)
// makeScreenshot captures a single frame from the probed video file at the
// given time offset (seconds) and writes it to outputPath, scaled to the
// requested width at the requested JPEG quality.
func makeScreenshot(probeResult ffmpeg.VideoFile, outputPath string, quality int, width int, time float64) {
	options := ffmpeg.ScreenshotOptions{
		OutputPath: outputPath,
		Quality:    quality,
		Time:       time,
		Width:      width,
	}
	ffmpeg.NewEncoder(instance.FFMPEGPath).Screenshot(probeResult, options)
}

View File

@@ -24,12 +24,10 @@ func (t *AutoTagPerformerTask) Start(wg *sync.WaitGroup) {
func getQueryRegex(name string) string { func getQueryRegex(name string) string {
const separatorChars = `.\-_ ` const separatorChars = `.\-_ `
// handle path separators // handle path separators
const endSeparatorChars = separatorChars + `\\/`
const separator = `[` + separatorChars + `]` const separator = `[` + separatorChars + `]`
const endSeparator = `[` + endSeparatorChars + `]`
ret := strings.Replace(name, " ", separator+"*", -1) ret := strings.Replace(name, " ", separator+"*", -1)
ret = "(?:^|" + endSeparator + "+)" + ret + "(?:$|" + endSeparator + "+)" ret = `(?:^|_|[^\w\d])` + ret + `(?:$|_|[^\w\d])`
return ret return ret
} }

View File

@@ -36,7 +36,15 @@ var testSeparators = []string{
" ", " ",
} }
func generateNamePatterns(name string, separator string) []string { var testEndSeparators = []string{
"{",
"}",
"(",
")",
",",
}
func generateNamePatterns(name, separator string) []string {
var ret []string var ret []string
ret = append(ret, fmt.Sprintf("%s%saaa"+testExtension, name, separator)) ret = append(ret, fmt.Sprintf("%s%saaa"+testExtension, name, separator))
ret = append(ret, fmt.Sprintf("aaa%s%s"+testExtension, separator, name)) ret = append(ret, fmt.Sprintf("aaa%s%s"+testExtension, separator, name))
@@ -152,13 +160,20 @@ func createScenes(tx *sqlx.Tx) error {
// create the scenes // create the scenes
var scenePatterns []string var scenePatterns []string
var falseScenePatterns []string var falseScenePatterns []string
for _, separator := range testSeparators {
separators := append(testSeparators, testEndSeparators...)
for _, separator := range separators {
scenePatterns = append(scenePatterns, generateNamePatterns(testName, separator)...) scenePatterns = append(scenePatterns, generateNamePatterns(testName, separator)...)
scenePatterns = append(scenePatterns, generateNamePatterns(strings.ToLower(testName), separator)...) scenePatterns = append(scenePatterns, generateNamePatterns(strings.ToLower(testName), separator)...)
falseScenePatterns = append(falseScenePatterns, generateFalseNamePattern(testName, separator))
}
// add test cases for intra-name separators
for _, separator := range testSeparators {
if separator != " " { if separator != " " {
scenePatterns = append(scenePatterns, generateNamePatterns(strings.Replace(testName, " ", separator, -1), separator)...) scenePatterns = append(scenePatterns, generateNamePatterns(strings.Replace(testName, " ", separator, -1), separator)...)
} }
falseScenePatterns = append(falseScenePatterns, generateFalseNamePattern(testName, separator))
} }
for _, fn := range scenePatterns { for _, fn := range scenePatterns {

Some files were not shown because too many files have changed in this diff Show More