Merge pull request #2907 from stashapp/files-refactor

Authored by WithoutPants on 2022-09-14 13:36:46 +10:00, committed by GitHub.
837 changed files with 81231 additions and 35614 deletions

.gitignore

@@ -16,7 +16,7 @@
 *.out
 
 # GraphQL generated output
-pkg/models/generated_*.go
+internal/api/generated_*.go
 ui/v2.5/src/core/generated-*.tsx
 
 ####


@@ -162,6 +162,10 @@ generate-frontend:
 generate-backend: touch-ui
 	go generate -mod=vendor ./cmd/stash
 
+.PHONY: generate-dataloaders
+generate-dataloaders:
+	go generate -mod=vendor ./internal/api/loaders
+
 # Regenerates stash-box client files
 .PHONY: generate-stash-box-client
 generate-stash-box-client:

go.mod

@@ -19,7 +19,7 @@ require (
 	github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a
 	github.com/jmoiron/sqlx v1.3.1
 	github.com/json-iterator/go v1.1.12
-	github.com/mattn/go-sqlite3 v1.14.6
+	github.com/mattn/go-sqlite3 v1.14.7
 	github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007
 	github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8
 	github.com/remeh/sizedwaitgroup v1.0.0
@@ -36,17 +36,18 @@
 	github.com/vektra/mockery/v2 v2.10.0
 	golang.org/x/crypto v0.0.0-20220321153916-2c7772ba3064
 	golang.org/x/image v0.0.0-20210220032944-ac19c3e999fb
-	golang.org/x/net v0.0.0-20211123203042-d83791d6bcd9
+	golang.org/x/net v0.0.0-20220722155237-a158d28d115b
-	golang.org/x/sys v0.0.0-20220329152356-43be30ef3008
+	golang.org/x/sys v0.0.0-20220808155132-1c4a2a72c664
 	golang.org/x/term v0.0.0-20210927222741-03fcf44c2211
 	golang.org/x/text v0.3.7
-	golang.org/x/tools v0.1.10 // indirect
+	golang.org/x/tools v0.1.12 // indirect
 	gopkg.in/sourcemap.v1 v1.0.5 // indirect
 	gopkg.in/yaml.v2 v2.4.0
 )
 
 require (
 	github.com/asticode/go-astisub v0.20.0
+	github.com/doug-martin/goqu/v9 v9.18.0
 	github.com/go-chi/httplog v0.2.1
 	github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4
 	github.com/hashicorp/golang-lru v0.5.4
@@ -55,7 +56,9 @@
 	github.com/lucasb-eyer/go-colorful v1.2.0
 	github.com/spf13/cast v1.4.1
 	github.com/vearutop/statigz v1.1.6
+	github.com/vektah/dataloaden v0.3.0
 	github.com/vektah/gqlparser/v2 v2.4.1
+	gopkg.in/guregu/null.v4 v4.0.0
 )
 
 require (
@@ -98,8 +101,7 @@
 	github.com/tidwall/match v1.1.1 // indirect
 	github.com/urfave/cli/v2 v2.4.0 // indirect
 	go.uber.org/atomic v1.7.0 // indirect
-	golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 // indirect
-	golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
+	golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect
 	gopkg.in/ini.v1 v1.66.4 // indirect
 	gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
 )
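
The new direct dependencies hint at the shape of the refactor: goqu for building SQL queries, dataloaden for generated per-request dataloaders, and guregu/null for nullable columns. As a hedged illustration of why goqu is useful here, a minimal, self-contained sketch of composing a query with it (the "files" table and "parent_folder_id" column names below are illustrative only, not taken from this PR):

package main

import (
	"fmt"

	"github.com/doug-martin/goqu/v9"
	// blank import registers the sqlite3 dialect with goqu
	_ "github.com/doug-martin/goqu/v9/dialect/sqlite3"
)

func main() {
	// Compose a SELECT without concatenating SQL strings by hand.
	sql, args, err := goqu.Dialect("sqlite3").
		From("files").
		Select("id", "basename").
		Where(goqu.Ex{"parent_folder_id": 1}).
		ToSQL()
	if err != nil {
		panic(err)
	}
	fmt.Println(sql, args)
}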

go.sum

@@ -65,6 +65,8 @@ github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBp
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
 github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
 github.com/ClickHouse/clickhouse-go v1.4.3/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI=
+github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60=
+github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
 github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
 github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0=
 github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
@@ -206,6 +208,8 @@ github.com/docker/docker v17.12.0-ce-rc1.0.20210128214336-420b1d36250f+incompati
 github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
 github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
 github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
+github.com/doug-martin/goqu/v9 v9.18.0 h1:/6bcuEtAe6nsSMVK/M+fOiXUNfyFF3yYtE07DBPFMYY=
+github.com/doug-martin/goqu/v9 v9.18.0/go.mod h1:nf0Wc2/hV3gYK9LiyqIrzBEVGlI8qW3GuDCEobC4wBQ=
 github.com/dustin/go-humanize v0.0.0-20180421182945-02af3965c54e/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
 github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
 github.com/edsrzf/mmap-go v0.0.0-20170320065105-0bce6a688712/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M=
@@ -248,8 +252,9 @@ github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2
 github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
 github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
 github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
-github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs=
 github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
+github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
+github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
 github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
 github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 h1:qZNfIGkIANxGv/OqtnntR4DfOY2+BgwR60cAcu/i3SE=
 github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4/go.mod h1:kW3HQ4UdaAyrUCSSDR4xUzBKW6O2iA4uHhk7AtyYp10=
@@ -535,8 +540,9 @@ github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
 github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
 github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
 github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
-github.com/lib/pq v1.10.0 h1:Zx5DJFEYQXio93kgXnQ09fXNiUKsqv4OUEu2UtGcB1E=
 github.com/lib/pq v1.10.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
+github.com/lib/pq v1.10.1 h1:6VXZrLU0jHBYyAqrSPa+MgPfnSvTPuMgK+k0o5kVFWo=
+github.com/lib/pq v1.10.1/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
 github.com/logrusorgru/aurora/v3 v3.0.0/go.mod h1:vsR12bk5grlLvLXAYrBsb5Oc/N+LxAlxggSjiwMnCUc=
 github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
 github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
@@ -570,8 +576,9 @@ github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOA
 github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
 github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
 github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
-github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg=
 github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
+github.com/mattn/go-sqlite3 v1.14.7 h1:fxWBnXkxfM6sRiuH3bqJ4CfzZojMOLVc0UTsTglEghA=
+github.com/mattn/go-sqlite3 v1.14.7/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
 github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
 github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
 github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso=
@@ -746,6 +753,8 @@ github.com/urfave/cli/v2 v2.4.0 h1:m2pxjjDFgDxSPtO8WSdbndj17Wu2y8vOT86wE/tjr+I=
 github.com/urfave/cli/v2 v2.4.0/go.mod h1:NX9W0zmTvedE5oDoOMs2RTC8RvdK98NTYZE5LbaEYPg=
 github.com/vearutop/statigz v1.1.6 h1:si1zvulh/6P4S/SjFticuKQ8/EgQISglaRuycj8PWso=
 github.com/vearutop/statigz v1.1.6/go.mod h1:czAv7iXgPv/s+xsgXpVEhhD0NSOQ4wZPgmM/n7LANDI=
+github.com/vektah/dataloaden v0.3.0 h1:ZfVN2QD6swgvp+tDqdH/OIT/wu3Dhu0cus0k5gIZS84=
+github.com/vektah/dataloaden v0.3.0/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U=
 github.com/vektah/gqlparser/v2 v2.4.0/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0=
 github.com/vektah/gqlparser/v2 v2.4.1 h1:QOyEn8DAPMUMARGMeshKDkDgNmVoEaEGiDB0uWxcSlQ=
 github.com/vektah/gqlparser/v2 v2.4.1/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0=
@@ -764,6 +773,7 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec
 github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
 github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
 github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
 github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
 gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b/go.mod h1:T3BPAOm2cqquPa0MKWeNkmOM5RQsRhkrwMWonFMN7fE=
 go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=
@@ -856,8 +866,9 @@ golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
 golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
-golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 h1:kQgndtyPBW/JIYERgdxfwMYh3AVStj88WQTlNDi2a+o=
 golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 h1:6zppjxzCulZykYSLyVDYbneBfbaBIQPYMevg0bEwv2s=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
 golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -913,8 +924,8 @@ golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qx
 golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
-golang.org/x/net v0.0.0-20211123203042-d83791d6bcd9 h1:0qxwC5n+ttVOINCBeRHO0nq9X7uy8SDsPoi5OaCdIEI=
-golang.org/x/net v0.0.0-20211123203042-d83791d6bcd9/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b h1:PxfKdU9lEEDYjdIzOtC4qFWgkU2rGHdKlKowJSMN9h0=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
 golang.org/x/oauth2 v0.0.0-20180227000427-d7d64896b5ff/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@@ -947,6 +958,7 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ
 golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20180224232135-f6cff0780e54/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -1040,8 +1052,10 @@ golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBc
 golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220319134239-a9b59b0215f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220329152356-43be30ef3008 h1:pq9pwoi2rjLWvmiVser/lIOgiyA3fli4M+RfGVMA7nE=
-golang.org/x/sys v0.0.0-20220329152356-43be30ef3008/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220808155132-1c4a2a72c664 h1:v1W7bwXHsnLLloWYTVEdvGvA7BHMeBYsPcF0GLDxIRs=
+golang.org/x/sys v0.0.0-20220808155132-1c4a2a72c664/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 h1:JGgROgKl9N8DuW20oFS5gxc+lE67/N3FcwmBPMe7ArY=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@@ -1071,6 +1085,7 @@ golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3
 golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
 golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
 golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/tools v0.0.0-20190515012406-7d7faa4812bd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
 golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
 golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
 golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
@@ -1128,14 +1143,14 @@ golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
 golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo=
 golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
 golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
-golang.org/x/tools v0.1.10 h1:QjFRCZxdOhBJ/UNgnBZLbNV13DlbnK0quyivTnXJM20=
 golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
+golang.org/x/tools v0.1.12 h1:VveCTK38A2rkS8ZqFY25HIDFscX5X9OoEhJd3quQmXU=
+golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
 golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
 golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
 google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
@@ -1300,6 +1315,8 @@ gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8X
 gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
 gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
+gopkg.in/guregu/null.v4 v4.0.0 h1:1Wm3S1WEA2I26Kq+6vcW+w0gcDo44YKYD7YIEJNHDjg=
+gopkg.in/guregu/null.v4 v4.0.0/go.mod h1:YoQhUrADuG3i9WqesrCmpNRwm1ypAgSHYqoOcTu/JrI=
 gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s=
 gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
 gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=


@@ -4,46 +4,122 @@ schema:
   - "graphql/schema/types/*.graphql"
   - "graphql/schema/*.graphql"
 exec:
-  filename: pkg/models/generated_exec.go
+  filename: internal/api/generated_exec.go
 model:
-  filename: pkg/models/generated_models.go
+  filename: internal/api/generated_models.go
 resolver:
   filename: internal/api/resolver.go
   type: Resolver
 struct_tag: gqlgen
+autobind:
+  - github.com/stashapp/stash/pkg/models
+  - github.com/stashapp/stash/pkg/plugin
+  - github.com/stashapp/stash/pkg/scraper
+  - github.com/stashapp/stash/internal/identify
+  - github.com/stashapp/stash/internal/dlna
+  - github.com/stashapp/stash/pkg/scraper/stashbox
 models:
   # Scalars
   Timestamp:
     model: github.com/stashapp/stash/pkg/models.Timestamp
-  # Objects
-  Gallery:
-    model: github.com/stashapp/stash/pkg/models.Gallery
+  Int64:
+    model: github.com/stashapp/stash/pkg/models.Int64
+  # define to force resolvers
   Image:
     model: github.com/stashapp/stash/pkg/models.Image
-  ImageFileType:
-    model: github.com/stashapp/stash/pkg/models.ImageFileType
-  Performer:
-    model: github.com/stashapp/stash/pkg/models.Performer
-  Scene:
-    model: github.com/stashapp/stash/pkg/models.Scene
-  SceneMarker:
-    model: github.com/stashapp/stash/pkg/models.SceneMarker
-  ScrapedItem:
-    model: github.com/stashapp/stash/pkg/models.ScrapedItem
-  Studio:
-    model: github.com/stashapp/stash/pkg/models.Studio
-  Movie:
-    model: github.com/stashapp/stash/pkg/models.Movie
-  Tag:
-    model: github.com/stashapp/stash/pkg/models.Tag
-  SceneFileType:
-    model: github.com/stashapp/stash/pkg/models.SceneFileType
-  SavedFilter:
-    model: github.com/stashapp/stash/pkg/models.SavedFilter
-  StashID:
-    model: github.com/stashapp/stash/pkg/models.StashID
-  SceneCaption:
-    model: github.com/stashapp/stash/pkg/models.SceneCaption
+    fields:
+      title:
+        resolver: true
+  # autobind on config causes generation issues
+  StashConfig:
+    model: github.com/stashapp/stash/internal/manager/config.StashConfig
+  StashConfigInput:
+    model: github.com/stashapp/stash/internal/manager/config.StashConfigInput
+  StashBoxInput:
+    model: github.com/stashapp/stash/internal/manager/config.StashBoxInput
+  ConfigImageLightboxResult:
+    model: github.com/stashapp/stash/internal/manager/config.ConfigImageLightboxResult
+  ImageLightboxDisplayMode:
+    model: github.com/stashapp/stash/internal/manager/config.ImageLightboxDisplayMode
+  ImageLightboxScrollMode:
+    model: github.com/stashapp/stash/internal/manager/config.ImageLightboxScrollMode
+  ConfigDisableDropdownCreate:
+    model: github.com/stashapp/stash/internal/manager/config.ConfigDisableDropdownCreate
+  ScanMetadataOptions:
+    model: github.com/stashapp/stash/internal/manager/config.ScanMetadataOptions
+  AutoTagMetadataOptions:
+    model: github.com/stashapp/stash/internal/manager/config.AutoTagMetadataOptions
+  SceneParserInput:
+    model: github.com/stashapp/stash/internal/manager.SceneParserInput
+  SceneParserResult:
+    model: github.com/stashapp/stash/internal/manager.SceneParserResult
+  SceneMovieID:
+    model: github.com/stashapp/stash/internal/manager.SceneMovieID
+  SystemStatus:
+    model: github.com/stashapp/stash/internal/manager.SystemStatus
+  SystemStatusEnum:
+    model: github.com/stashapp/stash/internal/manager.SystemStatusEnum
+  ImportDuplicateEnum:
+    model: github.com/stashapp/stash/internal/manager.ImportDuplicateEnum
+  SetupInput:
+    model: github.com/stashapp/stash/internal/manager.SetupInput
+  MigrateInput:
+    model: github.com/stashapp/stash/internal/manager.MigrateInput
+  ScanMetadataInput:
+    model: github.com/stashapp/stash/internal/manager.ScanMetadataInput
+  GenerateMetadataInput:
+    model: github.com/stashapp/stash/internal/manager.GenerateMetadataInput
+  GeneratePreviewOptionsInput:
+    model: github.com/stashapp/stash/internal/manager.GeneratePreviewOptionsInput
+  AutoTagMetadataInput:
+    model: github.com/stashapp/stash/internal/manager.AutoTagMetadataInput
+  CleanMetadataInput:
+    model: github.com/stashapp/stash/internal/manager.CleanMetadataInput
+  StashBoxBatchPerformerTagInput:
+    model: github.com/stashapp/stash/internal/manager.StashBoxBatchPerformerTagInput
+  SceneStreamEndpoint:
+    model: github.com/stashapp/stash/internal/manager.SceneStreamEndpoint
+  ExportObjectTypeInput:
+    model: github.com/stashapp/stash/internal/manager.ExportObjectTypeInput
+  ExportObjectsInput:
+    model: github.com/stashapp/stash/internal/manager.ExportObjectsInput
+  ImportObjectsInput:
+    model: github.com/stashapp/stash/internal/manager.ImportObjectsInput
+  ScanMetaDataFilterInput:
+    model: github.com/stashapp/stash/internal/manager.ScanMetaDataFilterInput
+  # renamed types
+  BulkUpdateIdMode:
+    model: github.com/stashapp/stash/pkg/models.RelationshipUpdateMode
+  DLNAStatus:
+    model: github.com/stashapp/stash/internal/dlna.Status
+  DLNAIP:
+    model: github.com/stashapp/stash/internal/dlna.Dlnaip
+  IdentifySource:
+    model: github.com/stashapp/stash/internal/identify.Source
+  IdentifyMetadataTaskOptions:
+    model: github.com/stashapp/stash/internal/identify.Options
+  IdentifyMetadataInput:
+    model: github.com/stashapp/stash/internal/identify.Options
+  IdentifyMetadataOptions:
+    model: github.com/stashapp/stash/internal/identify.MetadataOptions
+  IdentifyFieldOptions:
+    model: github.com/stashapp/stash/internal/identify.FieldOptions
+  IdentifyFieldStrategy:
+    model: github.com/stashapp/stash/internal/identify.FieldStrategy
+  ScraperSource:
+    model: github.com/stashapp/stash/pkg/scraper.Source
+  # rebind inputs to types
+  StashIDInput:
+    model: github.com/stashapp/stash/pkg/models.StashID
+  IdentifySourceInput:
+    model: github.com/stashapp/stash/internal/identify.Source
+  IdentifyFieldOptionsInput:
+    model: github.com/stashapp/stash/internal/identify.FieldOptions
+  IdentifyMetadataOptionsInput:
+    model: github.com/stashapp/stash/internal/identify.MetadataOptions
+  ScraperSourceInput:
+    model: github.com/stashapp/stash/pkg/scraper.Source
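
Forcing `resolver: true` on Image.title means gqlgen no longer reads the Title struct field directly and instead requires a resolver method. The following is only a sketch of what such a forced field resolver conventionally looks like with gqlgen; the receiver name, the fallback logic, and the models.Image fields referenced here are assumptions, not code from this PR:

// Hypothetical sketch within the api package.
// gqlgen would require a method with roughly this shape once the field
// is marked resolver: true; the body shown is illustrative only.
func (r *imageResolver) Title(ctx context.Context, obj *models.Image) (*string, error) {
    if obj.Title != "" { // assumption: Image has a Title string field
        t := obj.Title
        return &t, nil
    }
    // when there is no explicit title, a title could be derived from the
    // primary file's basename (not shown here)
    return nil, nil
}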


@@ -0,0 +1,40 @@
fragment FolderData on Folder {
id
path
}
fragment VideoFileData on VideoFile {
path
size
duration
video_codec
audio_codec
width
height
frame_rate
bit_rate
fingerprints {
type
value
}
}
fragment ImageFileData on ImageFile {
path
size
width
height
fingerprints {
type
value
}
}
fragment GalleryFileData on GalleryFile {
path
size
fingerprints {
type
value
}
}


@@ -1,19 +1,21 @@
 fragment SlimGalleryData on Gallery {
   id
-  checksum
-  path
   title
   date
   url
   details
   rating
   organized
+  files {
+    ...GalleryFileData
+  }
+  folder {
+    ...FolderData
+  }
   image_count
   cover {
-    file {
-      size
-      width
-      height
+    files {
+      ...ImageFileData
     }
     paths {
@@ -37,8 +39,6 @@ fragment SlimGalleryData on Gallery {
     image_path
   }
   scenes {
-    id
-    title
-    path
+    ...SlimSceneData
   }
 }


@@ -1,7 +1,5 @@
 fragment GalleryData on Gallery {
   id
-  checksum
-  path
   created_at
   updated_at
   title
@@ -10,6 +8,14 @@ fragment GalleryData on Gallery {
   details
   rating
   organized
+  files {
+    ...GalleryFileData
+  }
+  folder {
+    ...FolderData
+  }
   images {
     ...SlimImageData
   }


@@ -1,16 +1,12 @@
 fragment SlimImageData on Image {
   id
-  checksum
   title
   rating
   organized
   o_counter
-  path
 
-  file {
-    size
-    width
-    height
+  files {
+    ...ImageFileData
   }
 
   paths {
@@ -20,8 +16,13 @@ fragment SlimImageData on Image {
   galleries {
     id
-    path
     title
+    files {
+      path
+    }
+    folder {
+      path
+    }
   }
 
   studio {


@@ -1,18 +1,14 @@
 fragment ImageData on Image {
   id
-  checksum
   title
   rating
   organized
   o_counter
-  path
   created_at
   updated_at
 
-  file {
-    size
-    width
-    height
+  files {
+    ...ImageFileData
   }
 
   paths {


@@ -1,7 +1,5 @@
 fragment SlimSceneData on Scene {
   id
-  checksum
-  oshash
   title
   details
   url
@@ -9,24 +7,11 @@ fragment SlimSceneData on Scene {
   rating
   o_counter
   organized
-  path
-  phash
   interactive
   interactive_speed
-  captions {
-    language_code
-    caption_type
-  }
 
-  file {
-    size
-    duration
-    video_codec
-    audio_codec
-    width
-    height
-    framerate
-    bitrate
+  files {
+    ...VideoFileData
   }
 
   paths {


@@ -1,7 +1,5 @@
 fragment SceneData on Scene {
   id
-  checksum
-  oshash
   title
   details
   url
@@ -9,8 +7,6 @@ fragment SceneData on Scene {
   rating
   o_counter
   organized
-  path
-  phash
   interactive
   interactive_speed
   captions {
@@ -20,15 +16,8 @@ fragment SceneData on Scene {
   created_at
   updated_at
 
-  file {
-    size
-    duration
-    video_codec
-    audio_codec
-    width
-    height
-    framerate
-    bitrate
+  files {
+    ...VideoFileData
   }
 
   paths {


@@ -4,7 +4,7 @@ query FindScenes($filter: FindFilterType, $scene_filter: SceneFilterType, $scene
     filesize
     duration
     scenes {
-      ...SceneData
+      ...SlimSceneData
     }
   }
 }


@@ -0,0 +1,97 @@
type Fingerprint {
type: String!
value: String!
}
type Folder {
id: ID!
path: String!
parent_folder_id: ID
zip_file_id: ID
mod_time: Time!
created_at: Time!
updated_at: Time!
}
interface BaseFile {
id: ID!
path: String!
basename: String!
parent_folder_id: ID!
zip_file_id: ID
mod_time: Time!
size: Int64!
fingerprints: [Fingerprint!]!
created_at: Time!
updated_at: Time!
}
type VideoFile implements BaseFile {
id: ID!
path: String!
basename: String!
parent_folder_id: ID!
zip_file_id: ID
mod_time: Time!
size: Int64!
fingerprints: [Fingerprint!]!
format: String!
width: Int!
height: Int!
duration: Float!
video_codec: String!
audio_codec: String!
frame_rate: Float!
bit_rate: Int!
created_at: Time!
updated_at: Time!
}
type ImageFile implements BaseFile {
id: ID!
path: String!
basename: String!
parent_folder_id: ID!
zip_file_id: ID
mod_time: Time!
size: Int64!
fingerprints: [Fingerprint!]!
width: Int!
height: Int!
created_at: Time!
updated_at: Time!
}
type GalleryFile implements BaseFile {
id: ID!
path: String!
basename: String!
parent_folder_id: ID!
zip_file_id: ID
mod_time: Time!
size: Int64!
fingerprints: [Fingerprint!]!
created_at: Time!
updated_at: Time!
}


@@ -132,6 +132,8 @@ input SceneFilterType {
   phash: StringCriterionInput
   """Filter by path"""
   path: StringCriterionInput
+  """Filter by file count"""
+  file_count: IntCriterionInput
   """Filter by rating"""
   rating: IntCriterionInput
   """Filter by organized"""
@@ -239,6 +241,8 @@ input GalleryFilterType {
   checksum: StringCriterionInput
   """Filter by path"""
   path: StringCriterionInput
+  """Filter by zip-file count"""
+  file_count: IntCriterionInput
   """Filter to only include galleries missing this property"""
   is_missing: String
   """Filter to include/exclude galleries that were created from zip"""
@@ -327,6 +331,8 @@ input ImageFilterType {
   checksum: StringCriterionInput
   """Filter by path"""
   path: StringCriterionInput
+  """Filter by file count"""
+  file_count: IntCriterionInput
   """Filter by rating"""
   rating: IntCriterionInput
   """Filter by organized"""


@@ -1,8 +1,8 @@
 """Gallery type"""
 type Gallery {
   id: ID!
-  checksum: String!
-  path: String
+  checksum: String! @deprecated(reason: "Use files.fingerprints")
+  path: String @deprecated(reason: "Use files.path")
   title: String
   url: String
   date: String
@@ -11,7 +11,10 @@ type Gallery {
   organized: Boolean!
   created_at: Time!
   updated_at: Time!
-  file_mod_time: Time
+  file_mod_time: Time @deprecated(reason: "Use files.mod_time")
+  files: [GalleryFile!]!
+  folder: Folder
 
   scenes: [Scene!]!
   studio: Studio
@@ -24,12 +27,6 @@ type Gallery {
   cover: Image
 }
 
-type GalleryFilesType {
-  index: Int!
-  name: String
-  path: String
-}
-
 input GalleryCreateInput {
   title: String!
   url: String


@@ -1,16 +1,18 @@
 type Image {
   id: ID!
-  checksum: String
+  checksum: String @deprecated(reason: "Use files.fingerprints")
   title: String
   rating: Int
   o_counter: Int
   organized: Boolean!
-  path: String!
+  path: String! @deprecated(reason: "Use files.path")
   created_at: Time!
   updated_at: Time!
-  file_mod_time: Time
-  file: ImageFileType! # Resolver
+  file_mod_time: Time @deprecated(reason: "Use files.mod_time")
+  file: ImageFileType! @deprecated(reason: "Use files.mod_time")
+  files: [ImageFile!]!
 
   paths: ImagePathsType! # Resolver
   galleries: [Gallery!]!
@@ -20,9 +22,10 @@ type Image {
 }
 
 type ImageFileType {
-  size: Int
-  width: Int
-  height: Int
+  mod_time: Time!
+  size: Int!
+  width: Int!
+  height: Int!
 }
 
 type ImagePathsType {


@@ -71,10 +71,19 @@ input ScanMetaDataFilterInput {
 input ScanMetadataInput {
   paths: [String!]
 
+  # useFileMetadata is deprecated with the new file management system
+  # if this functionality is desired, then we can make a built in scraper instead.
   """Set name, date, details from metadata (if present)"""
-  useFileMetadata: Boolean
+  useFileMetadata: Boolean @deprecated(reason: "Not implemented")
+
+  # stripFileExtension is deprecated since we no longer set the title from the
+  # filename - it is automatically returned if the object has no title. If this
+  # functionality is desired, then we could make this an option to not include
+  # the extension in the auto-generated title.
   """Strip file extension from title"""
-  stripFileExtension: Boolean
+  stripFileExtension: Boolean @deprecated(reason: "Not implemented")
+
   """Generate previews during scan"""
   scanGeneratePreviews: Boolean
   """Generate image previews during scan"""


@@ -10,3 +10,5 @@ scalar Timestamp
 scalar Map
 scalar Any
+
+scalar Int64


@@ -27,15 +27,15 @@ type SceneMovie {
   scene_index: Int
 }
 
-type SceneCaption {
+type VideoCaption {
   language_code: String!
   caption_type: String!
 }
 
 type Scene {
   id: ID!
-  checksum: String
-  oshash: String
+  checksum: String @deprecated(reason: "Use files.fingerprints")
+  oshash: String @deprecated(reason: "Use files.fingerprints")
   title: String
   details: String
   url: String
@@ -43,16 +43,17 @@ type Scene {
   rating: Int
   organized: Boolean!
   o_counter: Int
-  path: String!
-  phash: String
+  path: String! @deprecated(reason: "Use files.path")
+  phash: String @deprecated(reason: "Use files.fingerprints")
   interactive: Boolean!
   interactive_speed: Int
-  captions: [SceneCaption!]
+  captions: [VideoCaption!]
   created_at: Time!
   updated_at: Time!
   file_mod_time: Time
-  file: SceneFileType! # Resolver
+  file: SceneFileType! @deprecated(reason: "Use files")
+  files: [VideoFile!]!
 
   paths: ScenePathsType! # Resolver
   scene_markers: [SceneMarker!]!
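
The deprecated scalar fields keep working by resolving against the scene's attached files. A hedged sketch of how Scene.path might be answered through the per-request dataloaders introduced later in this PR; the resolver receiver, the Base() accessor on file.File, and the error handling are assumptions about the surrounding code, not lines from this diff:

// Hypothetical sketch within the api package.
func (r *sceneResolver) Path(ctx context.Context, obj *models.Scene) (string, error) {
    // look up the scene's file IDs, then the first (primary) file, via the
    // batched loaders so sibling scenes in the same response share queries
    fileIDs, err := loaders.From(ctx).SceneFiles.Load(obj.ID)
    if err != nil || len(fileIDs) == 0 {
        return "", err
    }
    f, err := loaders.From(ctx).FileByID.Load(fileIDs[0])
    if err != nil {
        return "", err
    }
    return f.Base().Path, nil // assumption: file.File exposes Base().Path
}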


@@ -3,6 +3,7 @@ package api
 import (
 	"context"
 	"database/sql"
+	"fmt"
 	"strconv"
 
 	"github.com/99designs/gqlgen/graphql"
@@ -89,6 +90,14 @@ func (t changesetTranslator) nullString(value *string, field string) *sql.NullSt
 	return ret
 }
 
+func (t changesetTranslator) optionalString(value *string, field string) models.OptionalString {
+	if !t.hasField(field) {
+		return models.OptionalString{}
+	}
+
+	return models.NewOptionalStringPtr(value)
+}
+
 func (t changesetTranslator) sqliteDate(value *string, field string) *models.SQLiteDate {
 	if !t.hasField(field) {
 		return nil
@@ -104,6 +113,21 @@ func (t changesetTranslator) sqliteDate(value *string, field string) *models.SQL
 	return ret
 }
 
+func (t changesetTranslator) optionalDate(value *string, field string) models.OptionalDate {
+	if !t.hasField(field) {
+		return models.OptionalDate{}
+	}
+
+	if value == nil {
+		return models.OptionalDate{
+			Set:  true,
+			Null: true,
+		}
+	}
+
+	return models.NewOptionalDate(models.NewDate(*value))
+}
+
 func (t changesetTranslator) nullInt64(value *int, field string) *sql.NullInt64 {
 	if !t.hasField(field) {
 		return nil
@@ -119,6 +143,14 @@ func (t changesetTranslator) nullInt64(value *int, field string) *sql.NullInt64
 	return ret
 }
 
+func (t changesetTranslator) optionalInt(value *int, field string) models.OptionalInt {
+	if !t.hasField(field) {
+		return models.OptionalInt{}
+	}
+
+	return models.NewOptionalIntPtr(value)
+}
+
 func (t changesetTranslator) nullInt64FromString(value *string, field string) *sql.NullInt64 {
 	if !t.hasField(field) {
 		return nil
@@ -134,6 +166,25 @@ func (t changesetTranslator) nullInt64FromString(value *string, field string) *s
 	return ret
 }
 
+func (t changesetTranslator) optionalIntFromString(value *string, field string) (models.OptionalInt, error) {
+	if !t.hasField(field) {
+		return models.OptionalInt{}, nil
+	}
+
+	if value == nil {
+		return models.OptionalInt{
+			Set:  true,
+			Null: true,
+		}, nil
+	}
+
+	vv, err := strconv.Atoi(*value)
+	if err != nil {
+		return models.OptionalInt{}, fmt.Errorf("converting %v to int: %w", *value, err)
+	}
+
+	return models.NewOptionalInt(vv), nil
+}
+
 func (t changesetTranslator) nullBool(value *bool, field string) *sql.NullBool {
 	if !t.hasField(field) {
 		return nil
@@ -148,3 +199,11 @@ func (t changesetTranslator) nullBool(value *bool, field string) *sql.NullBool {
 	return ret
 }
+
+func (t changesetTranslator) optionalBool(value *bool, field string) models.OptionalBool {
+	if !t.hasField(field) {
+		return models.OptionalBool{}
+	}
+
+	return models.NewOptionalBoolPtr(value)
+}
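
The new optional* helpers exist so partial updates can distinguish three states of an input field: not provided, explicitly null, and set to a value. A self-contained stand-in that mirrors the pattern; OptionalString below is a simplified local type written for this illustration, not the real models.OptionalString:

package main

import "fmt"

// Minimal stand-in for the optional wrapper produced by the translator helpers.
type OptionalString struct {
    Value string
    Null  bool
    Set   bool
}

// optionalString mirrors changesetTranslator.optionalString: hasField says
// whether the client included the field in the update input at all.
func optionalString(hasField bool, value *string) OptionalString {
    if !hasField {
        return OptionalString{} // field absent: leave the column unchanged
    }
    if value == nil {
        return OptionalString{Set: true, Null: true} // explicit null: clear it
    }
    return OptionalString{Set: true, Value: *value} // set to a new value
}

func main() {
    title := "new title"
    fmt.Printf("%+v\n", optionalString(false, nil))   // not provided
    fmt.Printf("%+v\n", optionalString(true, nil))    // explicit null
    fmt.Printf("%+v\n", optionalString(true, &title)) // set
}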


@@ -0,0 +1,261 @@
//go:generate go run -mod=vendor github.com/vektah/dataloaden SceneLoader int *github.com/stashapp/stash/pkg/models.Scene
//go:generate go run -mod=vendor github.com/vektah/dataloaden GalleryLoader int *github.com/stashapp/stash/pkg/models.Gallery
//go:generate go run -mod=vendor github.com/vektah/dataloaden ImageLoader int *github.com/stashapp/stash/pkg/models.Image
//go:generate go run -mod=vendor github.com/vektah/dataloaden PerformerLoader int *github.com/stashapp/stash/pkg/models.Performer
//go:generate go run -mod=vendor github.com/vektah/dataloaden StudioLoader int *github.com/stashapp/stash/pkg/models.Studio
//go:generate go run -mod=vendor github.com/vektah/dataloaden TagLoader int *github.com/stashapp/stash/pkg/models.Tag
//go:generate go run -mod=vendor github.com/vektah/dataloaden MovieLoader int *github.com/stashapp/stash/pkg/models.Movie
//go:generate go run -mod=vendor github.com/vektah/dataloaden FileLoader github.com/stashapp/stash/pkg/file.ID github.com/stashapp/stash/pkg/file.File
//go:generate go run -mod=vendor github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/file.ID
//go:generate go run -mod=vendor github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/file.ID
//go:generate go run -mod=vendor github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/file.ID
package loaders
import (
"context"
"net/http"
"time"
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/txn"
)
type contextKey struct{ name string }
var (
loadersCtxKey = &contextKey{"loaders"}
)
const (
wait = 1 * time.Millisecond
maxBatch = 100
)
type Loaders struct {
SceneByID *SceneLoader
SceneFiles *SceneFileIDsLoader
ImageFiles *ImageFileIDsLoader
GalleryFiles *GalleryFileIDsLoader
GalleryByID *GalleryLoader
ImageByID *ImageLoader
PerformerByID *PerformerLoader
StudioByID *StudioLoader
TagByID *TagLoader
MovieByID *MovieLoader
FileByID *FileLoader
}
type Middleware struct {
DatabaseProvider txn.DatabaseProvider
Repository manager.Repository
}
func (m Middleware) Middleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
ldrs := Loaders{
SceneByID: &SceneLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchScenes(ctx),
},
GalleryByID: &GalleryLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchGalleries(ctx),
},
ImageByID: &ImageLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchImages(ctx),
},
PerformerByID: &PerformerLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchPerformers(ctx),
},
StudioByID: &StudioLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchStudios(ctx),
},
TagByID: &TagLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchTags(ctx),
},
MovieByID: &MovieLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchMovies(ctx),
},
FileByID: &FileLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchFiles(ctx),
},
SceneFiles: &SceneFileIDsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchScenesFileIDs(ctx),
},
ImageFiles: &ImageFileIDsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchImagesFileIDs(ctx),
},
GalleryFiles: &GalleryFileIDsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchGalleriesFileIDs(ctx),
},
}
newCtx := context.WithValue(r.Context(), loadersCtxKey, ldrs)
next.ServeHTTP(w, r.WithContext(newCtx))
})
}
func From(ctx context.Context) Loaders {
return ctx.Value(loadersCtxKey).(Loaders)
}
func toErrorSlice(err error) []error {
if err != nil {
return []error{err}
}
return nil
}
func (m Middleware) withTxn(ctx context.Context, fn func(ctx context.Context) error) error {
return txn.WithDatabase(ctx, m.DatabaseProvider, fn)
}
func (m Middleware) fetchScenes(ctx context.Context) func(keys []int) ([]*models.Scene, []error) {
return func(keys []int) (ret []*models.Scene, errs []error) {
err := m.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Scene.FindMany(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchImages(ctx context.Context) func(keys []int) ([]*models.Image, []error) {
return func(keys []int) (ret []*models.Image, errs []error) {
err := m.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Image.FindMany(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchGalleries(ctx context.Context) func(keys []int) ([]*models.Gallery, []error) {
return func(keys []int) (ret []*models.Gallery, errs []error) {
err := m.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Gallery.FindMany(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchPerformers(ctx context.Context) func(keys []int) ([]*models.Performer, []error) {
return func(keys []int) (ret []*models.Performer, errs []error) {
err := m.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Performer.FindMany(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchStudios(ctx context.Context) func(keys []int) ([]*models.Studio, []error) {
return func(keys []int) (ret []*models.Studio, errs []error) {
err := m.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Studio.FindMany(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchTags(ctx context.Context) func(keys []int) ([]*models.Tag, []error) {
return func(keys []int) (ret []*models.Tag, errs []error) {
err := m.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Tag.FindMany(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchMovies(ctx context.Context) func(keys []int) ([]*models.Movie, []error) {
return func(keys []int) (ret []*models.Movie, errs []error) {
err := m.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Movie.FindMany(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchFiles(ctx context.Context) func(keys []file.ID) ([]file.File, []error) {
return func(keys []file.ID) (ret []file.File, errs []error) {
err := m.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.File.Find(ctx, keys...)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) {
return func(keys []int) (ret [][]file.ID, errs []error) {
err := m.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Scene.GetManyFileIDs(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchImagesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) {
return func(keys []int) (ret [][]file.ID, errs []error) {
err := m.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Image.GetManyFileIDs(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
func (m Middleware) fetchGalleriesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) {
return func(keys []int) (ret [][]file.ID, errs []error) {
err := m.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = m.Repository.Gallery.GetManyFileIDs(ctx, keys)
return err
})
return ret, toErrorSlice(err)
}
}
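
The Middleware above builds a fresh set of loaders per request, and loaders.From pulls them back out of the context, so resolvers can go through the loaders instead of the repository directly and have many lookups in one GraphQL response collapse into a single FindMany call. A hedged sketch of a call site within the api package (the helper name and the caller are illustrative; LoadAll is part of the dataloaden-generated loader API shown below):

// Hypothetical in-package helper: fetch several scenes through the
// per-request loaders so the lookups are batched into one query.
func scenesByIDs(ctx context.Context, ids []int) ([]*models.Scene, error) {
    scenes, errs := loaders.From(ctx).SceneByID.LoadAll(ids)
    for _, err := range errs {
        if err != nil {
            return nil, err
        }
    }
    return scenes, nil
}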


@@ -0,0 +1,221 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/file"
)
// FileLoaderConfig captures the config to create a new FileLoader
type FileLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []file.ID) ([]file.File, []error)
// Wait is how long wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit
MaxBatch int
}
// NewFileLoader creates a new FileLoader given a fetch, wait, and maxBatch
func NewFileLoader(config FileLoaderConfig) *FileLoader {
return &FileLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// FileLoader batches and caches requests
type FileLoader struct {
// this method provides the data for the loader
fetch func(keys []file.ID) ([]file.File, []error)
// how long to done before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[file.ID]file.File
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *fileLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type fileLoaderBatch struct {
keys []file.ID
data []file.File
error []error
closing bool
done chan struct{}
}
// Load a File by key, batching and caching will be applied automatically
func (l *FileLoader) Load(key file.ID) (file.File, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a File.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *FileLoader) LoadThunk(key file.ID) func() (file.File, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() (file.File, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &fileLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() (file.File, error) {
<-batch.done
var data file.File
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// its convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *FileLoader) LoadAll(keys []file.ID) ([]file.File, []error) {
results := make([]func() (file.File, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
files := make([]file.File, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
files[i], errors[i] = thunk()
}
return files, errors
}
// LoadAllThunk returns a function that when called will block waiting for a Files.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *FileLoader) LoadAllThunk(keys []file.ID) func() ([]file.File, []error) {
results := make([]func() (file.File, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([]file.File, []error) {
files := make([]file.File, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
files[i], errors[i] = thunk()
}
return files, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *FileLoader) Prime(key file.ID, value file.File) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
l.unsafeSet(key, value)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *FileLoader) Clear(key file.ID) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *FileLoader) unsafeSet(key file.ID, value file.File) {
if l.cache == nil {
l.cache = map[file.ID]file.File{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *fileLoaderBatch) keyIndex(l *FileLoader, key file.ID) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *fileLoaderBatch) startTimer(l *FileLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *fileLoaderBatch) end(l *FileLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}
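
For orientation, this is how a caller might drive the generated loader; exampleLoadFiles and its fetch argument are assumptions for the sketch, not code from this PR:

package loaders

import (
    "time"

    "github.com/stashapp/stash/pkg/file"
)

// exampleLoadFiles is an illustrative sketch. fetch is any batch lookup that
// returns results in the same order as the keys it receives.
func exampleLoadFiles(fetch func([]file.ID) ([]file.File, []error), ids []file.ID) ([]file.File, []error) {
    loader := NewFileLoader(FileLoaderConfig{
        Wait:     time.Millisecond, // collect keys for up to 1ms before fetching
        MaxBatch: 100,              // send at most 100 keys per fetch call
        Fetch:    fetch,
    })

    // Load resolves a single key; repeated keys are served from the cache.
    if len(ids) > 0 {
        if _, err := loader.Load(ids[0]); err != nil {
            return nil, []error{err}
        }
    }

    // LoadAll resolves many keys through the same batching machinery.
    files, errs := loader.LoadAll(ids)
    return files, errs
}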

View File

@@ -0,0 +1,225 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/file"
)
// GalleryFileIDsLoaderConfig captures the config to create a new GalleryFileIDsLoader
type GalleryFileIDsLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []int) ([][]file.ID, []error)
// Wait is how long to wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
MaxBatch int
}
// NewGalleryFileIDsLoader creates a new GalleryFileIDsLoader given a fetch, wait, and maxBatch
func NewGalleryFileIDsLoader(config GalleryFileIDsLoaderConfig) *GalleryFileIDsLoader {
return &GalleryFileIDsLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// GalleryFileIDsLoader batches and caches requests
type GalleryFileIDsLoader struct {
// this method provides the data for the loader
fetch func(keys []int) ([][]file.ID, []error)
// how long to wait before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[int][]file.ID
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *galleryFileIDsLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type galleryFileIDsLoaderBatch struct {
keys []int
data [][]file.ID
error []error
closing bool
done chan struct{}
}
// Load a ID by key, batching and caching will be applied automatically
func (l *GalleryFileIDsLoader) Load(key int) ([]file.ID, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a ID.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *GalleryFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() ([]file.ID, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &galleryFileIDsLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() ([]file.ID, error) {
<-batch.done
var data []file.ID
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// it's convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *GalleryFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) {
results := make([]func() ([]file.ID, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
iDs := make([][]file.ID, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
iDs[i], errors[i] = thunk()
}
return iDs, errors
}
// LoadAllThunk returns a function that when called will block waiting for a IDs.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *GalleryFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) {
results := make([]func() ([]file.ID, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([][]file.ID, []error) {
iDs := make([][]file.ID, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
iDs[i], errors[i] = thunk()
}
return iDs, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *GalleryFileIDsLoader) Prime(key int, value []file.ID) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
// make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
// and end up with the whole cache pointing to the same value.
cpy := make([]file.ID, len(value))
copy(cpy, value)
l.unsafeSet(key, cpy)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *GalleryFileIDsLoader) Clear(key int) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *GalleryFileIDsLoader) unsafeSet(key int, value []file.ID) {
if l.cache == nil {
l.cache = map[int][]file.ID{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *galleryFileIDsLoaderBatch) keyIndex(l *GalleryFileIDsLoader, key int) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *galleryFileIDsLoaderBatch) startTimer(l *GalleryFileIDsLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *galleryFileIDsLoaderBatch) end(l *GalleryFileIDsLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}
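
The keyIndex/startTimer pair is what turns concurrent Load calls into a single query: keys accumulate on one batch until Wait elapses or MaxBatch is reached. A small sketch of that behaviour (exampleBatching and its throwaway fetch are illustrative only):

package loaders

import (
    "sync"
    "sync/atomic"
    "time"

    "github.com/stashapp/stash/pkg/file"
)

// exampleBatching shows many goroutines loading keys concurrently; all keys
// that arrive within the Wait window are served by a single fetch call.
func exampleBatching(keys []int) int32 {
    var fetchCalls int32
    loader := NewGalleryFileIDsLoader(GalleryFileIDsLoaderConfig{
        Wait:     10 * time.Millisecond,
        MaxBatch: 100,
        Fetch: func(ks []int) ([][]file.ID, []error) {
            atomic.AddInt32(&fetchCalls, 1) // one call per batch
            return make([][]file.ID, len(ks)), nil
        },
    })

    var wg sync.WaitGroup
    for _, k := range keys {
        wg.Add(1)
        go func(k int) {
            defer wg.Done()
            _, _ = loader.Load(k)
        }(k)
    }
    wg.Wait()

    // typically 1 when every key arrives within Wait and fits inside MaxBatch
    return atomic.LoadInt32(&fetchCalls)
}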

View File

@@ -0,0 +1,224 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/models"
)
// GalleryLoaderConfig captures the config to create a new GalleryLoader
type GalleryLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []int) ([]*models.Gallery, []error)
// Wait is how long to wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
MaxBatch int
}
// NewGalleryLoader creates a new GalleryLoader given a fetch, wait, and maxBatch
func NewGalleryLoader(config GalleryLoaderConfig) *GalleryLoader {
return &GalleryLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// GalleryLoader batches and caches requests
type GalleryLoader struct {
// this method provides the data for the loader
fetch func(keys []int) ([]*models.Gallery, []error)
// how long to wait before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[int]*models.Gallery
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *galleryLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type galleryLoaderBatch struct {
keys []int
data []*models.Gallery
error []error
closing bool
done chan struct{}
}
// Load a Gallery by key, batching and caching will be applied automatically
func (l *GalleryLoader) Load(key int) (*models.Gallery, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a Gallery.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *GalleryLoader) LoadThunk(key int) func() (*models.Gallery, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() (*models.Gallery, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &galleryLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() (*models.Gallery, error) {
<-batch.done
var data *models.Gallery
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// it's convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *GalleryLoader) LoadAll(keys []int) ([]*models.Gallery, []error) {
results := make([]func() (*models.Gallery, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
gallerys := make([]*models.Gallery, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
gallerys[i], errors[i] = thunk()
}
return gallerys, errors
}
// LoadAllThunk returns a function that when called will block waiting for a Gallerys.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *GalleryLoader) LoadAllThunk(keys []int) func() ([]*models.Gallery, []error) {
results := make([]func() (*models.Gallery, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([]*models.Gallery, []error) {
gallerys := make([]*models.Gallery, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
gallerys[i], errors[i] = thunk()
}
return gallerys, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *GalleryLoader) Prime(key int, value *models.Gallery) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
// make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
// and end up with the whole cache pointing to the same value.
cpy := *value
l.unsafeSet(key, &cpy)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *GalleryLoader) Clear(key int) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *GalleryLoader) unsafeSet(key int, value *models.Gallery) {
if l.cache == nil {
l.cache = map[int]*models.Gallery{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *galleryLoaderBatch) keyIndex(l *GalleryLoader, key int) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *galleryLoaderBatch) startTimer(l *GalleryLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *galleryLoaderBatch) end(l *GalleryLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}
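
Prime stores a copy of the value so a pointer taken from a loop variable cannot alias every cache entry, and it refuses to overwrite an existing key; clearing first forces the update. A hypothetical helper to illustrate (primeGalleries is not part of the PR, and it assumes Gallery exposes an integer ID field):

package loaders

import "github.com/stashapp/stash/pkg/models"

// primeGalleries seeds a loader's cache from galleries fetched elsewhere so
// later Load calls for the same IDs skip the database.
func primeGalleries(loader *GalleryLoader, galleries []*models.Gallery) {
    for _, g := range galleries {
        if !loader.Prime(g.ID, g) {
            // key already cached: replace it explicitly
            loader.Clear(g.ID)
            loader.Prime(g.ID, g)
        }
    }
}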

View File

@@ -0,0 +1,225 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/file"
)
// ImageFileIDsLoaderConfig captures the config to create a new ImageFileIDsLoader
type ImageFileIDsLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []int) ([][]file.ID, []error)
// Wait is how long to wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
MaxBatch int
}
// NewImageFileIDsLoader creates a new ImageFileIDsLoader given a fetch, wait, and maxBatch
func NewImageFileIDsLoader(config ImageFileIDsLoaderConfig) *ImageFileIDsLoader {
return &ImageFileIDsLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// ImageFileIDsLoader batches and caches requests
type ImageFileIDsLoader struct {
// this method provides the data for the loader
fetch func(keys []int) ([][]file.ID, []error)
// how long to wait before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[int][]file.ID
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *imageFileIDsLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type imageFileIDsLoaderBatch struct {
keys []int
data [][]file.ID
error []error
closing bool
done chan struct{}
}
// Load a ID by key, batching and caching will be applied automatically
func (l *ImageFileIDsLoader) Load(key int) ([]file.ID, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a ID.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ImageFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() ([]file.ID, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &imageFileIDsLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() ([]file.ID, error) {
<-batch.done
var data []file.ID
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// it's convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *ImageFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) {
results := make([]func() ([]file.ID, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
iDs := make([][]file.ID, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
iDs[i], errors[i] = thunk()
}
return iDs, errors
}
// LoadAllThunk returns a function that when called will block waiting for a IDs.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ImageFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) {
results := make([]func() ([]file.ID, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([][]file.ID, []error) {
iDs := make([][]file.ID, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
iDs[i], errors[i] = thunk()
}
return iDs, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *ImageFileIDsLoader) Prime(key int, value []file.ID) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
// make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
// and end up with the whole cache pointing to the same value.
cpy := make([]file.ID, len(value))
copy(cpy, value)
l.unsafeSet(key, cpy)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *ImageFileIDsLoader) Clear(key int) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *ImageFileIDsLoader) unsafeSet(key int, value []file.ID) {
if l.cache == nil {
l.cache = map[int][]file.ID{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *imageFileIDsLoaderBatch) keyIndex(l *ImageFileIDsLoader, key int) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *imageFileIDsLoaderBatch) startTimer(l *ImageFileIDsLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *imageFileIDsLoaderBatch) end(l *ImageFileIDsLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}

View File

@@ -0,0 +1,224 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/models"
)
// ImageLoaderConfig captures the config to create a new ImageLoader
type ImageLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []int) ([]*models.Image, []error)
// Wait is how long to wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
MaxBatch int
}
// NewImageLoader creates a new ImageLoader given a fetch, wait, and maxBatch
func NewImageLoader(config ImageLoaderConfig) *ImageLoader {
return &ImageLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// ImageLoader batches and caches requests
type ImageLoader struct {
// this method provides the data for the loader
fetch func(keys []int) ([]*models.Image, []error)
// how long to wait before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[int]*models.Image
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *imageLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type imageLoaderBatch struct {
keys []int
data []*models.Image
error []error
closing bool
done chan struct{}
}
// Load a Image by key, batching and caching will be applied automatically
func (l *ImageLoader) Load(key int) (*models.Image, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a Image.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ImageLoader) LoadThunk(key int) func() (*models.Image, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() (*models.Image, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &imageLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() (*models.Image, error) {
<-batch.done
var data *models.Image
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// it's convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *ImageLoader) LoadAll(keys []int) ([]*models.Image, []error) {
results := make([]func() (*models.Image, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
images := make([]*models.Image, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
images[i], errors[i] = thunk()
}
return images, errors
}
// LoadAllThunk returns a function that when called will block waiting for a Images.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ImageLoader) LoadAllThunk(keys []int) func() ([]*models.Image, []error) {
results := make([]func() (*models.Image, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([]*models.Image, []error) {
images := make([]*models.Image, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
images[i], errors[i] = thunk()
}
return images, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *ImageLoader) Prime(key int, value *models.Image) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
// make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
// and end up with the whole cache pointing to the same value.
cpy := *value
l.unsafeSet(key, &cpy)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *ImageLoader) Clear(key int) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *ImageLoader) unsafeSet(key int, value *models.Image) {
if l.cache == nil {
l.cache = map[int]*models.Image{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *imageLoaderBatch) keyIndex(l *ImageLoader, key int) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *imageLoaderBatch) startTimer(l *ImageLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *imageLoaderBatch) end(l *ImageLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}
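
Note the error handling in the thunk above: a fetch that returns exactly one error fails every key in the batch, while a full slice lets keys fail independently. A fetch implementation might exploit that as follows (sketch only; the lookup argument is an assumption):

package loaders

import "github.com/stashapp/stash/pkg/models"

// exampleImageFetch adapts a single-error batch query to the loader's
// per-key error contract.
func exampleImageFetch(lookup func([]int) ([]*models.Image, error)) func([]int) ([]*models.Image, []error) {
    return func(ids []int) ([]*models.Image, []error) {
        images, err := lookup(ids)
        if err != nil {
            // one error for everything: each Load in this batch receives it
            return nil, []error{err}
        }
        // nil error slice: every Load in this batch succeeds
        return images, nil
    }
}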

View File

@@ -0,0 +1,224 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/models"
)
// MovieLoaderConfig captures the config to create a new MovieLoader
type MovieLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []int) ([]*models.Movie, []error)
// Wait is how long to wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
MaxBatch int
}
// NewMovieLoader creates a new MovieLoader given a fetch, wait, and maxBatch
func NewMovieLoader(config MovieLoaderConfig) *MovieLoader {
return &MovieLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// MovieLoader batches and caches requests
type MovieLoader struct {
// this method provides the data for the loader
fetch func(keys []int) ([]*models.Movie, []error)
// how long to wait before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[int]*models.Movie
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *movieLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type movieLoaderBatch struct {
keys []int
data []*models.Movie
error []error
closing bool
done chan struct{}
}
// Load a Movie by key, batching and caching will be applied automatically
func (l *MovieLoader) Load(key int) (*models.Movie, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a Movie.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *MovieLoader) LoadThunk(key int) func() (*models.Movie, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() (*models.Movie, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &movieLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() (*models.Movie, error) {
<-batch.done
var data *models.Movie
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// it's convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *MovieLoader) LoadAll(keys []int) ([]*models.Movie, []error) {
results := make([]func() (*models.Movie, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
movies := make([]*models.Movie, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
movies[i], errors[i] = thunk()
}
return movies, errors
}
// LoadAllThunk returns a function that when called will block waiting for a Movies.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *MovieLoader) LoadAllThunk(keys []int) func() ([]*models.Movie, []error) {
results := make([]func() (*models.Movie, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([]*models.Movie, []error) {
movies := make([]*models.Movie, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
movies[i], errors[i] = thunk()
}
return movies, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *MovieLoader) Prime(key int, value *models.Movie) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
// make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
// and end up with the whole cache pointing to the same value.
cpy := *value
l.unsafeSet(key, &cpy)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *MovieLoader) Clear(key int) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *MovieLoader) unsafeSet(key int, value *models.Movie) {
if l.cache == nil {
l.cache = map[int]*models.Movie{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *movieLoaderBatch) keyIndex(l *MovieLoader, key int) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *movieLoaderBatch) startTimer(l *MovieLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *movieLoaderBatch) end(l *MovieLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}

View File

@@ -0,0 +1,224 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/models"
)
// PerformerLoaderConfig captures the config to create a new PerformerLoader
type PerformerLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []int) ([]*models.Performer, []error)
// Wait is how long to wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
MaxBatch int
}
// NewPerformerLoader creates a new PerformerLoader given a fetch, wait, and maxBatch
func NewPerformerLoader(config PerformerLoaderConfig) *PerformerLoader {
return &PerformerLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// PerformerLoader batches and caches requests
type PerformerLoader struct {
// this method provides the data for the loader
fetch func(keys []int) ([]*models.Performer, []error)
// how long to wait before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[int]*models.Performer
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *performerLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type performerLoaderBatch struct {
keys []int
data []*models.Performer
error []error
closing bool
done chan struct{}
}
// Load a Performer by key, batching and caching will be applied automatically
func (l *PerformerLoader) Load(key int) (*models.Performer, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a Performer.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *PerformerLoader) LoadThunk(key int) func() (*models.Performer, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() (*models.Performer, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &performerLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() (*models.Performer, error) {
<-batch.done
var data *models.Performer
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// it's convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *PerformerLoader) LoadAll(keys []int) ([]*models.Performer, []error) {
results := make([]func() (*models.Performer, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
performers := make([]*models.Performer, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
performers[i], errors[i] = thunk()
}
return performers, errors
}
// LoadAllThunk returns a function that when called will block waiting for a Performers.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *PerformerLoader) LoadAllThunk(keys []int) func() ([]*models.Performer, []error) {
results := make([]func() (*models.Performer, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([]*models.Performer, []error) {
performers := make([]*models.Performer, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
performers[i], errors[i] = thunk()
}
return performers, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *PerformerLoader) Prime(key int, value *models.Performer) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
// make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
// and end up with the whole cache pointing to the same value.
cpy := *value
l.unsafeSet(key, &cpy)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *PerformerLoader) Clear(key int) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *PerformerLoader) unsafeSet(key int, value *models.Performer) {
if l.cache == nil {
l.cache = map[int]*models.Performer{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *performerLoaderBatch) keyIndex(l *PerformerLoader, key int) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *performerLoaderBatch) startTimer(l *PerformerLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *performerLoaderBatch) end(l *PerformerLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}
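
LoadThunk exists so one goroutine can queue keys on several loaders before blocking on any of them, which lets the underlying batches dispatch concurrently. A sketch of that pattern (performerAndStudio is illustrative, not from the PR):

package loaders

import "github.com/stashapp/stash/pkg/models"

// performerAndStudio requests both thunks first, so the performer and studio
// batches are collected in parallel, then awaits each result.
func performerAndStudio(p *PerformerLoader, s *StudioLoader, performerID, studioID int) (*models.Performer, *models.Studio, error) {
    performerThunk := p.LoadThunk(performerID)
    studioThunk := s.LoadThunk(studioID)

    performer, err := performerThunk()
    if err != nil {
        return nil, nil, err
    }
    studio, err := studioThunk()
    if err != nil {
        return nil, nil, err
    }
    return performer, studio, nil
}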

View File

@@ -0,0 +1,225 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/file"
)
// SceneFileIDsLoaderConfig captures the config to create a new SceneFileIDsLoader
type SceneFileIDsLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []int) ([][]file.ID, []error)
// Wait is how long to wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
MaxBatch int
}
// NewSceneFileIDsLoader creates a new SceneFileIDsLoader given a fetch, wait, and maxBatch
func NewSceneFileIDsLoader(config SceneFileIDsLoaderConfig) *SceneFileIDsLoader {
return &SceneFileIDsLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// SceneFileIDsLoader batches and caches requests
type SceneFileIDsLoader struct {
// this method provides the data for the loader
fetch func(keys []int) ([][]file.ID, []error)
// how long to wait before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[int][]file.ID
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *sceneFileIDsLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type sceneFileIDsLoaderBatch struct {
keys []int
data [][]file.ID
error []error
closing bool
done chan struct{}
}
// Load a ID by key, batching and caching will be applied automatically
func (l *SceneFileIDsLoader) Load(key int) ([]file.ID, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a ID.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() ([]file.ID, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &sceneFileIDsLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() ([]file.ID, error) {
<-batch.done
var data []file.ID
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// it's convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *SceneFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) {
results := make([]func() ([]file.ID, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
iDs := make([][]file.ID, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
iDs[i], errors[i] = thunk()
}
return iDs, errors
}
// LoadAllThunk returns a function that when called will block waiting for a IDs.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) {
results := make([]func() ([]file.ID, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([][]file.ID, []error) {
iDs := make([][]file.ID, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
iDs[i], errors[i] = thunk()
}
return iDs, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *SceneFileIDsLoader) Prime(key int, value []file.ID) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
// make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
// and end up with the whole cache pointing to the same value.
cpy := make([]file.ID, len(value))
copy(cpy, value)
l.unsafeSet(key, cpy)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *SceneFileIDsLoader) Clear(key int) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *SceneFileIDsLoader) unsafeSet(key int, value []file.ID) {
if l.cache == nil {
l.cache = map[int][]file.ID{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *sceneFileIDsLoaderBatch) keyIndex(l *SceneFileIDsLoader, key int) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *sceneFileIDsLoaderBatch) startTimer(l *SceneFileIDsLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *sceneFileIDsLoaderBatch) end(l *SceneFileIDsLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}

View File

@@ -0,0 +1,224 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/models"
)
// SceneLoaderConfig captures the config to create a new SceneLoader
type SceneLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []int) ([]*models.Scene, []error)
// Wait is how long to wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
MaxBatch int
}
// NewSceneLoader creates a new SceneLoader given a fetch, wait, and maxBatch
func NewSceneLoader(config SceneLoaderConfig) *SceneLoader {
return &SceneLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// SceneLoader batches and caches requests
type SceneLoader struct {
// this method provides the data for the loader
fetch func(keys []int) ([]*models.Scene, []error)
// how long to wait before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[int]*models.Scene
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *sceneLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type sceneLoaderBatch struct {
keys []int
data []*models.Scene
error []error
closing bool
done chan struct{}
}
// Load a Scene by key, batching and caching will be applied automatically
func (l *SceneLoader) Load(key int) (*models.Scene, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a Scene.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneLoader) LoadThunk(key int) func() (*models.Scene, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() (*models.Scene, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &sceneLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() (*models.Scene, error) {
<-batch.done
var data *models.Scene
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// it's convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *SceneLoader) LoadAll(keys []int) ([]*models.Scene, []error) {
results := make([]func() (*models.Scene, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
scenes := make([]*models.Scene, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
scenes[i], errors[i] = thunk()
}
return scenes, errors
}
// LoadAllThunk returns a function that when called will block waiting for a Scenes.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneLoader) LoadAllThunk(keys []int) func() ([]*models.Scene, []error) {
results := make([]func() (*models.Scene, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([]*models.Scene, []error) {
scenes := make([]*models.Scene, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
scenes[i], errors[i] = thunk()
}
return scenes, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *SceneLoader) Prime(key int, value *models.Scene) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
// make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
// and end up with the whole cache pointing to the same value.
cpy := *value
l.unsafeSet(key, &cpy)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *SceneLoader) Clear(key int) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *SceneLoader) unsafeSet(key int, value *models.Scene) {
if l.cache == nil {
l.cache = map[int]*models.Scene{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *sceneLoaderBatch) keyIndex(l *SceneLoader, key int) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *sceneLoaderBatch) startTimer(l *SceneLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *sceneLoaderBatch) end(l *SceneLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}

View File

@@ -0,0 +1,224 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/models"
)
// StudioLoaderConfig captures the config to create a new StudioLoader
type StudioLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []int) ([]*models.Studio, []error)
// Wait is how long to wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
MaxBatch int
}
// NewStudioLoader creates a new StudioLoader given a fetch, wait, and maxBatch
func NewStudioLoader(config StudioLoaderConfig) *StudioLoader {
return &StudioLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// StudioLoader batches and caches requests
type StudioLoader struct {
// this method provides the data for the loader
fetch func(keys []int) ([]*models.Studio, []error)
// how long to wait before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[int]*models.Studio
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *studioLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type studioLoaderBatch struct {
keys []int
data []*models.Studio
error []error
closing bool
done chan struct{}
}
// Load a Studio by key, batching and caching will be applied automatically
func (l *StudioLoader) Load(key int) (*models.Studio, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a Studio.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *StudioLoader) LoadThunk(key int) func() (*models.Studio, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() (*models.Studio, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &studioLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() (*models.Studio, error) {
<-batch.done
var data *models.Studio
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// it's convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *StudioLoader) LoadAll(keys []int) ([]*models.Studio, []error) {
results := make([]func() (*models.Studio, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
studios := make([]*models.Studio, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
studios[i], errors[i] = thunk()
}
return studios, errors
}
// LoadAllThunk returns a function that when called will block waiting for a Studios.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *StudioLoader) LoadAllThunk(keys []int) func() ([]*models.Studio, []error) {
results := make([]func() (*models.Studio, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([]*models.Studio, []error) {
studios := make([]*models.Studio, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
studios[i], errors[i] = thunk()
}
return studios, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *StudioLoader) Prime(key int, value *models.Studio) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
// make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
// and end up with the whole cache pointing to the same value.
cpy := *value
l.unsafeSet(key, &cpy)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *StudioLoader) Clear(key int) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *StudioLoader) unsafeSet(key int, value *models.Studio) {
if l.cache == nil {
l.cache = map[int]*models.Studio{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *studioLoaderBatch) keyIndex(l *StudioLoader, key int) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *studioLoaderBatch) startTimer(l *StudioLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *studioLoaderBatch) end(l *StudioLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}

View File

@@ -0,0 +1,224 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
"github.com/stashapp/stash/pkg/models"
)
// TagLoaderConfig captures the config to create a new TagLoader
type TagLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []int) ([]*models.Tag, []error)
// Wait is how long to wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
MaxBatch int
}
// NewTagLoader creates a new TagLoader given a fetch, wait, and maxBatch
func NewTagLoader(config TagLoaderConfig) *TagLoader {
return &TagLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// TagLoader batches and caches requests
type TagLoader struct {
// this method provides the data for the loader
fetch func(keys []int) ([]*models.Tag, []error)
// how long to wait before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[int]*models.Tag
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *tagLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type tagLoaderBatch struct {
keys []int
data []*models.Tag
error []error
closing bool
done chan struct{}
}
// Load a Tag by key, batching and caching will be applied automatically
func (l *TagLoader) Load(key int) (*models.Tag, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a Tag.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *TagLoader) LoadThunk(key int) func() (*models.Tag, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() (*models.Tag, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &tagLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() (*models.Tag, error) {
<-batch.done
var data *models.Tag
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// it's convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *TagLoader) LoadAll(keys []int) ([]*models.Tag, []error) {
results := make([]func() (*models.Tag, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
tags := make([]*models.Tag, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
tags[i], errors[i] = thunk()
}
return tags, errors
}
// LoadAllThunk returns a function that when called will block waiting for a Tags.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *TagLoader) LoadAllThunk(keys []int) func() ([]*models.Tag, []error) {
results := make([]func() (*models.Tag, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([]*models.Tag, []error) {
tags := make([]*models.Tag, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
tags[i], errors[i] = thunk()
}
return tags, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *TagLoader) Prime(key int, value *models.Tag) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found {
// make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
// and end up with the whole cache pointing to the same value.
cpy := *value
l.unsafeSet(key, &cpy)
}
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *TagLoader) Clear(key int) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *TagLoader) unsafeSet(key int, value *models.Tag) {
if l.cache == nil {
l.cache = map[int]*models.Tag{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *tagLoaderBatch) keyIndex(l *TagLoader, key int) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *tagLoaderBatch) startTimer(l *TagLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *tagLoaderBatch) end(l *TagLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}
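
For reference, the generated loader above is constructed once per request with a batch fetch function; Load, LoadAll and Prime then batch and cache lookups transparently. A minimal usage sketch, assuming the NewTagLoader/TagLoaderConfig constructor emitted by the same generator (not shown in this hunk) and a hypothetical tagFinder batch query:

func newRequestTagLoader(ctx context.Context, tagFinder func(context.Context, []int) ([]*models.Tag, error)) *TagLoader {
	return NewTagLoader(TagLoaderConfig{
		Wait:     2 * time.Millisecond, // collect keys briefly before fetching
		MaxBatch: 100,                  // or flush as soon as 100 keys are queued
		Fetch: func(ids []int) ([]*models.Tag, []error) {
			tags, err := tagFinder(ctx, ids) // one round trip for the whole batch
			if err != nil {
				return nil, []error{err}
			}
			return tags, nil
		},
	})
}

Concurrent Load calls within the same request then share a single Fetch, and Prime can seed the cache from objects already in hand.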

View File

@@ -11,6 +11,7 @@ import (
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin" "github.com/stashapp/stash/pkg/plugin"
"github.com/stashapp/stash/pkg/scraper" "github.com/stashapp/stash/pkg/scraper"
"github.com/stashapp/stash/pkg/txn"
) )
var ( var (
@@ -30,7 +31,12 @@ type hookExecutor interface {
} }
type Resolver struct { type Resolver struct {
txnManager models.TransactionManager txnManager txn.Manager
repository manager.Repository
sceneService manager.SceneService
imageService manager.ImageService
galleryService manager.GalleryService
hookExecutor hookExecutor hookExecutor hookExecutor
} }
@@ -38,37 +44,37 @@ func (r *Resolver) scraperCache() *scraper.Cache {
return manager.GetInstance().ScraperCache return manager.GetInstance().ScraperCache
} }
func (r *Resolver) Gallery() models.GalleryResolver { func (r *Resolver) Gallery() GalleryResolver {
return &galleryResolver{r} return &galleryResolver{r}
} }
func (r *Resolver) Mutation() models.MutationResolver { func (r *Resolver) Mutation() MutationResolver {
return &mutationResolver{r} return &mutationResolver{r}
} }
func (r *Resolver) Performer() models.PerformerResolver { func (r *Resolver) Performer() PerformerResolver {
return &performerResolver{r} return &performerResolver{r}
} }
func (r *Resolver) Query() models.QueryResolver { func (r *Resolver) Query() QueryResolver {
return &queryResolver{r} return &queryResolver{r}
} }
func (r *Resolver) Scene() models.SceneResolver { func (r *Resolver) Scene() SceneResolver {
return &sceneResolver{r} return &sceneResolver{r}
} }
func (r *Resolver) Image() models.ImageResolver { func (r *Resolver) Image() ImageResolver {
return &imageResolver{r} return &imageResolver{r}
} }
func (r *Resolver) SceneMarker() models.SceneMarkerResolver { func (r *Resolver) SceneMarker() SceneMarkerResolver {
return &sceneMarkerResolver{r} return &sceneMarkerResolver{r}
} }
func (r *Resolver) Studio() models.StudioResolver { func (r *Resolver) Studio() StudioResolver {
return &studioResolver{r} return &studioResolver{r}
} }
func (r *Resolver) Movie() models.MovieResolver { func (r *Resolver) Movie() MovieResolver {
return &movieResolver{r} return &movieResolver{r}
} }
func (r *Resolver) Subscription() models.SubscriptionResolver { func (r *Resolver) Subscription() SubscriptionResolver {
return &subscriptionResolver{r} return &subscriptionResolver{r}
} }
func (r *Resolver) Tag() models.TagResolver { func (r *Resolver) Tag() TagResolver {
return &tagResolver{r} return &tagResolver{r}
} }
@@ -85,17 +91,13 @@ type studioResolver struct{ *Resolver }
type movieResolver struct{ *Resolver } type movieResolver struct{ *Resolver }
type tagResolver struct{ *Resolver } type tagResolver struct{ *Resolver }
func (r *Resolver) withTxn(ctx context.Context, fn func(r models.Repository) error) error { func (r *Resolver) withTxn(ctx context.Context, fn func(ctx context.Context) error) error {
return r.txnManager.WithTxn(ctx, fn) return txn.WithTxn(ctx, r.txnManager, fn)
}
func (r *Resolver) withReadTxn(ctx context.Context, fn func(r models.ReaderRepository) error) error {
return r.txnManager.WithReadTxn(ctx, fn)
} }
func (r *queryResolver) MarkerWall(ctx context.Context, q *string) (ret []*models.SceneMarker, err error) { func (r *queryResolver) MarkerWall(ctx context.Context, q *string) (ret []*models.SceneMarker, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.SceneMarker().Wall(q) ret, err = r.repository.SceneMarker.Wall(ctx, q)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -104,8 +106,8 @@ func (r *queryResolver) MarkerWall(ctx context.Context, q *string) (ret []*model
} }
func (r *queryResolver) SceneWall(ctx context.Context, q *string) (ret []*models.Scene, err error) { func (r *queryResolver) SceneWall(ctx context.Context, q *string) (ret []*models.Scene, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Scene().Wall(q) ret, err = r.repository.Scene.Wall(ctx, q)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -115,8 +117,8 @@ func (r *queryResolver) SceneWall(ctx context.Context, q *string) (ret []*models
} }
func (r *queryResolver) MarkerStrings(ctx context.Context, q *string, sort *string) (ret []*models.MarkerStringsResultType, err error) { func (r *queryResolver) MarkerStrings(ctx context.Context, q *string, sort *string) (ret []*models.MarkerStringsResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.SceneMarker().GetMarkerStrings(q, sort) ret, err = r.repository.SceneMarker.GetMarkerStrings(ctx, q, sort)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -125,28 +127,29 @@ func (r *queryResolver) MarkerStrings(ctx context.Context, q *string, sort *stri
return ret, nil return ret, nil
} }
func (r *queryResolver) Stats(ctx context.Context) (*models.StatsResultType, error) { func (r *queryResolver) Stats(ctx context.Context) (*StatsResultType, error) {
var ret models.StatsResultType var ret StatsResultType
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
scenesQB := repo.Scene() repo := r.repository
imageQB := repo.Image() scenesQB := repo.Scene
galleryQB := repo.Gallery() imageQB := repo.Image
studiosQB := repo.Studio() galleryQB := repo.Gallery
performersQB := repo.Performer() studiosQB := repo.Studio
moviesQB := repo.Movie() performersQB := repo.Performer
tagsQB := repo.Tag() moviesQB := repo.Movie
scenesCount, _ := scenesQB.Count() tagsQB := repo.Tag
scenesSize, _ := scenesQB.Size() scenesCount, _ := scenesQB.Count(ctx)
scenesDuration, _ := scenesQB.Duration() scenesSize, _ := scenesQB.Size(ctx)
imageCount, _ := imageQB.Count() scenesDuration, _ := scenesQB.Duration(ctx)
imageSize, _ := imageQB.Size() imageCount, _ := imageQB.Count(ctx)
galleryCount, _ := galleryQB.Count() imageSize, _ := imageQB.Size(ctx)
performersCount, _ := performersQB.Count() galleryCount, _ := galleryQB.Count(ctx)
studiosCount, _ := studiosQB.Count() performersCount, _ := performersQB.Count(ctx)
moviesCount, _ := moviesQB.Count() studiosCount, _ := studiosQB.Count(ctx)
tagsCount, _ := tagsQB.Count() moviesCount, _ := moviesQB.Count(ctx)
tagsCount, _ := tagsQB.Count(ctx)
ret = models.StatsResultType{ ret = StatsResultType{
SceneCount: scenesCount, SceneCount: scenesCount,
ScenesSize: scenesSize, ScenesSize: scenesSize,
ScenesDuration: scenesDuration, ScenesDuration: scenesDuration,
@@ -167,10 +170,10 @@ func (r *queryResolver) Stats(ctx context.Context) (*models.StatsResultType, err
return &ret, nil return &ret, nil
} }
func (r *queryResolver) Version(ctx context.Context) (*models.Version, error) { func (r *queryResolver) Version(ctx context.Context) (*Version, error) {
version, hash, buildtime := GetVersion() version, hash, buildtime := GetVersion()
return &models.Version{ return &Version{
Version: &version, Version: &version,
Hash: hash, Hash: hash,
BuildTime: buildtime, BuildTime: buildtime,
@@ -178,7 +181,7 @@ func (r *queryResolver) Version(ctx context.Context) (*models.Version, error) {
} }
// Latestversion returns the latest git shorthash commit. // Latestversion returns the latest git shorthash commit.
func (r *queryResolver) Latestversion(ctx context.Context) (*models.ShortVersion, error) { func (r *queryResolver) Latestversion(ctx context.Context) (*ShortVersion, error) {
ver, url, err := GetLatestVersion(ctx, true) ver, url, err := GetLatestVersion(ctx, true)
if err == nil { if err == nil {
logger.Infof("Retrieved latest hash: %s", ver) logger.Infof("Retrieved latest hash: %s", ver)
@@ -186,37 +189,37 @@ func (r *queryResolver) Latestversion(ctx context.Context) (*models.ShortVersion
logger.Errorf("Error while retrieving latest hash: %s", err) logger.Errorf("Error while retrieving latest hash: %s", err)
} }
return &models.ShortVersion{ return &ShortVersion{
Shorthash: ver, Shorthash: ver,
URL: url, URL: url,
}, err }, err
} }
// Get scene marker tags which show up under the video. // Get scene marker tags which show up under the video.
func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([]*models.SceneMarkerTag, error) { func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([]*SceneMarkerTag, error) {
sceneID, err := strconv.Atoi(scene_id) sceneID, err := strconv.Atoi(scene_id)
if err != nil { if err != nil {
return nil, err return nil, err
} }
var keys []int var keys []int
tags := make(map[int]*models.SceneMarkerTag) tags := make(map[int]*SceneMarkerTag)
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
sceneMarkers, err := repo.SceneMarker().FindBySceneID(sceneID) sceneMarkers, err := r.repository.SceneMarker.FindBySceneID(ctx, sceneID)
if err != nil { if err != nil {
return err return err
} }
tqb := repo.Tag() tqb := r.repository.Tag
for _, sceneMarker := range sceneMarkers { for _, sceneMarker := range sceneMarkers {
markerPrimaryTag, err := tqb.Find(sceneMarker.PrimaryTagID) markerPrimaryTag, err := tqb.Find(ctx, sceneMarker.PrimaryTagID)
if err != nil { if err != nil {
return err return err
} }
_, hasKey := tags[markerPrimaryTag.ID] _, hasKey := tags[markerPrimaryTag.ID]
if !hasKey { if !hasKey {
sceneMarkerTag := &models.SceneMarkerTag{Tag: markerPrimaryTag} sceneMarkerTag := &SceneMarkerTag{Tag: markerPrimaryTag}
tags[markerPrimaryTag.ID] = sceneMarkerTag tags[markerPrimaryTag.ID] = sceneMarkerTag
keys = append(keys, markerPrimaryTag.ID) keys = append(keys, markerPrimaryTag.ID)
} }
@@ -235,10 +238,20 @@ func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([
return a.SceneMarkers[0].Seconds < b.SceneMarkers[0].Seconds return a.SceneMarkers[0].Seconds < b.SceneMarkers[0].Seconds
}) })
var result []*models.SceneMarkerTag var result []*SceneMarkerTag
for _, key := range keys { for _, key := range keys {
result = append(result, tags[key]) result = append(result, tags[key])
} }
return result, nil return result, nil
} }
func firstError(errs []error) error {
for _, e := range errs {
if e != nil {
return e
}
}
return nil
}
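
firstError collapses the per-key error slice returned by a loader's LoadAll into the single error most resolvers return. A short sketch of the call pattern used throughout the resolvers below (illustrative helper, not part of the diff):

func tagsByIDs(ctx context.Context, ids []int) ([]*models.Tag, error) {
	// LoadAll returns one error slot per key; surface the first non-nil one.
	tags, errs := loaders.From(ctx).TagByID.LoadAll(ids)
	if err := firstError(errs); err != nil {
		return nil, err
	}
	return tags, nil
}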

View File

@@ -2,34 +2,134 @@ package api
import ( import (
"context" "context"
"strconv"
"time" "time"
"github.com/stashapp/stash/internal/api/loaders"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
) )
func (r *galleryResolver) Path(ctx context.Context, obj *models.Gallery) (*string, error) { func (r *galleryResolver) getPrimaryFile(ctx context.Context, obj *models.Gallery) (file.File, error) {
if obj.Path.Valid { if obj.PrimaryFileID != nil {
return &obj.Path.String, nil f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID)
if err != nil {
return nil, err
} }
return f, nil
}
return nil, nil return nil, nil
} }
func (r *galleryResolver) Title(ctx context.Context, obj *models.Gallery) (*string, error) { func (r *galleryResolver) getFiles(ctx context.Context, obj *models.Gallery) ([]file.File, error) {
if obj.Title.Valid { fileIDs, err := loaders.From(ctx).GalleryFiles.Load(obj.ID)
return &obj.Title.String, nil if err != nil {
return nil, err
} }
files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs)
return files, firstError(errs)
}
func (r *galleryResolver) Files(ctx context.Context, obj *models.Gallery) ([]*GalleryFile, error) {
files, err := r.getFiles(ctx, obj)
if err != nil {
return nil, err
}
ret := make([]*GalleryFile, len(files))
for i, f := range files {
base := f.Base()
ret[i] = &GalleryFile{
ID: strconv.Itoa(int(base.ID)),
Path: base.Path,
Basename: base.Basename,
ParentFolderID: strconv.Itoa(int(base.ParentFolderID)),
ModTime: base.ModTime,
Size: base.Size,
CreatedAt: base.CreatedAt,
UpdatedAt: base.UpdatedAt,
Fingerprints: resolveFingerprints(base),
}
if base.ZipFileID != nil {
zipFileID := strconv.Itoa(int(*base.ZipFileID))
ret[i].ZipFileID = &zipFileID
}
}
return ret, nil
}
func (r *galleryResolver) Folder(ctx context.Context, obj *models.Gallery) (*Folder, error) {
if obj.FolderID == nil {
return nil, nil
}
var ret *file.Folder
if err := r.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = r.repository.Folder.Find(ctx, *obj.FolderID)
if err != nil {
return err
}
return err
}); err != nil {
return nil, err
}
if ret == nil {
return nil, nil
}
rr := &Folder{
ID: ret.ID.String(),
Path: ret.Path,
ModTime: ret.ModTime,
CreatedAt: ret.CreatedAt,
UpdatedAt: ret.UpdatedAt,
}
if ret.ParentFolderID != nil {
pfidStr := ret.ParentFolderID.String()
rr.ParentFolderID = &pfidStr
}
if ret.ZipFileID != nil {
zfidStr := ret.ZipFileID.String()
rr.ZipFileID = &zfidStr
}
return rr, nil
}
func (r *galleryResolver) FileModTime(ctx context.Context, obj *models.Gallery) (*time.Time, error) {
f, err := r.getPrimaryFile(ctx, obj)
if err != nil {
return nil, err
}
if f != nil {
return &f.Base().ModTime, nil
}
return nil, nil return nil, nil
} }
func (r *galleryResolver) Images(ctx context.Context, obj *models.Gallery) (ret []*models.Image, err error) { func (r *galleryResolver) Images(ctx context.Context, obj *models.Gallery) (ret []*models.Image, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
var err error var err error
// #2376 - sort images by path // #2376 - sort images by path
// doing this via Query is really slow, so stick with FindByGalleryID // doing this via Query is really slow, so stick with FindByGalleryID
ret, err = repo.Image().FindByGalleryID(obj.ID) ret, err = r.repository.Image.FindByGalleryID(ctx, obj.ID)
if err != nil { if err != nil {
return err return err
} }
@@ -43,9 +143,9 @@ func (r *galleryResolver) Images(ctx context.Context, obj *models.Gallery) (ret
} }
func (r *galleryResolver) Cover(ctx context.Context, obj *models.Gallery) (ret *models.Image, err error) { func (r *galleryResolver) Cover(ctx context.Context, obj *models.Gallery) (ret *models.Image, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
// doing this via Query is really slow, so stick with FindByGalleryID // doing this via Query is really slow, so stick with FindByGalleryID
imgs, err := repo.Image().FindByGalleryID(obj.ID) imgs, err := r.repository.Image.FindByGalleryID(ctx, obj.ID)
if err != nil { if err != nil {
return err return err
} }
@@ -70,91 +170,67 @@ func (r *galleryResolver) Cover(ctx context.Context, obj *models.Gallery) (ret *
} }
func (r *galleryResolver) Date(ctx context.Context, obj *models.Gallery) (*string, error) { func (r *galleryResolver) Date(ctx context.Context, obj *models.Gallery) (*string, error) {
if obj.Date.Valid { if obj.Date != nil {
result := utils.GetYMDFromDatabaseDate(obj.Date.String) result := obj.Date.String()
return &result, nil return &result, nil
} }
return nil, nil return nil, nil
} }
func (r *galleryResolver) URL(ctx context.Context, obj *models.Gallery) (*string, error) {
if obj.URL.Valid {
return &obj.URL.String, nil
}
return nil, nil
}
func (r *galleryResolver) Details(ctx context.Context, obj *models.Gallery) (*string, error) {
if obj.Details.Valid {
return &obj.Details.String, nil
}
return nil, nil
}
func (r *galleryResolver) Rating(ctx context.Context, obj *models.Gallery) (*int, error) {
if obj.Rating.Valid {
rating := int(obj.Rating.Int64)
return &rating, nil
}
return nil, nil
}
func (r *galleryResolver) Scenes(ctx context.Context, obj *models.Gallery) (ret []*models.Scene, err error) { func (r *galleryResolver) Scenes(ctx context.Context, obj *models.Gallery) (ret []*models.Scene, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if !obj.SceneIDs.Loaded() {
var err error if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Scene().FindByGalleryID(obj.ID) return obj.LoadSceneIDs(ctx, r.repository.Gallery)
return err
}); err != nil { }); err != nil {
return nil, err return nil, err
} }
}
return ret, nil var errs []error
ret, errs = loaders.From(ctx).SceneByID.LoadAll(obj.SceneIDs.List())
return ret, firstError(errs)
} }
func (r *galleryResolver) Studio(ctx context.Context, obj *models.Gallery) (ret *models.Studio, err error) { func (r *galleryResolver) Studio(ctx context.Context, obj *models.Gallery) (ret *models.Studio, err error) {
if !obj.StudioID.Valid { if obj.StudioID == nil {
return nil, nil return nil, nil
} }
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { return loaders.From(ctx).StudioByID.Load(*obj.StudioID)
var err error
ret, err = repo.Studio().Find(int(obj.StudioID.Int64))
return err
}); err != nil {
return nil, err
}
return ret, nil
} }
func (r *galleryResolver) Tags(ctx context.Context, obj *models.Gallery) (ret []*models.Tag, err error) { func (r *galleryResolver) Tags(ctx context.Context, obj *models.Gallery) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if !obj.TagIDs.Loaded() {
var err error if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Tag().FindByGalleryID(obj.ID) return obj.LoadTagIDs(ctx, r.repository.Gallery)
return err
}); err != nil { }); err != nil {
return nil, err return nil, err
} }
}
return ret, nil var errs []error
ret, errs = loaders.From(ctx).TagByID.LoadAll(obj.TagIDs.List())
return ret, firstError(errs)
} }
func (r *galleryResolver) Performers(ctx context.Context, obj *models.Gallery) (ret []*models.Performer, err error) { func (r *galleryResolver) Performers(ctx context.Context, obj *models.Gallery) (ret []*models.Performer, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if !obj.PerformerIDs.Loaded() {
var err error if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Performer().FindByGalleryID(obj.ID) return obj.LoadPerformerIDs(ctx, r.repository.Gallery)
return err
}); err != nil { }); err != nil {
return nil, err return nil, err
} }
}
return ret, nil var errs []error
ret, errs = loaders.From(ctx).PerformerByID.LoadAll(obj.PerformerIDs.List())
return ret, firstError(errs)
} }
func (r *galleryResolver) ImageCount(ctx context.Context, obj *models.Gallery) (ret int, err error) { func (r *galleryResolver) ImageCount(ctx context.Context, obj *models.Gallery) (ret int, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
var err error var err error
ret, err = repo.Image().CountByGalleryID(obj.ID) ret, err = r.repository.Image.CountByGalleryID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return 0, err return 0, err
@@ -162,15 +238,3 @@ func (r *galleryResolver) ImageCount(ctx context.Context, obj *models.Gallery) (
return ret, nil return ret, nil
} }
func (r *galleryResolver) CreatedAt(ctx context.Context, obj *models.Gallery) (*time.Time, error) {
return &obj.CreatedAt.Timestamp, nil
}
func (r *galleryResolver) UpdatedAt(ctx context.Context, obj *models.Gallery) (*time.Time, error) {
return &obj.UpdatedAt.Timestamp, nil
}
func (r *galleryResolver) FileModTime(ctx context.Context, obj *models.Gallery) (*time.Time, error) {
return &obj.FileModTime.Timestamp, nil
}
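
The gallery resolvers above now read relationship IDs that are loaded lazily onto the model (obj.SceneIDs, obj.TagIDs, obj.PerformerIDs) and only open a transaction when the list has not been populated yet. A rough, illustrative sketch of such a lazily-loaded ID container (not the actual models implementation):

// Hypothetical stand-in for the related-ID lists on models.Gallery.
type relatedIDs struct {
	ids    []int
	loaded bool
}

func (r *relatedIDs) Loaded() bool { return r.loaded }
func (r *relatedIDs) List() []int  { return r.ids }

// load runs the supplied query once and caches the result on the model.
func (r *relatedIDs) load(fn func() ([]int, error)) error {
	if r.loaded {
		return nil
	}
	ids, err := fn()
	if err != nil {
		return err
	}
	r.ids, r.loaded = ids, true
	return nil
}

The Load*IDs helpers seen in the diff follow this shape: query the repository inside a read transaction, then let the context loaders fan the cached IDs out into full objects.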

View File

@@ -2,105 +2,180 @@ package api
import ( import (
"context" "context"
"fmt"
"strconv"
"time" "time"
"github.com/stashapp/stash/internal/api/loaders"
"github.com/stashapp/stash/internal/api/urlbuilders" "github.com/stashapp/stash/internal/api/urlbuilders"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
) )
func (r *imageResolver) Title(ctx context.Context, obj *models.Image) (*string, error) { func (r *imageResolver) getPrimaryFile(ctx context.Context, obj *models.Image) (*file.ImageFile, error) {
ret := image.GetTitle(obj) if obj.PrimaryFileID != nil {
return &ret, nil f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID)
if err != nil {
return nil, err
} }
func (r *imageResolver) Rating(ctx context.Context, obj *models.Image) (*int, error) { ret, ok := f.(*file.ImageFile)
if obj.Rating.Valid { if !ok {
rating := int(obj.Rating.Int64) return nil, fmt.Errorf("file %T is not an image file", f)
return &rating, nil
} }
return ret, nil
}
return nil, nil return nil, nil
} }
func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*models.ImageFileType, error) { func (r *imageResolver) getFiles(ctx context.Context, obj *models.Image) ([]*file.ImageFile, error) {
width := int(obj.Width.Int64) fileIDs, err := loaders.From(ctx).ImageFiles.Load(obj.ID)
height := int(obj.Height.Int64) if err != nil {
size := int(obj.Size.Int64) return nil, err
return &models.ImageFileType{ }
Size: &size,
Width: &width, files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs)
Height: &height, ret := make([]*file.ImageFile, len(files))
for i, bf := range files {
f, ok := bf.(*file.ImageFile)
if !ok {
return nil, fmt.Errorf("file %T is not an image file", f)
}
ret[i] = f
}
return ret, firstError(errs)
}
func (r *imageResolver) Title(ctx context.Context, obj *models.Image) (*string, error) {
ret := obj.GetTitle()
return &ret, nil
}
func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*ImageFileType, error) {
f, err := r.getPrimaryFile(ctx, obj)
if err != nil {
return nil, err
}
if f == nil {
return nil, nil
}
width := f.Width
height := f.Height
size := f.Size
return &ImageFileType{
Size: int(size),
Width: width,
Height: height,
}, nil }, nil
} }
func (r *imageResolver) Paths(ctx context.Context, obj *models.Image) (*models.ImagePathsType, error) { func (r *imageResolver) Files(ctx context.Context, obj *models.Image) ([]*ImageFile, error) {
files, err := r.getFiles(ctx, obj)
if err != nil {
return nil, err
}
ret := make([]*ImageFile, len(files))
for i, f := range files {
ret[i] = &ImageFile{
ID: strconv.Itoa(int(f.ID)),
Path: f.Path,
Basename: f.Basename,
ParentFolderID: strconv.Itoa(int(f.ParentFolderID)),
ModTime: f.ModTime,
Size: f.Size,
Width: f.Width,
Height: f.Height,
CreatedAt: f.CreatedAt,
UpdatedAt: f.UpdatedAt,
Fingerprints: resolveFingerprints(f.Base()),
}
if f.ZipFileID != nil {
zipFileID := strconv.Itoa(int(*f.ZipFileID))
ret[i].ZipFileID = &zipFileID
}
}
return ret, nil
}
func (r *imageResolver) FileModTime(ctx context.Context, obj *models.Image) (*time.Time, error) {
f, err := r.getPrimaryFile(ctx, obj)
if err != nil {
return nil, err
}
if f != nil {
return &f.ModTime, nil
}
return nil, nil
}
func (r *imageResolver) Paths(ctx context.Context, obj *models.Image) (*ImagePathsType, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string) baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
builder := urlbuilders.NewImageURLBuilder(baseURL, obj) builder := urlbuilders.NewImageURLBuilder(baseURL, obj)
thumbnailPath := builder.GetThumbnailURL() thumbnailPath := builder.GetThumbnailURL()
imagePath := builder.GetImageURL() imagePath := builder.GetImageURL()
return &models.ImagePathsType{ return &ImagePathsType{
Image: &imagePath, Image: &imagePath,
Thumbnail: &thumbnailPath, Thumbnail: &thumbnailPath,
}, nil }, nil
} }
func (r *imageResolver) Galleries(ctx context.Context, obj *models.Image) (ret []*models.Gallery, err error) { func (r *imageResolver) Galleries(ctx context.Context, obj *models.Image) (ret []*models.Gallery, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if !obj.GalleryIDs.Loaded() {
var err error if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Gallery().FindByImageID(obj.ID) return obj.LoadGalleryIDs(ctx, r.repository.Image)
return err
}); err != nil { }); err != nil {
return nil, err return nil, err
} }
}
return ret, nil var errs []error
ret, errs = loaders.From(ctx).GalleryByID.LoadAll(obj.GalleryIDs.List())
return ret, firstError(errs)
} }
func (r *imageResolver) Studio(ctx context.Context, obj *models.Image) (ret *models.Studio, err error) { func (r *imageResolver) Studio(ctx context.Context, obj *models.Image) (ret *models.Studio, err error) {
if !obj.StudioID.Valid { if obj.StudioID == nil {
return nil, nil return nil, nil
} }
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { return loaders.From(ctx).StudioByID.Load(*obj.StudioID)
ret, err = repo.Studio().Find(int(obj.StudioID.Int64))
return err
}); err != nil {
return nil, err
}
return ret, nil
} }
func (r *imageResolver) Tags(ctx context.Context, obj *models.Image) (ret []*models.Tag, err error) { func (r *imageResolver) Tags(ctx context.Context, obj *models.Image) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if !obj.TagIDs.Loaded() {
ret, err = repo.Tag().FindByImageID(obj.ID) if err := r.withTxn(ctx, func(ctx context.Context) error {
return err return obj.LoadTagIDs(ctx, r.repository.Image)
}); err != nil { }); err != nil {
return nil, err return nil, err
} }
}
return ret, nil var errs []error
ret, errs = loaders.From(ctx).TagByID.LoadAll(obj.TagIDs.List())
return ret, firstError(errs)
} }
func (r *imageResolver) Performers(ctx context.Context, obj *models.Image) (ret []*models.Performer, err error) { func (r *imageResolver) Performers(ctx context.Context, obj *models.Image) (ret []*models.Performer, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if !obj.PerformerIDs.Loaded() {
ret, err = repo.Performer().FindByImageID(obj.ID) if err := r.withTxn(ctx, func(ctx context.Context) error {
return err return obj.LoadPerformerIDs(ctx, r.repository.Image)
}); err != nil { }); err != nil {
return nil, err return nil, err
} }
return ret, nil
} }
func (r *imageResolver) CreatedAt(ctx context.Context, obj *models.Image) (*time.Time, error) { var errs []error
return &obj.CreatedAt.Timestamp, nil ret, errs = loaders.From(ctx).PerformerByID.LoadAll(obj.PerformerIDs.List())
} return ret, firstError(errs)
func (r *imageResolver) UpdatedAt(ctx context.Context, obj *models.Image) (*time.Time, error) {
return &obj.UpdatedAt.Timestamp, nil
}
func (r *imageResolver) FileModTime(ctx context.Context, obj *models.Image) (*time.Time, error) {
return &obj.FileModTime.Timestamp, nil
} }
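
loaders.From(ctx) implies the per-request loader set is attached to the request context by middleware before any resolver runs. A minimal sketch of that wiring under assumed names (Loaders, Middleware, loadersKey); the real internal/api/loaders package may differ in detail:

import (
	"context"
	"net/http"
)

type loadersKey struct{}

// Loaders holds one dataloader per entity, as referenced by the resolvers above.
type Loaders struct {
	TagByID *TagLoader
	// StudioByID, PerformerByID, FileByID, ... one field per generated loader
}

func Middleware(newLoaders func(ctx context.Context) *Loaders) func(http.Handler) http.Handler {
	return func(next http.Handler) http.Handler {
		return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
			// fresh loaders per request, so batching and caching stay request-scoped
			ctx := context.WithValue(req.Context(), loadersKey{}, newLoaders(req.Context()))
			next.ServeHTTP(w, req.WithContext(ctx))
		})
	}
}

func From(ctx context.Context) *Loaders {
	l, _ := ctx.Value(loadersKey{}).(*Loaders)
	return l
}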

View File

@@ -4,6 +4,7 @@ import (
"context" "context"
"time" "time"
"github.com/stashapp/stash/internal/api/loaders"
"github.com/stashapp/stash/internal/api/urlbuilders" "github.com/stashapp/stash/internal/api/urlbuilders"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
@@ -56,14 +57,7 @@ func (r *movieResolver) Rating(ctx context.Context, obj *models.Movie) (*int, er
func (r *movieResolver) Studio(ctx context.Context, obj *models.Movie) (ret *models.Studio, err error) { func (r *movieResolver) Studio(ctx context.Context, obj *models.Movie) (ret *models.Studio, err error) {
if obj.StudioID.Valid { if obj.StudioID.Valid {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { return loaders.From(ctx).StudioByID.Load(int(obj.StudioID.Int64))
ret, err = repo.Studio().Find(int(obj.StudioID.Int64))
return err
}); err != nil {
return nil, err
}
return ret, nil
} }
return nil, nil return nil, nil
@@ -92,9 +86,9 @@ func (r *movieResolver) FrontImagePath(ctx context.Context, obj *models.Movie) (
func (r *movieResolver) BackImagePath(ctx context.Context, obj *models.Movie) (*string, error) { func (r *movieResolver) BackImagePath(ctx context.Context, obj *models.Movie) (*string, error) {
// don't return any thing if there is no back image // don't return any thing if there is no back image
var img []byte var img []byte
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
var err error var err error
img, err = repo.Movie().GetBackImage(obj.ID) img, err = r.repository.Movie.GetBackImage(ctx, obj.ID)
if err != nil { if err != nil {
return err return err
} }
@@ -115,8 +109,8 @@ func (r *movieResolver) BackImagePath(ctx context.Context, obj *models.Movie) (*
func (r *movieResolver) SceneCount(ctx context.Context, obj *models.Movie) (ret *int, err error) { func (r *movieResolver) SceneCount(ctx context.Context, obj *models.Movie) (ret *int, err error) {
var res int var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
res, err = repo.Scene().CountByMovieID(obj.ID) res, err = r.repository.Scene.CountByMovieID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -126,9 +120,9 @@ func (r *movieResolver) SceneCount(ctx context.Context, obj *models.Movie) (ret
} }
func (r *movieResolver) Scenes(ctx context.Context, obj *models.Movie) (ret []*models.Scene, err error) { func (r *movieResolver) Scenes(ctx context.Context, obj *models.Movie) (ret []*models.Scene, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
var err error var err error
ret, err = repo.Scene().FindByMovieID(obj.ID) ret, err = r.repository.Scene.FindByMovieID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err

View File

@@ -142,8 +142,8 @@ func (r *performerResolver) ImagePath(ctx context.Context, obj *models.Performer
} }
func (r *performerResolver) Tags(ctx context.Context, obj *models.Performer) (ret []*models.Tag, err error) { func (r *performerResolver) Tags(ctx context.Context, obj *models.Performer) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Tag().FindByPerformerID(obj.ID) ret, err = r.repository.Tag.FindByPerformerID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -154,8 +154,8 @@ func (r *performerResolver) Tags(ctx context.Context, obj *models.Performer) (re
func (r *performerResolver) SceneCount(ctx context.Context, obj *models.Performer) (ret *int, err error) { func (r *performerResolver) SceneCount(ctx context.Context, obj *models.Performer) (ret *int, err error) {
var res int var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
res, err = repo.Scene().CountByPerformerID(obj.ID) res, err = r.repository.Scene.CountByPerformerID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -166,8 +166,8 @@ func (r *performerResolver) SceneCount(ctx context.Context, obj *models.Performe
func (r *performerResolver) ImageCount(ctx context.Context, obj *models.Performer) (ret *int, err error) { func (r *performerResolver) ImageCount(ctx context.Context, obj *models.Performer) (ret *int, err error) {
var res int var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
res, err = image.CountByPerformerID(repo.Image(), obj.ID) res, err = image.CountByPerformerID(ctx, r.repository.Image, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -178,8 +178,8 @@ func (r *performerResolver) ImageCount(ctx context.Context, obj *models.Performe
func (r *performerResolver) GalleryCount(ctx context.Context, obj *models.Performer) (ret *int, err error) { func (r *performerResolver) GalleryCount(ctx context.Context, obj *models.Performer) (ret *int, err error) {
var res int var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
res, err = gallery.CountByPerformerID(repo.Gallery(), obj.ID) res, err = gallery.CountByPerformerID(ctx, r.repository.Gallery, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -189,8 +189,8 @@ func (r *performerResolver) GalleryCount(ctx context.Context, obj *models.Perfor
} }
func (r *performerResolver) Scenes(ctx context.Context, obj *models.Performer) (ret []*models.Scene, err error) { func (r *performerResolver) Scenes(ctx context.Context, obj *models.Performer) (ret []*models.Scene, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Scene().FindByPerformerID(obj.ID) ret, err = r.repository.Scene.FindByPerformerID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -199,15 +199,17 @@ func (r *performerResolver) Scenes(ctx context.Context, obj *models.Performer) (
return ret, nil return ret, nil
} }
func (r *performerResolver) StashIds(ctx context.Context, obj *models.Performer) (ret []*models.StashID, err error) { func (r *performerResolver) StashIds(ctx context.Context, obj *models.Performer) ([]*models.StashID, error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { var ret []models.StashID
ret, err = repo.Performer().GetStashIDs(obj.ID) if err := r.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = r.repository.Performer.GetStashIDs(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
} }
return ret, nil return stashIDsSliceToPtrSlice(ret), nil
} }
func (r *performerResolver) Rating(ctx context.Context, obj *models.Performer) (*int, error) { func (r *performerResolver) Rating(ctx context.Context, obj *models.Performer) (*int, error) {
@@ -256,8 +258,8 @@ func (r *performerResolver) UpdatedAt(ctx context.Context, obj *models.Performer
} }
func (r *performerResolver) Movies(ctx context.Context, obj *models.Performer) (ret []*models.Movie, err error) { func (r *performerResolver) Movies(ctx context.Context, obj *models.Performer) (ret []*models.Movie, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Movie().FindByPerformerID(obj.ID) ret, err = r.repository.Movie.FindByPerformerID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -268,8 +270,8 @@ func (r *performerResolver) Movies(ctx context.Context, obj *models.Performer) (
func (r *performerResolver) MovieCount(ctx context.Context, obj *models.Performer) (ret *int, err error) { func (r *performerResolver) MovieCount(ctx context.Context, obj *models.Performer) (ret *int, err error) {
var res int var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
res, err = repo.Movie().CountByPerformerID(obj.ID) res, err = r.repository.Movie.CountByPerformerID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err

View File

@@ -2,95 +2,171 @@ package api
import ( import (
"context" "context"
"fmt"
"strconv"
"time" "time"
"github.com/stashapp/stash/internal/api/loaders"
"github.com/stashapp/stash/internal/api/urlbuilders" "github.com/stashapp/stash/internal/api/urlbuilders"
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
) )
func (r *sceneResolver) Checksum(ctx context.Context, obj *models.Scene) (*string, error) { func (r *sceneResolver) getPrimaryFile(ctx context.Context, obj *models.Scene) (*file.VideoFile, error) {
if obj.Checksum.Valid { if obj.PrimaryFileID != nil {
return &obj.Checksum.String, nil f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID)
if err != nil {
return nil, err
} }
ret, ok := f.(*file.VideoFile)
if !ok {
return nil, fmt.Errorf("file %T is not an image file", f)
}
obj.Files.SetPrimary(ret)
return ret, nil
}
return nil, nil return nil, nil
} }
func (r *sceneResolver) Oshash(ctx context.Context, obj *models.Scene) (*string, error) { func (r *sceneResolver) getFiles(ctx context.Context, obj *models.Scene) ([]*file.VideoFile, error) {
if obj.OSHash.Valid { fileIDs, err := loaders.From(ctx).SceneFiles.Load(obj.ID)
return &obj.OSHash.String, nil if err != nil {
} return nil, err
return nil, nil
} }
func (r *sceneResolver) Title(ctx context.Context, obj *models.Scene) (*string, error) { files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs)
if obj.Title.Valid { ret := make([]*file.VideoFile, len(files))
return &obj.Title.String, nil for i, bf := range files {
} f, ok := bf.(*file.VideoFile)
return nil, nil if !ok {
return nil, fmt.Errorf("file %T is not a video file", f)
} }
func (r *sceneResolver) Details(ctx context.Context, obj *models.Scene) (*string, error) { ret[i] = f
if obj.Details.Valid {
return &obj.Details.String, nil
}
return nil, nil
} }
func (r *sceneResolver) URL(ctx context.Context, obj *models.Scene) (*string, error) { obj.Files.Set(ret)
if obj.URL.Valid {
return &obj.URL.String, nil return ret, firstError(errs)
}
func (r *sceneResolver) FileModTime(ctx context.Context, obj *models.Scene) (*time.Time, error) {
f, err := r.getPrimaryFile(ctx, obj)
if err != nil {
return nil, err
}
if f != nil {
return &f.ModTime, nil
} }
return nil, nil return nil, nil
} }
func (r *sceneResolver) Date(ctx context.Context, obj *models.Scene) (*string, error) { func (r *sceneResolver) Date(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.Date.Valid { if obj.Date != nil {
result := utils.GetYMDFromDatabaseDate(obj.Date.String) result := obj.Date.String()
return &result, nil return &result, nil
} }
return nil, nil return nil, nil
} }
func (r *sceneResolver) Rating(ctx context.Context, obj *models.Scene) (*int, error) { // File is deprecated
if obj.Rating.Valid {
rating := int(obj.Rating.Int64)
return &rating, nil
}
return nil, nil
}
func (r *sceneResolver) InteractiveSpeed(ctx context.Context, obj *models.Scene) (*int, error) {
if obj.InteractiveSpeed.Valid {
interactive_speed := int(obj.InteractiveSpeed.Int64)
return &interactive_speed, nil
}
return nil, nil
}
func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.SceneFileType, error) { func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.SceneFileType, error) {
width := int(obj.Width.Int64) f, err := r.getPrimaryFile(ctx, obj)
height := int(obj.Height.Int64) if err != nil {
bitrate := int(obj.Bitrate.Int64) return nil, err
}
if f == nil {
return nil, nil
}
bitrate := int(f.BitRate)
size := strconv.FormatInt(f.Size, 10)
return &models.SceneFileType{ return &models.SceneFileType{
Size: &obj.Size.String, Size: &size,
Duration: handleFloat64(obj.Duration.Float64), Duration: handleFloat64(f.Duration),
VideoCodec: &obj.VideoCodec.String, VideoCodec: &f.VideoCodec,
AudioCodec: &obj.AudioCodec.String, AudioCodec: &f.AudioCodec,
Width: &width, Width: &f.Width,
Height: &height, Height: &f.Height,
Framerate: handleFloat64(obj.Framerate.Float64), Framerate: handleFloat64(f.FrameRate),
Bitrate: &bitrate, Bitrate: &bitrate,
}, nil }, nil
} }
func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.ScenePathsType, error) { func (r *sceneResolver) Files(ctx context.Context, obj *models.Scene) ([]*VideoFile, error) {
files, err := r.getFiles(ctx, obj)
if err != nil {
return nil, err
}
ret := make([]*VideoFile, len(files))
for i, f := range files {
ret[i] = &VideoFile{
ID: strconv.Itoa(int(f.ID)),
Path: f.Path,
Basename: f.Basename,
ParentFolderID: strconv.Itoa(int(f.ParentFolderID)),
ModTime: f.ModTime,
Format: f.Format,
Size: f.Size,
Duration: handleFloat64Value(f.Duration),
VideoCodec: f.VideoCodec,
AudioCodec: f.AudioCodec,
Width: f.Width,
Height: f.Height,
FrameRate: handleFloat64Value(f.FrameRate),
BitRate: int(f.BitRate),
CreatedAt: f.CreatedAt,
UpdatedAt: f.UpdatedAt,
Fingerprints: resolveFingerprints(f.Base()),
}
if f.ZipFileID != nil {
zipFileID := strconv.Itoa(int(*f.ZipFileID))
ret[i].ZipFileID = &zipFileID
}
}
return ret, nil
}
func resolveFingerprints(f *file.BaseFile) []*Fingerprint {
ret := make([]*Fingerprint, len(f.Fingerprints))
for i, fp := range f.Fingerprints {
ret[i] = &Fingerprint{
Type: fp.Type,
Value: formatFingerprint(fp.Fingerprint),
}
}
return ret
}
func formatFingerprint(fp interface{}) string {
switch v := fp.(type) {
case int64:
return strconv.FormatUint(uint64(v), 16)
default:
return fmt.Sprintf("%v", fp)
}
}
func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*ScenePathsType, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string) baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
config := manager.GetInstance().Config config := manager.GetInstance().Config
builder := urlbuilders.NewSceneURLBuilder(baseURL, obj.ID) builder := urlbuilders.NewSceneURLBuilder(baseURL, obj.ID)
builder.APIKey = config.GetAPIKey() builder.APIKey = config.GetAPIKey()
screenshotPath := builder.GetScreenshotURL(obj.UpdatedAt.Timestamp) screenshotPath := builder.GetScreenshotURL(obj.UpdatedAt)
previewPath := builder.GetStreamPreviewURL() previewPath := builder.GetStreamPreviewURL()
streamPath := builder.GetStreamURL() streamPath := builder.GetStreamURL()
webpPath := builder.GetStreamPreviewImageURL() webpPath := builder.GetStreamPreviewImageURL()
@@ -101,7 +177,7 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S
captionBasePath := builder.GetCaptionURL() captionBasePath := builder.GetCaptionURL()
interactiveHeatmap := builder.GetInteractiveHeatmapURL() interactiveHeatmap := builder.GetInteractiveHeatmapURL()
return &models.ScenePathsType{ return &ScenePathsType{
Screenshot: &screenshotPath, Screenshot: &screenshotPath,
Preview: &previewPath, Preview: &previewPath,
Stream: &streamPath, Stream: &streamPath,
@@ -116,8 +192,8 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S
} }
func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) (ret []*models.SceneMarker, err error) { func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) (ret []*models.SceneMarker, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.SceneMarker().FindBySceneID(obj.ID) ret, err = r.repository.SceneMarker.FindBySceneID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -126,9 +202,17 @@ func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) (re
return ret, nil return ret, nil
} }
func (r *sceneResolver) Captions(ctx context.Context, obj *models.Scene) (ret []*models.SceneCaption, err error) { func (r *sceneResolver) Captions(ctx context.Context, obj *models.Scene) (ret []*models.VideoCaption, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { primaryFile, err := r.getPrimaryFile(ctx, obj)
ret, err = repo.Scene().GetCaptions(obj.ID) if err != nil {
return nil, err
}
if primaryFile == nil {
return nil, nil
}
if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.File.GetCaptions(ctx, primaryFile.Base().ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -138,121 +222,137 @@ func (r *sceneResolver) Captions(ctx context.Context, obj *models.Scene) (ret []
} }
func (r *sceneResolver) Galleries(ctx context.Context, obj *models.Scene) (ret []*models.Gallery, err error) { func (r *sceneResolver) Galleries(ctx context.Context, obj *models.Scene) (ret []*models.Gallery, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if !obj.GalleryIDs.Loaded() {
ret, err = repo.Gallery().FindBySceneID(obj.ID) if err := r.withTxn(ctx, func(ctx context.Context) error {
return err return obj.LoadGalleryIDs(ctx, r.repository.Scene)
}); err != nil { }); err != nil {
return nil, err return nil, err
} }
}
return ret, nil var errs []error
ret, errs = loaders.From(ctx).GalleryByID.LoadAll(obj.GalleryIDs.List())
return ret, firstError(errs)
} }
func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (ret *models.Studio, err error) { func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (ret *models.Studio, err error) {
if !obj.StudioID.Valid { if obj.StudioID == nil {
return nil, nil return nil, nil
} }
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { return loaders.From(ctx).StudioByID.Load(*obj.StudioID)
ret, err = repo.Studio().Find(int(obj.StudioID.Int64)) }
return err
func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*SceneMovie, err error) {
if !obj.Movies.Loaded() {
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene
return obj.LoadMovies(ctx, qb)
}); err != nil { }); err != nil {
return nil, err return nil, err
} }
return ret, nil
} }
func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*models.SceneMovie, err error) { loader := loaders.From(ctx).MovieByID
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
qb := repo.Scene()
mqb := repo.Movie()
sceneMovies, err := qb.GetMovies(obj.ID) for _, sm := range obj.Movies.List() {
movie, err := loader.Load(sm.MovieID)
if err != nil { if err != nil {
return err return nil, err
}
for _, sm := range sceneMovies {
movie, err := mqb.Find(sm.MovieID)
if err != nil {
return err
} }
sceneIdx := sm.SceneIndex sceneIdx := sm.SceneIndex
sceneMovie := &models.SceneMovie{ sceneMovie := &SceneMovie{
Movie: movie, Movie: movie,
} SceneIndex: sceneIdx,
if sceneIdx.Valid {
idx := int(sceneIdx.Int64)
sceneMovie.SceneIndex = &idx
} }
ret = append(ret, sceneMovie) ret = append(ret, sceneMovie)
} }
return nil
}); err != nil {
return nil, err
}
return ret, nil return ret, nil
} }
func (r *sceneResolver) Tags(ctx context.Context, obj *models.Scene) (ret []*models.Tag, err error) { func (r *sceneResolver) Tags(ctx context.Context, obj *models.Scene) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if !obj.TagIDs.Loaded() {
ret, err = repo.Tag().FindBySceneID(obj.ID) if err := r.withTxn(ctx, func(ctx context.Context) error {
return err return obj.LoadTagIDs(ctx, r.repository.Scene)
}); err != nil { }); err != nil {
return nil, err return nil, err
} }
}
return ret, nil var errs []error
ret, errs = loaders.From(ctx).TagByID.LoadAll(obj.TagIDs.List())
return ret, firstError(errs)
} }
func (r *sceneResolver) Performers(ctx context.Context, obj *models.Scene) (ret []*models.Performer, err error) { func (r *sceneResolver) Performers(ctx context.Context, obj *models.Scene) (ret []*models.Performer, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if !obj.PerformerIDs.Loaded() {
ret, err = repo.Performer().FindBySceneID(obj.ID) if err := r.withTxn(ctx, func(ctx context.Context) error {
return err return obj.LoadPerformerIDs(ctx, r.repository.Scene)
}); err != nil { }); err != nil {
return nil, err return nil, err
} }
}
return ret, nil var errs []error
ret, errs = loaders.From(ctx).PerformerByID.LoadAll(obj.PerformerIDs.List())
return ret, firstError(errs)
}
func stashIDsSliceToPtrSlice(v []models.StashID) []*models.StashID {
ret := make([]*models.StashID, len(v))
for i, vv := range v {
c := vv
ret[i] = &c
}
return ret
} }
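
stashIDsSliceToPtrSlice copies each element into a fresh variable before taking its address; with the loop semantics of Go versions before 1.22, taking &vv directly would leave every slot pointing at the same loop variable. A small illustration with made-up values (the Endpoint field name on models.StashID is assumed):

vals := []models.StashID{{Endpoint: "a"}, {Endpoint: "b"}}

bad := make([]*models.StashID, len(vals))
for i, vv := range vals {
	bad[i] = &vv // pre-Go 1.22: both entries alias the loop variable, ending as "b"
}

good := make([]*models.StashID, len(vals))
for i, vv := range vals {
	c := vv
	good[i] = &c // each entry points at its own copy, as the helper above does
}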
func (r *sceneResolver) StashIds(ctx context.Context, obj *models.Scene) (ret []*models.StashID, err error) { func (r *sceneResolver) StashIds(ctx context.Context, obj *models.Scene) (ret []*models.StashID, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Scene().GetStashIDs(obj.ID) return obj.LoadStashIDs(ctx, r.repository.Scene)
return err
}); err != nil { }); err != nil {
return nil, err return nil, err
} }
return ret, nil return stashIDsSliceToPtrSlice(obj.StashIDs.List()), nil
} }
func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string, error) { func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.Phash.Valid { f, err := r.getPrimaryFile(ctx, obj)
hexval := utils.PhashToString(obj.Phash.Int64) if err != nil {
return nil, err
}
if f == nil {
return nil, nil
}
val := f.Fingerprints.Get(file.FingerprintTypePhash)
if val == nil {
return nil, nil
}
phash, _ := val.(int64)
if phash != 0 {
hexval := utils.PhashToString(phash)
return &hexval, nil return &hexval, nil
} }
return nil, nil return nil, nil
} }
func (r *sceneResolver) CreatedAt(ctx context.Context, obj *models.Scene) (*time.Time, error) { func (r *sceneResolver) SceneStreams(ctx context.Context, obj *models.Scene) ([]*manager.SceneStreamEndpoint, error) {
return &obj.CreatedAt.Timestamp, nil // load the primary file into the scene
_, err := r.getPrimaryFile(ctx, obj)
if err != nil {
return nil, err
} }
func (r *sceneResolver) UpdatedAt(ctx context.Context, obj *models.Scene) (*time.Time, error) {
return &obj.UpdatedAt.Timestamp, nil
}
func (r *sceneResolver) FileModTime(ctx context.Context, obj *models.Scene) (*time.Time, error) {
return &obj.FileModTime.Timestamp, nil
}
func (r *sceneResolver) SceneStreams(ctx context.Context, obj *models.Scene) ([]*models.SceneStreamEndpoint, error) {
config := manager.GetInstance().Config config := manager.GetInstance().Config
baseURL, _ := ctx.Value(BaseURLCtxKey).(string) baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
@@ -260,3 +360,27 @@ func (r *sceneResolver) SceneStreams(ctx context.Context, obj *models.Scene) ([]
return manager.GetSceneStreamPaths(obj, builder.GetStreamURL(), config.GetMaxStreamingTranscodeSize()) return manager.GetSceneStreamPaths(obj, builder.GetStreamURL(), config.GetMaxStreamingTranscodeSize())
} }
func (r *sceneResolver) Interactive(ctx context.Context, obj *models.Scene) (bool, error) {
primaryFile, err := r.getPrimaryFile(ctx, obj)
if err != nil {
return false, err
}
if primaryFile == nil {
return false, nil
}
return primaryFile.Interactive, nil
}
func (r *sceneResolver) InteractiveSpeed(ctx context.Context, obj *models.Scene) (*int, error) {
primaryFile, err := r.getPrimaryFile(ctx, obj)
if err != nil {
return nil, err
}
if primaryFile == nil {
return nil, nil
}
return primaryFile.InteractiveSpeed, nil
}
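
Phash and the other fingerprints now hang off the primary video file rather than the scene row, and formatFingerprint renders 64-bit values as unsigned hex. A tiny, self-contained check of that formatting (the value is made up):

package main

import (
	"fmt"
	"strconv"
)

func main() {
	phash := int64(-0x12345678) // example fingerprint value only
	fmt.Println(strconv.FormatUint(uint64(phash), 16))
	// prints ffffffffedcba988, the same rendering formatFingerprint applies to int64 fingerprints
}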

View File

@@ -13,9 +13,9 @@ func (r *sceneMarkerResolver) Scene(ctx context.Context, obj *models.SceneMarker
panic("Invalid scene id") panic("Invalid scene id")
} }
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
sceneID := int(obj.SceneID.Int64) sceneID := int(obj.SceneID.Int64)
ret, err = repo.Scene().Find(sceneID) ret, err = r.repository.Scene.Find(ctx, sceneID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -25,8 +25,8 @@ func (r *sceneMarkerResolver) Scene(ctx context.Context, obj *models.SceneMarker
} }
func (r *sceneMarkerResolver) PrimaryTag(ctx context.Context, obj *models.SceneMarker) (ret *models.Tag, err error) { func (r *sceneMarkerResolver) PrimaryTag(ctx context.Context, obj *models.SceneMarker) (ret *models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Tag().Find(obj.PrimaryTagID) ret, err = r.repository.Tag.Find(ctx, obj.PrimaryTagID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -36,8 +36,8 @@ func (r *sceneMarkerResolver) PrimaryTag(ctx context.Context, obj *models.SceneM
} }
func (r *sceneMarkerResolver) Tags(ctx context.Context, obj *models.SceneMarker) (ret []*models.Tag, err error) { func (r *sceneMarkerResolver) Tags(ctx context.Context, obj *models.SceneMarker) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Tag().FindBySceneMarkerID(obj.ID) ret, err = r.repository.Tag.FindBySceneMarkerID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err

View File

@@ -4,6 +4,7 @@ import (
"context" "context"
"time" "time"
"github.com/stashapp/stash/internal/api/loaders"
"github.com/stashapp/stash/internal/api/urlbuilders" "github.com/stashapp/stash/internal/api/urlbuilders"
"github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
@@ -29,9 +30,9 @@ func (r *studioResolver) ImagePath(ctx context.Context, obj *models.Studio) (*st
imagePath := urlbuilders.NewStudioURLBuilder(baseURL, obj).GetStudioImageURL() imagePath := urlbuilders.NewStudioURLBuilder(baseURL, obj).GetStudioImageURL()
var hasImage bool var hasImage bool
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
var err error var err error
hasImage, err = repo.Studio().HasImage(obj.ID) hasImage, err = r.repository.Studio.HasImage(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -46,8 +47,8 @@ func (r *studioResolver) ImagePath(ctx context.Context, obj *models.Studio) (*st
} }
func (r *studioResolver) Aliases(ctx context.Context, obj *models.Studio) (ret []string, err error) { func (r *studioResolver) Aliases(ctx context.Context, obj *models.Studio) (ret []string, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Studio().GetAliases(obj.ID) ret, err = r.repository.Studio.GetAliases(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -58,8 +59,8 @@ func (r *studioResolver) Aliases(ctx context.Context, obj *models.Studio) (ret [
func (r *studioResolver) SceneCount(ctx context.Context, obj *models.Studio) (ret *int, err error) { func (r *studioResolver) SceneCount(ctx context.Context, obj *models.Studio) (ret *int, err error) {
var res int var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
res, err = repo.Scene().CountByStudioID(obj.ID) res, err = r.repository.Scene.CountByStudioID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -70,8 +71,8 @@ func (r *studioResolver) SceneCount(ctx context.Context, obj *models.Studio) (re
func (r *studioResolver) ImageCount(ctx context.Context, obj *models.Studio) (ret *int, err error) { func (r *studioResolver) ImageCount(ctx context.Context, obj *models.Studio) (ret *int, err error) {
var res int var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
res, err = image.CountByStudioID(repo.Image(), obj.ID) res, err = image.CountByStudioID(ctx, r.repository.Image, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -82,8 +83,8 @@ func (r *studioResolver) ImageCount(ctx context.Context, obj *models.Studio) (re
func (r *studioResolver) GalleryCount(ctx context.Context, obj *models.Studio) (ret *int, err error) { func (r *studioResolver) GalleryCount(ctx context.Context, obj *models.Studio) (ret *int, err error) {
var res int var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
res, err = gallery.CountByStudioID(repo.Gallery(), obj.ID) res, err = gallery.CountByStudioID(ctx, r.repository.Gallery, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -97,19 +98,12 @@ func (r *studioResolver) ParentStudio(ctx context.Context, obj *models.Studio) (
return nil, nil return nil, nil
} }
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { return loaders.From(ctx).StudioByID.Load(int(obj.ParentID.Int64))
ret, err = repo.Studio().Find(int(obj.ParentID.Int64))
return err
}); err != nil {
return nil, err
}
return ret, nil
} }
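Note: ParentStudio now resolves through the request-scoped dataloader instead of opening its own read transaction. The loader code is generated with vektah/dataloaden (added to go.mod in this PR); the sketch below shows how such a generated loader is typically wired per request. NewStudioLoader, StudioLoaderConfig and FindMany are assumptions for illustration only — the diff itself only shows the loaders.From(ctx).StudioByID.Load(...) call.

// Illustrative only: constructing a dataloaden-style loader (normally done in
// middleware) so that loaders.From(ctx) can hand it to resolvers. It batches and
// caches studio lookups made while resolving a single GraphQL request.
func newStudioLoader(ctx context.Context, finder interface {
	FindMany(ctx context.Context, ids []int) ([]*models.Studio, error) // assumed batch finder
}) *StudioLoader {
	return NewStudioLoader(StudioLoaderConfig{
		MaxBatch: 100,              // cap the number of IDs per batched query
		Wait:     time.Millisecond, // briefly collect IDs from concurrent resolvers
		Fetch: func(ids []int) ([]*models.Studio, []error) {
			studios, err := finder.FindMany(ctx, ids) // one query for the whole batch
			if err != nil {
				return nil, []error{err}
			}
			return studios, nil // results positionally aligned with ids
		},
	})
}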
func (r *studioResolver) ChildStudios(ctx context.Context, obj *models.Studio) (ret []*models.Studio, err error) { func (r *studioResolver) ChildStudios(ctx context.Context, obj *models.Studio) (ret []*models.Studio, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Studio().FindChildren(obj.ID) ret, err = r.repository.Studio.FindChildren(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -118,15 +112,17 @@ func (r *studioResolver) ChildStudios(ctx context.Context, obj *models.Studio) (
return ret, nil return ret, nil
} }
func (r *studioResolver) StashIds(ctx context.Context, obj *models.Studio) (ret []*models.StashID, err error) { func (r *studioResolver) StashIds(ctx context.Context, obj *models.Studio) ([]*models.StashID, error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { var ret []models.StashID
ret, err = repo.Studio().GetStashIDs(obj.ID) if err := r.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = r.repository.Studio.GetStashIDs(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
} }
return ret, nil return stashIDsSliceToPtrSlice(ret), nil
} }
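Note: StashIds now fetches a value slice inside the transaction and converts it at the GraphQL boundary. stashIDsSliceToPtrSlice is not shown in this hunk; a minimal stand-in consistent with its use here, purely for illustration:

// Hypothetical helper matching the call above: converts []models.StashID into the
// []*models.StashID shape the generated GraphQL layer expects.
func stashIDsSliceToPtrSlice(v []models.StashID) []*models.StashID {
	ret := make([]*models.StashID, len(v))
	for i := range v {
		ret[i] = &v[i] // address of the slice element, not of a loop-local copy
	}
	return ret
}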
func (r *studioResolver) Rating(ctx context.Context, obj *models.Studio) (*int, error) { func (r *studioResolver) Rating(ctx context.Context, obj *models.Studio) (*int, error) {
@@ -153,8 +149,8 @@ func (r *studioResolver) UpdatedAt(ctx context.Context, obj *models.Studio) (*ti
} }
func (r *studioResolver) Movies(ctx context.Context, obj *models.Studio) (ret []*models.Movie, err error) { func (r *studioResolver) Movies(ctx context.Context, obj *models.Studio) (ret []*models.Movie, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Movie().FindByStudioID(obj.ID) ret, err = r.repository.Movie.FindByStudioID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -165,8 +161,8 @@ func (r *studioResolver) Movies(ctx context.Context, obj *models.Studio) (ret []
func (r *studioResolver) MovieCount(ctx context.Context, obj *models.Studio) (ret *int, err error) { func (r *studioResolver) MovieCount(ctx context.Context, obj *models.Studio) (ret *int, err error) {
var res int var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
res, err = repo.Movie().CountByStudioID(obj.ID) res, err = r.repository.Movie.CountByStudioID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
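Note: every read in these resolvers now goes through r.withTxn with a context callback, replacing withReadTxn with an injected repository; the transaction travels on the context and the stores are plain fields on the resolver. A plausible shape for the helper, assuming a pkg/txn-style wrapper (the helper itself is not shown in this diff):

// Sketch under assumptions: withTxn opens (or joins) a transaction, attaches it to
// the context, runs fn, then commits or rolls back. Repository methods pick the
// transaction up from ctx instead of receiving a repository argument.
func (r *Resolver) withTxn(ctx context.Context, fn func(ctx context.Context) error) error {
	return txn.WithTxn(ctx, r.txnManager, fn)
}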

View File

@@ -11,8 +11,8 @@ import (
) )
func (r *tagResolver) Parents(ctx context.Context, obj *models.Tag) (ret []*models.Tag, err error) { func (r *tagResolver) Parents(ctx context.Context, obj *models.Tag) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Tag().FindByChildTagID(obj.ID) ret, err = r.repository.Tag.FindByChildTagID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -22,8 +22,8 @@ func (r *tagResolver) Parents(ctx context.Context, obj *models.Tag) (ret []*mode
} }
func (r *tagResolver) Children(ctx context.Context, obj *models.Tag) (ret []*models.Tag, err error) { func (r *tagResolver) Children(ctx context.Context, obj *models.Tag) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Tag().FindByParentTagID(obj.ID) ret, err = r.repository.Tag.FindByParentTagID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -33,8 +33,8 @@ func (r *tagResolver) Children(ctx context.Context, obj *models.Tag) (ret []*mod
} }
func (r *tagResolver) Aliases(ctx context.Context, obj *models.Tag) (ret []string, err error) { func (r *tagResolver) Aliases(ctx context.Context, obj *models.Tag) (ret []string, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Tag().GetAliases(obj.ID) ret, err = r.repository.Tag.GetAliases(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -45,8 +45,8 @@ func (r *tagResolver) Aliases(ctx context.Context, obj *models.Tag) (ret []strin
func (r *tagResolver) SceneCount(ctx context.Context, obj *models.Tag) (ret *int, err error) { func (r *tagResolver) SceneCount(ctx context.Context, obj *models.Tag) (ret *int, err error) {
var count int var count int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
count, err = repo.Scene().CountByTagID(obj.ID) count, err = r.repository.Scene.CountByTagID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -57,8 +57,8 @@ func (r *tagResolver) SceneCount(ctx context.Context, obj *models.Tag) (ret *int
func (r *tagResolver) SceneMarkerCount(ctx context.Context, obj *models.Tag) (ret *int, err error) { func (r *tagResolver) SceneMarkerCount(ctx context.Context, obj *models.Tag) (ret *int, err error) {
var count int var count int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
count, err = repo.SceneMarker().CountByTagID(obj.ID) count, err = r.repository.SceneMarker.CountByTagID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -69,8 +69,8 @@ func (r *tagResolver) SceneMarkerCount(ctx context.Context, obj *models.Tag) (re
func (r *tagResolver) ImageCount(ctx context.Context, obj *models.Tag) (ret *int, err error) { func (r *tagResolver) ImageCount(ctx context.Context, obj *models.Tag) (ret *int, err error) {
var res int var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
res, err = image.CountByTagID(repo.Image(), obj.ID) res, err = image.CountByTagID(ctx, r.repository.Image, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -81,8 +81,8 @@ func (r *tagResolver) ImageCount(ctx context.Context, obj *models.Tag) (ret *int
func (r *tagResolver) GalleryCount(ctx context.Context, obj *models.Tag) (ret *int, err error) { func (r *tagResolver) GalleryCount(ctx context.Context, obj *models.Tag) (ret *int, err error) {
var res int var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
res, err = gallery.CountByTagID(repo.Gallery(), obj.ID) res, err = gallery.CountByTagID(ctx, r.repository.Gallery, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -93,8 +93,8 @@ func (r *tagResolver) GalleryCount(ctx context.Context, obj *models.Tag) (ret *i
func (r *tagResolver) PerformerCount(ctx context.Context, obj *models.Tag) (ret *int, err error) { func (r *tagResolver) PerformerCount(ctx context.Context, obj *models.Tag) (ret *int, err error) {
var count int var count int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
count, err = repo.Performer().CountByTagID(obj.ID) count, err = r.repository.Performer.CountByTagID(ctx, obj.ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err

View File

@@ -15,21 +15,21 @@ import (
var ErrOverriddenConfig = errors.New("cannot set overridden value") var ErrOverriddenConfig = errors.New("cannot set overridden value")
func (r *mutationResolver) Setup(ctx context.Context, input models.SetupInput) (bool, error) { func (r *mutationResolver) Setup(ctx context.Context, input manager.SetupInput) (bool, error) {
err := manager.GetInstance().Setup(ctx, input) err := manager.GetInstance().Setup(ctx, input)
return err == nil, err return err == nil, err
} }
func (r *mutationResolver) Migrate(ctx context.Context, input models.MigrateInput) (bool, error) { func (r *mutationResolver) Migrate(ctx context.Context, input manager.MigrateInput) (bool, error) {
err := manager.GetInstance().Migrate(ctx, input) err := manager.GetInstance().Migrate(ctx, input)
return err == nil, err return err == nil, err
} }
func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.ConfigGeneralInput) (*models.ConfigGeneralResult, error) { func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGeneralInput) (*ConfigGeneralResult, error) {
c := config.GetInstance() c := config.GetInstance()
existingPaths := c.GetStashPaths() existingPaths := c.GetStashPaths()
if len(input.Stashes) > 0 { if input.Stashes != nil {
for _, s := range input.Stashes { for _, s := range input.Stashes {
// Only validate existence of new paths // Only validate existence of new paths
isNew := true isNew := true
@@ -132,7 +132,9 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
} }
// validate changing VideoFileNamingAlgorithm // validate changing VideoFileNamingAlgorithm
if err := manager.ValidateVideoFileNamingAlgorithm(r.txnManager, *input.VideoFileNamingAlgorithm); err != nil { if err := r.withTxn(context.TODO(), func(ctx context.Context) error {
return manager.ValidateVideoFileNamingAlgorithm(ctx, r.repository.Scene, *input.VideoFileNamingAlgorithm)
}); err != nil {
return makeConfigGeneralResult(), err return makeConfigGeneralResult(), err
} }
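Note: ValidateVideoFileNamingAlgorithm previously received the transaction manager itself; it now runs inside a transaction opened by the resolver and takes a context plus the scene store. A signature consistent with the call above — the counter interface and its method are assumptions, inferred only from what such a check would need:

// Hypothetical: the validator presumably refuses the switch while scenes are still
// missing the hash the new naming algorithm requires.
type SceneMissingHashCounter interface {
	CountMissingHash(ctx context.Context, algo models.HashAlgorithm) (int, error) // assumed
}

func ValidateVideoFileNamingAlgorithm(ctx context.Context, scenes SceneMissingHashCounter, newAlgo models.HashAlgorithm) error {
	missing, err := scenes.CountMissingHash(ctx, newAlgo)
	if err != nil {
		return err
	}
	if missing > 0 {
		return fmt.Errorf("%d scenes are missing the hash required by %s", missing, newAlgo)
	}
	return nil
}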
@@ -281,7 +283,7 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
return makeConfigGeneralResult(), nil return makeConfigGeneralResult(), nil
} }
func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.ConfigInterfaceInput) (*models.ConfigInterfaceResult, error) { func (r *mutationResolver) ConfigureInterface(ctx context.Context, input ConfigInterfaceInput) (*ConfigInterfaceResult, error) {
c := config.GetInstance() c := config.GetInstance()
setBool := func(key string, v *bool) { setBool := func(key string, v *bool) {
@@ -338,10 +340,10 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.
c.Set(config.ImageLightboxSlideshowDelay, *options.SlideshowDelay) c.Set(config.ImageLightboxSlideshowDelay, *options.SlideshowDelay)
} }
setString(config.ImageLightboxDisplayMode, (*string)(options.DisplayMode)) setString(config.ImageLightboxDisplayModeKey, (*string)(options.DisplayMode))
setBool(config.ImageLightboxScaleUp, options.ScaleUp) setBool(config.ImageLightboxScaleUp, options.ScaleUp)
setBool(config.ImageLightboxResetZoomOnNav, options.ResetZoomOnNav) setBool(config.ImageLightboxResetZoomOnNav, options.ResetZoomOnNav)
setString(config.ImageLightboxScrollMode, (*string)(options.ScrollMode)) setString(config.ImageLightboxScrollModeKey, (*string)(options.ScrollMode))
if options.ScrollAttemptsBeforeChange != nil { if options.ScrollAttemptsBeforeChange != nil {
c.Set(config.ImageLightboxScrollAttemptsBeforeChange, *options.ScrollAttemptsBeforeChange) c.Set(config.ImageLightboxScrollAttemptsBeforeChange, *options.ScrollAttemptsBeforeChange)
@@ -376,7 +378,7 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.
return makeConfigInterfaceResult(), nil return makeConfigInterfaceResult(), nil
} }
func (r *mutationResolver) ConfigureDlna(ctx context.Context, input models.ConfigDLNAInput) (*models.ConfigDLNAResult, error) { func (r *mutationResolver) ConfigureDlna(ctx context.Context, input ConfigDLNAInput) (*ConfigDLNAResult, error) {
c := config.GetInstance() c := config.GetInstance()
if input.ServerName != nil { if input.ServerName != nil {
@@ -413,7 +415,7 @@ func (r *mutationResolver) ConfigureDlna(ctx context.Context, input models.Confi
return makeConfigDLNAResult(), nil return makeConfigDLNAResult(), nil
} }
func (r *mutationResolver) ConfigureScraping(ctx context.Context, input models.ConfigScrapingInput) (*models.ConfigScrapingResult, error) { func (r *mutationResolver) ConfigureScraping(ctx context.Context, input ConfigScrapingInput) (*ConfigScrapingResult, error) {
c := config.GetInstance() c := config.GetInstance()
refreshScraperCache := false refreshScraperCache := false
@@ -445,7 +447,7 @@ func (r *mutationResolver) ConfigureScraping(ctx context.Context, input models.C
return makeConfigScrapingResult(), nil return makeConfigScrapingResult(), nil
} }
func (r *mutationResolver) ConfigureDefaults(ctx context.Context, input models.ConfigDefaultSettingsInput) (*models.ConfigDefaultSettingsResult, error) { func (r *mutationResolver) ConfigureDefaults(ctx context.Context, input ConfigDefaultSettingsInput) (*ConfigDefaultSettingsResult, error) {
c := config.GetInstance() c := config.GetInstance()
if input.Identify != nil { if input.Identify != nil {
@@ -453,7 +455,7 @@ func (r *mutationResolver) ConfigureDefaults(ctx context.Context, input models.C
} }
if input.Scan != nil { if input.Scan != nil {
c.Set(config.DefaultScanSettings, input.Scan) c.Set(config.DefaultScanSettings, input.Scan.ScanMetadataOptions)
} }
if input.AutoTag != nil { if input.AutoTag != nil {
@@ -479,7 +481,7 @@ func (r *mutationResolver) ConfigureDefaults(ctx context.Context, input models.C
return makeConfigDefaultsResult(), nil return makeConfigDefaultsResult(), nil
} }
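Note the narrowing at the Scan default above: only input.Scan.ScanMetadataOptions is persisted, not the whole input. Presumably ScanMetadataInput embeds (or exposes) a reusable options struct alongside per-invocation fields such as paths; an assumed shape, for illustration only:

// Assumed shape: defaults should capture the reusable options, not per-run fields
// like Paths, which is why only the embedded struct is stored.
type ScanMetadataInput struct {
	ScanMetadataOptions          // persisted via config.DefaultScanSettings
	Paths []string               // per-invocation; not a sensible default
}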
func (r *mutationResolver) GenerateAPIKey(ctx context.Context, input models.GenerateAPIKeyInput) (string, error) { func (r *mutationResolver) GenerateAPIKey(ctx context.Context, input GenerateAPIKeyInput) (string, error) {
c := config.GetInstance() c := config.GetInstance()
var newAPIKey string var newAPIKey string

View File

@@ -5,10 +5,9 @@ import (
"time" "time"
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/models"
) )
func (r *mutationResolver) EnableDlna(ctx context.Context, input models.EnableDLNAInput) (bool, error) { func (r *mutationResolver) EnableDlna(ctx context.Context, input EnableDLNAInput) (bool, error) {
err := manager.GetInstance().DLNAService.Start(parseMinutes(input.Duration)) err := manager.GetInstance().DLNAService.Start(parseMinutes(input.Duration))
if err != nil { if err != nil {
return false, err return false, err
@@ -16,17 +15,17 @@ func (r *mutationResolver) EnableDlna(ctx context.Context, input models.EnableDL
return true, nil return true, nil
} }
func (r *mutationResolver) DisableDlna(ctx context.Context, input models.DisableDLNAInput) (bool, error) { func (r *mutationResolver) DisableDlna(ctx context.Context, input DisableDLNAInput) (bool, error) {
manager.GetInstance().DLNAService.Stop(parseMinutes(input.Duration)) manager.GetInstance().DLNAService.Stop(parseMinutes(input.Duration))
return true, nil return true, nil
} }
func (r *mutationResolver) AddTempDlnaip(ctx context.Context, input models.AddTempDLNAIPInput) (bool, error) { func (r *mutationResolver) AddTempDlnaip(ctx context.Context, input AddTempDLNAIPInput) (bool, error) {
manager.GetInstance().DLNAService.AddTempDLNAIP(input.Address, parseMinutes(input.Duration)) manager.GetInstance().DLNAService.AddTempDLNAIP(input.Address, parseMinutes(input.Duration))
return true, nil return true, nil
} }
func (r *mutationResolver) RemoveTempDlnaip(ctx context.Context, input models.RemoveTempDLNAIPInput) (bool, error) { func (r *mutationResolver) RemoveTempDlnaip(ctx context.Context, input RemoveTempDLNAIPInput) (bool, error) {
ret := manager.GetInstance().DLNAService.RemoveTempDLNAIP(input.Address) ret := manager.GetInstance().DLNAService.RemoveTempDLNAIP(input.Address)
return ret, nil return ret, nil
} }

View File

@@ -2,7 +2,6 @@ package api
import ( import (
"context" "context"
"database/sql"
"errors" "errors"
"fmt" "fmt"
"os" "os"
@@ -11,7 +10,6 @@ import (
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin" "github.com/stashapp/stash/pkg/plugin"
@@ -21,8 +19,8 @@ import (
) )
func (r *mutationResolver) getGallery(ctx context.Context, id int) (ret *models.Gallery, err error) { func (r *mutationResolver) getGallery(ctx context.Context, id int) (ret *models.Gallery, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Gallery().Find(id) ret, err = r.repository.Gallery.Find(ctx, id)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -31,75 +29,57 @@ func (r *mutationResolver) getGallery(ctx context.Context, id int) (ret *models.
return ret, nil return ret, nil
} }
func (r *mutationResolver) GalleryCreate(ctx context.Context, input models.GalleryCreateInput) (*models.Gallery, error) { func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreateInput) (*models.Gallery, error) {
// name must be provided // name must be provided
if input.Title == "" { if input.Title == "" {
return nil, errors.New("title must not be empty") return nil, errors.New("title must not be empty")
} }
// for manually created galleries, generate checksum from title
checksum := md5.FromString(input.Title)
// Populate a new gallery from the input // Populate a new gallery from the input
performerIDs, err := stringslice.StringSliceToIntSlice(input.PerformerIds)
if err != nil {
return nil, fmt.Errorf("converting performer ids: %w", err)
}
tagIDs, err := stringslice.StringSliceToIntSlice(input.TagIds)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
sceneIDs, err := stringslice.StringSliceToIntSlice(input.SceneIds)
if err != nil {
return nil, fmt.Errorf("converting scene ids: %w", err)
}
currentTime := time.Now() currentTime := time.Now()
newGallery := models.Gallery{ newGallery := models.Gallery{
Title: sql.NullString{ Title: input.Title,
String: input.Title, PerformerIDs: models.NewRelatedIDs(performerIDs),
Valid: true, TagIDs: models.NewRelatedIDs(tagIDs),
}, SceneIDs: models.NewRelatedIDs(sceneIDs),
Checksum: checksum, CreatedAt: currentTime,
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, UpdatedAt: currentTime,
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
} }
if input.URL != nil { if input.URL != nil {
newGallery.URL = sql.NullString{String: *input.URL, Valid: true} newGallery.URL = *input.URL
} }
if input.Details != nil { if input.Details != nil {
newGallery.Details = sql.NullString{String: *input.Details, Valid: true} newGallery.Details = *input.Details
}
if input.URL != nil {
newGallery.URL = sql.NullString{String: *input.URL, Valid: true}
}
if input.Date != nil {
newGallery.Date = models.SQLiteDate{String: *input.Date, Valid: true}
}
if input.Rating != nil {
newGallery.Rating = sql.NullInt64{Int64: int64(*input.Rating), Valid: true}
} else {
// rating must be nullable
newGallery.Rating = sql.NullInt64{Valid: false}
} }
if input.Date != nil {
d := models.NewDate(*input.Date)
newGallery.Date = &d
}
newGallery.Rating = input.Rating
if input.StudioID != nil { if input.StudioID != nil {
studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64) studioID, _ := strconv.Atoi(*input.StudioID)
newGallery.StudioID = sql.NullInt64{Int64: studioID, Valid: true} newGallery.StudioID = &studioID
} else {
// studio must be nullable
newGallery.StudioID = sql.NullInt64{Valid: false}
} }
// Start the transaction and save the gallery // Start the transaction and save the gallery
var gallery *models.Gallery if err := r.withTxn(ctx, func(ctx context.Context) error {
if err := r.withTxn(ctx, func(repo models.Repository) error { qb := r.repository.Gallery
qb := repo.Gallery() if err := qb.Create(ctx, &newGallery, nil); err != nil {
var err error
gallery, err = qb.Create(newGallery)
if err != nil {
return err
}
// Save the performers
if err := r.updateGalleryPerformers(qb, gallery.ID, input.PerformerIds); err != nil {
return err
}
// Save the tags
if err := r.updateGalleryTags(qb, gallery.ID, input.TagIds); err != nil {
return err
}
// Save the scenes
if err := r.updateGalleryScenes(qb, gallery.ID, input.SceneIds); err != nil {
return err return err
} }
@@ -108,32 +88,12 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input models.Galle
return nil, err return nil, err
} }
r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryCreatePost, input, nil) r.hookExecutor.ExecutePostHooks(ctx, newGallery.ID, plugin.GalleryCreatePost, input, nil)
return r.getGallery(ctx, gallery.ID) return r.getGallery(ctx, newGallery.ID)
} }
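Note: GalleryCreate now builds the model with plain Go fields and models.NewRelatedIDs, replacing the sql.Null* wrappers and the follow-up join updates deleted above. RelatedIDs is not defined in this diff; a minimal stand-in consistent with how it is used here (an ID list plus a loaded flag), purely illustrative:

// Illustrative stand-in for models.RelatedIDs as used above: a relationship that is
// either loaded (with its ID list) or not yet loaded from the database.
type RelatedIDs struct {
	list   []int
	loaded bool
}

// NewRelatedIDs marks the relationship as loaded with the given IDs, so Create can
// persist the joins in the same transaction as the gallery row itself.
func NewRelatedIDs(ids []int) RelatedIDs { return RelatedIDs{list: ids, loaded: true} }

func (r RelatedIDs) Loaded() bool { return r.loaded }
func (r RelatedIDs) List() []int  { return r.list }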
func (r *mutationResolver) updateGalleryPerformers(qb models.GalleryReaderWriter, galleryID int, performerIDs []string) error { type GallerySceneUpdater interface {
ids, err := stringslice.StringSliceToIntSlice(performerIDs) UpdateScenes(ctx context.Context, galleryID int, sceneIDs []int) error
if err != nil {
return err
}
return qb.UpdatePerformers(galleryID, ids)
}
func (r *mutationResolver) updateGalleryTags(qb models.GalleryReaderWriter, galleryID int, tagIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(tagIDs)
if err != nil {
return err
}
return qb.UpdateTags(galleryID, ids)
}
func (r *mutationResolver) updateGalleryScenes(qb models.GalleryReaderWriter, galleryID int, sceneIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(sceneIDs)
if err != nil {
return err
}
return qb.UpdateScenes(galleryID, ids)
} }
func (r *mutationResolver) GalleryUpdate(ctx context.Context, input models.GalleryUpdateInput) (ret *models.Gallery, err error) { func (r *mutationResolver) GalleryUpdate(ctx context.Context, input models.GalleryUpdateInput) (ret *models.Gallery, err error) {
@@ -142,8 +102,8 @@ func (r *mutationResolver) GalleryUpdate(ctx context.Context, input models.Galle
} }
// Start the transaction and save the gallery // Start the transaction and save the gallery
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.galleryUpdate(input, translator, repo) ret, err = r.galleryUpdate(ctx, input, translator)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -158,13 +118,13 @@ func (r *mutationResolver) GalleriesUpdate(ctx context.Context, input []*models.
inputMaps := getUpdateInputMaps(ctx) inputMaps := getUpdateInputMaps(ctx)
// Start the transaction and save the gallery // Start the transaction and save the gallery
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
for i, gallery := range input { for i, gallery := range input {
translator := changesetTranslator{ translator := changesetTranslator{
inputMap: inputMaps[i], inputMap: inputMaps[i],
} }
thisGallery, err := r.galleryUpdate(*gallery, translator, repo) thisGallery, err := r.galleryUpdate(ctx, *gallery, translator)
if err != nil { if err != nil {
return err return err
} }
@@ -196,8 +156,8 @@ func (r *mutationResolver) GalleriesUpdate(ctx context.Context, input []*models.
return newRet, nil return newRet, nil
} }
func (r *mutationResolver) galleryUpdate(input models.GalleryUpdateInput, translator changesetTranslator, repo models.Repository) (*models.Gallery, error) { func (r *mutationResolver) galleryUpdate(ctx context.Context, input models.GalleryUpdateInput, translator changesetTranslator) (*models.Gallery, error) {
qb := repo.Gallery() qb := r.repository.Gallery
// Populate gallery from the input // Populate gallery from the input
galleryID, err := strconv.Atoi(input.ID) galleryID, err := strconv.Atoi(input.ID)
@@ -205,7 +165,7 @@ func (r *mutationResolver) galleryUpdate(input models.GalleryUpdateInput, transl
return nil, err return nil, err
} }
originalGallery, err := qb.Find(galleryID) originalGallery, err := qb.Find(ctx, galleryID)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -214,11 +174,7 @@ func (r *mutationResolver) galleryUpdate(input models.GalleryUpdateInput, transl
return nil, errors.New("not found") return nil, errors.New("not found")
} }
updatedTime := time.Now() updatedGallery := models.NewGalleryPartial()
updatedGallery := models.GalleryPartial{
ID: galleryID,
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
}
if input.Title != nil { if input.Title != nil {
// ensure title is not empty // ensure title is not empty
@@ -226,124 +182,106 @@ func (r *mutationResolver) galleryUpdate(input models.GalleryUpdateInput, transl
return nil, errors.New("title must not be empty") return nil, errors.New("title must not be empty")
} }
// if gallery is not zip-based, then generate the checksum from the title updatedGallery.Title = models.NewOptionalString(*input.Title)
if !originalGallery.Path.Valid {
checksum := md5.FromString(*input.Title)
updatedGallery.Checksum = &checksum
} }
updatedGallery.Title = &sql.NullString{String: *input.Title, Valid: true} updatedGallery.Details = translator.optionalString(input.Details, "details")
updatedGallery.URL = translator.optionalString(input.URL, "url")
updatedGallery.Date = translator.optionalDate(input.Date, "date")
updatedGallery.Rating = translator.optionalInt(input.Rating, "rating")
updatedGallery.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
}
updatedGallery.Organized = translator.optionalBool(input.Organized, "organized")
if translator.hasField("performer_ids") {
updatedGallery.PerformerIDs, err = translateUpdateIDs(input.PerformerIds, models.RelationshipUpdateModeSet)
if err != nil {
return nil, fmt.Errorf("converting performer ids: %w", err)
}
} }
updatedGallery.Details = translator.nullString(input.Details, "details") if translator.hasField("tag_ids") {
updatedGallery.URL = translator.nullString(input.URL, "url") updatedGallery.TagIDs, err = translateUpdateIDs(input.TagIds, models.RelationshipUpdateModeSet)
updatedGallery.Date = translator.sqliteDate(input.Date, "date") if err != nil {
updatedGallery.Rating = translator.nullInt64(input.Rating, "rating") return nil, fmt.Errorf("converting tag ids: %w", err)
updatedGallery.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id") }
updatedGallery.Organized = input.Organized }
if translator.hasField("scene_ids") {
updatedGallery.SceneIDs, err = translateUpdateIDs(input.SceneIds, models.RelationshipUpdateModeSet)
if err != nil {
return nil, fmt.Errorf("converting scene ids: %w", err)
}
}
// gallery scene is set from the scene only // gallery scene is set from the scene only
gallery, err := qb.UpdatePartial(updatedGallery) gallery, err := qb.UpdatePartial(ctx, galleryID, updatedGallery)
if err != nil { if err != nil {
return nil, err return nil, err
} }
// Save the performers
if translator.hasField("performer_ids") {
if err := r.updateGalleryPerformers(qb, galleryID, input.PerformerIds); err != nil {
return nil, err
}
}
// Save the tags
if translator.hasField("tag_ids") {
if err := r.updateGalleryTags(qb, galleryID, input.TagIds); err != nil {
return nil, err
}
}
// Save the scenes
if translator.hasField("scene_ids") {
if err := r.updateGalleryScenes(qb, galleryID, input.SceneIds); err != nil {
return nil, err
}
}
return gallery, nil return gallery, nil
} }
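Note: galleryUpdate now builds a models.GalleryPartial via models.NewGalleryPartial and per-field optional values, instead of a struct of sql.Null types keyed by ID. The optional types are not shown in this diff; a stand-in capturing the semantics the code relies on — distinguishing "field not provided" from "set to a value" from "explicitly nulled" — as an assumption:

// Illustrative stand-in: an optional field records whether the update touches the
// column at all, so UpdatePartial can skip untouched columns entirely.
type OptionalString struct {
	Value string
	Null  bool // set the column to NULL
	Set   bool // false means "leave the column alone"
}

func NewOptionalString(v string) OptionalString { return OptionalString{Value: v, Set: true} }

// NewGalleryPartial presumably seeds UpdatedAt with the current time, replacing the
// explicit timestamp handling removed above (an assumption, not shown in this diff).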
func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input models.BulkGalleryUpdateInput) ([]*models.Gallery, error) { func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input BulkGalleryUpdateInput) ([]*models.Gallery, error) {
// Populate gallery from the input // Populate gallery from the input
updatedTime := time.Now()
translator := changesetTranslator{ translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx), inputMap: getUpdateInputMap(ctx),
} }
updatedGallery := models.GalleryPartial{ updatedGallery := models.NewGalleryPartial()
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
updatedGallery.Details = translator.optionalString(input.Details, "details")
updatedGallery.URL = translator.optionalString(input.URL, "url")
updatedGallery.Date = translator.optionalDate(input.Date, "date")
updatedGallery.Rating = translator.optionalInt(input.Rating, "rating")
var err error
updatedGallery.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
}
updatedGallery.Organized = translator.optionalBool(input.Organized, "organized")
if translator.hasField("performer_ids") {
updatedGallery.PerformerIDs, err = translateUpdateIDs(input.PerformerIds.Ids, input.PerformerIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting performer ids: %w", err)
}
} }
updatedGallery.Details = translator.nullString(input.Details, "details") if translator.hasField("tag_ids") {
updatedGallery.URL = translator.nullString(input.URL, "url") updatedGallery.TagIDs, err = translateUpdateIDs(input.TagIds.Ids, input.TagIds.Mode)
updatedGallery.Date = translator.sqliteDate(input.Date, "date") if err != nil {
updatedGallery.Rating = translator.nullInt64(input.Rating, "rating") return nil, fmt.Errorf("converting tag ids: %w", err)
updatedGallery.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id") }
updatedGallery.Organized = input.Organized }
if translator.hasField("scene_ids") {
updatedGallery.SceneIDs, err = translateUpdateIDs(input.SceneIds.Ids, input.SceneIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting scene ids: %w", err)
}
}
ret := []*models.Gallery{} ret := []*models.Gallery{}
// Start the transaction and save the galleries // Start the transaction and save the galleries
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Gallery() qb := r.repository.Gallery
for _, galleryIDStr := range input.Ids { for _, galleryIDStr := range input.Ids {
galleryID, _ := strconv.Atoi(galleryIDStr) galleryID, _ := strconv.Atoi(galleryIDStr)
updatedGallery.ID = galleryID
gallery, err := qb.UpdatePartial(updatedGallery) gallery, err := qb.UpdatePartial(ctx, galleryID, updatedGallery)
if err != nil { if err != nil {
return err return err
} }
ret = append(ret, gallery) ret = append(ret, gallery)
// Save the performers
if translator.hasField("performer_ids") {
performerIDs, err := adjustGalleryPerformerIDs(qb, galleryID, *input.PerformerIds)
if err != nil {
return err
}
if err := qb.UpdatePerformers(galleryID, performerIDs); err != nil {
return err
}
}
// Save the tags
if translator.hasField("tag_ids") {
tagIDs, err := adjustGalleryTagIDs(qb, galleryID, *input.TagIds)
if err != nil {
return err
}
if err := qb.UpdateTags(galleryID, tagIDs); err != nil {
return err
}
}
// Save the scenes
if translator.hasField("scene_ids") {
sceneIDs, err := adjustGallerySceneIDs(qb, galleryID, *input.SceneIds)
if err != nil {
return err
}
if err := qb.UpdateScenes(galleryID, sceneIDs); err != nil {
return err
}
}
} }
return nil return nil
@@ -367,31 +305,8 @@ func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input models.B
return newRet, nil return newRet, nil
} }
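Note: both galleryUpdate and BulkGalleryUpdate now delegate relationship changes to translateUpdateIDs plus a RelationshipUpdateMode, replacing the per-type adjust*/Update* helpers deleted below. The helper is not shown in this diff; a sketch consistent with both call shapes (single updates pass RelationshipUpdateModeSet, bulk updates pass the mode from the input):

// Sketch inferred from the call sites; the concrete UpdateIDs type is an assumption.
func translateUpdateIDs(ids []string, mode models.RelationshipUpdateMode) (*models.UpdateIDs, error) {
	idInts, err := stringslice.StringSliceToIntSlice(ids)
	if err != nil {
		return nil, err
	}
	// UpdatePartial interprets the mode: set replaces the list, add/remove adjust it —
	// the work the deleted adjustGallery*IDs helpers used to do in the resolver.
	return &models.UpdateIDs{IDs: idInts, Mode: mode}, nil
}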
func adjustGalleryPerformerIDs(qb models.GalleryReader, galleryID int, ids models.BulkUpdateIds) (ret []int, err error) { type GallerySceneGetter interface {
ret, err = qb.GetPerformerIDs(galleryID) GetSceneIDs(ctx context.Context, galleryID int) ([]int, error)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func adjustGalleryTagIDs(qb models.GalleryReader, galleryID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetTagIDs(galleryID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func adjustGallerySceneIDs(qb models.GalleryReader, galleryID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetSceneIDs(galleryID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
} }
func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.GalleryDestroyInput) (bool, error) { func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.GalleryDestroyInput) (bool, error) {
@@ -403,19 +318,18 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
var galleries []*models.Gallery var galleries []*models.Gallery
var imgsDestroyed []*models.Image var imgsDestroyed []*models.Image
fileDeleter := &image.FileDeleter{ fileDeleter := &image.FileDeleter{
Deleter: *file.NewDeleter(), Deleter: file.NewDeleter(),
Paths: manager.GetInstance().Paths, Paths: manager.GetInstance().Paths,
} }
deleteGenerated := utils.IsTrue(input.DeleteGenerated) deleteGenerated := utils.IsTrue(input.DeleteGenerated)
deleteFile := utils.IsTrue(input.DeleteFile) deleteFile := utils.IsTrue(input.DeleteFile)
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Gallery() qb := r.repository.Gallery
iqb := repo.Image()
for _, id := range galleryIDs { for _, id := range galleryIDs {
gallery, err := qb.Find(id) gallery, err := qb.Find(ctx, id)
if err != nil { if err != nil {
return err return err
} }
@@ -424,57 +338,16 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
return fmt.Errorf("gallery with id %d not found", id) return fmt.Errorf("gallery with id %d not found", id)
} }
if err := gallery.LoadFiles(ctx, qb); err != nil {
return fmt.Errorf("loading files for gallery %d", id)
}
galleries = append(galleries, gallery) galleries = append(galleries, gallery)
// if this is a zip-based gallery, delete the images as well first imgsDestroyed, err = r.galleryService.Destroy(ctx, gallery, fileDeleter, deleteGenerated, deleteFile)
if gallery.Zip {
imgs, err := iqb.FindByGalleryID(id)
if err != nil { if err != nil {
return err return err
} }
for _, img := range imgs {
if err := image.Destroy(img, iqb, fileDeleter, deleteGenerated, false); err != nil {
return err
}
imgsDestroyed = append(imgsDestroyed, img)
}
if deleteFile {
if err := fileDeleter.Files([]string{gallery.Path.String}); err != nil {
return err
}
}
} else if deleteFile {
// Delete image if it is only attached to this gallery
imgs, err := iqb.FindByGalleryID(id)
if err != nil {
return err
}
for _, img := range imgs {
imgGalleries, err := qb.FindByImageID(img.ID)
if err != nil {
return err
}
if len(imgGalleries) == 1 {
if err := image.Destroy(img, iqb, fileDeleter, deleteGenerated, deleteFile); err != nil {
return err
}
imgsDestroyed = append(imgsDestroyed, img)
}
}
// we only want to delete a folder-based gallery if it is empty.
// don't do this with the file deleter
}
if err := qb.Destroy(id); err != nil {
return err
}
} }
return nil return nil
@@ -488,10 +361,11 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
for _, gallery := range galleries { for _, gallery := range galleries {
// don't delete stash library paths // don't delete stash library paths
if utils.IsTrue(input.DeleteFile) && !gallery.Zip && gallery.Path.Valid && !isStashPath(gallery.Path.String) { path := gallery.Path
if deleteFile && path != "" && !isStashPath(path) {
// try to remove the folder - it is possible that it is not empty // try to remove the folder - it is possible that it is not empty
// so swallow the error if present // so swallow the error if present
_ = os.Remove(gallery.Path.String) _ = os.Remove(path)
} }
} }
@@ -499,8 +373,8 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
for _, gallery := range galleries { for _, gallery := range galleries {
r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{ r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
GalleryDestroyInput: input, GalleryDestroyInput: input,
Checksum: gallery.Checksum, Checksum: gallery.Checksum(),
Path: gallery.Path.String, Path: gallery.Path,
}, nil) }, nil)
} }
@@ -526,7 +400,7 @@ func isStashPath(path string) bool {
return false return false
} }
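Note: the inline zip/image cleanup that used to live in GalleryDestroy now sits behind r.galleryService. From the single call above, the service exposes roughly this method; the interface name is illustrative and the types are inferred from how the result is used:

// Inferred from: imgsDestroyed, err = r.galleryService.Destroy(ctx, gallery, fileDeleter, deleteGenerated, deleteFile)
type GalleryDestroyer interface {
	// Destroy removes the gallery, deletes zip contents or orphaned images as needed,
	// and reports which images were destroyed so their post-hooks can run.
	Destroy(ctx context.Context, g *models.Gallery, fileDeleter *image.FileDeleter,
		deleteGenerated, deleteFile bool) ([]*models.Image, error)
}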
func (r *mutationResolver) AddGalleryImages(ctx context.Context, input models.GalleryAddInput) (bool, error) { func (r *mutationResolver) AddGalleryImages(ctx context.Context, input GalleryAddInput) (bool, error) {
galleryID, err := strconv.Atoi(input.GalleryID) galleryID, err := strconv.Atoi(input.GalleryID)
if err != nil { if err != nil {
return false, err return false, err
@@ -537,9 +411,9 @@ func (r *mutationResolver) AddGalleryImages(ctx context.Context, input models.Ga
return false, err return false, err
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Gallery() qb := r.repository.Gallery
gallery, err := qb.Find(galleryID) gallery, err := qb.Find(ctx, galleryID)
if err != nil { if err != nil {
return err return err
} }
@@ -548,17 +422,13 @@ func (r *mutationResolver) AddGalleryImages(ctx context.Context, input models.Ga
return errors.New("gallery not found") return errors.New("gallery not found")
} }
if gallery.Zip { newIDs, err := qb.GetImageIDs(ctx, galleryID)
return errors.New("cannot modify zip gallery images")
}
newIDs, err := qb.GetImageIDs(galleryID)
if err != nil { if err != nil {
return err return err
} }
newIDs = intslice.IntAppendUniques(newIDs, imageIDs) newIDs = intslice.IntAppendUniques(newIDs, imageIDs)
return qb.UpdateImages(galleryID, newIDs) return qb.UpdateImages(ctx, galleryID, newIDs)
}); err != nil { }); err != nil {
return false, err return false, err
} }
@@ -566,7 +436,7 @@ func (r *mutationResolver) AddGalleryImages(ctx context.Context, input models.Ga
return true, nil return true, nil
} }
func (r *mutationResolver) RemoveGalleryImages(ctx context.Context, input models.GalleryRemoveInput) (bool, error) { func (r *mutationResolver) RemoveGalleryImages(ctx context.Context, input GalleryRemoveInput) (bool, error) {
galleryID, err := strconv.Atoi(input.GalleryID) galleryID, err := strconv.Atoi(input.GalleryID)
if err != nil { if err != nil {
return false, err return false, err
@@ -577,9 +447,9 @@ func (r *mutationResolver) RemoveGalleryImages(ctx context.Context, input models
return false, err return false, err
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Gallery() qb := r.repository.Gallery
gallery, err := qb.Find(galleryID) gallery, err := qb.Find(ctx, galleryID)
if err != nil { if err != nil {
return err return err
} }
@@ -588,17 +458,13 @@ func (r *mutationResolver) RemoveGalleryImages(ctx context.Context, input models
return errors.New("gallery not found") return errors.New("gallery not found")
} }
if gallery.Zip { newIDs, err := qb.GetImageIDs(ctx, galleryID)
return errors.New("cannot modify zip gallery images")
}
newIDs, err := qb.GetImageIDs(galleryID)
if err != nil { if err != nil {
return err return err
} }
newIDs = intslice.IntExclude(newIDs, imageIDs) newIDs = intslice.IntExclude(newIDs, imageIDs)
return qb.UpdateImages(galleryID, newIDs) return qb.UpdateImages(ctx, galleryID, newIDs)
}); err != nil { }); err != nil {
return false, err return false, err
} }

View File

@@ -4,7 +4,6 @@ import (
"context" "context"
"fmt" "fmt"
"strconv" "strconv"
"time"
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file"
@@ -16,8 +15,8 @@ import (
) )
func (r *mutationResolver) getImage(ctx context.Context, id int) (ret *models.Image, err error) { func (r *mutationResolver) getImage(ctx context.Context, id int) (ret *models.Image, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Image().Find(id) ret, err = r.repository.Image.Find(ctx, id)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -26,14 +25,14 @@ func (r *mutationResolver) getImage(ctx context.Context, id int) (ret *models.Im
return ret, nil return ret, nil
} }
func (r *mutationResolver) ImageUpdate(ctx context.Context, input models.ImageUpdateInput) (ret *models.Image, err error) { func (r *mutationResolver) ImageUpdate(ctx context.Context, input ImageUpdateInput) (ret *models.Image, err error) {
translator := changesetTranslator{ translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx), inputMap: getUpdateInputMap(ctx),
} }
// Start the transaction and save the image // Start the transaction and save the image
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.imageUpdate(input, translator, repo) ret, err = r.imageUpdate(ctx, input, translator)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -44,17 +43,17 @@ func (r *mutationResolver) ImageUpdate(ctx context.Context, input models.ImageUp
return r.getImage(ctx, ret.ID) return r.getImage(ctx, ret.ID)
} }
func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*models.ImageUpdateInput) (ret []*models.Image, err error) { func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*ImageUpdateInput) (ret []*models.Image, err error) {
inputMaps := getUpdateInputMaps(ctx) inputMaps := getUpdateInputMaps(ctx)
// Start the transaction and save the image // Start the transaction and save the image
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
for i, image := range input { for i, image := range input {
translator := changesetTranslator{ translator := changesetTranslator{
inputMap: inputMaps[i], inputMap: inputMaps[i],
} }
thisImage, err := r.imageUpdate(*image, translator, repo) thisImage, err := r.imageUpdate(ctx, *image, translator)
if err != nil { if err != nil {
return err return err
} }
@@ -86,148 +85,105 @@ func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*models.Ima
return newRet, nil return newRet, nil
} }
func (r *mutationResolver) imageUpdate(input models.ImageUpdateInput, translator changesetTranslator, repo models.Repository) (*models.Image, error) { func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInput, translator changesetTranslator) (*models.Image, error) {
// Populate image from the input // Populate image from the input
imageID, err := strconv.Atoi(input.ID) imageID, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
return nil, err return nil, err
} }
updatedTime := time.Now() updatedImage := models.NewImagePartial()
updatedImage := models.ImagePartial{ updatedImage.Title = translator.optionalString(input.Title, "title")
ID: imageID, updatedImage.Rating = translator.optionalInt(input.Rating, "rating")
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime}, updatedImage.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
}
updatedImage.Title = translator.nullString(input.Title, "title")
updatedImage.Rating = translator.nullInt64(input.Rating, "rating")
updatedImage.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id")
updatedImage.Organized = input.Organized
qb := repo.Image()
image, err := qb.Update(updatedImage)
if err != nil { if err != nil {
return nil, err return nil, fmt.Errorf("converting studio id: %w", err)
} }
updatedImage.Organized = translator.optionalBool(input.Organized, "organized")
if translator.hasField("gallery_ids") { if translator.hasField("gallery_ids") {
if err := r.updateImageGalleries(qb, imageID, input.GalleryIds); err != nil { updatedImage.GalleryIDs, err = translateUpdateIDs(input.GalleryIds, models.RelationshipUpdateModeSet)
return nil, err if err != nil {
return nil, fmt.Errorf("converting gallery ids: %w", err)
} }
} }
// Save the performers
if translator.hasField("performer_ids") { if translator.hasField("performer_ids") {
if err := r.updateImagePerformers(qb, imageID, input.PerformerIds); err != nil { updatedImage.PerformerIDs, err = translateUpdateIDs(input.PerformerIds, models.RelationshipUpdateModeSet)
return nil, err if err != nil {
return nil, fmt.Errorf("converting performer ids: %w", err)
} }
} }
// Save the tags
if translator.hasField("tag_ids") { if translator.hasField("tag_ids") {
if err := r.updateImageTags(qb, imageID, input.TagIds); err != nil { updatedImage.TagIDs, err = translateUpdateIDs(input.TagIds, models.RelationshipUpdateModeSet)
return nil, err if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
} }
} }
qb := r.repository.Image
image, err := qb.UpdatePartial(ctx, imageID, updatedImage)
if err != nil {
return nil, err
}
return image, nil return image, nil
} }
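Note: imageUpdate mirrors the gallery change — translator.optional* helpers replace the nullString/nullInt64 family. Those helpers are defined elsewhere in the PR; a stand-in for one of them, showing the intended behaviour (only emit a change when the GraphQL input actually contained the field) and using the OptionalString fields sketched after the gallery update above, as an assumption:

// Illustrative: the translator checks the raw input map, so a field that was simply
// omitted from the mutation is left untouched, while an explicit null clears it.
func (t changesetTranslator) optionalString(value *string, field string) models.OptionalString {
	if !t.hasField(field) {
		return models.OptionalString{} // not set: UpdatePartial skips this column
	}
	if value == nil {
		return models.OptionalString{Null: true, Set: true} // explicit null
	}
	return models.NewOptionalString(*value)
}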
func (r *mutationResolver) updateImageGalleries(qb models.ImageReaderWriter, imageID int, galleryIDs []string) error { func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageUpdateInput) (ret []*models.Image, err error) {
ids, err := stringslice.StringSliceToIntSlice(galleryIDs)
if err != nil {
return err
}
return qb.UpdateGalleries(imageID, ids)
}
func (r *mutationResolver) updateImagePerformers(qb models.ImageReaderWriter, imageID int, performerIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(performerIDs)
if err != nil {
return err
}
return qb.UpdatePerformers(imageID, ids)
}
func (r *mutationResolver) updateImageTags(qb models.ImageReaderWriter, imageID int, tagsIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(tagsIDs)
if err != nil {
return err
}
return qb.UpdateTags(imageID, ids)
}
func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input models.BulkImageUpdateInput) (ret []*models.Image, err error) {
imageIDs, err := stringslice.StringSliceToIntSlice(input.Ids) imageIDs, err := stringslice.StringSliceToIntSlice(input.Ids)
if err != nil { if err != nil {
return nil, err return nil, err
} }
// Populate image from the input // Populate image from the input
updatedTime := time.Now() updatedImage := models.NewImagePartial()
updatedImage := models.ImagePartial{
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
}
translator := changesetTranslator{ translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx), inputMap: getUpdateInputMap(ctx),
} }
updatedImage.Title = translator.nullString(input.Title, "title") updatedImage.Title = translator.optionalString(input.Title, "title")
updatedImage.Rating = translator.nullInt64(input.Rating, "rating") updatedImage.Rating = translator.optionalInt(input.Rating, "rating")
updatedImage.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id") updatedImage.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
updatedImage.Organized = input.Organized if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
}
updatedImage.Organized = translator.optionalBool(input.Organized, "organized")
if translator.hasField("gallery_ids") {
updatedImage.GalleryIDs, err = translateUpdateIDs(input.GalleryIds.Ids, input.GalleryIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting gallery ids: %w", err)
}
}
if translator.hasField("performer_ids") {
updatedImage.PerformerIDs, err = translateUpdateIDs(input.PerformerIds.Ids, input.PerformerIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting performer ids: %w", err)
}
}
if translator.hasField("tag_ids") {
updatedImage.TagIDs, err = translateUpdateIDs(input.TagIds.Ids, input.TagIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
}
// Start the transaction and save the images // Start the transaction and save the images
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Image() qb := r.repository.Image
for _, imageID := range imageIDs { for _, imageID := range imageIDs {
updatedImage.ID = imageID image, err := qb.UpdatePartial(ctx, imageID, updatedImage)
image, err := qb.Update(updatedImage)
if err != nil { if err != nil {
return err return err
} }
ret = append(ret, image) ret = append(ret, image)
// Save the galleries
if translator.hasField("gallery_ids") {
galleryIDs, err := adjustImageGalleryIDs(qb, imageID, *input.GalleryIds)
if err != nil {
return err
}
if err := qb.UpdateGalleries(imageID, galleryIDs); err != nil {
return err
}
}
// Save the performers
if translator.hasField("performer_ids") {
performerIDs, err := adjustImagePerformerIDs(qb, imageID, *input.PerformerIds)
if err != nil {
return err
}
if err := qb.UpdatePerformers(imageID, performerIDs); err != nil {
return err
}
}
// Save the tags
if translator.hasField("tag_ids") {
tagIDs, err := adjustImageTagIDs(qb, imageID, *input.TagIds)
if err != nil {
return err
}
if err := qb.UpdateTags(imageID, tagIDs); err != nil {
return err
}
}
} }
return nil return nil
@@ -251,33 +207,6 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input models.Bul
return newRet, nil return newRet, nil
} }
func adjustImageGalleryIDs(qb models.ImageReader, imageID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetGalleryIDs(imageID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func adjustImagePerformerIDs(qb models.ImageReader, imageID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetPerformerIDs(imageID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func adjustImageTagIDs(qb models.ImageReader, imageID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetTagIDs(imageID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageDestroyInput) (ret bool, err error) { func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageDestroyInput) (ret bool, err error) {
imageID, err := strconv.Atoi(input.ID) imageID, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
@@ -286,13 +215,11 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
var i *models.Image var i *models.Image
fileDeleter := &image.FileDeleter{ fileDeleter := &image.FileDeleter{
Deleter: *file.NewDeleter(), Deleter: file.NewDeleter(),
Paths: manager.GetInstance().Paths, Paths: manager.GetInstance().Paths,
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Image() i, err = r.repository.Image.Find(ctx, imageID)
i, err = qb.Find(imageID)
if err != nil { if err != nil {
return err return err
} }
@@ -301,7 +228,7 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
return fmt.Errorf("image with id %d not found", imageID) return fmt.Errorf("image with id %d not found", imageID)
} }
return image.Destroy(i, qb, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)) return r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile))
}); err != nil { }); err != nil {
fileDeleter.Rollback() fileDeleter.Rollback()
return false, err return false, err
@@ -328,15 +255,14 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
var images []*models.Image var images []*models.Image
fileDeleter := &image.FileDeleter{ fileDeleter := &image.FileDeleter{
Deleter: *file.NewDeleter(), Deleter: file.NewDeleter(),
Paths: manager.GetInstance().Paths, Paths: manager.GetInstance().Paths,
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Image() qb := r.repository.Image
for _, imageID := range imageIDs { for _, imageID := range imageIDs {
i, err := qb.Find(ctx, imageID)
i, err := qb.Find(imageID)
if err != nil { if err != nil {
return err return err
} }
@@ -347,7 +273,7 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
images = append(images, i) images = append(images, i)
if err := image.Destroy(i, qb, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)); err != nil { if err := r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)); err != nil {
return err return err
} }
} }
@@ -379,10 +305,10 @@ func (r *mutationResolver) ImageIncrementO(ctx context.Context, id string) (ret
return 0, err return 0, err
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Image() qb := r.repository.Image
ret, err = qb.IncrementOCounter(imageID) ret, err = qb.IncrementOCounter(ctx, imageID)
return err return err
}); err != nil { }); err != nil {
return 0, err return 0, err
@@ -397,10 +323,10 @@ func (r *mutationResolver) ImageDecrementO(ctx context.Context, id string) (ret
return 0, err return 0, err
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Image() qb := r.repository.Image
ret, err = qb.DecrementOCounter(imageID) ret, err = qb.DecrementOCounter(ctx, imageID)
return err return err
}); err != nil { }); err != nil {
return 0, err return 0, err
@@ -415,10 +341,10 @@ func (r *mutationResolver) ImageResetO(ctx context.Context, id string) (ret int,
return 0, err return 0, err
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Image() qb := r.repository.Image
ret, err = qb.ResetOCounter(imageID) ret, err = qb.ResetOCounter(ctx, imageID)
return err return err
}); err != nil { }); err != nil {
return 0, err return 0, err

View File

@@ -9,15 +9,14 @@ import (
"sync" "sync"
"time" "time"
"github.com/stashapp/stash/internal/identify"
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
) )
func (r *mutationResolver) MetadataScan(ctx context.Context, input models.ScanMetadataInput) (string, error) { func (r *mutationResolver) MetadataScan(ctx context.Context, input manager.ScanMetadataInput) (string, error) {
jobID, err := manager.GetInstance().Scan(ctx, input) jobID, err := manager.GetInstance().Scan(ctx, input)
if err != nil { if err != nil {
@@ -36,7 +35,7 @@ func (r *mutationResolver) MetadataImport(ctx context.Context) (string, error) {
return strconv.Itoa(jobID), nil return strconv.Itoa(jobID), nil
} }
func (r *mutationResolver) ImportObjects(ctx context.Context, input models.ImportObjectsInput) (string, error) { func (r *mutationResolver) ImportObjects(ctx context.Context, input manager.ImportObjectsInput) (string, error) {
t, err := manager.CreateImportTask(config.GetInstance().GetVideoFileNamingAlgorithm(), input) t, err := manager.CreateImportTask(config.GetInstance().GetVideoFileNamingAlgorithm(), input)
if err != nil { if err != nil {
return "", err return "", err
@@ -56,7 +55,7 @@ func (r *mutationResolver) MetadataExport(ctx context.Context) (string, error) {
return strconv.Itoa(jobID), nil return strconv.Itoa(jobID), nil
} }
func (r *mutationResolver) ExportObjects(ctx context.Context, input models.ExportObjectsInput) (*string, error) { func (r *mutationResolver) ExportObjects(ctx context.Context, input manager.ExportObjectsInput) (*string, error) {
t := manager.CreateExportTask(config.GetInstance().GetVideoFileNamingAlgorithm(), input) t := manager.CreateExportTask(config.GetInstance().GetVideoFileNamingAlgorithm(), input)
var wg sync.WaitGroup var wg sync.WaitGroup
@@ -75,7 +74,7 @@ func (r *mutationResolver) ExportObjects(ctx context.Context, input models.Expor
return nil, nil return nil, nil
} }
func (r *mutationResolver) MetadataGenerate(ctx context.Context, input models.GenerateMetadataInput) (string, error) { func (r *mutationResolver) MetadataGenerate(ctx context.Context, input manager.GenerateMetadataInput) (string, error) {
jobID, err := manager.GetInstance().Generate(ctx, input) jobID, err := manager.GetInstance().Generate(ctx, input)
if err != nil { if err != nil {
@@ -85,19 +84,19 @@ func (r *mutationResolver) MetadataGenerate(ctx context.Context, input models.Ge
return strconv.Itoa(jobID), nil return strconv.Itoa(jobID), nil
} }
func (r *mutationResolver) MetadataAutoTag(ctx context.Context, input models.AutoTagMetadataInput) (string, error) { func (r *mutationResolver) MetadataAutoTag(ctx context.Context, input manager.AutoTagMetadataInput) (string, error) {
jobID := manager.GetInstance().AutoTag(ctx, input) jobID := manager.GetInstance().AutoTag(ctx, input)
return strconv.Itoa(jobID), nil return strconv.Itoa(jobID), nil
} }
func (r *mutationResolver) MetadataIdentify(ctx context.Context, input models.IdentifyMetadataInput) (string, error) { func (r *mutationResolver) MetadataIdentify(ctx context.Context, input identify.Options) (string, error) {
t := manager.CreateIdentifyJob(input) t := manager.CreateIdentifyJob(input)
jobID := manager.GetInstance().JobManager.Add(ctx, "Identifying...", t) jobID := manager.GetInstance().JobManager.Add(ctx, "Identifying...", t)
return strconv.Itoa(jobID), nil return strconv.Itoa(jobID), nil
} }
func (r *mutationResolver) MetadataClean(ctx context.Context, input models.CleanMetadataInput) (string, error) { func (r *mutationResolver) MetadataClean(ctx context.Context, input manager.CleanMetadataInput) (string, error) {
jobID := manager.GetInstance().Clean(ctx, input) jobID := manager.GetInstance().Clean(ctx, input)
return strconv.Itoa(jobID), nil return strconv.Itoa(jobID), nil
} }
@@ -107,10 +106,11 @@ func (r *mutationResolver) MigrateHashNaming(ctx context.Context) (string, error
return strconv.Itoa(jobID), nil return strconv.Itoa(jobID), nil
} }
func (r *mutationResolver) BackupDatabase(ctx context.Context, input models.BackupDatabaseInput) (*string, error) { func (r *mutationResolver) BackupDatabase(ctx context.Context, input BackupDatabaseInput) (*string, error) {
// if download is true, then backup to temporary file and return a link // if download is true, then backup to temporary file and return a link
download := input.Download != nil && *input.Download download := input.Download != nil && *input.Download
mgr := manager.GetInstance() mgr := manager.GetInstance()
database := mgr.Database
var backupPath string var backupPath string
if download { if download {
if err := fsutil.EnsureDir(mgr.Paths.Generated.Downloads); err != nil { if err := fsutil.EnsureDir(mgr.Paths.Generated.Downloads); err != nil {
@@ -127,7 +127,7 @@ func (r *mutationResolver) BackupDatabase(ctx context.Context, input models.Back
backupPath = database.DatabaseBackupPath() backupPath = database.DatabaseBackupPath()
} }
err := database.Backup(database.DB, backupPath) err := database.Backup(backupPath)
if err != nil { if err != nil {
return nil, err return nil, err
} }
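One behavioural note from the BackupDatabase hunk: the database handle is now taken from the manager instance, and Backup no longer needs the raw DB passed in. A hedged sketch of the simplified flow (backupSketch is a hypothetical name; the download branch shown above is omitted):

func backupSketch() error {
    mgr := manager.GetInstance()
    database := mgr.Database                    // DB handle now lives on the manager instance
    backupPath := database.DatabaseBackupPath() // default backup location when not downloading
    // Backup no longer takes database.DB explicitly; the receiver carries the connection.
    return database.Backup(backupPath)
}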

View File

@@ -15,8 +15,8 @@ import (
) )
func (r *mutationResolver) getMovie(ctx context.Context, id int) (ret *models.Movie, err error) { func (r *mutationResolver) getMovie(ctx context.Context, id int) (ret *models.Movie, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Movie().Find(id) ret, err = r.repository.Movie.Find(ctx, id)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -25,7 +25,7 @@ func (r *mutationResolver) getMovie(ctx context.Context, id int) (ret *models.Mo
return ret, nil return ret, nil
} }
func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCreateInput) (*models.Movie, error) { func (r *mutationResolver) MovieCreate(ctx context.Context, input MovieCreateInput) (*models.Movie, error) {
// generate checksum from movie name rather than image // generate checksum from movie name rather than image
checksum := md5.FromString(input.Name) checksum := md5.FromString(input.Name)
@@ -100,16 +100,16 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr
// Start the transaction and save the movie // Start the transaction and save the movie
var movie *models.Movie var movie *models.Movie
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Movie() qb := r.repository.Movie
movie, err = qb.Create(newMovie) movie, err = qb.Create(ctx, newMovie)
if err != nil { if err != nil {
return err return err
} }
// update image table // update image table
if len(frontimageData) > 0 { if len(frontimageData) > 0 {
if err := qb.UpdateImages(movie.ID, frontimageData, backimageData); err != nil { if err := qb.UpdateImages(ctx, movie.ID, frontimageData, backimageData); err != nil {
return err return err
} }
} }
@@ -123,7 +123,7 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr
return r.getMovie(ctx, movie.ID) return r.getMovie(ctx, movie.ID)
} }
func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUpdateInput) (*models.Movie, error) { func (r *mutationResolver) MovieUpdate(ctx context.Context, input MovieUpdateInput) (*models.Movie, error) {
// Populate movie from the input // Populate movie from the input
movieID, err := strconv.Atoi(input.ID) movieID, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
@@ -174,9 +174,9 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
// Start the transaction and save the movie // Start the transaction and save the movie
var movie *models.Movie var movie *models.Movie
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Movie() qb := r.repository.Movie
movie, err = qb.Update(updatedMovie) movie, err = qb.Update(ctx, updatedMovie)
if err != nil { if err != nil {
return err return err
} }
@@ -184,13 +184,13 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
// update image table // update image table
if frontImageIncluded || backImageIncluded { if frontImageIncluded || backImageIncluded {
if !frontImageIncluded { if !frontImageIncluded {
frontimageData, err = qb.GetFrontImage(updatedMovie.ID) frontimageData, err = qb.GetFrontImage(ctx, updatedMovie.ID)
if err != nil { if err != nil {
return err return err
} }
} }
if !backImageIncluded { if !backImageIncluded {
backimageData, err = qb.GetBackImage(updatedMovie.ID) backimageData, err = qb.GetBackImage(ctx, updatedMovie.ID)
if err != nil { if err != nil {
return err return err
} }
@@ -198,7 +198,7 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
if len(frontimageData) == 0 && len(backimageData) == 0 { if len(frontimageData) == 0 && len(backimageData) == 0 {
// both images are being nulled. Destroy them. // both images are being nulled. Destroy them.
if err := qb.DestroyImages(movie.ID); err != nil { if err := qb.DestroyImages(ctx, movie.ID); err != nil {
return err return err
} }
} else { } else {
@@ -208,7 +208,7 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
frontimageData, _ = utils.ProcessImageInput(ctx, models.DefaultMovieImage) frontimageData, _ = utils.ProcessImageInput(ctx, models.DefaultMovieImage)
} }
if err := qb.UpdateImages(movie.ID, frontimageData, backimageData); err != nil { if err := qb.UpdateImages(ctx, movie.ID, frontimageData, backimageData); err != nil {
return err return err
} }
} }
@@ -223,7 +223,7 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
return r.getMovie(ctx, movie.ID) return r.getMovie(ctx, movie.ID)
} }
func (r *mutationResolver) BulkMovieUpdate(ctx context.Context, input models.BulkMovieUpdateInput) ([]*models.Movie, error) { func (r *mutationResolver) BulkMovieUpdate(ctx context.Context, input BulkMovieUpdateInput) ([]*models.Movie, error) {
movieIDs, err := stringslice.StringSliceToIntSlice(input.Ids) movieIDs, err := stringslice.StringSliceToIntSlice(input.Ids)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -245,13 +245,13 @@ func (r *mutationResolver) BulkMovieUpdate(ctx context.Context, input models.Bul
ret := []*models.Movie{} ret := []*models.Movie{}
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Movie() qb := r.repository.Movie
for _, movieID := range movieIDs { for _, movieID := range movieIDs {
updatedMovie.ID = movieID updatedMovie.ID = movieID
existing, err := qb.Find(movieID) existing, err := qb.Find(ctx, movieID)
if err != nil { if err != nil {
return err return err
} }
@@ -260,7 +260,7 @@ func (r *mutationResolver) BulkMovieUpdate(ctx context.Context, input models.Bul
return fmt.Errorf("movie with id %d not found", movieID) return fmt.Errorf("movie with id %d not found", movieID)
} }
movie, err := qb.Update(updatedMovie) movie, err := qb.Update(ctx, updatedMovie)
if err != nil { if err != nil {
return err return err
} }
@@ -288,14 +288,14 @@ func (r *mutationResolver) BulkMovieUpdate(ctx context.Context, input models.Bul
return newRet, nil return newRet, nil
} }
func (r *mutationResolver) MovieDestroy(ctx context.Context, input models.MovieDestroyInput) (bool, error) { func (r *mutationResolver) MovieDestroy(ctx context.Context, input MovieDestroyInput) (bool, error) {
id, err := strconv.Atoi(input.ID) id, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
return false, err return false, err
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
return repo.Movie().Destroy(id) return r.repository.Movie.Destroy(ctx, id)
}); err != nil { }); err != nil {
return false, err return false, err
} }
@@ -311,10 +311,10 @@ func (r *mutationResolver) MoviesDestroy(ctx context.Context, movieIDs []string)
return false, err return false, err
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Movie() qb := r.repository.Movie
for _, id := range ids { for _, id := range ids {
if err := qb.Destroy(id); err != nil { if err := qb.Destroy(ctx, id); err != nil {
return err return err
} }
} }
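Movie create, update, bulk-update and destroy all follow the same shape as the image resolvers above. As an illustrative fragment (not part of the diff), the create path inside MovieCreate now runs the insert and the cover-image write inside one context-scoped transaction:

var movie *models.Movie
if err := r.withTxn(ctx, func(ctx context.Context) error {
    qb := r.repository.Movie
    var err error
    movie, err = qb.Create(ctx, newMovie) // insert the movie row
    if err != nil {
        return err
    }
    // front/back cover images are written inside the same transaction
    if len(frontimageData) > 0 {
        return qb.UpdateImages(ctx, movie.ID, frontimageData, backimageData)
    }
    return nil
}); err != nil {
    return nil, err
}
return r.getMovie(ctx, movie.ID)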

View File

@@ -16,8 +16,8 @@ import (
) )
func (r *mutationResolver) getPerformer(ctx context.Context, id int) (ret *models.Performer, err error) { func (r *mutationResolver) getPerformer(ctx context.Context, id int) (ret *models.Performer, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Performer().Find(id) ret, err = r.repository.Performer.Find(ctx, id)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -26,7 +26,17 @@ func (r *mutationResolver) getPerformer(ctx context.Context, id int) (ret *model
return ret, nil return ret, nil
} }
func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.PerformerCreateInput) (*models.Performer, error) { func stashIDPtrSliceToSlice(v []*models.StashID) []models.StashID {
ret := make([]models.StashID, len(v))
for i, vv := range v {
c := vv
ret[i] = *c
}
return ret
}
func (r *mutationResolver) PerformerCreate(ctx context.Context, input PerformerCreateInput) (*models.Performer, error) {
// generate checksum from performer name rather than image // generate checksum from performer name rather than image
checksum := md5.FromString(input.Name) checksum := md5.FromString(input.Name)
@@ -129,31 +139,31 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
// Start the transaction and save the performer // Start the transaction and save the performer
var performer *models.Performer var performer *models.Performer
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Performer() qb := r.repository.Performer
performer, err = qb.Create(newPerformer) performer, err = qb.Create(ctx, newPerformer)
if err != nil { if err != nil {
return err return err
} }
if len(input.TagIds) > 0 { if len(input.TagIds) > 0 {
if err := r.updatePerformerTags(qb, performer.ID, input.TagIds); err != nil { if err := r.updatePerformerTags(ctx, performer.ID, input.TagIds); err != nil {
return err return err
} }
} }
// update image table // update image table
if len(imageData) > 0 { if len(imageData) > 0 {
if err := qb.UpdateImage(performer.ID, imageData); err != nil { if err := qb.UpdateImage(ctx, performer.ID, imageData); err != nil {
return err return err
} }
} }
// Save the stash_ids // Save the stash_ids
if input.StashIds != nil { if input.StashIds != nil {
stashIDJoins := models.StashIDsFromInput(input.StashIds) stashIDJoins := stashIDPtrSliceToSlice(input.StashIds)
if err := qb.UpdateStashIDs(performer.ID, stashIDJoins); err != nil { if err := qb.UpdateStashIDs(ctx, performer.ID, stashIDJoins); err != nil {
return err return err
} }
} }
@@ -167,7 +177,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
return r.getPerformer(ctx, performer.ID) return r.getPerformer(ctx, performer.ID)
} }
func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.PerformerUpdateInput) (*models.Performer, error) { func (r *mutationResolver) PerformerUpdate(ctx context.Context, input PerformerUpdateInput) (*models.Performer, error) {
// Populate performer from the input // Populate performer from the input
performerID, _ := strconv.Atoi(input.ID) performerID, _ := strconv.Atoi(input.ID)
updatedPerformer := models.PerformerPartial{ updatedPerformer := models.PerformerPartial{
@@ -230,11 +240,11 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
// Start the transaction and save the p // Start the transaction and save the p
var p *models.Performer var p *models.Performer
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Performer() qb := r.repository.Performer
// need to get existing performer // need to get existing performer
existing, err := qb.Find(updatedPerformer.ID) existing, err := qb.Find(ctx, updatedPerformer.ID)
if err != nil { if err != nil {
return err return err
} }
@@ -249,34 +259,34 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
} }
} }
p, err = qb.Update(updatedPerformer) p, err = qb.Update(ctx, updatedPerformer)
if err != nil { if err != nil {
return err return err
} }
// Save the tags // Save the tags
if translator.hasField("tag_ids") { if translator.hasField("tag_ids") {
if err := r.updatePerformerTags(qb, p.ID, input.TagIds); err != nil { if err := r.updatePerformerTags(ctx, p.ID, input.TagIds); err != nil {
return err return err
} }
} }
// update image table // update image table
if len(imageData) > 0 { if len(imageData) > 0 {
if err := qb.UpdateImage(p.ID, imageData); err != nil { if err := qb.UpdateImage(ctx, p.ID, imageData); err != nil {
return err return err
} }
} else if imageIncluded { } else if imageIncluded {
// must be unsetting // must be unsetting
if err := qb.DestroyImage(p.ID); err != nil { if err := qb.DestroyImage(ctx, p.ID); err != nil {
return err return err
} }
} }
// Save the stash_ids // Save the stash_ids
if translator.hasField("stash_ids") { if translator.hasField("stash_ids") {
stashIDJoins := models.StashIDsFromInput(input.StashIds) stashIDJoins := stashIDPtrSliceToSlice(input.StashIds)
if err := qb.UpdateStashIDs(performerID, stashIDJoins); err != nil { if err := qb.UpdateStashIDs(ctx, performerID, stashIDJoins); err != nil {
return err return err
} }
} }
@@ -290,15 +300,15 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
return r.getPerformer(ctx, p.ID) return r.getPerformer(ctx, p.ID)
} }
func (r *mutationResolver) updatePerformerTags(qb models.PerformerReaderWriter, performerID int, tagsIDs []string) error { func (r *mutationResolver) updatePerformerTags(ctx context.Context, performerID int, tagsIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(tagsIDs) ids, err := stringslice.StringSliceToIntSlice(tagsIDs)
if err != nil { if err != nil {
return err return err
} }
return qb.UpdateTags(performerID, ids) return r.repository.Performer.UpdateTags(ctx, performerID, ids)
} }
func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input models.BulkPerformerUpdateInput) ([]*models.Performer, error) { func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPerformerUpdateInput) ([]*models.Performer, error) {
performerIDs, err := stringslice.StringSliceToIntSlice(input.Ids) performerIDs, err := stringslice.StringSliceToIntSlice(input.Ids)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -348,14 +358,14 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input models
ret := []*models.Performer{} ret := []*models.Performer{}
// Start the transaction and save the scene marker // Start the transaction and save the scene marker
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Performer() qb := r.repository.Performer
for _, performerID := range performerIDs { for _, performerID := range performerIDs {
updatedPerformer.ID = performerID updatedPerformer.ID = performerID
// need to get existing performer // need to get existing performer
existing, err := qb.Find(performerID) existing, err := qb.Find(ctx, performerID)
if err != nil { if err != nil {
return err return err
} }
@@ -368,7 +378,7 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input models
return err return err
} }
performer, err := qb.Update(updatedPerformer) performer, err := qb.Update(ctx, updatedPerformer)
if err != nil { if err != nil {
return err return err
} }
@@ -377,12 +387,12 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input models
// Save the tags // Save the tags
if translator.hasField("tag_ids") { if translator.hasField("tag_ids") {
tagIDs, err := adjustTagIDs(qb, performerID, *input.TagIds) tagIDs, err := adjustTagIDs(ctx, qb, performerID, *input.TagIds)
if err != nil { if err != nil {
return err return err
} }
if err := qb.UpdateTags(performerID, tagIDs); err != nil { if err := qb.UpdateTags(ctx, performerID, tagIDs); err != nil {
return err return err
} }
} }
@@ -409,14 +419,14 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input models
return newRet, nil return newRet, nil
} }
func (r *mutationResolver) PerformerDestroy(ctx context.Context, input models.PerformerDestroyInput) (bool, error) { func (r *mutationResolver) PerformerDestroy(ctx context.Context, input PerformerDestroyInput) (bool, error) {
id, err := strconv.Atoi(input.ID) id, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
return false, err return false, err
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
return repo.Performer().Destroy(id) return r.repository.Performer.Destroy(ctx, id)
}); err != nil { }); err != nil {
return false, err return false, err
} }
@@ -432,10 +442,10 @@ func (r *mutationResolver) PerformersDestroy(ctx context.Context, performerIDs [
return false, err return false, err
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Performer() qb := r.repository.Performer
for _, id := range ids { for _, id := range ids {
if err := qb.Destroy(id); err != nil { if err := qb.Destroy(ctx, id); err != nil {
return err return err
} }
} }
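The new stashIDPtrSliceToSlice helper (introduced in this file and reused by the studio resolver later in the diff) replaces models.StashIDsFromInput when converting the GraphQL pointer slice before saving. Copied from the hunk above, with a hypothetical usage note:

func stashIDPtrSliceToSlice(v []*models.StashID) []models.StashID {
    ret := make([]models.StashID, len(v))
    for i, vv := range v {
        c := vv
        ret[i] = *c // copy the pointed-to value into the result slice
    }
    return ret
}

// usage, as in PerformerCreate above:
//   stashIDJoins := stashIDPtrSliceToSlice(input.StashIds)
//   err := qb.UpdateStashIDs(ctx, performer.ID, stashIDJoins)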

View File

@@ -5,10 +5,10 @@ import (
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin"
) )
func (r *mutationResolver) RunPluginTask(ctx context.Context, pluginID string, taskName string, args []*models.PluginArgInput) (string, error) { func (r *mutationResolver) RunPluginTask(ctx context.Context, pluginID string, taskName string, args []*plugin.PluginArgInput) (string, error) {
m := manager.GetInstance() m := manager.GetInstance()
m.RunPluginTask(ctx, pluginID, taskName, args) m.RunPluginTask(ctx, pluginID, taskName, args)
return "todo", nil return "todo", nil

View File

@@ -9,7 +9,7 @@ import (
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
) )
func (r *mutationResolver) SaveFilter(ctx context.Context, input models.SaveFilterInput) (ret *models.SavedFilter, err error) { func (r *mutationResolver) SaveFilter(ctx context.Context, input SaveFilterInput) (ret *models.SavedFilter, err error) {
if strings.TrimSpace(input.Name) == "" { if strings.TrimSpace(input.Name) == "" {
return nil, errors.New("name must be non-empty") return nil, errors.New("name must be non-empty")
} }
@@ -23,17 +23,17 @@ func (r *mutationResolver) SaveFilter(ctx context.Context, input models.SaveFilt
id = &idv id = &idv
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
f := models.SavedFilter{ f := models.SavedFilter{
Mode: input.Mode, Mode: input.Mode,
Name: input.Name, Name: input.Name,
Filter: input.Filter, Filter: input.Filter,
} }
if id == nil { if id == nil {
ret, err = repo.SavedFilter().Create(f) ret, err = r.repository.SavedFilter.Create(ctx, f)
} else { } else {
f.ID = *id f.ID = *id
ret, err = repo.SavedFilter().Update(f) ret, err = r.repository.SavedFilter.Update(ctx, f)
} }
return err return err
}); err != nil { }); err != nil {
@@ -42,14 +42,14 @@ func (r *mutationResolver) SaveFilter(ctx context.Context, input models.SaveFilt
return ret, err return ret, err
} }
func (r *mutationResolver) DestroySavedFilter(ctx context.Context, input models.DestroyFilterInput) (bool, error) { func (r *mutationResolver) DestroySavedFilter(ctx context.Context, input DestroyFilterInput) (bool, error) {
id, err := strconv.Atoi(input.ID) id, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
return false, err return false, err
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
return repo.SavedFilter().Destroy(id) return r.repository.SavedFilter.Destroy(ctx, id)
}); err != nil { }); err != nil {
return false, err return false, err
} }
@@ -57,25 +57,25 @@ func (r *mutationResolver) DestroySavedFilter(ctx context.Context, input models.
return true, nil return true, nil
} }
func (r *mutationResolver) SetDefaultFilter(ctx context.Context, input models.SetDefaultFilterInput) (bool, error) { func (r *mutationResolver) SetDefaultFilter(ctx context.Context, input SetDefaultFilterInput) (bool, error) {
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.SavedFilter() qb := r.repository.SavedFilter
if input.Filter == nil { if input.Filter == nil {
// clearing // clearing
def, err := qb.FindDefault(input.Mode) def, err := qb.FindDefault(ctx, input.Mode)
if err != nil { if err != nil {
return err return err
} }
if def != nil { if def != nil {
return qb.Destroy(def.ID) return qb.Destroy(ctx, def.ID)
} }
return nil return nil
} }
_, err := qb.SetDefault(models.SavedFilter{ _, err := qb.SetDefault(ctx, models.SavedFilter{
Mode: input.Mode, Mode: input.Mode,
Filter: *input.Filter, Filter: *input.Filter,
}) })
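SaveFilter keeps its create-or-update branch, only rebased onto the new transaction shape. Illustrative fragment (names from the hunk above; not part of the diff):

f := models.SavedFilter{Mode: input.Mode, Name: input.Name, Filter: input.Filter}
if id == nil {
    ret, err = r.repository.SavedFilter.Create(ctx, f)
} else {
    f.ID = *id // an explicit id means update in place
    ret, err = r.repository.SavedFilter.Update(ctx, f)
}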

View File

@@ -19,8 +19,8 @@ import (
) )
func (r *mutationResolver) getScene(ctx context.Context, id int) (ret *models.Scene, err error) { func (r *mutationResolver) getScene(ctx context.Context, id int) (ret *models.Scene, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Scene().Find(id) ret, err = r.repository.Scene.Find(ctx, id)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -35,8 +35,8 @@ func (r *mutationResolver) SceneUpdate(ctx context.Context, input models.SceneUp
} }
// Start the transaction and save the scene // Start the transaction and save the scene
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.sceneUpdate(ctx, input, translator, repo) ret, err = r.sceneUpdate(ctx, input, translator)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -50,13 +50,13 @@ func (r *mutationResolver) ScenesUpdate(ctx context.Context, input []*models.Sce
inputMaps := getUpdateInputMaps(ctx) inputMaps := getUpdateInputMaps(ctx)
// Start the transaction and save the scene // Start the transaction and save the scene
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
for i, scene := range input { for i, scene := range input {
translator := changesetTranslator{ translator := changesetTranslator{
inputMap: inputMaps[i], inputMap: inputMaps[i],
} }
thisScene, err := r.sceneUpdate(ctx, *scene, translator, repo) thisScene, err := r.sceneUpdate(ctx, *scene, translator)
ret = append(ret, thisScene) ret = append(ret, thisScene)
if err != nil { if err != nil {
@@ -89,7 +89,7 @@ func (r *mutationResolver) ScenesUpdate(ctx context.Context, input []*models.Sce
return newRet, nil return newRet, nil
} }
func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUpdateInput, translator changesetTranslator, repo models.Repository) (*models.Scene, error) { func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUpdateInput, translator changesetTranslator) (*models.Scene, error) {
// Populate scene from the input // Populate scene from the input
sceneID, err := strconv.Atoi(input.ID) sceneID, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
@@ -98,19 +98,55 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp
var coverImageData []byte var coverImageData []byte
updatedTime := time.Now() updatedScene := models.NewScenePartial()
updatedScene := models.ScenePartial{ updatedScene.Title = translator.optionalString(input.Title, "title")
ID: sceneID, updatedScene.Details = translator.optionalString(input.Details, "details")
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime}, updatedScene.URL = translator.optionalString(input.URL, "url")
updatedScene.Date = translator.optionalDate(input.Date, "date")
updatedScene.Rating = translator.optionalInt(input.Rating, "rating")
updatedScene.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
} }
updatedScene.Title = translator.nullString(input.Title, "title") updatedScene.Organized = translator.optionalBool(input.Organized, "organized")
updatedScene.Details = translator.nullString(input.Details, "details")
updatedScene.URL = translator.nullString(input.URL, "url") if translator.hasField("performer_ids") {
updatedScene.Date = translator.sqliteDate(input.Date, "date") updatedScene.PerformerIDs, err = translateUpdateIDs(input.PerformerIds, models.RelationshipUpdateModeSet)
updatedScene.Rating = translator.nullInt64(input.Rating, "rating") if err != nil {
updatedScene.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id") return nil, fmt.Errorf("converting performer ids: %w", err)
updatedScene.Organized = input.Organized }
}
if translator.hasField("tag_ids") {
updatedScene.TagIDs, err = translateUpdateIDs(input.TagIds, models.RelationshipUpdateModeSet)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
}
if translator.hasField("gallery_ids") {
updatedScene.GalleryIDs, err = translateUpdateIDs(input.GalleryIds, models.RelationshipUpdateModeSet)
if err != nil {
return nil, fmt.Errorf("converting gallery ids: %w", err)
}
}
// Save the movies
if translator.hasField("movies") {
updatedScene.MovieIDs, err = models.UpdateMovieIDsFromInput(input.Movies)
if err != nil {
return nil, fmt.Errorf("converting movie ids: %w", err)
}
}
// Save the stash_ids
if translator.hasField("stash_ids") {
updatedScene.StashIDs = &models.UpdateStashIDs{
StashIDs: input.StashIds,
Mode: models.RelationshipUpdateModeSet,
}
}
if input.CoverImage != nil && *input.CoverImage != "" { if input.CoverImage != nil && *input.CoverImage != "" {
var err error var err error
@@ -122,51 +158,15 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp
// update the cover after updating the scene // update the cover after updating the scene
} }
qb := repo.Scene() qb := r.repository.Scene
s, err := qb.Update(updatedScene) s, err := qb.UpdatePartial(ctx, sceneID, updatedScene)
if err != nil { if err != nil {
return nil, err return nil, err
} }
// update cover table // update cover table
if len(coverImageData) > 0 { if len(coverImageData) > 0 {
if err := qb.UpdateCover(sceneID, coverImageData); err != nil { if err := qb.UpdateCover(ctx, sceneID, coverImageData); err != nil {
return nil, err
}
}
// Save the performers
if translator.hasField("performer_ids") {
if err := r.updateScenePerformers(qb, sceneID, input.PerformerIds); err != nil {
return nil, err
}
}
// Save the movies
if translator.hasField("movies") {
if err := r.updateSceneMovies(qb, sceneID, input.Movies); err != nil {
return nil, err
}
}
// Save the tags
if translator.hasField("tag_ids") {
if err := r.updateSceneTags(qb, sceneID, input.TagIds); err != nil {
return nil, err
}
}
// Save the galleries
if translator.hasField("gallery_ids") {
if err := r.updateSceneGalleries(qb, sceneID, input.GalleryIds); err != nil {
return nil, err
}
}
// Save the stash_ids
if translator.hasField("stash_ids") {
stashIDJoins := models.StashIDsFromInput(input.StashIds)
if err := qb.UpdateStashIDs(sceneID, stashIDJoins); err != nil {
return nil, err return nil, err
} }
} }
@@ -182,144 +182,72 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp
return s, nil return s, nil
} }
func (r *mutationResolver) updateScenePerformers(qb models.SceneReaderWriter, sceneID int, performerIDs []string) error { func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneUpdateInput) ([]*models.Scene, error) {
ids, err := stringslice.StringSliceToIntSlice(performerIDs)
if err != nil {
return err
}
return qb.UpdatePerformers(sceneID, ids)
}
func (r *mutationResolver) updateSceneMovies(qb models.SceneReaderWriter, sceneID int, movies []*models.SceneMovieInput) error {
var movieJoins []models.MoviesScenes
for _, movie := range movies {
movieID, err := strconv.Atoi(movie.MovieID)
if err != nil {
return err
}
movieJoin := models.MoviesScenes{
MovieID: movieID,
}
if movie.SceneIndex != nil {
movieJoin.SceneIndex = sql.NullInt64{
Int64: int64(*movie.SceneIndex),
Valid: true,
}
}
movieJoins = append(movieJoins, movieJoin)
}
return qb.UpdateMovies(sceneID, movieJoins)
}
func (r *mutationResolver) updateSceneTags(qb models.SceneReaderWriter, sceneID int, tagsIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(tagsIDs)
if err != nil {
return err
}
return qb.UpdateTags(sceneID, ids)
}
func (r *mutationResolver) updateSceneGalleries(qb models.SceneReaderWriter, sceneID int, galleryIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(galleryIDs)
if err != nil {
return err
}
return qb.UpdateGalleries(sceneID, ids)
}
func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.BulkSceneUpdateInput) ([]*models.Scene, error) {
sceneIDs, err := stringslice.StringSliceToIntSlice(input.Ids) sceneIDs, err := stringslice.StringSliceToIntSlice(input.Ids)
if err != nil { if err != nil {
return nil, err return nil, err
} }
// Populate scene from the input // Populate scene from the input
updatedTime := time.Now()
translator := changesetTranslator{ translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx), inputMap: getUpdateInputMap(ctx),
} }
updatedScene := models.ScenePartial{ updatedScene := models.NewScenePartial()
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime}, updatedScene.Title = translator.optionalString(input.Title, "title")
updatedScene.Details = translator.optionalString(input.Details, "details")
updatedScene.URL = translator.optionalString(input.URL, "url")
updatedScene.Date = translator.optionalDate(input.Date, "date")
updatedScene.Rating = translator.optionalInt(input.Rating, "rating")
updatedScene.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
} }
updatedScene.Title = translator.nullString(input.Title, "title") updatedScene.Organized = translator.optionalBool(input.Organized, "organized")
updatedScene.Details = translator.nullString(input.Details, "details")
updatedScene.URL = translator.nullString(input.URL, "url") if translator.hasField("performer_ids") {
updatedScene.Date = translator.sqliteDate(input.Date, "date") updatedScene.PerformerIDs, err = translateUpdateIDs(input.PerformerIds.Ids, input.PerformerIds.Mode)
updatedScene.Rating = translator.nullInt64(input.Rating, "rating") if err != nil {
updatedScene.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id") return nil, fmt.Errorf("converting performer ids: %w", err)
updatedScene.Organized = input.Organized }
}
if translator.hasField("tag_ids") {
updatedScene.TagIDs, err = translateUpdateIDs(input.TagIds.Ids, input.TagIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
}
if translator.hasField("gallery_ids") {
updatedScene.GalleryIDs, err = translateUpdateIDs(input.GalleryIds.Ids, input.GalleryIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting gallery ids: %w", err)
}
}
// Save the movies
if translator.hasField("movies") {
updatedScene.MovieIDs, err = translateSceneMovieIDs(*input.MovieIds)
if err != nil {
return nil, fmt.Errorf("converting movie ids: %w", err)
}
}
ret := []*models.Scene{} ret := []*models.Scene{}
// Start the transaction and save the scene marker // Start the transaction and save the scene marker
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Scene() qb := r.repository.Scene
for _, sceneID := range sceneIDs { for _, sceneID := range sceneIDs {
updatedScene.ID = sceneID scene, err := qb.UpdatePartial(ctx, sceneID, updatedScene)
scene, err := qb.Update(updatedScene)
if err != nil { if err != nil {
return err return err
} }
ret = append(ret, scene) ret = append(ret, scene)
// Save the performers
if translator.hasField("performer_ids") {
performerIDs, err := adjustScenePerformerIDs(qb, sceneID, *input.PerformerIds)
if err != nil {
return err
}
if err := qb.UpdatePerformers(sceneID, performerIDs); err != nil {
return err
}
}
// Save the tags
if translator.hasField("tag_ids") {
tagIDs, err := adjustTagIDs(qb, sceneID, *input.TagIds)
if err != nil {
return err
}
if err := qb.UpdateTags(sceneID, tagIDs); err != nil {
return err
}
}
// Save the galleries
if translator.hasField("gallery_ids") {
galleryIDs, err := adjustSceneGalleryIDs(qb, sceneID, *input.GalleryIds)
if err != nil {
return err
}
if err := qb.UpdateGalleries(sceneID, galleryIDs); err != nil {
return err
}
}
// Save the movies
if translator.hasField("movie_ids") {
movies, err := adjustSceneMovieIDs(qb, sceneID, *input.MovieIds)
if err != nil {
return err
}
if err := qb.UpdateMovies(sceneID, movies); err != nil {
return err
}
}
} }
return nil return nil
@@ -343,9 +271,9 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.Bul
return newRet, nil return newRet, nil
} }
func adjustIDs(existingIDs []int, updateIDs models.BulkUpdateIds) []int { func adjustIDs(existingIDs []int, updateIDs BulkUpdateIds) []int {
// if we are setting the ids, just return the ids // if we are setting the ids, just return the ids
if updateIDs.Mode == models.BulkUpdateIDModeSet { if updateIDs.Mode == models.RelationshipUpdateModeSet {
existingIDs = []int{} existingIDs = []int{}
for _, idStr := range updateIDs.Ids { for _, idStr := range updateIDs.Ids {
id, _ := strconv.Atoi(idStr) id, _ := strconv.Atoi(idStr)
@@ -362,7 +290,7 @@ func adjustIDs(existingIDs []int, updateIDs models.BulkUpdateIds) []int {
foundExisting := false foundExisting := false
for idx, existingID := range existingIDs { for idx, existingID := range existingIDs {
if existingID == id { if existingID == id {
if updateIDs.Mode == models.BulkUpdateIDModeRemove { if updateIDs.Mode == models.RelationshipUpdateModeRemove {
// remove from the list // remove from the list
existingIDs = append(existingIDs[:idx], existingIDs[idx+1:]...) existingIDs = append(existingIDs[:idx], existingIDs[idx+1:]...)
} }
@@ -372,7 +300,7 @@ func adjustIDs(existingIDs []int, updateIDs models.BulkUpdateIds) []int {
} }
} }
if !foundExisting && updateIDs.Mode != models.BulkUpdateIDModeRemove { if !foundExisting && updateIDs.Mode != models.RelationshipUpdateModeRemove {
existingIDs = append(existingIDs, id) existingIDs = append(existingIDs, id)
} }
} }
@@ -380,21 +308,12 @@ func adjustIDs(existingIDs []int, updateIDs models.BulkUpdateIds) []int {
return existingIDs return existingIDs
} }
func adjustScenePerformerIDs(qb models.SceneReader, sceneID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetPerformerIDs(sceneID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
type tagIDsGetter interface { type tagIDsGetter interface {
GetTagIDs(id int) ([]int, error) GetTagIDs(ctx context.Context, id int) ([]int, error)
} }
func adjustTagIDs(qb tagIDsGetter, sceneID int, ids models.BulkUpdateIds) (ret []int, err error) { func adjustTagIDs(ctx context.Context, qb tagIDsGetter, sceneID int, ids BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetTagIDs(sceneID) ret, err = qb.GetTagIDs(ctx, sceneID)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -402,57 +321,6 @@ func adjustTagIDs(qb tagIDsGetter, sceneID int, ids models.BulkUpdateIds) (ret [
return adjustIDs(ret, ids), nil return adjustIDs(ret, ids), nil
} }
func adjustSceneGalleryIDs(qb models.SceneReader, sceneID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetGalleryIDs(sceneID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func adjustSceneMovieIDs(qb models.SceneReader, sceneID int, updateIDs models.BulkUpdateIds) ([]models.MoviesScenes, error) {
existingMovies, err := qb.GetMovies(sceneID)
if err != nil {
return nil, err
}
// if we are setting the ids, just return the ids
if updateIDs.Mode == models.BulkUpdateIDModeSet {
existingMovies = []models.MoviesScenes{}
for _, idStr := range updateIDs.Ids {
id, _ := strconv.Atoi(idStr)
existingMovies = append(existingMovies, models.MoviesScenes{MovieID: id})
}
return existingMovies, nil
}
for _, idStr := range updateIDs.Ids {
id, _ := strconv.Atoi(idStr)
// look for the id in the list
foundExisting := false
for idx, existingMovie := range existingMovies {
if existingMovie.MovieID == id {
if updateIDs.Mode == models.BulkUpdateIDModeRemove {
// remove from the list
existingMovies = append(existingMovies[:idx], existingMovies[idx+1:]...)
}
foundExisting = true
break
}
}
if !foundExisting && updateIDs.Mode != models.BulkUpdateIDModeRemove {
existingMovies = append(existingMovies, models.MoviesScenes{MovieID: id})
}
}
return existingMovies, err
}
func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneDestroyInput) (bool, error) { func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneDestroyInput) (bool, error) {
sceneID, err := strconv.Atoi(input.ID) sceneID, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
@@ -463,7 +331,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
var s *models.Scene var s *models.Scene
fileDeleter := &scene.FileDeleter{ fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(), Deleter: file.NewDeleter(),
FileNamingAlgo: fileNamingAlgo, FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths, Paths: manager.GetInstance().Paths,
} }
@@ -471,10 +339,10 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
deleteGenerated := utils.IsTrue(input.DeleteGenerated) deleteGenerated := utils.IsTrue(input.DeleteGenerated)
deleteFile := utils.IsTrue(input.DeleteFile) deleteFile := utils.IsTrue(input.DeleteFile)
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Scene() qb := r.repository.Scene
var err error var err error
s, err = qb.Find(sceneID) s, err = qb.Find(ctx, sceneID)
if err != nil { if err != nil {
return err return err
} }
@@ -486,7 +354,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
// kill any running encoders // kill any running encoders
manager.KillRunningStreams(s, fileNamingAlgo) manager.KillRunningStreams(s, fileNamingAlgo)
return scene.Destroy(s, repo, fileDeleter, deleteGenerated, deleteFile) return r.sceneService.Destroy(ctx, s, fileDeleter, deleteGenerated, deleteFile)
}); err != nil { }); err != nil {
fileDeleter.Rollback() fileDeleter.Rollback()
return false, err return false, err
@@ -498,8 +366,8 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
// call post hook after performing the other actions // call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, s.ID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{ r.hookExecutor.ExecutePostHooks(ctx, s.ID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{
SceneDestroyInput: input, SceneDestroyInput: input,
Checksum: s.Checksum.String, Checksum: s.Checksum,
OSHash: s.OSHash.String, OSHash: s.OSHash,
Path: s.Path, Path: s.Path,
}, nil) }, nil)
@@ -511,7 +379,7 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm() fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
fileDeleter := &scene.FileDeleter{ fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(), Deleter: file.NewDeleter(),
FileNamingAlgo: fileNamingAlgo, FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths, Paths: manager.GetInstance().Paths,
} }
@@ -519,13 +387,13 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
deleteGenerated := utils.IsTrue(input.DeleteGenerated) deleteGenerated := utils.IsTrue(input.DeleteGenerated)
deleteFile := utils.IsTrue(input.DeleteFile) deleteFile := utils.IsTrue(input.DeleteFile)
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Scene() qb := r.repository.Scene
for _, id := range input.Ids { for _, id := range input.Ids {
sceneID, _ := strconv.Atoi(id) sceneID, _ := strconv.Atoi(id)
s, err := qb.Find(sceneID) s, err := qb.Find(ctx, sceneID)
if err != nil { if err != nil {
return err return err
} }
@@ -536,7 +404,7 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
// kill any running encoders // kill any running encoders
manager.KillRunningStreams(s, fileNamingAlgo) manager.KillRunningStreams(s, fileNamingAlgo)
if err := scene.Destroy(s, repo, fileDeleter, deleteGenerated, deleteFile); err != nil { if err := r.sceneService.Destroy(ctx, s, fileDeleter, deleteGenerated, deleteFile); err != nil {
return err return err
} }
} }
@@ -554,8 +422,8 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
// call post hook after performing the other actions // call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneDestroyPost, plugin.ScenesDestroyInput{ r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneDestroyPost, plugin.ScenesDestroyInput{
ScenesDestroyInput: input, ScenesDestroyInput: input,
Checksum: scene.Checksum.String, Checksum: scene.Checksum,
OSHash: scene.OSHash.String, OSHash: scene.OSHash,
Path: scene.Path, Path: scene.Path,
}, nil) }, nil)
} }
@@ -564,8 +432,8 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
} }
func (r *mutationResolver) getSceneMarker(ctx context.Context, id int) (ret *models.SceneMarker, err error) { func (r *mutationResolver) getSceneMarker(ctx context.Context, id int) (ret *models.SceneMarker, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.SceneMarker().Find(id) ret, err = r.repository.SceneMarker.Find(ctx, id)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -574,7 +442,7 @@ func (r *mutationResolver) getSceneMarker(ctx context.Context, id int) (ret *mod
return ret, nil return ret, nil
} }
func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input models.SceneMarkerCreateInput) (*models.SceneMarker, error) { func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input SceneMarkerCreateInput) (*models.SceneMarker, error) {
primaryTagID, err := strconv.Atoi(input.PrimaryTagID) primaryTagID, err := strconv.Atoi(input.PrimaryTagID)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -609,7 +477,7 @@ func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input models.S
return r.getSceneMarker(ctx, ret.ID) return r.getSceneMarker(ctx, ret.ID)
} }
func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input models.SceneMarkerUpdateInput) (*models.SceneMarker, error) { func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input SceneMarkerUpdateInput) (*models.SceneMarker, error) {
// Populate scene marker from the input // Populate scene marker from the input
sceneMarkerID, err := strconv.Atoi(input.ID) sceneMarkerID, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
@@ -661,16 +529,16 @@ func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (b
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm() fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
fileDeleter := &scene.FileDeleter{ fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(), Deleter: file.NewDeleter(),
FileNamingAlgo: fileNamingAlgo, FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths, Paths: manager.GetInstance().Paths,
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.SceneMarker() qb := r.repository.SceneMarker
sqb := repo.Scene() sqb := r.repository.Scene
marker, err := qb.Find(markerID) marker, err := qb.Find(ctx, markerID)
if err != nil { if err != nil {
return err return err
@@ -680,12 +548,12 @@ func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (b
return fmt.Errorf("scene marker with id %d not found", markerID) return fmt.Errorf("scene marker with id %d not found", markerID)
} }
s, err := sqb.Find(int(marker.SceneID.Int64)) s, err := sqb.Find(ctx, int(marker.SceneID.Int64))
if err != nil { if err != nil {
return err return err
} }
return scene.DestroyMarker(s, marker, qb, fileDeleter) return scene.DestroyMarker(ctx, s, marker, qb, fileDeleter)
}); err != nil { }); err != nil {
fileDeleter.Rollback() fileDeleter.Rollback()
return false, err return false, err
@@ -707,32 +575,32 @@ func (r *mutationResolver) changeMarker(ctx context.Context, changeType int, cha
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm() fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
fileDeleter := &scene.FileDeleter{ fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(), Deleter: file.NewDeleter(),
FileNamingAlgo: fileNamingAlgo, FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths, Paths: manager.GetInstance().Paths,
} }
// Start the transaction and save the scene marker // Start the transaction and save the scene marker
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.SceneMarker() qb := r.repository.SceneMarker
sqb := repo.Scene() sqb := r.repository.Scene
var err error var err error
switch changeType { switch changeType {
case create: case create:
sceneMarker, err = qb.Create(changedMarker) sceneMarker, err = qb.Create(ctx, changedMarker)
case update: case update:
// check to see if timestamp was changed // check to see if timestamp was changed
existingMarker, err = qb.Find(changedMarker.ID) existingMarker, err = qb.Find(ctx, changedMarker.ID)
if err != nil { if err != nil {
return err return err
} }
sceneMarker, err = qb.Update(changedMarker) sceneMarker, err = qb.Update(ctx, changedMarker)
if err != nil { if err != nil {
return err return err
} }
s, err = sqb.Find(int(existingMarker.SceneID.Int64)) s, err = sqb.Find(ctx, int(existingMarker.SceneID.Int64))
} }
if err != nil { if err != nil {
return err return err
@@ -749,7 +617,7 @@ func (r *mutationResolver) changeMarker(ctx context.Context, changeType int, cha
// Save the marker tags // Save the marker tags
// If this tag is the primary tag, then let's not add it. // If this tag is the primary tag, then let's not add it.
tagIDs = intslice.IntExclude(tagIDs, []int{changedMarker.PrimaryTagID}) tagIDs = intslice.IntExclude(tagIDs, []int{changedMarker.PrimaryTagID})
return qb.UpdateTags(sceneMarker.ID, tagIDs) return qb.UpdateTags(ctx, sceneMarker.ID, tagIDs)
}); err != nil { }); err != nil {
fileDeleter.Rollback() fileDeleter.Rollback()
return nil, err return nil, err
@@ -766,10 +634,10 @@ func (r *mutationResolver) SceneIncrementO(ctx context.Context, id string) (ret
return 0, err return 0, err
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Scene() qb := r.repository.Scene
ret, err = qb.IncrementOCounter(sceneID) ret, err = qb.IncrementOCounter(ctx, sceneID)
return err return err
}); err != nil { }); err != nil {
return 0, err return 0, err
@@ -784,10 +652,10 @@ func (r *mutationResolver) SceneDecrementO(ctx context.Context, id string) (ret
return 0, err return 0, err
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Scene() qb := r.repository.Scene
ret, err = qb.DecrementOCounter(sceneID) ret, err = qb.DecrementOCounter(ctx, sceneID)
return err return err
}); err != nil { }); err != nil {
return 0, err return 0, err
@@ -802,10 +670,10 @@ func (r *mutationResolver) SceneResetO(ctx context.Context, id string) (ret int,
return 0, err return 0, err
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Scene() qb := r.repository.Scene
ret, err = qb.ResetOCounter(sceneID) ret, err = qb.ResetOCounter(ctx, sceneID)
return err return err
}); err != nil { }); err != nil {
return 0, err return 0, err
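The largest change in this file is how scene updates are persisted: instead of writing each join table through helpers such as updateScenePerformers, the resolver builds a models.ScenePartial of optional fields and relationship updates, then persists everything with a single UpdatePartial call. A condensed, illustrative sketch using only names that appear in the hunks above:

updatedScene := models.NewScenePartial()
updatedScene.Title = translator.optionalString(input.Title, "title")
updatedScene.Rating = translator.optionalInt(input.Rating, "rating")

// relationship ids are translated together with an explicit update mode
if translator.hasField("performer_ids") {
    updatedScene.PerformerIDs, err = translateUpdateIDs(input.PerformerIds, models.RelationshipUpdateModeSet)
    if err != nil {
        return nil, fmt.Errorf("converting performer ids: %w", err)
    }
}

// one UpdatePartial call persists scalar fields and relationships inside the transaction
s, err := r.repository.Scene.UpdatePartial(ctx, sceneID, updatedScene)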

View File

@@ -7,35 +7,43 @@ import (
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scraper/stashbox" "github.com/stashapp/stash/pkg/scraper/stashbox"
) )
func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input models.StashBoxFingerprintSubmissionInput) (bool, error) { func (r *Resolver) stashboxRepository() stashbox.Repository {
return stashbox.Repository{
Scene: r.repository.Scene,
Performer: r.repository.Performer,
Tag: r.repository.Tag,
Studio: r.repository.Studio,
}
}
func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input StashBoxFingerprintSubmissionInput) (bool, error) {
boxes := config.GetInstance().GetStashBoxes() boxes := config.GetInstance().GetStashBoxes()
if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) { if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) {
return false, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex) return false, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex)
} }
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager) client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager, r.stashboxRepository())
return client.SubmitStashBoxFingerprints(ctx, input.SceneIds, boxes[input.StashBoxIndex].Endpoint) return client.SubmitStashBoxFingerprints(ctx, input.SceneIds, boxes[input.StashBoxIndex].Endpoint)
} }
func (r *mutationResolver) StashBoxBatchPerformerTag(ctx context.Context, input models.StashBoxBatchPerformerTagInput) (string, error) { func (r *mutationResolver) StashBoxBatchPerformerTag(ctx context.Context, input manager.StashBoxBatchPerformerTagInput) (string, error) {
jobID := manager.GetInstance().StashBoxBatchPerformerTag(ctx, input) jobID := manager.GetInstance().StashBoxBatchPerformerTag(ctx, input)
return strconv.Itoa(jobID), nil return strconv.Itoa(jobID), nil
} }
func (r *mutationResolver) SubmitStashBoxSceneDraft(ctx context.Context, input models.StashBoxDraftSubmissionInput) (*string, error) { func (r *mutationResolver) SubmitStashBoxSceneDraft(ctx context.Context, input StashBoxDraftSubmissionInput) (*string, error) {
boxes := config.GetInstance().GetStashBoxes() boxes := config.GetInstance().GetStashBoxes()
if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) { if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) {
return nil, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex) return nil, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex)
} }
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager) client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager, r.stashboxRepository())
id, err := strconv.Atoi(input.ID) id, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
@@ -43,29 +51,30 @@ func (r *mutationResolver) SubmitStashBoxSceneDraft(ctx context.Context, input m
} }
var res *string var res *string
err = r.withReadTxn(ctx, func(repo models.ReaderRepository) error { err = r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Scene() qb := r.repository.Scene
scene, err := qb.Find(id) scene, err := qb.Find(ctx, id)
if err != nil { if err != nil {
return err return err
} }
filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm())) filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()))
res, err = client.SubmitSceneDraft(ctx, id, boxes[input.StashBoxIndex].Endpoint, filepath) res, err = client.SubmitSceneDraft(ctx, scene, boxes[input.StashBoxIndex].Endpoint, filepath)
return err return err
}) })
return res, err return res, err
} }
func (r *mutationResolver) SubmitStashBoxPerformerDraft(ctx context.Context, input models.StashBoxDraftSubmissionInput) (*string, error) { func (r *mutationResolver) SubmitStashBoxPerformerDraft(ctx context.Context, input StashBoxDraftSubmissionInput) (*string, error) {
boxes := config.GetInstance().GetStashBoxes() boxes := config.GetInstance().GetStashBoxes()
if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) { if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) {
return nil, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex) return nil, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex)
} }
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager) client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager, r.stashboxRepository())
id, err := strconv.Atoi(input.ID) id, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
@@ -73,9 +82,9 @@ func (r *mutationResolver) SubmitStashBoxPerformerDraft(ctx context.Context, inp
} }
var res *string var res *string
err = r.withReadTxn(ctx, func(repo models.ReaderRepository) error { err = r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Performer() qb := r.repository.Performer
performer, err := qb.Find(id) performer, err := qb.Find(ctx, id)
if err != nil { if err != nil {
return err return err
} }
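The hunks above show the pattern repeated throughout this commit: the transaction callback no longer receives a models.Repository or models.ReaderRepository value. withTxn carries the open transaction in the context, the callback takes only that context, and the resolver reaches its stores through r.repository, passing ctx as the first argument of every store call. Below is a minimal, self-contained sketch of the shape this implies; every type name in it is a placeholder, not a definition from the commit.

    // txnsketch illustrates the context-carrying transaction pattern these
    // resolvers converge on. All types here are placeholders.
    package txnsketch

    import "context"

    type Scene struct{ ID int }

    // SceneStore mirrors the shape of r.repository.Scene after the refactor:
    // every method takes ctx first and resolves the open transaction from it.
    type SceneStore interface {
        Find(ctx context.Context, id int) (*Scene, error)
    }

    // TxnManager mirrors withTxn: the callback gets only a context that
    // carries the transaction, instead of a repository value.
    type TxnManager interface {
        WithTxn(ctx context.Context, fn func(ctx context.Context) error) error
    }

    func findScene(ctx context.Context, txn TxnManager, scenes SceneStore, id int) (*Scene, error) {
        var ret *Scene
        err := txn.WithTxn(ctx, func(ctx context.Context) error {
            var err error
            ret, err = scenes.Find(ctx, id) // store reads the txn from ctx
            return err
        })
        return ret, err
    }

As the hunks show, former withReadTxn call sites are folded into withTxn; a separate read-only variant no longer appears in these resolvers.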

View File

@@ -17,8 +17,8 @@ import (
) )
func (r *mutationResolver) getStudio(ctx context.Context, id int) (ret *models.Studio, err error) { func (r *mutationResolver) getStudio(ctx context.Context, id int) (ret *models.Studio, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Studio().Find(id) ret, err = r.repository.Studio.Find(ctx, id)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -27,7 +27,7 @@ func (r *mutationResolver) getStudio(ctx context.Context, id int) (ret *models.S
return ret, nil return ret, nil
} }
func (r *mutationResolver) StudioCreate(ctx context.Context, input models.StudioCreateInput) (*models.Studio, error) { func (r *mutationResolver) StudioCreate(ctx context.Context, input StudioCreateInput) (*models.Studio, error) {
// generate checksum from studio name rather than image // generate checksum from studio name rather than image
checksum := md5.FromString(input.Name) checksum := md5.FromString(input.Name)
@@ -72,36 +72,36 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
// Start the transaction and save the studio // Start the transaction and save the studio
var s *models.Studio var s *models.Studio
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Studio() qb := r.repository.Studio
var err error var err error
s, err = qb.Create(newStudio) s, err = qb.Create(ctx, newStudio)
if err != nil { if err != nil {
return err return err
} }
// update image table // update image table
if len(imageData) > 0 { if len(imageData) > 0 {
if err := qb.UpdateImage(s.ID, imageData); err != nil { if err := qb.UpdateImage(ctx, s.ID, imageData); err != nil {
return err return err
} }
} }
// Save the stash_ids // Save the stash_ids
if input.StashIds != nil { if input.StashIds != nil {
stashIDJoins := models.StashIDsFromInput(input.StashIds) stashIDJoins := stashIDPtrSliceToSlice(input.StashIds)
if err := qb.UpdateStashIDs(s.ID, stashIDJoins); err != nil { if err := qb.UpdateStashIDs(ctx, s.ID, stashIDJoins); err != nil {
return err return err
} }
} }
if len(input.Aliases) > 0 { if len(input.Aliases) > 0 {
if err := studio.EnsureAliasesUnique(s.ID, input.Aliases, qb); err != nil { if err := studio.EnsureAliasesUnique(ctx, s.ID, input.Aliases, qb); err != nil {
return err return err
} }
if err := qb.UpdateAliases(s.ID, input.Aliases); err != nil { if err := qb.UpdateAliases(ctx, s.ID, input.Aliases); err != nil {
return err return err
} }
} }
@@ -115,7 +115,7 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
return r.getStudio(ctx, s.ID) return r.getStudio(ctx, s.ID)
} }
func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.StudioUpdateInput) (*models.Studio, error) { func (r *mutationResolver) StudioUpdate(ctx context.Context, input StudioUpdateInput) (*models.Studio, error) {
// Populate studio from the input // Populate studio from the input
studioID, err := strconv.Atoi(input.ID) studioID, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
@@ -155,45 +155,45 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
// Start the transaction and save the studio // Start the transaction and save the studio
var s *models.Studio var s *models.Studio
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Studio() qb := r.repository.Studio
if err := manager.ValidateModifyStudio(updatedStudio, qb); err != nil { if err := manager.ValidateModifyStudio(ctx, updatedStudio, qb); err != nil {
return err return err
} }
var err error var err error
s, err = qb.Update(updatedStudio) s, err = qb.Update(ctx, updatedStudio)
if err != nil { if err != nil {
return err return err
} }
// update image table // update image table
if len(imageData) > 0 { if len(imageData) > 0 {
if err := qb.UpdateImage(s.ID, imageData); err != nil { if err := qb.UpdateImage(ctx, s.ID, imageData); err != nil {
return err return err
} }
} else if imageIncluded { } else if imageIncluded {
// must be unsetting // must be unsetting
if err := qb.DestroyImage(s.ID); err != nil { if err := qb.DestroyImage(ctx, s.ID); err != nil {
return err return err
} }
} }
// Save the stash_ids // Save the stash_ids
if translator.hasField("stash_ids") { if translator.hasField("stash_ids") {
stashIDJoins := models.StashIDsFromInput(input.StashIds) stashIDJoins := stashIDPtrSliceToSlice(input.StashIds)
if err := qb.UpdateStashIDs(studioID, stashIDJoins); err != nil { if err := qb.UpdateStashIDs(ctx, studioID, stashIDJoins); err != nil {
return err return err
} }
} }
if translator.hasField("aliases") { if translator.hasField("aliases") {
if err := studio.EnsureAliasesUnique(studioID, input.Aliases, qb); err != nil { if err := studio.EnsureAliasesUnique(ctx, studioID, input.Aliases, qb); err != nil {
return err return err
} }
if err := qb.UpdateAliases(studioID, input.Aliases); err != nil { if err := qb.UpdateAliases(ctx, studioID, input.Aliases); err != nil {
return err return err
} }
} }
@@ -207,14 +207,14 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
return r.getStudio(ctx, s.ID) return r.getStudio(ctx, s.ID)
} }
func (r *mutationResolver) StudioDestroy(ctx context.Context, input models.StudioDestroyInput) (bool, error) { func (r *mutationResolver) StudioDestroy(ctx context.Context, input StudioDestroyInput) (bool, error) {
id, err := strconv.Atoi(input.ID) id, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
return false, err return false, err
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
return repo.Studio().Destroy(id) return r.repository.Studio.Destroy(ctx, id)
}); err != nil { }); err != nil {
return false, err return false, err
} }
@@ -230,10 +230,10 @@ func (r *mutationResolver) StudiosDestroy(ctx context.Context, studioIDs []strin
return false, err return false, err
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Studio() qb := r.repository.Studio
for _, id := range ids { for _, id := range ids {
if err := qb.Destroy(id); err != nil { if err := qb.Destroy(ctx, id); err != nil {
return err return err
} }
} }
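The stash_ids handling swaps models.StashIDsFromInput for stashIDPtrSliceToSlice, presumably converting the generated input's pointer slice into the value slice the store now expects. The commit's actual helper body is not shown in this excerpt; the version below is only an assumed shape, with a placeholder StashID type.

    // stashidsketch: assumed shape of a pointer-slice to value-slice
    // conversion like stashIDPtrSliceToSlice.
    package stashidsketch

    type StashID struct {
        StashID  string
        Endpoint string
    }

    func stashIDPtrSliceToSlice(in []*StashID) []StashID {
        out := make([]StashID, 0, len(in))
        for _, v := range in {
            if v == nil {
                continue // skip nil entries defensively
            }
            out = append(out, *v)
        }
        return out
    }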

View File

@@ -15,8 +15,8 @@ import (
) )
func (r *mutationResolver) getTag(ctx context.Context, id int) (ret *models.Tag, err error) { func (r *mutationResolver) getTag(ctx context.Context, id int) (ret *models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Tag().Find(id) ret, err = r.repository.Tag.Find(ctx, id)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -25,7 +25,7 @@ func (r *mutationResolver) getTag(ctx context.Context, id int) (ret *models.Tag,
return ret, nil return ret, nil
} }
func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreateInput) (*models.Tag, error) { func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) (*models.Tag, error) {
// Populate a new tag from the input // Populate a new tag from the input
currentTime := time.Now() currentTime := time.Now()
newTag := models.Tag{ newTag := models.Tag{
@@ -68,44 +68,44 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreate
// Start the transaction and save the tag // Start the transaction and save the tag
var t *models.Tag var t *models.Tag
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Tag() qb := r.repository.Tag
// ensure name is unique // ensure name is unique
if err := tag.EnsureTagNameUnique(0, newTag.Name, qb); err != nil { if err := tag.EnsureTagNameUnique(ctx, 0, newTag.Name, qb); err != nil {
return err return err
} }
t, err = qb.Create(newTag) t, err = qb.Create(ctx, newTag)
if err != nil { if err != nil {
return err return err
} }
// update image table // update image table
if len(imageData) > 0 { if len(imageData) > 0 {
if err := qb.UpdateImage(t.ID, imageData); err != nil { if err := qb.UpdateImage(ctx, t.ID, imageData); err != nil {
return err return err
} }
} }
if len(input.Aliases) > 0 { if len(input.Aliases) > 0 {
if err := tag.EnsureAliasesUnique(t.ID, input.Aliases, qb); err != nil { if err := tag.EnsureAliasesUnique(ctx, t.ID, input.Aliases, qb); err != nil {
return err return err
} }
if err := qb.UpdateAliases(t.ID, input.Aliases); err != nil { if err := qb.UpdateAliases(ctx, t.ID, input.Aliases); err != nil {
return err return err
} }
} }
if len(parentIDs) > 0 { if len(parentIDs) > 0 {
if err := qb.UpdateParentTags(t.ID, parentIDs); err != nil { if err := qb.UpdateParentTags(ctx, t.ID, parentIDs); err != nil {
return err return err
} }
} }
if len(childIDs) > 0 { if len(childIDs) > 0 {
if err := qb.UpdateChildTags(t.ID, childIDs); err != nil { if err := qb.UpdateChildTags(ctx, t.ID, childIDs); err != nil {
return err return err
} }
} }
@@ -113,7 +113,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreate
// FIXME: This should be called before any changes are made, but // FIXME: This should be called before any changes are made, but
// requires a rewrite of ValidateHierarchy. // requires a rewrite of ValidateHierarchy.
if len(parentIDs) > 0 || len(childIDs) > 0 { if len(parentIDs) > 0 || len(childIDs) > 0 {
if err := tag.ValidateHierarchy(t, parentIDs, childIDs, qb); err != nil { if err := tag.ValidateHierarchy(ctx, t, parentIDs, childIDs, qb); err != nil {
return err return err
} }
} }
@@ -127,7 +127,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreate
return r.getTag(ctx, t.ID) return r.getTag(ctx, t.ID)
} }
func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdateInput) (*models.Tag, error) { func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) (*models.Tag, error) {
// Populate tag from the input // Populate tag from the input
tagID, err := strconv.Atoi(input.ID) tagID, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
@@ -168,11 +168,11 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
// Start the transaction and save the tag // Start the transaction and save the tag
var t *models.Tag var t *models.Tag
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Tag() qb := r.repository.Tag
// ensure name is unique // ensure name is unique
t, err = qb.Find(tagID) t, err = qb.Find(ctx, tagID)
if err != nil { if err != nil {
return err return err
} }
@@ -188,48 +188,48 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
} }
if input.Name != nil && t.Name != *input.Name { if input.Name != nil && t.Name != *input.Name {
if err := tag.EnsureTagNameUnique(tagID, *input.Name, qb); err != nil { if err := tag.EnsureTagNameUnique(ctx, tagID, *input.Name, qb); err != nil {
return err return err
} }
updatedTag.Name = input.Name updatedTag.Name = input.Name
} }
t, err = qb.Update(updatedTag) t, err = qb.Update(ctx, updatedTag)
if err != nil { if err != nil {
return err return err
} }
// update image table // update image table
if len(imageData) > 0 { if len(imageData) > 0 {
if err := qb.UpdateImage(tagID, imageData); err != nil { if err := qb.UpdateImage(ctx, tagID, imageData); err != nil {
return err return err
} }
} else if imageIncluded { } else if imageIncluded {
// must be unsetting // must be unsetting
if err := qb.DestroyImage(tagID); err != nil { if err := qb.DestroyImage(ctx, tagID); err != nil {
return err return err
} }
} }
if translator.hasField("aliases") { if translator.hasField("aliases") {
if err := tag.EnsureAliasesUnique(tagID, input.Aliases, qb); err != nil { if err := tag.EnsureAliasesUnique(ctx, tagID, input.Aliases, qb); err != nil {
return err return err
} }
if err := qb.UpdateAliases(tagID, input.Aliases); err != nil { if err := qb.UpdateAliases(ctx, tagID, input.Aliases); err != nil {
return err return err
} }
} }
if parentIDs != nil { if parentIDs != nil {
if err := qb.UpdateParentTags(tagID, parentIDs); err != nil { if err := qb.UpdateParentTags(ctx, tagID, parentIDs); err != nil {
return err return err
} }
} }
if childIDs != nil { if childIDs != nil {
if err := qb.UpdateChildTags(tagID, childIDs); err != nil { if err := qb.UpdateChildTags(ctx, tagID, childIDs); err != nil {
return err return err
} }
} }
@@ -237,7 +237,7 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
// FIXME: This should be called before any changes are made, but // FIXME: This should be called before any changes are made, but
// requires a rewrite of ValidateHierarchy. // requires a rewrite of ValidateHierarchy.
if parentIDs != nil || childIDs != nil { if parentIDs != nil || childIDs != nil {
if err := tag.ValidateHierarchy(t, parentIDs, childIDs, qb); err != nil { if err := tag.ValidateHierarchy(ctx, t, parentIDs, childIDs, qb); err != nil {
logger.Errorf("Error saving tag: %s", err) logger.Errorf("Error saving tag: %s", err)
return err return err
} }
@@ -252,14 +252,14 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
return r.getTag(ctx, t.ID) return r.getTag(ctx, t.ID)
} }
func (r *mutationResolver) TagDestroy(ctx context.Context, input models.TagDestroyInput) (bool, error) { func (r *mutationResolver) TagDestroy(ctx context.Context, input TagDestroyInput) (bool, error) {
tagID, err := strconv.Atoi(input.ID) tagID, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
return false, err return false, err
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
return repo.Tag().Destroy(tagID) return r.repository.Tag.Destroy(ctx, tagID)
}); err != nil { }); err != nil {
return false, err return false, err
} }
@@ -275,10 +275,10 @@ func (r *mutationResolver) TagsDestroy(ctx context.Context, tagIDs []string) (bo
return false, err return false, err
} }
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Tag() qb := r.repository.Tag
for _, id := range ids { for _, id := range ids {
if err := qb.Destroy(id); err != nil { if err := qb.Destroy(ctx, id); err != nil {
return err return err
} }
} }
@@ -295,7 +295,7 @@ func (r *mutationResolver) TagsDestroy(ctx context.Context, tagIDs []string) (bo
return true, nil return true, nil
} }
func (r *mutationResolver) TagsMerge(ctx context.Context, input models.TagsMergeInput) (*models.Tag, error) { func (r *mutationResolver) TagsMerge(ctx context.Context, input TagsMergeInput) (*models.Tag, error) {
source, err := stringslice.StringSliceToIntSlice(input.Source) source, err := stringslice.StringSliceToIntSlice(input.Source)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -311,11 +311,11 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input models.TagsMerge
} }
var t *models.Tag var t *models.Tag
if err := r.withTxn(ctx, func(repo models.Repository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Tag() qb := r.repository.Tag
var err error var err error
t, err = qb.Find(destination) t, err = qb.Find(ctx, destination)
if err != nil { if err != nil {
return err return err
} }
@@ -324,25 +324,25 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input models.TagsMerge
return fmt.Errorf("Tag with ID %d not found", destination) return fmt.Errorf("Tag with ID %d not found", destination)
} }
parents, children, err := tag.MergeHierarchy(destination, source, qb) parents, children, err := tag.MergeHierarchy(ctx, destination, source, qb)
if err != nil { if err != nil {
return err return err
} }
if err = qb.Merge(source, destination); err != nil { if err = qb.Merge(ctx, source, destination); err != nil {
return err return err
} }
err = qb.UpdateParentTags(destination, parents) err = qb.UpdateParentTags(ctx, destination, parents)
if err != nil { if err != nil {
return err return err
} }
err = qb.UpdateChildTags(destination, children) err = qb.UpdateChildTags(ctx, destination, children)
if err != nil { if err != nil {
return err return err
} }
err = tag.ValidateHierarchy(t, parents, children, qb) err = tag.ValidateHierarchy(ctx, t, parents, children, qb)
if err != nil { if err != nil {
logger.Errorf("Error merging tag: %s", err) logger.Errorf("Error merging tag: %s", err)
return err return err
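Helpers in the studio and tag packages (EnsureAliasesUnique, EnsureTagNameUnique, MergeHierarchy, ValidateHierarchy) gain a leading context argument so they run against the caller's transaction, and the stores they receive take ctx as well. The sketch below shows what that signature change looks like for a name-uniqueness check; the TagQueryer interface and FindByName lookup are simplified placeholders rather than the project's real query path (the test file that follows suggests the real check goes through Query with a name filter).

    // tagsketch: illustrative ctx-threaded domain helper.
    package tagsketch

    import (
        "context"
        "fmt"
    )

    type Tag struct {
        ID   int
        Name string
    }

    type TagQueryer interface {
        FindByName(ctx context.Context, name string) (*Tag, error)
    }

    // EnsureTagNameUnique runs inside whatever transaction the caller's
    // context carries, because the store call forwards ctx.
    func EnsureTagNameUnique(ctx context.Context, id int, name string, qb TagQueryer) error {
        existing, err := qb.FindByName(ctx, name)
        if err != nil {
            return err
        }
        if existing != nil && existing.ID != id {
            return fmt.Errorf("tag name %q already used by tag %d", name, existing.ID)
        }
        return nil
    }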

View File

@@ -5,6 +5,7 @@ import (
"errors" "errors"
"testing" "testing"
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks" "github.com/stashapp/stash/pkg/models/mocks"
"github.com/stashapp/stash/pkg/plugin" "github.com/stashapp/stash/pkg/plugin"
@@ -15,18 +16,28 @@ import (
// TODO - move this into a common area // TODO - move this into a common area
func newResolver() *Resolver { func newResolver() *Resolver {
txnMgr := &mocks.TxnManager{}
return &Resolver{ return &Resolver{
txnManager: mocks.NewTransactionManager(), txnManager: txnMgr,
repository: manager.Repository{
TxnManager: txnMgr,
Tag: &mocks.TagReaderWriter{},
},
hookExecutor: &mockHookExecutor{}, hookExecutor: &mockHookExecutor{},
} }
} }
const tagName = "tagName" const (
const errTagName = "errTagName" tagName = "tagName"
errTagName = "errTagName"
const existingTagID = 1 existingTagID = 1
const existingTagName = "existingTagName" existingTagName = "existingTagName"
const newTagID = 2
newTagID = 2
)
var testCtx = context.Background()
type mockHookExecutor struct{} type mockHookExecutor struct{}
@@ -36,7 +47,7 @@ func (*mockHookExecutor) ExecutePostHooks(ctx context.Context, id int, hookType
func TestTagCreate(t *testing.T) { func TestTagCreate(t *testing.T) {
r := newResolver() r := newResolver()
tagRW := r.txnManager.(*mocks.TransactionManager).Tag().(*mocks.TagReaderWriter) tagRW := r.repository.Tag.(*mocks.TagReaderWriter)
pp := 1 pp := 1
findFilter := &models.FindFilterType{ findFilter := &models.FindFilterType{
@@ -61,25 +72,25 @@ func TestTagCreate(t *testing.T) {
} }
} }
tagRW.On("Query", tagFilterForName(existingTagName), findFilter).Return([]*models.Tag{ tagRW.On("Query", testCtx, tagFilterForName(existingTagName), findFilter).Return([]*models.Tag{
{ {
ID: existingTagID, ID: existingTagID,
Name: existingTagName, Name: existingTagName,
}, },
}, 1, nil).Once() }, 1, nil).Once()
tagRW.On("Query", tagFilterForName(errTagName), findFilter).Return(nil, 0, nil).Once() tagRW.On("Query", testCtx, tagFilterForName(errTagName), findFilter).Return(nil, 0, nil).Once()
tagRW.On("Query", tagFilterForAlias(errTagName), findFilter).Return(nil, 0, nil).Once() tagRW.On("Query", testCtx, tagFilterForAlias(errTagName), findFilter).Return(nil, 0, nil).Once()
expectedErr := errors.New("TagCreate error") expectedErr := errors.New("TagCreate error")
tagRW.On("Create", mock.AnythingOfType("models.Tag")).Return(nil, expectedErr) tagRW.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(nil, expectedErr)
_, err := r.Mutation().TagCreate(context.TODO(), models.TagCreateInput{ _, err := r.Mutation().TagCreate(testCtx, TagCreateInput{
Name: existingTagName, Name: existingTagName,
}) })
assert.NotNil(t, err) assert.NotNil(t, err)
_, err = r.Mutation().TagCreate(context.TODO(), models.TagCreateInput{ _, err = r.Mutation().TagCreate(testCtx, TagCreateInput{
Name: errTagName, Name: errTagName,
}) })
@@ -87,18 +98,18 @@ func TestTagCreate(t *testing.T) {
tagRW.AssertExpectations(t) tagRW.AssertExpectations(t)
r = newResolver() r = newResolver()
tagRW = r.txnManager.(*mocks.TransactionManager).Tag().(*mocks.TagReaderWriter) tagRW = r.repository.Tag.(*mocks.TagReaderWriter)
tagRW.On("Query", tagFilterForName(tagName), findFilter).Return(nil, 0, nil).Once() tagRW.On("Query", testCtx, tagFilterForName(tagName), findFilter).Return(nil, 0, nil).Once()
tagRW.On("Query", tagFilterForAlias(tagName), findFilter).Return(nil, 0, nil).Once() tagRW.On("Query", testCtx, tagFilterForAlias(tagName), findFilter).Return(nil, 0, nil).Once()
newTag := &models.Tag{ newTag := &models.Tag{
ID: newTagID, ID: newTagID,
Name: tagName, Name: tagName,
} }
tagRW.On("Create", mock.AnythingOfType("models.Tag")).Return(newTag, nil) tagRW.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(newTag, nil)
tagRW.On("Find", newTagID).Return(newTag, nil) tagRW.On("Find", testCtx, newTagID).Return(newTag, nil)
tag, err := r.Mutation().TagCreate(context.TODO(), models.TagCreateInput{ tag, err := r.Mutation().TagCreate(testCtx, TagCreateInput{
Name: tagName, Name: tagName,
}) })
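The test now assembles the resolver from a mocks.TxnManager plus a manager.Repository holding per-entity mocks, and every expectation gains testCtx as its first argument because the store methods take a context. Below is a self-contained illustration of that expectation style using testify; FakeTagStore is a stand-in for mocks.TagReaderWriter, not the generated mock itself.

    // mocksketch: ctx-aware testify expectations.
    package mocksketch

    import (
        "context"
        "testing"

        "github.com/stretchr/testify/mock"
    )

    type FakeTagStore struct{ mock.Mock }

    func (f *FakeTagStore) Find(ctx context.Context, id int) (string, error) {
        args := f.Called(ctx, id)
        return args.String(0), args.Error(1)
    }

    func TestCtxAwareExpectation(t *testing.T) {
        ctx := context.Background()
        store := &FakeTagStore{}
        // The expectation now lists the context first, matching the new
        // store signatures.
        store.On("Find", ctx, 2).Return("tagName", nil).Once()

        name, err := store.Find(ctx, 2)
        if err != nil || name != "tagName" {
            t.Fatalf("unexpected result: %q %v", name, err)
        }
        store.AssertExpectations(t)
    }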

View File

@@ -13,13 +13,13 @@ import (
"golang.org/x/text/collate" "golang.org/x/text/collate"
) )
func (r *queryResolver) Configuration(ctx context.Context) (*models.ConfigResult, error) { func (r *queryResolver) Configuration(ctx context.Context) (*ConfigResult, error) {
return makeConfigResult(), nil return makeConfigResult(), nil
} }
func (r *queryResolver) Directory(ctx context.Context, path, locale *string) (*models.Directory, error) { func (r *queryResolver) Directory(ctx context.Context, path, locale *string) (*Directory, error) {
directory := &models.Directory{} directory := &Directory{}
var err error var err error
col := newCollator(locale, collate.IgnoreCase, collate.Numeric) col := newCollator(locale, collate.IgnoreCase, collate.Numeric)
@@ -59,8 +59,8 @@ func getParent(path string) *string {
} }
} }
func makeConfigResult() *models.ConfigResult { func makeConfigResult() *ConfigResult {
return &models.ConfigResult{ return &ConfigResult{
General: makeConfigGeneralResult(), General: makeConfigGeneralResult(),
Interface: makeConfigInterfaceResult(), Interface: makeConfigInterfaceResult(),
Dlna: makeConfigDLNAResult(), Dlna: makeConfigDLNAResult(),
@@ -70,7 +70,7 @@ func makeConfigResult() *models.ConfigResult {
} }
} }
func makeConfigGeneralResult() *models.ConfigGeneralResult { func makeConfigGeneralResult() *ConfigGeneralResult {
config := config.GetInstance() config := config.GetInstance()
logFile := config.GetLogFile() logFile := config.GetLogFile()
@@ -82,7 +82,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
scraperUserAgent := config.GetScraperUserAgent() scraperUserAgent := config.GetScraperUserAgent()
scraperCDPPath := config.GetScraperCDPPath() scraperCDPPath := config.GetScraperCDPPath()
return &models.ConfigGeneralResult{ return &ConfigGeneralResult{
Stashes: config.GetStashPaths(), Stashes: config.GetStashPaths(),
DatabasePath: config.GetDatabasePath(), DatabasePath: config.GetDatabasePath(),
GeneratedPath: config.GetGeneratedPath(), GeneratedPath: config.GetGeneratedPath(),
@@ -125,7 +125,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
} }
} }
func makeConfigInterfaceResult() *models.ConfigInterfaceResult { func makeConfigInterfaceResult() *ConfigInterfaceResult {
config := config.GetInstance() config := config.GetInstance()
menuItems := config.GetMenuItems() menuItems := config.GetMenuItems()
soundOnPreview := config.GetSoundOnPreview() soundOnPreview := config.GetSoundOnPreview()
@@ -149,7 +149,7 @@ func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
// FIXME - misnamed output field means we have redundant fields // FIXME - misnamed output field means we have redundant fields
disableDropdownCreate := config.GetDisableDropdownCreate() disableDropdownCreate := config.GetDisableDropdownCreate()
return &models.ConfigInterfaceResult{ return &ConfigInterfaceResult{
MenuItems: menuItems, MenuItems: menuItems,
SoundOnPreview: &soundOnPreview, SoundOnPreview: &soundOnPreview,
WallShowTitle: &wallShowTitle, WallShowTitle: &wallShowTitle,
@@ -177,10 +177,10 @@ func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
} }
} }
func makeConfigDLNAResult() *models.ConfigDLNAResult { func makeConfigDLNAResult() *ConfigDLNAResult {
config := config.GetInstance() config := config.GetInstance()
return &models.ConfigDLNAResult{ return &ConfigDLNAResult{
ServerName: config.GetDLNAServerName(), ServerName: config.GetDLNAServerName(),
Enabled: config.GetDLNADefaultEnabled(), Enabled: config.GetDLNADefaultEnabled(),
WhitelistedIPs: config.GetDLNADefaultIPWhitelist(), WhitelistedIPs: config.GetDLNADefaultIPWhitelist(),
@@ -188,13 +188,13 @@ func makeConfigDLNAResult() *models.ConfigDLNAResult {
} }
} }
func makeConfigScrapingResult() *models.ConfigScrapingResult { func makeConfigScrapingResult() *ConfigScrapingResult {
config := config.GetInstance() config := config.GetInstance()
scraperUserAgent := config.GetScraperUserAgent() scraperUserAgent := config.GetScraperUserAgent()
scraperCDPPath := config.GetScraperCDPPath() scraperCDPPath := config.GetScraperCDPPath()
return &models.ConfigScrapingResult{ return &ConfigScrapingResult{
ScraperUserAgent: &scraperUserAgent, ScraperUserAgent: &scraperUserAgent,
ScraperCertCheck: config.GetScraperCertCheck(), ScraperCertCheck: config.GetScraperCertCheck(),
ScraperCDPPath: &scraperCDPPath, ScraperCDPPath: &scraperCDPPath,
@@ -202,12 +202,12 @@ func makeConfigScrapingResult() *models.ConfigScrapingResult {
} }
} }
func makeConfigDefaultsResult() *models.ConfigDefaultSettingsResult { func makeConfigDefaultsResult() *ConfigDefaultSettingsResult {
config := config.GetInstance() config := config.GetInstance()
deleteFileDefault := config.GetDeleteFileDefault() deleteFileDefault := config.GetDeleteFileDefault()
deleteGeneratedDefault := config.GetDeleteGeneratedDefault() deleteGeneratedDefault := config.GetDeleteGeneratedDefault()
return &models.ConfigDefaultSettingsResult{ return &ConfigDefaultSettingsResult{
Identify: config.GetDefaultIdentifySettings(), Identify: config.GetDefaultIdentifySettings(),
Scan: config.GetDefaultScanSettings(), Scan: config.GetDefaultScanSettings(),
AutoTag: config.GetDefaultAutoTagSettings(), AutoTag: config.GetDefaultAutoTagSettings(),
@@ -221,8 +221,8 @@ func makeConfigUIResult() map[string]interface{} {
return config.GetInstance().GetUIConfiguration() return config.GetInstance().GetUIConfiguration()
} }
func (r *queryResolver) ValidateStashBoxCredentials(ctx context.Context, input models.StashBoxInput) (*models.StashBoxValidationResult, error) { func (r *queryResolver) ValidateStashBoxCredentials(ctx context.Context, input config.StashBoxInput) (*StashBoxValidationResult, error) {
client := stashbox.NewClient(models.StashBox{Endpoint: input.Endpoint, APIKey: input.APIKey}, r.txnManager) client := stashbox.NewClient(models.StashBox{Endpoint: input.Endpoint, APIKey: input.APIKey}, r.txnManager, r.stashboxRepository())
user, err := client.GetUser(ctx) user, err := client.GetUser(ctx)
valid := user != nil && user.Me != nil valid := user != nil && user.Me != nil
@@ -248,7 +248,7 @@ func (r *queryResolver) ValidateStashBoxCredentials(ctx context.Context, input m
} }
} }
result := models.StashBoxValidationResult{ result := StashBoxValidationResult{
Valid: valid, Valid: valid,
Status: status, Status: status,
} }

View File

@@ -3,10 +3,10 @@ package api
import ( import (
"context" "context"
"github.com/stashapp/stash/internal/dlna"
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/models"
) )
func (r *queryResolver) DlnaStatus(ctx context.Context) (*models.DLNAStatus, error) { func (r *queryResolver) DlnaStatus(ctx context.Context) (*dlna.Status, error) {
return manager.GetInstance().DLNAService.Status(), nil return manager.GetInstance().DLNAService.Status(), nil
} }

View File

@@ -13,8 +13,8 @@ func (r *queryResolver) FindGallery(ctx context.Context, id string) (ret *models
return nil, err return nil, err
} }
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Gallery().Find(idInt) ret, err = r.repository.Gallery.Find(ctx, idInt)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -23,14 +23,14 @@ func (r *queryResolver) FindGallery(ctx context.Context, id string) (ret *models
return ret, nil return ret, nil
} }
func (r *queryResolver) FindGalleries(ctx context.Context, galleryFilter *models.GalleryFilterType, filter *models.FindFilterType) (ret *models.FindGalleriesResultType, err error) { func (r *queryResolver) FindGalleries(ctx context.Context, galleryFilter *models.GalleryFilterType, filter *models.FindFilterType) (ret *FindGalleriesResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
galleries, total, err := repo.Gallery().Query(galleryFilter, filter) galleries, total, err := r.repository.Gallery.Query(ctx, galleryFilter, filter)
if err != nil { if err != nil {
return err return err
} }
ret = &models.FindGalleriesResultType{ ret = &FindGalleriesResultType{
Count: total, Count: total,
Galleries: galleries, Galleries: galleries,
} }

View File

@@ -12,8 +12,8 @@ import (
func (r *queryResolver) FindImage(ctx context.Context, id *string, checksum *string) (*models.Image, error) { func (r *queryResolver) FindImage(ctx context.Context, id *string, checksum *string) (*models.Image, error) {
var image *models.Image var image *models.Image
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Image() qb := r.repository.Image
var err error var err error
if id != nil { if id != nil {
@@ -22,12 +22,20 @@ func (r *queryResolver) FindImage(ctx context.Context, id *string, checksum *str
return err return err
} }
image, err = qb.Find(idInt) image, err = qb.Find(ctx, idInt)
if err != nil { if err != nil {
return err return err
} }
} else if checksum != nil { } else if checksum != nil {
image, err = qb.FindByChecksum(*checksum) var images []*models.Image
images, err = qb.FindByChecksum(ctx, *checksum)
if err != nil {
return err
}
if len(images) > 0 {
image = images[0]
}
} }
return err return err
@@ -38,13 +46,13 @@ func (r *queryResolver) FindImage(ctx context.Context, id *string, checksum *str
return image, nil return image, nil
} }
func (r *queryResolver) FindImages(ctx context.Context, imageFilter *models.ImageFilterType, imageIds []int, filter *models.FindFilterType) (ret *models.FindImagesResultType, err error) { func (r *queryResolver) FindImages(ctx context.Context, imageFilter *models.ImageFilterType, imageIds []int, filter *models.FindFilterType) (ret *FindImagesResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Image() qb := r.repository.Image
fields := graphql.CollectAllFields(ctx) fields := graphql.CollectAllFields(ctx)
result, err := qb.Query(models.ImageQueryOptions{ result, err := qb.Query(ctx, models.ImageQueryOptions{
QueryOptions: models.QueryOptions{ QueryOptions: models.QueryOptions{
FindFilter: filter, FindFilter: filter,
Count: stringslice.StrInclude(fields, "count"), Count: stringslice.StrInclude(fields, "count"),
@@ -57,12 +65,12 @@ func (r *queryResolver) FindImages(ctx context.Context, imageFilter *models.Imag
return err return err
} }
images, err := result.Resolve() images, err := result.Resolve(ctx)
if err != nil { if err != nil {
return err return err
} }
ret = &models.FindImagesResultType{ ret = &FindImagesResultType{
Count: result.Count, Count: result.Count,
Images: images, Images: images,
Megapixels: result.Megapixels, Megapixels: result.Megapixels,
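FindByChecksum now returns a slice rather than a single image, presumably because after the files refactor several files, and therefore several records, can share one checksum; the resolver keeps its single-result contract by taking the first match. A trivial sketch of that selection with a placeholder Image type:

    // imagesketch: first-match selection over a slice-returning lookup.
    package imagesketch

    type Image struct{ ID int }

    func firstImage(images []*Image) *Image {
        if len(images) == 0 {
            return nil
        }
        return images[0]
    }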

View File

@@ -13,8 +13,8 @@ func (r *queryResolver) FindMovie(ctx context.Context, id string) (ret *models.M
return nil, err return nil, err
} }
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Movie().Find(idInt) ret, err = r.repository.Movie.Find(ctx, idInt)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -23,14 +23,14 @@ func (r *queryResolver) FindMovie(ctx context.Context, id string) (ret *models.M
return ret, nil return ret, nil
} }
func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.MovieFilterType, filter *models.FindFilterType) (ret *models.FindMoviesResultType, err error) { func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.MovieFilterType, filter *models.FindFilterType) (ret *FindMoviesResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
movies, total, err := repo.Movie().Query(movieFilter, filter) movies, total, err := r.repository.Movie.Query(ctx, movieFilter, filter)
if err != nil { if err != nil {
return err return err
} }
ret = &models.FindMoviesResultType{ ret = &FindMoviesResultType{
Count: total, Count: total,
Movies: movies, Movies: movies,
} }
@@ -44,8 +44,8 @@ func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.Movi
} }
func (r *queryResolver) AllMovies(ctx context.Context) (ret []*models.Movie, err error) { func (r *queryResolver) AllMovies(ctx context.Context) (ret []*models.Movie, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Movie().All() ret, err = r.repository.Movie.All(ctx)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err

View File

@@ -13,8 +13,8 @@ func (r *queryResolver) FindPerformer(ctx context.Context, id string) (ret *mode
return nil, err return nil, err
} }
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Performer().Find(idInt) ret, err = r.repository.Performer.Find(ctx, idInt)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -23,14 +23,14 @@ func (r *queryResolver) FindPerformer(ctx context.Context, id string) (ret *mode
return ret, nil return ret, nil
} }
func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *models.PerformerFilterType, filter *models.FindFilterType) (ret *models.FindPerformersResultType, err error) { func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *models.PerformerFilterType, filter *models.FindFilterType) (ret *FindPerformersResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
performers, total, err := repo.Performer().Query(performerFilter, filter) performers, total, err := r.repository.Performer.Query(ctx, performerFilter, filter)
if err != nil { if err != nil {
return err return err
} }
ret = &models.FindPerformersResultType{ ret = &FindPerformersResultType{
Count: total, Count: total,
Performers: performers, Performers: performers,
} }
@@ -43,8 +43,8 @@ func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *mod
} }
func (r *queryResolver) AllPerformers(ctx context.Context) (ret []*models.Performer, err error) { func (r *queryResolver) AllPerformers(ctx context.Context) (ret []*models.Performer, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Performer().All() ret, err = r.repository.Performer.All(ctx)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err

View File

@@ -13,8 +13,8 @@ func (r *queryResolver) FindSavedFilter(ctx context.Context, id string) (ret *mo
return nil, err return nil, err
} }
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.SavedFilter().Find(idInt) ret, err = r.repository.SavedFilter.Find(ctx, idInt)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -23,11 +23,11 @@ func (r *queryResolver) FindSavedFilter(ctx context.Context, id string) (ret *mo
} }
func (r *queryResolver) FindSavedFilters(ctx context.Context, mode *models.FilterMode) (ret []*models.SavedFilter, err error) { func (r *queryResolver) FindSavedFilters(ctx context.Context, mode *models.FilterMode) (ret []*models.SavedFilter, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
if mode != nil { if mode != nil {
ret, err = repo.SavedFilter().FindByMode(*mode) ret, err = r.repository.SavedFilter.FindByMode(ctx, *mode)
} else { } else {
ret, err = repo.SavedFilter().All() ret, err = r.repository.SavedFilter.All(ctx)
} }
return err return err
}); err != nil { }); err != nil {
@@ -37,8 +37,8 @@ func (r *queryResolver) FindSavedFilters(ctx context.Context, mode *models.Filte
} }
func (r *queryResolver) FindDefaultFilter(ctx context.Context, mode models.FilterMode) (ret *models.SavedFilter, err error) { func (r *queryResolver) FindDefaultFilter(ctx context.Context, mode models.FilterMode) (ret *models.SavedFilter, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.SavedFilter().FindDefault(mode) ret, err = r.repository.SavedFilter.FindDefault(ctx, mode)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err

View File

@@ -12,20 +12,24 @@ import (
func (r *queryResolver) FindScene(ctx context.Context, id *string, checksum *string) (*models.Scene, error) { func (r *queryResolver) FindScene(ctx context.Context, id *string, checksum *string) (*models.Scene, error) {
var scene *models.Scene var scene *models.Scene
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Scene() qb := r.repository.Scene
var err error var err error
if id != nil { if id != nil {
idInt, err := strconv.Atoi(*id) idInt, err := strconv.Atoi(*id)
if err != nil { if err != nil {
return err return err
} }
scene, err = qb.Find(idInt) scene, err = qb.Find(ctx, idInt)
if err != nil { if err != nil {
return err return err
} }
} else if checksum != nil { } else if checksum != nil {
scene, err = qb.FindByChecksum(*checksum) var scenes []*models.Scene
scenes, err = qb.FindByChecksum(ctx, *checksum)
if len(scenes) > 0 {
scene = scenes[0]
}
} }
return err return err
@@ -36,24 +40,29 @@ func (r *queryResolver) FindScene(ctx context.Context, id *string, checksum *str
return scene, nil return scene, nil
} }
func (r *queryResolver) FindSceneByHash(ctx context.Context, input models.SceneHashInput) (*models.Scene, error) { func (r *queryResolver) FindSceneByHash(ctx context.Context, input SceneHashInput) (*models.Scene, error) {
var scene *models.Scene var scene *models.Scene
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := repo.Scene() qb := r.repository.Scene
var err error
if input.Checksum != nil { if input.Checksum != nil {
scene, err = qb.FindByChecksum(*input.Checksum) scenes, err := qb.FindByChecksum(ctx, *input.Checksum)
if err != nil { if err != nil {
return err return err
} }
if len(scenes) > 0 {
scene = scenes[0]
}
} }
if scene == nil && input.Oshash != nil { if scene == nil && input.Oshash != nil {
scene, err = qb.FindByOSHash(*input.Oshash) scenes, err := qb.FindByOSHash(ctx, *input.Oshash)
if err != nil { if err != nil {
return err return err
} }
if len(scenes) > 0 {
scene = scenes[0]
}
} }
return nil return nil
@@ -64,8 +73,8 @@ func (r *queryResolver) FindSceneByHash(ctx context.Context, input models.SceneH
return scene, nil return scene, nil
} }
func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.SceneFilterType, sceneIDs []int, filter *models.FindFilterType) (ret *models.FindScenesResultType, err error) { func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.SceneFilterType, sceneIDs []int, filter *models.FindFilterType) (ret *FindScenesResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
var scenes []*models.Scene var scenes []*models.Scene
var err error var err error
@@ -73,17 +82,26 @@ func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.Scen
result := &models.SceneQueryResult{} result := &models.SceneQueryResult{}
if len(sceneIDs) > 0 { if len(sceneIDs) > 0 {
scenes, err = repo.Scene().FindMany(sceneIDs) scenes, err = r.repository.Scene.FindMany(ctx, sceneIDs)
if err == nil { if err == nil {
result.Count = len(scenes) result.Count = len(scenes)
for _, s := range scenes { for _, s := range scenes {
result.TotalDuration += s.Duration.Float64 if err = s.LoadPrimaryFile(ctx, r.repository.File); err != nil {
size, _ := strconv.ParseFloat(s.Size.String, 64) break
result.TotalSize += size }
f := s.Files.Primary()
if f == nil {
continue
}
result.TotalDuration += f.Duration
result.TotalSize += float64(f.Size)
} }
} }
} else { } else {
result, err = repo.Scene().Query(models.SceneQueryOptions{ result, err = r.repository.Scene.Query(ctx, models.SceneQueryOptions{
QueryOptions: models.QueryOptions{ QueryOptions: models.QueryOptions{
FindFilter: filter, FindFilter: filter,
Count: stringslice.StrInclude(fields, "count"), Count: stringslice.StrInclude(fields, "count"),
@@ -93,7 +111,7 @@ func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.Scen
TotalSize: stringslice.StrInclude(fields, "filesize"), TotalSize: stringslice.StrInclude(fields, "filesize"),
}) })
if err == nil { if err == nil {
scenes, err = result.Resolve() scenes, err = result.Resolve(ctx)
} }
} }
@@ -101,7 +119,7 @@ func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.Scen
return err return err
} }
ret = &models.FindScenesResultType{ ret = &FindScenesResultType{
Count: result.Count, Count: result.Count,
Scenes: scenes, Scenes: scenes,
Duration: result.TotalDuration, Duration: result.TotalDuration,
@@ -116,8 +134,8 @@ func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.Scen
return ret, nil return ret, nil
} }
func (r *queryResolver) FindScenesByPathRegex(ctx context.Context, filter *models.FindFilterType) (ret *models.FindScenesResultType, err error) { func (r *queryResolver) FindScenesByPathRegex(ctx context.Context, filter *models.FindFilterType) (ret *FindScenesResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
sceneFilter := &models.SceneFilterType{} sceneFilter := &models.SceneFilterType{}
@@ -138,7 +156,7 @@ func (r *queryResolver) FindScenesByPathRegex(ctx context.Context, filter *model
fields := graphql.CollectAllFields(ctx) fields := graphql.CollectAllFields(ctx)
result, err := repo.Scene().Query(models.SceneQueryOptions{ result, err := r.repository.Scene.Query(ctx, models.SceneQueryOptions{
QueryOptions: models.QueryOptions{ QueryOptions: models.QueryOptions{
FindFilter: queryFilter, FindFilter: queryFilter,
Count: stringslice.StrInclude(fields, "count"), Count: stringslice.StrInclude(fields, "count"),
@@ -151,12 +169,12 @@ func (r *queryResolver) FindScenesByPathRegex(ctx context.Context, filter *model
return err return err
} }
scenes, err := result.Resolve() scenes, err := result.Resolve(ctx)
if err != nil { if err != nil {
return err return err
} }
ret = &models.FindScenesResultType{ ret = &FindScenesResultType{
Count: result.Count, Count: result.Count,
Scenes: scenes, Scenes: scenes,
Duration: result.TotalDuration, Duration: result.TotalDuration,
@@ -171,17 +189,23 @@ func (r *queryResolver) FindScenesByPathRegex(ctx context.Context, filter *model
return ret, nil return ret, nil
} }
func (r *queryResolver) ParseSceneFilenames(ctx context.Context, filter *models.FindFilterType, config models.SceneParserInput) (ret *models.SceneParserResultType, err error) { func (r *queryResolver) ParseSceneFilenames(ctx context.Context, filter *models.FindFilterType, config manager.SceneParserInput) (ret *SceneParserResultType, err error) {
parser := manager.NewSceneFilenameParser(filter, config) parser := manager.NewSceneFilenameParser(filter, config)
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
result, count, err := parser.Parse(repo) result, count, err := parser.Parse(ctx, manager.SceneFilenameParserRepository{
Scene: r.repository.Scene,
Performer: r.repository.Performer,
Studio: r.repository.Studio,
Movie: r.repository.Movie,
Tag: r.repository.Tag,
})
if err != nil { if err != nil {
return err return err
} }
ret = &models.SceneParserResultType{ ret = &SceneParserResultType{
Count: count, Count: count,
Results: result, Results: result,
} }
@@ -199,8 +223,8 @@ func (r *queryResolver) FindDuplicateScenes(ctx context.Context, distance *int)
if distance != nil { if distance != nil {
dist = *distance dist = *distance
} }
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Scene().FindDuplicates(dist) ret, err = r.repository.Scene.FindDuplicates(ctx, dist)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
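Duration and size are no longer read from columns on the scene row (previously s.Duration.Float64 and s.Size.String); they come from the scene's primary file, which must be loaded first via LoadPrimaryFile. The sketch below mirrors the aggregation the FindScenes hunk performs once those fields live on the file; the types carry only the fields the calculation needs and are not the project's definitions.

    // scenesketch: summing duration and size from loaded primary files.
    package scenesketch

    type VideoFile struct {
        Duration float64 // seconds
        Size     int64   // bytes
    }

    type Scene struct {
        primary *VideoFile
    }

    func (s *Scene) PrimaryFile() *VideoFile { return s.primary }

    func totals(scenes []*Scene) (duration, size float64) {
        for _, s := range scenes {
            f := s.PrimaryFile()
            if f == nil {
                continue // scenes with no file contribute nothing
            }
            duration += f.Duration
            size += float64(f.Size)
        }
        return duration, size
    }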

View File

@@ -6,13 +6,13 @@ import (
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
) )
func (r *queryResolver) FindSceneMarkers(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, filter *models.FindFilterType) (ret *models.FindSceneMarkersResultType, err error) { func (r *queryResolver) FindSceneMarkers(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, filter *models.FindFilterType) (ret *FindSceneMarkersResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
sceneMarkers, total, err := repo.SceneMarker().Query(sceneMarkerFilter, filter) sceneMarkers, total, err := r.repository.SceneMarker.Query(ctx, sceneMarkerFilter, filter)
if err != nil { if err != nil {
return err return err
} }
ret = &models.FindSceneMarkersResultType{ ret = &FindSceneMarkersResultType{
Count: total, Count: total,
SceneMarkers: sceneMarkers, SceneMarkers: sceneMarkers,
} }

View File

@@ -13,9 +13,9 @@ func (r *queryResolver) FindStudio(ctx context.Context, id string) (ret *models.
return nil, err return nil, err
} }
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
var err error var err error
ret, err = repo.Studio().Find(idInt) ret, err = r.repository.Studio.Find(ctx, idInt)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -24,14 +24,14 @@ func (r *queryResolver) FindStudio(ctx context.Context, id string) (ret *models.
return ret, nil return ret, nil
} }
func (r *queryResolver) FindStudios(ctx context.Context, studioFilter *models.StudioFilterType, filter *models.FindFilterType) (ret *models.FindStudiosResultType, err error) { func (r *queryResolver) FindStudios(ctx context.Context, studioFilter *models.StudioFilterType, filter *models.FindFilterType) (ret *FindStudiosResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
studios, total, err := repo.Studio().Query(studioFilter, filter) studios, total, err := r.repository.Studio.Query(ctx, studioFilter, filter)
if err != nil { if err != nil {
return err return err
} }
ret = &models.FindStudiosResultType{ ret = &FindStudiosResultType{
Count: total, Count: total,
Studios: studios, Studios: studios,
} }
@@ -45,8 +45,8 @@ func (r *queryResolver) FindStudios(ctx context.Context, studioFilter *models.St
} }
func (r *queryResolver) AllStudios(ctx context.Context) (ret []*models.Studio, err error) { func (r *queryResolver) AllStudios(ctx context.Context) (ret []*models.Studio, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Studio().All() ret, err = r.repository.Studio.All(ctx)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err

View File

@@ -13,8 +13,8 @@ func (r *queryResolver) FindTag(ctx context.Context, id string) (ret *models.Tag
return nil, err return nil, err
} }
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Tag().Find(idInt) ret, err = r.repository.Tag.Find(ctx, idInt)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -23,14 +23,14 @@ func (r *queryResolver) FindTag(ctx context.Context, id string) (ret *models.Tag
return ret, nil return ret, nil
} }
func (r *queryResolver) FindTags(ctx context.Context, tagFilter *models.TagFilterType, filter *models.FindFilterType) (ret *models.FindTagsResultType, err error) { func (r *queryResolver) FindTags(ctx context.Context, tagFilter *models.TagFilterType, filter *models.FindFilterType) (ret *FindTagsResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
tags, total, err := repo.Tag().Query(tagFilter, filter) tags, total, err := r.repository.Tag.Query(ctx, tagFilter, filter)
if err != nil { if err != nil {
return err return err
} }
ret = &models.FindTagsResultType{ ret = &FindTagsResultType{
Count: total, Count: total,
Tags: tags, Tags: tags,
} }
@@ -44,8 +44,8 @@ func (r *queryResolver) FindTags(ctx context.Context, tagFilter *models.TagFilte
} }
func (r *queryResolver) AllTags(ctx context.Context) (ret []*models.Tag, err error) { func (r *queryResolver) AllTags(ctx context.Context) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = repo.Tag().All() ret, err = r.repository.Tag.All(ctx)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err

View File

@@ -6,13 +6,12 @@ import (
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/job"
"github.com/stashapp/stash/pkg/models"
) )
func (r *queryResolver) JobQueue(ctx context.Context) ([]*models.Job, error) { func (r *queryResolver) JobQueue(ctx context.Context) ([]*Job, error) {
queue := manager.GetInstance().JobManager.GetQueue() queue := manager.GetInstance().JobManager.GetQueue()
var ret []*models.Job var ret []*Job
for _, j := range queue { for _, j := range queue {
ret = append(ret, jobToJobModel(j)) ret = append(ret, jobToJobModel(j))
} }
@@ -20,7 +19,7 @@ func (r *queryResolver) JobQueue(ctx context.Context) ([]*models.Job, error) {
return ret, nil return ret, nil
} }
func (r *queryResolver) FindJob(ctx context.Context, input models.FindJobInput) (*models.Job, error) { func (r *queryResolver) FindJob(ctx context.Context, input FindJobInput) (*Job, error) {
jobID, err := strconv.Atoi(input.ID) jobID, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -33,10 +32,10 @@ func (r *queryResolver) FindJob(ctx context.Context, input models.FindJobInput)
return jobToJobModel(*j), nil return jobToJobModel(*j), nil
} }
func jobToJobModel(j job.Job) *models.Job { func jobToJobModel(j job.Job) *Job {
ret := &models.Job{ ret := &Job{
ID: strconv.Itoa(j.ID), ID: strconv.Itoa(j.ID),
Status: models.JobStatus(j.Status), Status: JobStatus(j.Status),
Description: j.Description, Description: j.Description,
SubTasks: j.Details, SubTasks: j.Details,
StartTime: j.StartTime, StartTime: j.StartTime,

View File

@@ -4,16 +4,15 @@ import (
"context" "context"
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/models"
) )
func (r *queryResolver) Logs(ctx context.Context) ([]*models.LogEntry, error) { func (r *queryResolver) Logs(ctx context.Context) ([]*LogEntry, error) {
logger := manager.GetInstance().Logger logger := manager.GetInstance().Logger
logCache := logger.GetLogCache() logCache := logger.GetLogCache()
ret := make([]*models.LogEntry, len(logCache)) ret := make([]*LogEntry, len(logCache))
for i, entry := range logCache { for i, entry := range logCache {
ret[i] = &models.LogEntry{ ret[i] = &LogEntry{
Time: entry.Time, Time: entry.Time,
Level: getLogLevel(entry.Type), Level: getLogLevel(entry.Type),
Message: entry.Message, Message: entry.Message,

View File

@@ -4,9 +4,8 @@ import (
"context" "context"
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/models"
) )
func (r *queryResolver) SystemStatus(ctx context.Context) (*models.SystemStatus, error) { func (r *queryResolver) SystemStatus(ctx context.Context) (*manager.SystemStatus, error) {
return manager.GetInstance().GetSystemStatus(), nil return manager.GetInstance().GetSystemStatus(), nil
} }

View File

@@ -4,13 +4,13 @@ import (
"context" "context"
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin"
) )
func (r *queryResolver) Plugins(ctx context.Context) ([]*models.Plugin, error) { func (r *queryResolver) Plugins(ctx context.Context) ([]*plugin.Plugin, error) {
return manager.GetInstance().PluginCache.ListPlugins(), nil return manager.GetInstance().PluginCache.ListPlugins(), nil
} }
func (r *queryResolver) PluginTasks(ctx context.Context) ([]*models.PluginTask, error) { func (r *queryResolver) PluginTasks(ctx context.Context) ([]*plugin.PluginTask, error) {
return manager.GetInstance().PluginCache.ListPluginTasks(), nil return manager.GetInstance().PluginCache.ListPluginTasks(), nil
} }

View File

@@ -11,13 +11,18 @@ import (
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
) )
func (r *queryResolver) SceneStreams(ctx context.Context, id *string) ([]*models.SceneStreamEndpoint, error) { func (r *queryResolver) SceneStreams(ctx context.Context, id *string) ([]*manager.SceneStreamEndpoint, error) {
// find the scene // find the scene
var scene *models.Scene var scene *models.Scene
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
idInt, _ := strconv.Atoi(*id) idInt, _ := strconv.Atoi(*id)
var err error var err error
scene, err = repo.Scene().Find(idInt) scene, err = r.repository.Scene.Find(ctx, idInt)
if scene != nil {
err = scene.LoadPrimaryFile(ctx, r.repository.File)
}
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
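SceneStreams likewise loads the primary file inside the same transaction before building stream endpoints, since the stream information now depends on file data. The sketch below is only an assumed shape for such an explicit, idempotent load step; the commit's real LoadPrimaryFile implementation is not shown in this excerpt.

    // loadsketch: assumed shape of an explicit relationship load.
    package loadsketch

    import "context"

    type VideoFile struct{ Path string }

    type FileStore interface {
        FindPrimaryForScene(ctx context.Context, sceneID int) (*VideoFile, error)
    }

    type Scene struct {
        ID      int
        primary *VideoFile
    }

    func (s *Scene) LoadPrimaryFile(ctx context.Context, files FileStore) error {
        if s.primary != nil {
            return nil // already loaded
        }
        f, err := files.FindPrimaryForScene(ctx, s.ID)
        if err != nil {
            return err
        }
        s.primary = f
        return nil
    }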

View File

@@ -17,13 +17,13 @@ import (
"github.com/stashapp/stash/pkg/sliceutil/stringslice" "github.com/stashapp/stash/pkg/sliceutil/stringslice"
) )
func (r *queryResolver) ScrapeURL(ctx context.Context, url string, ty models.ScrapeContentType) (models.ScrapedContent, error) { func (r *queryResolver) ScrapeURL(ctx context.Context, url string, ty scraper.ScrapeContentType) (scraper.ScrapedContent, error) {
return r.scraperCache().ScrapeURL(ctx, url, ty) return r.scraperCache().ScrapeURL(ctx, url, ty)
} }
// deprecated // deprecated
func (r *queryResolver) ScrapeFreeonesPerformerList(ctx context.Context, query string) ([]string, error) { func (r *queryResolver) ScrapeFreeonesPerformerList(ctx context.Context, query string) ([]string, error) {
content, err := r.scraperCache().ScrapeName(ctx, scraper.FreeonesScraperID, query, models.ScrapeContentTypePerformer) content, err := r.scraperCache().ScrapeName(ctx, scraper.FreeonesScraperID, query, scraper.ScrapeContentTypePerformer)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -44,24 +44,24 @@ func (r *queryResolver) ScrapeFreeonesPerformerList(ctx context.Context, query s
return ret, nil return ret, nil
} }
func (r *queryResolver) ListScrapers(ctx context.Context, types []models.ScrapeContentType) ([]*models.Scraper, error) { func (r *queryResolver) ListScrapers(ctx context.Context, types []scraper.ScrapeContentType) ([]*scraper.Scraper, error) {
return r.scraperCache().ListScrapers(types), nil return r.scraperCache().ListScrapers(types), nil
} }
func (r *queryResolver) ListPerformerScrapers(ctx context.Context) ([]*models.Scraper, error) { func (r *queryResolver) ListPerformerScrapers(ctx context.Context) ([]*scraper.Scraper, error) {
return r.scraperCache().ListScrapers([]models.ScrapeContentType{models.ScrapeContentTypePerformer}), nil return r.scraperCache().ListScrapers([]scraper.ScrapeContentType{scraper.ScrapeContentTypePerformer}), nil
} }
func (r *queryResolver) ListSceneScrapers(ctx context.Context) ([]*models.Scraper, error) { func (r *queryResolver) ListSceneScrapers(ctx context.Context) ([]*scraper.Scraper, error) {
return r.scraperCache().ListScrapers([]models.ScrapeContentType{models.ScrapeContentTypeScene}), nil return r.scraperCache().ListScrapers([]scraper.ScrapeContentType{scraper.ScrapeContentTypeScene}), nil
} }
func (r *queryResolver) ListGalleryScrapers(ctx context.Context) ([]*models.Scraper, error) { func (r *queryResolver) ListGalleryScrapers(ctx context.Context) ([]*scraper.Scraper, error) {
return r.scraperCache().ListScrapers([]models.ScrapeContentType{models.ScrapeContentTypeGallery}), nil return r.scraperCache().ListScrapers([]scraper.ScrapeContentType{scraper.ScrapeContentTypeGallery}), nil
} }
func (r *queryResolver) ListMovieScrapers(ctx context.Context) ([]*models.Scraper, error) { func (r *queryResolver) ListMovieScrapers(ctx context.Context) ([]*scraper.Scraper, error) {
return r.scraperCache().ListScrapers([]models.ScrapeContentType{models.ScrapeContentTypeMovie}), nil return r.scraperCache().ListScrapers([]scraper.ScrapeContentType{scraper.ScrapeContentTypeMovie}), nil
} }
func (r *queryResolver) ScrapePerformerList(ctx context.Context, scraperID string, query string) ([]*models.ScrapedPerformer, error) { func (r *queryResolver) ScrapePerformerList(ctx context.Context, scraperID string, query string) ([]*models.ScrapedPerformer, error) {
@@ -69,7 +69,7 @@ func (r *queryResolver) ScrapePerformerList(ctx context.Context, scraperID strin
return nil, nil return nil, nil
} }
content, err := r.scraperCache().ScrapeName(ctx, scraperID, query, models.ScrapeContentTypePerformer) content, err := r.scraperCache().ScrapeName(ctx, scraperID, query, scraper.ScrapeContentTypePerformer)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -77,7 +77,7 @@ func (r *queryResolver) ScrapePerformerList(ctx context.Context, scraperID strin
return marshalScrapedPerformers(content) return marshalScrapedPerformers(content)
} }
func (r *queryResolver) ScrapePerformer(ctx context.Context, scraperID string, scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) { func (r *queryResolver) ScrapePerformer(ctx context.Context, scraperID string, scrapedPerformer scraper.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
content, err := r.scraperCache().ScrapeFragment(ctx, scraperID, scraper.Input{Performer: &scrapedPerformer}) content, err := r.scraperCache().ScrapeFragment(ctx, scraperID, scraper.Input{Performer: &scrapedPerformer})
if err != nil { if err != nil {
return nil, err return nil, err
@@ -86,7 +86,7 @@ func (r *queryResolver) ScrapePerformer(ctx context.Context, scraperID string, s
} }
func (r *queryResolver) ScrapePerformerURL(ctx context.Context, url string) (*models.ScrapedPerformer, error) { func (r *queryResolver) ScrapePerformerURL(ctx context.Context, url string) (*models.ScrapedPerformer, error) {
content, err := r.scraperCache().ScrapeURL(ctx, url, models.ScrapeContentTypePerformer) content, err := r.scraperCache().ScrapeURL(ctx, url, scraper.ScrapeContentTypePerformer)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -94,12 +94,12 @@ func (r *queryResolver) ScrapePerformerURL(ctx context.Context, url string) (*mo
return marshalScrapedPerformer(content) return marshalScrapedPerformer(content)
} }
func (r *queryResolver) ScrapeSceneQuery(ctx context.Context, scraperID string, query string) ([]*models.ScrapedScene, error) { func (r *queryResolver) ScrapeSceneQuery(ctx context.Context, scraperID string, query string) ([]*scraper.ScrapedScene, error) {
if query == "" { if query == "" {
return nil, nil return nil, nil
} }
content, err := r.scraperCache().ScrapeName(ctx, scraperID, query, models.ScrapeContentTypeScene) content, err := r.scraperCache().ScrapeName(ctx, scraperID, query, scraper.ScrapeContentTypeScene)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -113,13 +113,13 @@ func (r *queryResolver) ScrapeSceneQuery(ctx context.Context, scraperID string,
return ret, nil return ret, nil
} }
func (r *queryResolver) ScrapeScene(ctx context.Context, scraperID string, scene models.SceneUpdateInput) (*models.ScrapedScene, error) { func (r *queryResolver) ScrapeScene(ctx context.Context, scraperID string, scene models.SceneUpdateInput) (*scraper.ScrapedScene, error) {
id, err := strconv.Atoi(scene.ID) id, err := strconv.Atoi(scene.ID)
if err != nil { if err != nil {
return nil, fmt.Errorf("%w: scene.ID is not an integer: '%s'", ErrInput, scene.ID) return nil, fmt.Errorf("%w: scene.ID is not an integer: '%s'", ErrInput, scene.ID)
} }
content, err := r.scraperCache().ScrapeID(ctx, scraperID, id, models.ScrapeContentTypeScene) content, err := r.scraperCache().ScrapeID(ctx, scraperID, id, scraper.ScrapeContentTypeScene)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -129,13 +129,13 @@ func (r *queryResolver) ScrapeScene(ctx context.Context, scraperID string, scene
return nil, err return nil, err
} }
filterSceneTags([]*models.ScrapedScene{ret}) filterSceneTags([]*scraper.ScrapedScene{ret})
return ret, nil return ret, nil
} }
// filterSceneTags removes tags matching excluded tag patterns from the provided scraped scenes // filterSceneTags removes tags matching excluded tag patterns from the provided scraped scenes
func filterSceneTags(scenes []*models.ScrapedScene) { func filterSceneTags(scenes []*scraper.ScrapedScene) {
excludePatterns := manager.GetInstance().Config.GetScraperExcludeTagPatterns() excludePatterns := manager.GetInstance().Config.GetScraperExcludeTagPatterns()
var excludeRegexps []*regexp.Regexp var excludeRegexps []*regexp.Regexp
@@ -179,8 +179,8 @@ func filterSceneTags(scenes []*models.ScrapedScene) {
} }
} }
func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*models.ScrapedScene, error) { func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*scraper.ScrapedScene, error) {
content, err := r.scraperCache().ScrapeURL(ctx, url, models.ScrapeContentTypeScene) content, err := r.scraperCache().ScrapeURL(ctx, url, scraper.ScrapeContentTypeScene)
if err != nil { if err != nil {
return nil, err return nil, err
} }
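Editor's note: per its doc comment, filterSceneTags drops scraped tags whose names match the configured exclude patterns before scenes are returned. A rough standalone sketch of that idea, with hypothetical types and a hard-coded pattern list in place of the real config lookup:

package main

import (
	"fmt"
	"regexp"
)

type ScrapedTag struct{ Name string }

type ScrapedScene struct{ Tags []*ScrapedTag }

// filterTags removes tags whose names match any exclude pattern
// (compiled case-insensitively here; the exact flags are an assumption).
func filterTags(scenes []*ScrapedScene, excludePatterns []string) {
	var excludeRegexps []*regexp.Regexp
	for _, p := range excludePatterns {
		re, err := regexp.Compile("(?i)" + p)
		if err != nil {
			fmt.Printf("skipping invalid pattern %q: %v\n", p, err)
			continue
		}
		excludeRegexps = append(excludeRegexps, re)
	}

	for _, s := range scenes {
		var kept []*ScrapedTag
	tagLoop:
		for _, t := range s.Tags {
			for _, re := range excludeRegexps {
				if re.MatchString(t.Name) {
					continue tagLoop // drop excluded tag
				}
			}
			kept = append(kept, t)
		}
		s.Tags = kept
	}
}

func main() {
	scene := &ScrapedScene{Tags: []*ScrapedTag{{Name: "keep me"}, {Name: "exclude: watermark"}}}
	filterTags([]*ScrapedScene{scene}, []string{"^exclude:"})
	for _, t := range scene.Tags {
		fmt.Println(t.Name)
	}
}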
@@ -190,18 +190,18 @@ func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*models
return nil, err return nil, err
} }
filterSceneTags([]*models.ScrapedScene{ret}) filterSceneTags([]*scraper.ScrapedScene{ret})
return ret, nil return ret, nil
} }
func (r *queryResolver) ScrapeGallery(ctx context.Context, scraperID string, gallery models.GalleryUpdateInput) (*models.ScrapedGallery, error) { func (r *queryResolver) ScrapeGallery(ctx context.Context, scraperID string, gallery models.GalleryUpdateInput) (*scraper.ScrapedGallery, error) {
id, err := strconv.Atoi(gallery.ID) id, err := strconv.Atoi(gallery.ID)
if err != nil { if err != nil {
return nil, fmt.Errorf("%w: gallery id is not an integer: '%s'", ErrInput, gallery.ID) return nil, fmt.Errorf("%w: gallery id is not an integer: '%s'", ErrInput, gallery.ID)
} }
content, err := r.scraperCache().ScrapeID(ctx, scraperID, id, models.ScrapeContentTypeGallery) content, err := r.scraperCache().ScrapeID(ctx, scraperID, id, scraper.ScrapeContentTypeGallery)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -209,8 +209,8 @@ func (r *queryResolver) ScrapeGallery(ctx context.Context, scraperID string, gal
return marshalScrapedGallery(content) return marshalScrapedGallery(content)
} }
func (r *queryResolver) ScrapeGalleryURL(ctx context.Context, url string) (*models.ScrapedGallery, error) { func (r *queryResolver) ScrapeGalleryURL(ctx context.Context, url string) (*scraper.ScrapedGallery, error) {
content, err := r.scraperCache().ScrapeURL(ctx, url, models.ScrapeContentTypeGallery) content, err := r.scraperCache().ScrapeURL(ctx, url, scraper.ScrapeContentTypeGallery)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -219,7 +219,7 @@ func (r *queryResolver) ScrapeGalleryURL(ctx context.Context, url string) (*mode
} }
func (r *queryResolver) ScrapeMovieURL(ctx context.Context, url string) (*models.ScrapedMovie, error) { func (r *queryResolver) ScrapeMovieURL(ctx context.Context, url string) (*models.ScrapedMovie, error) {
content, err := r.scraperCache().ScrapeURL(ctx, url, models.ScrapeContentTypeMovie) content, err := r.scraperCache().ScrapeURL(ctx, url, scraper.ScrapeContentTypeMovie)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -234,11 +234,11 @@ func (r *queryResolver) getStashBoxClient(index int) (*stashbox.Client, error) {
return nil, fmt.Errorf("%w: invalid stash_box_index %d", ErrInput, index) return nil, fmt.Errorf("%w: invalid stash_box_index %d", ErrInput, index)
} }
return stashbox.NewClient(*boxes[index], r.txnManager), nil return stashbox.NewClient(*boxes[index], r.txnManager, r.stashboxRepository()), nil
} }
func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleSceneInput) ([]*models.ScrapedScene, error) { func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source scraper.Source, input ScrapeSingleSceneInput) ([]*scraper.ScrapedScene, error) {
var ret []*models.ScrapedScene var ret []*scraper.ScrapedScene
var sceneID int var sceneID int
if input.SceneID != nil { if input.SceneID != nil {
@@ -252,22 +252,22 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.Scr
switch { switch {
case source.ScraperID != nil: case source.ScraperID != nil:
var err error var err error
var c models.ScrapedContent var c scraper.ScrapedContent
var content []models.ScrapedContent var content []scraper.ScrapedContent
switch { switch {
case input.SceneID != nil: case input.SceneID != nil:
c, err = r.scraperCache().ScrapeID(ctx, *source.ScraperID, sceneID, models.ScrapeContentTypeScene) c, err = r.scraperCache().ScrapeID(ctx, *source.ScraperID, sceneID, scraper.ScrapeContentTypeScene)
if c != nil { if c != nil {
content = []models.ScrapedContent{c} content = []scraper.ScrapedContent{c}
} }
case input.SceneInput != nil: case input.SceneInput != nil:
c, err = r.scraperCache().ScrapeFragment(ctx, *source.ScraperID, scraper.Input{Scene: input.SceneInput}) c, err = r.scraperCache().ScrapeFragment(ctx, *source.ScraperID, scraper.Input{Scene: input.SceneInput})
if c != nil { if c != nil {
content = []models.ScrapedContent{c} content = []scraper.ScrapedContent{c}
} }
case input.Query != nil: case input.Query != nil:
content, err = r.scraperCache().ScrapeName(ctx, *source.ScraperID, *input.Query, models.ScrapeContentTypeScene) content, err = r.scraperCache().ScrapeName(ctx, *source.ScraperID, *input.Query, scraper.ScrapeContentTypeScene)
default: default:
err = fmt.Errorf("%w: scene_id, scene_input, or query must be set", ErrInput) err = fmt.Errorf("%w: scene_id, scene_input, or query must be set", ErrInput)
} }
@@ -307,7 +307,7 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.Scr
return ret, nil return ret, nil
} }
func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeMultiScenesInput) ([][]*models.ScrapedScene, error) { func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source scraper.Source, input ScrapeMultiScenesInput) ([][]*scraper.ScrapedScene, error) {
if source.ScraperID != nil { if source.ScraperID != nil {
return nil, ErrNotImplemented return nil, ErrNotImplemented
} else if source.StashBoxIndex != nil { } else if source.StashBoxIndex != nil {
@@ -327,7 +327,7 @@ func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source models.Scr
return nil, errors.New("scraper_id or stash_box_index must be set") return nil, errors.New("scraper_id or stash_box_index must be set")
} }
func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSinglePerformerInput) ([]*models.ScrapedPerformer, error) { func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source scraper.Source, input ScrapeSinglePerformerInput) ([]*models.ScrapedPerformer, error) {
if source.ScraperID != nil { if source.ScraperID != nil {
if input.PerformerInput != nil { if input.PerformerInput != nil {
performer, err := r.scraperCache().ScrapeFragment(ctx, *source.ScraperID, scraper.Input{Performer: input.PerformerInput}) performer, err := r.scraperCache().ScrapeFragment(ctx, *source.ScraperID, scraper.Input{Performer: input.PerformerInput})
@@ -335,11 +335,11 @@ func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models
return nil, err return nil, err
} }
return marshalScrapedPerformers([]models.ScrapedContent{performer}) return marshalScrapedPerformers([]scraper.ScrapedContent{performer})
} }
if input.Query != nil { if input.Query != nil {
content, err := r.scraperCache().ScrapeName(ctx, *source.ScraperID, *input.Query, models.ScrapeContentTypePerformer) content, err := r.scraperCache().ScrapeName(ctx, *source.ScraperID, *input.Query, scraper.ScrapeContentTypePerformer)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -354,7 +354,7 @@ func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models
return nil, err return nil, err
} }
var ret []*models.StashBoxPerformerQueryResult var ret []*stashbox.StashBoxPerformerQueryResult
switch { switch {
case input.PerformerID != nil: case input.PerformerID != nil:
ret, err = client.FindStashBoxPerformersByNames(ctx, []string{*input.PerformerID}) ret, err = client.FindStashBoxPerformersByNames(ctx, []string{*input.PerformerID})
@@ -378,7 +378,7 @@ func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models
return nil, errors.New("scraper_id or stash_box_index must be set") return nil, errors.New("scraper_id or stash_box_index must be set")
} }
func (r *queryResolver) ScrapeMultiPerformers(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeMultiPerformersInput) ([][]*models.ScrapedPerformer, error) { func (r *queryResolver) ScrapeMultiPerformers(ctx context.Context, source scraper.Source, input ScrapeMultiPerformersInput) ([][]*models.ScrapedPerformer, error) {
if source.ScraperID != nil { if source.ScraperID != nil {
return nil, ErrNotImplemented return nil, ErrNotImplemented
} else if source.StashBoxIndex != nil { } else if source.StashBoxIndex != nil {
@@ -393,7 +393,7 @@ func (r *queryResolver) ScrapeMultiPerformers(ctx context.Context, source models
return nil, errors.New("scraper_id or stash_box_index must be set") return nil, errors.New("scraper_id or stash_box_index must be set")
} }
func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleGalleryInput) ([]*models.ScrapedGallery, error) { func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source scraper.Source, input ScrapeSingleGalleryInput) ([]*scraper.ScrapedGallery, error) {
if source.StashBoxIndex != nil { if source.StashBoxIndex != nil {
return nil, ErrNotSupported return nil, ErrNotSupported
} }
@@ -402,7 +402,7 @@ func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source models.S
return nil, fmt.Errorf("%w: scraper_id must be set", ErrInput) return nil, fmt.Errorf("%w: scraper_id must be set", ErrInput)
} }
var c models.ScrapedContent var c scraper.ScrapedContent
switch { switch {
case input.GalleryID != nil: case input.GalleryID != nil:
@@ -410,22 +410,22 @@ func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source models.S
if err != nil { if err != nil {
return nil, fmt.Errorf("%w: gallery id is not an integer: '%s'", ErrInput, *input.GalleryID) return nil, fmt.Errorf("%w: gallery id is not an integer: '%s'", ErrInput, *input.GalleryID)
} }
c, err = r.scraperCache().ScrapeID(ctx, *source.ScraperID, galleryID, models.ScrapeContentTypeGallery) c, err = r.scraperCache().ScrapeID(ctx, *source.ScraperID, galleryID, scraper.ScrapeContentTypeGallery)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return marshalScrapedGalleries([]models.ScrapedContent{c}) return marshalScrapedGalleries([]scraper.ScrapedContent{c})
case input.GalleryInput != nil: case input.GalleryInput != nil:
c, err := r.scraperCache().ScrapeFragment(ctx, *source.ScraperID, scraper.Input{Gallery: input.GalleryInput}) c, err := r.scraperCache().ScrapeFragment(ctx, *source.ScraperID, scraper.Input{Gallery: input.GalleryInput})
if err != nil { if err != nil {
return nil, err return nil, err
} }
return marshalScrapedGalleries([]models.ScrapedContent{c}) return marshalScrapedGalleries([]scraper.ScrapedContent{c})
default: default:
return nil, ErrNotImplemented return nil, ErrNotImplemented
} }
} }
func (r *queryResolver) ScrapeSingleMovie(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleMovieInput) ([]*models.ScrapedMovie, error) { func (r *queryResolver) ScrapeSingleMovie(ctx context.Context, source scraper.Source, input ScrapeSingleMovieInput) ([]*models.ScrapedMovie, error) {
return nil, ErrNotSupported return nil, ErrNotSupported
} }

View File

@@ -5,18 +5,17 @@ import (
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/job"
"github.com/stashapp/stash/pkg/models"
) )
func makeJobStatusUpdate(t models.JobStatusUpdateType, j job.Job) *models.JobStatusUpdate { func makeJobStatusUpdate(t JobStatusUpdateType, j job.Job) *JobStatusUpdate {
return &models.JobStatusUpdate{ return &JobStatusUpdate{
Type: t, Type: t,
Job: jobToJobModel(j), Job: jobToJobModel(j),
} }
} }
func (r *subscriptionResolver) JobsSubscribe(ctx context.Context) (<-chan *models.JobStatusUpdate, error) { func (r *subscriptionResolver) JobsSubscribe(ctx context.Context) (<-chan *JobStatusUpdate, error) {
msg := make(chan *models.JobStatusUpdate, 100) msg := make(chan *JobStatusUpdate, 100)
subscription := manager.GetInstance().JobManager.Subscribe(ctx) subscription := manager.GetInstance().JobManager.Subscribe(ctx)
@@ -24,11 +23,11 @@ func (r *subscriptionResolver) JobsSubscribe(ctx context.Context) (<-chan *model
for { for {
select { select {
case j := <-subscription.NewJob: case j := <-subscription.NewJob:
msg <- makeJobStatusUpdate(models.JobStatusUpdateTypeAdd, j) msg <- makeJobStatusUpdate(JobStatusUpdateTypeAdd, j)
case j := <-subscription.RemovedJob: case j := <-subscription.RemovedJob:
msg <- makeJobStatusUpdate(models.JobStatusUpdateTypeRemove, j) msg <- makeJobStatusUpdate(JobStatusUpdateTypeRemove, j)
case j := <-subscription.UpdatedJob: case j := <-subscription.UpdatedJob:
msg <- makeJobStatusUpdate(models.JobStatusUpdateTypeUpdate, j) msg <- makeJobStatusUpdate(JobStatusUpdateTypeUpdate, j)
case <-ctx.Done(): case <-ctx.Done():
close(msg) close(msg)
return return
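Editor's note: the subscription resolver above fans job-manager events into a buffered channel and closes it when the request context ends. A small sketch of that channel pattern with stand-in types, not the real job manager:

package main

import (
	"context"
	"fmt"
	"time"
)

type UpdateType string

const (
	UpdateTypeAdd    UpdateType = "ADD"
	UpdateTypeRemove UpdateType = "REMOVE"
)

type StatusUpdate struct {
	Type UpdateType
	Job  string
}

// subscribe fans events from the source channels into a single buffered
// channel that a GraphQL subscription resolver could return, closing it
// when the request context is cancelled.
func subscribe(ctx context.Context, added, removed <-chan string) <-chan *StatusUpdate {
	out := make(chan *StatusUpdate, 100)
	go func() {
		for {
			select {
			case j := <-added:
				out <- &StatusUpdate{Type: UpdateTypeAdd, Job: j}
			case j := <-removed:
				out <- &StatusUpdate{Type: UpdateTypeRemove, Job: j}
			case <-ctx.Done():
				close(out)
				return
			}
		}
	}()
	return out
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	added := make(chan string, 1)
	removed := make(chan string, 1)

	updates := subscribe(ctx, added, removed)
	added <- "generate previews"
	fmt.Println(<-updates)

	cancel()
	time.Sleep(10 * time.Millisecond) // let the goroutine observe cancellation
	if _, ok := <-updates; !ok {
		fmt.Println("subscription closed")
	}
}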

View File

@@ -5,33 +5,32 @@ import (
"github.com/stashapp/stash/internal/log" "github.com/stashapp/stash/internal/log"
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/models"
) )
func getLogLevel(logType string) models.LogLevel { func getLogLevel(logType string) LogLevel {
switch logType { switch logType {
case "progress": case "progress":
return models.LogLevelProgress return LogLevelProgress
case "trace": case "trace":
return models.LogLevelTrace return LogLevelTrace
case "debug": case "debug":
return models.LogLevelDebug return LogLevelDebug
case "info": case "info":
return models.LogLevelInfo return LogLevelInfo
case "warn": case "warn":
return models.LogLevelWarning return LogLevelWarning
case "error": case "error":
return models.LogLevelError return LogLevelError
default: default:
return models.LogLevelDebug return LogLevelDebug
} }
} }
func logEntriesFromLogItems(logItems []log.LogItem) []*models.LogEntry { func logEntriesFromLogItems(logItems []log.LogItem) []*LogEntry {
ret := make([]*models.LogEntry, len(logItems)) ret := make([]*LogEntry, len(logItems))
for i, entry := range logItems { for i, entry := range logItems {
ret[i] = &models.LogEntry{ ret[i] = &LogEntry{
Time: entry.Time, Time: entry.Time,
Level: getLogLevel(entry.Type), Level: getLogLevel(entry.Type),
Message: entry.Message, Message: entry.Message,
@@ -41,8 +40,8 @@ func logEntriesFromLogItems(logItems []log.LogItem) []*models.LogEntry {
return ret return ret
} }
func (r *subscriptionResolver) LoggingSubscribe(ctx context.Context) (<-chan []*models.LogEntry, error) { func (r *subscriptionResolver) LoggingSubscribe(ctx context.Context) (<-chan []*LogEntry, error) {
ret := make(chan []*models.LogEntry, 100) ret := make(chan []*LogEntry, 100)
stop := make(chan int, 1) stop := make(chan int, 1)
logger := manager.GetInstance().Logger logger := manager.GetInstance().Logger
logSub := logger.SubscribeToLog(stop) logSub := logger.SubscribeToLog(stop)

View File

@@ -9,21 +9,30 @@ import (
"github.com/go-chi/chi" "github.com/go-chi/chi"
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/txn"
) )
type ImageFinder interface {
Find(ctx context.Context, id int) (*models.Image, error)
FindByChecksum(ctx context.Context, checksum string) ([]*models.Image, error)
}
type imageRoutes struct { type imageRoutes struct {
txnManager models.TransactionManager txnManager txn.Manager
imageFinder ImageFinder
fileFinder file.Finder
} }
func (rs imageRoutes) Routes() chi.Router { func (rs imageRoutes) Routes() chi.Router {
r := chi.NewRouter() r := chi.NewRouter()
r.Route("/{imageId}", func(r chi.Router) { r.Route("/{imageId}", func(r chi.Router) {
r.Use(ImageCtx) r.Use(rs.ImageCtx)
r.Get("/image", rs.Image) r.Get("/image", rs.Image)
r.Get("/thumbnail", rs.Thumbnail) r.Get("/thumbnail", rs.Thumbnail)
@@ -45,12 +54,20 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
if exists { if exists {
http.ServeFile(w, r, filepath) http.ServeFile(w, r, filepath)
} else { } else {
// don't return anything if there is no file
f := img.Files.Primary()
if f == nil {
// TODO - probably want to return a placeholder
http.Error(w, http.StatusText(404), 404)
return
}
encoder := image.NewThumbnailEncoder(manager.GetInstance().FFMPEG) encoder := image.NewThumbnailEncoder(manager.GetInstance().FFMPEG)
data, err := encoder.GetThumbnail(img, models.DefaultGthumbWidth) data, err := encoder.GetThumbnail(f, models.DefaultGthumbWidth)
if err != nil { if err != nil {
// don't log for unsupported image format // don't log for unsupported image format
if !errors.Is(err, image.ErrNotSupportedForThumbnail) { if !errors.Is(err, image.ErrNotSupportedForThumbnail) {
logger.Errorf("error generating thumbnail for image: %s", err.Error()) logger.Errorf("error generating thumbnail for %s: %v", f.Path, err)
var exitErr *exec.ExitError var exitErr *exec.ExitError
if errors.As(err, &exitErr) { if errors.As(err, &exitErr) {
@@ -80,23 +97,36 @@ func (rs imageRoutes) Image(w http.ResponseWriter, r *http.Request) {
i := r.Context().Value(imageKey).(*models.Image) i := r.Context().Value(imageKey).(*models.Image)
// if image is in a zip file, we need to serve it specifically // if image is in a zip file, we need to serve it specifically
image.Serve(w, r, i.Path)
if i.Files.Primary() == nil {
http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
return
}
i.Files.Primary().Serve(&file.OsFS{}, w, r)
} }
// endregion // endregion
func ImageCtx(next http.Handler) http.Handler { func (rs imageRoutes) ImageCtx(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
imageIdentifierQueryParam := chi.URLParam(r, "imageId") imageIdentifierQueryParam := chi.URLParam(r, "imageId")
imageID, _ := strconv.Atoi(imageIdentifierQueryParam) imageID, _ := strconv.Atoi(imageIdentifierQueryParam)
var image *models.Image var image *models.Image
readTxnErr := manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error { readTxnErr := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
qb := repo.Image() qb := rs.imageFinder
if imageID == 0 { if imageID == 0 {
image, _ = qb.FindByChecksum(imageIdentifierQueryParam) images, _ := qb.FindByChecksum(ctx, imageIdentifierQueryParam)
if len(images) > 0 {
image = images[0]
}
} else { } else {
image, _ = qb.Find(imageID) image, _ = qb.Find(ctx, imageID)
}
if image != nil {
_ = image.LoadPrimaryFile(ctx, rs.fileFinder)
} }
return nil return nil
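Editor's note: the image routes now depend on a narrow, locally declared finder interface and resolve the entity once in a context middleware. A compact sketch of that middleware shape using only the standard library and toy types:

package main

import (
	"context"
	"fmt"
	"net/http"
	"net/http/httptest"
	"strconv"
)

type Image struct {
	ID       int
	Checksum string
}

// ImageFinder is the narrow, consumer-defined interface the route handlers
// need; the real repository type satisfies it implicitly.
type ImageFinder interface {
	Find(ctx context.Context, id int) (*Image, error)
}

type imageRoutes struct {
	finder ImageFinder
}

type ctxKey int

const imageKey ctxKey = 0

// ImageCtx resolves the identifier once, loads the image, and stores it in
// the request context so downstream handlers don't repeat the lookup.
func (rs imageRoutes) ImageCtx(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		id, _ := strconv.Atoi(r.URL.Query().Get("id"))
		img, err := rs.finder.Find(r.Context(), id)
		if err != nil || img == nil {
			http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
			return
		}
		next.ServeHTTP(w, r.WithContext(context.WithValue(r.Context(), imageKey, img)))
	})
}

func (rs imageRoutes) Image(w http.ResponseWriter, r *http.Request) {
	img := r.Context().Value(imageKey).(*Image)
	fmt.Fprintf(w, "serving image %d (%s)\n", img.ID, img.Checksum)
}

// mapFinder is a toy in-memory implementation of ImageFinder.
type mapFinder map[int]*Image

func (m mapFinder) Find(ctx context.Context, id int) (*Image, error) {
	return m[id], nil
}

func main() {
	rs := imageRoutes{finder: mapFinder{1: {ID: 1, Checksum: "abc123"}}}
	handler := rs.ImageCtx(http.HandlerFunc(rs.Image))

	rec := httptest.NewRecorder()
	handler.ServeHTTP(rec, httptest.NewRequest("GET", "/image?id=1", nil))
	fmt.Print(rec.Body.String())
}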

View File

@@ -6,21 +6,28 @@ import (
"strconv" "strconv"
"github.com/go-chi/chi" "github.com/go-chi/chi"
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/txn"
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
) )
type MovieFinder interface {
GetFrontImage(ctx context.Context, movieID int) ([]byte, error)
GetBackImage(ctx context.Context, movieID int) ([]byte, error)
Find(ctx context.Context, id int) (*models.Movie, error)
}
type movieRoutes struct { type movieRoutes struct {
txnManager models.TransactionManager txnManager txn.Manager
movieFinder MovieFinder
} }
func (rs movieRoutes) Routes() chi.Router { func (rs movieRoutes) Routes() chi.Router {
r := chi.NewRouter() r := chi.NewRouter()
r.Route("/{movieId}", func(r chi.Router) { r.Route("/{movieId}", func(r chi.Router) {
r.Use(MovieCtx) r.Use(rs.MovieCtx)
r.Get("/frontimage", rs.FrontImage) r.Get("/frontimage", rs.FrontImage)
r.Get("/backimage", rs.BackImage) r.Get("/backimage", rs.BackImage)
}) })
@@ -33,8 +40,8 @@ func (rs movieRoutes) FrontImage(w http.ResponseWriter, r *http.Request) {
defaultParam := r.URL.Query().Get("default") defaultParam := r.URL.Query().Get("default")
var image []byte var image []byte
if defaultParam != "true" { if defaultParam != "true" {
err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error { err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
image, _ = repo.Movie().GetFrontImage(movie.ID) image, _ = rs.movieFinder.GetFrontImage(ctx, movie.ID)
return nil return nil
}) })
if err != nil { if err != nil {
@@ -56,8 +63,8 @@ func (rs movieRoutes) BackImage(w http.ResponseWriter, r *http.Request) {
defaultParam := r.URL.Query().Get("default") defaultParam := r.URL.Query().Get("default")
var image []byte var image []byte
if defaultParam != "true" { if defaultParam != "true" {
err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error { err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
image, _ = repo.Movie().GetBackImage(movie.ID) image, _ = rs.movieFinder.GetBackImage(ctx, movie.ID)
return nil return nil
}) })
if err != nil { if err != nil {
@@ -74,7 +81,7 @@ func (rs movieRoutes) BackImage(w http.ResponseWriter, r *http.Request) {
} }
} }
func MovieCtx(next http.Handler) http.Handler { func (rs movieRoutes) MovieCtx(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
movieID, err := strconv.Atoi(chi.URLParam(r, "movieId")) movieID, err := strconv.Atoi(chi.URLParam(r, "movieId"))
if err != nil { if err != nil {
@@ -83,9 +90,9 @@ func MovieCtx(next http.Handler) http.Handler {
} }
var movie *models.Movie var movie *models.Movie
if err := manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error { if err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
var err error var err error
movie, err = repo.Movie().Find(movieID) movie, err = rs.movieFinder.Find(ctx, movieID)
return err return err
}); err != nil { }); err != nil {
http.Error(w, http.StatusText(404), 404) http.Error(w, http.StatusText(404), 404)

View File

@@ -6,22 +6,28 @@ import (
"strconv" "strconv"
"github.com/go-chi/chi" "github.com/go-chi/chi"
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/txn"
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
) )
type PerformerFinder interface {
Find(ctx context.Context, id int) (*models.Performer, error)
GetImage(ctx context.Context, performerID int) ([]byte, error)
}
type performerRoutes struct { type performerRoutes struct {
txnManager models.TransactionManager txnManager txn.Manager
performerFinder PerformerFinder
} }
func (rs performerRoutes) Routes() chi.Router { func (rs performerRoutes) Routes() chi.Router {
r := chi.NewRouter() r := chi.NewRouter()
r.Route("/{performerId}", func(r chi.Router) { r.Route("/{performerId}", func(r chi.Router) {
r.Use(PerformerCtx) r.Use(rs.PerformerCtx)
r.Get("/image", rs.Image) r.Get("/image", rs.Image)
}) })
@@ -34,8 +40,8 @@ func (rs performerRoutes) Image(w http.ResponseWriter, r *http.Request) {
var image []byte var image []byte
if defaultParam != "true" { if defaultParam != "true" {
readTxnErr := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error { readTxnErr := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
image, _ = repo.Performer().GetImage(performer.ID) image, _ = rs.performerFinder.GetImage(ctx, performer.ID)
return nil return nil
}) })
if readTxnErr != nil { if readTxnErr != nil {
@@ -52,7 +58,7 @@ func (rs performerRoutes) Image(w http.ResponseWriter, r *http.Request) {
} }
} }
func PerformerCtx(next http.Handler) http.Handler { func (rs performerRoutes) PerformerCtx(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
performerID, err := strconv.Atoi(chi.URLParam(r, "performerId")) performerID, err := strconv.Atoi(chi.URLParam(r, "performerId"))
if err != nil { if err != nil {
@@ -61,9 +67,9 @@ func PerformerCtx(next http.Handler) http.Handler {
} }
var performer *models.Performer var performer *models.Performer
if err := manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error { if err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
var err error var err error
performer, err = repo.Performer().Find(performerID) performer, err = rs.performerFinder.Find(ctx, performerID)
return err return err
}); err != nil { }); err != nil {
http.Error(w, http.StatusText(404), 404) http.Error(w, http.StatusText(404), 404)

View File

@@ -11,22 +11,47 @@ import (
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/file/video"
"github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/txn"
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
) )
type SceneFinder interface {
manager.SceneCoverGetter
scene.IDFinder
FindByChecksum(ctx context.Context, checksum string) ([]*models.Scene, error)
FindByOSHash(ctx context.Context, oshash string) ([]*models.Scene, error)
}
type SceneMarkerFinder interface {
Find(ctx context.Context, id int) (*models.SceneMarker, error)
FindBySceneID(ctx context.Context, sceneID int) ([]*models.SceneMarker, error)
}
type CaptionFinder interface {
GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error)
}
type sceneRoutes struct { type sceneRoutes struct {
txnManager models.TransactionManager txnManager txn.Manager
sceneFinder SceneFinder
fileFinder file.Finder
captionFinder CaptionFinder
sceneMarkerFinder SceneMarkerFinder
tagFinder scene.MarkerTagFinder
} }
func (rs sceneRoutes) Routes() chi.Router { func (rs sceneRoutes) Routes() chi.Router {
r := chi.NewRouter() r := chi.NewRouter()
r.Route("/{sceneId}", func(r chi.Router) { r.Route("/{sceneId}", func(r chi.Router) {
r.Use(SceneCtx) r.Use(rs.SceneCtx)
// streaming endpoints // streaming endpoints
r.Get("/stream", rs.StreamDirect) r.Get("/stream", rs.StreamDirect)
@@ -48,8 +73,8 @@ func (rs sceneRoutes) Routes() chi.Router {
r.Get("/scene_marker/{sceneMarkerId}/preview", rs.SceneMarkerPreview) r.Get("/scene_marker/{sceneMarkerId}/preview", rs.SceneMarkerPreview)
r.Get("/scene_marker/{sceneMarkerId}/screenshot", rs.SceneMarkerScreenshot) r.Get("/scene_marker/{sceneMarkerId}/screenshot", rs.SceneMarkerScreenshot)
}) })
r.With(SceneCtx).Get("/{sceneId}_thumbs.vtt", rs.VttThumbs) r.With(rs.SceneCtx).Get("/{sceneId}_thumbs.vtt", rs.VttThumbs)
r.With(SceneCtx).Get("/{sceneId}_sprite.jpg", rs.VttSprite) r.With(rs.SceneCtx).Get("/{sceneId}_sprite.jpg", rs.VttSprite)
return r return r
} }
@@ -60,7 +85,8 @@ func (rs sceneRoutes) StreamDirect(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
ss := manager.SceneServer{ ss := manager.SceneServer{
TXNManager: rs.txnManager, TxnManager: rs.txnManager,
SceneCoverGetter: rs.sceneFinder,
} }
ss.StreamSceneDirect(scene, w, r) ss.StreamSceneDirect(scene, w, r)
} }
@@ -69,7 +95,12 @@ func (rs sceneRoutes) StreamMKV(w http.ResponseWriter, r *http.Request) {
// only allow mkv streaming if the scene container is an mkv already // only allow mkv streaming if the scene container is an mkv already
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
container, err := manager.GetSceneFileContainer(scene) pf := scene.Files.Primary()
if pf == nil {
return
}
container, err := manager.GetVideoFileContainer(pf)
if err != nil { if err != nil {
logger.Errorf("[transcode] error getting container: %v", err) logger.Errorf("[transcode] error getting container: %v", err)
} }
@@ -96,10 +127,8 @@ func (rs sceneRoutes) StreamMp4(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) StreamHLS(w http.ResponseWriter, r *http.Request) { func (rs sceneRoutes) StreamHLS(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
ffprobe := manager.GetInstance().FFProbe pf := scene.Files.Primary()
videoFile, err := ffprobe.NewVideoFile(scene.Path) if pf == nil {
if err != nil {
logger.Errorf("[stream] error reading video file: %v", err)
return return
} }
@@ -109,7 +138,7 @@ func (rs sceneRoutes) StreamHLS(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", ffmpeg.MimeHLS) w.Header().Set("Content-Type", ffmpeg.MimeHLS)
var str strings.Builder var str strings.Builder
ffmpeg.WriteHLSPlaylist(videoFile.Duration, r.URL.String(), &str) ffmpeg.WriteHLSPlaylist(pf.Duration, r.URL.String(), &str)
requestByteRange := createByteRange(r.Header.Get("Range")) requestByteRange := createByteRange(r.Header.Get("Range"))
if requestByteRange.RawString != "" { if requestByteRange.RawString != "" {
@@ -130,9 +159,14 @@ func (rs sceneRoutes) StreamTS(w http.ResponseWriter, r *http.Request) {
} }
func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, streamFormat ffmpeg.StreamFormat) { func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, streamFormat ffmpeg.StreamFormat) {
logger.Debugf("Streaming as %s", streamFormat.MimeType)
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
f := scene.Files.Primary()
if f == nil {
return
}
logger.Debugf("Streaming as %s", streamFormat.MimeType)
// start stream based on query param, if provided // start stream based on query param, if provided
if err := r.ParseForm(); err != nil { if err := r.ParseForm(); err != nil {
logger.Warnf("[stream] error parsing query form: %v", err) logger.Warnf("[stream] error parsing query form: %v", err)
@@ -143,17 +177,20 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, st
requestedSize := r.Form.Get("resolution") requestedSize := r.Form.Get("resolution")
audioCodec := ffmpeg.MissingUnsupported audioCodec := ffmpeg.MissingUnsupported
if scene.AudioCodec.Valid { if f.AudioCodec != "" {
audioCodec = ffmpeg.ProbeAudioCodec(scene.AudioCodec.String) audioCodec = ffmpeg.ProbeAudioCodec(f.AudioCodec)
} }
width := f.Width
height := f.Height
options := ffmpeg.TranscodeStreamOptions{ options := ffmpeg.TranscodeStreamOptions{
Input: scene.Path, Input: f.Path,
Codec: streamFormat, Codec: streamFormat,
VideoOnly: audioCodec == ffmpeg.MissingUnsupported, VideoOnly: audioCodec == ffmpeg.MissingUnsupported,
VideoWidth: int(scene.Width.Int64), VideoWidth: width,
VideoHeight: int(scene.Height.Int64), VideoHeight: height,
StartTime: ss, StartTime: ss,
MaxTranscodeSize: config.GetInstance().GetMaxStreamingTranscodeSize().GetMaxResolution(), MaxTranscodeSize: config.GetInstance().GetMaxStreamingTranscodeSize().GetMaxResolution(),
@@ -167,7 +204,7 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, st
lm := manager.GetInstance().ReadLockManager lm := manager.GetInstance().ReadLockManager
streamRequestCtx := manager.NewStreamRequestContext(w, r) streamRequestCtx := manager.NewStreamRequestContext(w, r)
lockCtx := lm.ReadLock(streamRequestCtx, scene.Path) lockCtx := lm.ReadLock(streamRequestCtx, f.Path)
defer lockCtx.Cancel() defer lockCtx.Cancel()
stream, err := encoder.GetTranscodeStream(lockCtx, options) stream, err := encoder.GetTranscodeStream(lockCtx, options)
@@ -190,7 +227,8 @@ func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
ss := manager.SceneServer{ ss := manager.SceneServer{
TXNManager: rs.txnManager, TxnManager: rs.txnManager,
SceneCoverGetter: rs.sceneFinder,
} }
ss.ServeScreenshot(scene, w, r) ss.ServeScreenshot(scene, w, r)
} }
@@ -221,16 +259,16 @@ func (rs sceneRoutes) getChapterVttTitle(ctx context.Context, marker *models.Sce
} }
var ret string var ret string
if err := rs.txnManager.WithReadTxn(ctx, func(repo models.ReaderRepository) error { if err := txn.WithTxn(ctx, rs.txnManager, func(ctx context.Context) error {
qb := repo.Tag() qb := rs.tagFinder
primaryTag, err := qb.Find(marker.PrimaryTagID) primaryTag, err := qb.Find(ctx, marker.PrimaryTagID)
if err != nil { if err != nil {
return err return err
} }
ret = primaryTag.Name ret = primaryTag.Name
tags, err := qb.FindBySceneMarkerID(marker.ID) tags, err := qb.FindBySceneMarkerID(ctx, marker.ID)
if err != nil { if err != nil {
return err return err
} }
@@ -250,9 +288,9 @@ func (rs sceneRoutes) getChapterVttTitle(ctx context.Context, marker *models.Sce
func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) { func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
var sceneMarkers []*models.SceneMarker var sceneMarkers []*models.SceneMarker
if err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error { if err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
var err error var err error
sceneMarkers, err = repo.SceneMarker().FindBySceneID(scene.ID) sceneMarkers, err = rs.sceneMarkerFinder.FindBySceneID(ctx, scene.ID)
return err return err
}); err != nil { }); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError) http.Error(w, err.Error(), http.StatusInternalServerError)
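Editor's note: ChapterVtt above assembles a WebVTT chapter track from the scene's markers, with titles coming from getChapterVttTitle. The sketch below only illustrates the WebVTT output shape with hypothetical marker data; the cue timing and numbering are assumptions, not the project's exact logic:

package main

import (
	"fmt"
	"strings"
)

type Marker struct {
	Title   string
	Seconds float64
}

// formatTime renders seconds as a WebVTT HH:MM:SS.mmm timestamp.
func formatTime(seconds float64) string {
	whole := int(seconds)
	ms := int((seconds - float64(whole)) * 1000)
	return fmt.Sprintf("%02d:%02d:%02d.%03d", whole/3600, (whole%3600)/60, whole%60, ms)
}

// chapterVTT writes one cue per marker; each chapter nominally runs until the
// next marker, or until the scene duration for the last one.
func chapterVTT(markers []Marker, duration float64) string {
	var b strings.Builder
	b.WriteString("WEBVTT\n\n")
	for i, m := range markers {
		end := duration
		if i+1 < len(markers) {
			end = markers[i+1].Seconds
		}
		fmt.Fprintf(&b, "%d\n%s --> %s\n%s\n\n", i+1, formatTime(m.Seconds), formatTime(end), m.Title)
	}
	return b.String()
}

func main() {
	markers := []Marker{
		{Title: "Intro", Seconds: 0},
		{Title: "Interview", Seconds: 95.5},
	}
	fmt.Print(chapterVTT(markers, 300))
}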
@@ -275,7 +313,7 @@ func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) Funscript(w http.ResponseWriter, r *http.Request) { func (rs sceneRoutes) Funscript(w http.ResponseWriter, r *http.Request) {
s := r.Context().Value(sceneKey).(*models.Scene) s := r.Context().Value(sceneKey).(*models.Scene)
funscript := scene.GetFunscriptPath(s.Path) funscript := video.GetFunscriptPath(s.Path)
serveFileNoCache(w, r, funscript) serveFileNoCache(w, r, funscript)
} }
@@ -289,12 +327,17 @@ func (rs sceneRoutes) InteractiveHeatmap(w http.ResponseWriter, r *http.Request)
func (rs sceneRoutes) Caption(w http.ResponseWriter, r *http.Request, lang string, ext string) { func (rs sceneRoutes) Caption(w http.ResponseWriter, r *http.Request, lang string, ext string) {
s := r.Context().Value(sceneKey).(*models.Scene) s := r.Context().Value(sceneKey).(*models.Scene)
if err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error { if err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
var err error var err error
captions, err := repo.Scene().GetCaptions(s.ID) primaryFile := s.Files.Primary()
if primaryFile == nil {
return nil
}
captions, err := rs.captionFinder.GetCaptions(ctx, primaryFile.Base().ID)
for _, caption := range captions { for _, caption := range captions {
if lang == caption.LanguageCode && ext == caption.CaptionType { if lang == caption.LanguageCode && ext == caption.CaptionType {
sub, err := scene.ReadSubs(caption.Path(s.Path)) sub, err := video.ReadSubs(caption.Path(s.Path))
if err == nil { if err == nil {
var b bytes.Buffer var b bytes.Buffer
err = sub.WriteToWebVTT(&b) err = sub.WriteToWebVTT(&b)
@@ -344,9 +387,9 @@ func (rs sceneRoutes) SceneMarkerStream(w http.ResponseWriter, r *http.Request)
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
sceneMarkerID, _ := strconv.Atoi(chi.URLParam(r, "sceneMarkerId")) sceneMarkerID, _ := strconv.Atoi(chi.URLParam(r, "sceneMarkerId"))
var sceneMarker *models.SceneMarker var sceneMarker *models.SceneMarker
if err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error { if err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
var err error var err error
sceneMarker, err = repo.SceneMarker().Find(sceneMarkerID) sceneMarker, err = rs.sceneMarkerFinder.Find(ctx, sceneMarkerID)
return err return err
}); err != nil { }); err != nil {
logger.Warnf("Error when getting scene marker for stream: %s", err.Error()) logger.Warnf("Error when getting scene marker for stream: %s", err.Error())
@@ -367,9 +410,9 @@ func (rs sceneRoutes) SceneMarkerPreview(w http.ResponseWriter, r *http.Request)
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
sceneMarkerID, _ := strconv.Atoi(chi.URLParam(r, "sceneMarkerId")) sceneMarkerID, _ := strconv.Atoi(chi.URLParam(r, "sceneMarkerId"))
var sceneMarker *models.SceneMarker var sceneMarker *models.SceneMarker
if err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error { if err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
var err error var err error
sceneMarker, err = repo.SceneMarker().Find(sceneMarkerID) sceneMarker, err = rs.sceneMarkerFinder.Find(ctx, sceneMarkerID)
return err return err
}); err != nil { }); err != nil {
logger.Warnf("Error when getting scene marker for stream: %s", err.Error()) logger.Warnf("Error when getting scene marker for stream: %s", err.Error())
@@ -400,9 +443,9 @@ func (rs sceneRoutes) SceneMarkerScreenshot(w http.ResponseWriter, r *http.Reque
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
sceneMarkerID, _ := strconv.Atoi(chi.URLParam(r, "sceneMarkerId")) sceneMarkerID, _ := strconv.Atoi(chi.URLParam(r, "sceneMarkerId"))
var sceneMarker *models.SceneMarker var sceneMarker *models.SceneMarker
if err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error { if err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
var err error var err error
sceneMarker, err = repo.SceneMarker().Find(sceneMarkerID) sceneMarker, err = rs.sceneMarkerFinder.Find(ctx, sceneMarkerID)
return err return err
}); err != nil { }); err != nil {
logger.Warnf("Error when getting scene marker for stream: %s", err.Error()) logger.Warnf("Error when getting scene marker for stream: %s", err.Error())
@@ -431,23 +474,33 @@ func (rs sceneRoutes) SceneMarkerScreenshot(w http.ResponseWriter, r *http.Reque
// endregion // endregion
func SceneCtx(next http.Handler) http.Handler { func (rs sceneRoutes) SceneCtx(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
sceneIdentifierQueryParam := chi.URLParam(r, "sceneId") sceneIdentifierQueryParam := chi.URLParam(r, "sceneId")
sceneID, _ := strconv.Atoi(sceneIdentifierQueryParam) sceneID, _ := strconv.Atoi(sceneIdentifierQueryParam)
var scene *models.Scene var scene *models.Scene
readTxnErr := manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error { readTxnErr := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
qb := repo.Scene() qb := rs.sceneFinder
if sceneID == 0 { if sceneID == 0 {
var scenes []*models.Scene
// determine checksum/os by the length of the query param // determine checksum/os by the length of the query param
if len(sceneIdentifierQueryParam) == 32 { if len(sceneIdentifierQueryParam) == 32 {
scene, _ = qb.FindByChecksum(sceneIdentifierQueryParam) scenes, _ = qb.FindByChecksum(ctx, sceneIdentifierQueryParam)
} else { } else {
scene, _ = qb.FindByOSHash(sceneIdentifierQueryParam) scenes, _ = qb.FindByOSHash(ctx, sceneIdentifierQueryParam)
}
if len(scenes) > 0 {
scene = scenes[0]
} }
} else { } else {
scene, _ = qb.Find(sceneID) scene, _ = qb.Find(ctx, sceneID)
}
if scene != nil {
_ = scene.LoadPrimaryFile(ctx, rs.fileFinder)
} }
return nil return nil
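Editor's note: SceneCtx now distinguishes a numeric ID from a file hash and, for hashes, picks the first matching scene since a file hash can now map to several scenes. A standalone sketch of that resolution order with stand-in lookups; the 32-character test assumes MD5 checksums versus shorter oshash values, as in the diff's comment:

package main

import (
	"context"
	"fmt"
	"strconv"
)

type Scene struct {
	ID   int
	Path string
}

// sceneLookups stands in for the scene store used by the middleware.
type sceneLookups struct {
	byID       map[int]*Scene
	byChecksum map[string][]*Scene
	byOSHash   map[string][]*Scene
}

// resolveScene mirrors the lookup order used by the context middleware:
// numeric ID first, otherwise checksum vs. oshash by identifier length,
// returning the first match when several scenes share a file hash.
func (l sceneLookups) resolveScene(ctx context.Context, identifier string) *Scene {
	if id, err := strconv.Atoi(identifier); err == nil && id != 0 {
		return l.byID[id]
	}

	var scenes []*Scene
	if len(identifier) == 32 {
		scenes = l.byChecksum[identifier]
	} else {
		scenes = l.byOSHash[identifier]
	}
	if len(scenes) > 0 {
		return scenes[0]
	}
	return nil
}

func main() {
	s := &Scene{ID: 7, Path: "/videos/a.mp4"}
	lookups := sceneLookups{
		byID:       map[int]*Scene{7: s},
		byChecksum: map[string][]*Scene{"0123456789abcdef0123456789abcdef": {s}},
		byOSHash:   map[string][]*Scene{"0123456789abcdef": {s}},
	}

	for _, ident := range []string{"7", "0123456789abcdef0123456789abcdef", "0123456789abcdef"} {
		if found := lookups.resolveScene(context.Background(), ident); found != nil {
			fmt.Printf("%q -> scene %d\n", ident, found.ID)
		}
	}
}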

View File

@@ -8,21 +8,28 @@ import (
"syscall" "syscall"
"github.com/go-chi/chi" "github.com/go-chi/chi"
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/studio"
"github.com/stashapp/stash/pkg/txn"
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
) )
type StudioFinder interface {
studio.Finder
GetImage(ctx context.Context, studioID int) ([]byte, error)
}
type studioRoutes struct { type studioRoutes struct {
txnManager models.TransactionManager txnManager txn.Manager
studioFinder StudioFinder
} }
func (rs studioRoutes) Routes() chi.Router { func (rs studioRoutes) Routes() chi.Router {
r := chi.NewRouter() r := chi.NewRouter()
r.Route("/{studioId}", func(r chi.Router) { r.Route("/{studioId}", func(r chi.Router) {
r.Use(StudioCtx) r.Use(rs.StudioCtx)
r.Get("/image", rs.Image) r.Get("/image", rs.Image)
}) })
@@ -35,8 +42,8 @@ func (rs studioRoutes) Image(w http.ResponseWriter, r *http.Request) {
var image []byte var image []byte
if defaultParam != "true" { if defaultParam != "true" {
err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error { err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
image, _ = repo.Studio().GetImage(studio.ID) image, _ = rs.studioFinder.GetImage(ctx, studio.ID)
return nil return nil
}) })
if err != nil { if err != nil {
@@ -58,7 +65,7 @@ func (rs studioRoutes) Image(w http.ResponseWriter, r *http.Request) {
} }
} }
func StudioCtx(next http.Handler) http.Handler { func (rs studioRoutes) StudioCtx(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
studioID, err := strconv.Atoi(chi.URLParam(r, "studioId")) studioID, err := strconv.Atoi(chi.URLParam(r, "studioId"))
if err != nil { if err != nil {
@@ -67,9 +74,9 @@ func StudioCtx(next http.Handler) http.Handler {
} }
var studio *models.Studio var studio *models.Studio
if err := manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error { if err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
var err error var err error
studio, err = repo.Studio().Find(studioID) studio, err = rs.studioFinder.Find(ctx, studioID)
return err return err
}); err != nil { }); err != nil {
http.Error(w, http.StatusText(404), 404) http.Error(w, http.StatusText(404), 404)

View File

@@ -6,21 +6,28 @@ import (
"strconv" "strconv"
"github.com/go-chi/chi" "github.com/go-chi/chi"
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/tag"
"github.com/stashapp/stash/pkg/txn"
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
) )
type TagFinder interface {
tag.Finder
GetImage(ctx context.Context, tagID int) ([]byte, error)
}
type tagRoutes struct { type tagRoutes struct {
txnManager models.TransactionManager txnManager txn.Manager
tagFinder TagFinder
} }
func (rs tagRoutes) Routes() chi.Router { func (rs tagRoutes) Routes() chi.Router {
r := chi.NewRouter() r := chi.NewRouter()
r.Route("/{tagId}", func(r chi.Router) { r.Route("/{tagId}", func(r chi.Router) {
r.Use(TagCtx) r.Use(rs.TagCtx)
r.Get("/image", rs.Image) r.Get("/image", rs.Image)
}) })
@@ -33,8 +40,8 @@ func (rs tagRoutes) Image(w http.ResponseWriter, r *http.Request) {
var image []byte var image []byte
if defaultParam != "true" { if defaultParam != "true" {
err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error { err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
image, _ = repo.Tag().GetImage(tag.ID) image, _ = rs.tagFinder.GetImage(ctx, tag.ID)
return nil return nil
}) })
if err != nil { if err != nil {
@@ -51,7 +58,7 @@ func (rs tagRoutes) Image(w http.ResponseWriter, r *http.Request) {
} }
} }
func TagCtx(next http.Handler) http.Handler { func (rs tagRoutes) TagCtx(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
tagID, err := strconv.Atoi(chi.URLParam(r, "tagId")) tagID, err := strconv.Atoi(chi.URLParam(r, "tagId"))
if err != nil { if err != nil {
@@ -60,9 +67,9 @@ func TagCtx(next http.Handler) http.Handler {
} }
var tag *models.Tag var tag *models.Tag
if err := manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error { if err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
var err error var err error
tag, err = repo.Tag().Find(tagID) tag, err = rs.tagFinder.Find(ctx, tagID)
return err return err
}); err != nil { }); err != nil {
http.Error(w, http.StatusText(404), 404) http.Error(w, http.StatusText(404), 404)
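Editor's note: each route file now declares the small finder interface it actually uses (Find plus GetImage here) instead of taking the whole repository; any store with matching methods satisfies it implicitly. A toy illustration of that design choice with hypothetical types:

package main

import (
	"context"
	"fmt"
)

type Tag struct {
	ID    int
	Name  string
	Image []byte
}

// TagFinder is the consumer-side interface: only the calls the tag routes
// make. The concrete store below satisfies it without naming it.
type TagFinder interface {
	Find(ctx context.Context, id int) (*Tag, error)
	GetImage(ctx context.Context, tagID int) ([]byte, error)
}

// memoryTagStore is a toy store; the real one would be backed by the database.
type memoryTagStore struct {
	tags map[int]*Tag
}

func (s *memoryTagStore) Find(ctx context.Context, id int) (*Tag, error) {
	t, ok := s.tags[id]
	if !ok {
		return nil, fmt.Errorf("tag %d not found", id)
	}
	return t, nil
}

func (s *memoryTagStore) GetImage(ctx context.Context, tagID int) ([]byte, error) {
	t, err := s.Find(ctx, tagID)
	if err != nil {
		return nil, err
	}
	return t.Image, nil
}

type tagRoutes struct {
	tagFinder TagFinder
}

func main() {
	store := &memoryTagStore{tags: map[int]*Tag{1: {ID: 1, Name: "example", Image: []byte{0xff}}}}
	rs := tagRoutes{tagFinder: store} // implicit interface satisfaction

	ctx := context.Background()
	tag, _ := rs.tagFinder.Find(ctx, 1)
	img, _ := rs.tagFinder.GetImage(ctx, 1)
	fmt.Printf("tag %q has %d image byte(s)\n", tag.Name, len(img))
}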

View File

@@ -4,12 +4,13 @@ import (
"fmt" "fmt"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scraper"
) )
// marshalScrapedScenes converts ScrapedContent into ScrapedScene. If conversion fails, an // marshalScrapedScenes converts ScrapedContent into ScrapedScene. If conversion fails, an
// error is returned to the caller. // error is returned to the caller.
func marshalScrapedScenes(content []models.ScrapedContent) ([]*models.ScrapedScene, error) { func marshalScrapedScenes(content []scraper.ScrapedContent) ([]*scraper.ScrapedScene, error) {
var ret []*models.ScrapedScene var ret []*scraper.ScrapedScene
for _, c := range content { for _, c := range content {
if c == nil { if c == nil {
// graphql schema requires scenes to be non-nil // graphql schema requires scenes to be non-nil
@@ -17,9 +18,9 @@ func marshalScrapedScenes(content []models.ScrapedContent) ([]*models.ScrapedSce
} }
switch s := c.(type) { switch s := c.(type) {
case *models.ScrapedScene: case *scraper.ScrapedScene:
ret = append(ret, s) ret = append(ret, s)
case models.ScrapedScene: case scraper.ScrapedScene:
ret = append(ret, &s) ret = append(ret, &s)
default: default:
return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedScene", models.ErrConversion) return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedScene", models.ErrConversion)
@@ -31,7 +32,7 @@ func marshalScrapedScenes(content []models.ScrapedContent) ([]*models.ScrapedSce
// marshalScrapedPerformers converts ScrapedContent into ScrapedPerformer. If conversion // marshalScrapedPerformers converts ScrapedContent into ScrapedPerformer. If conversion
// fails, an error is returned to the caller. // fails, an error is returned to the caller.
func marshalScrapedPerformers(content []models.ScrapedContent) ([]*models.ScrapedPerformer, error) { func marshalScrapedPerformers(content []scraper.ScrapedContent) ([]*models.ScrapedPerformer, error) {
var ret []*models.ScrapedPerformer var ret []*models.ScrapedPerformer
for _, c := range content { for _, c := range content {
if c == nil { if c == nil {
@@ -54,8 +55,8 @@ func marshalScrapedPerformers(content []models.ScrapedContent) ([]*models.Scrape
// marshalScrapedGalleries converts ScrapedContent into ScrapedGallery. If // marshalScrapedGalleries converts ScrapedContent into ScrapedGallery. If
// conversion fails, an error is returned. // conversion fails, an error is returned.
func marshalScrapedGalleries(content []models.ScrapedContent) ([]*models.ScrapedGallery, error) { func marshalScrapedGalleries(content []scraper.ScrapedContent) ([]*scraper.ScrapedGallery, error) {
var ret []*models.ScrapedGallery var ret []*scraper.ScrapedGallery
for _, c := range content { for _, c := range content {
if c == nil { if c == nil {
// graphql schema requires galleries to be non-nil // graphql schema requires galleries to be non-nil
@@ -63,9 +64,9 @@ func marshalScrapedGalleries(content []models.ScrapedContent) ([]*models.Scraped
} }
switch g := c.(type) { switch g := c.(type) {
case *models.ScrapedGallery: case *scraper.ScrapedGallery:
ret = append(ret, g) ret = append(ret, g)
case models.ScrapedGallery: case scraper.ScrapedGallery:
ret = append(ret, &g) ret = append(ret, &g)
default: default:
return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedGallery", models.ErrConversion) return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedGallery", models.ErrConversion)
@@ -77,7 +78,7 @@ func marshalScrapedGalleries(content []models.ScrapedContent) ([]*models.Scraped
// marshalScrapedMovies converts ScrapedContent into ScrapedMovie. If conversion // marshalScrapedMovies converts ScrapedContent into ScrapedMovie. If conversion
// fails, an error is returned. // fails, an error is returned.
func marshalScrapedMovies(content []models.ScrapedContent) ([]*models.ScrapedMovie, error) { func marshalScrapedMovies(content []scraper.ScrapedContent) ([]*models.ScrapedMovie, error) {
var ret []*models.ScrapedMovie var ret []*models.ScrapedMovie
for _, c := range content { for _, c := range content {
if c == nil { if c == nil {
@@ -99,8 +100,8 @@ func marshalScrapedMovies(content []models.ScrapedContent) ([]*models.ScrapedMov
} }
// marshalScrapedPerformer will marshal a single performer // marshalScrapedPerformer will marshal a single performer
func marshalScrapedPerformer(content models.ScrapedContent) (*models.ScrapedPerformer, error) { func marshalScrapedPerformer(content scraper.ScrapedContent) (*models.ScrapedPerformer, error) {
p, err := marshalScrapedPerformers([]models.ScrapedContent{content}) p, err := marshalScrapedPerformers([]scraper.ScrapedContent{content})
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -109,8 +110,8 @@ func marshalScrapedPerformer(content models.ScrapedContent) (*models.ScrapedPerf
} }
// marshalScrapedScene will marshal a single scraped scene // marshalScrapedScene will marshal a single scraped scene
func marshalScrapedScene(content models.ScrapedContent) (*models.ScrapedScene, error) { func marshalScrapedScene(content scraper.ScrapedContent) (*scraper.ScrapedScene, error) {
s, err := marshalScrapedScenes([]models.ScrapedContent{content}) s, err := marshalScrapedScenes([]scraper.ScrapedContent{content})
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -119,8 +120,8 @@ func marshalScrapedScene(content models.ScrapedContent) (*models.ScrapedScene, e
} }
// marshalScrapedGallery will marshal a single scraped gallery // marshalScrapedGallery will marshal a single scraped gallery
func marshalScrapedGallery(content models.ScrapedContent) (*models.ScrapedGallery, error) { func marshalScrapedGallery(content scraper.ScrapedContent) (*scraper.ScrapedGallery, error) {
g, err := marshalScrapedGalleries([]models.ScrapedContent{content}) g, err := marshalScrapedGalleries([]scraper.ScrapedContent{content})
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -129,8 +130,8 @@ func marshalScrapedGallery(content models.ScrapedContent) (*models.ScrapedGaller
} }
// marshalScrapedMovie will marshal a single scraped movie // marshalScrapedMovie will marshal a single scraped movie
func marshalScrapedMovie(content models.ScrapedContent) (*models.ScrapedMovie, error) { func marshalScrapedMovie(content scraper.ScrapedContent) (*models.ScrapedMovie, error) {
m, err := marshalScrapedMovies([]models.ScrapedContent{content}) m, err := marshalScrapedMovies([]scraper.ScrapedContent{content})
if err != nil { if err != nil {
return nil, err return nil, err
} }
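
The marshalScraped* helpers above all share one shape: walk the scraper's opaque ScrapedContent slice and narrow each element with a type switch that accepts both pointer and value forms, failing with models.ErrConversion otherwise. A minimal standalone sketch of that pattern, using simplified stand-in types rather than the real scraper and models packages:

package main

import (
	"errors"
	"fmt"
)

// Simplified stand-ins for scraper.ScrapedContent and scraper.ScrapedScene.
type ScrapedContent interface{}

type ScrapedScene struct{ Title string }

var ErrConversion = errors.New("conversion error")

// marshalScenes narrows ScrapedContent values to *ScrapedScene, accepting
// both pointer and value forms, and failing loudly on anything else.
func marshalScenes(content []ScrapedContent) ([]*ScrapedScene, error) {
	var ret []*ScrapedScene
	for _, c := range content {
		if c == nil {
			continue // this sketch simply skips nil entries
		}
		switch s := c.(type) {
		case *ScrapedScene:
			ret = append(ret, s)
		case ScrapedScene:
			ret = append(ret, &s)
		default:
			return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedScene", ErrConversion)
		}
	}
	return ret, nil
}

func main() {
	scenes, err := marshalScenes([]ScrapedContent{&ScrapedScene{Title: "a"}, ScrapedScene{Title: "b"}})
	fmt.Println(len(scenes), err) // 2 <nil>
}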


@@ -26,11 +26,11 @@ import (
"github.com/go-chi/httplog" "github.com/go-chi/httplog"
"github.com/rs/cors" "github.com/rs/cors"
"github.com/stashapp/stash/internal/api/loaders"
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/ui" "github.com/stashapp/stash/ui"
) )
@@ -74,14 +74,29 @@ func Start() error {
return errors.New(message) return errors.New(message)
} }
txnManager := manager.GetInstance().TxnManager txnManager := manager.GetInstance().Repository
dataloaders := loaders.Middleware{
DatabaseProvider: txnManager,
Repository: txnManager,
}
r.Use(dataloaders.Middleware)
pluginCache := manager.GetInstance().PluginCache pluginCache := manager.GetInstance().PluginCache
sceneService := manager.GetInstance().SceneService
imageService := manager.GetInstance().ImageService
galleryService := manager.GetInstance().GalleryService
resolver := &Resolver{ resolver := &Resolver{
txnManager: txnManager, txnManager: txnManager,
repository: txnManager,
sceneService: sceneService,
imageService: imageService,
galleryService: galleryService,
hookExecutor: pluginCache, hookExecutor: pluginCache,
} }
gqlSrv := gqlHandler.New(models.NewExecutableSchema(models.Config{Resolvers: resolver})) gqlSrv := gqlHandler.New(NewExecutableSchema(Config{Resolvers: resolver}))
gqlSrv.SetRecoverFunc(recoverFunc) gqlSrv.SetRecoverFunc(recoverFunc)
gqlSrv.AddTransport(gqlTransport.Websocket{ gqlSrv.AddTransport(gqlTransport.Websocket{
Upgrader: websocket.Upgrader{ Upgrader: websocket.Upgrader{
@@ -120,21 +135,32 @@ func Start() error {
r.Mount("/performer", performerRoutes{ r.Mount("/performer", performerRoutes{
txnManager: txnManager, txnManager: txnManager,
performerFinder: txnManager.Performer,
}.Routes()) }.Routes())
r.Mount("/scene", sceneRoutes{ r.Mount("/scene", sceneRoutes{
txnManager: txnManager, txnManager: txnManager,
sceneFinder: txnManager.Scene,
fileFinder: txnManager.File,
captionFinder: txnManager.File,
sceneMarkerFinder: txnManager.SceneMarker,
tagFinder: txnManager.Tag,
}.Routes()) }.Routes())
r.Mount("/image", imageRoutes{ r.Mount("/image", imageRoutes{
txnManager: txnManager, txnManager: txnManager,
imageFinder: txnManager.Image,
fileFinder: txnManager.File,
}.Routes()) }.Routes())
r.Mount("/studio", studioRoutes{ r.Mount("/studio", studioRoutes{
txnManager: txnManager, txnManager: txnManager,
studioFinder: txnManager.Studio,
}.Routes()) }.Routes())
r.Mount("/movie", movieRoutes{ r.Mount("/movie", movieRoutes{
txnManager: txnManager, txnManager: txnManager,
movieFinder: txnManager.Movie,
}.Routes()) }.Routes())
r.Mount("/tag", tagRoutes{ r.Mount("/tag", tagRoutes{
txnManager: txnManager, txnManager: txnManager,
tagFinder: txnManager.Tag,
}.Routes()) }.Routes())
r.Mount("/downloads", downloadsRoutes{}.Routes()) r.Mount("/downloads", downloadsRoutes{}.Routes())


@@ -1,6 +1,12 @@
package api package api
import "math" import (
"fmt"
"math"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
// An enum https://golang.org/ref/spec#Iota // An enum https://golang.org/ref/spec#Iota
const ( const (
@@ -17,3 +23,41 @@ func handleFloat64(v float64) *float64 {
return &v return &v
} }
func handleFloat64Value(v float64) float64 {
if math.IsInf(v, 0) || math.IsNaN(v) {
return 0
}
return v
}
func translateUpdateIDs(strIDs []string, mode models.RelationshipUpdateMode) (*models.UpdateIDs, error) {
ids, err := stringslice.StringSliceToIntSlice(strIDs)
if err != nil {
return nil, fmt.Errorf("converting ids [%v]: %w", strIDs, err)
}
return &models.UpdateIDs{
IDs: ids,
Mode: mode,
}, nil
}
func translateSceneMovieIDs(input BulkUpdateIds) (*models.UpdateMovieIDs, error) {
ids, err := stringslice.StringSliceToIntSlice(input.Ids)
if err != nil {
return nil, fmt.Errorf("converting ids [%v]: %w", input.Ids, err)
}
ret := &models.UpdateMovieIDs{
Mode: input.Mode,
}
for _, id := range ids {
ret.Movies = append(ret.Movies, models.MoviesScenes{
MovieID: id,
})
}
return ret, nil
}
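
translateUpdateIDs and translateSceneMovieIDs adapt GraphQL bulk-edit input (string IDs plus an update mode) into the typed relationship updates consumed by the new *Partial structs. A standalone sketch of the same conversion, with simplified stand-ins for the models types:

package main

import (
	"fmt"
	"strconv"
)

// Simplified stand-ins for models.RelationshipUpdateMode and models.UpdateIDs.
type RelationshipUpdateMode string

const (
	RelationshipUpdateModeAdd RelationshipUpdateMode = "ADD"
	RelationshipUpdateModeSet RelationshipUpdateMode = "SET"
)

type UpdateIDs struct {
	IDs  []int
	Mode RelationshipUpdateMode
}

// translateUpdateIDs converts GraphQL string IDs plus a mode into a typed
// relationship update, failing on any non-numeric ID.
func translateUpdateIDs(strIDs []string, mode RelationshipUpdateMode) (*UpdateIDs, error) {
	ids := make([]int, 0, len(strIDs))
	for _, s := range strIDs {
		id, err := strconv.Atoi(s)
		if err != nil {
			return nil, fmt.Errorf("converting ids %v: %w", strIDs, err)
		}
		ids = append(ids, id)
	}
	return &UpdateIDs{IDs: ids, Mode: mode}, nil
}

func main() {
	u, err := translateUpdateIDs([]string{"3", "5"}, RelationshipUpdateModeAdd)
	fmt.Println(u, err) // &{[3 5] ADD} <nil>
}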


@@ -1,8 +1,9 @@
package urlbuilders package urlbuilders
import ( import (
"github.com/stashapp/stash/pkg/models"
"strconv" "strconv"
"github.com/stashapp/stash/pkg/models"
) )
type ImageURLBuilder struct { type ImageURLBuilder struct {
@@ -15,7 +16,7 @@ func NewImageURLBuilder(baseURL string, image *models.Image) ImageURLBuilder {
return ImageURLBuilder{ return ImageURLBuilder{
BaseURL: baseURL, BaseURL: baseURL,
ImageID: strconv.Itoa(image.ID), ImageID: strconv.Itoa(image.ID),
UpdatedAt: strconv.FormatInt(image.UpdatedAt.Timestamp.Unix(), 10), UpdatedAt: strconv.FormatInt(image.UpdatedAt.Unix(), 10),
} }
} }
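
With image.UpdatedAt now a plain time value rather than a nullable timestamp wrapper, the URL builder can call Unix() on it directly; the resulting value effectively cache-busts image URLs whenever the row changes. A small sketch of that idea (the path and query parameter here are simplified, not the exact stash route):

package main

import (
	"fmt"
	"strconv"
	"time"
)

// buildImageURL appends the image's last-modified time so browsers refetch
// the thumbnail whenever the image row changes.
func buildImageURL(baseURL string, imageID int, updatedAt time.Time) string {
	return fmt.Sprintf("%s/image/%d/thumbnail?t=%s",
		baseURL, imageID, strconv.FormatInt(updatedAt.Unix(), 10))
}

func main() {
	fmt.Println(buildImageURL("http://localhost:9999", 42, time.Unix(1663000000, 0)))
}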


@@ -1,55 +1,99 @@
package autotag package autotag
import ( import (
"context"
"github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/match"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/intslice"
) )
type GalleryPerformerUpdater interface {
models.PerformerIDLoader
gallery.PartialUpdater
}
type GalleryTagUpdater interface {
models.TagIDLoader
gallery.PartialUpdater
}
func getGalleryFileTagger(s *models.Gallery, cache *match.Cache) tagger { func getGalleryFileTagger(s *models.Gallery, cache *match.Cache) tagger {
var path string
if s.Path != "" {
path = s.Path
}
// only trim the extension if gallery is file-based // only trim the extension if gallery is file-based
trimExt := s.Zip trimExt := s.PrimaryFileID != nil
return tagger{ return tagger{
ID: s.ID, ID: s.ID,
Type: "gallery", Type: "gallery",
Name: s.GetTitle(), Name: s.DisplayName(),
Path: s.Path.String, Path: path,
trimExt: trimExt, trimExt: trimExt,
cache: cache, cache: cache,
} }
} }
// GalleryPerformers tags the provided gallery with performers whose name matches the gallery's path. // GalleryPerformers tags the provided gallery with performers whose name matches the gallery's path.
func GalleryPerformers(s *models.Gallery, rw models.GalleryReaderWriter, performerReader models.PerformerReader, cache *match.Cache) error { func GalleryPerformers(ctx context.Context, s *models.Gallery, rw GalleryPerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
t := getGalleryFileTagger(s, cache) t := getGalleryFileTagger(s, cache)
return t.tagPerformers(performerReader, func(subjectID, otherID int) (bool, error) { return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) {
return gallery.AddPerformer(rw, subjectID, otherID) if err := s.LoadPerformerIDs(ctx, rw); err != nil {
return false, err
}
existing := s.PerformerIDs.List()
if intslice.IntInclude(existing, otherID) {
return false, nil
}
if err := gallery.AddPerformer(ctx, rw, s, otherID); err != nil {
return false, err
}
return true, nil
}) })
} }
// GalleryStudios tags the provided gallery with the first studio whose name matches the gallery's path. // GalleryStudios tags the provided gallery with the first studio whose name matches the gallery's path.
// //
// Gallerys will not be tagged if studio is already set. // Gallerys will not be tagged if studio is already set.
func GalleryStudios(s *models.Gallery, rw models.GalleryReaderWriter, studioReader models.StudioReader, cache *match.Cache) error { func GalleryStudios(ctx context.Context, s *models.Gallery, rw GalleryFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error {
if s.StudioID.Valid { if s.StudioID != nil {
// don't modify // don't modify
return nil return nil
} }
t := getGalleryFileTagger(s, cache) t := getGalleryFileTagger(s, cache)
return t.tagStudios(studioReader, func(subjectID, otherID int) (bool, error) { return t.tagStudios(ctx, studioReader, func(subjectID, otherID int) (bool, error) {
return addGalleryStudio(rw, subjectID, otherID) return addGalleryStudio(ctx, rw, s, otherID)
}) })
} }
// GalleryTags tags the provided gallery with tags whose name matches the gallery's path. // GalleryTags tags the provided gallery with tags whose name matches the gallery's path.
func GalleryTags(s *models.Gallery, rw models.GalleryReaderWriter, tagReader models.TagReader, cache *match.Cache) error { func GalleryTags(ctx context.Context, s *models.Gallery, rw GalleryTagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
t := getGalleryFileTagger(s, cache) t := getGalleryFileTagger(s, cache)
return t.tagTags(tagReader, func(subjectID, otherID int) (bool, error) { return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) {
return gallery.AddTag(rw, subjectID, otherID) if err := s.LoadTagIDs(ctx, rw); err != nil {
return false, err
}
existing := s.TagIDs.List()
if intslice.IntInclude(existing, otherID) {
return false, nil
}
if err := gallery.AddTag(ctx, rw, s, otherID); err != nil {
return false, err
}
return true, nil
}) })
} }
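
GalleryPerformers and GalleryTags now pass load-check-add callbacks: lazily load the related IDs, return false if the ID is already attached, otherwise issue the add and report a change. The image and scene autotaggers below follow the same shape. A compact standalone sketch of the pattern, with stand-in types in place of models.Gallery and the repository interfaces:

package main

import (
	"context"
	"fmt"
)

// Stand-ins for the relationship loading and partial-update pieces.
type Gallery struct {
	ID           int
	performerIDs []int
	loaded       bool
}

type PerformerIDLoader interface {
	GetPerformerIDs(ctx context.Context, galleryID int) ([]int, error)
}

func (g *Gallery) LoadPerformerIDs(ctx context.Context, l PerformerIDLoader) error {
	if g.loaded {
		return nil
	}
	ids, err := l.GetPerformerIDs(ctx, g.ID)
	if err != nil {
		return err
	}
	g.performerIDs, g.loaded = ids, true
	return nil
}

type repo struct{ added [][2]int }

func (r *repo) GetPerformerIDs(ctx context.Context, galleryID int) ([]int, error) {
	return []int{7}, nil
}

func (r *repo) AddPerformer(ctx context.Context, g *Gallery, performerID int) error {
	r.added = append(r.added, [2]int{g.ID, performerID})
	return nil
}

// addIfMissing mirrors the autotag callback: load, check membership, add.
// It reports whether a change was made so the tagger can log it.
func addIfMissing(ctx context.Context, r *repo, g *Gallery, performerID int) (bool, error) {
	if err := g.LoadPerformerIDs(ctx, r); err != nil {
		return false, err
	}
	for _, id := range g.performerIDs {
		if id == performerID {
			return false, nil
		}
	}
	if err := r.AddPerformer(ctx, g, performerID); err != nil {
		return false, err
	}
	return true, nil
}

func main() {
	r := &repo{}
	changed, err := addIfMissing(context.Background(), r, &Gallery{ID: 1}, 3)
	fmt.Println(changed, err) // true <nil>
}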


@@ -1,6 +1,7 @@
package autotag package autotag
import ( import (
"context"
"testing" "testing"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
@@ -11,6 +12,8 @@ import (
const galleryExt = "zip" const galleryExt = "zip"
var testCtx = context.Background()
func TestGalleryPerformers(t *testing.T) { func TestGalleryPerformers(t *testing.T) {
t.Parallel() t.Parallel()
@@ -37,19 +40,24 @@ func TestGalleryPerformers(t *testing.T) {
mockPerformerReader := &mocks.PerformerReaderWriter{} mockPerformerReader := &mocks.PerformerReaderWriter{}
mockGalleryReader := &mocks.GalleryReaderWriter{} mockGalleryReader := &mocks.GalleryReaderWriter{}
mockPerformerReader.On("Query", mock.Anything, mock.Anything).Return(nil, 0, nil) mockPerformerReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil)
mockPerformerReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once() mockPerformerReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once()
if test.Matches { if test.Matches {
mockGalleryReader.On("GetPerformerIDs", galleryID).Return(nil, nil).Once() mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
mockGalleryReader.On("UpdatePerformers", galleryID, []int{performerID}).Return(nil).Once() PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
gallery := models.Gallery{ gallery := models.Gallery{
ID: galleryID, ID: galleryID,
Path: models.NullString(test.Path), Path: test.Path,
PerformerIDs: models.NewRelatedIDs([]int{}),
} }
err := GalleryPerformers(&gallery, mockGalleryReader, mockPerformerReader, nil) err := GalleryPerformers(testCtx, &gallery, mockGalleryReader, mockPerformerReader, nil)
assert.Nil(err) assert.Nil(err)
mockPerformerReader.AssertExpectations(t) mockPerformerReader.AssertExpectations(t)
@@ -62,7 +70,7 @@ func TestGalleryStudios(t *testing.T) {
const galleryID = 1 const galleryID = 1
const studioName = "studio name" const studioName = "studio name"
const studioID = 2 var studioID = 2
studio := models.Studio{ studio := models.Studio{
ID: studioID, ID: studioID,
Name: models.NullString(studioName), Name: models.NullString(studioName),
@@ -81,19 +89,17 @@ func TestGalleryStudios(t *testing.T) {
doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockGalleryReader *mocks.GalleryReaderWriter, test pathTestTable) { doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockGalleryReader *mocks.GalleryReaderWriter, test pathTestTable) {
if test.Matches { if test.Matches {
mockGalleryReader.On("Find", galleryID).Return(&models.Gallery{}, nil).Once() expectedStudioID := studioID
expectedStudioID := models.NullInt64(studioID) mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
mockGalleryReader.On("UpdatePartial", models.GalleryPartial{ StudioID: models.NewOptionalInt(expectedStudioID),
ID: galleryID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once() }).Return(nil, nil).Once()
} }
gallery := models.Gallery{ gallery := models.Gallery{
ID: galleryID, ID: galleryID,
Path: models.NullString(test.Path), Path: test.Path,
} }
err := GalleryStudios(&gallery, mockGalleryReader, mockStudioReader, nil) err := GalleryStudios(testCtx, &gallery, mockGalleryReader, mockStudioReader, nil)
assert.Nil(err) assert.Nil(err)
mockStudioReader.AssertExpectations(t) mockStudioReader.AssertExpectations(t)
@@ -104,9 +110,9 @@ func TestGalleryStudios(t *testing.T) {
mockStudioReader := &mocks.StudioReaderWriter{} mockStudioReader := &mocks.StudioReaderWriter{}
mockGalleryReader := &mocks.GalleryReaderWriter{} mockGalleryReader := &mocks.GalleryReaderWriter{}
mockStudioReader.On("Query", mock.Anything, mock.Anything).Return(nil, 0, nil) mockStudioReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil)
mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once() mockStudioReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()
mockStudioReader.On("GetAliases", mock.Anything).Return([]string{}, nil).Maybe() mockStudioReader.On("GetAliases", testCtx, mock.Anything).Return([]string{}, nil).Maybe()
doTest(mockStudioReader, mockGalleryReader, test) doTest(mockStudioReader, mockGalleryReader, test)
} }
@@ -119,12 +125,12 @@ func TestGalleryStudios(t *testing.T) {
mockStudioReader := &mocks.StudioReaderWriter{} mockStudioReader := &mocks.StudioReaderWriter{}
mockGalleryReader := &mocks.GalleryReaderWriter{} mockGalleryReader := &mocks.GalleryReaderWriter{}
mockStudioReader.On("Query", mock.Anything, mock.Anything).Return(nil, 0, nil) mockStudioReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil)
mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once() mockStudioReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()
mockStudioReader.On("GetAliases", studioID).Return([]string{ mockStudioReader.On("GetAliases", testCtx, studioID).Return([]string{
studioName, studioName,
}, nil).Once() }, nil).Once()
mockStudioReader.On("GetAliases", reversedStudioID).Return([]string{}, nil).Once() mockStudioReader.On("GetAliases", testCtx, reversedStudioID).Return([]string{}, nil).Once()
doTest(mockStudioReader, mockGalleryReader, test) doTest(mockStudioReader, mockGalleryReader, test)
} }
@@ -154,15 +160,20 @@ func TestGalleryTags(t *testing.T) {
doTest := func(mockTagReader *mocks.TagReaderWriter, mockGalleryReader *mocks.GalleryReaderWriter, test pathTestTable) { doTest := func(mockTagReader *mocks.TagReaderWriter, mockGalleryReader *mocks.GalleryReaderWriter, test pathTestTable) {
if test.Matches { if test.Matches {
mockGalleryReader.On("GetTagIDs", galleryID).Return(nil, nil).Once() mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
mockGalleryReader.On("UpdateTags", galleryID, []int{tagID}).Return(nil).Once() TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
gallery := models.Gallery{ gallery := models.Gallery{
ID: galleryID, ID: galleryID,
Path: models.NullString(test.Path), Path: test.Path,
TagIDs: models.NewRelatedIDs([]int{}),
} }
err := GalleryTags(&gallery, mockGalleryReader, mockTagReader, nil) err := GalleryTags(testCtx, &gallery, mockGalleryReader, mockTagReader, nil)
assert.Nil(err) assert.Nil(err)
mockTagReader.AssertExpectations(t) mockTagReader.AssertExpectations(t)
@@ -173,9 +184,9 @@ func TestGalleryTags(t *testing.T) {
mockTagReader := &mocks.TagReaderWriter{} mockTagReader := &mocks.TagReaderWriter{}
mockGalleryReader := &mocks.GalleryReaderWriter{} mockGalleryReader := &mocks.GalleryReaderWriter{}
mockTagReader.On("Query", mock.Anything, mock.Anything).Return(nil, 0, nil) mockTagReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil)
mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once() mockTagReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()
mockTagReader.On("GetAliases", mock.Anything).Return([]string{}, nil).Maybe() mockTagReader.On("GetAliases", testCtx, mock.Anything).Return([]string{}, nil).Maybe()
doTest(mockTagReader, mockGalleryReader, test) doTest(mockTagReader, mockGalleryReader, test)
} }
@@ -187,12 +198,12 @@ func TestGalleryTags(t *testing.T) {
mockTagReader := &mocks.TagReaderWriter{} mockTagReader := &mocks.TagReaderWriter{}
mockGalleryReader := &mocks.GalleryReaderWriter{} mockGalleryReader := &mocks.GalleryReaderWriter{}
mockTagReader.On("Query", mock.Anything, mock.Anything).Return(nil, 0, nil) mockTagReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil)
mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once() mockTagReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()
mockTagReader.On("GetAliases", tagID).Return([]string{ mockTagReader.On("GetAliases", testCtx, tagID).Return([]string{
tagName, tagName,
}, nil).Once() }, nil).Once()
mockTagReader.On("GetAliases", reversedTagID).Return([]string{}, nil).Once() mockTagReader.On("GetAliases", testCtx, reversedTagID).Return([]string{}, nil).Once()
doTest(mockTagReader, mockGalleryReader, test) doTest(mockTagReader, mockGalleryReader, test)
} }


@@ -1,51 +1,90 @@
package autotag package autotag
import ( import (
"context"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/match"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/intslice"
) )
type ImagePerformerUpdater interface {
models.PerformerIDLoader
image.PartialUpdater
}
type ImageTagUpdater interface {
models.TagIDLoader
image.PartialUpdater
}
func getImageFileTagger(s *models.Image, cache *match.Cache) tagger { func getImageFileTagger(s *models.Image, cache *match.Cache) tagger {
return tagger{ return tagger{
ID: s.ID, ID: s.ID,
Type: "image", Type: "image",
Name: s.GetTitle(), Name: s.DisplayName(),
Path: s.Path, Path: s.Path,
cache: cache, cache: cache,
} }
} }
// ImagePerformers tags the provided image with performers whose name matches the image's path. // ImagePerformers tags the provided image with performers whose name matches the image's path.
func ImagePerformers(s *models.Image, rw models.ImageReaderWriter, performerReader models.PerformerReader, cache *match.Cache) error { func ImagePerformers(ctx context.Context, s *models.Image, rw ImagePerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
t := getImageFileTagger(s, cache) t := getImageFileTagger(s, cache)
return t.tagPerformers(performerReader, func(subjectID, otherID int) (bool, error) { return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) {
return image.AddPerformer(rw, subjectID, otherID) if err := s.LoadPerformerIDs(ctx, rw); err != nil {
return false, err
}
existing := s.PerformerIDs.List()
if intslice.IntInclude(existing, otherID) {
return false, nil
}
if err := image.AddPerformer(ctx, rw, s, otherID); err != nil {
return false, err
}
return true, nil
}) })
} }
// ImageStudios tags the provided image with the first studio whose name matches the image's path. // ImageStudios tags the provided image with the first studio whose name matches the image's path.
// //
// Images will not be tagged if studio is already set. // Images will not be tagged if studio is already set.
func ImageStudios(s *models.Image, rw models.ImageReaderWriter, studioReader models.StudioReader, cache *match.Cache) error { func ImageStudios(ctx context.Context, s *models.Image, rw ImageFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error {
if s.StudioID.Valid { if s.StudioID != nil {
// don't modify // don't modify
return nil return nil
} }
t := getImageFileTagger(s, cache) t := getImageFileTagger(s, cache)
return t.tagStudios(studioReader, func(subjectID, otherID int) (bool, error) { return t.tagStudios(ctx, studioReader, func(subjectID, otherID int) (bool, error) {
return addImageStudio(rw, subjectID, otherID) return addImageStudio(ctx, rw, s, otherID)
}) })
} }
// ImageTags tags the provided image with tags whose name matches the image's path. // ImageTags tags the provided image with tags whose name matches the image's path.
func ImageTags(s *models.Image, rw models.ImageReaderWriter, tagReader models.TagReader, cache *match.Cache) error { func ImageTags(ctx context.Context, s *models.Image, rw ImageTagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
t := getImageFileTagger(s, cache) t := getImageFileTagger(s, cache)
return t.tagTags(tagReader, func(subjectID, otherID int) (bool, error) { return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) {
return image.AddTag(rw, subjectID, otherID) if err := s.LoadTagIDs(ctx, rw); err != nil {
return false, err
}
existing := s.TagIDs.List()
if intslice.IntInclude(existing, otherID) {
return false, nil
}
if err := image.AddTag(ctx, rw, s, otherID); err != nil {
return false, err
}
return true, nil
}) })
} }


@@ -37,19 +37,24 @@ func TestImagePerformers(t *testing.T) {
mockPerformerReader := &mocks.PerformerReaderWriter{} mockPerformerReader := &mocks.PerformerReaderWriter{}
mockImageReader := &mocks.ImageReaderWriter{} mockImageReader := &mocks.ImageReaderWriter{}
mockPerformerReader.On("Query", mock.Anything, mock.Anything).Return(nil, 0, nil) mockPerformerReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil)
mockPerformerReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once() mockPerformerReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once()
if test.Matches { if test.Matches {
mockImageReader.On("GetPerformerIDs", imageID).Return(nil, nil).Once() mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
mockImageReader.On("UpdatePerformers", imageID, []int{performerID}).Return(nil).Once() PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
image := models.Image{ image := models.Image{
ID: imageID, ID: imageID,
Path: test.Path, Path: test.Path,
PerformerIDs: models.NewRelatedIDs([]int{}),
} }
err := ImagePerformers(&image, mockImageReader, mockPerformerReader, nil) err := ImagePerformers(testCtx, &image, mockImageReader, mockPerformerReader, nil)
assert.Nil(err) assert.Nil(err)
mockPerformerReader.AssertExpectations(t) mockPerformerReader.AssertExpectations(t)
@@ -62,7 +67,7 @@ func TestImageStudios(t *testing.T) {
const imageID = 1 const imageID = 1
const studioName = "studio name" const studioName = "studio name"
const studioID = 2 var studioID = 2
studio := models.Studio{ studio := models.Studio{
ID: studioID, ID: studioID,
Name: models.NullString(studioName), Name: models.NullString(studioName),
@@ -81,11 +86,9 @@ func TestImageStudios(t *testing.T) {
doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockImageReader *mocks.ImageReaderWriter, test pathTestTable) { doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockImageReader *mocks.ImageReaderWriter, test pathTestTable) {
if test.Matches { if test.Matches {
mockImageReader.On("Find", imageID).Return(&models.Image{}, nil).Once() expectedStudioID := studioID
expectedStudioID := models.NullInt64(studioID) mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
mockImageReader.On("Update", models.ImagePartial{ StudioID: models.NewOptionalInt(expectedStudioID),
ID: imageID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once() }).Return(nil, nil).Once()
} }
@@ -93,7 +96,7 @@ func TestImageStudios(t *testing.T) {
ID: imageID, ID: imageID,
Path: test.Path, Path: test.Path,
} }
err := ImageStudios(&image, mockImageReader, mockStudioReader, nil) err := ImageStudios(testCtx, &image, mockImageReader, mockStudioReader, nil)
assert.Nil(err) assert.Nil(err)
mockStudioReader.AssertExpectations(t) mockStudioReader.AssertExpectations(t)
@@ -104,9 +107,9 @@ func TestImageStudios(t *testing.T) {
mockStudioReader := &mocks.StudioReaderWriter{} mockStudioReader := &mocks.StudioReaderWriter{}
mockImageReader := &mocks.ImageReaderWriter{} mockImageReader := &mocks.ImageReaderWriter{}
mockStudioReader.On("Query", mock.Anything, mock.Anything).Return(nil, 0, nil) mockStudioReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil)
mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once() mockStudioReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()
mockStudioReader.On("GetAliases", mock.Anything).Return([]string{}, nil).Maybe() mockStudioReader.On("GetAliases", testCtx, mock.Anything).Return([]string{}, nil).Maybe()
doTest(mockStudioReader, mockImageReader, test) doTest(mockStudioReader, mockImageReader, test)
} }
@@ -119,12 +122,12 @@ func TestImageStudios(t *testing.T) {
mockStudioReader := &mocks.StudioReaderWriter{} mockStudioReader := &mocks.StudioReaderWriter{}
mockImageReader := &mocks.ImageReaderWriter{} mockImageReader := &mocks.ImageReaderWriter{}
mockStudioReader.On("Query", mock.Anything, mock.Anything).Return(nil, 0, nil) mockStudioReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil)
mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once() mockStudioReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()
mockStudioReader.On("GetAliases", studioID).Return([]string{ mockStudioReader.On("GetAliases", testCtx, studioID).Return([]string{
studioName, studioName,
}, nil).Once() }, nil).Once()
mockStudioReader.On("GetAliases", reversedStudioID).Return([]string{}, nil).Once() mockStudioReader.On("GetAliases", testCtx, reversedStudioID).Return([]string{}, nil).Once()
doTest(mockStudioReader, mockImageReader, test) doTest(mockStudioReader, mockImageReader, test)
} }
@@ -154,15 +157,20 @@ func TestImageTags(t *testing.T) {
doTest := func(mockTagReader *mocks.TagReaderWriter, mockImageReader *mocks.ImageReaderWriter, test pathTestTable) { doTest := func(mockTagReader *mocks.TagReaderWriter, mockImageReader *mocks.ImageReaderWriter, test pathTestTable) {
if test.Matches { if test.Matches {
mockImageReader.On("GetTagIDs", imageID).Return(nil, nil).Once() mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
mockImageReader.On("UpdateTags", imageID, []int{tagID}).Return(nil).Once() TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
image := models.Image{ image := models.Image{
ID: imageID, ID: imageID,
Path: test.Path, Path: test.Path,
TagIDs: models.NewRelatedIDs([]int{}),
} }
err := ImageTags(&image, mockImageReader, mockTagReader, nil) err := ImageTags(testCtx, &image, mockImageReader, mockTagReader, nil)
assert.Nil(err) assert.Nil(err)
mockTagReader.AssertExpectations(t) mockTagReader.AssertExpectations(t)
@@ -173,9 +181,9 @@ func TestImageTags(t *testing.T) {
mockTagReader := &mocks.TagReaderWriter{} mockTagReader := &mocks.TagReaderWriter{}
mockImageReader := &mocks.ImageReaderWriter{} mockImageReader := &mocks.ImageReaderWriter{}
mockTagReader.On("Query", mock.Anything, mock.Anything).Return(nil, 0, nil) mockTagReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil)
mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once() mockTagReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()
mockTagReader.On("GetAliases", mock.Anything).Return([]string{}, nil).Maybe() mockTagReader.On("GetAliases", testCtx, mock.Anything).Return([]string{}, nil).Maybe()
doTest(mockTagReader, mockImageReader, test) doTest(mockTagReader, mockImageReader, test)
} }
@@ -188,12 +196,12 @@ func TestImageTags(t *testing.T) {
mockTagReader := &mocks.TagReaderWriter{} mockTagReader := &mocks.TagReaderWriter{}
mockImageReader := &mocks.ImageReaderWriter{} mockImageReader := &mocks.ImageReaderWriter{}
mockTagReader.On("Query", mock.Anything, mock.Anything).Return(nil, 0, nil) mockTagReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil)
mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once() mockTagReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()
mockTagReader.On("GetAliases", tagID).Return([]string{ mockTagReader.On("GetAliases", testCtx, tagID).Return([]string{
tagName, tagName,
}, nil).Once() }, nil).Once()
mockTagReader.On("GetAliases", reversedTagID).Return([]string{}, nil).Once() mockTagReader.On("GetAliases", testCtx, reversedTagID).Return([]string{}, nil).Once()
doTest(mockTagReader, mockImageReader, test) doTest(mockTagReader, mockImageReader, test)
} }
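
The mock expectations in these tests change shape accordingly: instead of pairing GetTagIDs/UpdateTags (or the performer equivalents), they expect a single UpdatePartial call and lean on testify's deep-equality argument matching against the exact partial struct. A minimal _test.go-style sketch of that style of assertion, with a stand-in ImagePartial type but the real stretchr/testify/mock API:

package example

import (
	"context"
	"testing"

	"github.com/stretchr/testify/mock"
)

// Stand-in for models.ImagePartial.
type ImagePartial struct {
	TagIDs []int
}

type MockImageWriter struct{ mock.Mock }

func (m *MockImageWriter) UpdatePartial(ctx context.Context, id int, p ImagePartial) error {
	args := m.Called(ctx, id, p)
	return args.Error(0)
}

func TestUpdatePartialExpectation(t *testing.T) {
	ctx := context.Background()
	m := &MockImageWriter{}

	// The expectation only matches if the partial struct is deeply equal,
	// which is what pins down both the IDs and the update mode in the
	// autotag tests above.
	m.On("UpdatePartial", ctx, 1, ImagePartial{TagIDs: []int{5}}).Return(nil).Once()

	if err := m.UpdatePartial(ctx, 1, ImagePartial{TagIDs: []int{5}}); err != nil {
		t.Fatal(err)
	}
	m.AssertExpectations(t)
}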

[File diff suppressed because it is too large]


@@ -1,13 +1,34 @@
package autotag package autotag
import ( import (
"context"
"github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/match"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/sliceutil/intslice"
) )
type SceneQueryPerformerUpdater interface {
scene.Queryer
models.PerformerIDLoader
scene.PartialUpdater
}
type ImageQueryPerformerUpdater interface {
image.Queryer
models.PerformerIDLoader
image.PartialUpdater
}
type GalleryQueryPerformerUpdater interface {
gallery.Queryer
models.PerformerIDLoader
gallery.PartialUpdater
}
func getPerformerTagger(p *models.Performer, cache *match.Cache) tagger { func getPerformerTagger(p *models.Performer, cache *match.Cache) tagger {
return tagger{ return tagger{
ID: p.ID, ID: p.ID,
@@ -18,28 +39,67 @@ func getPerformerTagger(p *models.Performer, cache *match.Cache) tagger {
} }
// PerformerScenes searches for scenes whose path matches the provided performer name and tags the scene with the performer. // PerformerScenes searches for scenes whose path matches the provided performer name and tags the scene with the performer.
func PerformerScenes(p *models.Performer, paths []string, rw models.SceneReaderWriter, cache *match.Cache) error { func PerformerScenes(ctx context.Context, p *models.Performer, paths []string, rw SceneQueryPerformerUpdater, cache *match.Cache) error {
t := getPerformerTagger(p, cache) t := getPerformerTagger(p, cache)
return t.tagScenes(paths, rw, func(subjectID, otherID int) (bool, error) { return t.tagScenes(ctx, paths, rw, func(o *models.Scene) (bool, error) {
return scene.AddPerformer(rw, otherID, subjectID) if err := o.LoadPerformerIDs(ctx, rw); err != nil {
return false, err
}
existing := o.PerformerIDs.List()
if intslice.IntInclude(existing, p.ID) {
return false, nil
}
if err := scene.AddPerformer(ctx, rw, o, p.ID); err != nil {
return false, err
}
return true, nil
}) })
} }
// PerformerImages searches for images whose path matches the provided performer name and tags the image with the performer. // PerformerImages searches for images whose path matches the provided performer name and tags the image with the performer.
func PerformerImages(p *models.Performer, paths []string, rw models.ImageReaderWriter, cache *match.Cache) error { func PerformerImages(ctx context.Context, p *models.Performer, paths []string, rw ImageQueryPerformerUpdater, cache *match.Cache) error {
t := getPerformerTagger(p, cache) t := getPerformerTagger(p, cache)
return t.tagImages(paths, rw, func(subjectID, otherID int) (bool, error) { return t.tagImages(ctx, paths, rw, func(o *models.Image) (bool, error) {
return image.AddPerformer(rw, otherID, subjectID) if err := o.LoadPerformerIDs(ctx, rw); err != nil {
return false, err
}
existing := o.PerformerIDs.List()
if intslice.IntInclude(existing, p.ID) {
return false, nil
}
if err := image.AddPerformer(ctx, rw, o, p.ID); err != nil {
return false, err
}
return true, nil
}) })
} }
// PerformerGalleries searches for galleries whose path matches the provided performer name and tags the gallery with the performer. // PerformerGalleries searches for galleries whose path matches the provided performer name and tags the gallery with the performer.
func PerformerGalleries(p *models.Performer, paths []string, rw models.GalleryReaderWriter, cache *match.Cache) error { func PerformerGalleries(ctx context.Context, p *models.Performer, paths []string, rw GalleryQueryPerformerUpdater, cache *match.Cache) error {
t := getPerformerTagger(p, cache) t := getPerformerTagger(p, cache)
return t.tagGalleries(paths, rw, func(subjectID, otherID int) (bool, error) { return t.tagGalleries(ctx, paths, rw, func(o *models.Gallery) (bool, error) {
return gallery.AddPerformer(rw, otherID, subjectID) if err := o.LoadPerformerIDs(ctx, rw); err != nil {
return false, err
}
existing := o.PerformerIDs.List()
if intslice.IntInclude(existing, p.ID) {
return false, nil
}
if err := gallery.AddPerformer(ctx, rw, o, p.ID); err != nil {
return false, err
}
return true, nil
}) })
} }
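
PerformerScenes, PerformerImages and PerformerGalleries now accept small composed interfaces (queryer + ID loader + partial updater) instead of the monolithic ReaderWriter types, which is what lets both the real repository and the test mocks below satisfy them. A rough sketch of why that composition is convenient, using stand-in interfaces rather than the real stash ones:

package main

import (
	"context"
	"fmt"
)

type Scene struct{ ID int }

// Two narrow capabilities...
type SceneQueryer interface {
	FindByPath(ctx context.Context, path string) ([]*Scene, error)
}

type ScenePerformerAdder interface {
	AddPerformer(ctx context.Context, sceneID, performerID int) error
}

// ...composed into exactly what the autotagger needs. Anything implementing
// both (a real repository, or a test mock) satisfies it.
type SceneQueryPerformerUpdater interface {
	SceneQueryer
	ScenePerformerAdder
}

func tagPerformerScenes(ctx context.Context, rw SceneQueryPerformerUpdater, performerID int, path string) error {
	scenes, err := rw.FindByPath(ctx, path)
	if err != nil {
		return err
	}
	for _, s := range scenes {
		if err := rw.AddPerformer(ctx, s.ID, performerID); err != nil {
			return err
		}
	}
	return nil
}

// fakeRepo satisfies the composed interface without any mocking framework.
type fakeRepo struct{}

func (fakeRepo) FindByPath(ctx context.Context, path string) ([]*Scene, error) {
	return []*Scene{{ID: 1}}, nil
}

func (fakeRepo) AddPerformer(ctx context.Context, sceneID, performerID int) error {
	fmt.Println("tag scene", sceneID, "with performer", performerID)
	return nil
}

func main() {
	_ = tagPerformerScenes(context.Background(), fakeRepo{}, 3, "performer name")
}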


@@ -1,6 +1,7 @@
package autotag package autotag
import ( import (
"path/filepath"
"testing" "testing"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
@@ -27,10 +28,14 @@ func TestPerformerScenes(t *testing.T) {
"performer + name", "performer + name",
`(?i)(?:^|_|[^\p{L}\d])performer[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`, `(?i)(?:^|_|[^\p{L}\d])performer[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
}, },
{ }
// trailing backslash tests only work where filepath separator is not backslash
if filepath.Separator != '\\' {
performerNames = append(performerNames, test{
`performer + name\`, `performer + name\`,
`(?i)(?:^|_|[^\p{L}\d])performer[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`, `(?i)(?:^|_|[^\p{L}\d])performer[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
}, })
} }
for _, p := range performerNames { for _, p := range performerNames {
@@ -49,6 +54,7 @@ func testPerformerScenes(t *testing.T, performerName, expectedRegex string) {
scenes = append(scenes, &models.Scene{ scenes = append(scenes, &models.Scene{
ID: i + 1, ID: i + 1,
Path: p, Path: p,
PerformerIDs: models.NewRelatedIDs([]int{}),
}) })
} }
@@ -72,16 +78,20 @@ func testPerformerScenes(t *testing.T, performerName, expectedRegex string) {
PerPage: &perPage, PerPage: &perPage,
} }
mockSceneReader.On("Query", scene.QueryOptions(expectedSceneFilter, expectedFindFilter, false)). mockSceneReader.On("Query", testCtx, scene.QueryOptions(expectedSceneFilter, expectedFindFilter, false)).
Return(mocks.SceneQueryResult(scenes, len(scenes)), nil).Once() Return(mocks.SceneQueryResult(scenes, len(scenes)), nil).Once()
for i := range matchingPaths { for i := range matchingPaths {
sceneID := i + 1 sceneID := i + 1
mockSceneReader.On("GetPerformerIDs", sceneID).Return(nil, nil).Once() mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
mockSceneReader.On("UpdatePerformers", sceneID, []int{performerID}).Return(nil).Once() PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
err := PerformerScenes(&performer, nil, mockSceneReader, nil) err := PerformerScenes(testCtx, &performer, nil, mockSceneReader, nil)
assert := assert.New(t) assert := assert.New(t)
@@ -124,6 +134,7 @@ func testPerformerImages(t *testing.T, performerName, expectedRegex string) {
images = append(images, &models.Image{ images = append(images, &models.Image{
ID: i + 1, ID: i + 1,
Path: p, Path: p,
PerformerIDs: models.NewRelatedIDs([]int{}),
}) })
} }
@@ -147,16 +158,20 @@ func testPerformerImages(t *testing.T, performerName, expectedRegex string) {
PerPage: &perPage, PerPage: &perPage,
} }
mockImageReader.On("Query", image.QueryOptions(expectedImageFilter, expectedFindFilter, false)). mockImageReader.On("Query", testCtx, image.QueryOptions(expectedImageFilter, expectedFindFilter, false)).
Return(mocks.ImageQueryResult(images, len(images)), nil).Once() Return(mocks.ImageQueryResult(images, len(images)), nil).Once()
for i := range matchingPaths { for i := range matchingPaths {
imageID := i + 1 imageID := i + 1
mockImageReader.On("GetPerformerIDs", imageID).Return(nil, nil).Once() mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
mockImageReader.On("UpdatePerformers", imageID, []int{performerID}).Return(nil).Once() PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
err := PerformerImages(&performer, nil, mockImageReader, nil) err := PerformerImages(testCtx, &performer, nil, mockImageReader, nil)
assert := assert.New(t) assert := assert.New(t)
@@ -196,9 +211,11 @@ func testPerformerGalleries(t *testing.T, performerName, expectedRegex string) {
var galleries []*models.Gallery var galleries []*models.Gallery
matchingPaths, falsePaths := generateTestPaths(performerName, galleryExt) matchingPaths, falsePaths := generateTestPaths(performerName, galleryExt)
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
v := p
galleries = append(galleries, &models.Gallery{ galleries = append(galleries, &models.Gallery{
ID: i + 1, ID: i + 1,
Path: models.NullString(p), Path: v,
PerformerIDs: models.NewRelatedIDs([]int{}),
}) })
} }
@@ -222,15 +239,19 @@ func testPerformerGalleries(t *testing.T, performerName, expectedRegex string) {
PerPage: &perPage, PerPage: &perPage,
} }
mockGalleryReader.On("Query", expectedGalleryFilter, expectedFindFilter).Return(galleries, len(galleries), nil).Once() mockGalleryReader.On("Query", testCtx, expectedGalleryFilter, expectedFindFilter).Return(galleries, len(galleries), nil).Once()
for i := range matchingPaths { for i := range matchingPaths {
galleryID := i + 1 galleryID := i + 1
mockGalleryReader.On("GetPerformerIDs", galleryID).Return(nil, nil).Once() mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
mockGalleryReader.On("UpdatePerformers", galleryID, []int{performerID}).Return(nil).Once() PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
err := PerformerGalleries(&performer, nil, mockGalleryReader, nil) err := PerformerGalleries(testCtx, &performer, nil, mockGalleryReader, nil)
assert := assert.New(t) assert := assert.New(t)


@@ -1,51 +1,90 @@
package autotag package autotag
import ( import (
"context"
"github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/match"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/sliceutil/intslice"
) )
type ScenePerformerUpdater interface {
models.PerformerIDLoader
scene.PartialUpdater
}
type SceneTagUpdater interface {
models.TagIDLoader
scene.PartialUpdater
}
func getSceneFileTagger(s *models.Scene, cache *match.Cache) tagger { func getSceneFileTagger(s *models.Scene, cache *match.Cache) tagger {
return tagger{ return tagger{
ID: s.ID, ID: s.ID,
Type: "scene", Type: "scene",
Name: s.GetTitle(), Name: s.DisplayName(),
Path: s.Path, Path: s.Path,
cache: cache, cache: cache,
} }
} }
// ScenePerformers tags the provided scene with performers whose name matches the scene's path. // ScenePerformers tags the provided scene with performers whose name matches the scene's path.
func ScenePerformers(s *models.Scene, rw models.SceneReaderWriter, performerReader models.PerformerReader, cache *match.Cache) error { func ScenePerformers(ctx context.Context, s *models.Scene, rw ScenePerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
t := getSceneFileTagger(s, cache) t := getSceneFileTagger(s, cache)
return t.tagPerformers(performerReader, func(subjectID, otherID int) (bool, error) { return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) {
return scene.AddPerformer(rw, subjectID, otherID) if err := s.LoadPerformerIDs(ctx, rw); err != nil {
return false, err
}
existing := s.PerformerIDs.List()
if intslice.IntInclude(existing, otherID) {
return false, nil
}
if err := scene.AddPerformer(ctx, rw, s, otherID); err != nil {
return false, err
}
return true, nil
}) })
} }
// SceneStudios tags the provided scene with the first studio whose name matches the scene's path. // SceneStudios tags the provided scene with the first studio whose name matches the scene's path.
// //
// Scenes will not be tagged if studio is already set. // Scenes will not be tagged if studio is already set.
func SceneStudios(s *models.Scene, rw models.SceneReaderWriter, studioReader models.StudioReader, cache *match.Cache) error { func SceneStudios(ctx context.Context, s *models.Scene, rw SceneFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error {
if s.StudioID.Valid { if s.StudioID != nil {
// don't modify // don't modify
return nil return nil
} }
t := getSceneFileTagger(s, cache) t := getSceneFileTagger(s, cache)
return t.tagStudios(studioReader, func(subjectID, otherID int) (bool, error) { return t.tagStudios(ctx, studioReader, func(subjectID, otherID int) (bool, error) {
return addSceneStudio(rw, subjectID, otherID) return addSceneStudio(ctx, rw, s, otherID)
}) })
} }
// SceneTags tags the provided scene with tags whose name matches the scene's path. // SceneTags tags the provided scene with tags whose name matches the scene's path.
func SceneTags(s *models.Scene, rw models.SceneReaderWriter, tagReader models.TagReader, cache *match.Cache) error { func SceneTags(ctx context.Context, s *models.Scene, rw SceneTagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
t := getSceneFileTagger(s, cache) t := getSceneFileTagger(s, cache)
return t.tagTags(tagReader, func(subjectID, otherID int) (bool, error) { return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) {
return scene.AddTag(rw, subjectID, otherID) if err := s.LoadTagIDs(ctx, rw); err != nil {
return false, err
}
existing := s.TagIDs.List()
if intslice.IntInclude(existing, otherID) {
return false, nil
}
if err := scene.AddTag(ctx, rw, s, otherID); err != nil {
return false, err
}
return true, nil
}) })
} }


@@ -2,6 +2,7 @@ package autotag
import ( import (
"fmt" "fmt"
"path/filepath"
"strings" "strings"
"testing" "testing"
@@ -33,13 +34,10 @@ func generateNamePatterns(name, separator, ext string) []string {
ret = append(ret, fmt.Sprintf("%s%saaa.%s", name, separator, ext)) ret = append(ret, fmt.Sprintf("%s%saaa.%s", name, separator, ext))
ret = append(ret, fmt.Sprintf("aaa%s%s.%s", separator, name, ext)) ret = append(ret, fmt.Sprintf("aaa%s%s.%s", separator, name, ext))
ret = append(ret, fmt.Sprintf("aaa%s%s%sbbb.%s", separator, name, separator, ext)) ret = append(ret, fmt.Sprintf("aaa%s%s%sbbb.%s", separator, name, separator, ext))
ret = append(ret, fmt.Sprintf("dir/%s%saaa.%s", name, separator, ext)) ret = append(ret, filepath.Join("dir", fmt.Sprintf("%s%saaa.%s", name, separator, ext)))
ret = append(ret, fmt.Sprintf("dir%sdir/%s%saaa.%s", separator, name, separator, ext)) ret = append(ret, filepath.Join(fmt.Sprintf("dir%sdir", separator), fmt.Sprintf("%s%saaa.%s", name, separator, ext)))
ret = append(ret, fmt.Sprintf("dir\\%s%saaa.%s", name, separator, ext)) ret = append(ret, filepath.Join(fmt.Sprintf("%s%saaa", name, separator), "dir", fmt.Sprintf("bbb.%s", ext)))
ret = append(ret, fmt.Sprintf("%s%saaa/dir/bbb.%s", name, separator, ext)) ret = append(ret, filepath.Join("dir", fmt.Sprintf("%s%s", name, separator), fmt.Sprintf("aaa.%s", ext)))
ret = append(ret, fmt.Sprintf("%s%saaa\\dir\\bbb.%s", name, separator, ext))
ret = append(ret, fmt.Sprintf("dir/%s%s/aaa.%s", name, separator, ext))
ret = append(ret, fmt.Sprintf("dir\\%s%s\\aaa.%s", name, separator, ext))
return ret return ret
} }
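
generateNamePatterns now builds directory cases with filepath.Join instead of hard-coding both '/' and '\' separators, so generated test paths follow the separator of the platform running the tests. A tiny illustration:

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// Hard-coding "dir/..." or "dir\\..." bakes one platform's separator
	// into the test data; filepath.Join picks filepath.Separator instead.
	p := filepath.Join("dir", "performer name.aaa.zip")
	fmt.Println(p) // "dir/performer name.aaa.zip" on Unix, "dir\performer name.aaa.zip" on Windows
}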
@@ -90,8 +88,7 @@ func generateTestPaths(testName, ext string) (scenePatterns []string, falseScene
falseScenePatterns = append(falseScenePatterns, fmt.Sprintf("%saaa.%s", testName, ext)) falseScenePatterns = append(falseScenePatterns, fmt.Sprintf("%saaa.%s", testName, ext))
// add path separator false scenarios // add path separator false scenarios
falseScenePatterns = append(falseScenePatterns, generateFalseNamePatterns(testName, "/", ext)...) falseScenePatterns = append(falseScenePatterns, generateFalseNamePatterns(testName, string(filepath.Separator), ext)...)
falseScenePatterns = append(falseScenePatterns, generateFalseNamePatterns(testName, "\\", ext)...)
// split patterns only valid for ._- and whitespace // split patterns only valid for ._- and whitespace
for _, separator := range testSeparators { for _, separator := range testSeparators {
@@ -173,19 +170,25 @@ func TestScenePerformers(t *testing.T) {
mockPerformerReader := &mocks.PerformerReaderWriter{} mockPerformerReader := &mocks.PerformerReaderWriter{}
mockSceneReader := &mocks.SceneReaderWriter{} mockSceneReader := &mocks.SceneReaderWriter{}
mockPerformerReader.On("Query", mock.Anything, mock.Anything).Return(nil, 0, nil) mockPerformerReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil)
mockPerformerReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once() mockPerformerReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once()
if test.Matches {
mockSceneReader.On("GetPerformerIDs", sceneID).Return(nil, nil).Once()
mockSceneReader.On("UpdatePerformers", sceneID, []int{performerID}).Return(nil).Once()
}
scene := models.Scene{ scene := models.Scene{
ID: sceneID, ID: sceneID,
Path: test.Path, Path: test.Path,
PerformerIDs: models.NewRelatedIDs([]int{}),
} }
err := ScenePerformers(&scene, mockSceneReader, mockPerformerReader, nil)
if test.Matches {
mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
}
err := ScenePerformers(testCtx, &scene, mockSceneReader, mockPerformerReader, nil)
assert.Nil(err) assert.Nil(err)
mockPerformerReader.AssertExpectations(t) mockPerformerReader.AssertExpectations(t)
@@ -196,9 +199,11 @@ func TestScenePerformers(t *testing.T) {
func TestSceneStudios(t *testing.T) { func TestSceneStudios(t *testing.T) {
t.Parallel() t.Parallel()
const sceneID = 1 var (
const studioName = "studio name" sceneID = 1
const studioID = 2 studioName = "studio name"
studioID = 2
)
studio := models.Studio{ studio := models.Studio{
ID: studioID, ID: studioID,
Name: models.NullString(studioName), Name: models.NullString(studioName),
@@ -217,11 +222,9 @@ func TestSceneStudios(t *testing.T) {
doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockSceneReader *mocks.SceneReaderWriter, test pathTestTable) { doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockSceneReader *mocks.SceneReaderWriter, test pathTestTable) {
if test.Matches { if test.Matches {
mockSceneReader.On("Find", sceneID).Return(&models.Scene{}, nil).Once() expectedStudioID := studioID
expectedStudioID := models.NullInt64(studioID) mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
mockSceneReader.On("Update", models.ScenePartial{ StudioID: models.NewOptionalInt(expectedStudioID),
ID: sceneID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once() }).Return(nil, nil).Once()
} }
@@ -229,7 +232,7 @@ func TestSceneStudios(t *testing.T) {
ID: sceneID, ID: sceneID,
Path: test.Path, Path: test.Path,
} }
err := SceneStudios(&scene, mockSceneReader, mockStudioReader, nil) err := SceneStudios(testCtx, &scene, mockSceneReader, mockStudioReader, nil)
assert.Nil(err) assert.Nil(err)
mockStudioReader.AssertExpectations(t) mockStudioReader.AssertExpectations(t)
@@ -240,9 +243,9 @@ func TestSceneStudios(t *testing.T) {
mockStudioReader := &mocks.StudioReaderWriter{} mockStudioReader := &mocks.StudioReaderWriter{}
mockSceneReader := &mocks.SceneReaderWriter{} mockSceneReader := &mocks.SceneReaderWriter{}
mockStudioReader.On("Query", mock.Anything, mock.Anything).Return(nil, 0, nil) mockStudioReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil)
mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once() mockStudioReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()
mockStudioReader.On("GetAliases", mock.Anything).Return([]string{}, nil).Maybe() mockStudioReader.On("GetAliases", testCtx, mock.Anything).Return([]string{}, nil).Maybe()
doTest(mockStudioReader, mockSceneReader, test) doTest(mockStudioReader, mockSceneReader, test)
} }
@@ -255,12 +258,12 @@ func TestSceneStudios(t *testing.T) {
mockStudioReader := &mocks.StudioReaderWriter{} mockStudioReader := &mocks.StudioReaderWriter{}
mockSceneReader := &mocks.SceneReaderWriter{} mockSceneReader := &mocks.SceneReaderWriter{}
mockStudioReader.On("Query", mock.Anything, mock.Anything).Return(nil, 0, nil) mockStudioReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil)
mockStudioReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once() mockStudioReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Studio{&studio, &reversedStudio}, nil).Once()
mockStudioReader.On("GetAliases", studioID).Return([]string{ mockStudioReader.On("GetAliases", testCtx, studioID).Return([]string{
studioName, studioName,
}, nil).Once() }, nil).Once()
mockStudioReader.On("GetAliases", reversedStudioID).Return([]string{}, nil).Once() mockStudioReader.On("GetAliases", testCtx, reversedStudioID).Return([]string{}, nil).Once()
doTest(mockStudioReader, mockSceneReader, test) doTest(mockStudioReader, mockSceneReader, test)
} }
@@ -290,15 +293,20 @@ func TestSceneTags(t *testing.T) {
doTest := func(mockTagReader *mocks.TagReaderWriter, mockSceneReader *mocks.SceneReaderWriter, test pathTestTable) { doTest := func(mockTagReader *mocks.TagReaderWriter, mockSceneReader *mocks.SceneReaderWriter, test pathTestTable) {
if test.Matches { if test.Matches {
mockSceneReader.On("GetTagIDs", sceneID).Return(nil, nil).Once() mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
mockSceneReader.On("UpdateTags", sceneID, []int{tagID}).Return(nil).Once() TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
scene := models.Scene{ scene := models.Scene{
ID: sceneID, ID: sceneID,
Path: test.Path, Path: test.Path,
TagIDs: models.NewRelatedIDs([]int{}),
} }
err := SceneTags(&scene, mockSceneReader, mockTagReader, nil) err := SceneTags(testCtx, &scene, mockSceneReader, mockTagReader, nil)
assert.Nil(err) assert.Nil(err)
mockTagReader.AssertExpectations(t) mockTagReader.AssertExpectations(t)
@@ -309,9 +317,9 @@ func TestSceneTags(t *testing.T) {
mockTagReader := &mocks.TagReaderWriter{} mockTagReader := &mocks.TagReaderWriter{}
mockSceneReader := &mocks.SceneReaderWriter{} mockSceneReader := &mocks.SceneReaderWriter{}
mockTagReader.On("Query", mock.Anything, mock.Anything).Return(nil, 0, nil) mockTagReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil)
mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once() mockTagReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()
mockTagReader.On("GetAliases", mock.Anything).Return([]string{}, nil).Maybe() mockTagReader.On("GetAliases", testCtx, mock.Anything).Return([]string{}, nil).Maybe()
doTest(mockTagReader, mockSceneReader, test) doTest(mockTagReader, mockSceneReader, test)
} }
@@ -324,12 +332,12 @@ func TestSceneTags(t *testing.T) {
mockTagReader := &mocks.TagReaderWriter{} mockTagReader := &mocks.TagReaderWriter{}
mockSceneReader := &mocks.SceneReaderWriter{} mockSceneReader := &mocks.SceneReaderWriter{}
mockTagReader.On("Query", mock.Anything, mock.Anything).Return(nil, 0, nil) mockTagReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil)
mockTagReader.On("QueryForAutoTag", mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once() mockTagReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Tag{&tag, &reversedTag}, nil).Once()
mockTagReader.On("GetAliases", tagID).Return([]string{ mockTagReader.On("GetAliases", testCtx, tagID).Return([]string{
tagName, tagName,
}, nil).Once() }, nil).Once()
mockTagReader.On("GetAliases", reversedTagID).Return([]string{}, nil).Once() mockTagReader.On("GetAliases", testCtx, reversedTagID).Return([]string{}, nil).Once()
doTest(mockTagReader, mockSceneReader, test) doTest(mockTagReader, mockSceneReader, test)
} }


@@ -1,79 +1,61 @@
package autotag package autotag
import ( import (
"database/sql" "context"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/match"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
) )
func addSceneStudio(sceneWriter models.SceneReaderWriter, sceneID, studioID int) (bool, error) { func addSceneStudio(ctx context.Context, sceneWriter scene.PartialUpdater, o *models.Scene, studioID int) (bool, error) {
// don't set if already set // don't set if already set
scene, err := sceneWriter.Find(sceneID) if o.StudioID != nil {
if err != nil {
return false, err
}
if scene.StudioID.Valid {
return false, nil return false, nil
} }
// set the studio id // set the studio id
s := sql.NullInt64{Int64: int64(studioID), Valid: true}
scenePartial := models.ScenePartial{ scenePartial := models.ScenePartial{
ID: sceneID, StudioID: models.NewOptionalInt(studioID),
StudioID: &s,
} }
if _, err := sceneWriter.Update(scenePartial); err != nil { if _, err := sceneWriter.UpdatePartial(ctx, o.ID, scenePartial); err != nil {
return false, err return false, err
} }
return true, nil return true, nil
} }
func addImageStudio(imageWriter models.ImageReaderWriter, imageID, studioID int) (bool, error) { func addImageStudio(ctx context.Context, imageWriter image.PartialUpdater, i *models.Image, studioID int) (bool, error) {
// don't set if already set // don't set if already set
image, err := imageWriter.Find(imageID) if i.StudioID != nil {
if err != nil {
return false, err
}
if image.StudioID.Valid {
return false, nil return false, nil
} }
// set the studio id // set the studio id
s := sql.NullInt64{Int64: int64(studioID), Valid: true}
imagePartial := models.ImagePartial{ imagePartial := models.ImagePartial{
ID: imageID, StudioID: models.NewOptionalInt(studioID),
StudioID: &s,
} }
if _, err := imageWriter.Update(imagePartial); err != nil { if _, err := imageWriter.UpdatePartial(ctx, i.ID, imagePartial); err != nil {
return false, err return false, err
} }
return true, nil return true, nil
} }
func addGalleryStudio(galleryWriter models.GalleryReaderWriter, galleryID, studioID int) (bool, error) { func addGalleryStudio(ctx context.Context, galleryWriter GalleryFinderUpdater, o *models.Gallery, studioID int) (bool, error) {
// don't set if already set // don't set if already set
gallery, err := galleryWriter.Find(galleryID) if o.StudioID != nil {
if err != nil {
return false, err
}
if gallery.StudioID.Valid {
return false, nil return false, nil
} }
// set the studio id // set the studio id
s := sql.NullInt64{Int64: int64(studioID), Valid: true}
galleryPartial := models.GalleryPartial{ galleryPartial := models.GalleryPartial{
ID: galleryID, StudioID: models.NewOptionalInt(studioID),
StudioID: &s,
} }
if _, err := galleryWriter.UpdatePartial(galleryPartial); err != nil { if _, err := galleryWriter.UpdatePartial(ctx, o.ID, galleryPartial); err != nil {
return false, err return false, err
} }
return true, nil return true, nil
@@ -98,13 +80,18 @@ func getStudioTagger(p *models.Studio, aliases []string, cache *match.Cache) []t
return ret return ret
} }
type SceneFinderUpdater interface {
scene.Queryer
scene.PartialUpdater
}
// StudioScenes searches for scenes whose path matches the provided studio name and tags the scene with the studio, if studio is not already set on the scene. // StudioScenes searches for scenes whose path matches the provided studio name and tags the scene with the studio, if studio is not already set on the scene.
func StudioScenes(p *models.Studio, paths []string, aliases []string, rw models.SceneReaderWriter, cache *match.Cache) error { func StudioScenes(ctx context.Context, p *models.Studio, paths []string, aliases []string, rw SceneFinderUpdater, cache *match.Cache) error {
t := getStudioTagger(p, aliases, cache) t := getStudioTagger(p, aliases, cache)
for _, tt := range t { for _, tt := range t {
if err := tt.tagScenes(paths, rw, func(subjectID, otherID int) (bool, error) { if err := tt.tagScenes(ctx, paths, rw, func(o *models.Scene) (bool, error) {
return addSceneStudio(rw, otherID, subjectID) return addSceneStudio(ctx, rw, o, p.ID)
}); err != nil { }); err != nil {
return err return err
} }
@@ -113,13 +100,19 @@ func StudioScenes(p *models.Studio, paths []string, aliases []string, rw models.
return nil return nil
} }
type ImageFinderUpdater interface {
image.Queryer
Find(ctx context.Context, id int) (*models.Image, error)
UpdatePartial(ctx context.Context, id int, partial models.ImagePartial) (*models.Image, error)
}
// StudioImages searches for images whose path matches the provided studio name and tags the image with the studio, if studio is not already set on the image. // StudioImages searches for images whose path matches the provided studio name and tags the image with the studio, if studio is not already set on the image.
func StudioImages(p *models.Studio, paths []string, aliases []string, rw models.ImageReaderWriter, cache *match.Cache) error { func StudioImages(ctx context.Context, p *models.Studio, paths []string, aliases []string, rw ImageFinderUpdater, cache *match.Cache) error {
t := getStudioTagger(p, aliases, cache) t := getStudioTagger(p, aliases, cache)
for _, tt := range t { for _, tt := range t {
if err := tt.tagImages(paths, rw, func(subjectID, otherID int) (bool, error) { if err := tt.tagImages(ctx, paths, rw, func(i *models.Image) (bool, error) {
return addImageStudio(rw, otherID, subjectID) return addImageStudio(ctx, rw, i, p.ID)
}); err != nil { }); err != nil {
return err return err
} }
@@ -128,13 +121,19 @@ func StudioImages(p *models.Studio, paths []string, aliases []string, rw models.
return nil return nil
} }
type GalleryFinderUpdater interface {
gallery.Queryer
gallery.PartialUpdater
Find(ctx context.Context, id int) (*models.Gallery, error)
}
// StudioGalleries searches for galleries whose path matches the provided studio name and tags the gallery with the studio, if studio is not already set on the gallery. // StudioGalleries searches for galleries whose path matches the provided studio name and tags the gallery with the studio, if studio is not already set on the gallery.
func StudioGalleries(p *models.Studio, paths []string, aliases []string, rw models.GalleryReaderWriter, cache *match.Cache) error { func StudioGalleries(ctx context.Context, p *models.Studio, paths []string, aliases []string, rw GalleryFinderUpdater, cache *match.Cache) error {
t := getStudioTagger(p, aliases, cache) t := getStudioTagger(p, aliases, cache)
for _, tt := range t { for _, tt := range t {
if err := tt.tagGalleries(paths, rw, func(subjectID, otherID int) (bool, error) { if err := tt.tagGalleries(ctx, paths, rw, func(o *models.Gallery) (bool, error) {
return addGalleryStudio(rw, otherID, subjectID) return addGalleryStudio(ctx, rw, o, p.ID)
}); err != nil { }); err != nil {
return err return err
} }
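
These helpers now receive the already-loaded object plus a narrow, caller-defined updater instead of a full models.SceneReaderWriter, which removes the Find round-trip before the "don't set if already set" check. A rough, runnable sketch of that shape; Scene, ScenePartial, PartialUpdater and memoryUpdater below are stand-ins, not the real stash packages:

package main

import (
    "context"
    "fmt"
)

// Stand-in domain types; the real ones live in pkg/models.
type Scene struct {
    ID       int
    StudioID *int
}

type ScenePartial struct {
    StudioID *int
}

// PartialUpdater is the narrow, consumer-defined interface the tagger needs:
// it can partially update a scene and nothing else.
type PartialUpdater interface {
    UpdatePartial(ctx context.Context, id int, partial ScenePartial) (*Scene, error)
}

// addSceneStudio mirrors the "don't set if already set" pattern from the diff:
// the caller passes the loaded scene, so no Find call is needed here.
func addSceneStudio(ctx context.Context, w PartialUpdater, o *Scene, studioID int) (bool, error) {
    if o.StudioID != nil {
        return false, nil
    }
    if _, err := w.UpdatePartial(ctx, o.ID, ScenePartial{StudioID: &studioID}); err != nil {
        return false, err
    }
    return true, nil
}

// memoryUpdater is a trivial in-memory implementation used only to run the sketch.
type memoryUpdater struct{ scenes map[int]*Scene }

func (m *memoryUpdater) UpdatePartial(_ context.Context, id int, p ScenePartial) (*Scene, error) {
    s := m.scenes[id]
    if p.StudioID != nil {
        s.StudioID = p.StudioID
    }
    return s, nil
}

func main() {
    w := &memoryUpdater{scenes: map[int]*Scene{1: {ID: 1}}}
    added, _ := addSceneStudio(context.Background(), w, w.scenes[1], 2)
    fmt.Println(added, *w.scenes[1].StudioID) // true 2
}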


@@ -1,6 +1,7 @@
package autotag package autotag
import ( import (
"path/filepath"
"testing" "testing"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
@@ -17,7 +18,8 @@ type testStudioCase struct {
aliasRegex string aliasRegex string
} }
var testStudioCases = []testStudioCase{ var (
testStudioCases = []testStudioCase{
{ {
"studio name", "studio name",
`(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*name(?:$|_|[^\p{L}\d])`, `(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
@@ -30,12 +32,6 @@ var testStudioCases = []testStudioCase{
"", "",
"", "",
}, },
{
`studio + name\`,
`(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
"",
"",
},
{ {
"studio name", "studio name",
`(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*name(?:$|_|[^\p{L}\d])`, `(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
@@ -48,6 +44,15 @@ var testStudioCases = []testStudioCase{
"alias + name", "alias + name",
`(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`, `(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
}, },
}
trailingBackslashStudioCases = []testStudioCase{
{
`studio + name\`,
`(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
"",
"",
},
{ {
`studio + name\`, `studio + name\`,
`(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`, `(?i)(?:^|_|[^\p{L}\d])studio[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
@@ -55,11 +60,18 @@ var testStudioCases = []testStudioCase{
`(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`, `(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
}, },
} }
)
func TestStudioScenes(t *testing.T) { func TestStudioScenes(t *testing.T) {
t.Parallel() t.Parallel()
for _, p := range testStudioCases { tc := testStudioCases
// trailing backslash tests only work where filepath separator is not backslash
if filepath.Separator != '\\' {
tc = append(tc, trailingBackslashStudioCases...)
}
for _, p := range tc {
testStudioScenes(t, p) testStudioScenes(t, p)
} }
} }
@@ -72,7 +84,7 @@ func testStudioScenes(t *testing.T, tc testStudioCase) {
mockSceneReader := &mocks.SceneReaderWriter{} mockSceneReader := &mocks.SceneReaderWriter{}
const studioID = 2 var studioID = 2
var aliases []string var aliases []string
@@ -113,7 +125,7 @@ func testStudioScenes(t *testing.T, tc testStudioCase) {
} }
// if alias provided, then don't find by name // if alias provided, then don't find by name
onNameQuery := mockSceneReader.On("Query", scene.QueryOptions(expectedSceneFilter, expectedFindFilter, false)) onNameQuery := mockSceneReader.On("Query", testCtx, scene.QueryOptions(expectedSceneFilter, expectedFindFilter, false))
if aliasName == "" { if aliasName == "" {
onNameQuery.Return(mocks.SceneQueryResult(scenes, len(scenes)), nil).Once() onNameQuery.Return(mocks.SceneQueryResult(scenes, len(scenes)), nil).Once()
@@ -128,21 +140,19 @@ func testStudioScenes(t *testing.T, tc testStudioCase) {
}, },
} }
mockSceneReader.On("Query", scene.QueryOptions(expectedAliasFilter, expectedFindFilter, false)). mockSceneReader.On("Query", testCtx, scene.QueryOptions(expectedAliasFilter, expectedFindFilter, false)).
Return(mocks.SceneQueryResult(scenes, len(scenes)), nil).Once() Return(mocks.SceneQueryResult(scenes, len(scenes)), nil).Once()
} }
for i := range matchingPaths { for i := range matchingPaths {
sceneID := i + 1 sceneID := i + 1
mockSceneReader.On("Find", sceneID).Return(&models.Scene{}, nil).Once() expectedStudioID := studioID
expectedStudioID := models.NullInt64(studioID) mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
mockSceneReader.On("Update", models.ScenePartial{ StudioID: models.NewOptionalInt(expectedStudioID),
ID: sceneID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once() }).Return(nil, nil).Once()
} }
err := StudioScenes(&studio, nil, aliases, mockSceneReader, nil) err := StudioScenes(testCtx, &studio, nil, aliases, mockSceneReader, nil)
assert := assert.New(t) assert := assert.New(t)
@@ -166,7 +176,7 @@ func testStudioImages(t *testing.T, tc testStudioCase) {
mockImageReader := &mocks.ImageReaderWriter{} mockImageReader := &mocks.ImageReaderWriter{}
const studioID = 2 var studioID = 2
var aliases []string var aliases []string
@@ -206,7 +216,7 @@ func testStudioImages(t *testing.T, tc testStudioCase) {
} }
// if alias provided, then don't find by name // if alias provided, then don't find by name
onNameQuery := mockImageReader.On("Query", image.QueryOptions(expectedImageFilter, expectedFindFilter, false)) onNameQuery := mockImageReader.On("Query", testCtx, image.QueryOptions(expectedImageFilter, expectedFindFilter, false))
if aliasName == "" { if aliasName == "" {
onNameQuery.Return(mocks.ImageQueryResult(images, len(images)), nil).Once() onNameQuery.Return(mocks.ImageQueryResult(images, len(images)), nil).Once()
} else { } else {
@@ -220,21 +230,19 @@ func testStudioImages(t *testing.T, tc testStudioCase) {
}, },
} }
mockImageReader.On("Query", image.QueryOptions(expectedAliasFilter, expectedFindFilter, false)). mockImageReader.On("Query", testCtx, image.QueryOptions(expectedAliasFilter, expectedFindFilter, false)).
Return(mocks.ImageQueryResult(images, len(images)), nil).Once() Return(mocks.ImageQueryResult(images, len(images)), nil).Once()
} }
for i := range matchingPaths { for i := range matchingPaths {
imageID := i + 1 imageID := i + 1
mockImageReader.On("Find", imageID).Return(&models.Image{}, nil).Once() expectedStudioID := studioID
expectedStudioID := models.NullInt64(studioID) mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
mockImageReader.On("Update", models.ImagePartial{ StudioID: models.NewOptionalInt(expectedStudioID),
ID: imageID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once() }).Return(nil, nil).Once()
} }
err := StudioImages(&studio, nil, aliases, mockImageReader, nil) err := StudioImages(testCtx, &studio, nil, aliases, mockImageReader, nil)
assert := assert.New(t) assert := assert.New(t)
@@ -257,7 +265,7 @@ func testStudioGalleries(t *testing.T, tc testStudioCase) {
aliasRegex := tc.aliasRegex aliasRegex := tc.aliasRegex
mockGalleryReader := &mocks.GalleryReaderWriter{} mockGalleryReader := &mocks.GalleryReaderWriter{}
const studioID = 2 var studioID = 2
var aliases []string var aliases []string
@@ -270,9 +278,10 @@ func testStudioGalleries(t *testing.T, tc testStudioCase) {
var galleries []*models.Gallery var galleries []*models.Gallery
matchingPaths, falsePaths := generateTestPaths(testPathName, galleryExt) matchingPaths, falsePaths := generateTestPaths(testPathName, galleryExt)
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
v := p
galleries = append(galleries, &models.Gallery{ galleries = append(galleries, &models.Gallery{
ID: i + 1, ID: i + 1,
Path: models.NullString(p), Path: v,
}) })
} }
@@ -297,7 +306,7 @@ func testStudioGalleries(t *testing.T, tc testStudioCase) {
} }
// if alias provided, then don't find by name // if alias provided, then don't find by name
onNameQuery := mockGalleryReader.On("Query", expectedGalleryFilter, expectedFindFilter) onNameQuery := mockGalleryReader.On("Query", testCtx, expectedGalleryFilter, expectedFindFilter)
if aliasName == "" { if aliasName == "" {
onNameQuery.Return(galleries, len(galleries), nil).Once() onNameQuery.Return(galleries, len(galleries), nil).Once()
} else { } else {
@@ -311,20 +320,18 @@ func testStudioGalleries(t *testing.T, tc testStudioCase) {
}, },
} }
mockGalleryReader.On("Query", expectedAliasFilter, expectedFindFilter).Return(galleries, len(galleries), nil).Once() mockGalleryReader.On("Query", testCtx, expectedAliasFilter, expectedFindFilter).Return(galleries, len(galleries), nil).Once()
} }
for i := range matchingPaths { for i := range matchingPaths {
galleryID := i + 1 galleryID := i + 1
mockGalleryReader.On("Find", galleryID).Return(&models.Gallery{}, nil).Once() expectedStudioID := studioID
expectedStudioID := models.NullInt64(studioID) mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
mockGalleryReader.On("UpdatePartial", models.GalleryPartial{ StudioID: models.NewOptionalInt(expectedStudioID),
ID: galleryID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once() }).Return(nil, nil).Once()
} }
err := StudioGalleries(&studio, nil, aliases, mockGalleryReader, nil) err := StudioGalleries(testCtx, &studio, nil, aliases, mockGalleryReader, nil)
assert := assert.New(t) assert := assert.New(t)
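
The trailing-backslash cases now live in their own slice and are appended only when the platform path separator is not a backslash, because a pattern ending in a literal \ cannot match a Windows path. A small stand-alone sketch of the same guard; the testCase struct and its values are illustrative:

package main

import (
    "fmt"
    "path/filepath"
)

type testCase struct {
    name  string
    regex string
}

var baseCases = []testCase{
    {"studio name", `studio[.\-_ ]*name`},
}

// Cases whose expected pattern ends in a literal backslash only make sense
// where '\' is not the path separator (i.e. not on Windows).
var trailingBackslashCases = []testCase{
    {`studio + name\`, `studio[.\-_ ]*\+[.\-_ ]*name\\`},
}

func main() {
    tc := baseCases
    if filepath.Separator != '\\' {
        tc = append(tc, trailingBackslashCases...)
    }
    for _, c := range tc {
        fmt.Println(c.name)
    }
}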


@@ -1,13 +1,34 @@
package autotag package autotag
import ( import (
"context"
"github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/match"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/sliceutil/intslice"
) )
type SceneQueryTagUpdater interface {
scene.Queryer
models.TagIDLoader
scene.PartialUpdater
}
type ImageQueryTagUpdater interface {
image.Queryer
models.TagIDLoader
image.PartialUpdater
}
type GalleryQueryTagUpdater interface {
gallery.Queryer
models.TagIDLoader
gallery.PartialUpdater
}
func getTagTaggers(p *models.Tag, aliases []string, cache *match.Cache) []tagger { func getTagTaggers(p *models.Tag, aliases []string, cache *match.Cache) []tagger {
ret := []tagger{{ ret := []tagger{{
ID: p.ID, ID: p.ID,
@@ -29,12 +50,25 @@ func getTagTaggers(p *models.Tag, aliases []string, cache *match.Cache) []tagger
} }
// TagScenes searches for scenes whose path matches the provided tag name and tags the scene with the tag. // TagScenes searches for scenes whose path matches the provided tag name and tags the scene with the tag.
func TagScenes(p *models.Tag, paths []string, aliases []string, rw models.SceneReaderWriter, cache *match.Cache) error { func TagScenes(ctx context.Context, p *models.Tag, paths []string, aliases []string, rw SceneQueryTagUpdater, cache *match.Cache) error {
t := getTagTaggers(p, aliases, cache) t := getTagTaggers(p, aliases, cache)
for _, tt := range t { for _, tt := range t {
if err := tt.tagScenes(paths, rw, func(subjectID, otherID int) (bool, error) { if err := tt.tagScenes(ctx, paths, rw, func(o *models.Scene) (bool, error) {
return scene.AddTag(rw, otherID, subjectID) if err := o.LoadTagIDs(ctx, rw); err != nil {
return false, err
}
existing := o.TagIDs.List()
if intslice.IntInclude(existing, p.ID) {
return false, nil
}
if err := scene.AddTag(ctx, rw, o, p.ID); err != nil {
return false, err
}
return true, nil
}); err != nil { }); err != nil {
return err return err
} }
@@ -43,12 +77,25 @@ func TagScenes(p *models.Tag, paths []string, aliases []string, rw models.SceneR
} }
// TagImages searches for images whose path matches the provided tag name and tags the image with the tag. // TagImages searches for images whose path matches the provided tag name and tags the image with the tag.
func TagImages(p *models.Tag, paths []string, aliases []string, rw models.ImageReaderWriter, cache *match.Cache) error { func TagImages(ctx context.Context, p *models.Tag, paths []string, aliases []string, rw ImageQueryTagUpdater, cache *match.Cache) error {
t := getTagTaggers(p, aliases, cache) t := getTagTaggers(p, aliases, cache)
for _, tt := range t { for _, tt := range t {
if err := tt.tagImages(paths, rw, func(subjectID, otherID int) (bool, error) { if err := tt.tagImages(ctx, paths, rw, func(o *models.Image) (bool, error) {
return image.AddTag(rw, otherID, subjectID) if err := o.LoadTagIDs(ctx, rw); err != nil {
return false, err
}
existing := o.TagIDs.List()
if intslice.IntInclude(existing, p.ID) {
return false, nil
}
if err := image.AddTag(ctx, rw, o, p.ID); err != nil {
return false, err
}
return true, nil
}); err != nil { }); err != nil {
return err return err
} }
@@ -57,12 +104,25 @@ func TagImages(p *models.Tag, paths []string, aliases []string, rw models.ImageR
} }
// TagGalleries searches for galleries whose path matches the provided tag name and tags the gallery with the tag. // TagGalleries searches for galleries whose path matches the provided tag name and tags the gallery with the tag.
func TagGalleries(p *models.Tag, paths []string, aliases []string, rw models.GalleryReaderWriter, cache *match.Cache) error { func TagGalleries(ctx context.Context, p *models.Tag, paths []string, aliases []string, rw GalleryQueryTagUpdater, cache *match.Cache) error {
t := getTagTaggers(p, aliases, cache) t := getTagTaggers(p, aliases, cache)
for _, tt := range t { for _, tt := range t {
if err := tt.tagGalleries(paths, rw, func(subjectID, otherID int) (bool, error) { if err := tt.tagGalleries(ctx, paths, rw, func(o *models.Gallery) (bool, error) {
return gallery.AddTag(rw, otherID, subjectID) if err := o.LoadTagIDs(ctx, rw); err != nil {
return false, err
}
existing := o.TagIDs.List()
if intslice.IntInclude(existing, p.ID) {
return false, nil
}
if err := gallery.AddTag(ctx, rw, o, p.ID); err != nil {
return false, err
}
return true, nil
}); err != nil { }); err != nil {
return err return err
} }
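
Each of the Tag* functions above now uses the same idempotent callback: load the related tag IDs, return early when the tag is already present, otherwise add it and report a change. A self-contained sketch of that step; the Gallery/TagAdder types and the intInclude helper below are stand-ins for models.TagIDLoader and sliceutil/intslice:

package main

import (
    "context"
    "fmt"
)

// intInclude mirrors intslice.IntInclude: report whether vs contains v.
func intInclude(vs []int, v int) bool {
    for _, x := range vs {
        if x == v {
            return true
        }
    }
    return false
}

// Gallery is a stand-in with its tag IDs already loaded.
type Gallery struct {
    ID     int
    TagIDs []int
}

type TagAdder interface {
    AddTag(ctx context.Context, g *Gallery, tagID int) error
}

// addTagIfMissing returns true only when the tag was actually added,
// so callers can log "Added tag ..." just for real changes.
func addTagIfMissing(ctx context.Context, w TagAdder, g *Gallery, tagID int) (bool, error) {
    if intInclude(g.TagIDs, tagID) {
        return false, nil
    }
    if err := w.AddTag(ctx, g, tagID); err != nil {
        return false, err
    }
    return true, nil
}

type memAdder struct{}

func (memAdder) AddTag(_ context.Context, g *Gallery, tagID int) error {
    g.TagIDs = append(g.TagIDs, tagID)
    return nil
}

func main() {
    g := &Gallery{ID: 1, TagIDs: []int{3}}
    added, _ := addTagIfMissing(context.Background(), memAdder{}, g, 3)
    fmt.Println(added) // false - already tagged
    added, _ = addTagIfMissing(context.Background(), memAdder{}, g, 5)
    fmt.Println(added, g.TagIDs) // true [3 5]
}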


@@ -1,6 +1,7 @@
package autotag package autotag
import ( import (
"path/filepath"
"testing" "testing"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
@@ -17,7 +18,8 @@ type testTagCase struct {
aliasRegex string aliasRegex string
} }
var testTagCases = []testTagCase{ var (
testTagCases = []testTagCase{
{ {
"tag name", "tag name",
`(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*name(?:$|_|[^\p{L}\d])`, `(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
@@ -30,12 +32,6 @@ var testTagCases = []testTagCase{
"", "",
"", "",
}, },
{
`tag + name\`,
`(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
"",
"",
},
{ {
"tag name", "tag name",
`(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*name(?:$|_|[^\p{L}\d])`, `(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
@@ -48,6 +44,15 @@ var testTagCases = []testTagCase{
"alias + name", "alias + name",
`(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`, `(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*\+[.\-_ ]*name(?:$|_|[^\p{L}\d])`,
}, },
}
trailingBackslashCases = []testTagCase{
{
`tag + name\`,
`(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
"",
"",
},
{ {
`tag + name\`, `tag + name\`,
`(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`, `(?i)(?:^|_|[^\p{L}\d])tag[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
@@ -55,11 +60,18 @@ var testTagCases = []testTagCase{
`(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`, `(?i)(?:^|_|[^\p{L}\d])alias[.\-_ ]*\+[.\-_ ]*name\\(?:$|_|[^\p{L}\d])`,
}, },
} }
)
func TestTagScenes(t *testing.T) { func TestTagScenes(t *testing.T) {
t.Parallel() t.Parallel()
for _, p := range testTagCases { tc := testTagCases
// trailing backslash tests only work where filepath separator is not backslash
if filepath.Separator != '\\' {
tc = append(tc, trailingBackslashCases...)
}
for _, p := range tc {
testTagScenes(t, p) testTagScenes(t, p)
} }
} }
@@ -89,6 +101,7 @@ func testTagScenes(t *testing.T, tc testTagCase) {
scenes = append(scenes, &models.Scene{ scenes = append(scenes, &models.Scene{
ID: i + 1, ID: i + 1,
Path: p, Path: p,
TagIDs: models.NewRelatedIDs([]int{}),
}) })
} }
@@ -113,7 +126,7 @@ func testTagScenes(t *testing.T, tc testTagCase) {
} }
// if alias provided, then don't find by name // if alias provided, then don't find by name
onNameQuery := mockSceneReader.On("Query", scene.QueryOptions(expectedSceneFilter, expectedFindFilter, false)) onNameQuery := mockSceneReader.On("Query", testCtx, scene.QueryOptions(expectedSceneFilter, expectedFindFilter, false))
if aliasName == "" { if aliasName == "" {
onNameQuery.Return(mocks.SceneQueryResult(scenes, len(scenes)), nil).Once() onNameQuery.Return(mocks.SceneQueryResult(scenes, len(scenes)), nil).Once()
} else { } else {
@@ -127,17 +140,21 @@ func testTagScenes(t *testing.T, tc testTagCase) {
}, },
} }
mockSceneReader.On("Query", scene.QueryOptions(expectedAliasFilter, expectedFindFilter, false)). mockSceneReader.On("Query", testCtx, scene.QueryOptions(expectedAliasFilter, expectedFindFilter, false)).
Return(mocks.SceneQueryResult(scenes, len(scenes)), nil).Once() Return(mocks.SceneQueryResult(scenes, len(scenes)), nil).Once()
} }
for i := range matchingPaths { for i := range matchingPaths {
sceneID := i + 1 sceneID := i + 1
mockSceneReader.On("GetTagIDs", sceneID).Return(nil, nil).Once() mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
mockSceneReader.On("UpdateTags", sceneID, []int{tagID}).Return(nil).Once() TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
err := TagScenes(&tag, nil, aliases, mockSceneReader, nil) err := TagScenes(testCtx, &tag, nil, aliases, mockSceneReader, nil)
assert := assert.New(t) assert := assert.New(t)
@@ -177,6 +194,7 @@ func testTagImages(t *testing.T, tc testTagCase) {
images = append(images, &models.Image{ images = append(images, &models.Image{
ID: i + 1, ID: i + 1,
Path: p, Path: p,
TagIDs: models.NewRelatedIDs([]int{}),
}) })
} }
@@ -201,7 +219,7 @@ func testTagImages(t *testing.T, tc testTagCase) {
} }
// if alias provided, then don't find by name // if alias provided, then don't find by name
onNameQuery := mockImageReader.On("Query", image.QueryOptions(expectedImageFilter, expectedFindFilter, false)) onNameQuery := mockImageReader.On("Query", testCtx, image.QueryOptions(expectedImageFilter, expectedFindFilter, false))
if aliasName == "" { if aliasName == "" {
onNameQuery.Return(mocks.ImageQueryResult(images, len(images)), nil).Once() onNameQuery.Return(mocks.ImageQueryResult(images, len(images)), nil).Once()
} else { } else {
@@ -215,17 +233,22 @@ func testTagImages(t *testing.T, tc testTagCase) {
}, },
} }
mockImageReader.On("Query", image.QueryOptions(expectedAliasFilter, expectedFindFilter, false)). mockImageReader.On("Query", testCtx, image.QueryOptions(expectedAliasFilter, expectedFindFilter, false)).
Return(mocks.ImageQueryResult(images, len(images)), nil).Once() Return(mocks.ImageQueryResult(images, len(images)), nil).Once()
} }
for i := range matchingPaths { for i := range matchingPaths {
imageID := i + 1 imageID := i + 1
mockImageReader.On("GetTagIDs", imageID).Return(nil, nil).Once()
mockImageReader.On("UpdateTags", imageID, []int{tagID}).Return(nil).Once() mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
err := TagImages(&tag, nil, aliases, mockImageReader, nil) err := TagImages(testCtx, &tag, nil, aliases, mockImageReader, nil)
assert := assert.New(t) assert := assert.New(t)
@@ -262,9 +285,11 @@ func testTagGalleries(t *testing.T, tc testTagCase) {
var galleries []*models.Gallery var galleries []*models.Gallery
matchingPaths, falsePaths := generateTestPaths(testPathName, "mp4") matchingPaths, falsePaths := generateTestPaths(testPathName, "mp4")
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
v := p
galleries = append(galleries, &models.Gallery{ galleries = append(galleries, &models.Gallery{
ID: i + 1, ID: i + 1,
Path: models.NullString(p), Path: v,
TagIDs: models.NewRelatedIDs([]int{}),
}) })
} }
@@ -289,7 +314,7 @@ func testTagGalleries(t *testing.T, tc testTagCase) {
} }
// if alias provided, then don't find by name // if alias provided, then don't find by name
onNameQuery := mockGalleryReader.On("Query", expectedGalleryFilter, expectedFindFilter) onNameQuery := mockGalleryReader.On("Query", testCtx, expectedGalleryFilter, expectedFindFilter)
if aliasName == "" { if aliasName == "" {
onNameQuery.Return(galleries, len(galleries), nil).Once() onNameQuery.Return(galleries, len(galleries), nil).Once()
} else { } else {
@@ -303,16 +328,22 @@ func testTagGalleries(t *testing.T, tc testTagCase) {
}, },
} }
mockGalleryReader.On("Query", expectedAliasFilter, expectedFindFilter).Return(galleries, len(galleries), nil).Once() mockGalleryReader.On("Query", testCtx, expectedAliasFilter, expectedFindFilter).Return(galleries, len(galleries), nil).Once()
} }
for i := range matchingPaths { for i := range matchingPaths {
galleryID := i + 1 galleryID := i + 1
mockGalleryReader.On("GetTagIDs", galleryID).Return(nil, nil).Once()
mockGalleryReader.On("UpdateTags", galleryID, []int{tagID}).Return(nil).Once() mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
err := TagGalleries(&tag, nil, aliases, mockGalleryReader, nil) err := TagGalleries(testCtx, &tag, nil, aliases, mockGalleryReader, nil)
assert := assert.New(t) assert := assert.New(t)
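
All of these expectations now include testCtx because the mocks receive a context as their first argument. A minimal testify sketch of that pattern; UpdaterMock and its UpdatePartial shape are hand-written illustrations, not the generated stash mocks:

package example

import (
    "context"
    "testing"

    "github.com/stretchr/testify/mock"
)

type ScenePartial struct{ Title *string }

// UpdaterMock is a hand-written stand-in for a generated mock.
type UpdaterMock struct{ mock.Mock }

func (m *UpdaterMock) UpdatePartial(ctx context.Context, id int, p ScenePartial) error {
    args := m.Called(ctx, id, p)
    return args.Error(0)
}

func TestUpdatePartialExpectation(t *testing.T) {
    testCtx := context.Background()
    m := &UpdaterMock{}

    title := "new title"
    // The context is part of the expectation, exactly like the testCtx
    // arguments added throughout the autotag tests.
    m.On("UpdatePartial", testCtx, 1, ScenePartial{Title: &title}).Return(nil).Once()

    if err := m.UpdatePartial(testCtx, 1, ScenePartial{Title: &title}); err != nil {
        t.Fatal(err)
    }
    m.AssertExpectations(t)
}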


@@ -14,11 +14,15 @@
package autotag package autotag
import ( import (
"context"
"fmt" "fmt"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/match"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
) )
type tagger struct { type tagger struct {
@@ -32,6 +36,9 @@ type tagger struct {
} }
type addLinkFunc func(subjectID, otherID int) (bool, error) type addLinkFunc func(subjectID, otherID int) (bool, error)
type addImageLinkFunc func(o *models.Image) (bool, error)
type addGalleryLinkFunc func(o *models.Gallery) (bool, error)
type addSceneLinkFunc func(o *models.Scene) (bool, error)
func (t *tagger) addError(otherType, otherName string, err error) error { func (t *tagger) addError(otherType, otherName string, err error) error {
return fmt.Errorf("error adding %s '%s' to %s '%s': %s", otherType, otherName, t.Type, t.Name, err.Error()) return fmt.Errorf("error adding %s '%s' to %s '%s': %s", otherType, otherName, t.Type, t.Name, err.Error())
@@ -41,8 +48,8 @@ func (t *tagger) addLog(otherType, otherName string) {
logger.Infof("Added %s '%s' to %s '%s'", otherType, otherName, t.Type, t.Name) logger.Infof("Added %s '%s' to %s '%s'", otherType, otherName, t.Type, t.Name)
} }
func (t *tagger) tagPerformers(performerReader models.PerformerReader, addFunc addLinkFunc) error { func (t *tagger) tagPerformers(ctx context.Context, performerReader match.PerformerAutoTagQueryer, addFunc addLinkFunc) error {
others, err := match.PathToPerformers(t.Path, performerReader, t.cache, t.trimExt) others, err := match.PathToPerformers(ctx, t.Path, performerReader, t.cache, t.trimExt)
if err != nil { if err != nil {
return err return err
} }
@@ -62,8 +69,8 @@ func (t *tagger) tagPerformers(performerReader models.PerformerReader, addFunc a
return nil return nil
} }
func (t *tagger) tagStudios(studioReader models.StudioReader, addFunc addLinkFunc) error { func (t *tagger) tagStudios(ctx context.Context, studioReader match.StudioAutoTagQueryer, addFunc addLinkFunc) error {
studio, err := match.PathToStudio(t.Path, studioReader, t.cache, t.trimExt) studio, err := match.PathToStudio(ctx, t.Path, studioReader, t.cache, t.trimExt)
if err != nil { if err != nil {
return err return err
} }
@@ -83,8 +90,8 @@ func (t *tagger) tagStudios(studioReader models.StudioReader, addFunc addLinkFun
return nil return nil
} }
func (t *tagger) tagTags(tagReader models.TagReader, addFunc addLinkFunc) error { func (t *tagger) tagTags(ctx context.Context, tagReader match.TagAutoTagQueryer, addFunc addLinkFunc) error {
others, err := match.PathToTags(t.Path, tagReader, t.cache, t.trimExt) others, err := match.PathToTags(ctx, t.Path, tagReader, t.cache, t.trimExt)
if err != nil { if err != nil {
return err return err
} }
@@ -104,63 +111,63 @@ func (t *tagger) tagTags(tagReader models.TagReader, addFunc addLinkFunc) error
return nil return nil
} }
func (t *tagger) tagScenes(paths []string, sceneReader models.SceneReader, addFunc addLinkFunc) error { func (t *tagger) tagScenes(ctx context.Context, paths []string, sceneReader scene.Queryer, addFunc addSceneLinkFunc) error {
others, err := match.PathToScenes(t.Name, paths, sceneReader) others, err := match.PathToScenes(ctx, t.Name, paths, sceneReader)
if err != nil { if err != nil {
return err return err
} }
for _, p := range others { for _, p := range others {
added, err := addFunc(t.ID, p.ID) added, err := addFunc(p)
if err != nil { if err != nil {
return t.addError("scene", p.GetTitle(), err) return t.addError("scene", p.DisplayName(), err)
} }
if added { if added {
t.addLog("scene", p.GetTitle()) t.addLog("scene", p.DisplayName())
} }
} }
return nil return nil
} }
func (t *tagger) tagImages(paths []string, imageReader models.ImageReader, addFunc addLinkFunc) error { func (t *tagger) tagImages(ctx context.Context, paths []string, imageReader image.Queryer, addFunc addImageLinkFunc) error {
others, err := match.PathToImages(t.Name, paths, imageReader) others, err := match.PathToImages(ctx, t.Name, paths, imageReader)
if err != nil { if err != nil {
return err return err
} }
for _, p := range others { for _, p := range others {
added, err := addFunc(t.ID, p.ID) added, err := addFunc(p)
if err != nil { if err != nil {
return t.addError("image", p.GetTitle(), err) return t.addError("image", p.DisplayName(), err)
} }
if added { if added {
t.addLog("image", p.GetTitle()) t.addLog("image", p.DisplayName())
} }
} }
return nil return nil
} }
func (t *tagger) tagGalleries(paths []string, galleryReader models.GalleryReader, addFunc addLinkFunc) error { func (t *tagger) tagGalleries(ctx context.Context, paths []string, galleryReader gallery.Queryer, addFunc addGalleryLinkFunc) error {
others, err := match.PathToGalleries(t.Name, paths, galleryReader) others, err := match.PathToGalleries(ctx, t.Name, paths, galleryReader)
if err != nil { if err != nil {
return err return err
} }
for _, p := range others { for _, p := range others {
added, err := addFunc(t.ID, p.ID) added, err := addFunc(p)
if err != nil { if err != nil {
return t.addError("gallery", p.GetTitle(), err) return t.addError("gallery", p.DisplayName(), err)
} }
if added { if added {
t.addLog("gallery", p.GetTitle()) t.addLog("gallery", p.DisplayName())
} }
} }
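
The tag* helpers are now driven by typed callbacks (addSceneLinkFunc and friends) that receive the matched object itself rather than an ID pair, and they log via DisplayName only when the callback reports a real change. A rough sketch of that match-then-callback loop; the Scene type, sceneMatcher and the logging below are illustrative stand-ins:

package main

import (
    "context"
    "fmt"
)

type Scene struct {
    ID    int
    Title string
    Path  string
}

// DisplayName mirrors the method the tagger now logs with instead of GetTitle.
func (s *Scene) DisplayName() string {
    if s.Title != "" {
        return s.Title
    }
    return s.Path
}

// addSceneLinkFunc matches the new callback shape: it gets the whole scene and
// reports whether anything was actually changed.
type addSceneLinkFunc func(o *Scene) (bool, error)

type sceneMatcher func(ctx context.Context, name string) ([]*Scene, error)

// tagScenes sketches the loop from tagger.go: find candidate scenes for the
// subject name, invoke the callback on each, and log only real additions.
func tagScenes(ctx context.Context, name string, find sceneMatcher, addFunc addSceneLinkFunc) error {
    others, err := find(ctx, name)
    if err != nil {
        return err
    }
    for _, p := range others {
        added, err := addFunc(p)
        if err != nil {
            return fmt.Errorf("error adding scene '%s' to '%s': %w", p.DisplayName(), name, err)
        }
        if added {
            fmt.Printf("Added '%s' to '%s'\n", p.DisplayName(), name)
        }
    }
    return nil
}

func main() {
    find := func(ctx context.Context, name string) ([]*Scene, error) {
        return []*Scene{{ID: 1, Path: "/videos/studio name/clip.mp4"}}, nil
    }
    _ = tagScenes(context.Background(), "studio name", find, func(o *Scene) (bool, error) {
        return true, nil
    })
}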


@@ -41,6 +41,7 @@ import (
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/sliceutil/stringslice" "github.com/stashapp/stash/pkg/sliceutil/stringslice"
"github.com/stashapp/stash/pkg/txn"
) )
var pageSize = 100 var pageSize = 100
@@ -56,7 +57,6 @@ type browse struct {
type contentDirectoryService struct { type contentDirectoryService struct {
*Server *Server
upnp.Eventing upnp.Eventing
txnManager models.TransactionManager
} }
func formatDurationSexagesimal(d time.Duration) string { func formatDurationSexagesimal(d time.Duration) string {
@@ -108,9 +108,18 @@ func sceneToContainer(scene *models.Scene, parent string, host string) interface
} }
mimeType := "video/mp4" mimeType := "video/mp4"
size, _ := strconv.Atoi(scene.Size.String) var (
size int
bitrate uint
duration int64
)
duration := int64(scene.Duration.Float64) f := scene.Files.Primary()
if f != nil {
size = int(f.Size)
bitrate = uint(f.BitRate)
duration = int64(f.Duration)
}
item.Res = append(item.Res, upnpav.Resource{ item.Res = append(item.Res, upnpav.Resource{
URL: (&url.URL{ URL: (&url.URL{
@@ -124,8 +133,7 @@ func sceneToContainer(scene *models.Scene, parent string, host string) interface
ProtocolInfo: fmt.Sprintf("http-get:*:%s:%s", mimeType, dlna.ContentFeatures{ ProtocolInfo: fmt.Sprintf("http-get:*:%s:%s", mimeType, dlna.ContentFeatures{
SupportRange: true, SupportRange: true,
}.String()), }.String()),
Bitrate: uint(scene.Bitrate.Int64), Bitrate: bitrate,
// TODO - make %d:%02d:%02d string
Duration: formatDurationSexagesimal(time.Duration(duration) * time.Second), Duration: formatDurationSexagesimal(time.Duration(duration) * time.Second),
Size: uint64(size), Size: uint64(size),
// Resolution: resolution, // Resolution: resolution,
@@ -352,8 +360,12 @@ func (me *contentDirectoryService) handleBrowseMetadata(obj object, host string)
} else { } else {
var scene *models.Scene var scene *models.Scene
if err := me.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { if err := txn.WithTxn(context.TODO(), me.txnManager, func(ctx context.Context) error {
scene, err = r.Scene().Find(sceneID) scene, err = me.repository.SceneFinder.Find(ctx, sceneID)
if scene != nil {
err = scene.LoadPrimaryFile(ctx, me.repository.FileFinder)
}
if err != nil { if err != nil {
return err return err
} }
@@ -370,7 +382,7 @@ func (me *contentDirectoryService) handleBrowseMetadata(obj object, host string)
// http://upnp.org/specs/av/UPnP-av-ContentDirectory-v1-Service.pdf // http://upnp.org/specs/av/UPnP-av-ContentDirectory-v1-Service.pdf
// maximum update ID is 2**32, then rolls back to 0 // maximum update ID is 2**32, then rolls back to 0
const maxUpdateID int64 = 1 << 32 const maxUpdateID int64 = 1 << 32
updateID = fmt.Sprint(scene.UpdatedAt.Timestamp.Unix() % maxUpdateID) updateID = fmt.Sprint(scene.UpdatedAt.Unix() % maxUpdateID)
} else { } else {
return nil, upnp.Errorf(upnpav.NoSuchObjectErrorCode, "scene not found") return nil, upnp.Errorf(upnpav.NoSuchObjectErrorCode, "scene not found")
} }
@@ -431,14 +443,14 @@ func getRootObjects() []interface{} {
func (me *contentDirectoryService) getVideos(sceneFilter *models.SceneFilterType, parentID string, host string) []interface{} { func (me *contentDirectoryService) getVideos(sceneFilter *models.SceneFilterType, parentID string, host string) []interface{} {
var objs []interface{} var objs []interface{}
if err := me.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { if err := txn.WithTxn(context.TODO(), me.txnManager, func(ctx context.Context) error {
sort := "title" sort := "title"
findFilter := &models.FindFilterType{ findFilter := &models.FindFilterType{
PerPage: &pageSize, PerPage: &pageSize,
Sort: &sort, Sort: &sort,
} }
scenes, total, err := scene.QueryWithCount(r.Scene(), sceneFilter, findFilter) scenes, total, err := scene.QueryWithCount(ctx, me.repository.SceneFinder, sceneFilter, findFilter)
if err != nil { if err != nil {
return err return err
} }
@@ -449,7 +461,7 @@ func (me *contentDirectoryService) getVideos(sceneFilter *models.SceneFilterType
parentID: parentID, parentID: parentID,
} }
objs, err = pager.getPages(r, total) objs, err = pager.getPages(ctx, me.repository.SceneFinder, total)
if err != nil { if err != nil {
return err return err
} }
@@ -470,14 +482,14 @@ func (me *contentDirectoryService) getVideos(sceneFilter *models.SceneFilterType
func (me *contentDirectoryService) getPageVideos(sceneFilter *models.SceneFilterType, parentID string, page int, host string) []interface{} { func (me *contentDirectoryService) getPageVideos(sceneFilter *models.SceneFilterType, parentID string, page int, host string) []interface{} {
var objs []interface{} var objs []interface{}
if err := me.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { if err := txn.WithTxn(context.TODO(), me.txnManager, func(ctx context.Context) error {
pager := scenePager{ pager := scenePager{
sceneFilter: sceneFilter, sceneFilter: sceneFilter,
parentID: parentID, parentID: parentID,
} }
var err error var err error
objs, err = pager.getPageVideos(r, page, host) objs, err = pager.getPageVideos(ctx, me.repository.SceneFinder, page, host)
if err != nil { if err != nil {
return err return err
} }
@@ -511,8 +523,8 @@ func (me *contentDirectoryService) getAllScenes(host string) []interface{} {
func (me *contentDirectoryService) getStudios() []interface{} { func (me *contentDirectoryService) getStudios() []interface{} {
var objs []interface{} var objs []interface{}
if err := me.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { if err := txn.WithTxn(context.TODO(), me.txnManager, func(ctx context.Context) error {
studios, err := r.Studio().All() studios, err := me.repository.StudioFinder.All(ctx)
if err != nil { if err != nil {
return err return err
} }
@@ -550,8 +562,8 @@ func (me *contentDirectoryService) getStudioScenes(paths []string, host string)
func (me *contentDirectoryService) getTags() []interface{} { func (me *contentDirectoryService) getTags() []interface{} {
var objs []interface{} var objs []interface{}
if err := me.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { if err := txn.WithTxn(context.TODO(), me.txnManager, func(ctx context.Context) error {
tags, err := r.Tag().All() tags, err := me.repository.TagFinder.All(ctx)
if err != nil { if err != nil {
return err return err
} }
@@ -589,8 +601,8 @@ func (me *contentDirectoryService) getTagScenes(paths []string, host string) []i
func (me *contentDirectoryService) getPerformers() []interface{} { func (me *contentDirectoryService) getPerformers() []interface{} {
var objs []interface{} var objs []interface{}
if err := me.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { if err := txn.WithTxn(context.TODO(), me.txnManager, func(ctx context.Context) error {
performers, err := r.Performer().All() performers, err := me.repository.PerformerFinder.All(ctx)
if err != nil { if err != nil {
return err return err
} }
@@ -628,8 +640,8 @@ func (me *contentDirectoryService) getPerformerScenes(paths []string, host strin
func (me *contentDirectoryService) getMovies() []interface{} { func (me *contentDirectoryService) getMovies() []interface{} {
var objs []interface{} var objs []interface{}
if err := me.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { if err := txn.WithTxn(context.TODO(), me.txnManager, func(ctx context.Context) error {
movies, err := r.Movie().All() movies, err := me.repository.MovieFinder.All(ctx)
if err != nil { if err != nil {
return err return err
} }
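
The DLNA service now wraps repository access in txn.WithTxn with a context-taking callback, replacing the old txnManager.WithReadTxn(repository) shape. A small runnable sketch of that pattern; only the WithTxn call shape is taken from the diff, while Manager, noopManager, SceneFinder and memFinder below are assumed stand-ins, not the stash pkg/txn API:

package main

import (
    "context"
    "fmt"
)

// Manager is a stand-in for the transaction manager used by WithTxn below.
type Manager interface {
    Begin(ctx context.Context) (context.Context, error)
    Commit(ctx context.Context) error
    Rollback(ctx context.Context) error
}

// WithTxn mirrors the call shape used above:
// txn.WithTxn(ctx, manager, func(ctx context.Context) error { ... }).
func WithTxn(ctx context.Context, m Manager, fn func(ctx context.Context) error) error {
    ctx, err := m.Begin(ctx)
    if err != nil {
        return err
    }
    if err := fn(ctx); err != nil {
        _ = m.Rollback(ctx)
        return err
    }
    return m.Commit(ctx)
}

// noopManager just runs the callback; enough to exercise the sketch.
type noopManager struct{}

func (noopManager) Begin(ctx context.Context) (context.Context, error) { return ctx, nil }
func (noopManager) Commit(context.Context) error                       { return nil }
func (noopManager) Rollback(context.Context) error                     { return nil }

type Scene struct{ ID int }

type SceneFinder interface {
    Find(ctx context.Context, id int) (*Scene, error)
}

type memFinder struct{}

func (memFinder) Find(_ context.Context, id int) (*Scene, error) { return &Scene{ID: id}, nil }

func main() {
    var finder SceneFinder = memFinder{}
    var scene *Scene
    err := WithTxn(context.TODO(), noopManager{}, func(ctx context.Context) error {
        var ferr error
        scene, ferr = finder.Find(ctx, 42)
        return ferr
    })
    fmt.Println(scene.ID, err) // 42 <nil>
}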
