Mirror of https://github.com/stashapp/stash.git (synced 2025-12-17 04:14:39 +03:00)
[Files Refactor] Performance tuning (#2819)
* Load scene relationships on demand
* Load image relationships on demand
* Load gallery relationships on demand
* Add dataloaden
* Use dataloaders
* Use WHERE ... IN for the other FindMany functions
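As a rough illustration of the pattern these changes introduce (the helper below is hypothetical; only loaders.From and the per-type loaders exist in this commit), a resolver asks its request-scoped loader for a single ID, and all IDs requested within the wait window are fetched together with one FindMany query instead of one query per row:

package api // illustrative placement, not part of the commit

import (
	"context"

	"github.com/stashapp/stash/internal/api/loaders"
	"github.com/stashapp/stash/pkg/models"
)

// findScene is a hypothetical helper showing the intended call pattern:
// each call registers its key with the request's SceneLoader; keys collected
// within the ~1ms wait window are resolved by a single FindMany query.
func findScene(ctx context.Context, sceneID int) (*models.Scene, error) {
	return loaders.From(ctx).SceneByID.Load(sceneID)
}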
Makefile (+4)
@@ -162,6 +162,10 @@ generate-frontend:
 generate-backend: touch-ui
 	go generate -mod=vendor ./cmd/stash
 
+.PHONY: generate-dataloaders
+generate-dataloaders:
+	go generate -mod=vendor ./internal/api/loaders
+
 # Regenerates stash-box client files
 .PHONY: generate-stash-box-client
 generate-stash-box-client:
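The new target mirrors the existing generate rules: running `make generate-dataloaders` executes the `//go:generate` directives in internal/api/loaders (see dataloaders.go below), which regenerate the per-type *_gen.go loader files with github.com/vektah/dataloaden.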
go.mod (+5 -5)
@@ -36,11 +36,11 @@ require (
 github.com/vektra/mockery/v2 v2.10.0
 golang.org/x/crypto v0.0.0-20220321153916-2c7772ba3064
 golang.org/x/image v0.0.0-20210220032944-ac19c3e999fb
-golang.org/x/net v0.0.0-20211123203042-d83791d6bcd9
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b
-golang.org/x/sys v0.0.0-20220329152356-43be30ef3008
+golang.org/x/sys v0.0.0-20220808155132-1c4a2a72c664
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211
 golang.org/x/text v0.3.7
-golang.org/x/tools v0.1.10 // indirect
+golang.org/x/tools v0.1.12 // indirect
 gopkg.in/sourcemap.v1 v1.0.5 // indirect
 gopkg.in/yaml.v2 v2.4.0
 )
@@ -56,6 +56,7 @@ require (
 github.com/lucasb-eyer/go-colorful v1.2.0
 github.com/spf13/cast v1.4.1
 github.com/vearutop/statigz v1.1.6
+github.com/vektah/dataloaden v0.3.0
 github.com/vektah/gqlparser/v2 v2.4.1
 gopkg.in/guregu/null.v4 v4.0.0
 )
@@ -100,8 +101,7 @@ require (
 github.com/tidwall/match v1.1.1 // indirect
 github.com/urfave/cli/v2 v2.4.0 // indirect
 go.uber.org/atomic v1.7.0 // indirect
-golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 // indirect
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect
-golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
 gopkg.in/ini.v1 v1.66.4 // indirect
 gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
 )
go.sum (+15 -7)
@@ -753,6 +753,8 @@ github.com/urfave/cli/v2 v2.4.0 h1:m2pxjjDFgDxSPtO8WSdbndj17Wu2y8vOT86wE/tjr+I=
 github.com/urfave/cli/v2 v2.4.0/go.mod h1:NX9W0zmTvedE5oDoOMs2RTC8RvdK98NTYZE5LbaEYPg=
 github.com/vearutop/statigz v1.1.6 h1:si1zvulh/6P4S/SjFticuKQ8/EgQISglaRuycj8PWso=
 github.com/vearutop/statigz v1.1.6/go.mod h1:czAv7iXgPv/s+xsgXpVEhhD0NSOQ4wZPgmM/n7LANDI=
+github.com/vektah/dataloaden v0.3.0 h1:ZfVN2QD6swgvp+tDqdH/OIT/wu3Dhu0cus0k5gIZS84=
+github.com/vektah/dataloaden v0.3.0/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U=
 github.com/vektah/gqlparser/v2 v2.4.0/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0=
 github.com/vektah/gqlparser/v2 v2.4.1 h1:QOyEn8DAPMUMARGMeshKDkDgNmVoEaEGiDB0uWxcSlQ=
 github.com/vektah/gqlparser/v2 v2.4.1/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0=
@@ -771,6 +773,7 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec
 github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
 github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
 github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
 github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
 gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b/go.mod h1:T3BPAOm2cqquPa0MKWeNkmOM5RQsRhkrwMWonFMN7fE=
 go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=
@@ -863,8 +866,9 @@ golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
 golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
-golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 h1:kQgndtyPBW/JIYERgdxfwMYh3AVStj88WQTlNDi2a+o=
 golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 h1:6zppjxzCulZykYSLyVDYbneBfbaBIQPYMevg0bEwv2s=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
 golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -920,8 +924,8 @@ golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qx
 golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
-golang.org/x/net v0.0.0-20211123203042-d83791d6bcd9 h1:0qxwC5n+ttVOINCBeRHO0nq9X7uy8SDsPoi5OaCdIEI=
-golang.org/x/net v0.0.0-20211123203042-d83791d6bcd9/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b h1:PxfKdU9lEEDYjdIzOtC4qFWgkU2rGHdKlKowJSMN9h0=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
 golang.org/x/oauth2 v0.0.0-20180227000427-d7d64896b5ff/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@@ -954,6 +958,7 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ
 golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20180224232135-f6cff0780e54/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -1047,8 +1052,10 @@ golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBc
 golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220319134239-a9b59b0215f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220329152356-43be30ef3008 h1:pq9pwoi2rjLWvmiVser/lIOgiyA3fli4M+RfGVMA7nE=
-golang.org/x/sys v0.0.0-20220329152356-43be30ef3008/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220808155132-1c4a2a72c664 h1:v1W7bwXHsnLLloWYTVEdvGvA7BHMeBYsPcF0GLDxIRs=
+golang.org/x/sys v0.0.0-20220808155132-1c4a2a72c664/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 h1:JGgROgKl9N8DuW20oFS5gxc+lE67/N3FcwmBPMe7ArY=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@@ -1078,6 +1085,7 @@ golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3
 golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
 golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
 golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/tools v0.0.0-20190515012406-7d7faa4812bd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
 golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
 golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
 golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
@@ -1135,14 +1143,14 @@ golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
 golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo=
 golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
 golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
-golang.org/x/tools v0.1.10 h1:QjFRCZxdOhBJ/UNgnBZLbNV13DlbnK0quyivTnXJM20=
 golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
+golang.org/x/tools v0.1.12 h1:VveCTK38A2rkS8ZqFY25HIDFscX5X9OoEhJd3quQmXU=
+golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
 golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
 golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
 google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
internal/api/loaders/dataloaders.go (new file, +187)
@@ -0,0 +1,187 @@
//go:generate go run -mod=vendor github.com/vektah/dataloaden SceneLoader int *github.com/stashapp/stash/pkg/models.Scene
//go:generate go run -mod=vendor github.com/vektah/dataloaden GalleryLoader int *github.com/stashapp/stash/pkg/models.Gallery
//go:generate go run -mod=vendor github.com/vektah/dataloaden ImageLoader int *github.com/stashapp/stash/pkg/models.Image
//go:generate go run -mod=vendor github.com/vektah/dataloaden PerformerLoader int *github.com/stashapp/stash/pkg/models.Performer
//go:generate go run -mod=vendor github.com/vektah/dataloaden StudioLoader int *github.com/stashapp/stash/pkg/models.Studio
//go:generate go run -mod=vendor github.com/vektah/dataloaden TagLoader int *github.com/stashapp/stash/pkg/models.Tag
//go:generate go run -mod=vendor github.com/vektah/dataloaden MovieLoader int *github.com/stashapp/stash/pkg/models.Movie

package loaders

import (
	"context"
	"net/http"
	"time"

	"github.com/stashapp/stash/internal/manager"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/txn"
)

type contextKey struct{ name string }

var (
	loadersCtxKey = &contextKey{"loaders"}
)

const (
	wait     = 1 * time.Millisecond
	maxBatch = 100
)

type Loaders struct {
	SceneByID     *SceneLoader
	GalleryByID   *GalleryLoader
	ImageByID     *ImageLoader
	PerformerByID *PerformerLoader
	StudioByID    *StudioLoader
	TagByID       *TagLoader
	MovieByID     *MovieLoader
}

type Middleware struct {
	DatabaseProvider txn.DatabaseProvider
	Repository       manager.Repository
}

func (m Middleware) Middleware(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()
		ldrs := Loaders{
			SceneByID: &SceneLoader{
				wait:     wait,
				maxBatch: maxBatch,
				fetch:    m.fetchScenes(ctx),
			},
			GalleryByID: &GalleryLoader{
				wait:     wait,
				maxBatch: maxBatch,
				fetch:    m.fetchGalleries(ctx),
			},
			ImageByID: &ImageLoader{
				wait:     wait,
				maxBatch: maxBatch,
				fetch:    m.fetchImages(ctx),
			},
			PerformerByID: &PerformerLoader{
				wait:     wait,
				maxBatch: maxBatch,
				fetch:    m.fetchPerformers(ctx),
			},
			StudioByID: &StudioLoader{
				wait:     wait,
				maxBatch: maxBatch,
				fetch:    m.fetchStudios(ctx),
			},
			TagByID: &TagLoader{
				wait:     wait,
				maxBatch: maxBatch,
				fetch:    m.fetchTags(ctx),
			},
			MovieByID: &MovieLoader{
				wait:     wait,
				maxBatch: maxBatch,
				fetch:    m.fetchMovies(ctx),
			},
		}

		newCtx := context.WithValue(r.Context(), loadersCtxKey, ldrs)
		next.ServeHTTP(w, r.WithContext(newCtx))
	})
}

func From(ctx context.Context) Loaders {
	return ctx.Value(loadersCtxKey).(Loaders)
}

func toErrorSlice(err error) []error {
	if err != nil {
		return []error{err}
	}

	return nil
}

func (m Middleware) withTxn(ctx context.Context, fn func(ctx context.Context) error) error {
	return txn.WithDatabase(ctx, m.DatabaseProvider, fn)
}

func (m Middleware) fetchScenes(ctx context.Context) func(keys []int) ([]*models.Scene, []error) {
	return func(keys []int) (ret []*models.Scene, errs []error) {
		err := m.withTxn(ctx, func(ctx context.Context) error {
			var err error
			ret, err = m.Repository.Scene.FindMany(ctx, keys)
			return err
		})
		return ret, toErrorSlice(err)
	}
}

func (m Middleware) fetchImages(ctx context.Context) func(keys []int) ([]*models.Image, []error) {
	return func(keys []int) (ret []*models.Image, errs []error) {
		err := m.withTxn(ctx, func(ctx context.Context) error {
			var err error
			ret, err = m.Repository.Image.FindMany(ctx, keys)
			return err
		})

		return ret, toErrorSlice(err)
	}
}

func (m Middleware) fetchGalleries(ctx context.Context) func(keys []int) ([]*models.Gallery, []error) {
	return func(keys []int) (ret []*models.Gallery, errs []error) {
		err := m.withTxn(ctx, func(ctx context.Context) error {
			var err error
			ret, err = m.Repository.Gallery.FindMany(ctx, keys)
			return err
		})

		return ret, toErrorSlice(err)
	}
}

func (m Middleware) fetchPerformers(ctx context.Context) func(keys []int) ([]*models.Performer, []error) {
	return func(keys []int) (ret []*models.Performer, errs []error) {
		err := m.withTxn(ctx, func(ctx context.Context) error {
			var err error
			ret, err = m.Repository.Performer.FindMany(ctx, keys)
			return err
		})

		return ret, toErrorSlice(err)
	}
}

func (m Middleware) fetchStudios(ctx context.Context) func(keys []int) ([]*models.Studio, []error) {
	return func(keys []int) (ret []*models.Studio, errs []error) {
		err := m.withTxn(ctx, func(ctx context.Context) error {
			var err error
			ret, err = m.Repository.Studio.FindMany(ctx, keys)
			return err
		})
		return ret, toErrorSlice(err)
	}
}

func (m Middleware) fetchTags(ctx context.Context) func(keys []int) ([]*models.Tag, []error) {
	return func(keys []int) (ret []*models.Tag, errs []error) {
		err := m.withTxn(ctx, func(ctx context.Context) error {
			var err error
			ret, err = m.Repository.Tag.FindMany(ctx, keys)
			return err
		})
		return ret, toErrorSlice(err)
	}
}

func (m Middleware) fetchMovies(ctx context.Context) func(keys []int) ([]*models.Movie, []error) {
	return func(keys []int) (ret []*models.Movie, errs []error) {
		err := m.withTxn(ctx, func(ctx context.Context) error {
			var err error
			ret, err = m.Repository.Movie.FindMany(ctx, keys)
			return err
		})
		return ret, toErrorSlice(err)
	}
}
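A minimal sketch of how the middleware above would be mounted; the helper below is illustrative and not part of the commit (the real route setup lives elsewhere in stash), only loaders.Middleware comes from this file. Because the Loaders value is built per request, batching and the in-memory caches never outlive a single GraphQL request:

package api // illustrative placement, not part of the commit

import (
	"net/http"

	"github.com/stashapp/stash/internal/api/loaders"
	"github.com/stashapp/stash/internal/manager"
	"github.com/stashapp/stash/pkg/txn"
)

// wrapWithLoaders is a hypothetical helper: it wraps the GraphQL handler so
// every incoming request carries a fresh Loaders value in its context.
func wrapWithLoaders(db txn.DatabaseProvider, repo manager.Repository, gql http.Handler) http.Handler {
	m := loaders.Middleware{
		DatabaseProvider: db,
		Repository:       repo,
	}
	return m.Middleware(gql)
}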
internal/api/loaders/galleryloader_gen.go (new file, +224)
@@ -0,0 +1,224 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.

package loaders

import (
	"sync"
	"time"

	"github.com/stashapp/stash/pkg/models"
)

// GalleryLoaderConfig captures the config to create a new GalleryLoader
type GalleryLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []int) ([]*models.Gallery, []error)

	// Wait is how long wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit
	MaxBatch int
}

// NewGalleryLoader creates a new GalleryLoader given a fetch, wait, and maxBatch
func NewGalleryLoader(config GalleryLoaderConfig) *GalleryLoader {
	return &GalleryLoader{
		fetch:    config.Fetch,
		wait:     config.Wait,
		maxBatch: config.MaxBatch,
	}
}

// GalleryLoader batches and caches requests
type GalleryLoader struct {
	// this method provides the data for the loader
	fetch func(keys []int) ([]*models.Gallery, []error)

	// how long to done before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[int]*models.Gallery

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *galleryLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}

type galleryLoaderBatch struct {
	keys    []int
	data    []*models.Gallery
	error   []error
	closing bool
	done    chan struct{}
}

// Load a Gallery by key, batching and caching will be applied automatically
func (l *GalleryLoader) Load(key int) (*models.Gallery, error) {
	return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for a Gallery.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *GalleryLoader) LoadThunk(key int) func() (*models.Gallery, error) {
	l.mu.Lock()
	if it, ok := l.cache[key]; ok {
		l.mu.Unlock()
		return func() (*models.Gallery, error) {
			return it, nil
		}
	}
	if l.batch == nil {
		l.batch = &galleryLoaderBatch{done: make(chan struct{})}
	}
	batch := l.batch
	pos := batch.keyIndex(l, key)
	l.mu.Unlock()

	return func() (*models.Gallery, error) {
		<-batch.done

		var data *models.Gallery
		if pos < len(batch.data) {
			data = batch.data[pos]
		}

		var err error
		// its convenient to be able to return a single error for everything
		if len(batch.error) == 1 {
			err = batch.error[0]
		} else if batch.error != nil {
			err = batch.error[pos]
		}

		if err == nil {
			l.mu.Lock()
			l.unsafeSet(key, data)
			l.mu.Unlock()
		}

		return data, err
	}
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *GalleryLoader) LoadAll(keys []int) ([]*models.Gallery, []error) {
	results := make([]func() (*models.Gallery, error), len(keys))

	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}

	gallerys := make([]*models.Gallery, len(keys))
	errors := make([]error, len(keys))
	for i, thunk := range results {
		gallerys[i], errors[i] = thunk()
	}
	return gallerys, errors
}

// LoadAllThunk returns a function that when called will block waiting for a Gallerys.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *GalleryLoader) LoadAllThunk(keys []int) func() ([]*models.Gallery, []error) {
	results := make([]func() (*models.Gallery, error), len(keys))
	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}
	return func() ([]*models.Gallery, []error) {
		gallerys := make([]*models.Gallery, len(keys))
		errors := make([]error, len(keys))
		for i, thunk := range results {
			gallerys[i], errors[i] = thunk()
		}
		return gallerys, errors
	}
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *GalleryLoader) Prime(key int, value *models.Gallery) bool {
	l.mu.Lock()
	var found bool
	if _, found = l.cache[key]; !found {
		// make a copy when writing to the cache, its easy to pass a pointer in from a loop var
		// and end up with the whole cache pointing to the same value.
		cpy := *value
		l.unsafeSet(key, &cpy)
	}
	l.mu.Unlock()
	return !found
}

// Clear the value at key from the cache, if it exists
func (l *GalleryLoader) Clear(key int) {
	l.mu.Lock()
	delete(l.cache, key)
	l.mu.Unlock()
}

func (l *GalleryLoader) unsafeSet(key int, value *models.Gallery) {
	if l.cache == nil {
		l.cache = map[int]*models.Gallery{}
	}
	l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *galleryLoaderBatch) keyIndex(l *GalleryLoader, key int) int {
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}

	pos := len(b.keys)
	b.keys = append(b.keys, key)
	if pos == 0 {
		go b.startTimer(l)
	}

	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}

	return pos
}

func (b *galleryLoaderBatch) startTimer(l *GalleryLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()

	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}

	l.batch = nil
	l.mu.Unlock()

	b.end(l)
}

func (b *galleryLoaderBatch) end(l *GalleryLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}
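The remaining *_gen.go files below expose the same generated API for their own types. A hypothetical snippet exercising the main entry points (the IDs are made up; GalleryByID comes from dataloaders.go above): Load blocks for the batch window, LoadAll resolves several keys through shared batches, and Prime seeds the request cache so a later Load skips the fetch.

package api // illustrative placement, not part of the commit

import (
	"context"

	"github.com/stashapp/stash/internal/api/loaders"
)

// exampleGalleryLoads is a hypothetical snippet, not part of the commit.
func exampleGalleryLoads(ctx context.Context) error {
	ldr := loaders.From(ctx).GalleryByID

	// Batched with any other keys requested within the wait window.
	g, err := ldr.Load(42)
	if err != nil {
		return err
	}
	if g != nil {
		// Seed the request-scoped cache; a later Load(42) now returns immediately.
		ldr.Prime(42, g)
	}

	// Resolve several keys at once; errors are reported per key.
	_, errs := ldr.LoadAll([]int{1, 2, 3})
	for _, e := range errs {
		if e != nil {
			return e
		}
	}
	return nil
}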
internal/api/loaders/imageloader_gen.go (new file, +224)
@@ -0,0 +1,224 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.

package loaders

import (
	"sync"
	"time"

	"github.com/stashapp/stash/pkg/models"
)

// ImageLoaderConfig captures the config to create a new ImageLoader
type ImageLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []int) ([]*models.Image, []error)

	// Wait is how long wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit
	MaxBatch int
}

// NewImageLoader creates a new ImageLoader given a fetch, wait, and maxBatch
func NewImageLoader(config ImageLoaderConfig) *ImageLoader {
	return &ImageLoader{
		fetch:    config.Fetch,
		wait:     config.Wait,
		maxBatch: config.MaxBatch,
	}
}

// ImageLoader batches and caches requests
type ImageLoader struct {
	// this method provides the data for the loader
	fetch func(keys []int) ([]*models.Image, []error)

	// how long to done before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[int]*models.Image

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *imageLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}

type imageLoaderBatch struct {
	keys    []int
	data    []*models.Image
	error   []error
	closing bool
	done    chan struct{}
}

// Load a Image by key, batching and caching will be applied automatically
func (l *ImageLoader) Load(key int) (*models.Image, error) {
	return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for a Image.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ImageLoader) LoadThunk(key int) func() (*models.Image, error) {
	l.mu.Lock()
	if it, ok := l.cache[key]; ok {
		l.mu.Unlock()
		return func() (*models.Image, error) {
			return it, nil
		}
	}
	if l.batch == nil {
		l.batch = &imageLoaderBatch{done: make(chan struct{})}
	}
	batch := l.batch
	pos := batch.keyIndex(l, key)
	l.mu.Unlock()

	return func() (*models.Image, error) {
		<-batch.done

		var data *models.Image
		if pos < len(batch.data) {
			data = batch.data[pos]
		}

		var err error
		// its convenient to be able to return a single error for everything
		if len(batch.error) == 1 {
			err = batch.error[0]
		} else if batch.error != nil {
			err = batch.error[pos]
		}

		if err == nil {
			l.mu.Lock()
			l.unsafeSet(key, data)
			l.mu.Unlock()
		}

		return data, err
	}
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *ImageLoader) LoadAll(keys []int) ([]*models.Image, []error) {
	results := make([]func() (*models.Image, error), len(keys))

	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}

	images := make([]*models.Image, len(keys))
	errors := make([]error, len(keys))
	for i, thunk := range results {
		images[i], errors[i] = thunk()
	}
	return images, errors
}

// LoadAllThunk returns a function that when called will block waiting for a Images.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ImageLoader) LoadAllThunk(keys []int) func() ([]*models.Image, []error) {
	results := make([]func() (*models.Image, error), len(keys))
	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}
	return func() ([]*models.Image, []error) {
		images := make([]*models.Image, len(keys))
		errors := make([]error, len(keys))
		for i, thunk := range results {
			images[i], errors[i] = thunk()
		}
		return images, errors
	}
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *ImageLoader) Prime(key int, value *models.Image) bool {
	l.mu.Lock()
	var found bool
	if _, found = l.cache[key]; !found {
		// make a copy when writing to the cache, its easy to pass a pointer in from a loop var
		// and end up with the whole cache pointing to the same value.
		cpy := *value
		l.unsafeSet(key, &cpy)
	}
	l.mu.Unlock()
	return !found
}

// Clear the value at key from the cache, if it exists
func (l *ImageLoader) Clear(key int) {
	l.mu.Lock()
	delete(l.cache, key)
	l.mu.Unlock()
}

func (l *ImageLoader) unsafeSet(key int, value *models.Image) {
	if l.cache == nil {
		l.cache = map[int]*models.Image{}
	}
	l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *imageLoaderBatch) keyIndex(l *ImageLoader, key int) int {
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}

	pos := len(b.keys)
	b.keys = append(b.keys, key)
	if pos == 0 {
		go b.startTimer(l)
	}

	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}

	return pos
}

func (b *imageLoaderBatch) startTimer(l *ImageLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()

	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}

	l.batch = nil
	l.mu.Unlock()

	b.end(l)
}

func (b *imageLoaderBatch) end(l *ImageLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}
internal/api/loaders/movieloader_gen.go (new file, +224)
@@ -0,0 +1,224 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.

package loaders

import (
	"sync"
	"time"

	"github.com/stashapp/stash/pkg/models"
)

// MovieLoaderConfig captures the config to create a new MovieLoader
type MovieLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []int) ([]*models.Movie, []error)

	// Wait is how long wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit
	MaxBatch int
}

// NewMovieLoader creates a new MovieLoader given a fetch, wait, and maxBatch
func NewMovieLoader(config MovieLoaderConfig) *MovieLoader {
	return &MovieLoader{
		fetch:    config.Fetch,
		wait:     config.Wait,
		maxBatch: config.MaxBatch,
	}
}

// MovieLoader batches and caches requests
type MovieLoader struct {
	// this method provides the data for the loader
	fetch func(keys []int) ([]*models.Movie, []error)

	// how long to done before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[int]*models.Movie

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *movieLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}

type movieLoaderBatch struct {
	keys    []int
	data    []*models.Movie
	error   []error
	closing bool
	done    chan struct{}
}

// Load a Movie by key, batching and caching will be applied automatically
func (l *MovieLoader) Load(key int) (*models.Movie, error) {
	return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for a Movie.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *MovieLoader) LoadThunk(key int) func() (*models.Movie, error) {
	l.mu.Lock()
	if it, ok := l.cache[key]; ok {
		l.mu.Unlock()
		return func() (*models.Movie, error) {
			return it, nil
		}
	}
	if l.batch == nil {
		l.batch = &movieLoaderBatch{done: make(chan struct{})}
	}
	batch := l.batch
	pos := batch.keyIndex(l, key)
	l.mu.Unlock()

	return func() (*models.Movie, error) {
		<-batch.done

		var data *models.Movie
		if pos < len(batch.data) {
			data = batch.data[pos]
		}

		var err error
		// its convenient to be able to return a single error for everything
		if len(batch.error) == 1 {
			err = batch.error[0]
		} else if batch.error != nil {
			err = batch.error[pos]
		}

		if err == nil {
			l.mu.Lock()
			l.unsafeSet(key, data)
			l.mu.Unlock()
		}

		return data, err
	}
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *MovieLoader) LoadAll(keys []int) ([]*models.Movie, []error) {
	results := make([]func() (*models.Movie, error), len(keys))

	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}

	movies := make([]*models.Movie, len(keys))
	errors := make([]error, len(keys))
	for i, thunk := range results {
		movies[i], errors[i] = thunk()
	}
	return movies, errors
}

// LoadAllThunk returns a function that when called will block waiting for a Movies.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *MovieLoader) LoadAllThunk(keys []int) func() ([]*models.Movie, []error) {
	results := make([]func() (*models.Movie, error), len(keys))
	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}
	return func() ([]*models.Movie, []error) {
		movies := make([]*models.Movie, len(keys))
		errors := make([]error, len(keys))
		for i, thunk := range results {
			movies[i], errors[i] = thunk()
		}
		return movies, errors
	}
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *MovieLoader) Prime(key int, value *models.Movie) bool {
	l.mu.Lock()
	var found bool
	if _, found = l.cache[key]; !found {
		// make a copy when writing to the cache, its easy to pass a pointer in from a loop var
		// and end up with the whole cache pointing to the same value.
		cpy := *value
		l.unsafeSet(key, &cpy)
	}
	l.mu.Unlock()
	return !found
}

// Clear the value at key from the cache, if it exists
func (l *MovieLoader) Clear(key int) {
	l.mu.Lock()
	delete(l.cache, key)
	l.mu.Unlock()
}

func (l *MovieLoader) unsafeSet(key int, value *models.Movie) {
	if l.cache == nil {
		l.cache = map[int]*models.Movie{}
	}
	l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *movieLoaderBatch) keyIndex(l *MovieLoader, key int) int {
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}

	pos := len(b.keys)
	b.keys = append(b.keys, key)
	if pos == 0 {
		go b.startTimer(l)
	}

	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}

	return pos
}

func (b *movieLoaderBatch) startTimer(l *MovieLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()

	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}

	l.batch = nil
	l.mu.Unlock()

	b.end(l)
}

func (b *movieLoaderBatch) end(l *MovieLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}
224
internal/api/loaders/performerloader_gen.go
Normal file
224
internal/api/loaders/performerloader_gen.go
Normal file
@@ -0,0 +1,224 @@
|
|||||||
|
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
|
||||||
|
|
||||||
|
package loaders
|
||||||
|
|
||||||
|
import (
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/stashapp/stash/pkg/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// PerformerLoaderConfig captures the config to create a new PerformerLoader
|
||||||
|
type PerformerLoaderConfig struct {
|
||||||
|
// Fetch is a method that provides the data for the loader
|
||||||
|
Fetch func(keys []int) ([]*models.Performer, []error)
|
||||||
|
|
||||||
|
// Wait is how long wait before sending a batch
|
||||||
|
Wait time.Duration
|
||||||
|
|
||||||
|
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit
|
||||||
|
MaxBatch int
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewPerformerLoader creates a new PerformerLoader given a fetch, wait, and maxBatch
|
||||||
|
func NewPerformerLoader(config PerformerLoaderConfig) *PerformerLoader {
|
||||||
|
return &PerformerLoader{
|
||||||
|
fetch: config.Fetch,
|
||||||
|
wait: config.Wait,
|
||||||
|
maxBatch: config.MaxBatch,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// PerformerLoader batches and caches requests
|
||||||
|
type PerformerLoader struct {
|
||||||
|
// this method provides the data for the loader
|
||||||
|
fetch func(keys []int) ([]*models.Performer, []error)
|
||||||
|
|
||||||
|
// how long to done before sending a batch
|
||||||
|
wait time.Duration
|
||||||
|
|
||||||
|
// this will limit the maximum number of keys to send in one batch, 0 = no limit
|
||||||
|
maxBatch int
|
||||||
|
|
||||||
|
// INTERNAL
|
||||||
|
|
||||||
|
// lazily created cache
|
||||||
|
cache map[int]*models.Performer
|
||||||
|
|
||||||
|
// the current batch. keys will continue to be collected until timeout is hit,
|
||||||
|
// then everything will be sent to the fetch method and out to the listeners
|
||||||
|
batch *performerLoaderBatch
|
||||||
|
|
||||||
|
// mutex to prevent races
|
||||||
|
mu sync.Mutex
|
||||||
|
}
|
||||||
|
|
||||||
|
type performerLoaderBatch struct {
|
||||||
|
keys []int
|
||||||
|
data []*models.Performer
|
||||||
|
error []error
|
||||||
|
closing bool
|
||||||
|
done chan struct{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load a Performer by key, batching and caching will be applied automatically
|
||||||
|
func (l *PerformerLoader) Load(key int) (*models.Performer, error) {
|
||||||
|
return l.LoadThunk(key)()
|
||||||
|
}
|
||||||
|
|
||||||
|
// LoadThunk returns a function that when called will block waiting for a Performer.
|
||||||
|
// This method should be used if you want one goroutine to make requests to many
|
||||||
|
// different data loaders without blocking until the thunk is called.
|
||||||
|
func (l *PerformerLoader) LoadThunk(key int) func() (*models.Performer, error) {
|
||||||
|
l.mu.Lock()
|
||||||
|
if it, ok := l.cache[key]; ok {
|
||||||
|
l.mu.Unlock()
|
||||||
|
return func() (*models.Performer, error) {
|
||||||
|
return it, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if l.batch == nil {
|
||||||
|
l.batch = &performerLoaderBatch{done: make(chan struct{})}
|
||||||
|
}
|
||||||
|
batch := l.batch
|
||||||
|
pos := batch.keyIndex(l, key)
|
||||||
|
l.mu.Unlock()
|
||||||
|
|
||||||
|
return func() (*models.Performer, error) {
|
||||||
|
<-batch.done
|
||||||
|
|
||||||
|
var data *models.Performer
|
||||||
|
if pos < len(batch.data) {
|
||||||
|
data = batch.data[pos]
|
||||||
|
}
|
||||||
|
|
||||||
|
var err error
|
||||||
|
// its convenient to be able to return a single error for everything
|
||||||
|
if len(batch.error) == 1 {
|
||||||
|
err = batch.error[0]
|
||||||
|
		} else if batch.error != nil {
			err = batch.error[pos]
		}

		if err == nil {
			l.mu.Lock()
			l.unsafeSet(key, data)
			l.mu.Unlock()
		}

		return data, err
	}
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *PerformerLoader) LoadAll(keys []int) ([]*models.Performer, []error) {
	results := make([]func() (*models.Performer, error), len(keys))

	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}

	performers := make([]*models.Performer, len(keys))
	errors := make([]error, len(keys))
	for i, thunk := range results {
		performers[i], errors[i] = thunk()
	}
	return performers, errors
}

// LoadAllThunk returns a function that when called will block waiting for a Performers.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *PerformerLoader) LoadAllThunk(keys []int) func() ([]*models.Performer, []error) {
	results := make([]func() (*models.Performer, error), len(keys))
	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}
	return func() ([]*models.Performer, []error) {
		performers := make([]*models.Performer, len(keys))
		errors := make([]error, len(keys))
		for i, thunk := range results {
			performers[i], errors[i] = thunk()
		}
		return performers, errors
	}
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *PerformerLoader) Prime(key int, value *models.Performer) bool {
	l.mu.Lock()
	var found bool
	if _, found = l.cache[key]; !found {
		// make a copy when writing to the cache, its easy to pass a pointer in from a loop var
		// and end up with the whole cache pointing to the same value.
		cpy := *value
		l.unsafeSet(key, &cpy)
	}
	l.mu.Unlock()
	return !found
}

// Clear the value at key from the cache, if it exists
func (l *PerformerLoader) Clear(key int) {
	l.mu.Lock()
	delete(l.cache, key)
	l.mu.Unlock()
}

func (l *PerformerLoader) unsafeSet(key int, value *models.Performer) {
	if l.cache == nil {
		l.cache = map[int]*models.Performer{}
	}
	l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *performerLoaderBatch) keyIndex(l *PerformerLoader, key int) int {
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}

	pos := len(b.keys)
	b.keys = append(b.keys, key)
	if pos == 0 {
		go b.startTimer(l)
	}

	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}

	return pos
}

func (b *performerLoaderBatch) startTimer(l *PerformerLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()

	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}

	l.batch = nil
	l.mu.Unlock()

	b.end(l)
}

func (b *performerLoaderBatch) end(l *PerformerLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}
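The generated loaders above only do something useful once they are given a Fetch callback that resolves a whole batch of IDs in one query. That wiring is not part of this file, so the following is a sketch only: findPerformers stands in for whatever batch lookup is used, and the one property that matters is that Fetch returns its results aligned with the order of the keys it received.

package loaders

import (
	"time"

	"github.com/stashapp/stash/pkg/models"
)

// newExamplePerformerLoader is illustrative, not part of this commit.
// findPerformers is a hypothetical "WHERE id IN (...)" style batch query.
func newExamplePerformerLoader(findPerformers func(ids []int) ([]*models.Performer, error)) *PerformerLoader {
	return NewPerformerLoader(PerformerLoaderConfig{
		Wait:     time.Millisecond, // collect keys for up to 1ms before fetching
		MaxBatch: 100,              // or flush early once 100 keys are queued
		Fetch: func(keys []int) ([]*models.Performer, []error) {
			found, err := findPerformers(keys)
			if err != nil {
				// a single error in the slice is fanned out to every pending Load call
				return nil, []error{err}
			}

			// index by ID so results can be returned in key order
			byID := make(map[int]*models.Performer, len(found))
			for _, p := range found {
				byID[p.ID] = p
			}

			result := make([]*models.Performer, len(keys))
			for i, key := range keys {
				result[i] = byID[key] // nil when a key had no match
			}
			return result, nil
		},
	})
}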
224	internal/api/loaders/sceneloader_gen.go	Normal file
@@ -0,0 +1,224 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.

package loaders

import (
	"sync"
	"time"

	"github.com/stashapp/stash/pkg/models"
)

// SceneLoaderConfig captures the config to create a new SceneLoader
type SceneLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []int) ([]*models.Scene, []error)

	// Wait is how long wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit
	MaxBatch int
}

// NewSceneLoader creates a new SceneLoader given a fetch, wait, and maxBatch
func NewSceneLoader(config SceneLoaderConfig) *SceneLoader {
	return &SceneLoader{
		fetch:    config.Fetch,
		wait:     config.Wait,
		maxBatch: config.MaxBatch,
	}
}

// SceneLoader batches and caches requests
type SceneLoader struct {
	// this method provides the data for the loader
	fetch func(keys []int) ([]*models.Scene, []error)

	// how long to done before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[int]*models.Scene

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *sceneLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}

type sceneLoaderBatch struct {
	keys    []int
	data    []*models.Scene
	error   []error
	closing bool
	done    chan struct{}
}

// Load a Scene by key, batching and caching will be applied automatically
func (l *SceneLoader) Load(key int) (*models.Scene, error) {
	return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for a Scene.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneLoader) LoadThunk(key int) func() (*models.Scene, error) {
	l.mu.Lock()
	if it, ok := l.cache[key]; ok {
		l.mu.Unlock()
		return func() (*models.Scene, error) {
			return it, nil
		}
	}
	if l.batch == nil {
		l.batch = &sceneLoaderBatch{done: make(chan struct{})}
	}
	batch := l.batch
	pos := batch.keyIndex(l, key)
	l.mu.Unlock()

	return func() (*models.Scene, error) {
		<-batch.done

		var data *models.Scene
		if pos < len(batch.data) {
			data = batch.data[pos]
		}

		var err error
		// its convenient to be able to return a single error for everything
		if len(batch.error) == 1 {
			err = batch.error[0]
		} else if batch.error != nil {
			err = batch.error[pos]
		}

		if err == nil {
			l.mu.Lock()
			l.unsafeSet(key, data)
			l.mu.Unlock()
		}

		return data, err
	}
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *SceneLoader) LoadAll(keys []int) ([]*models.Scene, []error) {
	results := make([]func() (*models.Scene, error), len(keys))

	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}

	scenes := make([]*models.Scene, len(keys))
	errors := make([]error, len(keys))
	for i, thunk := range results {
		scenes[i], errors[i] = thunk()
	}
	return scenes, errors
}

// LoadAllThunk returns a function that when called will block waiting for a Scenes.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneLoader) LoadAllThunk(keys []int) func() ([]*models.Scene, []error) {
	results := make([]func() (*models.Scene, error), len(keys))
	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}
	return func() ([]*models.Scene, []error) {
		scenes := make([]*models.Scene, len(keys))
		errors := make([]error, len(keys))
		for i, thunk := range results {
			scenes[i], errors[i] = thunk()
		}
		return scenes, errors
	}
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *SceneLoader) Prime(key int, value *models.Scene) bool {
	l.mu.Lock()
	var found bool
	if _, found = l.cache[key]; !found {
		// make a copy when writing to the cache, its easy to pass a pointer in from a loop var
		// and end up with the whole cache pointing to the same value.
		cpy := *value
		l.unsafeSet(key, &cpy)
	}
	l.mu.Unlock()
	return !found
}

// Clear the value at key from the cache, if it exists
func (l *SceneLoader) Clear(key int) {
	l.mu.Lock()
	delete(l.cache, key)
	l.mu.Unlock()
}

func (l *SceneLoader) unsafeSet(key int, value *models.Scene) {
	if l.cache == nil {
		l.cache = map[int]*models.Scene{}
	}
	l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *sceneLoaderBatch) keyIndex(l *SceneLoader, key int) int {
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}

	pos := len(b.keys)
	b.keys = append(b.keys, key)
	if pos == 0 {
		go b.startTimer(l)
	}

	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}

	return pos
}

func (b *sceneLoaderBatch) startTimer(l *SceneLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()

	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}

	l.batch = nil
	l.mu.Unlock()

	b.end(l)
}

func (b *sceneLoaderBatch) end(l *SceneLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}
224	internal/api/loaders/studioloader_gen.go	Normal file
@@ -0,0 +1,224 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.

package loaders

import (
	"sync"
	"time"

	"github.com/stashapp/stash/pkg/models"
)

// StudioLoaderConfig captures the config to create a new StudioLoader
type StudioLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []int) ([]*models.Studio, []error)

	// Wait is how long wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit
	MaxBatch int
}

// NewStudioLoader creates a new StudioLoader given a fetch, wait, and maxBatch
func NewStudioLoader(config StudioLoaderConfig) *StudioLoader {
	return &StudioLoader{
		fetch:    config.Fetch,
		wait:     config.Wait,
		maxBatch: config.MaxBatch,
	}
}

// StudioLoader batches and caches requests
type StudioLoader struct {
	// this method provides the data for the loader
	fetch func(keys []int) ([]*models.Studio, []error)

	// how long to done before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[int]*models.Studio

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *studioLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}

type studioLoaderBatch struct {
	keys    []int
	data    []*models.Studio
	error   []error
	closing bool
	done    chan struct{}
}

// Load a Studio by key, batching and caching will be applied automatically
func (l *StudioLoader) Load(key int) (*models.Studio, error) {
	return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for a Studio.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *StudioLoader) LoadThunk(key int) func() (*models.Studio, error) {
	l.mu.Lock()
	if it, ok := l.cache[key]; ok {
		l.mu.Unlock()
		return func() (*models.Studio, error) {
			return it, nil
		}
	}
	if l.batch == nil {
		l.batch = &studioLoaderBatch{done: make(chan struct{})}
	}
	batch := l.batch
	pos := batch.keyIndex(l, key)
	l.mu.Unlock()

	return func() (*models.Studio, error) {
		<-batch.done

		var data *models.Studio
		if pos < len(batch.data) {
			data = batch.data[pos]
		}

		var err error
		// its convenient to be able to return a single error for everything
		if len(batch.error) == 1 {
			err = batch.error[0]
		} else if batch.error != nil {
			err = batch.error[pos]
		}

		if err == nil {
			l.mu.Lock()
			l.unsafeSet(key, data)
			l.mu.Unlock()
		}

		return data, err
	}
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *StudioLoader) LoadAll(keys []int) ([]*models.Studio, []error) {
	results := make([]func() (*models.Studio, error), len(keys))

	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}

	studios := make([]*models.Studio, len(keys))
	errors := make([]error, len(keys))
	for i, thunk := range results {
		studios[i], errors[i] = thunk()
	}
	return studios, errors
}

// LoadAllThunk returns a function that when called will block waiting for a Studios.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *StudioLoader) LoadAllThunk(keys []int) func() ([]*models.Studio, []error) {
	results := make([]func() (*models.Studio, error), len(keys))
	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}
	return func() ([]*models.Studio, []error) {
		studios := make([]*models.Studio, len(keys))
		errors := make([]error, len(keys))
		for i, thunk := range results {
			studios[i], errors[i] = thunk()
		}
		return studios, errors
	}
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *StudioLoader) Prime(key int, value *models.Studio) bool {
	l.mu.Lock()
	var found bool
	if _, found = l.cache[key]; !found {
		// make a copy when writing to the cache, its easy to pass a pointer in from a loop var
		// and end up with the whole cache pointing to the same value.
		cpy := *value
		l.unsafeSet(key, &cpy)
	}
	l.mu.Unlock()
	return !found
}

// Clear the value at key from the cache, if it exists
func (l *StudioLoader) Clear(key int) {
	l.mu.Lock()
	delete(l.cache, key)
	l.mu.Unlock()
}

func (l *StudioLoader) unsafeSet(key int, value *models.Studio) {
	if l.cache == nil {
		l.cache = map[int]*models.Studio{}
	}
	l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *studioLoaderBatch) keyIndex(l *StudioLoader, key int) int {
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}

	pos := len(b.keys)
	b.keys = append(b.keys, key)
	if pos == 0 {
		go b.startTimer(l)
	}

	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}

	return pos
}

func (b *studioLoaderBatch) startTimer(l *StudioLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()

	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}

	l.batch = nil
	l.mu.Unlock()

	b.end(l)
}

func (b *studioLoaderBatch) end(l *StudioLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}
224	internal/api/loaders/tagloader_gen.go	Normal file
@@ -0,0 +1,224 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.

package loaders

import (
	"sync"
	"time"

	"github.com/stashapp/stash/pkg/models"
)

// TagLoaderConfig captures the config to create a new TagLoader
type TagLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []int) ([]*models.Tag, []error)

	// Wait is how long wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit
	MaxBatch int
}

// NewTagLoader creates a new TagLoader given a fetch, wait, and maxBatch
func NewTagLoader(config TagLoaderConfig) *TagLoader {
	return &TagLoader{
		fetch:    config.Fetch,
		wait:     config.Wait,
		maxBatch: config.MaxBatch,
	}
}

// TagLoader batches and caches requests
type TagLoader struct {
	// this method provides the data for the loader
	fetch func(keys []int) ([]*models.Tag, []error)

	// how long to done before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[int]*models.Tag

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *tagLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}

type tagLoaderBatch struct {
	keys    []int
	data    []*models.Tag
	error   []error
	closing bool
	done    chan struct{}
}

// Load a Tag by key, batching and caching will be applied automatically
func (l *TagLoader) Load(key int) (*models.Tag, error) {
	return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for a Tag.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *TagLoader) LoadThunk(key int) func() (*models.Tag, error) {
	l.mu.Lock()
	if it, ok := l.cache[key]; ok {
		l.mu.Unlock()
		return func() (*models.Tag, error) {
			return it, nil
		}
	}
	if l.batch == nil {
		l.batch = &tagLoaderBatch{done: make(chan struct{})}
	}
	batch := l.batch
	pos := batch.keyIndex(l, key)
	l.mu.Unlock()

	return func() (*models.Tag, error) {
		<-batch.done

		var data *models.Tag
		if pos < len(batch.data) {
			data = batch.data[pos]
		}

		var err error
		// its convenient to be able to return a single error for everything
		if len(batch.error) == 1 {
			err = batch.error[0]
		} else if batch.error != nil {
			err = batch.error[pos]
		}

		if err == nil {
			l.mu.Lock()
			l.unsafeSet(key, data)
			l.mu.Unlock()
		}

		return data, err
	}
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *TagLoader) LoadAll(keys []int) ([]*models.Tag, []error) {
	results := make([]func() (*models.Tag, error), len(keys))

	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}

	tags := make([]*models.Tag, len(keys))
	errors := make([]error, len(keys))
	for i, thunk := range results {
		tags[i], errors[i] = thunk()
	}
	return tags, errors
}

// LoadAllThunk returns a function that when called will block waiting for a Tags.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *TagLoader) LoadAllThunk(keys []int) func() ([]*models.Tag, []error) {
	results := make([]func() (*models.Tag, error), len(keys))
	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}
	return func() ([]*models.Tag, []error) {
		tags := make([]*models.Tag, len(keys))
		errors := make([]error, len(keys))
		for i, thunk := range results {
			tags[i], errors[i] = thunk()
		}
		return tags, errors
	}
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *TagLoader) Prime(key int, value *models.Tag) bool {
	l.mu.Lock()
	var found bool
	if _, found = l.cache[key]; !found {
		// make a copy when writing to the cache, its easy to pass a pointer in from a loop var
		// and end up with the whole cache pointing to the same value.
		cpy := *value
		l.unsafeSet(key, &cpy)
	}
	l.mu.Unlock()
	return !found
}

// Clear the value at key from the cache, if it exists
func (l *TagLoader) Clear(key int) {
	l.mu.Lock()
	delete(l.cache, key)
	l.mu.Unlock()
}

func (l *TagLoader) unsafeSet(key int, value *models.Tag) {
	if l.cache == nil {
		l.cache = map[int]*models.Tag{}
	}
	l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *tagLoaderBatch) keyIndex(l *TagLoader, key int) int {
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}

	pos := len(b.keys)
	b.keys = append(b.keys, key)
	if pos == 0 {
		go b.startTimer(l)
	}

	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}

	return pos
}

func (b *tagLoaderBatch) startTimer(l *TagLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()

	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}

	l.batch = nil
	l.mu.Unlock()

	b.end(l)
}

func (b *tagLoaderBatch) end(l *TagLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}
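All four generated loaders expose the same surface, so the caching behaviour only needs to be understood once. The snippet below is illustrative rather than part of the change, and it assumes the usual integer ID field on models.Tag: Prime seeds the per-loader cache without triggering a fetch, Load then serves that key from the cache, and Clear evicts it so the next Load goes back through the batching path.

// Illustrative only; the same calls work on SceneLoader, StudioLoader and PerformerLoader.
func cacheSemanticsSketch(l *TagLoader, known *models.Tag) {
	_ = l.Prime(known.ID, known) // returns false and does nothing if the key is already cached

	cached, _ := l.Load(known.ID) // served from the cache, no Fetch round trip
	_ = cached

	l.Clear(known.ID) // evict; the next Load batches and fetches again
}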
@@ -245,3 +245,13 @@ func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([
 	return result, nil
 }
+
+func firstError(errs []error) error {
+	for _, e := range errs {
+		if e != nil {
+			return e
+		}
+	}
+
+	return nil
+}
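LoadAll returns one error per key, while a GraphQL field resolver returns a single error; firstError is the small adapter between the two, yielding the first non-nil entry or nil when every key succeeded. A hypothetical fragment of the pairing used throughout the resolver changes below (tagIDs is a made-up input; TagByID is the loader field those resolvers use):

// Sketch only, not taken from the diff.
tags, errs := loaders.From(ctx).TagByID.LoadAll(tagIDs)
if err := firstError(errs); err != nil {
	return nil, err
}
return tags, nil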
@@ -5,6 +5,8 @@ import (
 	"strconv"
 	"time"
 
+	"github.com/stashapp/stash/internal/api/loaders"
+
 	"github.com/stashapp/stash/pkg/file"
 	"github.com/stashapp/stash/pkg/image"
 	"github.com/stashapp/stash/pkg/models"
@@ -145,15 +147,17 @@ func (r *galleryResolver) Date(ctx context.Context, obj *models.Gallery) (*strin
 }
 
 func (r *galleryResolver) Scenes(ctx context.Context, obj *models.Gallery) (ret []*models.Scene, err error) {
+	if !obj.SceneIDs.Loaded() {
 	if err := r.withTxn(ctx, func(ctx context.Context) error {
-		var err error
-		ret, err = r.repository.Scene.FindMany(ctx, obj.SceneIDs)
-		return err
+		return obj.LoadSceneIDs(ctx, r.repository.Gallery)
 	}); err != nil {
 		return nil, err
 	}
+	}
 
-	return ret, nil
+	var errs []error
+	ret, errs = loaders.From(ctx).SceneByID.LoadAll(obj.SceneIDs.List())
+	return ret, firstError(errs)
 }
 
 func (r *galleryResolver) Studio(ctx context.Context, obj *models.Gallery) (ret *models.Studio, err error) {
@@ -161,39 +165,35 @@ func (r *galleryResolver) Studio(ctx context.Context, obj *models.Gallery) (ret
 		return nil, nil
 	}
 
-	if err := r.withTxn(ctx, func(ctx context.Context) error {
-		var err error
-		ret, err = r.repository.Studio.Find(ctx, *obj.StudioID)
-		return err
-	}); err != nil {
-		return nil, err
-	}
-
-	return ret, nil
+	return loaders.From(ctx).StudioByID.Load(*obj.StudioID)
 }
 
 func (r *galleryResolver) Tags(ctx context.Context, obj *models.Gallery) (ret []*models.Tag, err error) {
+	if !obj.TagIDs.Loaded() {
 	if err := r.withTxn(ctx, func(ctx context.Context) error {
-		var err error
-		ret, err = r.repository.Tag.FindMany(ctx, obj.TagIDs)
-		return err
+		return obj.LoadTagIDs(ctx, r.repository.Gallery)
 	}); err != nil {
 		return nil, err
 	}
+	}
 
-	return ret, nil
+	var errs []error
+	ret, errs = loaders.From(ctx).TagByID.LoadAll(obj.TagIDs.List())
+	return ret, firstError(errs)
 }
 
 func (r *galleryResolver) Performers(ctx context.Context, obj *models.Gallery) (ret []*models.Performer, err error) {
+	if !obj.PerformerIDs.Loaded() {
 	if err := r.withTxn(ctx, func(ctx context.Context) error {
-		var err error
-		ret, err = r.repository.Performer.FindMany(ctx, obj.PerformerIDs)
-		return err
+		return obj.LoadPerformerIDs(ctx, r.repository.Gallery)
 	}); err != nil {
 		return nil, err
 	}
+	}
 
-	return ret, nil
+	var errs []error
+	ret, errs = loaders.From(ctx).PerformerByID.LoadAll(obj.PerformerIDs.List())
+	return ret, firstError(errs)
 }
 
 func (r *galleryResolver) ImageCount(ctx context.Context, obj *models.Gallery) (ret int, err error) {
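The resolvers above call loaders.From(ctx), which presumes a set of loaders was attached to the request context earlier; the loaders.Middleware wired up in the server changes further down is what does that. Its internals are not shown in this diff, so the following is only a sketch of the usual shape of such a middleware, with hypothetical names for everything except the per-entity loader fields the resolvers already use.

package loaders

import (
	"context"
	"net/http"
)

// Hypothetical sketch; the real Middleware/From in this PR may differ.
// The key idea: one fresh set of loaders per request, so batching and
// caching never leak across requests or users.
type contextKey int

const loadersKey contextKey = 0

type Loaders struct {
	SceneByID     *SceneLoader
	GalleryByID   *GalleryLoader
	PerformerByID *PerformerLoader
	TagByID       *TagLoader
	StudioByID    *StudioLoader
	MovieByID     *MovieLoader
}

func middlewareSketch(newLoaders func() *Loaders) func(http.Handler) http.Handler {
	return func(next http.Handler) http.Handler {
		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			// build a fresh loader set and stash it in the request context
			ctx := context.WithValue(r.Context(), loadersKey, newLoaders())
			next.ServeHTTP(w, r.WithContext(ctx))
		})
	}
}

// fromSketch mirrors what loaders.From does for the resolvers: retrieve the
// request-scoped loaders (it would panic if the middleware was not installed).
func fromSketch(ctx context.Context) *Loaders {
	return ctx.Value(loadersKey).(*Loaders)
}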
@@ -5,6 +5,7 @@ import (
 	"strconv"
 	"time"
 
+	"github.com/stashapp/stash/internal/api/loaders"
 	"github.com/stashapp/stash/internal/api/urlbuilders"
 	"github.com/stashapp/stash/pkg/models"
 )
@@ -74,15 +75,17 @@ func (r *imageResolver) Paths(ctx context.Context, obj *models.Image) (*ImagePat
 }
 
 func (r *imageResolver) Galleries(ctx context.Context, obj *models.Image) (ret []*models.Gallery, err error) {
+	if !obj.GalleryIDs.Loaded() {
 	if err := r.withTxn(ctx, func(ctx context.Context) error {
-		var err error
-		ret, err = r.repository.Gallery.FindMany(ctx, obj.GalleryIDs)
-		return err
+		return obj.LoadGalleryIDs(ctx, r.repository.Image)
 	}); err != nil {
 		return nil, err
 	}
+	}
 
-	return ret, nil
+	var errs []error
+	ret, errs = loaders.From(ctx).GalleryByID.LoadAll(obj.GalleryIDs.List())
+	return ret, firstError(errs)
 }
 
 func (r *imageResolver) Studio(ctx context.Context, obj *models.Image) (ret *models.Studio, err error) {
@@ -90,34 +93,33 @@ func (r *imageResolver) Studio(ctx context.Context, obj *models.Image) (ret *mod
 		return nil, nil
 	}
 
-	if err := r.withTxn(ctx, func(ctx context.Context) error {
-		ret, err = r.repository.Studio.Find(ctx, *obj.StudioID)
-		return err
-	}); err != nil {
-		return nil, err
-	}
-
-	return ret, nil
+	return loaders.From(ctx).StudioByID.Load(*obj.StudioID)
 }
 
 func (r *imageResolver) Tags(ctx context.Context, obj *models.Image) (ret []*models.Tag, err error) {
+	if !obj.TagIDs.Loaded() {
 	if err := r.withTxn(ctx, func(ctx context.Context) error {
-		ret, err = r.repository.Tag.FindMany(ctx, obj.TagIDs)
-		return err
+		return obj.LoadTagIDs(ctx, r.repository.Image)
 	}); err != nil {
 		return nil, err
 	}
+	}
 
-	return ret, nil
+	var errs []error
+	ret, errs = loaders.From(ctx).TagByID.LoadAll(obj.TagIDs.List())
+	return ret, firstError(errs)
 }
 
 func (r *imageResolver) Performers(ctx context.Context, obj *models.Image) (ret []*models.Performer, err error) {
+	if !obj.PerformerIDs.Loaded() {
 	if err := r.withTxn(ctx, func(ctx context.Context) error {
-		ret, err = r.repository.Performer.FindMany(ctx, obj.PerformerIDs)
-		return err
+		return obj.LoadPerformerIDs(ctx, r.repository.Image)
 	}); err != nil {
 		return nil, err
 	}
+	}
 
-	return ret, nil
+	var errs []error
+	ret, errs = loaders.From(ctx).PerformerByID.LoadAll(obj.PerformerIDs.List())
+	return ret, firstError(errs)
 }
@@ -4,6 +4,7 @@ import (
 	"context"
 	"time"
 
+	"github.com/stashapp/stash/internal/api/loaders"
 	"github.com/stashapp/stash/internal/api/urlbuilders"
 	"github.com/stashapp/stash/pkg/models"
 	"github.com/stashapp/stash/pkg/utils"
@@ -56,14 +57,7 @@ func (r *movieResolver) Rating(ctx context.Context, obj *models.Movie) (*int, er
 
 func (r *movieResolver) Studio(ctx context.Context, obj *models.Movie) (ret *models.Studio, err error) {
 	if obj.StudioID.Valid {
-		if err := r.withTxn(ctx, func(ctx context.Context) error {
-			ret, err = r.repository.Studio.Find(ctx, int(obj.StudioID.Int64))
-			return err
-		}); err != nil {
-			return nil, err
-		}
-
-		return ret, nil
+		return loaders.From(ctx).StudioByID.Load(int(obj.StudioID.Int64))
 	}
 
 	return nil, nil
@@ -199,15 +199,17 @@ func (r *performerResolver) Scenes(ctx context.Context, obj *models.Performer) (
 	return ret, nil
 }
 
-func (r *performerResolver) StashIds(ctx context.Context, obj *models.Performer) (ret []*models.StashID, err error) {
+func (r *performerResolver) StashIds(ctx context.Context, obj *models.Performer) ([]*models.StashID, error) {
+	var ret []models.StashID
 	if err := r.withTxn(ctx, func(ctx context.Context) error {
+		var err error
 		ret, err = r.repository.Performer.GetStashIDs(ctx, obj.ID)
 		return err
 	}); err != nil {
 		return nil, err
 	}
 
-	return ret, nil
+	return stashIDsSliceToPtrSlice(ret), nil
 }
 
 func (r *performerResolver) Rating(ctx context.Context, obj *models.Performer) (*int, error) {
@@ -6,6 +6,7 @@ import (
 	"strconv"
 	"time"
 
+	"github.com/stashapp/stash/internal/api/loaders"
 	"github.com/stashapp/stash/internal/api/urlbuilders"
 	"github.com/stashapp/stash/internal/manager"
 	"github.com/stashapp/stash/pkg/file"
@@ -163,14 +164,17 @@ func (r *sceneResolver) Captions(ctx context.Context, obj *models.Scene) (ret []
 }
 
 func (r *sceneResolver) Galleries(ctx context.Context, obj *models.Scene) (ret []*models.Gallery, err error) {
+	if !obj.GalleryIDs.Loaded() {
 	if err := r.withTxn(ctx, func(ctx context.Context) error {
-		ret, err = r.repository.Gallery.FindMany(ctx, obj.GalleryIDs)
-		return err
+		return obj.LoadGalleryIDs(ctx, r.repository.Scene)
 	}); err != nil {
 		return nil, err
 	}
+	}
 
-	return ret, nil
+	var errs []error
+	ret, errs = loaders.From(ctx).GalleryByID.LoadAll(obj.GalleryIDs.List())
+	return ret, firstError(errs)
 }
 
 func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (ret *models.Studio, err error) {
@@ -178,24 +182,26 @@ func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (ret *mod
 		return nil, nil
 	}
 
-	if err := r.withTxn(ctx, func(ctx context.Context) error {
-		ret, err = r.repository.Studio.Find(ctx, *obj.StudioID)
-		return err
-	}); err != nil {
-		return nil, err
-	}
-
-	return ret, nil
+	return loaders.From(ctx).StudioByID.Load(*obj.StudioID)
 }
 
 func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*SceneMovie, err error) {
+	if !obj.Movies.Loaded() {
 	if err := r.withTxn(ctx, func(ctx context.Context) error {
-		mqb := r.repository.Movie
-
-		for _, sm := range obj.Movies {
-			movie, err := mqb.Find(ctx, sm.MovieID)
+		qb := r.repository.Scene
+
+		return obj.LoadMovies(ctx, qb)
+	}); err != nil {
+		return nil, err
+	}
+	}
+
+	loader := loaders.From(ctx).MovieByID
+
+	for _, sm := range obj.Movies.List() {
+		movie, err := loader.Load(sm.MovieID)
 		if err != nil {
-			return err
+			return nil, err
 		}
 
 		sceneIdx := sm.SceneIndex
@@ -207,33 +213,55 @@ func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*S
 		ret = append(ret, sceneMovie)
 	}
 
-		return nil
-	}); err != nil {
-		return nil, err
-	}
 	return ret, nil
 }
 
 func (r *sceneResolver) Tags(ctx context.Context, obj *models.Scene) (ret []*models.Tag, err error) {
+	if !obj.TagIDs.Loaded() {
 	if err := r.withTxn(ctx, func(ctx context.Context) error {
-		ret, err = r.repository.Tag.FindMany(ctx, obj.TagIDs)
-		return err
+		return obj.LoadTagIDs(ctx, r.repository.Scene)
 	}); err != nil {
 		return nil, err
 	}
+	}
 
-	return ret, nil
+	var errs []error
+	ret, errs = loaders.From(ctx).TagByID.LoadAll(obj.TagIDs.List())
+	return ret, firstError(errs)
 }
 
 func (r *sceneResolver) Performers(ctx context.Context, obj *models.Scene) (ret []*models.Performer, err error) {
+	if !obj.PerformerIDs.Loaded() {
 	if err := r.withTxn(ctx, func(ctx context.Context) error {
-		ret, err = r.repository.Performer.FindMany(ctx, obj.PerformerIDs)
-		return err
+		return obj.LoadPerformerIDs(ctx, r.repository.Scene)
 	}); err != nil {
 		return nil, err
 	}
+	}
+
+	var errs []error
+	ret, errs = loaders.From(ctx).PerformerByID.LoadAll(obj.PerformerIDs.List())
+	return ret, firstError(errs)
+}
+
+func stashIDsSliceToPtrSlice(v []models.StashID) []*models.StashID {
+	ret := make([]*models.StashID, len(v))
+	for i, vv := range v {
+		c := vv
+		ret[i] = &c
+	}
+
+	return ret
+}
+
+func (r *sceneResolver) StashIds(ctx context.Context, obj *models.Scene) (ret []*models.StashID, err error) {
+	if err := r.withTxn(ctx, func(ctx context.Context) error {
+		return obj.LoadStashIDs(ctx, r.repository.Scene)
 	}); err != nil {
 		return nil, err
 	}
 
-	return ret, nil
+	return stashIDsSliceToPtrSlice(obj.StashIDs.List()), nil
 }
 
 func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string, error) {
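The scene, gallery and image resolvers above all follow the same two-step pattern: populate the ID list on demand (obj.LoadXIDs inside a short transaction, skipped when Loaded() reports it is already there), then hand the IDs to the request-scoped loader. A condensed, illustrative sketch of that pattern, using the same types and methods the resolvers call (in the real code the LoadPerformerIDs call runs inside r.withTxn, omitted here for brevity):

// Sketch only, not from the diff.
func scenePerformersSketch(ctx context.Context, obj *models.Scene, repo models.PerformerIDLoader) ([]*models.Performer, error) {
	// step 1: lazily load the relationship IDs onto the model
	if !obj.PerformerIDs.Loaded() {
		if err := obj.LoadPerformerIDs(ctx, repo); err != nil {
			return nil, err
		}
	}

	// step 2: resolve the IDs through the batching loader; concurrent
	// resolvers for other scenes end up sharing the same fetch
	performers, errs := loaders.From(ctx).PerformerByID.LoadAll(obj.PerformerIDs.List())
	return performers, firstError(errs)
}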
@@ -4,6 +4,7 @@ import (
 	"context"
 	"time"
 
+	"github.com/stashapp/stash/internal/api/loaders"
 	"github.com/stashapp/stash/internal/api/urlbuilders"
 	"github.com/stashapp/stash/pkg/gallery"
 	"github.com/stashapp/stash/pkg/image"
@@ -97,14 +98,7 @@ func (r *studioResolver) ParentStudio(ctx context.Context, obj *models.Studio) (
 		return nil, nil
 	}
 
-	if err := r.withTxn(ctx, func(ctx context.Context) error {
-		ret, err = r.repository.Studio.Find(ctx, int(obj.ParentID.Int64))
-		return err
-	}); err != nil {
-		return nil, err
-	}
-
-	return ret, nil
+	return loaders.From(ctx).StudioByID.Load(int(obj.ParentID.Int64))
 }
 
 func (r *studioResolver) ChildStudios(ctx context.Context, obj *models.Studio) (ret []*models.Studio, err error) {
@@ -118,15 +112,17 @@ func (r *studioResolver) ChildStudios(ctx context.Context, obj *models.Studio) (
 	return ret, nil
 }
 
-func (r *studioResolver) StashIds(ctx context.Context, obj *models.Studio) (ret []*models.StashID, err error) {
+func (r *studioResolver) StashIds(ctx context.Context, obj *models.Studio) ([]*models.StashID, error) {
+	var ret []models.StashID
 	if err := r.withTxn(ctx, func(ctx context.Context) error {
+		var err error
 		ret, err = r.repository.Studio.GetStashIDs(ctx, obj.ID)
 		return err
 	}); err != nil {
 		return nil, err
 	}
 
-	return ret, nil
+	return stashIDsSliceToPtrSlice(ret), nil
 }
 
 func (r *studioResolver) Rating(ctx context.Context, obj *models.Studio) (*int, error) {
@@ -36,9 +36,25 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat
 	}
 
 	// Populate a new performer from the input
+	performerIDs, err := stringslice.StringSliceToIntSlice(input.PerformerIds)
+	if err != nil {
+		return nil, fmt.Errorf("converting performer ids: %w", err)
+	}
+	tagIDs, err := stringslice.StringSliceToIntSlice(input.TagIds)
+	if err != nil {
+		return nil, fmt.Errorf("converting tag ids: %w", err)
+	}
+	sceneIDs, err := stringslice.StringSliceToIntSlice(input.SceneIds)
+	if err != nil {
+		return nil, fmt.Errorf("converting scene ids: %w", err)
+	}
+
 	currentTime := time.Now()
 	newGallery := models.Gallery{
 		Title: input.Title,
+		PerformerIDs: models.NewRelatedIDs(performerIDs),
+		TagIDs:       models.NewRelatedIDs(tagIDs),
+		SceneIDs:     models.NewRelatedIDs(sceneIDs),
 		CreatedAt: currentTime,
 		UpdatedAt: currentTime,
 	}
@@ -60,20 +76,6 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat
 		newGallery.StudioID = &studioID
 	}
 
-	var err error
-	newGallery.PerformerIDs, err = stringslice.StringSliceToIntSlice(input.PerformerIds)
-	if err != nil {
-		return nil, fmt.Errorf("converting performer ids: %w", err)
-	}
-	newGallery.TagIDs, err = stringslice.StringSliceToIntSlice(input.TagIds)
-	if err != nil {
-		return nil, fmt.Errorf("converting tag ids: %w", err)
-	}
-	newGallery.SceneIDs, err = stringslice.StringSliceToIntSlice(input.SceneIds)
-	if err != nil {
-		return nil, fmt.Errorf("converting scene ids: %w", err)
-	}
-
 	// Start the transaction and save the gallery
 	if err := r.withTxn(ctx, func(ctx context.Context) error {
 		qb := r.repository.Gallery
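On the create path the relationship IDs now go through models.NewRelatedIDs up front, instead of being assigned later as plain int slices. A small hedged sketch of just that conversion step, with made-up input values and only a subset of the real gallery fields:

// Sketch only; GalleryCreate above does the same with the real input fields.
func galleryFromInputSketch(performerIds []string) (*models.Gallery, error) {
	performerIDs, err := stringslice.StringSliceToIntSlice(performerIds)
	if err != nil {
		return nil, fmt.Errorf("converting performer ids: %w", err)
	}

	now := time.Now()
	return &models.Gallery{
		PerformerIDs: models.NewRelatedIDs(performerIDs), // already "loaded", so resolvers skip the lazy fetch
		CreatedAt:    now,
		UpdatedAt:    now,
	}, nil
}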
@@ -26,6 +26,16 @@ func (r *mutationResolver) getPerformer(ctx context.Context, id int) (ret *model
|
|||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func stashIDPtrSliceToSlice(v []*models.StashID) []models.StashID {
|
||||||
|
ret := make([]models.StashID, len(v))
|
||||||
|
for i, vv := range v {
|
||||||
|
c := vv
|
||||||
|
ret[i] = *c
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
func (r *mutationResolver) PerformerCreate(ctx context.Context, input PerformerCreateInput) (*models.Performer, error) {
|
func (r *mutationResolver) PerformerCreate(ctx context.Context, input PerformerCreateInput) (*models.Performer, error) {
|
||||||
// generate checksum from performer name rather than image
|
// generate checksum from performer name rather than image
|
||||||
checksum := md5.FromString(input.Name)
|
checksum := md5.FromString(input.Name)
|
||||||
@@ -152,7 +162,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input PerformerC
|
|||||||
|
|
||||||
// Save the stash_ids
|
// Save the stash_ids
|
||||||
if input.StashIds != nil {
|
if input.StashIds != nil {
|
||||||
stashIDJoins := input.StashIds
|
stashIDJoins := stashIDPtrSliceToSlice(input.StashIds)
|
||||||
if err := qb.UpdateStashIDs(ctx, performer.ID, stashIDJoins); err != nil {
|
if err := qb.UpdateStashIDs(ctx, performer.ID, stashIDJoins); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -275,7 +285,7 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input PerformerU
|
|||||||
|
|
||||||
// Save the stash_ids
|
// Save the stash_ids
|
||||||
if translator.hasField("stash_ids") {
|
if translator.hasField("stash_ids") {
|
||||||
stashIDJoins := input.StashIds
|
stashIDJoins := stashIDPtrSliceToSlice(input.StashIds)
|
||||||
if err := qb.UpdateStashIDs(ctx, performerID, stashIDJoins); err != nil {
|
if err := qb.UpdateStashIDs(ctx, performerID, stashIDJoins); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -57,6 +57,11 @@ func (r *mutationResolver) SubmitStashBoxSceneDraft(ctx context.Context, input S
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if err := scene.LoadStashIDs(ctx, qb); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()))
|
filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()))
|
||||||
|
|
||||||
res, err = client.SubmitSceneDraft(ctx, scene, boxes[input.StashBoxIndex].Endpoint, filepath)
|
res, err = client.SubmitSceneDraft(ctx, scene, boxes[input.StashBoxIndex].Endpoint, filepath)
|
||||||
|
|||||||
@@ -90,7 +90,7 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input StudioCreateI
|
|||||||
|
|
||||||
// Save the stash_ids
|
// Save the stash_ids
|
||||||
if input.StashIds != nil {
|
if input.StashIds != nil {
|
||||||
stashIDJoins := input.StashIds
|
stashIDJoins := stashIDPtrSliceToSlice(input.StashIds)
|
||||||
if err := qb.UpdateStashIDs(ctx, s.ID, stashIDJoins); err != nil {
|
if err := qb.UpdateStashIDs(ctx, s.ID, stashIDJoins); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -182,7 +182,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input StudioUpdateI
|
|||||||
|
|
||||||
// Save the stash_ids
|
// Save the stash_ids
|
||||||
if translator.hasField("stash_ids") {
|
if translator.hasField("stash_ids") {
|
||||||
stashIDJoins := input.StashIds
|
stashIDJoins := stashIDPtrSliceToSlice(input.StashIds)
|
||||||
if err := qb.UpdateStashIDs(ctx, studioID, stashIDJoins); err != nil {
|
if err := qb.UpdateStashIDs(ctx, studioID, stashIDJoins); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -26,6 +26,7 @@ import (
|
|||||||
|
|
||||||
"github.com/go-chi/httplog"
|
"github.com/go-chi/httplog"
|
||||||
"github.com/rs/cors"
|
"github.com/rs/cors"
|
||||||
|
"github.com/stashapp/stash/internal/api/loaders"
|
||||||
"github.com/stashapp/stash/internal/manager"
|
"github.com/stashapp/stash/internal/manager"
|
||||||
"github.com/stashapp/stash/internal/manager/config"
|
"github.com/stashapp/stash/internal/manager/config"
|
||||||
"github.com/stashapp/stash/pkg/fsutil"
|
"github.com/stashapp/stash/pkg/fsutil"
|
||||||
@@ -74,6 +75,14 @@ func Start() error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
txnManager := manager.GetInstance().Repository
|
txnManager := manager.GetInstance().Repository
|
||||||
|
|
||||||
|
dataloaders := loaders.Middleware{
|
||||||
|
DatabaseProvider: txnManager,
|
||||||
|
Repository: txnManager,
|
||||||
|
}
|
||||||
|
|
||||||
|
r.Use(dataloaders.Middleware)
|
||||||
|
|
||||||
pluginCache := manager.GetInstance().PluginCache
|
pluginCache := manager.GetInstance().PluginCache
|
||||||
sceneService := manager.GetInstance().SceneService
|
sceneService := manager.GetInstance().SceneService
|
||||||
imageService := manager.GetInstance().ImageService
|
imageService := manager.GetInstance().ImageService
|
||||||
|
|||||||
@@ -6,8 +6,19 @@ import (
|
|||||||
"github.com/stashapp/stash/pkg/gallery"
|
"github.com/stashapp/stash/pkg/gallery"
|
||||||
"github.com/stashapp/stash/pkg/match"
|
"github.com/stashapp/stash/pkg/match"
|
||||||
"github.com/stashapp/stash/pkg/models"
|
"github.com/stashapp/stash/pkg/models"
|
||||||
|
"github.com/stashapp/stash/pkg/sliceutil/intslice"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
type GalleryPerformerUpdater interface {
|
||||||
|
models.PerformerIDLoader
|
||||||
|
gallery.PartialUpdater
|
||||||
|
}
|
||||||
|
|
||||||
|
type GalleryTagUpdater interface {
|
||||||
|
models.TagIDLoader
|
||||||
|
gallery.PartialUpdater
|
||||||
|
}
|
||||||
|
|
||||||
func getGalleryFileTagger(s *models.Gallery, cache *match.Cache) tagger {
|
func getGalleryFileTagger(s *models.Gallery, cache *match.Cache) tagger {
|
||||||
var path string
|
var path string
|
||||||
if s.Path() != "" {
|
if s.Path() != "" {
|
||||||
@@ -28,11 +39,24 @@ func getGalleryFileTagger(s *models.Gallery, cache *match.Cache) tagger {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// GalleryPerformers tags the provided gallery with performers whose name matches the gallery's path.
|
// GalleryPerformers tags the provided gallery with performers whose name matches the gallery's path.
|
||||||
func GalleryPerformers(ctx context.Context, s *models.Gallery, rw gallery.PartialUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
|
func GalleryPerformers(ctx context.Context, s *models.Gallery, rw GalleryPerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
|
||||||
t := getGalleryFileTagger(s, cache)
|
t := getGalleryFileTagger(s, cache)
|
||||||
|
|
||||||
return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) {
|
return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) {
|
||||||
return gallery.AddPerformer(ctx, rw, s, otherID)
|
if err := s.LoadPerformerIDs(ctx, rw); err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
existing := s.PerformerIDs.List()
|
||||||
|
|
||||||
|
if intslice.IntInclude(existing, otherID) {
|
||||||
|
return false, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := gallery.AddPerformer(ctx, rw, s, otherID); err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return true, nil
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -53,10 +77,23 @@ func GalleryStudios(ctx context.Context, s *models.Gallery, rw GalleryFinderUpda
|
|||||||
}
|
}
|
||||||
|
|
||||||
// GalleryTags tags the provided gallery with tags whose name matches the gallery's path.
|
// GalleryTags tags the provided gallery with tags whose name matches the gallery's path.
|
||||||
func GalleryTags(ctx context.Context, s *models.Gallery, rw gallery.PartialUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
|
func GalleryTags(ctx context.Context, s *models.Gallery, rw GalleryTagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
|
||||||
t := getGalleryFileTagger(s, cache)
|
t := getGalleryFileTagger(s, cache)
|
||||||
|
|
||||||
return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) {
|
return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) {
|
||||||
return gallery.AddTag(ctx, rw, s, otherID)
|
if err := s.LoadTagIDs(ctx, rw); err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
existing := s.TagIDs.List()
|
||||||
|
|
||||||
|
if intslice.IntInclude(existing, otherID) {
|
||||||
|
return false, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := gallery.AddTag(ctx, rw, s, otherID); err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return true, nil
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -60,6 +60,7 @@ func TestGalleryPerformers(t *testing.T) {
 				Path: test.Path,
 			},
 		},
+		PerformerIDs: models.NewRelatedIDs([]int{}),
 	}
 	err := GalleryPerformers(testCtx, &gallery, mockGalleryReader, mockPerformerReader, nil)

@@ -183,6 +184,7 @@ func TestGalleryTags(t *testing.T) {
 				Path: test.Path,
 			},
 		},
+		TagIDs: models.NewRelatedIDs([]int{}),
 	}
 	err := GalleryTags(testCtx, &gallery, mockGalleryReader, mockTagReader, nil)

@@ -6,8 +6,19 @@ import (
 	"github.com/stashapp/stash/pkg/image"
 	"github.com/stashapp/stash/pkg/match"
 	"github.com/stashapp/stash/pkg/models"
+	"github.com/stashapp/stash/pkg/sliceutil/intslice"
 )
 
+type ImagePerformerUpdater interface {
+	models.PerformerIDLoader
+	image.PartialUpdater
+}
+
+type ImageTagUpdater interface {
+	models.TagIDLoader
+	image.PartialUpdater
+}
+
 func getImageFileTagger(s *models.Image, cache *match.Cache) tagger {
 	return tagger{
 		ID: s.ID,

@@ -19,11 +30,24 @@ func getImageFileTagger(s *models.Image, cache *match.Cache) tagger {
 }
 
 // ImagePerformers tags the provided image with performers whose name matches the image's path.
-func ImagePerformers(ctx context.Context, s *models.Image, rw image.PartialUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
+func ImagePerformers(ctx context.Context, s *models.Image, rw ImagePerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
 	t := getImageFileTagger(s, cache)
 
 	return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) {
-		return image.AddPerformer(ctx, rw, s, otherID)
+		if err := s.LoadPerformerIDs(ctx, rw); err != nil {
+			return false, err
+		}
+		existing := s.PerformerIDs.List()
+
+		if intslice.IntInclude(existing, otherID) {
+			return false, nil
+		}
+
+		if err := image.AddPerformer(ctx, rw, s, otherID); err != nil {
+			return false, err
+		}
+
+		return true, nil
 	})
 }

@@ -44,10 +68,23 @@ func ImageStudios(ctx context.Context, s *models.Image, rw ImageFinderUpdater, s
 }
 
 // ImageTags tags the provided image with tags whose name matches the image's path.
-func ImageTags(ctx context.Context, s *models.Image, rw image.PartialUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
+func ImageTags(ctx context.Context, s *models.Image, rw ImageTagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
 	t := getImageFileTagger(s, cache)
 
 	return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) {
-		return image.AddTag(ctx, rw, s, otherID)
+		if err := s.LoadTagIDs(ctx, rw); err != nil {
+			return false, err
+		}
+		existing := s.TagIDs.List()
+
+		if intslice.IntInclude(existing, otherID) {
+			return false, nil
+		}
+
+		if err := image.AddTag(ctx, rw, s, otherID); err != nil {
+			return false, err
+		}
+
+		return true, nil
 	})
 }

@@ -61,6 +61,7 @@ func TestImagePerformers(t *testing.T) {
 		image := models.Image{
 			ID: imageID,
 			Files: []*file.ImageFile{makeImageFile(test.Path)},
+			PerformerIDs: models.NewRelatedIDs([]int{}),
 		}
 		err := ImagePerformers(testCtx, &image, mockImageReader, mockPerformerReader, nil)

@@ -176,6 +177,7 @@ func TestImageTags(t *testing.T) {
 		image := models.Image{
 			ID: imageID,
 			Files: []*file.ImageFile{makeImageFile(test.Path)},
+			TagIDs: models.NewRelatedIDs([]int{}),
 		}
 		err := ImageTags(testCtx, &image, mockImageReader, mockTagReader, nil)
@@ -8,20 +8,24 @@ import (
 	"github.com/stashapp/stash/pkg/match"
 	"github.com/stashapp/stash/pkg/models"
 	"github.com/stashapp/stash/pkg/scene"
+	"github.com/stashapp/stash/pkg/sliceutil/intslice"
 )
 
 type SceneQueryPerformerUpdater interface {
 	scene.Queryer
+	models.PerformerIDLoader
 	scene.PartialUpdater
 }
 
 type ImageQueryPerformerUpdater interface {
 	image.Queryer
+	models.PerformerIDLoader
 	image.PartialUpdater
 }
 
 type GalleryQueryPerformerUpdater interface {
 	gallery.Queryer
+	models.PerformerIDLoader
 	gallery.PartialUpdater
 }

@@ -39,7 +43,20 @@ func PerformerScenes(ctx context.Context, p *models.Performer, paths []string, r
 	t := getPerformerTagger(p, cache)
 
 	return t.tagScenes(ctx, paths, rw, func(o *models.Scene) (bool, error) {
-		return scene.AddPerformer(ctx, rw, o, p.ID)
+		if err := o.LoadPerformerIDs(ctx, rw); err != nil {
+			return false, err
+		}
+		existing := o.PerformerIDs.List()
+
+		if intslice.IntInclude(existing, p.ID) {
+			return false, nil
+		}
+
+		if err := scene.AddPerformer(ctx, rw, o, p.ID); err != nil {
+			return false, err
+		}
+
+		return true, nil
 	})
 }

@@ -47,8 +64,21 @@ func PerformerScenes(ctx context.Context, p *models.Performer, paths []string, r
 func PerformerImages(ctx context.Context, p *models.Performer, paths []string, rw ImageQueryPerformerUpdater, cache *match.Cache) error {
 	t := getPerformerTagger(p, cache)
 
-	return t.tagImages(ctx, paths, rw, func(i *models.Image) (bool, error) {
-		return image.AddPerformer(ctx, rw, i, p.ID)
+	return t.tagImages(ctx, paths, rw, func(o *models.Image) (bool, error) {
+		if err := o.LoadPerformerIDs(ctx, rw); err != nil {
+			return false, err
+		}
+		existing := o.PerformerIDs.List()
+
+		if intslice.IntInclude(existing, p.ID) {
+			return false, nil
+		}
+
+		if err := image.AddPerformer(ctx, rw, o, p.ID); err != nil {
+			return false, err
+		}
+
+		return true, nil
 	})
 }

@@ -57,6 +87,19 @@ func PerformerGalleries(ctx context.Context, p *models.Performer, paths []string
 	t := getPerformerTagger(p, cache)
 
 	return t.tagGalleries(ctx, paths, rw, func(o *models.Gallery) (bool, error) {
-		return gallery.AddPerformer(ctx, rw, o, p.ID)
+		if err := o.LoadPerformerIDs(ctx, rw); err != nil {
+			return false, err
+		}
+		existing := o.PerformerIDs.List()
+
+		if intslice.IntInclude(existing, p.ID) {
+			return false, nil
+		}
+
+		if err := gallery.AddPerformer(ctx, rw, o, p.ID); err != nil {
+			return false, err
+		}
+
+		return true, nil
 	})
 }

@@ -56,6 +56,7 @@ func testPerformerScenes(t *testing.T, performerName, expectedRegex string) {
 			},
 		},
 	},
+	PerformerIDs: models.NewRelatedIDs([]int{}),
 })
 }

@@ -135,6 +136,7 @@ func testPerformerImages(t *testing.T, performerName, expectedRegex string) {
 	images = append(images, &models.Image{
 		ID: i + 1,
 		Files: []*file.ImageFile{makeImageFile(p)},
+		PerformerIDs: models.NewRelatedIDs([]int{}),
 	})
 }

@@ -219,6 +221,7 @@ func testPerformerGalleries(t *testing.T, performerName, expectedRegex string) {
 			Path: v,
 		},
 	},
+	PerformerIDs: models.NewRelatedIDs([]int{}),
 })
 }
@@ -6,8 +6,19 @@ import (
 	"github.com/stashapp/stash/pkg/match"
 	"github.com/stashapp/stash/pkg/models"
 	"github.com/stashapp/stash/pkg/scene"
+	"github.com/stashapp/stash/pkg/sliceutil/intslice"
 )
 
+type ScenePerformerUpdater interface {
+	models.PerformerIDLoader
+	scene.PartialUpdater
+}
+
+type SceneTagUpdater interface {
+	models.TagIDLoader
+	scene.PartialUpdater
+}
+
 func getSceneFileTagger(s *models.Scene, cache *match.Cache) tagger {
 	return tagger{
 		ID: s.ID,

@@ -19,11 +30,24 @@ func getSceneFileTagger(s *models.Scene, cache *match.Cache) tagger {
 }
 
 // ScenePerformers tags the provided scene with performers whose name matches the scene's path.
-func ScenePerformers(ctx context.Context, s *models.Scene, rw scene.PartialUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
+func ScenePerformers(ctx context.Context, s *models.Scene, rw ScenePerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
 	t := getSceneFileTagger(s, cache)
 
 	return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) {
-		return scene.AddPerformer(ctx, rw, s, otherID)
+		if err := s.LoadPerformerIDs(ctx, rw); err != nil {
+			return false, err
+		}
+		existing := s.PerformerIDs.List()
+
+		if intslice.IntInclude(existing, otherID) {
+			return false, nil
+		}
+
+		if err := scene.AddPerformer(ctx, rw, s, otherID); err != nil {
+			return false, err
+		}
+
+		return true, nil
 	})
 }

@@ -44,10 +68,23 @@ func SceneStudios(ctx context.Context, s *models.Scene, rw SceneFinderUpdater, s
 }
 
 // SceneTags tags the provided scene with tags whose name matches the scene's path.
-func SceneTags(ctx context.Context, s *models.Scene, rw scene.PartialUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
+func SceneTags(ctx context.Context, s *models.Scene, rw SceneTagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
 	t := getSceneFileTagger(s, cache)
 
 	return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) {
-		return scene.AddTag(ctx, rw, s, otherID)
+		if err := s.LoadTagIDs(ctx, rw); err != nil {
+			return false, err
+		}
+		existing := s.TagIDs.List()
+
+		if intslice.IntInclude(existing, otherID) {
+			return false, nil
+		}
+
+		if err := scene.AddTag(ctx, rw, s, otherID); err != nil {
+			return false, err
+		}
+
+		return true, nil
 	})
 }

@@ -186,6 +186,7 @@ func TestScenePerformers(t *testing.T) {
 			},
 		},
 	},
+	PerformerIDs: models.NewRelatedIDs([]int{}),
 }
 
 if test.Matches {

@@ -325,6 +326,7 @@ func TestSceneTags(t *testing.T) {
 			},
 		},
 	},
+	TagIDs: models.NewRelatedIDs([]int{}),
 }
 err := SceneTags(testCtx, &scene, mockSceneReader, mockTagReader, nil)
@@ -8,20 +8,24 @@ import (
 	"github.com/stashapp/stash/pkg/match"
 	"github.com/stashapp/stash/pkg/models"
 	"github.com/stashapp/stash/pkg/scene"
+	"github.com/stashapp/stash/pkg/sliceutil/intslice"
 )
 
 type SceneQueryTagUpdater interface {
 	scene.Queryer
+	models.TagIDLoader
 	scene.PartialUpdater
 }
 
 type ImageQueryTagUpdater interface {
 	image.Queryer
+	models.TagIDLoader
 	image.PartialUpdater
 }
 
 type GalleryQueryTagUpdater interface {
 	gallery.Queryer
+	models.TagIDLoader
 	gallery.PartialUpdater
 }

@@ -51,7 +55,20 @@ func TagScenes(ctx context.Context, p *models.Tag, paths []string, aliases []str
 
 	for _, tt := range t {
 		if err := tt.tagScenes(ctx, paths, rw, func(o *models.Scene) (bool, error) {
-			return scene.AddTag(ctx, rw, o, p.ID)
+			if err := o.LoadTagIDs(ctx, rw); err != nil {
+				return false, err
+			}
+			existing := o.TagIDs.List()
+
+			if intslice.IntInclude(existing, p.ID) {
+				return false, nil
+			}
+
+			if err := scene.AddTag(ctx, rw, o, p.ID); err != nil {
+				return false, err
+			}
+
+			return true, nil
 		}); err != nil {
 			return err
 		}

@@ -64,8 +81,21 @@ func TagImages(ctx context.Context, p *models.Tag, paths []string, aliases []str
 	t := getTagTaggers(p, aliases, cache)
 
 	for _, tt := range t {
-		if err := tt.tagImages(ctx, paths, rw, func(i *models.Image) (bool, error) {
-			return image.AddTag(ctx, rw, i, p.ID)
+		if err := tt.tagImages(ctx, paths, rw, func(o *models.Image) (bool, error) {
+			if err := o.LoadTagIDs(ctx, rw); err != nil {
+				return false, err
+			}
+			existing := o.TagIDs.List()
+
+			if intslice.IntInclude(existing, p.ID) {
+				return false, nil
+			}
+
+			if err := image.AddTag(ctx, rw, o, p.ID); err != nil {
+				return false, err
+			}
+
+			return true, nil
 		}); err != nil {
 			return err
 		}

@@ -79,7 +109,20 @@ func TagGalleries(ctx context.Context, p *models.Tag, paths []string, aliases []
 
 	for _, tt := range t {
 		if err := tt.tagGalleries(ctx, paths, rw, func(o *models.Gallery) (bool, error) {
-			return gallery.AddTag(ctx, rw, o, p.ID)
+			if err := o.LoadTagIDs(ctx, rw); err != nil {
+				return false, err
+			}
+			existing := o.TagIDs.List()
+
+			if intslice.IntInclude(existing, p.ID) {
+				return false, nil
+			}
+
+			if err := gallery.AddTag(ctx, rw, o, p.ID); err != nil {
+				return false, err
+			}
+
+			return true, nil
 		}); err != nil {
 			return err
 		}

@@ -96,6 +96,7 @@ func testTagScenes(t *testing.T, tc testTagCase) {
 			},
 		},
 	},
+	TagIDs: models.NewRelatedIDs([]int{}),
 })
 }

@@ -188,6 +189,7 @@ func testTagImages(t *testing.T, tc testTagCase) {
 	images = append(images, &models.Image{
 		ID: i + 1,
 		Files: []*file.ImageFile{makeImageFile(p)},
+		TagIDs: models.NewRelatedIDs([]int{}),
 	})
 }

@@ -286,6 +288,7 @@ func testTagGalleries(t *testing.T, tc testTagCase) {
 			Path: v,
 		},
 	},
+	TagIDs: models.NewRelatedIDs([]int{}),
 })
 }
@@ -193,6 +193,17 @@ func (t *SceneIdentifier) getSceneUpdater(ctx context.Context, s *models.Scene,
 func (t *SceneIdentifier) modifyScene(ctx context.Context, txnManager txn.Manager, s *models.Scene, result *scrapeResult) error {
 	var updater *scene.UpdateSet
 	if err := txn.WithTxn(ctx, txnManager, func(ctx context.Context) error {
+		// load scene relationships
+		if err := s.LoadPerformerIDs(ctx, t.SceneReaderUpdater); err != nil {
+			return err
+		}
+		if err := s.LoadTagIDs(ctx, t.SceneReaderUpdater); err != nil {
+			return err
+		}
+		if err := s.LoadStashIDs(ctx, t.SceneReaderUpdater); err != nil {
+			return err
+		}
+
 		var err error
 		updater, err = t.getSceneUpdater(ctx, s, result)
 		if err != nil {

@@ -205,8 +216,7 @@ func (t *SceneIdentifier) modifyScene(ctx context.Context, txnManager txn.Manage
 		return nil
 	}
 
-	_, err = updater.Update(ctx, t.SceneReaderUpdater, t.ScreenshotSetter)
-	if err != nil {
+	if _, err := updater.Update(ctx, t.SceneReaderUpdater, t.ScreenshotSetter); err != nil {
 		return fmt.Errorf("error updating scene: %w", err)
 	}

@@ -129,6 +129,9 @@ func TestSceneIdentifier_Identify(t *testing.T) {
 		t.Run(tt.name, func(t *testing.T) {
 			scene := &models.Scene{
 				ID: tt.sceneID,
+				PerformerIDs: models.NewRelatedIDs([]int{}),
+				TagIDs: models.NewRelatedIDs([]int{}),
+				StashIDs: models.NewRelatedStashIDs([]models.StashID{}),
 			}
 			if err := identifier.Identify(testCtx, &mocks.TxnManager{}, scene); (err != nil) != tt.wantErr {
 				t.Errorf("SceneIdentifier.Identify() error = %v, wantErr %v", err, tt.wantErr)

@@ -155,7 +158,11 @@ func TestSceneIdentifier_modifyScene(t *testing.T) {
 		{
 			"empty update",
 			args{
-				&models.Scene{},
+				&models.Scene{
+					PerformerIDs: models.NewRelatedIDs([]int{}),
+					TagIDs: models.NewRelatedIDs([]int{}),
+					StashIDs: models.NewRelatedStashIDs([]models.StashID{}),
+				},
 				&scrapeResult{
 					result: &scraper.ScrapedScene{},
 				},
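Because relationships are now loaded on demand, the identify flow above pulls them inside the same txn.WithTxn block that builds and applies the update set, so the scene it reads stays consistent with the rows it writes. A rough sketch of that ordering, with a hypothetical reader variable standing in for t.SceneReaderUpdater and the update-building step elided:

// Sketch, assuming a reader that satisfies models.PerformerIDLoader,
// models.TagIDLoader and models.StashIDLoader as SceneReaderUpdater does above.
err := txn.WithTxn(ctx, txnManager, func(ctx context.Context) error {
	// populate the lazily-loaded relationships before computing the update
	if err := s.LoadPerformerIDs(ctx, reader); err != nil {
		return err
	}
	if err := s.LoadTagIDs(ctx, reader); err != nil {
		return err
	}
	if err := s.LoadStashIDs(ctx, reader); err != nil {
		return err
	}
	// ... build the scene.UpdateSet from the now-populated scene and apply it ...
	return nil
})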
@@ -13,7 +13,7 @@ import (
 
 type PerformerCreator interface {
 	Create(ctx context.Context, newPerformer models.Performer) (*models.Performer, error)
-	UpdateStashIDs(ctx context.Context, performerID int, stashIDs []*models.StashID) error
+	UpdateStashIDs(ctx context.Context, performerID int, stashIDs []models.StashID) error
 }
 
 func getPerformerID(ctx context.Context, endpoint string, w PerformerCreator, p *models.ScrapedPerformer, createMissing bool) (*int, error) {

@@ -39,7 +39,7 @@ func createMissingPerformer(ctx context.Context, endpoint string, w PerformerCre
 	}
 
 	if endpoint != "" && p.RemoteSiteID != nil {
-		if err := w.UpdateStashIDs(ctx, created.ID, []*models.StashID{
+		if err := w.UpdateStashIDs(ctx, created.ID, []models.StashID{
 			{
 				Endpoint: endpoint,
 				StashID: *p.RemoteSiteID,

@@ -141,13 +141,13 @@ func Test_createMissingPerformer(t *testing.T) {
 		return p.Name.String == invalidName
 	})).Return(nil, errors.New("error creating performer"))
 
-	mockPerformerReaderWriter.On("UpdateStashIDs", testCtx, performerID, []*models.StashID{
+	mockPerformerReaderWriter.On("UpdateStashIDs", testCtx, performerID, []models.StashID{
 		{
 			Endpoint: invalidEndpoint,
 			StashID: remoteSiteID,
 		},
 	}).Return(errors.New("error updating stash ids"))
-	mockPerformerReaderWriter.On("UpdateStashIDs", testCtx, performerID, []*models.StashID{
+	mockPerformerReaderWriter.On("UpdateStashIDs", testCtx, performerID, []models.StashID{
 		{
 			Endpoint: validEndpoint,
 			StashID: remoteSiteID,

@@ -18,6 +18,9 @@ import (
 type SceneReaderUpdater interface {
 	GetCover(ctx context.Context, sceneID int) ([]byte, error)
 	scene.Updater
+	models.PerformerIDLoader
+	models.TagIDLoader
+	models.StashIDLoader
 }
 
 type TagCreator interface {

@@ -82,7 +85,7 @@ func (g sceneRelationships) performers(ctx context.Context, ignoreMale bool) ([]
 	endpoint := g.result.source.RemoteSite
 
 	var performerIDs []int
-	originalPerformerIDs := g.scene.PerformerIDs
+	originalPerformerIDs := g.scene.PerformerIDs.List()
 
 	if strategy == FieldStrategyMerge {
 		// add to existing

@@ -129,7 +132,7 @@ func (g sceneRelationships) tags(ctx context.Context) ([]int, error) {
 	}
 
 	var tagIDs []int
-	originalTagIDs := target.TagIDs
+	originalTagIDs := target.TagIDs.List()
 
 	if strategy == FieldStrategyMerge {
 		// add to existing

@@ -186,7 +189,7 @@ func (g sceneRelationships) stashIDs(ctx context.Context) ([]models.StashID, err
 	}
 
 	var stashIDs []models.StashID
-	originalStashIDs := target.StashIDs
+	originalStashIDs := target.StashIDs.List()
 
 	if strategy == FieldStrategyMerge {
 		// add to existing

@@ -158,13 +158,16 @@ func Test_sceneRelationships_performers(t *testing.T) {
 
 	emptyScene := &models.Scene{
 		ID: sceneID,
+		PerformerIDs: models.NewRelatedIDs([]int{}),
+		TagIDs: models.NewRelatedIDs([]int{}),
+		StashIDs: models.NewRelatedStashIDs([]models.StashID{}),
 	}
 
 	sceneWithPerformer := &models.Scene{
 		ID: sceneWithPerformerID,
-		PerformerIDs: []int{
+		PerformerIDs: models.NewRelatedIDs([]int{
 			existingPerformerID,
-		},
+		}),
 	}
 
 	tr := sceneRelationships{

@@ -174,7 +177,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
 
 	tests := []struct {
 		name string
-		sceneID *models.Scene
+		scene *models.Scene
 		fieldOptions *FieldOptions
 		scraped []*models.ScrapedPerformer
 		ignoreMale bool

@@ -301,7 +304,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
-			tr.scene = tt.sceneID
+			tr.scene = tt.scene
 			tr.fieldOptions["performers"] = tt.fieldOptions
 			tr.result = &scrapeResult{
 				result: &scraper.ScrapedScene{

@@ -342,13 +345,18 @@ func Test_sceneRelationships_tags(t *testing.T) {
 
 	emptyScene := &models.Scene{
 		ID: sceneID,
+		TagIDs: models.NewRelatedIDs([]int{}),
+		PerformerIDs: models.NewRelatedIDs([]int{}),
+		StashIDs: models.NewRelatedStashIDs([]models.StashID{}),
 	}
 
 	sceneWithTag := &models.Scene{
 		ID: sceneWithTagID,
-		TagIDs: []int{
+		TagIDs: models.NewRelatedIDs([]int{
 			existingID,
-		},
+		}),
+		PerformerIDs: models.NewRelatedIDs([]int{}),
+		StashIDs: models.NewRelatedStashIDs([]models.StashID{}),
 	}
 
 	mockSceneReaderWriter := &mocks.SceneReaderWriter{}

@@ -531,12 +539,12 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
 
 	sceneWithStashIDs := &models.Scene{
 		ID: sceneWithStashID,
-		StashIDs: []models.StashID{
+		StashIDs: models.NewRelatedStashIDs([]models.StashID{
 			{
 				StashID: remoteSiteID,
 				Endpoint: existingEndpoint,
 			},
-		},
+		}),
 	}
 
 	mockSceneReaderWriter := &mocks.SceneReaderWriter{}
@@ -12,7 +12,7 @@ import (
 
 type StudioCreator interface {
 	Create(ctx context.Context, newStudio models.Studio) (*models.Studio, error)
-	UpdateStashIDs(ctx context.Context, studioID int, stashIDs []*models.StashID) error
+	UpdateStashIDs(ctx context.Context, studioID int, stashIDs []models.StashID) error
 }
 
 func createMissingStudio(ctx context.Context, endpoint string, w StudioCreator, studio *models.ScrapedStudio) (*int, error) {

@@ -22,7 +22,7 @@ func createMissingStudio(ctx context.Context, endpoint string, w StudioCreator,
 	}
 
 	if endpoint != "" && studio.RemoteSiteID != nil {
-		if err := w.UpdateStashIDs(ctx, created.ID, []*models.StashID{
+		if err := w.UpdateStashIDs(ctx, created.ID, []models.StashID{
 			{
 				Endpoint: endpoint,
 				StashID: *studio.RemoteSiteID,

@@ -30,13 +30,13 @@ func Test_createMissingStudio(t *testing.T) {
 		return p.Name.String == invalidName
 	})).Return(nil, errors.New("error creating performer"))
 
-	mockStudioReaderWriter.On("UpdateStashIDs", testCtx, createdID, []*models.StashID{
+	mockStudioReaderWriter.On("UpdateStashIDs", testCtx, createdID, []models.StashID{
 		{
 			Endpoint: invalidEndpoint,
 			StashID: remoteSiteID,
 		},
 	}).Return(errors.New("error updating stash ids"))
-	mockStudioReaderWriter.On("UpdateStashIDs", testCtx, createdID, []*models.StashID{
+	mockStudioReaderWriter.On("UpdateStashIDs", testCtx, createdID, []models.StashID{
 		{
 			Endpoint: validEndpoint,
 			StashID: remoteSiteID,

@@ -403,6 +403,10 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
 	for s := range jobChan {
 		sceneHash := s.GetHash(t.fileNamingAlgorithm)
 
+		if err := s.LoadRelationships(ctx, sceneReader); err != nil {
+			logger.Errorf("[scenes] <%s> error loading scene relationships: %v", sceneHash, err)
+		}
+
 		newSceneJSON, err := scene.ToBasicJSON(ctx, sceneReader, s)
 		if err != nil {
 			logger.Errorf("[scenes] <%s> error getting scene JSON: %s", sceneHash, err.Error())

@@ -166,7 +166,7 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag(ctx context.Context) {
 	_, err := r.Performer.Update(ctx, partial)
 
 	if !t.refresh {
-		err = r.Performer.UpdateStashIDs(ctx, t.performer.ID, []*models.StashID{
+		err = r.Performer.UpdateStashIDs(ctx, t.performer.ID, []models.StashID{
 			{
 				Endpoint: t.box.Endpoint,
 				StashID: *performer.RemoteSiteID,

@@ -231,7 +231,7 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag(ctx context.Context) {
 		return err
 	}
 
-	err = r.Performer.UpdateStashIDs(ctx, createdPerformer.ID, []*models.StashID{
+	err = r.Performer.UpdateStashIDs(ctx, createdPerformer.ID, []models.StashID{
 		{
 			Endpoint: t.box.Endpoint,
 			StashID: *performer.RemoteSiteID,
@@ -91,8 +91,12 @@ func (s *Service) destroyFolderImages(ctx context.Context, i *models.Gallery, fi
 	}
 
 	for _, img := range imgs {
+		if err := img.LoadGalleryIDs(ctx, s.ImageFinder); err != nil {
+			return nil, err
+		}
+
 		// only destroy images that are not attached to other galleries
-		if len(img.GalleryIDs) > 1 {
+		if len(img.GalleryIDs.List()) > 1 {
 			continue
 		}

@@ -14,7 +14,7 @@ import (
 )
 
 type Importer struct {
-	ReaderWriter FinderCreatorUpdater
+	ReaderWriter FullCreatorUpdater
 	StudioWriter studio.NameFinderCreator
 	PerformerWriter performer.NameFinderCreator
 	TagWriter tag.NameFinderCreator

@@ -24,6 +24,11 @@ type Importer struct {
 	gallery models.Gallery
 }
 
+type FullCreatorUpdater interface {
+	FinderCreatorUpdater
+	Update(ctx context.Context, updatedGallery *models.Gallery) error
+}
+
 func (i *Importer) PreImport(ctx context.Context) error {
 	i.gallery = i.galleryJSONToGallery(i.Input)

@@ -43,7 +48,10 @@ func (i *Importer) PreImport(ctx context.Context) error {
 }
 
 func (i *Importer) galleryJSONToGallery(galleryJSON jsonschema.Gallery) models.Gallery {
-	newGallery := models.Gallery{}
+	newGallery := models.Gallery{
+		PerformerIDs: models.NewRelatedIDs([]int{}),
+		TagIDs: models.NewRelatedIDs([]int{}),
+	}
 
 	if galleryJSON.Title != "" {
 		newGallery.Title = galleryJSON.Title

@@ -149,7 +157,7 @@ func (i *Importer) populatePerformers(ctx context.Context) error {
 		}
 
 		for _, p := range performers {
-			i.gallery.PerformerIDs = append(i.gallery.PerformerIDs, p.ID)
+			i.gallery.PerformerIDs.Add(p.ID)
 		}
 	}

@@ -207,7 +215,7 @@ func (i *Importer) populateTags(ctx context.Context) error {
 		}
 
 		for _, t := range tags {
-			i.gallery.TagIDs = append(i.gallery.TagIDs, t.ID)
+			i.gallery.TagIDs.Add(t.ID)
 		}
 	}
@@ -11,7 +11,6 @@ import (
 	"github.com/stashapp/stash/pkg/logger"
 	"github.com/stashapp/stash/pkg/models"
 	"github.com/stashapp/stash/pkg/plugin"
-	"github.com/stashapp/stash/pkg/sliceutil/intslice"
 )
 
 // const mutexType = "gallery"

@@ -20,16 +19,17 @@ type FinderCreatorUpdater interface {
 	FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error)
 	FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Gallery, error)
 	Create(ctx context.Context, newGallery *models.Gallery, fileIDs []file.ID) error
-	Update(ctx context.Context, updatedGallery *models.Gallery) error
+	AddFileID(ctx context.Context, id int, fileID file.ID) error
 }
 
 type SceneFinderUpdater interface {
 	FindByPath(ctx context.Context, p string) ([]*models.Scene, error)
 	Update(ctx context.Context, updatedScene *models.Scene) error
+	AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error
 }
 
 type ScanHandler struct {
-	CreatorUpdater FinderCreatorUpdater
+	CreatorUpdater FullCreatorUpdater
 	SceneFinderUpdater SceneFinderUpdater
 
 	PluginCache *plugin.Cache

@@ -97,8 +97,8 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.
 			i.Files = append(i.Files, f)
 		}
 
-		if err := h.CreatorUpdater.Update(ctx, i); err != nil {
-			return fmt.Errorf("updating gallery: %w", err)
+		if err := h.CreatorUpdater.AddFileID(ctx, i.ID, f.Base().ID); err != nil {
+			return fmt.Errorf("adding file to gallery: %w", err)
 		}
 	}

@@ -122,15 +122,10 @@ func (h *ScanHandler) associateScene(ctx context.Context, existing []*models.Gal
 
 	for _, scene := range scenes {
 		// found related Scene
-		newIDs := intslice.IntAppendUniques(scene.GalleryIDs, galleryIDs)
-		if len(newIDs) > len(scene.GalleryIDs) {
-			logger.Infof("associate: Gallery %s is related to scene: %s", f.Base().Path, scene.GetTitle())
-
-			scene.GalleryIDs = newIDs
-			if err := h.SceneFinderUpdater.Update(ctx, scene); err != nil {
-				return err
-			}
+		if err := h.SceneFinderUpdater.AddGalleryIDs(ctx, scene.ID, galleryIDs); err != nil {
+			return err
 		}
 	}
 
 	return nil
 }
@@ -20,6 +20,7 @@ type Repository interface {
 type ImageFinder interface {
 	FindByFolderID(ctx context.Context, folder file.FolderID) ([]*models.Image, error)
 	FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*models.Image, error)
+	models.GalleryIDLoader
 }
 
 type ImageService interface {
@@ -26,36 +26,22 @@ func AddImage(ctx context.Context, qb ImageUpdater, galleryID int, imageID int)
 	return qb.UpdateImages(ctx, galleryID, imageIDs)
 }
 
-func AddPerformer(ctx context.Context, qb PartialUpdater, o *models.Gallery, performerID int) (bool, error) {
-	if !intslice.IntInclude(o.PerformerIDs, performerID) {
-		if _, err := qb.UpdatePartial(ctx, o.ID, models.GalleryPartial{
-			PerformerIDs: &models.UpdateIDs{
-				IDs: []int{performerID},
-				Mode: models.RelationshipUpdateModeAdd,
-			},
-		}); err != nil {
-			return false, err
-		}
-
-		return true, nil
-	}
-
-	return false, nil
-}
-
-func AddTag(ctx context.Context, qb PartialUpdater, o *models.Gallery, tagID int) (bool, error) {
-	if !intslice.IntInclude(o.TagIDs, tagID) {
-		if _, err := qb.UpdatePartial(ctx, o.ID, models.GalleryPartial{
-			TagIDs: &models.UpdateIDs{
-				IDs: []int{tagID},
-				Mode: models.RelationshipUpdateModeAdd,
-			},
-		}); err != nil {
-			return false, err
-		}
-
-		return true, nil
-	}
-
-	return false, nil
-}
+func AddPerformer(ctx context.Context, qb PartialUpdater, o *models.Gallery, performerID int) error {
+	_, err := qb.UpdatePartial(ctx, o.ID, models.GalleryPartial{
+		PerformerIDs: &models.UpdateIDs{
+			IDs: []int{performerID},
+			Mode: models.RelationshipUpdateModeAdd,
+		},
+	})
+	return err
+}
+
+func AddTag(ctx context.Context, qb PartialUpdater, o *models.Gallery, tagID int) error {
+	_, err := qb.UpdatePartial(ctx, o.ID, models.GalleryPartial{
+		TagIDs: &models.UpdateIDs{
+			IDs: []int{tagID},
+			Mode: models.RelationshipUpdateModeAdd,
+		},
+	})
+	return err
+}
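With this hunk, gallery.AddPerformer and gallery.AddTag no longer report whether anything was added; they simply issue an add-mode partial update and return its error. The duplicate check moves to the caller, as in the autotag hunks earlier. A hedged usage sketch, assuming o already had its performer IDs loaded via LoadPerformerIDs and qb is any gallery.PartialUpdater:

// Sketch: caller-side duplicate check before the simplified AddPerformer.
if !intslice.IntInclude(o.PerformerIDs.List(), performerID) {
	if err := gallery.AddPerformer(ctx, qb, o, performerID); err != nil {
		return err
	}
}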
@@ -17,8 +17,13 @@ type GalleryChecksumsFinder interface {
 	FindByChecksums(ctx context.Context, checksums []string) ([]*models.Gallery, error)
 }
 
+type FullCreatorUpdater interface {
+	FinderCreatorUpdater
+	Update(ctx context.Context, updatedImage *models.Image) error
+}
+
 type Importer struct {
-	ReaderWriter FinderCreatorUpdater
+	ReaderWriter FullCreatorUpdater
 	StudioWriter studio.NameFinderCreator
 	GalleryWriter GalleryChecksumsFinder
 	PerformerWriter performer.NameFinderCreator

@@ -57,6 +62,9 @@ func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image {
 	newImage := models.Image{
 		// Checksum: imageJSON.Checksum,
 		// Path: i.Path,
+		PerformerIDs: models.NewRelatedIDs([]int{}),
+		TagIDs: models.NewRelatedIDs([]int{}),
+		GalleryIDs: models.NewRelatedIDs([]int{}),
 	}
 
 	if imageJSON.Title != "" {

@@ -145,7 +153,7 @@ func (i *Importer) populateGalleries(ctx context.Context) error {
 				continue
 			}
 		} else {
-			i.image.GalleryIDs = append(i.image.GalleryIDs, gallery[0].ID)
+			i.image.GalleryIDs.Add(gallery[0].ID)
 		}
 	}

@@ -190,7 +198,7 @@ func (i *Importer) populatePerformers(ctx context.Context) error {
 		}
 
 		for _, p := range performers {
-			i.image.PerformerIDs = append(i.image.PerformerIDs, p.ID)
+			i.image.PerformerIDs.Add(p.ID)
 		}
 	}

@@ -222,7 +230,7 @@ func (i *Importer) populateTags(ctx context.Context) error {
 		}
 
 		for _, t := range tags {
-			i.image.TagIDs = append(i.image.TagIDs, t.ID)
+			i.image.TagIDs.Add(t.ID)
 		}
 	}

@@ -24,7 +24,8 @@ type FinderCreatorUpdater interface {
 	FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Image, error)
 	FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Image, error)
 	Create(ctx context.Context, newImage *models.ImageCreateInput) error
-	Update(ctx context.Context, updatedImage *models.Image) error
+	AddFileID(ctx context.Context, id int, fileID file.ID) error
+	models.GalleryIDLoader
 }
 
 type GalleryFinderCreator interface {

@@ -97,6 +98,7 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File) error {
 	newImage := &models.Image{
 		CreatedAt: now,
 		UpdatedAt: now,
+		GalleryIDs: models.NewRelatedIDs([]int{}),
 	}
 
 	// if the file is in a zip, then associate it with the gallery

@@ -107,7 +109,7 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File) error {
 		}
 
 		for _, gg := range g {
-			newImage.GalleryIDs = append(newImage.GalleryIDs, gg.ID)
+			newImage.GalleryIDs.Add(gg.ID)
 		}
 	} else if h.ScanConfig.GetCreateGalleriesFromFolders() {
 		if err := h.associateFolderBasedGallery(ctx, newImage, imageFile); err != nil {

@@ -162,8 +164,8 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.
 			}
 		}
 
-		if err := h.CreatorUpdater.Update(ctx, i); err != nil {
-			return fmt.Errorf("updating image: %w", err)
+		if err := h.CreatorUpdater.AddFileID(ctx, i.ID, f.ID); err != nil {
+			return fmt.Errorf("adding file to image: %w", err)
 		}
 	}
 }

@@ -210,8 +212,12 @@ func (h *ScanHandler) associateFolderBasedGallery(ctx context.Context, newImage
 		return err
 	}
 
-	if g != nil && !intslice.IntInclude(newImage.GalleryIDs, g.ID) {
-		newImage.GalleryIDs = append(newImage.GalleryIDs, g.ID)
+	if err := newImage.LoadGalleryIDs(ctx, h.CreatorUpdater); err != nil {
+		return err
+	}
+
+	if g != nil && !intslice.IntInclude(newImage.GalleryIDs.List(), g.ID) {
+		newImage.GalleryIDs.Add(g.ID)
 		logger.Infof("Adding %s to folder-based gallery %s", f.Base().Path, g.Path())
 	}
@@ -4,43 +4,29 @@ import (
 	"context"
 
 	"github.com/stashapp/stash/pkg/models"
-	"github.com/stashapp/stash/pkg/sliceutil/intslice"
 )
 
 type PartialUpdater interface {
 	UpdatePartial(ctx context.Context, id int, partial models.ImagePartial) (*models.Image, error)
 }
 
-func AddPerformer(ctx context.Context, qb PartialUpdater, i *models.Image, performerID int) (bool, error) {
-	if !intslice.IntInclude(i.PerformerIDs, performerID) {
-		if _, err := qb.UpdatePartial(ctx, i.ID, models.ImagePartial{
-			PerformerIDs: &models.UpdateIDs{
-				IDs: []int{performerID},
-				Mode: models.RelationshipUpdateModeAdd,
-			},
-		}); err != nil {
-			return false, err
-		}
-
-		return true, nil
-	}
-
-	return false, nil
-}
-
-func AddTag(ctx context.Context, qb PartialUpdater, i *models.Image, tagID int) (bool, error) {
-	if !intslice.IntInclude(i.TagIDs, tagID) {
-		if _, err := qb.UpdatePartial(ctx, i.ID, models.ImagePartial{
-			TagIDs: &models.UpdateIDs{
-				IDs: []int{tagID},
-				Mode: models.RelationshipUpdateModeAdd,
-			},
-		}); err != nil {
-			return false, err
-		}
-
-		return true, nil
-	}
-
-	return false, nil
-}
+func AddPerformer(ctx context.Context, qb PartialUpdater, i *models.Image, performerID int) error {
+	_, err := qb.UpdatePartial(ctx, i.ID, models.ImagePartial{
+		PerformerIDs: &models.UpdateIDs{
+			IDs: []int{performerID},
+			Mode: models.RelationshipUpdateModeAdd,
+		},
+	})
+	return err
+}
+
+func AddTag(ctx context.Context, qb PartialUpdater, i *models.Image, tagID int) error {
+	_, err := qb.UpdatePartial(ctx, i.ID, models.ImagePartial{
+		TagIDs: &models.UpdateIDs{
+			IDs: []int{tagID},
+			Mode: models.RelationshipUpdateModeAdd,
+		},
+	})
+	return err
+}

@@ -74,14 +74,23 @@ type GalleryDestroyInput struct {
 	DeleteGenerated *bool `json:"delete_generated"`
 }
 
+type GalleryFinder interface {
+	FindMany(ctx context.Context, ids []int) ([]*Gallery, error)
+}
+
 type GalleryReader interface {
 	Find(ctx context.Context, id int) (*Gallery, error)
-	FindMany(ctx context.Context, ids []int) ([]*Gallery, error)
+	GalleryFinder
 	FindByChecksum(ctx context.Context, checksum string) ([]*Gallery, error)
 	FindByChecksums(ctx context.Context, checksums []string) ([]*Gallery, error)
 	FindByPath(ctx context.Context, path string) ([]*Gallery, error)
 	FindBySceneID(ctx context.Context, sceneID int) ([]*Gallery, error)
 	FindByImageID(ctx context.Context, imageID int) ([]*Gallery, error)
+
+	SceneIDLoader
+	PerformerIDLoader
+	TagIDLoader
 
 	Count(ctx context.Context) (int, error)
 	All(ctx context.Context) ([]*Gallery, error)
 	Query(ctx context.Context, galleryFilter *GalleryFilterType, findFilter *FindFilterType) ([]*Gallery, int, error)

@@ -102,6 +102,10 @@ type ImageReader interface {
 	All(ctx context.Context) ([]*Image, error)
 	Query(ctx context.Context, options ImageQueryOptions) (*ImageQueryResult, error)
 	QueryCount(ctx context.Context, imageFilter *ImageFilterType, findFilter *FindFilterType) (int, error)
+
+	GalleryIDLoader
+	PerformerIDLoader
+	TagIDLoader
 }
 
 type ImageWriter interface {
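The NewRelatedIDs, List and Add calls used throughout these hunks, together with the *IDLoader interfaces added above, imply a small lazily-loaded wrapper around a []int. The real type lives in the models package and is not shown in this diff; the following is only a guess at its general shape, offered to make the call sites easier to read, not as the actual implementation:

// Hypothetical sketch of a related-IDs wrapper; names and fields are assumptions.
type RelatedIDs struct {
	list   []int
	loaded bool
}

// NewRelatedIDs returns a wrapper that is already considered loaded.
func NewRelatedIDs(ids []int) RelatedIDs { return RelatedIDs{list: ids, loaded: true} }

// List returns the loaded IDs; callers are expected to load first.
func (r *RelatedIDs) List() []int { return r.list }

// Add appends an ID to an already-loaded list.
func (r *RelatedIDs) Add(id int) { r.list = append(r.list, id) }

// load fills the list once, using a query such as a GetPerformerIDs call.
func (r *RelatedIDs) load(fn func() ([]int, error)) error {
	if r.loaded {
		return nil
	}
	ids, err := fn()
	if err != nil {
		return err
	}
	r.list, r.loaded = ids, true
	return nil
}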
@@ -36,7 +36,7 @@ type Performer struct {
 	DeathDate string `json:"death_date,omitempty"`
 	HairColor string `json:"hair_color,omitempty"`
 	Weight int `json:"weight,omitempty"`
-	StashIDs []*models.StashID `json:"stash_ids,omitempty"`
+	StashIDs []models.StashID `json:"stash_ids,omitempty"`
 	IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"`
 }

@@ -19,7 +19,7 @@ type Studio struct {
 	Rating int `json:"rating,omitempty"`
 	Details string `json:"details,omitempty"`
 	Aliases []string `json:"aliases,omitempty"`
-	StashIDs []*models.StashID `json:"stash_ids,omitempty"`
+	StashIDs []models.StashID `json:"stash_ids,omitempty"`
 	IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"`
 }
@@ -272,6 +272,75 @@ func (_m *GalleryReaderWriter) GetImageIDs(ctx context.Context, galleryID int) (
 	return r0, r1
 }
 
+// GetPerformerIDs provides a mock function with given fields: ctx, relatedID
+func (_m *GalleryReaderWriter) GetPerformerIDs(ctx context.Context, relatedID int) ([]int, error) {
+	ret := _m.Called(ctx, relatedID)
+
+	var r0 []int
+	if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok {
+		r0 = rf(ctx, relatedID)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]int)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+		r1 = rf(ctx, relatedID)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetSceneIDs provides a mock function with given fields: ctx, relatedID
+func (_m *GalleryReaderWriter) GetSceneIDs(ctx context.Context, relatedID int) ([]int, error) {
+	ret := _m.Called(ctx, relatedID)
+
+	var r0 []int
+	if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok {
+		r0 = rf(ctx, relatedID)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]int)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+		r1 = rf(ctx, relatedID)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetTagIDs provides a mock function with given fields: ctx, relatedID
+func (_m *GalleryReaderWriter) GetTagIDs(ctx context.Context, relatedID int) ([]int, error) {
+	ret := _m.Called(ctx, relatedID)
+
+	var r0 []int
+	if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok {
+		r0 = rf(ctx, relatedID)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]int)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+		r1 = rf(ctx, relatedID)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
 // Query provides a mock function with given fields: ctx, galleryFilter, findFilter
 func (_m *GalleryReaderWriter) Query(ctx context.Context, galleryFilter *models.GalleryFilterType, findFilter *models.FindFilterType) ([]*models.Gallery, int, error) {
 	ret := _m.Called(ctx, galleryFilter, findFilter)
@@ -197,29 +197,6 @@ func (_m *ImageReaderWriter) FindByGalleryID(ctx context.Context, galleryID int)
 	return r0, r1
 }
 
-// FindByPath provides a mock function with given fields: ctx, path
-func (_m *ImageReaderWriter) FindByPath(ctx context.Context, path string) ([]*models.Image, error) {
-	ret := _m.Called(ctx, path)
-
-	var r0 []*models.Image
-	if rf, ok := ret.Get(0).(func(context.Context, string) []*models.Image); ok {
-		r0 = rf(ctx, path)
-	} else {
-		if ret.Get(0) != nil {
-			r0 = ret.Get(0).([]*models.Image)
-		}
-	}
-
-	var r1 error
-	if rf, ok := ret.Get(1).(func(context.Context, string) error); ok {
-		r1 = rf(ctx, path)
-	} else {
-		r1 = ret.Error(1)
-	}
-
-	return r0, r1
-}
-
 // FindMany provides a mock function with given fields: ctx, ids
 func (_m *ImageReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models.Image, error) {
 	ret := _m.Called(ctx, ids)
@@ -243,6 +220,75 @@ func (_m *ImageReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models
 	return r0, r1
 }
 
+// GetGalleryIDs provides a mock function with given fields: ctx, relatedID
+func (_m *ImageReaderWriter) GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error) {
+	ret := _m.Called(ctx, relatedID)
+
+	var r0 []int
+	if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok {
+		r0 = rf(ctx, relatedID)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]int)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+		r1 = rf(ctx, relatedID)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetPerformerIDs provides a mock function with given fields: ctx, relatedID
+func (_m *ImageReaderWriter) GetPerformerIDs(ctx context.Context, relatedID int) ([]int, error) {
+	ret := _m.Called(ctx, relatedID)
+
+	var r0 []int
+	if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok {
+		r0 = rf(ctx, relatedID)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]int)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+		r1 = rf(ctx, relatedID)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetTagIDs provides a mock function with given fields: ctx, relatedID
+func (_m *ImageReaderWriter) GetTagIDs(ctx context.Context, relatedID int) ([]int, error) {
+	ret := _m.Called(ctx, relatedID)
+
+	var r0 []int
+	if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok {
+		r0 = rf(ctx, relatedID)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]int)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+		r1 = rf(ctx, relatedID)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
 // IncrementOCounter provides a mock function with given fields: ctx, id
 func (_m *ImageReaderWriter) IncrementOCounter(ctx context.Context, id int) (int, error) {
 	ret := _m.Called(ctx, id)
@@ -360,22 +360,22 @@ func (_m *PerformerReaderWriter) GetImage(ctx context.Context, performerID int)
 	return r0, r1
 }
 
-// GetStashIDs provides a mock function with given fields: ctx, performerID
-func (_m *PerformerReaderWriter) GetStashIDs(ctx context.Context, performerID int) ([]*models.StashID, error) {
-	ret := _m.Called(ctx, performerID)
+// GetStashIDs provides a mock function with given fields: ctx, relatedID
+func (_m *PerformerReaderWriter) GetStashIDs(ctx context.Context, relatedID int) ([]models.StashID, error) {
+	ret := _m.Called(ctx, relatedID)
 
-	var r0 []*models.StashID
-	if rf, ok := ret.Get(0).(func(context.Context, int) []*models.StashID); ok {
-		r0 = rf(ctx, performerID)
+	var r0 []models.StashID
+	if rf, ok := ret.Get(0).(func(context.Context, int) []models.StashID); ok {
+		r0 = rf(ctx, relatedID)
 	} else {
 		if ret.Get(0) != nil {
-			r0 = ret.Get(0).([]*models.StashID)
+			r0 = ret.Get(0).([]models.StashID)
 		}
 	}
 
 	var r1 error
 	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
-		r1 = rf(ctx, performerID)
+		r1 = rf(ctx, relatedID)
 	} else {
 		r1 = ret.Error(1)
 	}
@@ -520,11 +520,11 @@ func (_m *PerformerReaderWriter) UpdateImage(ctx context.Context, performerID in
 }
 
 // UpdateStashIDs provides a mock function with given fields: ctx, performerID, stashIDs
-func (_m *PerformerReaderWriter) UpdateStashIDs(ctx context.Context, performerID int, stashIDs []*models.StashID) error {
+func (_m *PerformerReaderWriter) UpdateStashIDs(ctx context.Context, performerID int, stashIDs []models.StashID) error {
 	ret := _m.Called(ctx, performerID, stashIDs)
 
 	var r0 error
-	if rf, ok := ret.Get(0).(func(context.Context, int, []*models.StashID) error); ok {
+	if rf, ok := ret.Get(0).(func(context.Context, int, []models.StashID) error); ok {
 		r0 = rf(ctx, performerID, stashIDs)
 	} else {
 		r0 = ret.Error(0)
@@ -500,6 +500,121 @@ func (_m *SceneReaderWriter) GetCover(ctx context.Context, sceneID int) ([]byte,
 	return r0, r1
 }
 
+// GetGalleryIDs provides a mock function with given fields: ctx, relatedID
+func (_m *SceneReaderWriter) GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error) {
+	ret := _m.Called(ctx, relatedID)
+
+	var r0 []int
+	if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok {
+		r0 = rf(ctx, relatedID)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]int)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+		r1 = rf(ctx, relatedID)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetMovies provides a mock function with given fields: ctx, id
+func (_m *SceneReaderWriter) GetMovies(ctx context.Context, id int) ([]models.MoviesScenes, error) {
+	ret := _m.Called(ctx, id)
+
+	var r0 []models.MoviesScenes
+	if rf, ok := ret.Get(0).(func(context.Context, int) []models.MoviesScenes); ok {
+		r0 = rf(ctx, id)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]models.MoviesScenes)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+		r1 = rf(ctx, id)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetPerformerIDs provides a mock function with given fields: ctx, relatedID
+func (_m *SceneReaderWriter) GetPerformerIDs(ctx context.Context, relatedID int) ([]int, error) {
+	ret := _m.Called(ctx, relatedID)
+
+	var r0 []int
+	if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok {
+		r0 = rf(ctx, relatedID)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]int)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+		r1 = rf(ctx, relatedID)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetStashIDs provides a mock function with given fields: ctx, relatedID
+func (_m *SceneReaderWriter) GetStashIDs(ctx context.Context, relatedID int) ([]models.StashID, error) {
+	ret := _m.Called(ctx, relatedID)
+
+	var r0 []models.StashID
+	if rf, ok := ret.Get(0).(func(context.Context, int) []models.StashID); ok {
+		r0 = rf(ctx, relatedID)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]models.StashID)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+		r1 = rf(ctx, relatedID)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// GetTagIDs provides a mock function with given fields: ctx, relatedID
+func (_m *SceneReaderWriter) GetTagIDs(ctx context.Context, relatedID int) ([]int, error) {
+	ret := _m.Called(ctx, relatedID)
+
+	var r0 []int
+	if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok {
+		r0 = rf(ctx, relatedID)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).([]int)
+		}
+	}
+
+	var r1 error
+	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
+		r1 = rf(ctx, relatedID)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
 // IncrementOCounter provides a mock function with given fields: ctx, id
 func (_m *SceneReaderWriter) IncrementOCounter(ctx context.Context, id int) (int, error) {
 	ret := _m.Called(ctx, id)
@@ -270,22 +270,22 @@ func (_m *StudioReaderWriter) GetImage(ctx context.Context, studioID int) ([]byt
 	return r0, r1
 }
 
-// GetStashIDs provides a mock function with given fields: ctx, studioID
-func (_m *StudioReaderWriter) GetStashIDs(ctx context.Context, studioID int) ([]*models.StashID, error) {
-	ret := _m.Called(ctx, studioID)
+// GetStashIDs provides a mock function with given fields: ctx, relatedID
+func (_m *StudioReaderWriter) GetStashIDs(ctx context.Context, relatedID int) ([]models.StashID, error) {
+	ret := _m.Called(ctx, relatedID)
 
-	var r0 []*models.StashID
-	if rf, ok := ret.Get(0).(func(context.Context, int) []*models.StashID); ok {
-		r0 = rf(ctx, studioID)
+	var r0 []models.StashID
+	if rf, ok := ret.Get(0).(func(context.Context, int) []models.StashID); ok {
+		r0 = rf(ctx, relatedID)
 	} else {
 		if ret.Get(0) != nil {
-			r0 = ret.Get(0).([]*models.StashID)
+			r0 = ret.Get(0).([]models.StashID)
 		}
 	}
 
 	var r1 error
 	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
-		r1 = rf(ctx, studioID)
+		r1 = rf(ctx, relatedID)
 	} else {
 		r1 = ret.Error(1)
 	}
@@ -442,11 +442,11 @@ func (_m *StudioReaderWriter) UpdateImage(ctx context.Context, studioID int, ima
 }
 
 // UpdateStashIDs provides a mock function with given fields: ctx, studioID, stashIDs
-func (_m *StudioReaderWriter) UpdateStashIDs(ctx context.Context, studioID int, stashIDs []*models.StashID) error {
+func (_m *StudioReaderWriter) UpdateStashIDs(ctx context.Context, studioID int, stashIDs []models.StashID) error {
 	ret := _m.Called(ctx, studioID, stashIDs)
 
 	var r0 error
-	if rf, ok := ret.Get(0).(func(context.Context, int, []*models.StashID) error); ok {
+	if rf, ok := ret.Get(0).(func(context.Context, int, []models.StashID) error); ok {
 		r0 = rf(ctx, studioID, stashIDs)
 	} else {
 		r0 = ret.Error(0)
@@ -1,6 +1,7 @@
 package models
 
 import (
+	"context"
 	"path/filepath"
 	"time"
 
@@ -35,9 +36,27 @@ type Gallery struct {
 	CreatedAt time.Time `json:"created_at"`
 	UpdatedAt time.Time `json:"updated_at"`
 
-	SceneIDs     []int `json:"scene_ids"`
-	TagIDs       []int `json:"tag_ids"`
-	PerformerIDs []int `json:"performer_ids"`
+	SceneIDs     RelatedIDs `json:"scene_ids"`
+	TagIDs       RelatedIDs `json:"tag_ids"`
+	PerformerIDs RelatedIDs `json:"performer_ids"`
+}
+
+func (g *Gallery) LoadSceneIDs(ctx context.Context, l SceneIDLoader) error {
+	return g.SceneIDs.load(func() ([]int, error) {
+		return l.GetSceneIDs(ctx, g.ID)
+	})
+}
+
+func (g *Gallery) LoadPerformerIDs(ctx context.Context, l PerformerIDLoader) error {
+	return g.PerformerIDs.load(func() ([]int, error) {
+		return l.GetPerformerIDs(ctx, g.ID)
+	})
+}
+
+func (g *Gallery) LoadTagIDs(ctx context.Context, l TagIDLoader) error {
+	return g.TagIDs.load(func() ([]int, error) {
+		return l.GetTagIDs(ctx, g.ID)
+	})
 }
 
 func (g Gallery) PrimaryFile() file.File {
@@ -1,6 +1,7 @@
 package models
 
 import (
+	"context"
 	"time"
 
 	"github.com/stashapp/stash/pkg/file"
@@ -22,9 +23,27 @@ type Image struct {
 	CreatedAt time.Time `json:"created_at"`
 	UpdatedAt time.Time `json:"updated_at"`
 
-	GalleryIDs   []int `json:"gallery_ids"`
-	TagIDs       []int `json:"tag_ids"`
-	PerformerIDs []int `json:"performer_ids"`
+	GalleryIDs   RelatedIDs `json:"gallery_ids"`
+	TagIDs       RelatedIDs `json:"tag_ids"`
+	PerformerIDs RelatedIDs `json:"performer_ids"`
+}
+
+func (i *Image) LoadGalleryIDs(ctx context.Context, l GalleryIDLoader) error {
+	return i.GalleryIDs.load(func() ([]int, error) {
+		return l.GetGalleryIDs(ctx, i.ID)
+	})
+}
+
+func (i *Image) LoadPerformerIDs(ctx context.Context, l PerformerIDLoader) error {
+	return i.PerformerIDs.load(func() ([]int, error) {
+		return l.GetPerformerIDs(ctx, i.ID)
+	})
+}
+
+func (i *Image) LoadTagIDs(ctx context.Context, l TagIDLoader) error {
+	return i.TagIDs.load(func() ([]int, error) {
+		return l.GetTagIDs(ctx, i.ID)
+	})
 }
 
 func (i Image) PrimaryFile() *file.ImageFile {
@@ -1,6 +1,7 @@
 package models
 
 import (
+	"context"
 	"path/filepath"
 	"strconv"
 	"time"
@@ -26,11 +27,65 @@ type Scene struct {
 	CreatedAt time.Time `json:"created_at"`
 	UpdatedAt time.Time `json:"updated_at"`
 
-	GalleryIDs   []int          `json:"gallery_ids"`
-	TagIDs       []int          `json:"tag_ids"`
-	PerformerIDs []int          `json:"performer_ids"`
-	Movies       []MoviesScenes `json:"movies"`
-	StashIDs     []StashID      `json:"stash_ids"`
+	GalleryIDs   RelatedIDs      `json:"gallery_ids"`
+	TagIDs       RelatedIDs      `json:"tag_ids"`
+	PerformerIDs RelatedIDs      `json:"performer_ids"`
+	Movies       RelatedMovies   `json:"movies"`
+	StashIDs     RelatedStashIDs `json:"stash_ids"`
+}
+
+func (s *Scene) LoadGalleryIDs(ctx context.Context, l GalleryIDLoader) error {
+	return s.GalleryIDs.load(func() ([]int, error) {
+		return l.GetGalleryIDs(ctx, s.ID)
+	})
+}
+
+func (s *Scene) LoadPerformerIDs(ctx context.Context, l PerformerIDLoader) error {
+	return s.PerformerIDs.load(func() ([]int, error) {
+		return l.GetPerformerIDs(ctx, s.ID)
+	})
+}
+
+func (s *Scene) LoadTagIDs(ctx context.Context, l TagIDLoader) error {
+	return s.TagIDs.load(func() ([]int, error) {
+		return l.GetTagIDs(ctx, s.ID)
+	})
+}
+
+func (s *Scene) LoadMovies(ctx context.Context, l SceneMovieLoader) error {
+	return s.Movies.load(func() ([]MoviesScenes, error) {
+		return l.GetMovies(ctx, s.ID)
+	})
+}
+
+func (s *Scene) LoadStashIDs(ctx context.Context, l StashIDLoader) error {
+	return s.StashIDs.load(func() ([]StashID, error) {
+		return l.GetStashIDs(ctx, s.ID)
+	})
+}
+
+func (s *Scene) LoadRelationships(ctx context.Context, l SceneReader) error {
+	if err := s.LoadGalleryIDs(ctx, l); err != nil {
+		return err
+	}
+
+	if err := s.LoadPerformerIDs(ctx, l); err != nil {
+		return err
+	}
+
+	if err := s.LoadTagIDs(ctx, l); err != nil {
+		return err
+	}
+
+	if err := s.LoadMovies(ctx, l); err != nil {
+		return err
+	}
+
+	if err := s.LoadStashIDs(ctx, l); err != nil {
+		return err
+	}
+
+	return nil
 }
 
 func (s Scene) PrimaryFile() *file.VideoFile {
@@ -125,9 +125,13 @@ type PerformerFilterType struct {
 	IgnoreAutoTag *bool `json:"ignore_auto_tag"`
 }
 
+type PerformerFinder interface {
+	FindMany(ctx context.Context, ids []int) ([]*Performer, error)
+}
+
 type PerformerReader interface {
 	Find(ctx context.Context, id int) (*Performer, error)
-	FindMany(ctx context.Context, ids []int) ([]*Performer, error)
+	PerformerFinder
 	FindBySceneID(ctx context.Context, sceneID int) ([]*Performer, error)
 	FindNamesBySceneID(ctx context.Context, sceneID int) ([]*Performer, error)
 	FindByImageID(ctx context.Context, imageID int) ([]*Performer, error)
@@ -143,7 +147,7 @@ type PerformerReader interface {
 	QueryForAutoTag(ctx context.Context, words []string) ([]*Performer, error)
 	Query(ctx context.Context, performerFilter *PerformerFilterType, findFilter *FindFilterType) ([]*Performer, int, error)
 	GetImage(ctx context.Context, performerID int) ([]byte, error)
-	GetStashIDs(ctx context.Context, performerID int) ([]*StashID, error)
+	StashIDLoader
 	GetTagIDs(ctx context.Context, performerID int) ([]int, error)
 }
 
@@ -154,7 +158,7 @@ type PerformerWriter interface {
 	Destroy(ctx context.Context, id int) error
 	UpdateImage(ctx context.Context, performerID int, image []byte) error
 	DestroyImage(ctx context.Context, performerID int) error
-	UpdateStashIDs(ctx context.Context, performerID int, stashIDs []*StashID) error
+	UpdateStashIDs(ctx context.Context, performerID int, stashIDs []StashID) error
 	UpdateTags(ctx context.Context, performerID int, tagIDs []int) error
 }
 
191 pkg/models/relationships.go (new file)
@@ -0,0 +1,191 @@
+package models
+
+import "context"
+
+type SceneIDLoader interface {
+	GetSceneIDs(ctx context.Context, relatedID int) ([]int, error)
+}
+
+type GalleryIDLoader interface {
+	GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error)
+}
+
+type PerformerIDLoader interface {
+	GetPerformerIDs(ctx context.Context, relatedID int) ([]int, error)
+}
+
+type TagIDLoader interface {
+	GetTagIDs(ctx context.Context, relatedID int) ([]int, error)
+}
+
+type SceneMovieLoader interface {
+	GetMovies(ctx context.Context, id int) ([]MoviesScenes, error)
+}
+
+type StashIDLoader interface {
+	GetStashIDs(ctx context.Context, relatedID int) ([]StashID, error)
+}
+
+// RelatedIDs represents a list of related IDs.
+// TODO - this can be made generic
+type RelatedIDs struct {
+	list []int
+}
+
+// NewRelatedIDs returns a loaded RelatedIDs object with the provided IDs.
+// Loaded will return true when called on the returned object if the provided slice is not nil.
+func NewRelatedIDs(ids []int) RelatedIDs {
+	return RelatedIDs{
+		list: ids,
+	}
+}
+
+// Loaded returns true if the related IDs have been loaded.
+func (r RelatedIDs) Loaded() bool {
+	return r.list != nil
+}
+
+func (r RelatedIDs) mustLoaded() {
+	if !r.Loaded() {
+		panic("list has not been loaded")
+	}
+}
+
+// List returns the related IDs. Panics if the relationship has not been loaded.
+func (r RelatedIDs) List() []int {
+	r.mustLoaded()
+
+	return r.list
+}
+
+// Add adds the provided ids to the list. Panics if the relationship has not been loaded.
+func (r *RelatedIDs) Add(ids ...int) {
+	r.mustLoaded()
+
+	r.list = append(r.list, ids...)
+}
+
+func (r *RelatedIDs) load(fn func() ([]int, error)) error {
+	if r.Loaded() {
+		return nil
+	}
+
+	ids, err := fn()
+	if err != nil {
+		return err
+	}
+
+	if ids == nil {
+		ids = []int{}
+	}
+
+	r.list = ids
+
+	return nil
+}
+
+// RelatedMovies represents a list of related Movies.
+type RelatedMovies struct {
+	list []MoviesScenes
+}
+
+// NewRelatedMovies returns a loaded RelatedMovies object with the provided movies.
+// Loaded will return true when called on the returned object if the provided slice is not nil.
+func NewRelatedMovies(list []MoviesScenes) RelatedMovies {
+	return RelatedMovies{
+		list: list,
+	}
+}
+
+// Loaded returns true if the relationship has been loaded.
+func (r RelatedMovies) Loaded() bool {
+	return r.list != nil
+}
+
+func (r RelatedMovies) mustLoaded() {
+	if !r.Loaded() {
+		panic("list has not been loaded")
+	}
+}
+
+// List returns the related Movies. Panics if the relationship has not been loaded.
+func (r RelatedMovies) List() []MoviesScenes {
+	r.mustLoaded()
+
+	return r.list
+}
+
+// Add adds the provided ids to the list. Panics if the relationship has not been loaded.
+func (r *RelatedMovies) Add(movies ...MoviesScenes) {
+	r.mustLoaded()
+
+	r.list = append(r.list, movies...)
+}
+
+func (r *RelatedMovies) load(fn func() ([]MoviesScenes, error)) error {
+	if r.Loaded() {
+		return nil
+	}
+
+	ids, err := fn()
+	if err != nil {
+		return err
+	}
+
+	if ids == nil {
+		ids = []MoviesScenes{}
+	}
+
+	r.list = ids
+
+	return nil
+}
+
+type RelatedStashIDs struct {
+	list []StashID
+}
+
+// NewRelatedStashIDs returns a RelatedStashIDs object with the provided ids.
+// Loaded will return true when called on the returned object if the provided slice is not nil.
+func NewRelatedStashIDs(list []StashID) RelatedStashIDs {
+	return RelatedStashIDs{
+		list: list,
+	}
+}
+
+func (r RelatedStashIDs) mustLoaded() {
+	if !r.Loaded() {
+		panic("list has not been loaded")
+	}
+}
+
+// Loaded returns true if the relationship has been loaded.
+func (r RelatedStashIDs) Loaded() bool {
+	return r.list != nil
+}
+
+// List returns the related Stash IDs. Panics if the relationship has not been loaded.
+func (r RelatedStashIDs) List() []StashID {
+	r.mustLoaded()
+
+	return r.list
+}
+
+func (r *RelatedStashIDs) load(fn func() ([]StashID, error)) error {
+	if r.Loaded() {
+		return nil
+	}
+
+	ids, err := fn()
+	if err != nil {
+		return err
+	}
+
+	if ids == nil {
+		ids = []StashID{}
+	}
+
+	r.list = ids
+
+	return nil
+}
@@ -133,6 +133,13 @@ type SceneReader interface {
 	FindByPerformerID(ctx context.Context, performerID int) ([]*Scene, error)
 	FindByGalleryID(ctx context.Context, performerID int) ([]*Scene, error)
 	FindDuplicates(ctx context.Context, distance int) ([][]*Scene, error)
+
+	GalleryIDLoader
+	PerformerIDLoader
+	TagIDLoader
+	SceneMovieLoader
+	StashIDLoader
+
 	CountByPerformerID(ctx context.Context, performerID int) (int, error)
 	// FindByStudioID(studioID int) ([]*Scene, error)
 	FindByMovieID(ctx context.Context, movieID int) ([]*Scene, error)
@@ -30,9 +30,13 @@ type StudioFilterType struct {
 	IgnoreAutoTag *bool `json:"ignore_auto_tag"`
 }
 
+type StudioFinder interface {
+	FindMany(ctx context.Context, ids []int) ([]*Studio, error)
+}
+
 type StudioReader interface {
 	Find(ctx context.Context, id int) (*Studio, error)
-	FindMany(ctx context.Context, ids []int) ([]*Studio, error)
+	StudioFinder
 	FindChildren(ctx context.Context, id int) ([]*Studio, error)
 	FindByName(ctx context.Context, name string, nocase bool) (*Studio, error)
 	FindByStashID(ctx context.Context, stashID StashID) ([]*Studio, error)
@@ -44,7 +48,7 @@ type StudioReader interface {
 	Query(ctx context.Context, studioFilter *StudioFilterType, findFilter *FindFilterType) ([]*Studio, int, error)
 	GetImage(ctx context.Context, studioID int) ([]byte, error)
 	HasImage(ctx context.Context, studioID int) (bool, error)
-	GetStashIDs(ctx context.Context, studioID int) ([]*StashID, error)
+	StashIDLoader
 	GetAliases(ctx context.Context, studioID int) ([]string, error)
 }
 
@@ -55,7 +59,7 @@ type StudioWriter interface {
 	Destroy(ctx context.Context, id int) error
 	UpdateImage(ctx context.Context, studioID int, image []byte) error
 	DestroyImage(ctx context.Context, studioID int) error
-	UpdateStashIDs(ctx context.Context, studioID int, stashIDs []*StashID) error
+	UpdateStashIDs(ctx context.Context, studioID int, stashIDs []StashID) error
 	UpdateAliases(ctx context.Context, studioID int, aliases []string) error
 }
 
@@ -34,9 +34,13 @@ type TagFilterType struct {
 	IgnoreAutoTag *bool `json:"ignore_auto_tag"`
 }
 
+type TagFinder interface {
+	FindMany(ctx context.Context, ids []int) ([]*Tag, error)
+}
+
 type TagReader interface {
 	Find(ctx context.Context, id int) (*Tag, error)
-	FindMany(ctx context.Context, ids []int) ([]*Tag, error)
+	TagFinder
 	FindBySceneID(ctx context.Context, sceneID int) ([]*Tag, error)
 	FindByPerformerID(ctx context.Context, performerID int) ([]*Tag, error)
 	FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*Tag, error)
@@ -12,7 +12,7 @@ import (
 
 type ImageStashIDGetter interface {
 	GetImage(ctx context.Context, performerID int) ([]byte, error)
-	GetStashIDs(ctx context.Context, performerID int) ([]*models.StashID, error)
+	models.StashIDLoader
 }
 
 // ToJSON converts a Performer object into its JSON equivalent.
@@ -100,9 +100,9 @@ func ToJSON(ctx context.Context, reader ImageStashIDGetter, performer *models.Pe
 	}
 
 	stashIDs, _ := reader.GetStashIDs(ctx, performer.ID)
-	var ret []*models.StashID
+	var ret []models.StashID
 	for _, stashID := range stashIDs {
-		newJoin := &models.StashID{
+		newJoin := models.StashID{
 			StashID:  stashID.StashID,
 			Endpoint: stashID.Endpoint,
 		}
@@ -50,8 +50,8 @@ var stashID = models.StashID{
 	StashID:  "StashID",
 	Endpoint: "Endpoint",
 }
-var stashIDs = []*models.StashID{
-	&stashID,
+var stashIDs = []models.StashID{
+	stashID,
 }
 
 const image = "aW1hZ2VCeXRlcw=="
@@ -155,8 +155,8 @@ func createFullJSONPerformer(name string, image string) *jsonschema.Performer {
 		DeathDate: deathDate.String,
 		HairColor: hairColor,
 		Weight:    weight,
-		StashIDs: []*models.StashID{
-			&stashID,
+		StashIDs: []models.StashID{
+			stashID,
 		},
 		IgnoreAutoTag: autoTagIgnored,
 	}
@@ -19,7 +19,7 @@ type NameFinderCreatorUpdater interface {
 	UpdateFull(ctx context.Context, updatedPerformer models.Performer) (*models.Performer, error)
 	UpdateTags(ctx context.Context, performerID int, tagIDs []int) error
 	UpdateImage(ctx context.Context, performerID int, image []byte) error
-	UpdateStashIDs(ctx context.Context, performerID int, stashIDs []*models.StashID) error
+	UpdateStashIDs(ctx context.Context, performerID int, stashIDs []models.StashID) error
 }
 
 type Importer struct {
@@ -80,7 +80,7 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (
 	}
 
 	var ret []models.StashID
-	for _, stashID := range scene.StashIDs {
+	for _, stashID := range scene.StashIDs.List() {
 		newJoin := models.StashID{
 			StashID:  stashID.StashID,
 			Endpoint: stashID.Endpoint,
@@ -219,7 +219,7 @@ type MovieFinder interface {
 // GetSceneMoviesJSON returns a slice of SceneMovie JSON representation objects
 // corresponding to the provided scene's scene movie relationships.
 func GetSceneMoviesJSON(ctx context.Context, movieReader MovieFinder, scene *models.Scene) ([]jsonschema.SceneMovie, error) {
-	sceneMovies := scene.Movies
+	sceneMovies := scene.Movies.List()
 
 	var results []jsonschema.SceneMovie
 	for _, sceneMovie := range sceneMovies {
@@ -246,7 +246,7 @@ func GetSceneMoviesJSON(ctx context.Context, movieReader MovieFinder, scene *mod
 func GetDependentMovieIDs(ctx context.Context, scene *models.Scene) ([]int, error) {
 	var ret []int
 
-	m := scene.Movies
+	m := scene.Movies.List()
 	for _, mm := range m {
 		ret = append(ret, mm.MovieID)
 	}
@@ -18,6 +18,7 @@ import (
 
 type FullCreatorUpdater interface {
 	CreatorUpdater
+	Update(ctx context.Context, updatedScene *models.Scene) error
 	Updater
 }
 
@@ -78,6 +79,11 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene {
 		Title:   sceneJSON.Title,
 		Details: sceneJSON.Details,
 		URL:     sceneJSON.URL,
+		PerformerIDs: models.NewRelatedIDs([]int{}),
+		TagIDs:       models.NewRelatedIDs([]int{}),
+		GalleryIDs:   models.NewRelatedIDs([]int{}),
+		Movies:       models.NewRelatedMovies([]models.MoviesScenes{}),
+		StashIDs:     models.NewRelatedStashIDs(sceneJSON.StashIDs),
 	}
 
 	// if sceneJSON.Checksum != "" {
@@ -141,8 +147,6 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene {
 	// }
 	// }
 
-	newScene.StashIDs = append(newScene.StashIDs, i.Input.StashIDs...)
-
 	return newScene
 }
 
@@ -214,7 +218,7 @@ func (i *Importer) populateGalleries(ctx context.Context) error {
 		}
 
 		for _, o := range galleries {
-			i.scene.GalleryIDs = append(i.scene.GalleryIDs, o.ID)
+			i.scene.GalleryIDs.Add(o.ID)
 		}
 	}
 
@@ -259,7 +263,7 @@ func (i *Importer) populatePerformers(ctx context.Context) error {
 		}
 
 		for _, p := range performers {
-			i.scene.PerformerIDs = append(i.scene.PerformerIDs, p.ID)
+			i.scene.PerformerIDs.Add(p.ID)
 		}
 	}
 
@@ -317,7 +321,7 @@ func (i *Importer) populateMovies(ctx context.Context) error {
 			toAdd.SceneIndex = &index
 		}
 
-		i.scene.Movies = append(i.scene.Movies, toAdd)
+		i.scene.Movies.Add(toAdd)
 	}
 	}
 
@@ -344,7 +348,7 @@ func (i *Importer) populateTags(ctx context.Context) error {
 		}
 
 		for _, p := range tags {
-			i.scene.TagIDs = append(i.scene.TagIDs, p.ID)
+			i.scene.TagIDs.Add(p.ID)
 		}
 	}
 
@@ -22,8 +22,8 @@ type CreatorUpdater interface {
 	FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Scene, error)
 	FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Scene, error)
 	Create(ctx context.Context, newScene *models.Scene, fileIDs []file.ID) error
-	Update(ctx context.Context, updatedScene *models.Scene) error
 	UpdatePartial(ctx context.Context, id int, updatedScene models.ScenePartial) (*models.Scene, error)
+	AddFileID(ctx context.Context, id int, fileID file.ID) error
 }
 
 type ScanGenerator interface {
@@ -118,7 +118,7 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.
 	for _, s := range existing {
 		found := false
 		for _, sf := range s.Files {
-			if sf.ID == f.Base().ID {
+			if sf.ID == f.ID {
 				found = true
 				break
 			}
@@ -127,10 +127,10 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.
 		if !found {
 			logger.Infof("Adding %s to scene %s", f.Path, s.GetTitle())
 			s.Files = append(s.Files, f)
-		}
 
-		if err := h.CreatorUpdater.Update(ctx, s); err != nil {
-			return fmt.Errorf("updating scene: %w", err)
+			if err := h.CreatorUpdater.AddFileID(ctx, s.ID, f.ID); err != nil {
+				return fmt.Errorf("adding file to scene: %w", err)
+			}
 		}
 	}
 
@@ -7,7 +7,6 @@ import (
 	"time"
 
 	"github.com/stashapp/stash/pkg/models"
-	"github.com/stashapp/stash/pkg/sliceutil/intslice"
 	"github.com/stashapp/stash/pkg/utils"
 )
 
@@ -87,53 +86,32 @@ func (u UpdateSet) UpdateInput() models.SceneUpdateInput {
 	return ret
 }
 
-func AddPerformer(ctx context.Context, qb PartialUpdater, o *models.Scene, performerID int) (bool, error) {
-	if !intslice.IntInclude(o.PerformerIDs, performerID) {
-		if _, err := qb.UpdatePartial(ctx, o.ID, models.ScenePartial{
-			PerformerIDs: &models.UpdateIDs{
-				IDs:  []int{performerID},
-				Mode: models.RelationshipUpdateModeAdd,
-			},
-		}); err != nil {
-			return false, err
-		}
-
-		return true, nil
-	}
-
-	return false, nil
-}
-
-func AddTag(ctx context.Context, qb PartialUpdater, o *models.Scene, tagID int) (bool, error) {
-	if !intslice.IntInclude(o.TagIDs, tagID) {
-		if _, err := qb.UpdatePartial(ctx, o.ID, models.ScenePartial{
-			TagIDs: &models.UpdateIDs{
-				IDs:  []int{tagID},
-				Mode: models.RelationshipUpdateModeAdd,
-			},
-		}); err != nil {
-			return false, err
-		}
-
-		return true, nil
-	}
-
-	return false, nil
-}
-
-func AddGallery(ctx context.Context, qb PartialUpdater, o *models.Scene, galleryID int) (bool, error) {
-	if !intslice.IntInclude(o.GalleryIDs, galleryID) {
-		if _, err := qb.UpdatePartial(ctx, o.ID, models.ScenePartial{
-			TagIDs: &models.UpdateIDs{
-				IDs:  []int{galleryID},
-				Mode: models.RelationshipUpdateModeAdd,
-			},
-		}); err != nil {
-			return false, err
-		}
-
-		return true, nil
-	}
-
-	return false, nil
-}
+func AddPerformer(ctx context.Context, qb PartialUpdater, o *models.Scene, performerID int) error {
+	_, err := qb.UpdatePartial(ctx, o.ID, models.ScenePartial{
+		PerformerIDs: &models.UpdateIDs{
+			IDs:  []int{performerID},
+			Mode: models.RelationshipUpdateModeAdd,
+		},
+	})
+	return err
+}
+
+func AddTag(ctx context.Context, qb PartialUpdater, o *models.Scene, tagID int) error {
+	_, err := qb.UpdatePartial(ctx, o.ID, models.ScenePartial{
+		TagIDs: &models.UpdateIDs{
+			IDs:  []int{tagID},
+			Mode: models.RelationshipUpdateModeAdd,
+		},
+	})
+	return err
+}
+
+func AddGallery(ctx context.Context, qb PartialUpdater, o *models.Scene, galleryID int) error {
+	_, err := qb.UpdatePartial(ctx, o.ID, models.ScenePartial{
+		TagIDs: &models.UpdateIDs{
+			IDs:  []int{galleryID},
+			Mode: models.RelationshipUpdateModeAdd,
+		},
+	})
+	return err
+}
|||||||
@@ -32,20 +32,21 @@ import (
|
|||||||
|
|
||||||
type SceneReader interface {
|
type SceneReader interface {
|
||||||
Find(ctx context.Context, id int) (*models.Scene, error)
|
Find(ctx context.Context, id int) (*models.Scene, error)
|
||||||
|
models.StashIDLoader
|
||||||
}
|
}
|
||||||
|
|
||||||
type PerformerReader interface {
|
type PerformerReader interface {
|
||||||
match.PerformerFinder
|
match.PerformerFinder
|
||||||
Find(ctx context.Context, id int) (*models.Performer, error)
|
Find(ctx context.Context, id int) (*models.Performer, error)
|
||||||
FindBySceneID(ctx context.Context, sceneID int) ([]*models.Performer, error)
|
FindBySceneID(ctx context.Context, sceneID int) ([]*models.Performer, error)
|
||||||
GetStashIDs(ctx context.Context, performerID int) ([]*models.StashID, error)
|
models.StashIDLoader
|
||||||
GetImage(ctx context.Context, performerID int) ([]byte, error)
|
GetImage(ctx context.Context, performerID int) ([]byte, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
type StudioReader interface {
|
type StudioReader interface {
|
||||||
match.StudioFinder
|
match.StudioFinder
|
||||||
studio.Finder
|
studio.Finder
|
||||||
GetStashIDs(ctx context.Context, studioID int) ([]*models.StashID, error)
|
models.StashIDLoader
|
||||||
}
|
}
|
||||||
type TagFinder interface {
|
type TagFinder interface {
|
||||||
tag.Queryer
|
tag.Queryer
|
||||||
@@ -227,7 +228,11 @@ func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []strin
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
stashIDs := scene.StashIDs
|
if err := scene.LoadStashIDs(ctx, qb); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
stashIDs := scene.StashIDs.List()
|
||||||
sceneStashID := ""
|
sceneStashID := ""
|
||||||
for _, stashID := range stashIDs {
|
for _, stashID := range stashIDs {
|
||||||
if stashID.Endpoint == endpoint {
|
if stashID.Endpoint == endpoint {
|
||||||
@@ -827,8 +832,9 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo
|
|||||||
}
|
}
|
||||||
|
|
||||||
for _, stashID := range stashIDs {
|
for _, stashID := range stashIDs {
|
||||||
|
c := stashID
|
||||||
if stashID.Endpoint == endpoint {
|
if stashID.Endpoint == endpoint {
|
||||||
performerDraft.ID = &stashID.StashID
|
performerDraft.ID = &c.StashID
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -855,7 +861,7 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
stashIDs := scene.StashIDs
|
stashIDs := scene.StashIDs.List()
|
||||||
var stashID *string
|
var stashID *string
|
||||||
for _, v := range stashIDs {
|
for _, v := range stashIDs {
|
||||||
if v.Endpoint == endpoint {
|
if v.Endpoint == endpoint {
|
||||||
@@ -952,8 +958,9 @@ func (c Client) SubmitPerformerDraft(ctx context.Context, performer *models.Perf
|
|||||||
}
|
}
|
||||||
var stashID *string
|
var stashID *string
|
||||||
for _, v := range stashIDs {
|
for _, v := range stashIDs {
|
||||||
|
c := v
|
||||||
if v.Endpoint == endpoint {
|
if v.Endpoint == endpoint {
|
||||||
stashID = &v.StashID
|
stashID = &c.StashID
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -133,15 +133,21 @@ func (qb *GalleryStore) Create(ctx context.Context, newObject *models.Gallery, f
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := galleriesPerformersTableMgr.insertJoins(ctx, id, newObject.PerformerIDs); err != nil {
|
if newObject.PerformerIDs.Loaded() {
|
||||||
|
if err := galleriesPerformersTableMgr.insertJoins(ctx, id, newObject.PerformerIDs.List()); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := galleriesTagsTableMgr.insertJoins(ctx, id, newObject.TagIDs); err != nil {
|
}
|
||||||
|
if newObject.TagIDs.Loaded() {
|
||||||
|
if err := galleriesTagsTableMgr.insertJoins(ctx, id, newObject.TagIDs.List()); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := galleriesScenesTableMgr.insertJoins(ctx, id, newObject.SceneIDs); err != nil {
|
}
|
||||||
|
if newObject.SceneIDs.Loaded() {
|
||||||
|
if err := galleriesScenesTableMgr.insertJoins(ctx, id, newObject.SceneIDs.List()); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
updated, err := qb.Find(ctx, id)
|
updated, err := qb.Find(ctx, id)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -161,15 +167,21 @@ func (qb *GalleryStore) Update(ctx context.Context, updatedObject *models.Galler
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := galleriesPerformersTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.PerformerIDs); err != nil {
|
if updatedObject.PerformerIDs.Loaded() {
|
||||||
|
if err := galleriesPerformersTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.PerformerIDs.List()); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := galleriesTagsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.TagIDs); err != nil {
|
}
|
||||||
|
if updatedObject.TagIDs.Loaded() {
|
||||||
|
if err := galleriesTagsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.TagIDs.List()); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := galleriesScenesTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.SceneIDs); err != nil {
|
}
|
||||||
|
if updatedObject.SceneIDs.Loaded() {
|
||||||
|
if err := galleriesScenesTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.SceneIDs.List()); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fileIDs := make([]file.ID, len(updatedObject.Files))
|
fileIDs := make([]file.ID, len(updatedObject.Files))
|
||||||
for i, f := range updatedObject.Files {
|
for i, f := range updatedObject.Files {
|
||||||
@@ -249,16 +261,18 @@ func (qb *GalleryStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*
|
|||||||
|
|
||||||
s := f.resolve()
|
s := f.resolve()
|
||||||
|
|
||||||
if err := qb.resolveRelationships(ctx, s); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
ret = append(ret, s)
|
ret = append(ret, s)
|
||||||
return nil
|
return nil
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
for _, s := range ret {
|
||||||
|
if err := qb.resolveRelationships(ctx, s); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
return ret, nil
|
return ret, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -281,24 +295,6 @@ func (qb *GalleryStore) resolveRelationships(ctx context.Context, s *models.Gall
|
|||||||
s.FolderPath = folder.Path
|
s.FolderPath = folder.Path
|
||||||
}
|
}
|
||||||
|
|
||||||
// performers
|
|
||||||
s.PerformerIDs, err = qb.performersRepository().getIDs(ctx, s.ID)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("resolving gallery performers: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// tags
|
|
||||||
s.TagIDs, err = qb.tagsRepository().getIDs(ctx, s.ID)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("resolving gallery tags: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// scenes
|
|
||||||
s.SceneIDs, err = qb.scenesRepository().getIDs(ctx, s.ID)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("resolving gallery scenes: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -989,6 +985,11 @@ func (qb *GalleryStore) filesRepository() *filesRepository {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (qb *GalleryStore) AddFileID(ctx context.Context, id int, fileID file.ID) error {
|
||||||
|
const firstPrimary = false
|
||||||
|
return galleriesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []file.ID{fileID})
|
||||||
|
}
|
||||||
|
|
||||||
func (qb *GalleryStore) performersRepository() *joinRepository {
|
func (qb *GalleryStore) performersRepository() *joinRepository {
|
||||||
return &joinRepository{
|
return &joinRepository{
|
||||||
repository: repository{
|
repository: repository{
|
||||||
@@ -1000,6 +1001,10 @@ func (qb *GalleryStore) performersRepository() *joinRepository {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (qb *GalleryStore) GetPerformerIDs(ctx context.Context, id int) ([]int, error) {
|
||||||
|
return qb.performersRepository().getIDs(ctx, id)
|
||||||
|
}
|
||||||
|
|
||||||
func (qb *GalleryStore) tagsRepository() *joinRepository {
|
func (qb *GalleryStore) tagsRepository() *joinRepository {
|
||||||
return &joinRepository{
|
return &joinRepository{
|
||||||
repository: repository{
|
repository: repository{
|
||||||
@@ -1011,6 +1016,10 @@ func (qb *GalleryStore) tagsRepository() *joinRepository {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (qb *GalleryStore) GetTagIDs(ctx context.Context, id int) ([]int, error) {
|
||||||
|
return qb.tagsRepository().getIDs(ctx, id)
|
||||||
|
}
|
||||||
|
|
||||||
func (qb *GalleryStore) imagesRepository() *joinRepository {
|
func (qb *GalleryStore) imagesRepository() *joinRepository {
|
||||||
return &joinRepository{
|
return &joinRepository{
|
||||||
repository: repository{
|
repository: repository{
|
||||||
@@ -1041,3 +1050,7 @@ func (qb *GalleryStore) scenesRepository() *joinRepository {
|
|||||||
fkColumn: sceneIDColumn,
|
fkColumn: sceneIDColumn,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (qb *GalleryStore) GetSceneIDs(ctx context.Context, id int) ([]int, error) {
|
||||||
|
return qb.scenesRepository().getIDs(ctx, id)
|
||||||
|
}
|
||||||
|
|||||||
@@ -17,6 +17,26 @@ import (
 
 var invalidID = -1
 
+func loadGalleryRelationships(ctx context.Context, expected models.Gallery, actual *models.Gallery) error {
+	if expected.SceneIDs.Loaded() {
+		if err := actual.LoadSceneIDs(ctx, db.Gallery); err != nil {
+			return err
+		}
+	}
+	if expected.TagIDs.Loaded() {
+		if err := actual.LoadTagIDs(ctx, db.Gallery); err != nil {
+			return err
+		}
+	}
+	if expected.PerformerIDs.Loaded() {
+		if err := actual.LoadPerformerIDs(ctx, db.Gallery); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
 func Test_galleryQueryBuilder_Create(t *testing.T) {
 	var (
 		title = "title"
@@ -48,9 +68,9 @@ func Test_galleryQueryBuilder_Create(t *testing.T) {
 				StudioID: &studioIDs[studioIdxWithScene],
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
-				SceneIDs: []int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]},
-				TagIDs: []int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]},
-				PerformerIDs: []int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]},
+				SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
 				Files: []file.File{},
 			},
 			false,
@@ -70,9 +90,9 @@ func Test_galleryQueryBuilder_Create(t *testing.T) {
 				},
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
-				SceneIDs: []int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]},
-				TagIDs: []int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]},
-				PerformerIDs: []int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]},
+				SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
 			},
 			false,
 		},
@@ -86,21 +106,21 @@ func Test_galleryQueryBuilder_Create(t *testing.T) {
 		{
 			"invalid scene id",
 			models.Gallery{
-				SceneIDs: []int{invalidID},
+				SceneIDs: models.NewRelatedIDs([]int{invalidID}),
 			},
 			true,
 		},
 		{
 			"invalid tag id",
 			models.Gallery{
-				TagIDs: []int{invalidID},
+				TagIDs: models.NewRelatedIDs([]int{invalidID}),
 			},
 			true,
 		},
 		{
 			"invalid performer id",
 			models.Gallery{
-				PerformerIDs: []int{invalidID},
+				PerformerIDs: models.NewRelatedIDs([]int{invalidID}),
 			},
 			true,
 		},
@@ -132,6 +152,12 @@ func Test_galleryQueryBuilder_Create(t *testing.T) {
 			copy := tt.newObject
 			copy.ID = s.ID
 
+			// load relationships
+			if err := loadGalleryRelationships(ctx, copy, &s); err != nil {
+				t.Errorf("loadGalleryRelationships() error = %v", err)
+				return
+			}
+
 			assert.Equal(copy, s)
 
 			// ensure can find the scene
@@ -144,6 +170,12 @@ func Test_galleryQueryBuilder_Create(t *testing.T) {
 				return
 			}
 
+			// load relationships
+			if err := loadGalleryRelationships(ctx, copy, found); err != nil {
+				t.Errorf("loadGalleryRelationships() error = %v", err)
+				return
+			}
+
 			assert.Equal(copy, *found)
 
 			return
@@ -190,9 +222,9 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
 				},
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
-				SceneIDs: []int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]},
-				TagIDs: []int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]},
-				PerformerIDs: []int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]},
+				SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
 			},
 			false,
 		},
@@ -203,9 +235,9 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
 				Files: []file.File{
 					makeGalleryFileWithID(galleryIdxWithImage),
 				},
-				SceneIDs: []int{},
-				TagIDs: []int{},
-				PerformerIDs: []int{},
+				SceneIDs: models.NewRelatedIDs([]int{}),
+				TagIDs: models.NewRelatedIDs([]int{}),
+				PerformerIDs: models.NewRelatedIDs([]int{}),
 				Organized: true,
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
@@ -219,9 +251,9 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
 				Files: []file.File{
 					makeGalleryFileWithID(galleryIdxWithScene),
 				},
-				SceneIDs: []int{},
-				TagIDs: []int{},
-				PerformerIDs: []int{},
+				SceneIDs: models.NewRelatedIDs([]int{}),
+				TagIDs: models.NewRelatedIDs([]int{}),
+				PerformerIDs: models.NewRelatedIDs([]int{}),
 				Organized: true,
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
@@ -235,9 +267,9 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
 				Files: []file.File{
 					makeGalleryFileWithID(galleryIdxWithTag),
 				},
-				SceneIDs: []int{},
-				TagIDs: []int{},
-				PerformerIDs: []int{},
+				SceneIDs: models.NewRelatedIDs([]int{}),
+				TagIDs: models.NewRelatedIDs([]int{}),
+				PerformerIDs: models.NewRelatedIDs([]int{}),
 				Organized: true,
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
@@ -251,9 +283,9 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
 				Files: []file.File{
 					makeGalleryFileWithID(galleryIdxWithPerformer),
 				},
-				SceneIDs: []int{},
-				TagIDs: []int{},
-				PerformerIDs: []int{},
+				SceneIDs: models.NewRelatedIDs([]int{}),
+				TagIDs: models.NewRelatedIDs([]int{}),
+				PerformerIDs: models.NewRelatedIDs([]int{}),
 				Organized: true,
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
@@ -282,7 +314,7 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
 					makeGalleryFileWithID(galleryIdxWithImage),
 				},
 				Organized: true,
-				SceneIDs: []int{invalidID},
+				SceneIDs: models.NewRelatedIDs([]int{invalidID}),
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
 			},
@@ -296,7 +328,7 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
 					makeGalleryFileWithID(galleryIdxWithImage),
 				},
 				Organized: true,
-				TagIDs: []int{invalidID},
+				TagIDs: models.NewRelatedIDs([]int{invalidID}),
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
 			},
@@ -310,7 +342,7 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
 					makeGalleryFileWithID(galleryIdxWithImage),
 				},
 				Organized: true,
-				PerformerIDs: []int{invalidID},
+				PerformerIDs: models.NewRelatedIDs([]int{invalidID}),
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
 			},
@@ -339,6 +371,12 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
 				return
 			}
 
+			// load relationships
+			if err := loadGalleryRelationships(ctx, copy, s); err != nil {
+				t.Errorf("loadGalleryRelationships() error = %v", err)
+				return
+			}
+
 			assert.Equal(copy, *s)
 
 			return
@@ -426,9 +464,9 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) {
 				},
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
-				SceneIDs: []int{sceneIDs[sceneIdxWithGallery]},
-				TagIDs: []int{tagIDs[tagIdx1WithGallery], tagIDs[tagIdx1WithDupName]},
-				PerformerIDs: []int{performerIDs[performerIdx1WithGallery], performerIDs[performerIdx1WithDupName]},
+				SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdxWithGallery]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithGallery], tagIDs[tagIdx1WithDupName]}),
+				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithGallery], performerIDs[performerIdx1WithDupName]}),
 			},
 			false,
 		},
@@ -441,9 +479,9 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) {
 				Files: []file.File{
 					makeGalleryFile(galleryIdxWithImage),
 				},
-				SceneIDs: []int{},
-				TagIDs: []int{},
-				PerformerIDs: []int{},
+				SceneIDs: models.NewRelatedIDs([]int{}),
+				TagIDs: models.NewRelatedIDs([]int{}),
+				PerformerIDs: models.NewRelatedIDs([]int{}),
 			},
 			false,
 		},
@@ -472,6 +510,11 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) {
 			}
 
 			clearGalleryFileIDs(got)
+			// load relationships
+			if err := loadGalleryRelationships(ctx, tt.want, got); err != nil {
+				t.Errorf("loadGalleryRelationships() error = %v", err)
+				return
+			}
 			assert.Equal(tt.want, *got)
 
 			s, err := qb.Find(ctx, tt.id)
@@ -480,6 +523,11 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) {
 			}
 
 			clearGalleryFileIDs(s)
+			// load relationships
+			if err := loadGalleryRelationships(ctx, tt.want, s); err != nil {
+				t.Errorf("loadGalleryRelationships() error = %v", err)
+				return
+			}
 			assert.Equal(tt.want, *s)
 		})
 	}
@@ -503,10 +551,10 @@ func Test_galleryQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Gallery{
-				SceneIDs: append(indexesToIDs(sceneIDs, sceneGalleries.reverseLookup(galleryIdx1WithImage)),
+				SceneIDs: models.NewRelatedIDs(append(indexesToIDs(sceneIDs, sceneGalleries.reverseLookup(galleryIdx1WithImage)),
 					sceneIDs[sceneIdx1WithStudio],
 					sceneIDs[sceneIdx1WithPerformer],
-				),
+				)),
 			},
 			false,
 		},
@@ -520,10 +568,10 @@ func Test_galleryQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Gallery{
-				TagIDs: append(indexesToIDs(tagIDs, galleryTags[galleryIdxWithTwoTags]),
+				TagIDs: models.NewRelatedIDs(append(indexesToIDs(tagIDs, galleryTags[galleryIdxWithTwoTags]),
 					tagIDs[tagIdx1WithDupName],
 					tagIDs[tagIdx1WithImage],
-				),
+				)),
 			},
 			false,
 		},
@@ -537,10 +585,10 @@ func Test_galleryQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Gallery{
-				PerformerIDs: append(indexesToIDs(performerIDs, galleryPerformers[galleryIdxWithTwoPerformers]),
+				PerformerIDs: models.NewRelatedIDs(append(indexesToIDs(performerIDs, galleryPerformers[galleryIdxWithTwoPerformers]),
 					performerIDs[performerIdx1WithDupName],
 					performerIDs[performerIdx1WithImage],
-				),
+				)),
 			},
 			false,
 		},
@@ -554,9 +602,9 @@ func Test_galleryQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Gallery{
-				SceneIDs: append(indexesToIDs(sceneIDs, sceneGalleries.reverseLookup(galleryIdxWithScene)),
+				SceneIDs: models.NewRelatedIDs(append(indexesToIDs(sceneIDs, sceneGalleries.reverseLookup(galleryIdxWithScene)),
 					sceneIDs[sceneIdx1WithPerformer],
-				),
+				)),
 			},
 			false,
 		},
@@ -570,9 +618,9 @@ func Test_galleryQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Gallery{
-				TagIDs: append(indexesToIDs(tagIDs, galleryTags[galleryIdxWithTwoTags]),
+				TagIDs: models.NewRelatedIDs(append(indexesToIDs(tagIDs, galleryTags[galleryIdxWithTwoTags]),
 					tagIDs[tagIdx1WithScene],
-				),
+				)),
 			},
 			false,
 		},
@@ -586,9 +634,9 @@ func Test_galleryQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Gallery{
-				PerformerIDs: append(indexesToIDs(performerIDs, galleryPerformers[galleryIdxWithTwoPerformers]),
+				PerformerIDs: models.NewRelatedIDs(append(indexesToIDs(performerIDs, galleryPerformers[galleryIdxWithTwoPerformers]),
 					performerIDs[performerIdx1WithScene],
-				),
+				)),
 			},
 			false,
 		},
@@ -638,7 +686,7 @@ func Test_galleryQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Gallery{
-				SceneIDs: []int{},
+				SceneIDs: models.NewRelatedIDs([]int{}),
 			},
 			false,
 		},
@@ -652,7 +700,7 @@ func Test_galleryQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Gallery{
-				TagIDs: []int{tagIDs[tagIdx2WithGallery]},
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx2WithGallery]}),
 			},
 			false,
 		},
@@ -666,7 +714,7 @@ func Test_galleryQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Gallery{
-				PerformerIDs: []int{performerIDs[performerIdx2WithGallery]},
+				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx2WithGallery]}),
 			},
 			false,
 		},
@@ -680,7 +728,7 @@ func Test_galleryQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Gallery{
-				SceneIDs: []int{sceneIDs[sceneIdxWithGallery]},
+				SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdxWithGallery]}),
 			},
 			false,
 		},
@@ -694,7 +742,7 @@ func Test_galleryQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Gallery{
-				TagIDs: indexesToIDs(tagIDs, galleryTags[galleryIdxWithTwoTags]),
+				TagIDs: models.NewRelatedIDs(indexesToIDs(tagIDs, galleryTags[galleryIdxWithTwoTags])),
 			},
 			false,
 		},
@@ -708,7 +756,7 @@ func Test_galleryQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Gallery{
-				PerformerIDs: indexesToIDs(performerIDs, galleryPerformers[galleryIdxWithTwoPerformers]),
+				PerformerIDs: models.NewRelatedIDs(indexesToIDs(performerIDs, galleryPerformers[galleryIdxWithTwoPerformers])),
 			},
 			false,
 		},
@@ -735,6 +783,16 @@ func Test_galleryQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				t.Errorf("galleryQueryBuilder.Find() error = %v", err)
 			}
 
+			// load relationships
+			if err := loadGalleryRelationships(ctx, tt.want, got); err != nil {
+				t.Errorf("loadGalleryRelationships() error = %v", err)
+				return
+			}
+			if err := loadGalleryRelationships(ctx, tt.want, s); err != nil {
+				t.Errorf("loadGalleryRelationships() error = %v", err)
+				return
+			}
+
 			// only compare fields that were in the partial
 			if tt.partial.PerformerIDs != nil {
 				assert.Equal(tt.want.PerformerIDs, got.PerformerIDs)
@@ -851,12 +909,33 @@ func Test_galleryQueryBuilder_Find(t *testing.T) {
 
 			if got != nil {
 				clearGalleryFileIDs(got)
+
+				// load relationships
+				if err := loadGalleryRelationships(ctx, *tt.want, got); err != nil {
+					t.Errorf("loadGalleryRelationships() error = %v", err)
+					return
+				}
 			}
 			assert.Equal(tt.want, got)
 		})
 	}
 }
 
+func postFindGalleries(ctx context.Context, want []*models.Gallery, got []*models.Gallery) error {
+	for i, s := range got {
+		clearGalleryFileIDs(s)
+
+		// load relationships
+		if i < len(want) {
+			if err := loadGalleryRelationships(ctx, *want[i], s); err != nil {
+				return err
+			}
+		}
+	}
+
+	return nil
+}
+
 func Test_galleryQueryBuilder_FindMany(t *testing.T) {
 	tests := []struct {
 		name string
@@ -893,8 +972,9 @@ func Test_galleryQueryBuilder_FindMany(t *testing.T) {
 				return
 			}
 
-			for _, f := range got {
-				clearGalleryFileIDs(f)
+			if err := postFindGalleries(ctx, tt.want, got); err != nil {
+				t.Errorf("loadGalleryRelationships() error = %v", err)
+				return
 			}
 
 			assert.Equal(tt.want, got)
@@ -950,8 +1030,9 @@ func Test_galleryQueryBuilder_FindByChecksum(t *testing.T) {
 				return
 			}
 
-			for _, f := range got {
-				clearGalleryFileIDs(f)
+			if err := postFindGalleries(ctx, tt.want, got); err != nil {
+				t.Errorf("loadGalleryRelationships() error = %v", err)
+				return
 			}
 
 			assert.Equal(tt.want, got)
@@ -1012,8 +1093,9 @@ func Test_galleryQueryBuilder_FindByChecksums(t *testing.T) {
 				return
 			}
 
-			for _, f := range got {
-				clearGalleryFileIDs(f)
+			if err := postFindGalleries(ctx, tt.want, got); err != nil {
+				t.Errorf("loadGalleryRelationships() error = %v", err)
+				return
 			}
 
 			assert.Equal(tt.want, got)
@@ -1069,8 +1151,9 @@ func Test_galleryQueryBuilder_FindByPath(t *testing.T) {
 				return
 			}
 
-			for _, f := range got {
-				clearGalleryFileIDs(f)
+			if err := postFindGalleries(ctx, tt.want, got); err != nil {
+				t.Errorf("loadGalleryRelationships() error = %v", err)
+				return
 			}
 
 			assert.Equal(tt.want, got)
@@ -1110,8 +1193,9 @@ func Test_galleryQueryBuilder_FindBySceneID(t *testing.T) {
 				return
 			}
 
-			for _, f := range got {
-				clearGalleryFileIDs(f)
+			if err := postFindGalleries(ctx, tt.want, got); err != nil {
+				t.Errorf("loadGalleryRelationships() error = %v", err)
+				return
 			}
 
 			assert.Equal(tt.want, got)
@@ -1154,8 +1238,9 @@ func Test_galleryQueryBuilder_FindByImageID(t *testing.T) {
 				return
 			}
 
-			for _, f := range got {
-				clearGalleryFileIDs(f)
+			if err := postFindGalleries(ctx, tt.want, got); err != nil {
+				t.Errorf("loadGalleryRelationships() error = %v", err)
+				return
 			}
 
 			assert.Equal(tt.want, got)
@@ -2143,7 +2228,11 @@ func verifyGalleriesTagCount(t *testing.T, tagCountCriterion models.IntCriterion
 		assert.Greater(t, len(galleries), 0)
 
 		for _, gallery := range galleries {
-			verifyInt(t, len(gallery.TagIDs), tagCountCriterion)
+			if err := gallery.LoadTagIDs(ctx, sqb); err != nil {
+				t.Errorf("gallery.LoadTagIDs() error = %v", err)
+				return nil
+			}
+			verifyInt(t, len(gallery.TagIDs.List()), tagCountCriterion)
 		}
 
 		return nil
@@ -2180,7 +2269,12 @@ func verifyGalleriesPerformerCount(t *testing.T, performerCountCriterion models.
 		assert.Greater(t, len(galleries), 0)
 
 		for _, gallery := range galleries {
-			verifyInt(t, len(gallery.PerformerIDs), performerCountCriterion)
+			if err := gallery.LoadPerformerIDs(ctx, sqb); err != nil {
+				t.Errorf("gallery.LoadPerformerIDs() error = %v", err)
+				return nil
+			}
+
+			verifyInt(t, len(gallery.PerformerIDs.List()), performerCountCriterion)
 		}
 
 		return nil
@@ -116,18 +116,19 @@ func (qb *ImageStore) Create(ctx context.Context, newObject *models.ImageCreateI
 		}
 	}
 
-	if len(newObject.GalleryIDs) > 0 {
-		if err := imageGalleriesTableMgr.insertJoins(ctx, id, newObject.GalleryIDs); err != nil {
+	if newObject.PerformerIDs.Loaded() {
+		if err := imagesPerformersTableMgr.insertJoins(ctx, id, newObject.PerformerIDs.List()); err != nil {
 			return err
 		}
 	}
-	if len(newObject.PerformerIDs) > 0 {
-		if err := imagesPerformersTableMgr.insertJoins(ctx, id, newObject.PerformerIDs); err != nil {
+	if newObject.TagIDs.Loaded() {
+		if err := imagesTagsTableMgr.insertJoins(ctx, id, newObject.TagIDs.List()); err != nil {
 			return err
 		}
 	}
-	if len(newObject.TagIDs) > 0 {
-		if err := imagesTagsTableMgr.insertJoins(ctx, id, newObject.TagIDs); err != nil {
+
+	if newObject.GalleryIDs.Loaded() {
+		if err := imageGalleriesTableMgr.insertJoins(ctx, id, newObject.GalleryIDs.List()); err != nil {
 			return err
 		}
 	}
@@ -184,15 +185,23 @@ func (qb *ImageStore) Update(ctx context.Context, updatedObject *models.Image) e
 		return err
 	}
 
-	if err := imageGalleriesTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.GalleryIDs); err != nil {
-		return err
-	}
-	if err := imagesPerformersTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.PerformerIDs); err != nil {
-		return err
-	}
-	if err := imagesTagsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.TagIDs); err != nil {
-		return err
-	}
+	if updatedObject.PerformerIDs.Loaded() {
+		if err := imagesPerformersTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.PerformerIDs.List()); err != nil {
+			return err
+		}
+	}
+
+	if updatedObject.TagIDs.Loaded() {
+		if err := imagesTagsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.TagIDs.List()); err != nil {
+			return err
+		}
+	}
+
+	if updatedObject.GalleryIDs.Loaded() {
+		if err := imageGalleriesTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.GalleryIDs.List()); err != nil {
+			return err
+		}
+	}
 
 	fileIDs := make([]file.ID, len(updatedObject.Files))
 	for i, f := range updatedObject.Files {
@@ -265,16 +274,18 @@ func (qb *ImageStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo
 
 		i := f.resolve()
 
-		if err := qb.resolveRelationships(ctx, i); err != nil {
-			return err
-		}
-
 		ret = append(ret, i)
 		return nil
 	}); err != nil {
 		return nil, err
 	}
 
+	for _, i := range ret {
+		if err := qb.resolveRelationships(ctx, i); err != nil {
+			return nil, err
+		}
+	}
+
 	return ret, nil
 }
 
@@ -287,24 +298,6 @@ func (qb *ImageStore) resolveRelationships(ctx context.Context, i *models.Image)
 		return fmt.Errorf("resolving image files: %w", err)
 	}
 
-	// performers
-	i.PerformerIDs, err = qb.performersRepository().getIDs(ctx, i.ID)
-	if err != nil {
-		return fmt.Errorf("resolving image performers: %w", err)
-	}
-
-	// tags
-	i.TagIDs, err = qb.tagsRepository().getIDs(ctx, i.ID)
-	if err != nil {
-		return fmt.Errorf("resolving image tags: %w", err)
-	}
-
-	// galleries
-	i.GalleryIDs, err = qb.galleriesRepository().getIDs(ctx, i.ID)
-	if err != nil {
-		return fmt.Errorf("resolving image galleries: %w", err)
-	}
-
 	return nil
 }
 
@@ -1004,9 +997,14 @@ func (qb *ImageStore) filesRepository() *filesRepository {
 	}
 }
 
-// func (qb *imageQueryBuilder) GetGalleryIDs(ctx context.Context, imageID int) ([]int, error) {
-// 	return qb.galleriesRepository().getIDs(ctx, imageID)
-// }
+func (qb *ImageStore) AddFileID(ctx context.Context, id int, fileID file.ID) error {
+	const firstPrimary = false
+	return imagesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []file.ID{fileID})
+}
+
+func (qb *ImageStore) GetGalleryIDs(ctx context.Context, imageID int) ([]int, error) {
+	return qb.galleriesRepository().getIDs(ctx, imageID)
+}
 
 // func (qb *imageQueryBuilder) UpdateGalleries(ctx context.Context, imageID int, galleryIDs []int) error {
 // 	// Delete the existing joins and then create new ones
@@ -15,6 +15,26 @@ import (
 	"github.com/stretchr/testify/assert"
 )
 
+func loadImageRelationships(ctx context.Context, expected models.Image, actual *models.Image) error {
+	if expected.GalleryIDs.Loaded() {
+		if err := actual.LoadGalleryIDs(ctx, db.Image); err != nil {
+			return err
+		}
+	}
+	if expected.TagIDs.Loaded() {
+		if err := actual.LoadTagIDs(ctx, db.Image); err != nil {
+			return err
+		}
+	}
+	if expected.PerformerIDs.Loaded() {
+		if err := actual.LoadPerformerIDs(ctx, db.Image); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
 func Test_imageQueryBuilder_Create(t *testing.T) {
 	var (
 		title = "title"
@@ -41,9 +61,9 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
 				StudioID: &studioIDs[studioIdxWithImage],
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
-				GalleryIDs: []int{galleryIDs[galleryIdxWithImage]},
-				TagIDs: []int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]},
-				PerformerIDs: []int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]},
+				GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]}),
+				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}),
 				Files: []*file.ImageFile{},
 			},
 			false,
@@ -61,9 +81,9 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
 				},
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
-				GalleryIDs: []int{galleryIDs[galleryIdxWithImage]},
-				TagIDs: []int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]},
-				PerformerIDs: []int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]},
+				GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]}),
+				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}),
 			},
 			false,
 		},
@@ -77,21 +97,21 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
 		{
 			"invalid gallery id",
 			models.Image{
-				GalleryIDs: []int{invalidID},
+				GalleryIDs: models.NewRelatedIDs([]int{invalidID}),
 			},
 			true,
 		},
 		{
 			"invalid tag id",
 			models.Image{
-				TagIDs: []int{invalidID},
+				TagIDs: models.NewRelatedIDs([]int{invalidID}),
 			},
 			true,
 		},
 		{
 			"invalid performer id",
 			models.Image{
-				PerformerIDs: []int{invalidID},
+				PerformerIDs: models.NewRelatedIDs([]int{invalidID}),
 			},
 			true,
 		},
@@ -126,6 +146,12 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
 			copy := tt.newObject
 			copy.ID = s.ID
 
+			// load relationships
+			if err := loadImageRelationships(ctx, copy, &s); err != nil {
+				t.Errorf("loadImageRelationships() error = %v", err)
+				return
+			}
+
 			assert.Equal(copy, s)
 
 			// ensure can find the image
@@ -134,6 +160,12 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
 				t.Errorf("imageQueryBuilder.Find() error = %v", err)
 			}
 
+			// load relationships
+			if err := loadImageRelationships(ctx, copy, found); err != nil {
+				t.Errorf("loadImageRelationships() error = %v", err)
+				return
+			}
+
 			assert.Equal(copy, *found)
 
 			return
@@ -181,9 +213,9 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
 				},
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
-				GalleryIDs: []int{galleryIDs[galleryIdxWithImage]},
-				TagIDs: []int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]},
-				PerformerIDs: []int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]},
+				GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]}),
+				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}),
 			},
 			false,
 		},
@@ -194,9 +226,9 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
 				Files: []*file.ImageFile{
 					makeImageFileWithID(imageIdxWithGallery),
 				},
-				GalleryIDs: []int{},
-				TagIDs: []int{},
-				PerformerIDs: []int{},
+				GalleryIDs: models.NewRelatedIDs([]int{}),
+				TagIDs: models.NewRelatedIDs([]int{}),
+				PerformerIDs: models.NewRelatedIDs([]int{}),
 				Organized: true,
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
@@ -210,9 +242,9 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
 				Files: []*file.ImageFile{
 					makeImageFileWithID(imageIdxWithGallery),
 				},
-				GalleryIDs: []int{},
-				TagIDs: []int{},
-				PerformerIDs: []int{},
+				GalleryIDs: models.NewRelatedIDs([]int{}),
+				TagIDs: models.NewRelatedIDs([]int{}),
+				PerformerIDs: models.NewRelatedIDs([]int{}),
 				Organized: true,
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
@@ -226,9 +258,9 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
 				Files: []*file.ImageFile{
 					makeImageFileWithID(imageIdxWithTag),
 				},
-				GalleryIDs: []int{},
-				TagIDs: []int{},
-				PerformerIDs: []int{},
+				GalleryIDs: models.NewRelatedIDs([]int{}),
+				TagIDs: models.NewRelatedIDs([]int{}),
+				PerformerIDs: models.NewRelatedIDs([]int{}),
 				Organized: true,
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
@@ -242,9 +274,9 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
 				Files: []*file.ImageFile{
 					makeImageFileWithID(imageIdxWithPerformer),
 				},
-				GalleryIDs: []int{},
-				TagIDs: []int{},
-				PerformerIDs: []int{},
+				GalleryIDs: models.NewRelatedIDs([]int{}),
+				TagIDs: models.NewRelatedIDs([]int{}),
+				PerformerIDs: models.NewRelatedIDs([]int{}),
 				Organized: true,
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
@@ -273,7 +305,7 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
 					makeImageFileWithID(imageIdxWithGallery),
 				},
 				Organized: true,
-				GalleryIDs: []int{invalidID},
+				GalleryIDs: models.NewRelatedIDs([]int{invalidID}),
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
 			},
@@ -287,7 +319,7 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
 					makeImageFileWithID(imageIdxWithGallery),
 				},
 				Organized: true,
-				TagIDs: []int{invalidID},
+				TagIDs: models.NewRelatedIDs([]int{invalidID}),
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
 			},
@@ -301,7 +333,7 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
 					makeImageFileWithID(imageIdxWithGallery),
 				},
 				Organized: true,
-				PerformerIDs: []int{invalidID},
+				PerformerIDs: models.NewRelatedIDs([]int{invalidID}),
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
 			},
@@ -329,6 +361,12 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
 				t.Errorf("imageQueryBuilder.Find() error = %v", err)
 			}
 
+			// load relationships
+			if err := loadImageRelationships(ctx, copy, s); err != nil {
+				t.Errorf("loadImageRelationships() error = %v", err)
+				return
+			}
+
 			assert.Equal(copy, *s)
 
 			return
@@ -400,9 +438,9 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) {
 				},
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
-				GalleryIDs: []int{galleryIDs[galleryIdxWithImage]},
-				TagIDs: []int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]},
-				PerformerIDs: []int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]},
+				GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]}),
+				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}),
 			},
 			false,
 		},
@@ -416,9 +454,9 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) {
 				Files: []*file.ImageFile{
 					makeImageFile(imageIdx1WithGallery),
 				},
-				GalleryIDs: []int{},
-				TagIDs: []int{},
-				PerformerIDs: []int{},
+				GalleryIDs: models.NewRelatedIDs([]int{}),
+				TagIDs: models.NewRelatedIDs([]int{}),
+				PerformerIDs: models.NewRelatedIDs([]int{}),
 			},
 			false,
 		},
@@ -447,6 +485,12 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) {
 			}
 
 			clearImageFileIDs(got)
+			// load relationships
+			if err := loadImageRelationships(ctx, tt.want, got); err != nil {
+				t.Errorf("loadImageRelationships() error = %v", err)
+				return
+			}
+
 			assert.Equal(tt.want, *got)
 
 			s, err := qb.Find(ctx, tt.id)
@@ -455,6 +499,11 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) {
 			}
 
 			clearImageFileIDs(s)
+			// load relationships
+			if err := loadImageRelationships(ctx, tt.want, s); err != nil {
+				t.Errorf("loadImageRelationships() error = %v", err)
+				return
+			}
 			assert.Equal(tt.want, *s)
 		})
 	}
@@ -478,10 +527,10 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Image{
-				GalleryIDs: append(indexesToIDs(galleryIDs, imageGalleries[imageIdxWithGallery]),
+				GalleryIDs: models.NewRelatedIDs(append(indexesToIDs(galleryIDs, imageGalleries[imageIdxWithGallery]),
 					galleryIDs[galleryIdx1WithImage],
 					galleryIDs[galleryIdx1WithPerformer],
-				),
+				)),
 			},
 			false,
 		},
@@ -495,10 +544,10 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Image{
-				TagIDs: append(indexesToIDs(tagIDs, imageTags[imageIdxWithTwoTags]),
+				TagIDs: models.NewRelatedIDs(append(indexesToIDs(tagIDs, imageTags[imageIdxWithTwoTags]),
 					tagIDs[tagIdx1WithDupName],
 					tagIDs[tagIdx1WithGallery],
-				),
+				)),
 			},
 			false,
 		},
@@ -512,10 +561,10 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Image{
-				PerformerIDs: append(indexesToIDs(performerIDs, imagePerformers[imageIdxWithTwoPerformers]),
+				PerformerIDs: models.NewRelatedIDs(append(indexesToIDs(performerIDs, imagePerformers[imageIdxWithTwoPerformers]),
 					performerIDs[performerIdx1WithDupName],
 					performerIDs[performerIdx1WithGallery],
-				),
+				)),
 			},
 			false,
 		},
@@ -529,9 +578,9 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Image{
-				GalleryIDs: append(indexesToIDs(galleryIDs, imageGalleries[imageIdxWithGallery]),
+				GalleryIDs: models.NewRelatedIDs(append(indexesToIDs(galleryIDs, imageGalleries[imageIdxWithGallery]),
 					galleryIDs[galleryIdx1WithPerformer],
-				),
+				)),
 			},
 			false,
 		},
@@ -545,9 +594,9 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Image{
-				TagIDs: append(indexesToIDs(tagIDs, imageTags[imageIdxWithTwoTags]),
+				TagIDs: models.NewRelatedIDs(append(indexesToIDs(tagIDs, imageTags[imageIdxWithTwoTags]),
 					tagIDs[tagIdx1WithGallery],
-				),
+				)),
 			},
 			false,
 		},
@@ -561,9 +610,9 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Image{
-				PerformerIDs: append(indexesToIDs(performerIDs, imagePerformers[imageIdxWithTwoPerformers]),
+				PerformerIDs: models.NewRelatedIDs(append(indexesToIDs(performerIDs, imagePerformers[imageIdxWithTwoPerformers]),
 					performerIDs[performerIdx1WithGallery],
-				),
+				)),
 			},
 			false,
 		},
@@ -613,7 +662,7 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Image{
-				GalleryIDs: []int{},
+				GalleryIDs: models.NewRelatedIDs([]int{}),
 			},
 			false,
 		},
@@ -627,7 +676,7 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Image{
-				TagIDs: []int{tagIDs[tagIdx2WithImage]},
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx2WithImage]}),
 			},
 			false,
 		},
@@ -641,7 +690,7 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Image{
-				PerformerIDs: []int{performerIDs[performerIdx2WithImage]},
+				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx2WithImage]}),
 			},
 			false,
 		},
@@ -655,7 +704,7 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Image{
-				GalleryIDs: []int{galleryIDs[galleryIdxWithImage]},
+				GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
 			},
 			false,
 		},
@@ -669,7 +718,7 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Image{
-				TagIDs: indexesToIDs(tagIDs, imageTags[imageIdxWithTwoTags]),
+				TagIDs: models.NewRelatedIDs(indexesToIDs(tagIDs, imageTags[imageIdxWithTwoTags])),
 			},
 			false,
 		},
@@ -683,7 +732,7 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Image{
-				PerformerIDs: indexesToIDs(performerIDs, imagePerformers[imageIdxWithTwoPerformers]),
+				PerformerIDs: models.NewRelatedIDs(indexesToIDs(performerIDs, imagePerformers[imageIdxWithTwoPerformers])),
 			},
 			false,
 		},
@@ -710,6 +759,16 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				t.Errorf("imageQueryBuilder.Find() error = %v", err)
 			}
 
+			// load relationships
+			if err := loadImageRelationships(ctx, tt.want, got); err != nil {
+				t.Errorf("loadImageRelationships() error = %v", err)
+				return
+			}
+			if err := loadImageRelationships(ctx, tt.want, s); err != nil {
+				t.Errorf("loadImageRelationships() error = %v", err)
+				return
+			}
+
 			// only compare fields that were in the partial
 			if tt.partial.PerformerIDs != nil {
 				assert.Equal(tt.want.PerformerIDs, got.PerformerIDs)
@@ -944,12 +1003,33 @@ func Test_imageQueryBuilder_Find(t *testing.T) {
 
 			if got != nil {
 				clearImageFileIDs(got)
+
+				// load relationships
+				if err := loadImageRelationships(ctx, *tt.want, got); err != nil {
+					t.Errorf("loadImageRelationships() error = %v", err)
+					return
+				}
 			}
 			assert.Equal(tt.want, got)
 		})
 	}
 }
 
+func postFindImages(ctx context.Context, want []*models.Image, got []*models.Image) error {
+	for i, s := range got {
+		clearImageFileIDs(s)
+
+		// load relationships
+		if i < len(want) {
+			if err := loadImageRelationships(ctx, *want[i], s); err != nil {
+				return err
+			}
+		}
+	}
+
+	return nil
+}
+
 func Test_imageQueryBuilder_FindMany(t *testing.T) {
 	tests := []struct {
 		name string
@@ -985,8 +1065,9 @@ func Test_imageQueryBuilder_FindMany(t *testing.T) {
 				return
 			}
 
-			for _, f := range got {
-				clearImageFileIDs(f)
+			if err := postFindImages(ctx, tt.want, got); err != nil {
+				t.Errorf("loadImageRelationships() error = %v", err)
+				return
 			}
 
 			if !reflect.DeepEqual(got, tt.want) {
@@ -1044,8 +1125,9 @@ func Test_imageQueryBuilder_FindByChecksum(t *testing.T) {
 				return
 			}
 
-			for _, f := range got {
-				clearImageFileIDs(f)
+			if err := postFindImages(ctx, tt.want, got); err != nil {
+				t.Errorf("loadImageRelationships() error = %v", err)
+				return
 			}
 
 			assert.Equal(tt.want, got)
@@ -1121,8 +1203,9 @@ func Test_imageQueryBuilder_FindByFingerprints(t *testing.T) {
 				return
 			}
 
-			for _, f := range got {
-				clearImageFileIDs(f)
+			if err := postFindImages(ctx, tt.want, got); err != nil {
+				t.Errorf("loadImageRelationships() error = %v", err)
+				return
 			}
 
 			assert.Equal(tt.want, got)
@@ -1162,8 +1245,9 @@ func Test_imageQueryBuilder_FindByGalleryID(t *testing.T) {
 				return
 			}
 
-			for _, f := range got {
-				clearImageFileIDs(f)
+			if err := postFindImages(ctx, tt.want, got); err != nil {
+				t.Errorf("loadImageRelationships() error = %v", err)
+				return
 			}
 
 			assert.Equal(tt.want, got)
@@ -6,7 +6,10 @@ import (
 	"errors"
 	"fmt"
 
+	"github.com/doug-martin/goqu/v9"
+	"github.com/jmoiron/sqlx"
 	"github.com/stashapp/stash/pkg/models"
+	"github.com/stashapp/stash/pkg/sliceutil/intslice"
 )
 
 const movieTable = "movies"
@@ -66,21 +69,45 @@ func (qb *movieQueryBuilder) Find(ctx context.Context, id int) (*models.Movie, e
 }
 
 func (qb *movieQueryBuilder) FindMany(ctx context.Context, ids []int) ([]*models.Movie, error) {
-	var movies []*models.Movie
-	for _, id := range ids {
-		movie, err := qb.Find(ctx, id)
-		if err != nil {
-			return nil, err
-		}
-
-		if movie == nil {
-			return nil, fmt.Errorf("movie with id %d not found", id)
-		}
-
-		movies = append(movies, movie)
-	}
-
-	return movies, nil
+	tableMgr := movieTableMgr
+	q := goqu.Select("*").From(tableMgr.table).Where(tableMgr.byIDInts(ids...))
+	unsorted, err := qb.getMany(ctx, q)
+	if err != nil {
+		return nil, err
+	}
+
+	ret := make([]*models.Movie, len(ids))
+
+	for _, s := range unsorted {
+		i := intslice.IntIndex(ids, s.ID)
+		ret[i] = s
+	}
+
+	for i := range ret {
+		if ret[i] == nil {
+			return nil, fmt.Errorf("movie with id %d not found", ids[i])
+		}
+	}
+
+	return ret, nil
+}
+
+func (qb *movieQueryBuilder) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*models.Movie, error) {
+	const single = false
+	var ret []*models.Movie
+	if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error {
+		var f models.Movie
+		if err := r.StructScan(&f); err != nil {
+			return err
+		}
+
+		ret = append(ret, &f)
+		return nil
+	}); err != nil {
+		return nil, err
+	}
+
+	return ret, nil
 }
 
 func (qb *movieQueryBuilder) FindByName(ctx context.Context, name string, nocase bool) (*models.Movie, error) {
@@ -156,16 +183,11 @@ func (qb *movieQueryBuilder) Query(ctx context.Context, movieFilter *models.Movi
 		return nil, 0, err
 	}
 
-	var movies []*models.Movie
-	for _, id := range idsResult {
-		movie, err := qb.Find(ctx, id)
-		if err != nil {
-			return nil, 0, err
-		}
-
-		movies = append(movies, movie)
-	}
+	movies, err := qb.FindMany(ctx, idsResult)
+	if err != nil {
+		return nil, 0, err
+	}
 
 	return movies, countResult, nil
 }
 
@@ -7,7 +7,10 @@ import (
 	"fmt"
 	"strings"
 
+	"github.com/doug-martin/goqu/v9"
+	"github.com/jmoiron/sqlx"
 	"github.com/stashapp/stash/pkg/models"
+	"github.com/stashapp/stash/pkg/sliceutil/intslice"
 	"github.com/stashapp/stash/pkg/utils"
 )
 
@@ -92,21 +95,45 @@ func (qb *performerQueryBuilder) Find(ctx context.Context, id int) (*models.Perf
 }
 
 func (qb *performerQueryBuilder) FindMany(ctx context.Context, ids []int) ([]*models.Performer, error) {
-	var performers []*models.Performer
-	for _, id := range ids {
-		performer, err := qb.Find(ctx, id)
-		if err != nil {
-			return nil, err
-		}
-
-		if performer == nil {
-			return nil, fmt.Errorf("performer with id %d not found", id)
-		}
-
-		performers = append(performers, performer)
-	}
-
-	return performers, nil
+	tableMgr := performerTableMgr
+	q := goqu.Select("*").From(tableMgr.table).Where(tableMgr.byIDInts(ids...))
+	unsorted, err := qb.getMany(ctx, q)
+	if err != nil {
+		return nil, err
+	}
+
+	ret := make([]*models.Performer, len(ids))
+
+	for _, s := range unsorted {
+		i := intslice.IntIndex(ids, s.ID)
+		ret[i] = s
+	}
+
+	for i := range ret {
+		if ret[i] == nil {
+			return nil, fmt.Errorf("performer with id %d not found", ids[i])
+		}
+	}
+
+	return ret, nil
+}
+
+func (qb *performerQueryBuilder) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*models.Performer, error) {
+	const single = false
+	var ret []*models.Performer
+	if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error {
+		var f models.Performer
+		if err := r.StructScan(&f); err != nil {
+			return err
+		}
+
+		ret = append(ret, &f)
+		return nil
+	}); err != nil {
+		return nil, err
+	}
+
+	return ret, nil
 }
 
 func (qb *performerQueryBuilder) FindBySceneID(ctx context.Context, sceneID int) ([]*models.Performer, error) {
@@ -324,14 +351,10 @@ func (qb *performerQueryBuilder) Query(ctx context.Context, performerFilter *mod
 		return nil, 0, err
 	}
 
-	var performers []*models.Performer
-	for _, id := range idsResult {
-		performer, err := qb.Find(ctx, id)
-		if err != nil {
-			return nil, 0, err
-		}
-		performers = append(performers, performer)
-	}
+	performers, err := qb.FindMany(ctx, idsResult)
+	if err != nil {
+		return nil, 0, err
+	}
 
 	return performers, countResult, nil
 }
@@ -600,11 +623,11 @@ func (qb *performerQueryBuilder) stashIDRepository() *stashIDRepository {
 	}
 }
 
-func (qb *performerQueryBuilder) GetStashIDs(ctx context.Context, performerID int) ([]*models.StashID, error) {
+func (qb *performerQueryBuilder) GetStashIDs(ctx context.Context, performerID int) ([]models.StashID, error) {
 	return qb.stashIDRepository().get(ctx, performerID)
 }
 
-func (qb *performerQueryBuilder) UpdateStashIDs(ctx context.Context, performerID int, stashIDs []*models.StashID) error {
+func (qb *performerQueryBuilder) UpdateStashIDs(ctx context.Context, performerID int, stashIDs []models.StashID) error {
 	return qb.stashIDRepository().replace(ctx, performerID, stashIDs)
 }
 
@@ -441,24 +441,24 @@ type stashIDRepository struct {
 	repository
 }
 
-type stashIDs []*models.StashID
+type stashIDs []models.StashID
 
 func (s *stashIDs) Append(o interface{}) {
-	*s = append(*s, o.(*models.StashID))
+	*s = append(*s, *o.(*models.StashID))
 }
 
 func (s *stashIDs) New() interface{} {
 	return &models.StashID{}
 }
 
-func (r *stashIDRepository) get(ctx context.Context, id int) ([]*models.StashID, error) {
+func (r *stashIDRepository) get(ctx context.Context, id int) ([]models.StashID, error) {
 	query := fmt.Sprintf("SELECT stash_id, endpoint from %s WHERE %s = ?", r.tableName, r.idColumn)
 	var ret stashIDs
 	err := r.query(ctx, query, []interface{}{id}, &ret)
-	return []*models.StashID(ret), err
+	return []models.StashID(ret), err
 }
 
-func (r *stashIDRepository) replace(ctx context.Context, id int, newIDs []*models.StashID) error {
+func (r *stashIDRepository) replace(ctx context.Context, id int, newIDs []models.StashID) error {
 	if err := r.destroy(ctx, []int{id}); err != nil {
 		return err
 	}
@@ -157,21 +157,34 @@ func (qb *SceneStore) Create(ctx context.Context, newObject *models.Scene, fileI
 		}
 	}
 
-	if err := scenesPerformersTableMgr.insertJoins(ctx, id, newObject.PerformerIDs); err != nil {
-		return err
-	}
-	if err := scenesTagsTableMgr.insertJoins(ctx, id, newObject.TagIDs); err != nil {
-		return err
-	}
-	if err := scenesGalleriesTableMgr.insertJoins(ctx, id, newObject.GalleryIDs); err != nil {
-		return err
-	}
-	if err := scenesStashIDsTableMgr.insertJoins(ctx, id, newObject.StashIDs); err != nil {
-		return err
-	}
-	if err := scenesMoviesTableMgr.insertJoins(ctx, id, newObject.Movies); err != nil {
-		return err
-	}
+	if newObject.PerformerIDs.Loaded() {
+		if err := scenesPerformersTableMgr.insertJoins(ctx, id, newObject.PerformerIDs.List()); err != nil {
+			return err
+		}
+	}
+	if newObject.TagIDs.Loaded() {
+		if err := scenesTagsTableMgr.insertJoins(ctx, id, newObject.TagIDs.List()); err != nil {
+			return err
+		}
+	}
+
+	if newObject.GalleryIDs.Loaded() {
+		if err := scenesGalleriesTableMgr.insertJoins(ctx, id, newObject.GalleryIDs.List()); err != nil {
+			return err
+		}
+	}
+
+	if newObject.StashIDs.Loaded() {
+		if err := scenesStashIDsTableMgr.insertJoins(ctx, id, newObject.StashIDs.List()); err != nil {
+			return err
+		}
+	}
+
+	if newObject.Movies.Loaded() {
+		if err := scenesMoviesTableMgr.insertJoins(ctx, id, newObject.Movies.List()); err != nil {
+			return err
+		}
+	}
 
 	updated, err := qb.find(ctx, id)
 	if err != nil {
@@ -235,21 +248,35 @@ func (qb *SceneStore) Update(ctx context.Context, updatedObject *models.Scene) e
 		return err
 	}
 
-	if err := scenesPerformersTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.PerformerIDs); err != nil {
-		return err
-	}
-	if err := scenesTagsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.TagIDs); err != nil {
-		return err
-	}
-	if err := scenesGalleriesTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.GalleryIDs); err != nil {
-		return err
-	}
-	if err := scenesStashIDsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.StashIDs); err != nil {
-		return err
-	}
-	if err := scenesMoviesTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.Movies); err != nil {
-		return err
-	}
+	if updatedObject.PerformerIDs.Loaded() {
+		if err := scenesPerformersTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.PerformerIDs.List()); err != nil {
+			return err
+		}
+	}
+
+	if updatedObject.TagIDs.Loaded() {
+		if err := scenesTagsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.TagIDs.List()); err != nil {
+			return err
+		}
+	}
+
+	if updatedObject.GalleryIDs.Loaded() {
+		if err := scenesGalleriesTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.GalleryIDs.List()); err != nil {
+			return err
+		}
+	}
+
+	if updatedObject.StashIDs.Loaded() {
+		if err := scenesStashIDsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.StashIDs.List()); err != nil {
+			return err
+		}
+	}
+
+	if updatedObject.Movies.Loaded() {
+		if err := scenesMoviesTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.Movies.List()); err != nil {
+			return err
+		}
+	}
 
 	fileIDs := make([]file.ID, len(updatedObject.Files))
 	for i, f := range updatedObject.Files {
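
With the relationship fields now wrapped in lazily-loaded Related* types, Create and Update only touch a join table when that relationship was actually loaded on the model. A rough sketch of the Loaded()/List() contract these checks rely on (a simplified, hypothetical version; the real models.RelatedIDs implementation may differ in detail):

package main

import "fmt"

// RelatedIDs is a simplified, hypothetical version of a lazily-loaded ID list:
// the zero value means "not loaded", so writers can skip untouched relationships.
type RelatedIDs struct {
	list   []int
	loaded bool
}

// NewRelatedIDs returns a loaded list, mirroring how the tests construct expected values.
func NewRelatedIDs(ids []int) RelatedIDs { return RelatedIDs{list: ids, loaded: true} }

func (r RelatedIDs) Loaded() bool { return r.loaded }

func (r RelatedIDs) List() []int {
	if !r.loaded {
		panic("list not loaded")
	}
	return r.list
}

func main() {
	var tagIDs RelatedIDs // zero value: not loaded, so an update would skip the join table
	fmt.Println(tagIDs.Loaded())

	performerIDs := NewRelatedIDs([]int{1, 2})
	if performerIDs.Loaded() {
		fmt.Println(performerIDs.List())
	}
}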
@@ -333,16 +360,18 @@ func (qb *SceneStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo
 
 		s := f.resolve()
 
-		if err := qb.resolveRelationships(ctx, s); err != nil {
-			return err
-		}
-
 		ret = append(ret, s)
 		return nil
 	}); err != nil {
 		return nil, err
 	}
 
+	for _, s := range ret {
+		if err := qb.resolveRelationships(ctx, s); err != nil {
+			return nil, err
+		}
+	}
+
 	return ret, nil
 }
 
@@ -355,36 +384,6 @@ func (qb *SceneStore) resolveRelationships(ctx context.Context, s *models.Scene)
 		return fmt.Errorf("resolving scene files: %w", err)
 	}
 
-	// movies
-	s.Movies, err = qb.getMovies(ctx, s.ID)
-	if err != nil {
-		return fmt.Errorf("resolving scene movies: %w", err)
-	}
-
-	// performers
-	s.PerformerIDs, err = qb.performersRepository().getIDs(ctx, s.ID)
-	if err != nil {
-		return fmt.Errorf("resolving scene performers: %w", err)
-	}
-
-	// tags
-	s.TagIDs, err = qb.tagsRepository().getIDs(ctx, s.ID)
-	if err != nil {
-		return fmt.Errorf("resolving scene tags: %w", err)
-	}
-
-	// galleries
-	s.GalleryIDs, err = qb.galleriesRepository().getIDs(ctx, s.ID)
-	if err != nil {
-		return fmt.Errorf("resolving scene galleries: %w", err)
-	}
-
-	// stash ids
-	s.StashIDs, err = qb.getStashIDs(ctx, s.ID)
-	if err != nil {
-		return fmt.Errorf("resolving scene stash ids: %w", err)
-	}
-
 	return nil
 }
 
@@ -412,37 +411,6 @@ func (qb *SceneStore) getFiles(ctx context.Context, id int) ([]*file.VideoFile,
 	return ret, nil
 }
 
-func (qb *SceneStore) getMovies(ctx context.Context, id int) (ret []models.MoviesScenes, err error) {
-	ret = []models.MoviesScenes{}
-	if err := qb.moviesRepository().getAll(ctx, id, func(rows *sqlx.Rows) error {
-		var ms moviesScenesRow
-		if err := rows.StructScan(&ms); err != nil {
-			return err
-		}
-
-		ret = append(ret, ms.resolve(id))
-		return nil
-	}); err != nil {
-		return nil, err
-	}
-
-	return ret, nil
-}
-
-func (qb *SceneStore) getStashIDs(ctx context.Context, id int) ([]models.StashID, error) {
-	stashIDs, err := qb.stashIDRepository().get(ctx, id)
-	if err != nil {
-		return nil, err
-	}
-
-	ret := make([]models.StashID, len(stashIDs))
-	for i, sid := range stashIDs {
-		ret[i] = *sid
-	}
-
-	return ret, nil
-}
-
 func (qb *SceneStore) find(ctx context.Context, id int) (*models.Scene, error) {
 	q := qb.selectDataset().Where(qb.tableMgr.byID(id))
 
@@ -1399,6 +1367,24 @@ func (qb *SceneStore) moviesRepository() *repository {
 	}
 }
 
+func (qb *SceneStore) GetMovies(ctx context.Context, id int) (ret []models.MoviesScenes, err error) {
+	ret = []models.MoviesScenes{}
+
+	if err := qb.moviesRepository().getAll(ctx, id, func(rows *sqlx.Rows) error {
+		var ms moviesScenesRow
+		if err := rows.StructScan(&ms); err != nil {
+			return err
+		}
+
+		ret = append(ret, ms.resolve(id))
+		return nil
+	}); err != nil {
+		return nil, err
+	}
+
+	return ret, nil
+}
+
 func (qb *SceneStore) filesRepository() *filesRepository {
 	return &filesRepository{
 		repository: repository{
@@ -1409,6 +1395,11 @@ func (qb *SceneStore) filesRepository() *filesRepository {
 	}
 }
 
+func (qb *SceneStore) AddFileID(ctx context.Context, id int, fileID file.ID) error {
+	const firstPrimary = false
+	return scenesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []file.ID{fileID})
+}
+
 func (qb *SceneStore) performersRepository() *joinRepository {
 	return &joinRepository{
 		repository: repository{
@@ -1420,6 +1411,10 @@ func (qb *SceneStore) performersRepository() *joinRepository {
 	}
 }
 
+func (qb *SceneStore) GetPerformerIDs(ctx context.Context, id int) ([]int, error) {
+	return qb.performersRepository().getIDs(ctx, id)
+}
+
 func (qb *SceneStore) tagsRepository() *joinRepository {
 	return &joinRepository{
 		repository: repository{
@@ -1431,6 +1426,10 @@ func (qb *SceneStore) tagsRepository() *joinRepository {
 	}
 }
 
+func (qb *SceneStore) GetTagIDs(ctx context.Context, id int) ([]int, error) {
+	return qb.tagsRepository().getIDs(ctx, id)
+}
+
 func (qb *SceneStore) galleriesRepository() *joinRepository {
 	return &joinRepository{
 		repository: repository{
@@ -1442,6 +1441,14 @@ func (qb *SceneStore) galleriesRepository() *joinRepository {
 	}
 }
 
+func (qb *SceneStore) GetGalleryIDs(ctx context.Context, id int) ([]int, error) {
+	return qb.galleriesRepository().getIDs(ctx, id)
+}
+
+func (qb *SceneStore) AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error {
+	return scenesGalleriesTableMgr.addJoins(ctx, sceneID, galleryIDs)
+}
+
 func (qb *SceneStore) stashIDRepository() *stashIDRepository {
 	return &stashIDRepository{
 		repository{
@@ -1452,6 +1459,10 @@ func (qb *SceneStore) stashIDRepository() *stashIDRepository {
 	}
 }
 
+func (qb *SceneStore) GetStashIDs(ctx context.Context, sceneID int) ([]models.StashID, error) {
+	return qb.stashIDRepository().get(ctx, sceneID)
+}
+
 func (qb *SceneStore) FindDuplicates(ctx context.Context, distance int) ([][]*models.Scene, error) {
 	var dupeIds [][]int
 	if distance == 0 {
@@ -157,7 +157,13 @@ func TestMarkerQuerySceneTags(t *testing.T) {
 				t.Errorf("error getting marker tag ids: %v", err)
 				return
 			}
-			tagIDs := s.TagIDs
+
+			if err := s.LoadTagIDs(ctx, db.Scene); err != nil {
+				t.Errorf("error getting marker tag ids: %v", err)
+				return
+			}
+
+			tagIDs := s.TagIDs.List()
 			if markerFilter.SceneTags.Modifier == models.CriterionModifierIsNull && len(tagIDs) > 0 {
 				t.Errorf("expected marker %d to have no scene tags - found %d", m.ID, len(tagIDs))
 			}
@@ -21,6 +21,36 @@ import (
 	"github.com/stretchr/testify/assert"
 )
 
+func loadSceneRelationships(ctx context.Context, expected models.Scene, actual *models.Scene) error {
+	if expected.GalleryIDs.Loaded() {
+		if err := actual.LoadGalleryIDs(ctx, db.Scene); err != nil {
+			return err
+		}
+	}
+	if expected.TagIDs.Loaded() {
+		if err := actual.LoadTagIDs(ctx, db.Scene); err != nil {
+			return err
+		}
+	}
+	if expected.PerformerIDs.Loaded() {
+		if err := actual.LoadPerformerIDs(ctx, db.Scene); err != nil {
+			return err
+		}
+	}
+	if expected.Movies.Loaded() {
+		if err := actual.LoadMovies(ctx, db.Scene); err != nil {
+			return err
+		}
+	}
+	if expected.StashIDs.Loaded() {
+		if err := actual.LoadStashIDs(ctx, db.Scene); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
 func Test_sceneQueryBuilder_Create(t *testing.T) {
 	var (
 		title = "title"
@@ -60,10 +90,10 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
 				StudioID:  &studioIDs[studioIdxWithScene],
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
-				GalleryIDs:   []int{galleryIDs[galleryIdxWithScene]},
-				TagIDs:       []int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]},
-				PerformerIDs: []int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]},
-				Movies: []models.MoviesScenes{
+				GalleryIDs:   models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
+				TagIDs:       models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
+				Movies: models.NewRelatedMovies([]models.MoviesScenes{
 					{
 						MovieID:    movieIDs[movieIdxWithScene],
 						SceneIndex: &sceneIndex,
@@ -72,8 +102,8 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
 						MovieID:    movieIDs[movieIdxWithStudio],
 						SceneIndex: &sceneIndex2,
 					},
-				},
-				StashIDs: []models.StashID{
+				}),
+				StashIDs: models.NewRelatedStashIDs([]models.StashID{
 					{
 						StashID:  stashID1,
 						Endpoint: endpoint1,
@@ -82,7 +112,7 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
 						StashID:  stashID2,
 						Endpoint: endpoint2,
 					},
-				},
+				}),
 				Files: []*file.VideoFile{},
 			},
 			false,
@@ -103,10 +133,10 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
 				},
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
-				GalleryIDs:   []int{galleryIDs[galleryIdxWithScene]},
-				TagIDs:       []int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]},
-				PerformerIDs: []int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]},
-				Movies: []models.MoviesScenes{
+				GalleryIDs:   models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
+				TagIDs:       models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
+				Movies: models.NewRelatedMovies([]models.MoviesScenes{
 					{
 						MovieID:    movieIDs[movieIdxWithScene],
 						SceneIndex: &sceneIndex,
@@ -115,8 +145,8 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
 						MovieID:    movieIDs[movieIdxWithStudio],
 						SceneIndex: &sceneIndex2,
 					},
-				},
-				StashIDs: []models.StashID{
+				}),
+				StashIDs: models.NewRelatedStashIDs([]models.StashID{
 					{
 						StashID:  stashID1,
 						Endpoint: endpoint1,
@@ -125,7 +155,7 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
 						StashID:  stashID2,
 						Endpoint: endpoint2,
 					},
-				},
+				}),
 			},
 			false,
 		},
@@ -139,33 +169,33 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
 		{
 			"invalid gallery id",
 			models.Scene{
-				GalleryIDs: []int{invalidID},
+				GalleryIDs: models.NewRelatedIDs([]int{invalidID}),
 			},
 			true,
 		},
 		{
 			"invalid tag id",
 			models.Scene{
-				TagIDs: []int{invalidID},
+				TagIDs: models.NewRelatedIDs([]int{invalidID}),
 			},
 			true,
 		},
 		{
 			"invalid performer id",
 			models.Scene{
-				PerformerIDs: []int{invalidID},
+				PerformerIDs: models.NewRelatedIDs([]int{invalidID}),
 			},
 			true,
 		},
 		{
 			"invalid movie id",
 			models.Scene{
-				Movies: []models.MoviesScenes{
+				Movies: models.NewRelatedMovies([]models.MoviesScenes{
 					{
 						MovieID:    invalidID,
 						SceneIndex: &sceneIndex,
 					},
-				},
+				}),
 			},
 			true,
 		},
@@ -197,6 +227,12 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
 			copy := tt.newObject
 			copy.ID = s.ID
 
+			// load relationships
+			if err := loadSceneRelationships(ctx, copy, &s); err != nil {
+				t.Errorf("loadSceneRelationships() error = %v", err)
+				return
+			}
+
 			assert.Equal(copy, s)
 
 			// ensure can find the scene
@@ -208,6 +244,12 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
 			if !assert.NotNil(found) {
 				return
 			}
+
+			// load relationships
+			if err := loadSceneRelationships(ctx, copy, found); err != nil {
+				t.Errorf("loadSceneRelationships() error = %v", err)
+				return
+			}
 			assert.Equal(copy, *found)
 
 			return
@@ -268,10 +310,10 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
 				StudioID:  &studioIDs[studioIdxWithScene],
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
-				GalleryIDs:   []int{galleryIDs[galleryIdxWithScene]},
-				TagIDs:       []int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]},
-				PerformerIDs: []int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]},
-				Movies: []models.MoviesScenes{
+				GalleryIDs:   models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
+				TagIDs:       models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
+				Movies: models.NewRelatedMovies([]models.MoviesScenes{
 					{
 						MovieID:    movieIDs[movieIdxWithScene],
 						SceneIndex: &sceneIndex,
@@ -280,8 +322,8 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
 						MovieID:    movieIDs[movieIdxWithStudio],
 						SceneIndex: &sceneIndex2,
 					},
-				},
-				StashIDs: []models.StashID{
+				}),
+				StashIDs: models.NewRelatedStashIDs([]models.StashID{
 					{
 						StashID:  stashID1,
 						Endpoint: endpoint1,
@@ -290,7 +332,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
 						StashID:  stashID2,
 						Endpoint: endpoint2,
 					},
-				},
+				}),
 			},
 			false,
 		},
@@ -301,11 +343,11 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
 				Files: []*file.VideoFile{
 					makeSceneFileWithID(sceneIdxWithSpacedName),
 				},
-				GalleryIDs:   []int{},
-				TagIDs:       []int{},
-				PerformerIDs: []int{},
-				Movies:       []models.MoviesScenes{},
-				StashIDs:     []models.StashID{},
+				GalleryIDs:   models.NewRelatedIDs([]int{}),
+				TagIDs:       models.NewRelatedIDs([]int{}),
+				PerformerIDs: models.NewRelatedIDs([]int{}),
+				Movies:       models.NewRelatedMovies([]models.MoviesScenes{}),
+				StashIDs:     models.NewRelatedStashIDs([]models.StashID{}),
 			},
 			false,
 		},
@@ -316,11 +358,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
 				Files: []*file.VideoFile{
 					makeSceneFileWithID(sceneIdxWithGallery),
 				},
-				GalleryIDs:   []int{},
-				TagIDs:       []int{},
-				PerformerIDs: []int{},
-				Movies:       []models.MoviesScenes{},
-				StashIDs:     []models.StashID{},
+				GalleryIDs: models.NewRelatedIDs([]int{}),
 			},
 			false,
 		},
@@ -331,11 +369,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
 				Files: []*file.VideoFile{
 					makeSceneFileWithID(sceneIdxWithTag),
 				},
-				TagIDs:       []int{},
-				GalleryIDs:   []int{},
-				PerformerIDs: []int{},
-				Movies:       []models.MoviesScenes{},
-				StashIDs:     []models.StashID{},
+				TagIDs: models.NewRelatedIDs([]int{}),
 			},
 			false,
 		},
@@ -346,11 +380,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
 				Files: []*file.VideoFile{
 					makeSceneFileWithID(sceneIdxWithPerformer),
 				},
-				PerformerIDs: []int{},
-				TagIDs:       []int{},
-				GalleryIDs:   []int{},
-				Movies:       []models.MoviesScenes{},
-				StashIDs:     []models.StashID{},
+				PerformerIDs: models.NewRelatedIDs([]int{}),
 			},
 			false,
 		},
@@ -361,11 +391,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
 				Files: []*file.VideoFile{
 					makeSceneFileWithID(sceneIdxWithMovie),
 				},
-				Movies:       []models.MoviesScenes{},
-				GalleryIDs:   []int{},
-				TagIDs:       []int{},
-				PerformerIDs: []int{},
-				StashIDs:     []models.StashID{},
+				Movies: models.NewRelatedMovies([]models.MoviesScenes{}),
 			},
 			false,
 		},
@@ -377,11 +403,6 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
 					makeSceneFileWithID(sceneIdxWithGallery),
 				},
 				StudioID: &invalidID,
-				GalleryIDs:   []int{},
-				TagIDs:       []int{},
-				PerformerIDs: []int{},
-				Movies:       []models.MoviesScenes{},
-				StashIDs:     []models.StashID{},
 			},
 			true,
 		},
@@ -392,7 +413,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
 				Files: []*file.VideoFile{
 					makeSceneFileWithID(sceneIdxWithGallery),
 				},
-				GalleryIDs: []int{invalidID},
+				GalleryIDs: models.NewRelatedIDs([]int{invalidID}),
 			},
 			true,
 		},
@@ -403,7 +424,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
 				Files: []*file.VideoFile{
 					makeSceneFileWithID(sceneIdxWithGallery),
 				},
-				TagIDs: []int{invalidID},
+				TagIDs: models.NewRelatedIDs([]int{invalidID}),
 			},
 			true,
 		},
@@ -414,7 +435,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
 				Files: []*file.VideoFile{
 					makeSceneFileWithID(sceneIdxWithGallery),
 				},
-				PerformerIDs: []int{invalidID},
+				PerformerIDs: models.NewRelatedIDs([]int{invalidID}),
 			},
 			true,
 		},
@@ -425,12 +446,12 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
 				Files: []*file.VideoFile{
 					makeSceneFileWithID(sceneIdxWithSpacedName),
 				},
-				Movies: []models.MoviesScenes{
+				Movies: models.NewRelatedMovies([]models.MoviesScenes{
 					{
 						MovieID:    invalidID,
 						SceneIndex: &sceneIndex,
 					},
-				},
+				}),
 			},
 			true,
 		},
@@ -456,6 +477,12 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
 				t.Errorf("sceneQueryBuilder.Find() error = %v", err)
 			}
 
+			// load relationships
+			if err := loadSceneRelationships(ctx, copy, s); err != nil {
+				t.Errorf("loadSceneRelationships() error = %v", err)
+				return
+			}
+
 			assert.Equal(copy, *s)
 		})
 	}
@@ -571,10 +598,10 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
 				StudioID:  &studioIDs[studioIdxWithScene],
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
-				GalleryIDs:   []int{galleryIDs[galleryIdxWithScene]},
-				TagIDs:       []int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]},
-				PerformerIDs: []int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]},
-				Movies: []models.MoviesScenes{
+				GalleryIDs:   models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
+				TagIDs:       models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
+				Movies: models.NewRelatedMovies([]models.MoviesScenes{
 					{
 						MovieID:    movieIDs[movieIdxWithScene],
 						SceneIndex: &sceneIndex,
@@ -583,8 +610,8 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
 						MovieID:    movieIDs[movieIdxWithStudio],
 						SceneIndex: &sceneIndex2,
 					},
-				},
-				StashIDs: []models.StashID{
+				}),
+				StashIDs: models.NewRelatedStashIDs([]models.StashID{
 					{
 						StashID:  stashID1,
 						Endpoint: endpoint1,
@@ -593,7 +620,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
 						StashID:  stashID2,
 						Endpoint: endpoint2,
 					},
-				},
+				}),
 			},
 			false,
 		},
@@ -606,11 +633,11 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
 				Files: []*file.VideoFile{
 					makeSceneFile(sceneIdxWithSpacedName),
 				},
-				GalleryIDs:   []int{},
-				TagIDs:       []int{},
-				PerformerIDs: []int{},
-				Movies:       []models.MoviesScenes{},
-				StashIDs:     []models.StashID{},
+				GalleryIDs:   models.NewRelatedIDs([]int{}),
+				TagIDs:       models.NewRelatedIDs([]int{}),
+				PerformerIDs: models.NewRelatedIDs([]int{}),
+				Movies:       models.NewRelatedMovies([]models.MoviesScenes{}),
+				StashIDs:     models.NewRelatedStashIDs([]models.StashID{}),
 			},
 			false,
 		},
@@ -641,6 +668,12 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
 			// ignore file ids
 			clearSceneFileIDs(got)
 
+			// load relationships
+			if err := loadSceneRelationships(ctx, tt.want, got); err != nil {
+				t.Errorf("loadSceneRelationships() error = %v", err)
+				return
+			}
+
 			assert.Equal(tt.want, *got)
 
 			s, err := qb.Find(ctx, tt.id)
@@ -651,6 +684,12 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
 			// ignore file ids
 			clearSceneFileIDs(s)
 
+			// load relationships
+			if err := loadSceneRelationships(ctx, tt.want, s); err != nil {
+				t.Errorf("loadSceneRelationships() error = %v", err)
+				return
+			}
+
 			assert.Equal(tt.want, *s)
 		})
 	}
@@ -705,10 +744,10 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				GalleryIDs: append(indexesToIDs(galleryIDs, sceneGalleries[sceneIdxWithGallery]),
+				GalleryIDs: models.NewRelatedIDs(append(indexesToIDs(galleryIDs, sceneGalleries[sceneIdxWithGallery]),
 					galleryIDs[galleryIdx1WithImage],
 					galleryIDs[galleryIdx1WithPerformer],
-				),
+				)),
 			},
 			false,
@@ -722,10 +761,10 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				TagIDs: append(indexesToIDs(tagIDs, sceneTags[sceneIdxWithTwoTags]),
+				TagIDs: models.NewRelatedIDs(append(indexesToIDs(tagIDs, sceneTags[sceneIdxWithTwoTags]),
 					tagIDs[tagIdx1WithDupName],
 					tagIDs[tagIdx1WithGallery],
-				),
+				)),
 			},
 			false,
@@ -739,10 +778,10 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				PerformerIDs: append(indexesToIDs(performerIDs, scenePerformers[sceneIdxWithTwoPerformers]),
+				PerformerIDs: models.NewRelatedIDs(append(indexesToIDs(performerIDs, scenePerformers[sceneIdxWithTwoPerformers]),
 					performerIDs[performerIdx1WithDupName],
 					performerIDs[performerIdx1WithGallery],
-				),
+				)),
 			},
 			false,
@@ -756,11 +795,11 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				Movies: append([]models.MoviesScenes{
+				Movies: models.NewRelatedMovies(append([]models.MoviesScenes{
 					{
 						MovieID: indexesToIDs(movieIDs, sceneMovies[sceneIdxWithMovie])[0],
 					},
-				}, movieScenes...),
+				}, movieScenes...)),
 			},
 			false,
@@ -774,7 +813,7 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				StashIDs: append([]models.StashID{sceneStashID(sceneIdxWithSpacedName)}, stashIDs...),
+				StashIDs: models.NewRelatedStashIDs(append([]models.StashID{sceneStashID(sceneIdxWithSpacedName)}, stashIDs...)),
 			},
 			false,
@@ -788,9 +827,9 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				GalleryIDs: append(indexesToIDs(galleryIDs, sceneGalleries[sceneIdxWithGallery]),
+				GalleryIDs: models.NewRelatedIDs(append(indexesToIDs(galleryIDs, sceneGalleries[sceneIdxWithGallery]),
 					galleryIDs[galleryIdx1WithPerformer],
-				),
+				)),
 			},
 			false,
@@ -804,9 +843,9 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				TagIDs: append(indexesToIDs(tagIDs, sceneTags[sceneIdxWithTwoTags]),
+				TagIDs: models.NewRelatedIDs(append(indexesToIDs(tagIDs, sceneTags[sceneIdxWithTwoTags]),
 					tagIDs[tagIdx1WithGallery],
-				),
+				)),
 			},
 			false,
@@ -820,9 +859,9 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				PerformerIDs: append(indexesToIDs(performerIDs, scenePerformers[sceneIdxWithTwoPerformers]),
+				PerformerIDs: models.NewRelatedIDs(append(indexesToIDs(performerIDs, scenePerformers[sceneIdxWithTwoPerformers]),
 					performerIDs[performerIdx1WithGallery],
-				),
+				)),
 			},
 			false,
@@ -843,11 +882,11 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				Movies: append([]models.MoviesScenes{
+				Movies: models.NewRelatedMovies(append([]models.MoviesScenes{
 					{
 						MovieID: indexesToIDs(movieIDs, sceneMovies[sceneIdxWithMovie])[0],
 					},
-				}, movieScenes...),
+				}, movieScenes...)),
 			},
 			false,
@@ -863,7 +902,7 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				StashIDs: []models.StashID{sceneStashID(sceneIdxWithSpacedName)},
+				StashIDs: models.NewRelatedStashIDs([]models.StashID{sceneStashID(sceneIdxWithSpacedName)}),
 			},
 			false,
@@ -929,7 +968,7 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				GalleryIDs: []int{},
+				GalleryIDs: models.NewRelatedIDs([]int{}),
 			},
 			false,
@@ -943,7 +982,7 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				TagIDs: []int{tagIDs[tagIdx2WithScene]},
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx2WithScene]}),
 			},
 			false,
@@ -957,7 +996,7 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				PerformerIDs: []int{performerIDs[performerIdx2WithScene]},
+				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx2WithScene]}),
 			},
 			false,
@@ -975,7 +1014,7 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				Movies: []models.MoviesScenes{},
+				Movies: models.NewRelatedMovies([]models.MoviesScenes{}),
 			},
 			false,
@@ -989,7 +1028,7 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				StashIDs: []models.StashID{},
+				StashIDs: models.NewRelatedStashIDs([]models.StashID{}),
 			},
 			false,
@@ -1003,7 +1042,7 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				GalleryIDs: []int{galleryIDs[galleryIdxWithScene]},
+				GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
 			},
 			false,
@@ -1017,7 +1056,7 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				TagIDs: indexesToIDs(tagIDs, sceneTags[sceneIdxWithTwoTags]),
+				TagIDs: models.NewRelatedIDs(indexesToIDs(tagIDs, sceneTags[sceneIdxWithTwoTags])),
 			},
 			false,
@@ -1031,7 +1070,7 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				PerformerIDs: indexesToIDs(performerIDs, scenePerformers[sceneIdxWithTwoPerformers]),
+				PerformerIDs: models.NewRelatedIDs(indexesToIDs(performerIDs, scenePerformers[sceneIdxWithTwoPerformers])),
 			},
 			false,
@@ -1049,11 +1088,11 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				Movies: []models.MoviesScenes{
+				Movies: models.NewRelatedMovies([]models.MoviesScenes{
 					{
 						MovieID: indexesToIDs(movieIDs, sceneMovies[sceneIdxWithMovie])[0],
 					},
-				},
+				}),
 			},
 			false,
@@ -1067,7 +1106,7 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 			models.Scene{
-				StashIDs: []models.StashID{sceneStashID(sceneIdxWithGallery)},
+				StashIDs: models.NewRelatedStashIDs([]models.StashID{sceneStashID(sceneIdxWithGallery)}),
 			},
 			false,
@@ -1094,6 +1133,16 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				t.Errorf("sceneQueryBuilder.Find() error = %v", err)
 			}
 
+			// load relationships
+			if err := loadSceneRelationships(ctx, tt.want, got); err != nil {
+				t.Errorf("loadSceneRelationships() error = %v", err)
+				return
+			}
+			if err := loadSceneRelationships(ctx, tt.want, s); err != nil {
+				t.Errorf("loadSceneRelationships() error = %v", err)
+				return
+			}
+
 			// only compare fields that were in the partial
 			if tt.partial.PerformerIDs != nil {
 				assert.Equal(tt.want.PerformerIDs, got.PerformerIDs)
@@ -1353,6 +1402,12 @@ func Test_sceneQueryBuilder_Find(t *testing.T) {
 
 			if got != nil {
 				clearSceneFileIDs(got)
+
+				// load relationships
+				if err := loadSceneRelationships(ctx, *tt.want, got); err != nil {
+					t.Errorf("loadSceneRelationships() error = %v", err)
+					return nil
+				}
 			}
 
 			assert.Equal(tt.want, got)
@@ -1362,6 +1417,21 @@ func Test_sceneQueryBuilder_Find(t *testing.T) {
 	}
 }
 
+func postFindScenes(ctx context.Context, want []*models.Scene, got []*models.Scene) error {
+	for i, s := range got {
+		clearSceneFileIDs(s)
+
+		// load relationships
+		if i < len(want) {
+			if err := loadSceneRelationships(ctx, *want[i], s); err != nil {
+				return err
+			}
+		}
+	}
+
+	return nil
+}
+
 func Test_sceneQueryBuilder_FindMany(t *testing.T) {
 	tests := []struct {
 		name string
@@ -1404,8 +1474,9 @@ func Test_sceneQueryBuilder_FindMany(t *testing.T) {
 				return
 			}
 
-			for _, s := range got {
-				clearSceneFileIDs(s)
+			if err := postFindScenes(ctx, tt.want, got); err != nil {
+				t.Errorf("loadSceneRelationships() error = %v", err)
+				return
 			}
 
 			assert.Equal(tt.want, got)
@@ -1474,8 +1545,9 @@ func Test_sceneQueryBuilder_FindByChecksum(t *testing.T) {
 				return nil
 			}
 
-			for _, s := range got {
-				clearSceneFileIDs(s)
+			if err := postFindScenes(ctx, tt.want, got); err != nil {
+				t.Errorf("loadSceneRelationships() error = %v", err)
+				return nil
 			}
 
 			assert.Equal(tt.want, got)
@@ -1546,8 +1618,9 @@ func Test_sceneQueryBuilder_FindByOSHash(t *testing.T) {
 				return nil
 			}
 
-			for _, s := range got {
-				clearSceneFileIDs(s)
+			if err := postFindScenes(ctx, tt.want, got); err != nil {
+				t.Errorf("loadSceneRelationships() error = %v", err)
+				return nil
 			}
 
 			if !reflect.DeepEqual(got, tt.want) {
@@ -1620,8 +1693,9 @@ func Test_sceneQueryBuilder_FindByPath(t *testing.T) {
 				return nil
 			}
 
-			for _, s := range got {
-				clearSceneFileIDs(s)
+			if err := postFindScenes(ctx, tt.want, got); err != nil {
+				t.Errorf("loadSceneRelationships() error = %v", err)
+				return nil
 			}
 
 			assert.Equal(tt.want, got)
@@ -1664,8 +1738,9 @@ func Test_sceneQueryBuilder_FindByGalleryID(t *testing.T) {
 				return
 			}
 
-			for _, s := range got {
-				clearSceneFileIDs(s)
+			if err := postFindScenes(ctx, tt.want, got); err != nil {
+				t.Errorf("loadSceneRelationships() error = %v", err)
+				return
 			}
 
 			assert.Equal(tt.want, got)
@@ -3539,7 +3614,11 @@ func verifyScenesTagCount(t *testing.T, tagCountCriterion models.IntCriterionInp
 		assert.Greater(t, len(scenes), 0)
 
 		for _, scene := range scenes {
-			verifyInt(t, len(scene.TagIDs), tagCountCriterion)
+			if err := scene.LoadTagIDs(ctx, sqb); err != nil {
+				t.Errorf("scene.LoadTagIDs() error = %v", err)
+				return nil
+			}
+			verifyInt(t, len(scene.TagIDs.List()), tagCountCriterion)
 		}
 
 		return nil
@@ -3576,7 +3655,12 @@ func verifyScenesPerformerCount(t *testing.T, performerCountCriterion models.Int
 		assert.Greater(t, len(scenes), 0)
 
 		for _, scene := range scenes {
-			verifyInt(t, len(scene.PerformerIDs), performerCountCriterion)
+			if err := scene.LoadPerformerIDs(ctx, sqb); err != nil {
+				t.Errorf("scene.LoadPerformerIDs() error = %v", err)
+				return nil
+			}
+
+			verifyInt(t, len(scene.PerformerIDs.List()), performerCountCriterion)
 		}
 
 		return nil
@@ -3776,6 +3860,10 @@ func TestSceneStashIDs(t *testing.T) {
 			return fmt.Errorf("Error creating scene: %s", err.Error())
 		}
 
+		if err := scene.LoadStashIDs(ctx, qb); err != nil {
+			return err
+		}
+
 		testSceneStashIDs(ctx, t, scene)
 		return nil
 	}); err != nil {
@@ -3785,7 +3873,7 @@ func TestSceneStashIDs(t *testing.T) {
 
 func testSceneStashIDs(ctx context.Context, t *testing.T, s *models.Scene) {
 	// ensure no stash IDs to begin with
-	assert.Len(t, s.StashIDs, 0)
+	assert.Len(t, s.StashIDs.List(), 0)
 
 	// add stash ids
 	const stashIDStr = "stashID"
@@ -3809,7 +3897,12 @@ func testSceneStashIDs(ctx context.Context, t *testing.T, s *models.Scene) {
 		t.Error(err.Error())
 	}
 
-	assert.Equal(t, []models.StashID{stashID}, s.StashIDs)
+	if err := s.LoadStashIDs(ctx, qb); err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	assert.Equal(t, []models.StashID{stashID}, s.StashIDs.List())
 
 	// remove stash ids and ensure was updated
 	s, err = qb.UpdatePartial(ctx, s.ID, models.ScenePartial{
@@ -3822,7 +3915,12 @@ func testSceneStashIDs(ctx context.Context, t *testing.T, s *models.Scene) {
 		t.Error(err.Error())
 	}
 
-	assert.Len(t, s.StashIDs, 0)
+	if err := s.LoadStashIDs(ctx, qb); err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	assert.Len(t, s.StashIDs.List(), 0)
 }
 
 func TestSceneQueryQTrim(t *testing.T) {
@@ -975,13 +975,13 @@ func makeScene(i int) *models.Scene {
 		OCounter: getOCounter(i),
 		Date:     getObjectDateObject(i),
 		StudioID: studioID,
-		GalleryIDs:   gids,
-		PerformerIDs: pids,
-		TagIDs:       tids,
-		Movies:       movies,
-		StashIDs: []models.StashID{
+		GalleryIDs:   models.NewRelatedIDs(gids),
+		PerformerIDs: models.NewRelatedIDs(pids),
+		TagIDs:       models.NewRelatedIDs(tids),
+		Movies:       models.NewRelatedMovies(movies),
+		StashIDs: models.NewRelatedStashIDs([]models.StashID{
 			sceneStashID(i),
-		},
+		}),
 	}
 }
 
@@ -1051,9 +1051,9 @@ func makeImage(i int) *models.Image {
 		Rating:   getIntPtr(getRating(i)),
 		OCounter: getOCounter(i),
 		StudioID: studioID,
-		GalleryIDs:   gids,
-		PerformerIDs: pids,
-		TagIDs:       tids,
+		GalleryIDs:   models.NewRelatedIDs(gids),
+		PerformerIDs: models.NewRelatedIDs(pids),
+		TagIDs:       models.NewRelatedIDs(tids),
 	}
 }
 
@@ -1135,12 +1135,12 @@ func makeGallery(i int, includeScenes bool) *models.Gallery {
 		Rating:   getIntPtr(getRating(i)),
 		Date:     getObjectDateObject(i),
 		StudioID: studioID,
-		PerformerIDs: pids,
-		TagIDs:       tids,
+		PerformerIDs: models.NewRelatedIDs(pids),
+		TagIDs:       models.NewRelatedIDs(tids),
 	}
 
 	if includeScenes {
-		ret.SceneIDs = indexesToIDs(sceneIDs, sceneGalleries.reverseLookup(i))
+		ret.SceneIDs = models.NewRelatedIDs(indexesToIDs(sceneIDs, sceneGalleries.reverseLookup(i)))
 	}
 
 	return ret
@@ -12,8 +12,8 @@ import (
 )
 
 type stashIDReaderWriter interface {
-	GetStashIDs(ctx context.Context, performerID int) ([]*models.StashID, error)
-	UpdateStashIDs(ctx context.Context, performerID int, stashIDs []*models.StashID) error
+	GetStashIDs(ctx context.Context, performerID int) ([]models.StashID, error)
+	UpdateStashIDs(ctx context.Context, performerID int, stashIDs []models.StashID) error
 }
 
 func testStashIDReaderWriter(ctx context.Context, t *testing.T, r stashIDReaderWriter, id int) {
@@ -26,25 +26,25 @@ func testStashIDReaderWriter(ctx context.Context, t *testing.T, r stashIDReaderW
 	// add stash ids
 	const stashIDStr = "stashID"
 	const endpoint = "endpoint"
-	stashID := &models.StashID{
+	stashID := models.StashID{
 		StashID:  stashIDStr,
 		Endpoint: endpoint,
 	}
 
 	// update stash ids and ensure was updated
-	if err := r.UpdateStashIDs(ctx, id, []*models.StashID{stashID}); err != nil {
+	if err := r.UpdateStashIDs(ctx, id, []models.StashID{stashID}); err != nil {
 		t.Error(err.Error())
 	}
 
-	testStashIDs(ctx, t, r, id, []*models.StashID{stashID})
+	testStashIDs(ctx, t, r, id, []models.StashID{stashID})
 
 	// update non-existing id - should return error
-	if err := r.UpdateStashIDs(ctx, -1, []*models.StashID{stashID}); err == nil {
+	if err := r.UpdateStashIDs(ctx, -1, []models.StashID{stashID}); err == nil {
 		t.Error("expected error when updating non-existing id")
 	}
 
 	// remove stash ids and ensure was updated
-	if err := r.UpdateStashIDs(ctx, id, []*models.StashID{}); err != nil {
+	if err := r.UpdateStashIDs(ctx, id, []models.StashID{}); err != nil {
 		t.Error(err.Error())
 	}
 
@@ -62,7 +62,7 @@ func testNoStashIDs(ctx context.Context, t *testing.T, r stashIDReaderWriter, id
 	assert.Len(t, stashIDs, 0)
 }
 
-func testStashIDs(ctx context.Context, t *testing.T, r stashIDReaderWriter, id int, expected []*models.StashID) {
+func testStashIDs(ctx context.Context, t *testing.T, r stashIDReaderWriter, id int, expected []models.StashID) {
 	t.Helper()
 	stashIDs, err := r.GetStashIDs(ctx, id)
 	if err != nil {
@@ -7,7 +7,10 @@ import (
 	"fmt"
 	"strings"
 
+	"github.com/doug-martin/goqu/v9"
+	"github.com/jmoiron/sqlx"
 	"github.com/stashapp/stash/pkg/models"
+	"github.com/stashapp/stash/pkg/sliceutil/intslice"
 )
 
 const studioTable = "studios"
@@ -76,21 +79,45 @@ func (qb *studioQueryBuilder) Find(ctx context.Context, id int) (*models.Studio,
 }
 
 func (qb *studioQueryBuilder) FindMany(ctx context.Context, ids []int) ([]*models.Studio, error) {
-	var studios []*models.Studio
-	for _, id := range ids {
-		studio, err := qb.Find(ctx, id)
-		if err != nil {
-			return nil, err
-		}
-
-		if studio == nil {
-			return nil, fmt.Errorf("studio with id %d not found", id)
-		}
-
-		studios = append(studios, studio)
-	}
-
-	return studios, nil
+	tableMgr := studioTableMgr
+	q := goqu.Select("*").From(tableMgr.table).Where(tableMgr.byIDInts(ids...))
+	unsorted, err := qb.getMany(ctx, q)
+	if err != nil {
+		return nil, err
+	}
+
+	ret := make([]*models.Studio, len(ids))
+
+	for _, s := range unsorted {
+		i := intslice.IntIndex(ids, s.ID)
+		ret[i] = s
+	}
+
+	for i := range ret {
+		if ret[i] == nil {
+			return nil, fmt.Errorf("studio with id %d not found", ids[i])
+		}
+	}
+
+	return ret, nil
+}
+
+func (qb *studioQueryBuilder) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*models.Studio, error) {
+	const single = false
+	var ret []*models.Studio
+	if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error {
+		var f models.Studio
+		if err := r.StructScan(&f); err != nil {
+			return err
+		}
+
+		ret = append(ret, &f)
+		return nil
+	}); err != nil {
+		return nil, err
+	}
+
+	return ret, nil
 }
 
 func (qb *studioQueryBuilder) FindChildren(ctx context.Context, id int) ([]*models.Studio, error) {
@@ -258,16 +285,11 @@ func (qb *studioQueryBuilder) Query(ctx context.Context, studioFilter *models.St
|
|||||||
return nil, 0, err
|
return nil, 0, err
|
||||||
}
|
}
|
||||||
|
|
||||||
var studios []*models.Studio
|
studios, err := qb.FindMany(ctx, idsResult)
|
||||||
for _, id := range idsResult {
|
|
||||||
studio, err := qb.Find(ctx, id)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, 0, err
|
return nil, 0, err
|
||||||
}
|
}
|
||||||
|
|
||||||
studios = append(studios, studio)
|
|
||||||
}
|
|
||||||
|
|
||||||
return studios, countResult, nil
|
return studios, countResult, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -425,11 +447,11 @@ func (qb *studioQueryBuilder) stashIDRepository() *stashIDRepository {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (qb *studioQueryBuilder) GetStashIDs(ctx context.Context, studioID int) ([]*models.StashID, error) {
|
func (qb *studioQueryBuilder) GetStashIDs(ctx context.Context, studioID int) ([]models.StashID, error) {
|
||||||
return qb.stashIDRepository().get(ctx, studioID)
|
return qb.stashIDRepository().get(ctx, studioID)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (qb *studioQueryBuilder) UpdateStashIDs(ctx context.Context, studioID int, stashIDs []*models.StashID) error {
|
func (qb *studioQueryBuilder) UpdateStashIDs(ctx context.Context, studioID int, stashIDs []models.StashID) error {
|
||||||
return qb.stashIDRepository().replace(ctx, studioID, stashIDs)
|
return qb.stashIDRepository().replace(ctx, studioID, stashIDs)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
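The FindMany rewrite above swaps a per-id `Find` loop for a single `WHERE id IN (...)` query, then slots the unsorted rows back into the order of the requested ids (via `intslice.IntIndex`) so callers get result i for id i — the contract a batched loader relies on. A minimal standalone sketch of that reorder-and-validate step, using hypothetical `Row`/`findManyOrder` names rather than the repository types:

```go
// Editor's sketch (not part of the diff): row order returned by a
// "WHERE id IN (...)" query is unspecified, so each fetched row is placed at
// the index of its id in the request slice; any id left without a row is
// reported as missing.
package main

import "fmt"

type Row struct {
	ID   int
	Name string
}

// findManyOrder mirrors the pattern in the diff: slot rows by requested id,
// then fail if any slot is still empty.
func findManyOrder(ids []int, unsorted []Row) ([]*Row, error) {
	ret := make([]*Row, len(ids))

	indexOf := func(id int) int {
		for i, v := range ids {
			if v == id {
				return i
			}
		}
		return -1
	}

	for i := range unsorted {
		if idx := indexOf(unsorted[i].ID); idx != -1 {
			ret[idx] = &unsorted[i]
		}
	}

	for i := range ret {
		if ret[i] == nil {
			return nil, fmt.Errorf("row with id %d not found", ids[i])
		}
	}

	return ret, nil
}

func main() {
	// rows come back from the database in arbitrary order
	rows := []Row{{3, "c"}, {1, "a"}, {2, "b"}}
	ordered, err := findManyOrder([]int{1, 2, 3}, rows)
	if err != nil {
		panic(err)
	}
	for _, r := range ordered {
		fmt.Println(r.ID, r.Name)
	}
}
```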
@@ -69,6 +69,14 @@ func (t *table) byID(id interface{}) exp.Expression {
 	return t.idColumn.Eq(id)
 }
 
+func (t *table) byIDInts(ids ...int) exp.Expression {
+	ii := make([]interface{}, len(ids))
+	for i, id := range ids {
+		ii[i] = id
+	}
+	return t.idColumn.In(ii...)
+}
+
 func (t *table) idExists(ctx context.Context, id interface{}) (bool, error) {
 	q := dialect.Select(goqu.COUNT("*")).From(t.table).Where(t.byID(id))
 
@@ -174,3 +174,31 @@ var (
 		idColumn: goqu.T(fingerprintTable).Col(idColumn),
 	}
 )
+
+var (
+	performerTableMgr = &table{
+		table:    goqu.T(performerTable),
+		idColumn: goqu.T(performerTable).Col(idColumn),
+	}
+)
+
+var (
+	studioTableMgr = &table{
+		table:    goqu.T(studioTable),
+		idColumn: goqu.T(studioTable).Col(idColumn),
+	}
+)
+
+var (
+	tagTableMgr = &table{
+		table:    goqu.T(tagTable),
+		idColumn: goqu.T(tagTable).Col(idColumn),
+	}
+)
+
+var (
+	movieTableMgr = &table{
+		table:    goqu.T(movieTable),
+		idColumn: goqu.T(movieTable).Col(idColumn),
+	}
+)
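The new `byIDInts` helper above simply widens the int ids and builds an IN expression over the table's id column. A small sketch of what that produces when rendered through goqu, using the same calls that appear in the diff (`goqu.Select`, `goqu.T(...).Col(...)`, `In(...)`); the exact SQL text is indicative only and depends on the configured dialect:

```go
// Editor's sketch (not part of the diff): rendering a byIDInts-style query.
package main

import (
	"fmt"

	"github.com/doug-martin/goqu/v9"
)

func main() {
	ids := []int{1, 2, 3}

	// mirror byIDInts: widen the ints so they can be passed to In(...)
	ii := make([]interface{}, len(ids))
	for i, id := range ids {
		ii[i] = id
	}

	col := goqu.T("studios").Col("id")
	q := goqu.Select("*").From(goqu.T("studios")).Where(col.In(ii...))

	sql, _, err := q.ToSQL()
	if err != nil {
		panic(err)
	}
	// roughly: SELECT * FROM "studios" WHERE ("studios"."id" IN (1, 2, 3))
	fmt.Println(sql)
}
```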
@@ -7,7 +7,10 @@ import (
 	"fmt"
 	"strings"
 
+	"github.com/doug-martin/goqu/v9"
+	"github.com/jmoiron/sqlx"
 	"github.com/stashapp/stash/pkg/models"
+	"github.com/stashapp/stash/pkg/sliceutil/intslice"
 )
 
 const tagTable = "tags"
@@ -94,21 +97,45 @@ func (qb *tagQueryBuilder) Find(ctx context.Context, id int) (*models.Tag, error
 }
 
 func (qb *tagQueryBuilder) FindMany(ctx context.Context, ids []int) ([]*models.Tag, error) {
-	var tags []*models.Tag
-	for _, id := range ids {
-		tag, err := qb.Find(ctx, id)
-		if err != nil {
-			return nil, err
-		}
-
-		if tag == nil {
-			return nil, fmt.Errorf("tag with id %d not found", id)
-		}
-
-		tags = append(tags, tag)
-	}
-
-	return tags, nil
+	tableMgr := tagTableMgr
+	q := goqu.Select("*").From(tableMgr.table).Where(tableMgr.byIDInts(ids...))
+	unsorted, err := qb.getMany(ctx, q)
+	if err != nil {
+		return nil, err
+	}
+
+	ret := make([]*models.Tag, len(ids))
+
+	for _, s := range unsorted {
+		i := intslice.IntIndex(ids, s.ID)
+		ret[i] = s
+	}
+
+	for i := range ret {
+		if ret[i] == nil {
+			return nil, fmt.Errorf("tag with id %d not found", ids[i])
+		}
+	}
+
+	return ret, nil
+}
+
+func (qb *tagQueryBuilder) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*models.Tag, error) {
+	const single = false
+	var ret []*models.Tag
+	if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error {
+		var f models.Tag
+		if err := r.StructScan(&f); err != nil {
+			return err
+		}
+
+		ret = append(ret, &f)
+		return nil
+	}); err != nil {
+		return nil, err
+	}
+
+	return ret, nil
 }
 
 func (qb *tagQueryBuilder) FindBySceneID(ctx context.Context, sceneID int) ([]*models.Tag, error) {
@@ -343,14 +370,10 @@ func (qb *tagQueryBuilder) Query(ctx context.Context, tagFilter *models.TagFilte
 		return nil, 0, err
 	}
 
-	var tags []*models.Tag
-	for _, id := range idsResult {
-		tag, err := qb.Find(ctx, id)
-		if err != nil {
-			return nil, 0, err
-		}
-		tags = append(tags, tag)
-	}
+	tags, err := qb.FindMany(ctx, idsResult)
+	if err != nil {
+		return nil, 0, err
+	}
 
 	return tags, countResult, nil
 }
@@ -961,7 +961,10 @@ func TestTagMerge(t *testing.T) {
 		if err != nil {
 			return err
 		}
-		sceneTagIDs := s.TagIDs
+		if err := s.LoadTagIDs(ctx, db.Scene); err != nil {
+			return err
+		}
+		sceneTagIDs := s.TagIDs.List()
 
 		assert.Contains(sceneTagIDs, destID)
 
@@ -993,8 +996,12 @@ func TestTagMerge(t *testing.T) {
 			return err
 		}
 
+		if err := g.LoadTagIDs(ctx, db.Gallery); err != nil {
+			return err
+		}
+
 		// ensure gallery points to new tag
-		assert.Contains(g.TagIDs, destID)
+		assert.Contains(g.TagIDs.List(), destID)
 
 		// ensure performer points to new tag
 		performerTagIDs, err := sqlite.PerformerReaderWriter.GetTagIDs(ctx, performerIDs[performerIdxWithTwoTags])
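The test changes above reflect the on-demand relationship loading introduced by this commit: tag IDs are no longer populated up front, so callers must invoke `LoadTagIDs` before reading `TagIDs.List()`. A minimal illustrative version of such a lazily-loaded ID list (hypothetical types, not the actual models implementation):

```go
// Editor's sketch (not part of the diff): a lazily-loaded relationship that
// must be explicitly loaded before its list can be read.
package main

import (
	"context"
	"fmt"
)

// RelatedIDs holds a relationship that may or may not have been loaded yet.
type RelatedIDs struct {
	loaded bool
	ids    []int
}

func (r *RelatedIDs) Loaded() bool { return r.loaded }

// List panics if the relationship was never loaded, making missing Load calls obvious.
func (r *RelatedIDs) List() []int {
	if !r.loaded {
		panic("relationship not loaded")
	}
	return r.ids
}

type Scene struct {
	ID     int
	TagIDs RelatedIDs
}

// LoadTagIDs fetches the relationship only when it is first needed.
func (s *Scene) LoadTagIDs(ctx context.Context, getTagIDs func(ctx context.Context, sceneID int) ([]int, error)) error {
	if s.TagIDs.Loaded() {
		return nil
	}
	ids, err := getTagIDs(ctx, s.ID)
	if err != nil {
		return err
	}
	s.TagIDs = RelatedIDs{loaded: true, ids: ids}
	return nil
}

func main() {
	s := Scene{ID: 1}
	get := func(ctx context.Context, sceneID int) ([]int, error) { return []int{10, 20}, nil }

	if err := s.LoadTagIDs(context.Background(), get); err != nil {
		panic(err)
	}
	fmt.Println(s.TagIDs.List()) // [10 20]
}
```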
@@ -14,7 +14,7 @@ type FinderImageStashIDGetter interface {
 	Finder
 	GetAliases(ctx context.Context, studioID int) ([]string, error)
 	GetImage(ctx context.Context, studioID int) ([]byte, error)
-	GetStashIDs(ctx context.Context, studioID int) ([]*models.StashID, error)
+	models.StashIDLoader
 }
 
 // ToJSON converts a Studio object into its JSON equivalent.
@@ -69,9 +69,9 @@ func ToJSON(ctx context.Context, reader FinderImageStashIDGetter, studio *models
 	}
 
 	stashIDs, _ := reader.GetStashIDs(ctx, studio.ID)
-	var ret []*models.StashID
+	var ret []models.StashID
 	for _, stashID := range stashIDs {
-		newJoin := &models.StashID{
+		newJoin := models.StashID{
 			StashID:  stashID.StashID,
 			Endpoint: stashID.Endpoint,
 		}
@@ -46,8 +46,8 @@ var stashID = models.StashID{
 	StashID:  "StashID",
 	Endpoint: "Endpoint",
 }
-var stashIDs = []*models.StashID{
-	&stashID,
+var stashIDs = []models.StashID{
+	stashID,
 }
 
 const image = "aW1hZ2VCeXRlcw=="
@@ -107,8 +107,8 @@ func createFullJSONStudio(parentStudio, image string, aliases []string) *jsonsch
 		Image:   image,
 		Rating:  rating,
 		Aliases: aliases,
-		StashIDs: []*models.StashID{
-			&stashID,
+		StashIDs: []models.StashID{
+			stashID,
 		},
 		IgnoreAutoTag: autoTagIgnored,
 	}
@@ -18,7 +18,7 @@ type NameFinderCreatorUpdater interface {
 	UpdateFull(ctx context.Context, updatedStudio models.Studio) (*models.Studio, error)
 	UpdateImage(ctx context.Context, studioID int, image []byte) error
 	UpdateAliases(ctx context.Context, studioID int, aliases []string) error
-	UpdateStashIDs(ctx context.Context, studioID int, stashIDs []*models.StashID) error
+	UpdateStashIDs(ctx context.Context, studioID int, stashIDs []models.StashID) error
 }
 
 var ErrParentStudioNotExist = errors.New("parent studio does not exist")
1 tools.go
@@ -6,5 +6,6 @@ package main
 import (
 	_ "github.com/99designs/gqlgen"
 	_ "github.com/Yamashou/gqlgenc"
+	_ "github.com/vektah/dataloaden"
 	_ "github.com/vektra/mockery/v2"
 )
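The blank import added to tools.go above pins dataloaden as a module dependency so it can be invoked with `go generate` / `go run` from anywhere in the module, as the vendored README below describes. A loaders package would typically carry directives along these lines; the loader names and types here are illustrative only, not taken from this diff:

```go
// Editor's sketch (not part of the diff): hypothetical go:generate directives
// invoking dataloaden to produce int-keyed loaders for model types.
package loaders

//go:generate go run github.com/vektah/dataloaden SceneLoader int *github.com/stashapp/stash/pkg/models.Scene
//go:generate go run github.com/vektah/dataloaden StudioLoader int *github.com/stashapp/stash/pkg/models.Studio
```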
2 vendor/github.com/vektah/dataloaden/.gitignore generated vendored Normal file
@@ -0,0 +1,2 @@
+/vendor
+/.idea
97 vendor/github.com/vektah/dataloaden/README.md generated vendored Normal file
@@ -0,0 +1,97 @@
+### The DATALOADer gENerator [CircleCI](https://circleci.com/gh/vektah/dataloaden) [Go Report Card](https://goreportcard.com/report/github.com/vektah/dataloaden) [codecov](https://codecov.io/gh/vektah/dataloaden)
+
+Requires golang 1.11+ for modules support.
+
+This is a tool for generating type safe data loaders for go, inspired by https://github.com/facebook/dataloader.
+
+The intended use is in graphql servers, to reduce the number of queries being sent to the database. These dataloader
+objects should be request scoped and short lived. They should be cheap to create in every request even if they dont
+get used.
+
+#### Getting started
+
+From inside the package you want to have the dataloader in:
+```bash
+go run github.com/vektah/dataloaden UserLoader string *github.com/dataloaden/example.User
+```
+
+This will generate a dataloader called `UserLoader` that looks up `*github.com/dataloaden/example.User`'s objects
+based on a `string` key.
+
+In another file in the same package, create the constructor method:
+```go
+func NewUserLoader() *UserLoader {
+	return &UserLoader{
+		wait:     2 * time.Millisecond,
+		maxBatch: 100,
+		fetch: func(keys []string) ([]*User, []error) {
+			users := make([]*User, len(keys))
+			errors := make([]error, len(keys))
+
+			for i, key := range keys {
+				users[i] = &User{ID: key, Name: "user " + key}
+			}
+			return users, errors
+		},
+	}
+}
+```
+
+Then wherever you want to call the dataloader
+```go
+loader := NewUserLoader()
+
+user, err := loader.Load("123")
+```
+
+This method will block for a short amount of time, waiting for any other similar requests to come in, call your fetch
+function once. It also caches values and wont request duplicates in a batch.
+
+#### Returning Slices
+
+You may want to generate a dataloader that returns slices instead of single values. Both key and value types can be a
+simple go type expression:
+
+```bash
+go run github.com/vektah/dataloaden UserSliceLoader string []*github.com/dataloaden/example.User
+```
+
+Now each key is expected to return a slice of values and the `fetch` function has the return type `[][]*User`.
+
+#### Using with go modules
+
+Create a tools.go that looks like this:
+```go
+// +build tools
+
+package main
+
+import _ "github.com/vektah/dataloaden"
+```
+
+This will allow go modules to see the dependency.
+
+You can invoke it from anywhere within your module now using `go run github.com/vektah/dataloaden` and
+always get the pinned version.
+
+#### Wait, how do I use context with this?
+
+I don't think context makes sense to be passed through a data loader. Consider a few scenarios:
+1. a dataloader shared between requests: request A and B both get batched together, which context should be passed to the DB? context.Background is probably more suitable.
+2. a dataloader per request for graphql: two different nodes in the graph get batched together, they have different context for tracing purposes, which should be passed to the db? neither, you should just use the root request context.
+
+
+So be explicit about your context:
+```go
+func NewLoader(ctx context.Context) *UserLoader {
+	return &UserLoader{
+		wait:     2 * time.Millisecond,
+		maxBatch: 100,
+		fetch: func(keys []string) ([]*User, []error) {
+			// you now have a ctx to work with
+		},
+	}
+}
+```
+
+If you feel like I'm wrong please raise an issue.
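Tying the vendored README above back to this commit: the generated loaders' `fetch` contract is that result i and error i correspond to keys[i], which is exactly why the FindMany functions now return rows in the order of the requested ids. A sketch of wiring such a loader to a FindMany-style batch function; `StudioLoader` below is a cut-down stand-in for what dataloaden generates (same `wait`/`maxBatch`/`fetch` fields as in the README's example), and the finder interface is illustrative:

```go
// Editor's sketch (not part of the diff): a request-scoped loader whose fetch
// delegates to a batched FindMany lookup.
package main

import (
	"context"
	"fmt"
	"time"
)

type Studio struct {
	ID   int
	Name string
}

// stand-in for the dataloaden-generated loader's configuration fields
type StudioLoader struct {
	wait     time.Duration
	maxBatch int
	fetch    func(ids []int) ([]*Studio, []error)
}

// studioFinder is the shape of the repository's batched lookup.
type studioFinder interface {
	FindMany(ctx context.Context, ids []int) ([]*Studio, error)
}

func NewStudioLoader(ctx context.Context, finder studioFinder) *StudioLoader {
	return &StudioLoader{
		wait:     2 * time.Millisecond,
		maxBatch: 100,
		fetch: func(ids []int) ([]*Studio, []error) {
			studios, err := finder.FindMany(ctx, ids)
			if err != nil {
				// report the same error for every key in the batch
				errs := make([]error, len(ids))
				for i := range errs {
					errs[i] = err
				}
				return nil, errs
			}
			return studios, nil
		},
	}
}

// fakeFinder lets the sketch run without a database.
type fakeFinder struct{}

func (fakeFinder) FindMany(ctx context.Context, ids []int) ([]*Studio, error) {
	ret := make([]*Studio, len(ids))
	for i, id := range ids {
		ret[i] = &Studio{ID: id, Name: fmt.Sprintf("studio %d", id)}
	}
	return ret, nil
}

func main() {
	loader := NewStudioLoader(context.Background(), fakeFinder{})
	studios, errs := loader.fetch([]int{1, 2, 3})
	fmt.Println(len(studios), errs)
}
```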
32 vendor/github.com/vektah/dataloaden/appveyor.yml generated vendored Normal file
@@ -0,0 +1,32 @@
+version: "{build}"
+
+# Source Config
+
+skip_branch_with_pr: true
+clone_folder: c:\projects\dataloaden
+
+# Build host
+
+environment:
+  GOPATH: c:\gopath
+  GOVERSION: 1.11.5
+  PATH: '%PATH%;c:\gopath\bin'
+
+init:
+  - git config --global core.autocrlf input
+
+# Build
+
+install:
+  # Install the specific Go version.
+  - rmdir c:\go /s /q
+  - appveyor DownloadFile https://storage.googleapis.com/golang/go%GOVERSION%.windows-amd64.msi
+  - msiexec /i go%GOVERSION%.windows-amd64.msi /q
+  - go version
+
+build: false
+deploy: false
+
+test_script:
+  - go generate ./...
+  - go test -parallel 8 ./...
Some files were not shown because too many files have changed in this diff.