File storage rewrite (#2676)

* Restructure data layer part 2 (#2599)
* Refactor and separate image model
* Refactor image query builder
* Handle relationships in image query builder
* Remove relationship management methods
* Refactor gallery model/query builder
* Add scenes to gallery model
* Convert scene model
* Refactor scene models
* Remove unused methods
* Add unit tests for gallery
* Add image tests
* Add scene tests
* Convert unnecessary scene value pointers to values
* Convert unnecessary pointer values to values
* Refactor scene partial
* Add scene partial tests
* Refactor ImagePartial
* Add image partial tests
* Refactor gallery partial update
* Add partial gallery update tests
* Use zero/null package for null values
* Add files and scan system
* Add sqlite implementation for files/folders
* Add unit tests for files/folders
* Image refactors
* Update image data layer
* Refactor gallery model and creation
* Refactor scene model
* Refactor scenes
* Don't set title from filename
* Allow galleries to freely add/remove images
* Add multiple scene file support to graphql and UI
* Add multiple file support for images in graphql/UI
* Add multiple file for galleries in graphql/UI
* Remove use of some deprecated fields
* Remove scene path usage
* Remove gallery path usage
* Remove path from image
* Move funscript to video file
* Refactor caption detection
* Migrate existing data
* Add post commit/rollback hook system
* Lint. Comment out import/export tests
* Add WithDatabase read only wrapper
* Prepend tasks to list
* Add 32 pre-migration
* Add warnings in release and migration notes
This commit is contained in:
WithoutPants
2022-07-13 16:30:54 +10:00
parent 30877c75fb
commit 5495d72849
359 changed files with 43690 additions and 16000 deletions

4
go.mod
View File

@@ -19,7 +19,7 @@ require (
github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a
github.com/jmoiron/sqlx v1.3.1 github.com/jmoiron/sqlx v1.3.1
github.com/json-iterator/go v1.1.12 github.com/json-iterator/go v1.1.12
github.com/mattn/go-sqlite3 v1.14.6 github.com/mattn/go-sqlite3 v1.14.7
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007 github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8
github.com/remeh/sizedwaitgroup v1.0.0 github.com/remeh/sizedwaitgroup v1.0.0
@@ -47,6 +47,7 @@ require (
require ( require (
github.com/asticode/go-astisub v0.20.0 github.com/asticode/go-astisub v0.20.0
github.com/doug-martin/goqu/v9 v9.18.0
github.com/go-chi/httplog v0.2.1 github.com/go-chi/httplog v0.2.1
github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4
github.com/hashicorp/golang-lru v0.5.4 github.com/hashicorp/golang-lru v0.5.4
@@ -56,6 +57,7 @@ require (
github.com/spf13/cast v1.4.1 github.com/spf13/cast v1.4.1
github.com/vearutop/statigz v1.1.6 github.com/vearutop/statigz v1.1.6
github.com/vektah/gqlparser/v2 v2.4.1 github.com/vektah/gqlparser/v2 v2.4.1
gopkg.in/guregu/null.v4 v4.0.0
) )
require ( require (

15
go.sum
View File

@@ -65,6 +65,8 @@ github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBp
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/ClickHouse/clickhouse-go v1.4.3/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI= github.com/ClickHouse/clickhouse-go v1.4.3/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI=
github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60=
github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0= github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
@@ -206,6 +208,8 @@ github.com/docker/docker v17.12.0-ce-rc1.0.20210128214336-420b1d36250f+incompati
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
github.com/doug-martin/goqu/v9 v9.18.0 h1:/6bcuEtAe6nsSMVK/M+fOiXUNfyFF3yYtE07DBPFMYY=
github.com/doug-martin/goqu/v9 v9.18.0/go.mod h1:nf0Wc2/hV3gYK9LiyqIrzBEVGlI8qW3GuDCEobC4wBQ=
github.com/dustin/go-humanize v0.0.0-20180421182945-02af3965c54e/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v0.0.0-20180421182945-02af3965c54e/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/edsrzf/mmap-go v0.0.0-20170320065105-0bce6a688712/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M= github.com/edsrzf/mmap-go v0.0.0-20170320065105-0bce6a688712/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M=
@@ -248,8 +252,9 @@ github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs=
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 h1:qZNfIGkIANxGv/OqtnntR4DfOY2+BgwR60cAcu/i3SE= github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 h1:qZNfIGkIANxGv/OqtnntR4DfOY2+BgwR60cAcu/i3SE=
github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4/go.mod h1:kW3HQ4UdaAyrUCSSDR4xUzBKW6O2iA4uHhk7AtyYp10= github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4/go.mod h1:kW3HQ4UdaAyrUCSSDR4xUzBKW6O2iA4uHhk7AtyYp10=
@@ -535,8 +540,9 @@ github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/lib/pq v1.10.0 h1:Zx5DJFEYQXio93kgXnQ09fXNiUKsqv4OUEu2UtGcB1E=
github.com/lib/pq v1.10.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/lib/pq v1.10.1 h1:6VXZrLU0jHBYyAqrSPa+MgPfnSvTPuMgK+k0o5kVFWo=
github.com/lib/pq v1.10.1/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/logrusorgru/aurora/v3 v3.0.0/go.mod h1:vsR12bk5grlLvLXAYrBsb5Oc/N+LxAlxggSjiwMnCUc= github.com/logrusorgru/aurora/v3 v3.0.0/go.mod h1:vsR12bk5grlLvLXAYrBsb5Oc/N+LxAlxggSjiwMnCUc=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
@@ -570,8 +576,9 @@ github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOA
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg=
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/mattn/go-sqlite3 v1.14.7 h1:fxWBnXkxfM6sRiuH3bqJ4CfzZojMOLVc0UTsTglEghA=
github.com/mattn/go-sqlite3 v1.14.7/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso=
@@ -1300,6 +1307,8 @@ gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8X
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/guregu/null.v4 v4.0.0 h1:1Wm3S1WEA2I26Kq+6vcW+w0gcDo44YKYD7YIEJNHDjg=
gopkg.in/guregu/null.v4 v4.0.0/go.mod h1:YoQhUrADuG3i9WqesrCmpNRwm1ypAgSHYqoOcTu/JrI=
gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s=
gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=

View File

@@ -22,10 +22,18 @@ autobind:
- github.com/stashapp/stash/pkg/scraper/stashbox - github.com/stashapp/stash/pkg/scraper/stashbox
models: models:
# autobind on config causes generation issues
# Scalars # Scalars
Timestamp: Timestamp:
model: github.com/stashapp/stash/pkg/models.Timestamp model: github.com/stashapp/stash/pkg/models.Timestamp
Int64:
model: github.com/stashapp/stash/pkg/models.Int64
# define to force resolvers
Image:
model: github.com/stashapp/stash/pkg/models.Image
fields:
title:
resolver: true
# autobind on config causes generation issues
StashConfig: StashConfig:
model: github.com/stashapp/stash/internal/manager/config.StashConfig model: github.com/stashapp/stash/internal/manager/config.StashConfig
StashConfigInput: StashConfigInput:
@@ -83,6 +91,8 @@ models:
ScanMetaDataFilterInput: ScanMetaDataFilterInput:
model: github.com/stashapp/stash/internal/manager.ScanMetaDataFilterInput model: github.com/stashapp/stash/internal/manager.ScanMetaDataFilterInput
# renamed types # renamed types
BulkUpdateIdMode:
model: github.com/stashapp/stash/pkg/models.RelationshipUpdateMode
DLNAStatus: DLNAStatus:
model: github.com/stashapp/stash/internal/dlna.Status model: github.com/stashapp/stash/internal/dlna.Status
DLNAIP: DLNAIP:
@@ -102,6 +112,8 @@ models:
ScraperSource: ScraperSource:
model: github.com/stashapp/stash/pkg/scraper.Source model: github.com/stashapp/stash/pkg/scraper.Source
# rebind inputs to types # rebind inputs to types
StashIDInput:
model: github.com/stashapp/stash/pkg/models.StashID
IdentifySourceInput: IdentifySourceInput:
model: github.com/stashapp/stash/internal/identify.Source model: github.com/stashapp/stash/internal/identify.Source
IdentifyFieldOptionsInput: IdentifyFieldOptionsInput:

View File

@@ -0,0 +1,40 @@
fragment FolderData on Folder {
id
path
}
fragment VideoFileData on VideoFile {
path
size
duration
video_codec
audio_codec
width
height
frame_rate
bit_rate
fingerprints {
type
value
}
}
fragment ImageFileData on ImageFile {
path
size
width
height
fingerprints {
type
value
}
}
fragment GalleryFileData on GalleryFile {
path
size
fingerprints {
type
value
}
}

View File

@@ -1,19 +1,21 @@
fragment SlimGalleryData on Gallery { fragment SlimGalleryData on Gallery {
id id
checksum
path
title title
date date
url url
details details
rating rating
organized organized
files {
...GalleryFileData
}
folder {
...FolderData
}
image_count image_count
cover { cover {
file { files {
size ...ImageFileData
width
height
} }
paths { paths {
@@ -37,8 +39,6 @@ fragment SlimGalleryData on Gallery {
image_path image_path
} }
scenes { scenes {
id ...SlimSceneData
title
path
} }
} }

View File

@@ -1,7 +1,5 @@
fragment GalleryData on Gallery { fragment GalleryData on Gallery {
id id
checksum
path
created_at created_at
updated_at updated_at
title title
@@ -10,6 +8,14 @@ fragment GalleryData on Gallery {
details details
rating rating
organized organized
files {
...GalleryFileData
}
folder {
...FolderData
}
images { images {
...SlimImageData ...SlimImageData
} }

View File

@@ -1,16 +1,12 @@
fragment SlimImageData on Image { fragment SlimImageData on Image {
id id
checksum
title title
rating rating
organized organized
o_counter o_counter
path
file { files {
size ...ImageFileData
width
height
} }
paths { paths {
@@ -20,8 +16,13 @@ fragment SlimImageData on Image {
galleries { galleries {
id id
path
title title
files {
path
}
folder {
path
}
} }
studio { studio {

View File

@@ -1,18 +1,14 @@
fragment ImageData on Image { fragment ImageData on Image {
id id
checksum
title title
rating rating
organized organized
o_counter o_counter
path
created_at created_at
updated_at updated_at
file { files {
size ...ImageFileData
width
height
} }
paths { paths {

View File

@@ -1,7 +1,5 @@
fragment SlimSceneData on Scene { fragment SlimSceneData on Scene {
id id
checksum
oshash
title title
details details
url url
@@ -9,8 +7,6 @@ fragment SlimSceneData on Scene {
rating rating
o_counter o_counter
organized organized
path
phash
interactive interactive
interactive_speed interactive_speed
captions { captions {
@@ -18,15 +14,8 @@ fragment SlimSceneData on Scene {
caption_type caption_type
} }
file { files {
size ...VideoFileData
duration
video_codec
audio_codec
width
height
framerate
bitrate
} }
paths { paths {

View File

@@ -1,7 +1,5 @@
fragment SceneData on Scene { fragment SceneData on Scene {
id id
checksum
oshash
title title
details details
url url
@@ -9,8 +7,6 @@ fragment SceneData on Scene {
rating rating
o_counter o_counter
organized organized
path
phash
interactive interactive
interactive_speed interactive_speed
captions { captions {
@@ -20,15 +16,8 @@ fragment SceneData on Scene {
created_at created_at
updated_at updated_at
file { files {
size ...VideoFileData
duration
video_codec
audio_codec
width
height
framerate
bitrate
} }
paths { paths {

View File

@@ -0,0 +1,97 @@
type Fingerprint {
type: String!
value: String!
}
type Folder {
id: ID!
path: String!
parent_folder_id: ID
zip_file_id: ID
mod_time: Time!
created_at: Time!
updated_at: Time!
}
interface BaseFile {
id: ID!
path: String!
basename: String!
parent_folder_id: ID!
zip_file_id: ID
mod_time: Time!
size: Int64!
fingerprints: [Fingerprint!]!
created_at: Time!
updated_at: Time!
}
type VideoFile implements BaseFile {
id: ID!
path: String!
basename: String!
parent_folder_id: ID!
zip_file_id: ID
mod_time: Time!
size: Int64!
fingerprints: [Fingerprint!]!
format: String!
width: Int!
height: Int!
duration: Float!
video_codec: String!
audio_codec: String!
frame_rate: Float!
bit_rate: Int!
created_at: Time!
updated_at: Time!
}
type ImageFile implements BaseFile {
id: ID!
path: String!
basename: String!
parent_folder_id: ID!
zip_file_id: ID
mod_time: Time!
size: Int64!
fingerprints: [Fingerprint!]!
width: Int!
height: Int!
created_at: Time!
updated_at: Time!
}
type GalleryFile implements BaseFile {
id: ID!
path: String!
basename: String!
parent_folder_id: ID!
zip_file_id: ID
mod_time: Time!
size: Int64!
fingerprints: [Fingerprint!]!
created_at: Time!
updated_at: Time!
}

View File

@@ -1,8 +1,8 @@
"""Gallery type""" """Gallery type"""
type Gallery { type Gallery {
id: ID! id: ID!
checksum: String! checksum: String! @deprecated(reason: "Use files.fingerprints")
path: String path: String @deprecated(reason: "Use files.path")
title: String title: String
url: String url: String
date: String date: String
@@ -11,7 +11,10 @@ type Gallery {
organized: Boolean! organized: Boolean!
created_at: Time! created_at: Time!
updated_at: Time! updated_at: Time!
file_mod_time: Time file_mod_time: Time @deprecated(reason: "Use files.mod_time")
files: [GalleryFile!]!
folder: Folder
scenes: [Scene!]! scenes: [Scene!]!
studio: Studio studio: Studio
@@ -24,12 +27,6 @@ type Gallery {
cover: Image cover: Image
} }
type GalleryFilesType {
index: Int!
name: String
path: String
}
input GalleryCreateInput { input GalleryCreateInput {
title: String! title: String!
url: String url: String

View File

@@ -1,16 +1,18 @@
type Image { type Image {
id: ID! id: ID!
checksum: String checksum: String @deprecated(reason: "Use files.fingerprints")
title: String title: String
rating: Int rating: Int
o_counter: Int o_counter: Int
organized: Boolean! organized: Boolean!
path: String! path: String! @deprecated(reason: "Use files.path")
created_at: Time! created_at: Time!
updated_at: Time! updated_at: Time!
file_mod_time: Time
file: ImageFileType! # Resolver file_mod_time: Time @deprecated(reason: "Use files.mod_time")
file: ImageFileType! @deprecated(reason: "Use files.mod_time")
files: [ImageFile!]!
paths: ImagePathsType! # Resolver paths: ImagePathsType! # Resolver
galleries: [Gallery!]! galleries: [Gallery!]!
@@ -20,9 +22,10 @@ type Image {
} }
type ImageFileType { type ImageFileType {
size: Int mod_time: Time!
width: Int size: Int!
height: Int width: Int!
height: Int!
} }
type ImagePathsType { type ImagePathsType {

View File

@@ -10,3 +10,5 @@ scalar Timestamp
scalar Map scalar Map
scalar Any scalar Any
scalar Int64

View File

@@ -27,15 +27,15 @@ type SceneMovie {
scene_index: Int scene_index: Int
} }
type SceneCaption { type VideoCaption {
language_code: String! language_code: String!
caption_type: String! caption_type: String!
} }
type Scene { type Scene {
id: ID! id: ID!
checksum: String checksum: String @deprecated(reason: "Use files.fingerprints")
oshash: String oshash: String @deprecated(reason: "Use files.fingerprints")
title: String title: String
details: String details: String
url: String url: String
@@ -43,16 +43,17 @@ type Scene {
rating: Int rating: Int
organized: Boolean! organized: Boolean!
o_counter: Int o_counter: Int
path: String! path: String! @deprecated(reason: "Use files.path")
phash: String phash: String @deprecated(reason: "Use files.fingerprints")
interactive: Boolean! interactive: Boolean!
interactive_speed: Int interactive_speed: Int
captions: [SceneCaption!] captions: [VideoCaption!]
created_at: Time! created_at: Time!
updated_at: Time! updated_at: Time!
file_mod_time: Time file_mod_time: Time
file: SceneFileType! # Resolver file: SceneFileType! @deprecated(reason: "Use files")
files: [VideoFile!]!
paths: ScenePathsType! # Resolver paths: ScenePathsType! # Resolver
scene_markers: [SceneMarker!]! scene_markers: [SceneMarker!]!

View File

@@ -3,6 +3,7 @@ package api
import ( import (
"context" "context"
"database/sql" "database/sql"
"fmt"
"strconv" "strconv"
"github.com/99designs/gqlgen/graphql" "github.com/99designs/gqlgen/graphql"
@@ -89,6 +90,14 @@ func (t changesetTranslator) nullString(value *string, field string) *sql.NullSt
return ret return ret
} }
func (t changesetTranslator) optionalString(value *string, field string) models.OptionalString {
if !t.hasField(field) {
return models.OptionalString{}
}
return models.NewOptionalStringPtr(value)
}
func (t changesetTranslator) sqliteDate(value *string, field string) *models.SQLiteDate { func (t changesetTranslator) sqliteDate(value *string, field string) *models.SQLiteDate {
if !t.hasField(field) { if !t.hasField(field) {
return nil return nil
@@ -104,6 +113,21 @@ func (t changesetTranslator) sqliteDate(value *string, field string) *models.SQL
return ret return ret
} }
func (t changesetTranslator) optionalDate(value *string, field string) models.OptionalDate {
if !t.hasField(field) {
return models.OptionalDate{}
}
if value == nil {
return models.OptionalDate{
Set: true,
Null: true,
}
}
return models.NewOptionalDate(models.NewDate(*value))
}
func (t changesetTranslator) nullInt64(value *int, field string) *sql.NullInt64 { func (t changesetTranslator) nullInt64(value *int, field string) *sql.NullInt64 {
if !t.hasField(field) { if !t.hasField(field) {
return nil return nil
@@ -119,6 +143,14 @@ func (t changesetTranslator) nullInt64(value *int, field string) *sql.NullInt64
return ret return ret
} }
func (t changesetTranslator) optionalInt(value *int, field string) models.OptionalInt {
if !t.hasField(field) {
return models.OptionalInt{}
}
return models.NewOptionalIntPtr(value)
}
func (t changesetTranslator) nullInt64FromString(value *string, field string) *sql.NullInt64 { func (t changesetTranslator) nullInt64FromString(value *string, field string) *sql.NullInt64 {
if !t.hasField(field) { if !t.hasField(field) {
return nil return nil
@@ -134,6 +166,25 @@ func (t changesetTranslator) nullInt64FromString(value *string, field string) *s
return ret return ret
} }
func (t changesetTranslator) optionalIntFromString(value *string, field string) (models.OptionalInt, error) {
if !t.hasField(field) {
return models.OptionalInt{}, nil
}
if value == nil {
return models.OptionalInt{
Set: true,
Null: true,
}, nil
}
vv, err := strconv.Atoi(*value)
if err != nil {
return models.OptionalInt{}, fmt.Errorf("converting %v to int: %w", *value, err)
}
return models.NewOptionalInt(vv), nil
}
func (t changesetTranslator) nullBool(value *bool, field string) *sql.NullBool { func (t changesetTranslator) nullBool(value *bool, field string) *sql.NullBool {
if !t.hasField(field) { if !t.hasField(field) {
return nil return nil
@@ -148,3 +199,11 @@ func (t changesetTranslator) nullBool(value *bool, field string) *sql.NullBool {
return ret return ret
} }
func (t changesetTranslator) optionalBool(value *bool, field string) models.OptionalBool {
if !t.hasField(field) {
return models.OptionalBool{}
}
return models.NewOptionalBoolPtr(value)
}

View File

@@ -32,7 +32,10 @@ type hookExecutor interface {
type Resolver struct { type Resolver struct {
txnManager txn.Manager txnManager txn.Manager
repository models.Repository repository manager.Repository
sceneService manager.SceneService
imageService manager.ImageService
galleryService manager.GalleryService
hookExecutor hookExecutor hookExecutor hookExecutor
} }

View File

@@ -2,24 +2,91 @@ package api
import ( import (
"context" "context"
"strconv"
"time" "time"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
) )
func (r *galleryResolver) Path(ctx context.Context, obj *models.Gallery) (*string, error) { func (r *galleryResolver) Files(ctx context.Context, obj *models.Gallery) ([]*GalleryFile, error) {
if obj.Path.Valid { ret := make([]*GalleryFile, len(obj.Files))
return &obj.Path.String, nil
for i, f := range obj.Files {
base := f.Base()
ret[i] = &GalleryFile{
ID: strconv.Itoa(int(base.ID)),
Path: base.Path,
Basename: base.Basename,
ParentFolderID: strconv.Itoa(int(base.ParentFolderID)),
ModTime: base.ModTime,
Size: base.Size,
CreatedAt: base.CreatedAt,
UpdatedAt: base.UpdatedAt,
Fingerprints: resolveFingerprints(base),
} }
if base.ZipFileID != nil {
zipFileID := strconv.Itoa(int(*base.ZipFileID))
ret[i].ZipFileID = &zipFileID
}
}
return ret, nil
}
func (r *galleryResolver) Folder(ctx context.Context, obj *models.Gallery) (*Folder, error) {
if obj.FolderID == nil {
return nil, nil return nil, nil
} }
func (r *galleryResolver) Title(ctx context.Context, obj *models.Gallery) (*string, error) { var ret *file.Folder
if obj.Title.Valid {
return &obj.Title.String, nil if err := r.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = r.repository.Folder.Find(ctx, *obj.FolderID)
if err != nil {
return err
} }
return err
}); err != nil {
return nil, err
}
if ret == nil {
return nil, nil
}
rr := &Folder{
ID: ret.ID.String(),
Path: ret.Path,
ModTime: ret.ModTime,
CreatedAt: ret.CreatedAt,
UpdatedAt: ret.UpdatedAt,
}
if ret.ParentFolderID != nil {
pfidStr := ret.ParentFolderID.String()
rr.ParentFolderID = &pfidStr
}
if ret.ZipFileID != nil {
zfidStr := ret.ZipFileID.String()
rr.ZipFileID = &zfidStr
}
return rr, nil
}
func (r *galleryResolver) FileModTime(ctx context.Context, obj *models.Gallery) (*time.Time, error) {
f := obj.PrimaryFile()
if f != nil {
return &f.Base().ModTime, nil
}
return nil, nil return nil, nil
} }
@@ -70,35 +137,13 @@ func (r *galleryResolver) Cover(ctx context.Context, obj *models.Gallery) (ret *
} }
func (r *galleryResolver) Date(ctx context.Context, obj *models.Gallery) (*string, error) { func (r *galleryResolver) Date(ctx context.Context, obj *models.Gallery) (*string, error) {
if obj.Date.Valid { if obj.Date != nil {
result := utils.GetYMDFromDatabaseDate(obj.Date.String) result := obj.Date.String()
return &result, nil return &result, nil
} }
return nil, nil return nil, nil
} }
func (r *galleryResolver) URL(ctx context.Context, obj *models.Gallery) (*string, error) {
if obj.URL.Valid {
return &obj.URL.String, nil
}
return nil, nil
}
func (r *galleryResolver) Details(ctx context.Context, obj *models.Gallery) (*string, error) {
if obj.Details.Valid {
return &obj.Details.String, nil
}
return nil, nil
}
func (r *galleryResolver) Rating(ctx context.Context, obj *models.Gallery) (*int, error) {
if obj.Rating.Valid {
rating := int(obj.Rating.Int64)
return &rating, nil
}
return nil, nil
}
func (r *galleryResolver) Scenes(ctx context.Context, obj *models.Gallery) (ret []*models.Scene, err error) { func (r *galleryResolver) Scenes(ctx context.Context, obj *models.Gallery) (ret []*models.Scene, err error) {
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
var err error var err error
@@ -112,13 +157,13 @@ func (r *galleryResolver) Scenes(ctx context.Context, obj *models.Gallery) (ret
} }
func (r *galleryResolver) Studio(ctx context.Context, obj *models.Gallery) (ret *models.Studio, err error) { func (r *galleryResolver) Studio(ctx context.Context, obj *models.Gallery) (ret *models.Studio, err error) {
if !obj.StudioID.Valid { if obj.StudioID == nil {
return nil, nil return nil, nil
} }
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
var err error var err error
ret, err = r.repository.Studio.Find(ctx, int(obj.StudioID.Int64)) ret, err = r.repository.Studio.Find(ctx, *obj.StudioID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -162,15 +207,3 @@ func (r *galleryResolver) ImageCount(ctx context.Context, obj *models.Gallery) (
return ret, nil return ret, nil
} }
func (r *galleryResolver) CreatedAt(ctx context.Context, obj *models.Gallery) (*time.Time, error) {
return &obj.CreatedAt.Timestamp, nil
}
func (r *galleryResolver) UpdatedAt(ctx context.Context, obj *models.Gallery) (*time.Time, error) {
return &obj.UpdatedAt.Timestamp, nil
}
func (r *galleryResolver) FileModTime(ctx context.Context, obj *models.Gallery) (*time.Time, error) {
return &obj.FileModTime.Timestamp, nil
}

View File

@@ -2,35 +2,64 @@ package api
import ( import (
"context" "context"
"strconv"
"time" "time"
"github.com/stashapp/stash/internal/api/urlbuilders" "github.com/stashapp/stash/internal/api/urlbuilders"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
) )
func (r *imageResolver) Title(ctx context.Context, obj *models.Image) (*string, error) { func (r *imageResolver) Title(ctx context.Context, obj *models.Image) (*string, error) {
ret := image.GetTitle(obj) ret := obj.GetTitle()
return &ret, nil return &ret, nil
} }
func (r *imageResolver) Rating(ctx context.Context, obj *models.Image) (*int, error) { func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*ImageFileType, error) {
if obj.Rating.Valid { f := obj.PrimaryFile()
rating := int(obj.Rating.Int64) width := f.Width
return &rating, nil height := f.Height
} size := f.Size
return nil, nil return &ImageFileType{
Size: int(size),
Width: width,
Height: height,
}, nil
} }
func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*models.ImageFileType, error) { func (r *imageResolver) Files(ctx context.Context, obj *models.Image) ([]*ImageFile, error) {
width := int(obj.Width.Int64) ret := make([]*ImageFile, len(obj.Files))
height := int(obj.Height.Int64)
size := int(obj.Size.Int64) for i, f := range obj.Files {
return &models.ImageFileType{ ret[i] = &ImageFile{
Size: &size, ID: strconv.Itoa(int(f.ID)),
Width: &width, Path: f.Path,
Height: &height, Basename: f.Basename,
}, nil ParentFolderID: strconv.Itoa(int(f.ParentFolderID)),
ModTime: f.ModTime,
Size: f.Size,
Width: f.Width,
Height: f.Height,
CreatedAt: f.CreatedAt,
UpdatedAt: f.UpdatedAt,
Fingerprints: resolveFingerprints(f.Base()),
}
if f.ZipFileID != nil {
zipFileID := strconv.Itoa(int(*f.ZipFileID))
ret[i].ZipFileID = &zipFileID
}
}
return ret, nil
}
func (r *imageResolver) FileModTime(ctx context.Context, obj *models.Image) (*time.Time, error) {
f := obj.PrimaryFile()
if f != nil {
return &f.ModTime, nil
}
return nil, nil
} }
func (r *imageResolver) Paths(ctx context.Context, obj *models.Image) (*ImagePathsType, error) { func (r *imageResolver) Paths(ctx context.Context, obj *models.Image) (*ImagePathsType, error) {
@@ -47,7 +76,7 @@ func (r *imageResolver) Paths(ctx context.Context, obj *models.Image) (*ImagePat
func (r *imageResolver) Galleries(ctx context.Context, obj *models.Image) (ret []*models.Gallery, err error) { func (r *imageResolver) Galleries(ctx context.Context, obj *models.Image) (ret []*models.Gallery, err error) {
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
var err error var err error
ret, err = r.repository.Gallery.FindByImageID(ctx, obj.ID) ret, err = r.repository.Gallery.FindMany(ctx, obj.GalleryIDs)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -57,12 +86,12 @@ func (r *imageResolver) Galleries(ctx context.Context, obj *models.Image) (ret [
} }
func (r *imageResolver) Studio(ctx context.Context, obj *models.Image) (ret *models.Studio, err error) { func (r *imageResolver) Studio(ctx context.Context, obj *models.Image) (ret *models.Studio, err error) {
if !obj.StudioID.Valid { if obj.StudioID == nil {
return nil, nil return nil, nil
} }
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.Studio.Find(ctx, int(obj.StudioID.Int64)) ret, err = r.repository.Studio.Find(ctx, *obj.StudioID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -73,7 +102,7 @@ func (r *imageResolver) Studio(ctx context.Context, obj *models.Image) (ret *mod
func (r *imageResolver) Tags(ctx context.Context, obj *models.Image) (ret []*models.Tag, err error) { func (r *imageResolver) Tags(ctx context.Context, obj *models.Image) (ret []*models.Tag, err error) {
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.Tag.FindByImageID(ctx, obj.ID) ret, err = r.repository.Tag.FindMany(ctx, obj.TagIDs)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -84,7 +113,7 @@ func (r *imageResolver) Tags(ctx context.Context, obj *models.Image) (ret []*mod
func (r *imageResolver) Performers(ctx context.Context, obj *models.Image) (ret []*models.Performer, err error) { func (r *imageResolver) Performers(ctx context.Context, obj *models.Image) (ret []*models.Performer, err error) {
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.Performer.FindByImageID(ctx, obj.ID) ret, err = r.repository.Performer.FindMany(ctx, obj.PerformerIDs)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -92,15 +121,3 @@ func (r *imageResolver) Performers(ctx context.Context, obj *models.Image) (ret
return ret, nil return ret, nil
} }
func (r *imageResolver) CreatedAt(ctx context.Context, obj *models.Image) (*time.Time, error) {
return &obj.CreatedAt.Timestamp, nil
}
func (r *imageResolver) UpdatedAt(ctx context.Context, obj *models.Image) (*time.Time, error) {
return &obj.UpdatedAt.Timestamp, nil
}
func (r *imageResolver) FileModTime(ctx context.Context, obj *models.Image) (*time.Time, error) {
return &obj.FileModTime.Timestamp, nil
}

View File

@@ -2,95 +2,115 @@ package api
import ( import (
"context" "context"
"fmt"
"strconv"
"time" "time"
"github.com/stashapp/stash/internal/api/urlbuilders" "github.com/stashapp/stash/internal/api/urlbuilders"
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
) )
func (r *sceneResolver) Checksum(ctx context.Context, obj *models.Scene) (*string, error) { func (r *sceneResolver) FileModTime(ctx context.Context, obj *models.Scene) (*time.Time, error) {
if obj.Checksum.Valid { if obj.PrimaryFile() != nil {
return &obj.Checksum.String, nil return &obj.PrimaryFile().ModTime, nil
}
return nil, nil
}
func (r *sceneResolver) Oshash(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.OSHash.Valid {
return &obj.OSHash.String, nil
}
return nil, nil
}
func (r *sceneResolver) Title(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.Title.Valid {
return &obj.Title.String, nil
}
return nil, nil
}
func (r *sceneResolver) Details(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.Details.Valid {
return &obj.Details.String, nil
}
return nil, nil
}
func (r *sceneResolver) URL(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.URL.Valid {
return &obj.URL.String, nil
} }
return nil, nil return nil, nil
} }
func (r *sceneResolver) Date(ctx context.Context, obj *models.Scene) (*string, error) { func (r *sceneResolver) Date(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.Date.Valid { if obj.Date != nil {
result := utils.GetYMDFromDatabaseDate(obj.Date.String) result := obj.Date.String()
return &result, nil return &result, nil
} }
return nil, nil return nil, nil
} }
func (r *sceneResolver) Rating(ctx context.Context, obj *models.Scene) (*int, error) { // File is deprecated
if obj.Rating.Valid {
rating := int(obj.Rating.Int64)
return &rating, nil
}
return nil, nil
}
func (r *sceneResolver) InteractiveSpeed(ctx context.Context, obj *models.Scene) (*int, error) {
if obj.InteractiveSpeed.Valid {
interactive_speed := int(obj.InteractiveSpeed.Int64)
return &interactive_speed, nil
}
return nil, nil
}
func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.SceneFileType, error) { func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.SceneFileType, error) {
width := int(obj.Width.Int64) f := obj.PrimaryFile()
height := int(obj.Height.Int64) if f == nil {
bitrate := int(obj.Bitrate.Int64) return nil, nil
}
bitrate := int(f.BitRate)
size := strconv.FormatInt(f.Size, 10)
return &models.SceneFileType{ return &models.SceneFileType{
Size: &obj.Size.String, Size: &size,
Duration: handleFloat64(obj.Duration.Float64), Duration: handleFloat64(f.Duration),
VideoCodec: &obj.VideoCodec.String, VideoCodec: &f.VideoCodec,
AudioCodec: &obj.AudioCodec.String, AudioCodec: &f.AudioCodec,
Width: &width, Width: &f.Width,
Height: &height, Height: &f.Height,
Framerate: handleFloat64(obj.Framerate.Float64), Framerate: handleFloat64(f.FrameRate),
Bitrate: &bitrate, Bitrate: &bitrate,
}, nil }, nil
} }
func (r *sceneResolver) Files(ctx context.Context, obj *models.Scene) ([]*VideoFile, error) {
ret := make([]*VideoFile, len(obj.Files))
for i, f := range obj.Files {
ret[i] = &VideoFile{
ID: strconv.Itoa(int(f.ID)),
Path: f.Path,
Basename: f.Basename,
ParentFolderID: strconv.Itoa(int(f.ParentFolderID)),
ModTime: f.ModTime,
Format: f.Format,
Size: f.Size,
Duration: handleFloat64Value(f.Duration),
VideoCodec: f.VideoCodec,
AudioCodec: f.AudioCodec,
Width: f.Width,
Height: f.Height,
FrameRate: handleFloat64Value(f.FrameRate),
BitRate: int(f.BitRate),
CreatedAt: f.CreatedAt,
UpdatedAt: f.UpdatedAt,
Fingerprints: resolveFingerprints(f.Base()),
}
if f.ZipFileID != nil {
zipFileID := strconv.Itoa(int(*f.ZipFileID))
ret[i].ZipFileID = &zipFileID
}
}
return ret, nil
}
func resolveFingerprints(f *file.BaseFile) []*Fingerprint {
ret := make([]*Fingerprint, len(f.Fingerprints))
for i, fp := range f.Fingerprints {
ret[i] = &Fingerprint{
Type: fp.Type,
Value: formatFingerprint(fp.Fingerprint),
}
}
return ret
}
func formatFingerprint(fp interface{}) string {
switch v := fp.(type) {
case int64:
return strconv.FormatUint(uint64(v), 16)
default:
return fmt.Sprintf("%v", fp)
}
}
func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*ScenePathsType, error) { func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*ScenePathsType, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string) baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
config := manager.GetInstance().Config config := manager.GetInstance().Config
builder := urlbuilders.NewSceneURLBuilder(baseURL, obj.ID) builder := urlbuilders.NewSceneURLBuilder(baseURL, obj.ID)
builder.APIKey = config.GetAPIKey() builder.APIKey = config.GetAPIKey()
screenshotPath := builder.GetScreenshotURL(obj.UpdatedAt.Timestamp) screenshotPath := builder.GetScreenshotURL(obj.UpdatedAt)
previewPath := builder.GetStreamPreviewURL() previewPath := builder.GetStreamPreviewURL()
streamPath := builder.GetStreamURL() streamPath := builder.GetStreamURL()
webpPath := builder.GetStreamPreviewImageURL() webpPath := builder.GetStreamPreviewImageURL()
@@ -126,9 +146,14 @@ func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) (re
return ret, nil return ret, nil
} }
func (r *sceneResolver) Captions(ctx context.Context, obj *models.Scene) (ret []*models.SceneCaption, err error) { func (r *sceneResolver) Captions(ctx context.Context, obj *models.Scene) (ret []*models.VideoCaption, err error) {
primaryFile := obj.PrimaryFile()
if primaryFile == nil {
return nil, nil
}
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.Scene.GetCaptions(ctx, obj.ID) ret, err = r.repository.File.GetCaptions(ctx, primaryFile.Base().ID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -149,12 +174,12 @@ func (r *sceneResolver) Galleries(ctx context.Context, obj *models.Scene) (ret [
} }
func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (ret *models.Studio, err error) { func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (ret *models.Studio, err error) {
if !obj.StudioID.Valid { if obj.StudioID == nil {
return nil, nil return nil, nil
} }
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.Studio.Find(ctx, int(obj.StudioID.Int64)) ret, err = r.repository.Studio.Find(ctx, *obj.StudioID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@@ -165,15 +190,9 @@ func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (ret *mod
func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*SceneMovie, err error) { func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*SceneMovie, err error) {
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene
mqb := r.repository.Movie mqb := r.repository.Movie
sceneMovies, err := qb.GetMovies(ctx, obj.ID) for _, sm := range obj.Movies {
if err != nil {
return err
}
for _, sm := range sceneMovies {
movie, err := mqb.Find(ctx, sm.MovieID) movie, err := mqb.Find(ctx, sm.MovieID)
if err != nil { if err != nil {
return err return err
@@ -182,11 +201,7 @@ func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*S
sceneIdx := sm.SceneIndex sceneIdx := sm.SceneIndex
sceneMovie := &SceneMovie{ sceneMovie := &SceneMovie{
Movie: movie, Movie: movie,
} SceneIndex: sceneIdx,
if sceneIdx.Valid {
idx := int(sceneIdx.Int64)
sceneMovie.SceneIndex = &idx
} }
ret = append(ret, sceneMovie) ret = append(ret, sceneMovie)
@@ -221,37 +236,15 @@ func (r *sceneResolver) Performers(ctx context.Context, obj *models.Scene) (ret
return ret, nil return ret, nil
} }
func (r *sceneResolver) StashIds(ctx context.Context, obj *models.Scene) (ret []*models.StashID, err error) {
if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.Scene.GetStashIDs(ctx, obj.ID)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string, error) { func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.Phash.Valid { phash := obj.Phash()
hexval := utils.PhashToString(obj.Phash.Int64) if phash != 0 {
hexval := utils.PhashToString(phash)
return &hexval, nil return &hexval, nil
} }
return nil, nil return nil, nil
} }
func (r *sceneResolver) CreatedAt(ctx context.Context, obj *models.Scene) (*time.Time, error) {
return &obj.CreatedAt.Timestamp, nil
}
func (r *sceneResolver) UpdatedAt(ctx context.Context, obj *models.Scene) (*time.Time, error) {
return &obj.UpdatedAt.Timestamp, nil
}
func (r *sceneResolver) FileModTime(ctx context.Context, obj *models.Scene) (*time.Time, error) {
return &obj.FileModTime.Timestamp, nil
}
func (r *sceneResolver) SceneStreams(ctx context.Context, obj *models.Scene) ([]*manager.SceneStreamEndpoint, error) { func (r *sceneResolver) SceneStreams(ctx context.Context, obj *models.Scene) ([]*manager.SceneStreamEndpoint, error) {
config := manager.GetInstance().Config config := manager.GetInstance().Config
@@ -260,3 +253,21 @@ func (r *sceneResolver) SceneStreams(ctx context.Context, obj *models.Scene) ([]
return manager.GetSceneStreamPaths(obj, builder.GetStreamURL(), config.GetMaxStreamingTranscodeSize()) return manager.GetSceneStreamPaths(obj, builder.GetStreamURL(), config.GetMaxStreamingTranscodeSize())
} }
func (r *sceneResolver) Interactive(ctx context.Context, obj *models.Scene) (bool, error) {
primaryFile := obj.PrimaryFile()
if primaryFile == nil {
return false, nil
}
return primaryFile.Interactive, nil
}
func (r *sceneResolver) InteractiveSpeed(ctx context.Context, obj *models.Scene) (*int, error) {
primaryFile := obj.PrimaryFile()
if primaryFile == nil {
return nil, nil
}
return primaryFile.InteractiveSpeed, nil
}

View File

@@ -2,7 +2,6 @@ package api
import ( import (
"context" "context"
"database/sql"
"errors" "errors"
"fmt" "fmt"
"os" "os"
@@ -11,8 +10,6 @@ import (
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin" "github.com/stashapp/stash/pkg/plugin"
@@ -38,69 +35,49 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat
return nil, errors.New("title must not be empty") return nil, errors.New("title must not be empty")
} }
// for manually created galleries, generate checksum from title
checksum := md5.FromString(input.Title)
// Populate a new performer from the input // Populate a new performer from the input
currentTime := time.Now() currentTime := time.Now()
newGallery := models.Gallery{ newGallery := models.Gallery{
Title: sql.NullString{ Title: input.Title,
String: input.Title, CreatedAt: currentTime,
Valid: true, UpdatedAt: currentTime,
},
Checksum: checksum,
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
} }
if input.URL != nil { if input.URL != nil {
newGallery.URL = sql.NullString{String: *input.URL, Valid: true} newGallery.URL = *input.URL
} }
if input.Details != nil { if input.Details != nil {
newGallery.Details = sql.NullString{String: *input.Details, Valid: true} newGallery.Details = *input.Details
}
if input.URL != nil {
newGallery.URL = sql.NullString{String: *input.URL, Valid: true}
}
if input.Date != nil {
newGallery.Date = models.SQLiteDate{String: *input.Date, Valid: true}
}
if input.Rating != nil {
newGallery.Rating = sql.NullInt64{Int64: int64(*input.Rating), Valid: true}
} else {
// rating must be nullable
newGallery.Rating = sql.NullInt64{Valid: false}
} }
if input.Date != nil {
d := models.NewDate(*input.Date)
newGallery.Date = &d
}
newGallery.Rating = input.Rating
if input.StudioID != nil { if input.StudioID != nil {
studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64) studioID, _ := strconv.Atoi(*input.StudioID)
newGallery.StudioID = sql.NullInt64{Int64: studioID, Valid: true} newGallery.StudioID = &studioID
} else { }
// studio must be nullable
newGallery.StudioID = sql.NullInt64{Valid: false} var err error
newGallery.PerformerIDs, err = stringslice.StringSliceToIntSlice(input.PerformerIds)
if err != nil {
return nil, fmt.Errorf("converting performer ids: %w", err)
}
newGallery.TagIDs, err = stringslice.StringSliceToIntSlice(input.TagIds)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
newGallery.SceneIDs, err = stringslice.StringSliceToIntSlice(input.SceneIds)
if err != nil {
return nil, fmt.Errorf("converting scene ids: %w", err)
} }
// Start the transaction and save the gallery // Start the transaction and save the gallery
var gallery *models.Gallery
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Gallery qb := r.repository.Gallery
var err error if err := qb.Create(ctx, &newGallery, nil); err != nil {
gallery, err = qb.Create(ctx, newGallery)
if err != nil {
return err
}
// Save the performers
if err := r.updateGalleryPerformers(ctx, qb, gallery.ID, input.PerformerIds); err != nil {
return err
}
// Save the tags
if err := r.updateGalleryTags(ctx, qb, gallery.ID, input.TagIds); err != nil {
return err
}
// Save the scenes
if err := r.updateGalleryScenes(ctx, qb, gallery.ID, input.SceneIds); err != nil {
return err return err
} }
@@ -109,38 +86,14 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat
return nil, err return nil, err
} }
r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryCreatePost, input, nil) r.hookExecutor.ExecutePostHooks(ctx, newGallery.ID, plugin.GalleryCreatePost, input, nil)
return r.getGallery(ctx, gallery.ID) return r.getGallery(ctx, newGallery.ID)
}
func (r *mutationResolver) updateGalleryPerformers(ctx context.Context, qb gallery.PerformerUpdater, galleryID int, performerIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(performerIDs)
if err != nil {
return err
}
return qb.UpdatePerformers(ctx, galleryID, ids)
}
func (r *mutationResolver) updateGalleryTags(ctx context.Context, qb gallery.TagUpdater, galleryID int, tagIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(tagIDs)
if err != nil {
return err
}
return qb.UpdateTags(ctx, galleryID, ids)
} }
type GallerySceneUpdater interface { type GallerySceneUpdater interface {
UpdateScenes(ctx context.Context, galleryID int, sceneIDs []int) error UpdateScenes(ctx context.Context, galleryID int, sceneIDs []int) error
} }
func (r *mutationResolver) updateGalleryScenes(ctx context.Context, qb GallerySceneUpdater, galleryID int, sceneIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(sceneIDs)
if err != nil {
return err
}
return qb.UpdateScenes(ctx, galleryID, ids)
}
func (r *mutationResolver) GalleryUpdate(ctx context.Context, input models.GalleryUpdateInput) (ret *models.Gallery, err error) { func (r *mutationResolver) GalleryUpdate(ctx context.Context, input models.GalleryUpdateInput) (ret *models.Gallery, err error) {
translator := changesetTranslator{ translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx), inputMap: getUpdateInputMap(ctx),
@@ -219,11 +172,7 @@ func (r *mutationResolver) galleryUpdate(ctx context.Context, input models.Galle
return nil, errors.New("not found") return nil, errors.New("not found")
} }
updatedTime := time.Now() updatedGallery := models.NewGalleryPartial()
updatedGallery := models.GalleryPartial{
ID: galleryID,
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
}
if input.Title != nil { if input.Title != nil {
// ensure title is not empty // ensure title is not empty
@@ -231,71 +180,90 @@ func (r *mutationResolver) galleryUpdate(ctx context.Context, input models.Galle
return nil, errors.New("title must not be empty") return nil, errors.New("title must not be empty")
} }
// if gallery is not zip-based, then generate the checksum from the title updatedGallery.Title = models.NewOptionalString(*input.Title)
if !originalGallery.Path.Valid {
checksum := md5.FromString(*input.Title)
updatedGallery.Checksum = &checksum
} }
updatedGallery.Title = &sql.NullString{String: *input.Title, Valid: true} updatedGallery.Details = translator.optionalString(input.Details, "details")
updatedGallery.URL = translator.optionalString(input.URL, "url")
updatedGallery.Date = translator.optionalDate(input.Date, "date")
updatedGallery.Rating = translator.optionalInt(input.Rating, "rating")
updatedGallery.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
}
updatedGallery.Organized = translator.optionalBool(input.Organized, "organized")
if translator.hasField("performer_ids") {
updatedGallery.PerformerIDs, err = translateUpdateIDs(input.PerformerIds, models.RelationshipUpdateModeSet)
if err != nil {
return nil, fmt.Errorf("converting performer ids: %w", err)
}
} }
updatedGallery.Details = translator.nullString(input.Details, "details") if translator.hasField("tag_ids") {
updatedGallery.URL = translator.nullString(input.URL, "url") updatedGallery.TagIDs, err = translateUpdateIDs(input.TagIds, models.RelationshipUpdateModeSet)
updatedGallery.Date = translator.sqliteDate(input.Date, "date") if err != nil {
updatedGallery.Rating = translator.nullInt64(input.Rating, "rating") return nil, fmt.Errorf("converting tag ids: %w", err)
updatedGallery.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id") }
updatedGallery.Organized = input.Organized }
if translator.hasField("scene_ids") {
updatedGallery.SceneIDs, err = translateUpdateIDs(input.SceneIds, models.RelationshipUpdateModeSet)
if err != nil {
return nil, fmt.Errorf("converting scene ids: %w", err)
}
}
// gallery scene is set from the scene only // gallery scene is set from the scene only
gallery, err := qb.UpdatePartial(ctx, updatedGallery) gallery, err := qb.UpdatePartial(ctx, galleryID, updatedGallery)
if err != nil { if err != nil {
return nil, err return nil, err
} }
// Save the performers
if translator.hasField("performer_ids") {
if err := r.updateGalleryPerformers(ctx, qb, galleryID, input.PerformerIds); err != nil {
return nil, err
}
}
// Save the tags
if translator.hasField("tag_ids") {
if err := r.updateGalleryTags(ctx, qb, galleryID, input.TagIds); err != nil {
return nil, err
}
}
// Save the scenes
if translator.hasField("scene_ids") {
if err := r.updateGalleryScenes(ctx, qb, galleryID, input.SceneIds); err != nil {
return nil, err
}
}
return gallery, nil return gallery, nil
} }
func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input BulkGalleryUpdateInput) ([]*models.Gallery, error) { func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input BulkGalleryUpdateInput) ([]*models.Gallery, error) {
// Populate gallery from the input // Populate gallery from the input
updatedTime := time.Now()
translator := changesetTranslator{ translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx), inputMap: getUpdateInputMap(ctx),
} }
updatedGallery := models.GalleryPartial{ updatedGallery := models.NewGalleryPartial()
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
updatedGallery.Details = translator.optionalString(input.Details, "details")
updatedGallery.URL = translator.optionalString(input.URL, "url")
updatedGallery.Date = translator.optionalDate(input.Date, "date")
updatedGallery.Rating = translator.optionalInt(input.Rating, "rating")
var err error
updatedGallery.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
}
updatedGallery.Organized = translator.optionalBool(input.Organized, "organized")
if translator.hasField("performer_ids") {
updatedGallery.PerformerIDs, err = translateUpdateIDs(input.PerformerIds.Ids, input.PerformerIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting performer ids: %w", err)
}
} }
updatedGallery.Details = translator.nullString(input.Details, "details") if translator.hasField("tag_ids") {
updatedGallery.URL = translator.nullString(input.URL, "url") updatedGallery.TagIDs, err = translateUpdateIDs(input.TagIds.Ids, input.TagIds.Mode)
updatedGallery.Date = translator.sqliteDate(input.Date, "date") if err != nil {
updatedGallery.Rating = translator.nullInt64(input.Rating, "rating") return nil, fmt.Errorf("converting tag ids: %w", err)
updatedGallery.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id") }
updatedGallery.Organized = input.Organized }
if translator.hasField("scene_ids") {
updatedGallery.SceneIDs, err = translateUpdateIDs(input.SceneIds.Ids, input.SceneIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting scene ids: %w", err)
}
}
ret := []*models.Gallery{} ret := []*models.Gallery{}
@@ -305,50 +273,13 @@ func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input BulkGall
for _, galleryIDStr := range input.Ids { for _, galleryIDStr := range input.Ids {
galleryID, _ := strconv.Atoi(galleryIDStr) galleryID, _ := strconv.Atoi(galleryIDStr)
updatedGallery.ID = galleryID
gallery, err := qb.UpdatePartial(ctx, updatedGallery) gallery, err := qb.UpdatePartial(ctx, galleryID, updatedGallery)
if err != nil { if err != nil {
return err return err
} }
ret = append(ret, gallery) ret = append(ret, gallery)
// Save the performers
if translator.hasField("performer_ids") {
performerIDs, err := adjustGalleryPerformerIDs(ctx, qb, galleryID, *input.PerformerIds)
if err != nil {
return err
}
if err := qb.UpdatePerformers(ctx, galleryID, performerIDs); err != nil {
return err
}
}
// Save the tags
if translator.hasField("tag_ids") {
tagIDs, err := adjustGalleryTagIDs(ctx, qb, galleryID, *input.TagIds)
if err != nil {
return err
}
if err := qb.UpdateTags(ctx, galleryID, tagIDs); err != nil {
return err
}
}
// Save the scenes
if translator.hasField("scene_ids") {
sceneIDs, err := adjustGallerySceneIDs(ctx, qb, galleryID, *input.SceneIds)
if err != nil {
return err
}
if err := qb.UpdateScenes(ctx, galleryID, sceneIDs); err != nil {
return err
}
}
} }
return nil return nil
@@ -372,45 +303,10 @@ func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input BulkGall
return newRet, nil return newRet, nil
} }
type GalleryPerformerGetter interface {
GetPerformerIDs(ctx context.Context, galleryID int) ([]int, error)
}
type GalleryTagGetter interface {
GetTagIDs(ctx context.Context, galleryID int) ([]int, error)
}
type GallerySceneGetter interface { type GallerySceneGetter interface {
GetSceneIDs(ctx context.Context, galleryID int) ([]int, error) GetSceneIDs(ctx context.Context, galleryID int) ([]int, error)
} }
func adjustGalleryPerformerIDs(ctx context.Context, qb GalleryPerformerGetter, galleryID int, ids BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetPerformerIDs(ctx, galleryID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func adjustGalleryTagIDs(ctx context.Context, qb GalleryTagGetter, galleryID int, ids BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetTagIDs(ctx, galleryID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func adjustGallerySceneIDs(ctx context.Context, qb GallerySceneGetter, galleryID int, ids BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetSceneIDs(ctx, galleryID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.GalleryDestroyInput) (bool, error) { func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.GalleryDestroyInput) (bool, error) {
galleryIDs, err := stringslice.StringSliceToIntSlice(input.Ids) galleryIDs, err := stringslice.StringSliceToIntSlice(input.Ids)
if err != nil { if err != nil {
@@ -420,7 +316,7 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
var galleries []*models.Gallery var galleries []*models.Gallery
var imgsDestroyed []*models.Image var imgsDestroyed []*models.Image
fileDeleter := &image.FileDeleter{ fileDeleter := &image.FileDeleter{
Deleter: *file.NewDeleter(), Deleter: file.NewDeleter(),
Paths: manager.GetInstance().Paths, Paths: manager.GetInstance().Paths,
} }
@@ -429,7 +325,6 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Gallery qb := r.repository.Gallery
iqb := r.repository.Image
for _, id := range galleryIDs { for _, id := range galleryIDs {
gallery, err := qb.Find(ctx, id) gallery, err := qb.Find(ctx, id)
@@ -443,55 +338,10 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
galleries = append(galleries, gallery) galleries = append(galleries, gallery)
// if this is a zip-based gallery, delete the images as well first imgsDestroyed, err = r.galleryService.Destroy(ctx, gallery, fileDeleter, deleteGenerated, deleteFile)
if gallery.Zip {
imgs, err := iqb.FindByGalleryID(ctx, id)
if err != nil { if err != nil {
return err return err
} }
for _, img := range imgs {
if err := image.Destroy(ctx, img, iqb, fileDeleter, deleteGenerated, false); err != nil {
return err
}
imgsDestroyed = append(imgsDestroyed, img)
}
if deleteFile {
if err := fileDeleter.Files([]string{gallery.Path.String}); err != nil {
return err
}
}
} else if deleteFile {
// Delete image if it is only attached to this gallery
imgs, err := iqb.FindByGalleryID(ctx, id)
if err != nil {
return err
}
for _, img := range imgs {
imgGalleries, err := qb.FindByImageID(ctx, img.ID)
if err != nil {
return err
}
if len(imgGalleries) == 1 {
if err := image.Destroy(ctx, img, iqb, fileDeleter, deleteGenerated, deleteFile); err != nil {
return err
}
imgsDestroyed = append(imgsDestroyed, img)
}
}
// we only want to delete a folder-based gallery if it is empty.
// don't do this with the file deleter
}
if err := qb.Destroy(ctx, id); err != nil {
return err
}
} }
return nil return nil
@@ -505,10 +355,11 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
for _, gallery := range galleries { for _, gallery := range galleries {
// don't delete stash library paths // don't delete stash library paths
if utils.IsTrue(input.DeleteFile) && !gallery.Zip && gallery.Path.Valid && !isStashPath(gallery.Path.String) { path := gallery.Path()
if deleteFile && path != "" && !isStashPath(path) {
// try to remove the folder - it is possible that it is not empty // try to remove the folder - it is possible that it is not empty
// so swallow the error if present // so swallow the error if present
_ = os.Remove(gallery.Path.String) _ = os.Remove(path)
} }
} }
@@ -516,16 +367,16 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
for _, gallery := range galleries { for _, gallery := range galleries {
r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{ r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
GalleryDestroyInput: input, GalleryDestroyInput: input,
Checksum: gallery.Checksum, Checksum: gallery.Checksum(),
Path: gallery.Path.String, Path: gallery.Path(),
}, nil) }, nil)
} }
// call image destroy post hook as well // call image destroy post hook as well
for _, img := range imgsDestroyed { for _, img := range imgsDestroyed {
r.hookExecutor.ExecutePostHooks(ctx, img.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{ r.hookExecutor.ExecutePostHooks(ctx, img.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
Checksum: img.Checksum, Checksum: img.Checksum(),
Path: img.Path, Path: img.Path(),
}, nil) }, nil)
} }
@@ -565,10 +416,6 @@ func (r *mutationResolver) AddGalleryImages(ctx context.Context, input GalleryAd
return errors.New("gallery not found") return errors.New("gallery not found")
} }
if gallery.Zip {
return errors.New("cannot modify zip gallery images")
}
newIDs, err := qb.GetImageIDs(ctx, galleryID) newIDs, err := qb.GetImageIDs(ctx, galleryID)
if err != nil { if err != nil {
return err return err
@@ -605,10 +452,6 @@ func (r *mutationResolver) RemoveGalleryImages(ctx context.Context, input Galler
return errors.New("gallery not found") return errors.New("gallery not found")
} }
if gallery.Zip {
return errors.New("cannot modify zip gallery images")
}
newIDs, err := qb.GetImageIDs(ctx, galleryID) newIDs, err := qb.GetImageIDs(ctx, galleryID)
if err != nil { if err != nil {
return err return err

View File

@@ -4,7 +4,6 @@ import (
"context" "context"
"fmt" "fmt"
"strconv" "strconv"
"time"
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file"
@@ -93,70 +92,45 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp
return nil, err return nil, err
} }
updatedTime := time.Now() updatedImage := models.NewImagePartial()
updatedImage := models.ImagePartial{ updatedImage.Title = translator.optionalString(input.Title, "title")
ID: imageID, updatedImage.Rating = translator.optionalInt(input.Rating, "rating")
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime}, updatedImage.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
}
updatedImage.Title = translator.nullString(input.Title, "title")
updatedImage.Rating = translator.nullInt64(input.Rating, "rating")
updatedImage.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id")
updatedImage.Organized = input.Organized
qb := r.repository.Image
image, err := qb.Update(ctx, updatedImage)
if err != nil { if err != nil {
return nil, err return nil, fmt.Errorf("converting studio id: %w", err)
} }
updatedImage.Organized = translator.optionalBool(input.Organized, "organized")
if translator.hasField("gallery_ids") { if translator.hasField("gallery_ids") {
if err := r.updateImageGalleries(ctx, imageID, input.GalleryIds); err != nil { updatedImage.GalleryIDs, err = translateUpdateIDs(input.GalleryIds, models.RelationshipUpdateModeSet)
return nil, err if err != nil {
return nil, fmt.Errorf("converting gallery ids: %w", err)
} }
} }
// Save the performers
if translator.hasField("performer_ids") { if translator.hasField("performer_ids") {
if err := r.updateImagePerformers(ctx, imageID, input.PerformerIds); err != nil { updatedImage.PerformerIDs, err = translateUpdateIDs(input.PerformerIds, models.RelationshipUpdateModeSet)
return nil, err if err != nil {
return nil, fmt.Errorf("converting performer ids: %w", err)
} }
} }
// Save the tags
if translator.hasField("tag_ids") { if translator.hasField("tag_ids") {
if err := r.updateImageTags(ctx, imageID, input.TagIds); err != nil { updatedImage.TagIDs, err = translateUpdateIDs(input.TagIds, models.RelationshipUpdateModeSet)
return nil, err if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
} }
} }
qb := r.repository.Image
image, err := qb.UpdatePartial(ctx, imageID, updatedImage)
if err != nil {
return nil, err
}
return image, nil return image, nil
} }
func (r *mutationResolver) updateImageGalleries(ctx context.Context, imageID int, galleryIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(galleryIDs)
if err != nil {
return err
}
return r.repository.Image.UpdateGalleries(ctx, imageID, ids)
}
func (r *mutationResolver) updateImagePerformers(ctx context.Context, imageID int, performerIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(performerIDs)
if err != nil {
return err
}
return r.repository.Image.UpdatePerformers(ctx, imageID, ids)
}
func (r *mutationResolver) updateImageTags(ctx context.Context, imageID int, tagsIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(tagsIDs)
if err != nil {
return err
}
return r.repository.Image.UpdateTags(ctx, imageID, ids)
}
func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageUpdateInput) (ret []*models.Image, err error) { func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageUpdateInput) (ret []*models.Image, err error) {
imageIDs, err := stringslice.StringSliceToIntSlice(input.Ids) imageIDs, err := stringslice.StringSliceToIntSlice(input.Ids)
if err != nil { if err != nil {
@@ -164,70 +138,52 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU
} }
// Populate image from the input // Populate image from the input
updatedTime := time.Now() updatedImage := models.NewImagePartial()
updatedImage := models.ImagePartial{
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
}
translator := changesetTranslator{ translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx), inputMap: getUpdateInputMap(ctx),
} }
updatedImage.Title = translator.nullString(input.Title, "title") updatedImage.Title = translator.optionalString(input.Title, "title")
updatedImage.Rating = translator.nullInt64(input.Rating, "rating") updatedImage.Rating = translator.optionalInt(input.Rating, "rating")
updatedImage.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id") updatedImage.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
updatedImage.Organized = input.Organized if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
}
updatedImage.Organized = translator.optionalBool(input.Organized, "organized")
if translator.hasField("gallery_ids") {
updatedImage.GalleryIDs, err = translateUpdateIDs(input.GalleryIds.Ids, input.GalleryIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting gallery ids: %w", err)
}
}
if translator.hasField("performer_ids") {
updatedImage.PerformerIDs, err = translateUpdateIDs(input.PerformerIds.Ids, input.PerformerIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting performer ids: %w", err)
}
}
if translator.hasField("tag_ids") {
updatedImage.TagIDs, err = translateUpdateIDs(input.TagIds.Ids, input.TagIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
}
// Start the transaction and save the image marker // Start the transaction and save the image marker
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Image qb := r.repository.Image
for _, imageID := range imageIDs { for _, imageID := range imageIDs {
updatedImage.ID = imageID image, err := qb.UpdatePartial(ctx, imageID, updatedImage)
image, err := qb.Update(ctx, updatedImage)
if err != nil { if err != nil {
return err return err
} }
ret = append(ret, image) ret = append(ret, image)
// Save the galleries
if translator.hasField("gallery_ids") {
galleryIDs, err := r.adjustImageGalleryIDs(ctx, imageID, *input.GalleryIds)
if err != nil {
return err
}
if err := qb.UpdateGalleries(ctx, imageID, galleryIDs); err != nil {
return err
}
}
// Save the performers
if translator.hasField("performer_ids") {
performerIDs, err := r.adjustImagePerformerIDs(ctx, imageID, *input.PerformerIds)
if err != nil {
return err
}
if err := qb.UpdatePerformers(ctx, imageID, performerIDs); err != nil {
return err
}
}
// Save the tags
if translator.hasField("tag_ids") {
tagIDs, err := r.adjustImageTagIDs(ctx, imageID, *input.TagIds)
if err != nil {
return err
}
if err := qb.UpdateTags(ctx, imageID, tagIDs); err != nil {
return err
}
}
} }
return nil return nil
@@ -251,33 +207,6 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU
return newRet, nil return newRet, nil
} }
func (r *mutationResolver) adjustImageGalleryIDs(ctx context.Context, imageID int, ids BulkUpdateIds) (ret []int, err error) {
ret, err = r.repository.Image.GetGalleryIDs(ctx, imageID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func (r *mutationResolver) adjustImagePerformerIDs(ctx context.Context, imageID int, ids BulkUpdateIds) (ret []int, err error) {
ret, err = r.repository.Image.GetPerformerIDs(ctx, imageID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func (r *mutationResolver) adjustImageTagIDs(ctx context.Context, imageID int, ids BulkUpdateIds) (ret []int, err error) {
ret, err = r.repository.Image.GetTagIDs(ctx, imageID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageDestroyInput) (ret bool, err error) { func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageDestroyInput) (ret bool, err error) {
imageID, err := strconv.Atoi(input.ID) imageID, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
@@ -286,12 +215,10 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
var i *models.Image var i *models.Image
fileDeleter := &image.FileDeleter{ fileDeleter := &image.FileDeleter{
Deleter: *file.NewDeleter(), Deleter: file.NewDeleter(),
Paths: manager.GetInstance().Paths, Paths: manager.GetInstance().Paths,
} }
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Image
i, err = r.repository.Image.Find(ctx, imageID) i, err = r.repository.Image.Find(ctx, imageID)
if err != nil { if err != nil {
return err return err
@@ -301,7 +228,7 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
return fmt.Errorf("image with id %d not found", imageID) return fmt.Errorf("image with id %d not found", imageID)
} }
return image.Destroy(ctx, i, qb, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)) return r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile))
}); err != nil { }); err != nil {
fileDeleter.Rollback() fileDeleter.Rollback()
return false, err return false, err
@@ -313,8 +240,8 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
// call post hook after performing the other actions // call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, i.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{ r.hookExecutor.ExecutePostHooks(ctx, i.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
ImageDestroyInput: input, ImageDestroyInput: input,
Checksum: i.Checksum, Checksum: i.Checksum(),
Path: i.Path, Path: i.Path(),
}, nil) }, nil)
return true, nil return true, nil
@@ -328,14 +255,13 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
var images []*models.Image var images []*models.Image
fileDeleter := &image.FileDeleter{ fileDeleter := &image.FileDeleter{
Deleter: *file.NewDeleter(), Deleter: file.NewDeleter(),
Paths: manager.GetInstance().Paths, Paths: manager.GetInstance().Paths,
} }
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Image qb := r.repository.Image
for _, imageID := range imageIDs { for _, imageID := range imageIDs {
i, err := qb.Find(ctx, imageID) i, err := qb.Find(ctx, imageID)
if err != nil { if err != nil {
return err return err
@@ -347,7 +273,7 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
images = append(images, i) images = append(images, i)
if err := image.Destroy(ctx, i, qb, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)); err != nil { if err := r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)); err != nil {
return err return err
} }
} }
@@ -365,8 +291,8 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
// call post hook after performing the other actions // call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, image.ID, plugin.ImageDestroyPost, plugin.ImagesDestroyInput{ r.hookExecutor.ExecutePostHooks(ctx, image.ID, plugin.ImageDestroyPost, plugin.ImagesDestroyInput{
ImagesDestroyInput: input, ImagesDestroyInput: input,
Checksum: image.Checksum, Checksum: image.Checksum(),
Path: image.Path, Path: image.Path(),
}, nil) }, nil)
} }

View File

@@ -152,7 +152,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input PerformerC
// Save the stash_ids // Save the stash_ids
if input.StashIds != nil { if input.StashIds != nil {
stashIDJoins := models.StashIDsFromInput(input.StashIds) stashIDJoins := input.StashIds
if err := qb.UpdateStashIDs(ctx, performer.ID, stashIDJoins); err != nil { if err := qb.UpdateStashIDs(ctx, performer.ID, stashIDJoins); err != nil {
return err return err
} }
@@ -275,7 +275,7 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input PerformerU
// Save the stash_ids // Save the stash_ids
if translator.hasField("stash_ids") { if translator.hasField("stash_ids") {
stashIDJoins := models.StashIDsFromInput(input.StashIds) stashIDJoins := input.StashIds
if err := qb.UpdateStashIDs(ctx, performerID, stashIDJoins); err != nil { if err := qb.UpdateStashIDs(ctx, performerID, stashIDJoins); err != nil {
return err return err
} }

View File

@@ -98,19 +98,55 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp
var coverImageData []byte var coverImageData []byte
updatedTime := time.Now() updatedScene := models.NewScenePartial()
updatedScene := models.ScenePartial{ updatedScene.Title = translator.optionalString(input.Title, "title")
ID: sceneID, updatedScene.Details = translator.optionalString(input.Details, "details")
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime}, updatedScene.URL = translator.optionalString(input.URL, "url")
updatedScene.Date = translator.optionalDate(input.Date, "date")
updatedScene.Rating = translator.optionalInt(input.Rating, "rating")
updatedScene.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
} }
updatedScene.Title = translator.nullString(input.Title, "title") updatedScene.Organized = translator.optionalBool(input.Organized, "organized")
updatedScene.Details = translator.nullString(input.Details, "details")
updatedScene.URL = translator.nullString(input.URL, "url") if translator.hasField("performer_ids") {
updatedScene.Date = translator.sqliteDate(input.Date, "date") updatedScene.PerformerIDs, err = translateUpdateIDs(input.PerformerIds, models.RelationshipUpdateModeSet)
updatedScene.Rating = translator.nullInt64(input.Rating, "rating") if err != nil {
updatedScene.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id") return nil, fmt.Errorf("converting performer ids: %w", err)
updatedScene.Organized = input.Organized }
}
if translator.hasField("tag_ids") {
updatedScene.TagIDs, err = translateUpdateIDs(input.TagIds, models.RelationshipUpdateModeSet)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
}
if translator.hasField("gallery_ids") {
updatedScene.GalleryIDs, err = translateUpdateIDs(input.GalleryIds, models.RelationshipUpdateModeSet)
if err != nil {
return nil, fmt.Errorf("converting gallery ids: %w", err)
}
}
// Save the movies
if translator.hasField("movies") {
updatedScene.MovieIDs, err = models.UpdateMovieIDsFromInput(input.Movies)
if err != nil {
return nil, fmt.Errorf("converting movie ids: %w", err)
}
}
// Save the stash_ids
if translator.hasField("stash_ids") {
updatedScene.StashIDs = &models.UpdateStashIDs{
StashIDs: input.StashIds,
Mode: models.RelationshipUpdateModeSet,
}
}
if input.CoverImage != nil && *input.CoverImage != "" { if input.CoverImage != nil && *input.CoverImage != "" {
var err error var err error
@@ -123,7 +159,7 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp
} }
qb := r.repository.Scene qb := r.repository.Scene
s, err := qb.Update(ctx, updatedScene) s, err := qb.UpdatePartial(ctx, sceneID, updatedScene)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -135,42 +171,6 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp
} }
} }
// Save the performers
if translator.hasField("performer_ids") {
if err := r.updateScenePerformers(ctx, sceneID, input.PerformerIds); err != nil {
return nil, err
}
}
// Save the movies
if translator.hasField("movies") {
if err := r.updateSceneMovies(ctx, sceneID, input.Movies); err != nil {
return nil, err
}
}
// Save the tags
if translator.hasField("tag_ids") {
if err := r.updateSceneTags(ctx, sceneID, input.TagIds); err != nil {
return nil, err
}
}
// Save the galleries
if translator.hasField("gallery_ids") {
if err := r.updateSceneGalleries(ctx, sceneID, input.GalleryIds); err != nil {
return nil, err
}
}
// Save the stash_ids
if translator.hasField("stash_ids") {
stashIDJoins := models.StashIDsFromInput(input.StashIds)
if err := qb.UpdateStashIDs(ctx, sceneID, stashIDJoins); err != nil {
return nil, err
}
}
// only update the cover image if provided and everything else was successful // only update the cover image if provided and everything else was successful
if coverImageData != nil { if coverImageData != nil {
err = scene.SetScreenshot(manager.GetInstance().Paths, s.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()), coverImageData) err = scene.SetScreenshot(manager.GetInstance().Paths, s.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()), coverImageData)
@@ -182,56 +182,6 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp
return s, nil return s, nil
} }
func (r *mutationResolver) updateScenePerformers(ctx context.Context, sceneID int, performerIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(performerIDs)
if err != nil {
return err
}
return r.repository.Scene.UpdatePerformers(ctx, sceneID, ids)
}
func (r *mutationResolver) updateSceneMovies(ctx context.Context, sceneID int, movies []*models.SceneMovieInput) error {
var movieJoins []models.MoviesScenes
for _, movie := range movies {
movieID, err := strconv.Atoi(movie.MovieID)
if err != nil {
return err
}
movieJoin := models.MoviesScenes{
MovieID: movieID,
}
if movie.SceneIndex != nil {
movieJoin.SceneIndex = sql.NullInt64{
Int64: int64(*movie.SceneIndex),
Valid: true,
}
}
movieJoins = append(movieJoins, movieJoin)
}
return r.repository.Scene.UpdateMovies(ctx, sceneID, movieJoins)
}
func (r *mutationResolver) updateSceneTags(ctx context.Context, sceneID int, tagsIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(tagsIDs)
if err != nil {
return err
}
return r.repository.Scene.UpdateTags(ctx, sceneID, ids)
}
func (r *mutationResolver) updateSceneGalleries(ctx context.Context, sceneID int, galleryIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(galleryIDs)
if err != nil {
return err
}
return r.repository.Scene.UpdateGalleries(ctx, sceneID, ids)
}
func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneUpdateInput) ([]*models.Scene, error) { func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneUpdateInput) ([]*models.Scene, error) {
sceneIDs, err := stringslice.StringSliceToIntSlice(input.Ids) sceneIDs, err := stringslice.StringSliceToIntSlice(input.Ids)
if err != nil { if err != nil {
@@ -239,23 +189,51 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU
} }
// Populate scene from the input // Populate scene from the input
updatedTime := time.Now()
translator := changesetTranslator{ translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx), inputMap: getUpdateInputMap(ctx),
} }
updatedScene := models.ScenePartial{ updatedScene := models.NewScenePartial()
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime}, updatedScene.Title = translator.optionalString(input.Title, "title")
updatedScene.Details = translator.optionalString(input.Details, "details")
updatedScene.URL = translator.optionalString(input.URL, "url")
updatedScene.Date = translator.optionalDate(input.Date, "date")
updatedScene.Rating = translator.optionalInt(input.Rating, "rating")
updatedScene.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
} }
updatedScene.Title = translator.nullString(input.Title, "title") updatedScene.Organized = translator.optionalBool(input.Organized, "organized")
updatedScene.Details = translator.nullString(input.Details, "details")
updatedScene.URL = translator.nullString(input.URL, "url") if translator.hasField("performer_ids") {
updatedScene.Date = translator.sqliteDate(input.Date, "date") updatedScene.PerformerIDs, err = translateUpdateIDs(input.PerformerIds.Ids, input.PerformerIds.Mode)
updatedScene.Rating = translator.nullInt64(input.Rating, "rating") if err != nil {
updatedScene.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id") return nil, fmt.Errorf("converting performer ids: %w", err)
updatedScene.Organized = input.Organized }
}
if translator.hasField("tag_ids") {
updatedScene.TagIDs, err = translateUpdateIDs(input.TagIds.Ids, input.TagIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
}
if translator.hasField("gallery_ids") {
updatedScene.GalleryIDs, err = translateUpdateIDs(input.GalleryIds.Ids, input.GalleryIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting gallery ids: %w", err)
}
}
// Save the movies
if translator.hasField("movies") {
updatedScene.MovieIDs, err = translateSceneMovieIDs(*input.MovieIds)
if err != nil {
return nil, fmt.Errorf("converting movie ids: %w", err)
}
}
ret := []*models.Scene{} ret := []*models.Scene{}
@@ -264,62 +242,12 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU
qb := r.repository.Scene qb := r.repository.Scene
for _, sceneID := range sceneIDs { for _, sceneID := range sceneIDs {
updatedScene.ID = sceneID scene, err := qb.UpdatePartial(ctx, sceneID, updatedScene)
scene, err := qb.Update(ctx, updatedScene)
if err != nil { if err != nil {
return err return err
} }
ret = append(ret, scene) ret = append(ret, scene)
// Save the performers
if translator.hasField("performer_ids") {
performerIDs, err := r.adjustScenePerformerIDs(ctx, sceneID, *input.PerformerIds)
if err != nil {
return err
}
if err := qb.UpdatePerformers(ctx, sceneID, performerIDs); err != nil {
return err
}
}
// Save the tags
if translator.hasField("tag_ids") {
tagIDs, err := adjustTagIDs(ctx, qb, sceneID, *input.TagIds)
if err != nil {
return err
}
if err := qb.UpdateTags(ctx, sceneID, tagIDs); err != nil {
return err
}
}
// Save the galleries
if translator.hasField("gallery_ids") {
galleryIDs, err := r.adjustSceneGalleryIDs(ctx, sceneID, *input.GalleryIds)
if err != nil {
return err
}
if err := qb.UpdateGalleries(ctx, sceneID, galleryIDs); err != nil {
return err
}
}
// Save the movies
if translator.hasField("movie_ids") {
movies, err := r.adjustSceneMovieIDs(ctx, sceneID, *input.MovieIds)
if err != nil {
return err
}
if err := qb.UpdateMovies(ctx, sceneID, movies); err != nil {
return err
}
}
} }
return nil return nil
@@ -345,7 +273,7 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU
func adjustIDs(existingIDs []int, updateIDs BulkUpdateIds) []int { func adjustIDs(existingIDs []int, updateIDs BulkUpdateIds) []int {
// if we are setting the ids, just return the ids // if we are setting the ids, just return the ids
if updateIDs.Mode == BulkUpdateIDModeSet { if updateIDs.Mode == models.RelationshipUpdateModeSet {
existingIDs = []int{} existingIDs = []int{}
for _, idStr := range updateIDs.Ids { for _, idStr := range updateIDs.Ids {
id, _ := strconv.Atoi(idStr) id, _ := strconv.Atoi(idStr)
@@ -362,7 +290,7 @@ func adjustIDs(existingIDs []int, updateIDs BulkUpdateIds) []int {
foundExisting := false foundExisting := false
for idx, existingID := range existingIDs { for idx, existingID := range existingIDs {
if existingID == id { if existingID == id {
if updateIDs.Mode == BulkUpdateIDModeRemove { if updateIDs.Mode == models.RelationshipUpdateModeRemove {
// remove from the list // remove from the list
existingIDs = append(existingIDs[:idx], existingIDs[idx+1:]...) existingIDs = append(existingIDs[:idx], existingIDs[idx+1:]...)
} }
@@ -372,7 +300,7 @@ func adjustIDs(existingIDs []int, updateIDs BulkUpdateIds) []int {
} }
} }
if !foundExisting && updateIDs.Mode != BulkUpdateIDModeRemove { if !foundExisting && updateIDs.Mode != models.RelationshipUpdateModeRemove {
existingIDs = append(existingIDs, id) existingIDs = append(existingIDs, id)
} }
} }
@@ -380,15 +308,6 @@ func adjustIDs(existingIDs []int, updateIDs BulkUpdateIds) []int {
return existingIDs return existingIDs
} }
func (r *mutationResolver) adjustScenePerformerIDs(ctx context.Context, sceneID int, ids BulkUpdateIds) (ret []int, err error) {
ret, err = r.repository.Scene.GetPerformerIDs(ctx, sceneID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
type tagIDsGetter interface { type tagIDsGetter interface {
GetTagIDs(ctx context.Context, id int) ([]int, error) GetTagIDs(ctx context.Context, id int) ([]int, error)
} }
@@ -402,57 +321,6 @@ func adjustTagIDs(ctx context.Context, qb tagIDsGetter, sceneID int, ids BulkUpd
return adjustIDs(ret, ids), nil return adjustIDs(ret, ids), nil
} }
func (r *mutationResolver) adjustSceneGalleryIDs(ctx context.Context, sceneID int, ids BulkUpdateIds) (ret []int, err error) {
ret, err = r.repository.Scene.GetGalleryIDs(ctx, sceneID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func (r *mutationResolver) adjustSceneMovieIDs(ctx context.Context, sceneID int, updateIDs BulkUpdateIds) ([]models.MoviesScenes, error) {
existingMovies, err := r.repository.Scene.GetMovies(ctx, sceneID)
if err != nil {
return nil, err
}
// if we are setting the ids, just return the ids
if updateIDs.Mode == BulkUpdateIDModeSet {
existingMovies = []models.MoviesScenes{}
for _, idStr := range updateIDs.Ids {
id, _ := strconv.Atoi(idStr)
existingMovies = append(existingMovies, models.MoviesScenes{MovieID: id})
}
return existingMovies, nil
}
for _, idStr := range updateIDs.Ids {
id, _ := strconv.Atoi(idStr)
// look for the id in the list
foundExisting := false
for idx, existingMovie := range existingMovies {
if existingMovie.MovieID == id {
if updateIDs.Mode == BulkUpdateIDModeRemove {
// remove from the list
existingMovies = append(existingMovies[:idx], existingMovies[idx+1:]...)
}
foundExisting = true
break
}
}
if !foundExisting && updateIDs.Mode != BulkUpdateIDModeRemove {
existingMovies = append(existingMovies, models.MoviesScenes{MovieID: id})
}
}
return existingMovies, err
}
func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneDestroyInput) (bool, error) { func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneDestroyInput) (bool, error) {
sceneID, err := strconv.Atoi(input.ID) sceneID, err := strconv.Atoi(input.ID)
if err != nil { if err != nil {
@@ -463,7 +331,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
var s *models.Scene var s *models.Scene
fileDeleter := &scene.FileDeleter{ fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(), Deleter: file.NewDeleter(),
FileNamingAlgo: fileNamingAlgo, FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths, Paths: manager.GetInstance().Paths,
} }
@@ -486,7 +354,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
// kill any running encoders // kill any running encoders
manager.KillRunningStreams(s, fileNamingAlgo) manager.KillRunningStreams(s, fileNamingAlgo)
return scene.Destroy(ctx, s, r.repository.Scene, r.repository.SceneMarker, fileDeleter, deleteGenerated, deleteFile) return r.sceneService.Destroy(ctx, s, fileDeleter, deleteGenerated, deleteFile)
}); err != nil { }); err != nil {
fileDeleter.Rollback() fileDeleter.Rollback()
return false, err return false, err
@@ -498,9 +366,9 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
// call post hook after performing the other actions // call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, s.ID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{ r.hookExecutor.ExecutePostHooks(ctx, s.ID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{
SceneDestroyInput: input, SceneDestroyInput: input,
Checksum: s.Checksum.String, Checksum: s.Checksum(),
OSHash: s.OSHash.String, OSHash: s.OSHash(),
Path: s.Path, Path: s.Path(),
}, nil) }, nil)
return true, nil return true, nil
@@ -511,7 +379,7 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm() fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
fileDeleter := &scene.FileDeleter{ fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(), Deleter: file.NewDeleter(),
FileNamingAlgo: fileNamingAlgo, FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths, Paths: manager.GetInstance().Paths,
} }
@@ -536,7 +404,7 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
// kill any running encoders // kill any running encoders
manager.KillRunningStreams(s, fileNamingAlgo) manager.KillRunningStreams(s, fileNamingAlgo)
if err := scene.Destroy(ctx, s, r.repository.Scene, r.repository.SceneMarker, fileDeleter, deleteGenerated, deleteFile); err != nil { if err := r.sceneService.Destroy(ctx, s, fileDeleter, deleteGenerated, deleteFile); err != nil {
return err return err
} }
} }
@@ -554,9 +422,9 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
// call post hook after performing the other actions // call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneDestroyPost, plugin.ScenesDestroyInput{ r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneDestroyPost, plugin.ScenesDestroyInput{
ScenesDestroyInput: input, ScenesDestroyInput: input,
Checksum: scene.Checksum.String, Checksum: scene.Checksum(),
OSHash: scene.OSHash.String, OSHash: scene.OSHash(),
Path: scene.Path, Path: scene.Path(),
}, nil) }, nil)
} }
@@ -661,7 +529,7 @@ func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (b
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm() fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
fileDeleter := &scene.FileDeleter{ fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(), Deleter: file.NewDeleter(),
FileNamingAlgo: fileNamingAlgo, FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths, Paths: manager.GetInstance().Paths,
} }
@@ -707,7 +575,7 @@ func (r *mutationResolver) changeMarker(ctx context.Context, changeType int, cha
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm() fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
fileDeleter := &scene.FileDeleter{ fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(), Deleter: file.NewDeleter(),
FileNamingAlgo: fileNamingAlgo, FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths, Paths: manager.GetInstance().Paths,
} }

View File

@@ -90,7 +90,7 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input StudioCreateI
// Save the stash_ids // Save the stash_ids
if input.StashIds != nil { if input.StashIds != nil {
stashIDJoins := models.StashIDsFromInput(input.StashIds) stashIDJoins := input.StashIds
if err := qb.UpdateStashIDs(ctx, s.ID, stashIDJoins); err != nil { if err := qb.UpdateStashIDs(ctx, s.ID, stashIDJoins); err != nil {
return err return err
} }
@@ -182,7 +182,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input StudioUpdateI
// Save the stash_ids // Save the stash_ids
if translator.hasField("stash_ids") { if translator.hasField("stash_ids") {
stashIDJoins := models.StashIDsFromInput(input.StashIds) stashIDJoins := input.StashIds
if err := qb.UpdateStashIDs(ctx, studioID, stashIDJoins); err != nil { if err := qb.UpdateStashIDs(ctx, studioID, stashIDJoins); err != nil {
return err return err
} }

View File

@@ -5,6 +5,7 @@ import (
"errors" "errors"
"testing" "testing"
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks" "github.com/stashapp/stash/pkg/models/mocks"
"github.com/stashapp/stash/pkg/plugin" "github.com/stashapp/stash/pkg/plugin"
@@ -15,9 +16,13 @@ import (
// TODO - move this into a common area // TODO - move this into a common area
func newResolver() *Resolver { func newResolver() *Resolver {
txnMgr := &mocks.TxnManager{}
return &Resolver{ return &Resolver{
txnManager: &mocks.TxnManager{}, txnManager: txnMgr,
repository: mocks.NewTxnRepository(), repository: manager.Repository{
TxnManager: txnMgr,
Tag: &mocks.TagReaderWriter{},
},
hookExecutor: &mockHookExecutor{}, hookExecutor: &mockHookExecutor{},
} }
} }

View File

@@ -27,7 +27,15 @@ func (r *queryResolver) FindImage(ctx context.Context, id *string, checksum *str
return err return err
} }
} else if checksum != nil { } else if checksum != nil {
image, err = qb.FindByChecksum(ctx, *checksum) var images []*models.Image
images, err = qb.FindByChecksum(ctx, *checksum)
if err != nil {
return err
}
if len(images) > 0 {
image = images[0]
}
} }
return err return err

View File

@@ -25,7 +25,11 @@ func (r *queryResolver) FindScene(ctx context.Context, id *string, checksum *str
return err return err
} }
} else if checksum != nil { } else if checksum != nil {
scene, err = qb.FindByChecksum(ctx, *checksum) var scenes []*models.Scene
scenes, err = qb.FindByChecksum(ctx, *checksum)
if len(scenes) > 0 {
scene = scenes[0]
}
} }
return err return err
@@ -41,19 +45,24 @@ func (r *queryResolver) FindSceneByHash(ctx context.Context, input SceneHashInpu
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene qb := r.repository.Scene
var err error
if input.Checksum != nil { if input.Checksum != nil {
scene, err = qb.FindByChecksum(ctx, *input.Checksum) scenes, err := qb.FindByChecksum(ctx, *input.Checksum)
if err != nil { if err != nil {
return err return err
} }
if len(scenes) > 0 {
scene = scenes[0]
}
} }
if scene == nil && input.Oshash != nil { if scene == nil && input.Oshash != nil {
scene, err = qb.FindByOSHash(ctx, *input.Oshash) scenes, err := qb.FindByOSHash(ctx, *input.Oshash)
if err != nil { if err != nil {
return err return err
} }
if len(scenes) > 0 {
scene = scenes[0]
}
} }
return nil return nil
@@ -77,9 +86,14 @@ func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.Scen
if err == nil { if err == nil {
result.Count = len(scenes) result.Count = len(scenes)
for _, s := range scenes { for _, s := range scenes {
result.TotalDuration += s.Duration.Float64 f := s.PrimaryFile()
size, _ := strconv.ParseFloat(s.Size.String, 64) if f == nil {
result.TotalSize += size continue
}
result.TotalDuration += f.Duration
result.TotalSize += float64(f.Size)
} }
} }
} else { } else {

View File

@@ -9,6 +9,7 @@ import (
"github.com/go-chi/chi" "github.com/go-chi/chi"
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
@@ -18,7 +19,7 @@ import (
type ImageFinder interface { type ImageFinder interface {
Find(ctx context.Context, id int) (*models.Image, error) Find(ctx context.Context, id int) (*models.Image, error)
FindByChecksum(ctx context.Context, checksum string) (*models.Image, error) FindByChecksum(ctx context.Context, checksum string) ([]*models.Image, error)
} }
type imageRoutes struct { type imageRoutes struct {
@@ -43,7 +44,7 @@ func (rs imageRoutes) Routes() chi.Router {
func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) { func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
img := r.Context().Value(imageKey).(*models.Image) img := r.Context().Value(imageKey).(*models.Image)
filepath := manager.GetInstance().Paths.Generated.GetThumbnailPath(img.Checksum, models.DefaultGthumbWidth) filepath := manager.GetInstance().Paths.Generated.GetThumbnailPath(img.Checksum(), models.DefaultGthumbWidth)
w.Header().Add("Cache-Control", "max-age=604800000") w.Header().Add("Cache-Control", "max-age=604800000")
@@ -52,8 +53,16 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
if exists { if exists {
http.ServeFile(w, r, filepath) http.ServeFile(w, r, filepath)
} else { } else {
// don't return anything if there is no file
f := img.PrimaryFile()
if f == nil {
// TODO - probably want to return a placeholder
http.Error(w, http.StatusText(404), 404)
return
}
encoder := image.NewThumbnailEncoder(manager.GetInstance().FFMPEG) encoder := image.NewThumbnailEncoder(manager.GetInstance().FFMPEG)
data, err := encoder.GetThumbnail(img, models.DefaultGthumbWidth) data, err := encoder.GetThumbnail(f, models.DefaultGthumbWidth)
if err != nil { if err != nil {
// don't log for unsupported image format // don't log for unsupported image format
if !errors.Is(err, image.ErrNotSupportedForThumbnail) { if !errors.Is(err, image.ErrNotSupportedForThumbnail) {
@@ -72,7 +81,7 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
// write the generated thumbnail to disk if enabled // write the generated thumbnail to disk if enabled
if manager.GetInstance().Config.IsWriteImageThumbnails() { if manager.GetInstance().Config.IsWriteImageThumbnails() {
logger.Debugf("writing thumbnail to disk: %s", img.Path) logger.Debugf("writing thumbnail to disk: %s", img.Path())
if err := fsutil.WriteFile(filepath, data); err != nil { if err := fsutil.WriteFile(filepath, data); err != nil {
logger.Errorf("error writing thumbnail for image %s: %s", img.Path, err) logger.Errorf("error writing thumbnail for image %s: %s", img.Path, err)
} }
@@ -87,7 +96,13 @@ func (rs imageRoutes) Image(w http.ResponseWriter, r *http.Request) {
i := r.Context().Value(imageKey).(*models.Image) i := r.Context().Value(imageKey).(*models.Image)
// if image is in a zip file, we need to serve it specifically // if image is in a zip file, we need to serve it specifically
image.Serve(w, r, i.Path)
if len(i.Files) == 0 {
http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
return
}
i.Files[0].Serve(&file.OsFS{}, w, r)
} }
// endregion // endregion
@@ -101,7 +116,10 @@ func (rs imageRoutes) ImageCtx(next http.Handler) http.Handler {
readTxnErr := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error { readTxnErr := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
qb := rs.imageFinder qb := rs.imageFinder
if imageID == 0 { if imageID == 0 {
image, _ = qb.FindByChecksum(ctx, imageIdentifierQueryParam) images, _ := qb.FindByChecksum(ctx, imageIdentifierQueryParam)
if len(images) > 0 {
image = images[0]
}
} else { } else {
image, _ = qb.Find(ctx, imageID) image, _ = qb.Find(ctx, imageID)
} }

View File

@@ -11,6 +11,8 @@ import (
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/file/video"
"github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
@@ -23,9 +25,8 @@ type SceneFinder interface {
manager.SceneCoverGetter manager.SceneCoverGetter
scene.IDFinder scene.IDFinder
FindByChecksum(ctx context.Context, checksum string) (*models.Scene, error) FindByChecksum(ctx context.Context, checksum string) ([]*models.Scene, error)
FindByOSHash(ctx context.Context, oshash string) (*models.Scene, error) FindByOSHash(ctx context.Context, oshash string) ([]*models.Scene, error)
GetCaptions(ctx context.Context, sceneID int) ([]*models.SceneCaption, error)
} }
type SceneMarkerFinder interface { type SceneMarkerFinder interface {
@@ -33,9 +34,14 @@ type SceneMarkerFinder interface {
FindBySceneID(ctx context.Context, sceneID int) ([]*models.SceneMarker, error) FindBySceneID(ctx context.Context, sceneID int) ([]*models.SceneMarker, error)
} }
type CaptionFinder interface {
GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error)
}
type sceneRoutes struct { type sceneRoutes struct {
txnManager txn.Manager txnManager txn.Manager
sceneFinder SceneFinder sceneFinder SceneFinder
captionFinder CaptionFinder
sceneMarkerFinder SceneMarkerFinder sceneMarkerFinder SceneMarkerFinder
tagFinder scene.MarkerTagFinder tagFinder scene.MarkerTagFinder
} }
@@ -116,7 +122,7 @@ func (rs sceneRoutes) StreamHLS(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
ffprobe := manager.GetInstance().FFProbe ffprobe := manager.GetInstance().FFProbe
videoFile, err := ffprobe.NewVideoFile(scene.Path) videoFile, err := ffprobe.NewVideoFile(scene.Path())
if err != nil { if err != nil {
logger.Errorf("[stream] error reading video file: %v", err) logger.Errorf("[stream] error reading video file: %v", err)
return return
@@ -149,9 +155,11 @@ func (rs sceneRoutes) StreamTS(w http.ResponseWriter, r *http.Request) {
} }
func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, streamFormat ffmpeg.StreamFormat) { func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, streamFormat ffmpeg.StreamFormat) {
logger.Debugf("Streaming as %s", streamFormat.MimeType)
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
f := scene.PrimaryFile()
logger.Debugf("Streaming as %s", streamFormat.MimeType)
// start stream based on query param, if provided // start stream based on query param, if provided
if err := r.ParseForm(); err != nil { if err := r.ParseForm(); err != nil {
logger.Warnf("[stream] error parsing query form: %v", err) logger.Warnf("[stream] error parsing query form: %v", err)
@@ -162,17 +170,20 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, st
requestedSize := r.Form.Get("resolution") requestedSize := r.Form.Get("resolution")
audioCodec := ffmpeg.MissingUnsupported audioCodec := ffmpeg.MissingUnsupported
if scene.AudioCodec.Valid { if f.AudioCodec != "" {
audioCodec = ffmpeg.ProbeAudioCodec(scene.AudioCodec.String) audioCodec = ffmpeg.ProbeAudioCodec(f.AudioCodec)
} }
width := f.Width
height := f.Height
options := ffmpeg.TranscodeStreamOptions{ options := ffmpeg.TranscodeStreamOptions{
Input: scene.Path, Input: f.Path,
Codec: streamFormat, Codec: streamFormat,
VideoOnly: audioCodec == ffmpeg.MissingUnsupported, VideoOnly: audioCodec == ffmpeg.MissingUnsupported,
VideoWidth: int(scene.Width.Int64), VideoWidth: width,
VideoHeight: int(scene.Height.Int64), VideoHeight: height,
StartTime: ss, StartTime: ss,
MaxTranscodeSize: config.GetInstance().GetMaxStreamingTranscodeSize().GetMaxResolution(), MaxTranscodeSize: config.GetInstance().GetMaxStreamingTranscodeSize().GetMaxResolution(),
@@ -186,7 +197,7 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, st
lm := manager.GetInstance().ReadLockManager lm := manager.GetInstance().ReadLockManager
streamRequestCtx := manager.NewStreamRequestContext(w, r) streamRequestCtx := manager.NewStreamRequestContext(w, r)
lockCtx := lm.ReadLock(streamRequestCtx, scene.Path) lockCtx := lm.ReadLock(streamRequestCtx, f.Path)
defer lockCtx.Cancel() defer lockCtx.Cancel()
stream, err := encoder.GetTranscodeStream(lockCtx, options) stream, err := encoder.GetTranscodeStream(lockCtx, options)
@@ -295,7 +306,7 @@ func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) Funscript(w http.ResponseWriter, r *http.Request) { func (rs sceneRoutes) Funscript(w http.ResponseWriter, r *http.Request) {
s := r.Context().Value(sceneKey).(*models.Scene) s := r.Context().Value(sceneKey).(*models.Scene)
funscript := scene.GetFunscriptPath(s.Path) funscript := video.GetFunscriptPath(s.Path())
serveFileNoCache(w, r, funscript) serveFileNoCache(w, r, funscript)
} }
@@ -311,10 +322,15 @@ func (rs sceneRoutes) Caption(w http.ResponseWriter, r *http.Request, lang strin
if err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error { if err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
var err error var err error
captions, err := rs.sceneFinder.GetCaptions(ctx, s.ID) primaryFile := s.PrimaryFile()
if primaryFile == nil {
return nil
}
captions, err := rs.captionFinder.GetCaptions(ctx, primaryFile.Base().ID)
for _, caption := range captions { for _, caption := range captions {
if lang == caption.LanguageCode && ext == caption.CaptionType { if lang == caption.LanguageCode && ext == caption.CaptionType {
sub, err := scene.ReadSubs(caption.Path(s.Path)) sub, err := video.ReadSubs(caption.Path(s.Path()))
if err == nil { if err == nil {
var b bytes.Buffer var b bytes.Buffer
err = sub.WriteToWebVTT(&b) err = sub.WriteToWebVTT(&b)
@@ -460,11 +476,17 @@ func (rs sceneRoutes) SceneCtx(next http.Handler) http.Handler {
readTxnErr := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error { readTxnErr := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
qb := rs.sceneFinder qb := rs.sceneFinder
if sceneID == 0 { if sceneID == 0 {
var scenes []*models.Scene
// determine checksum/os by the length of the query param // determine checksum/os by the length of the query param
if len(sceneIdentifierQueryParam) == 32 { if len(sceneIdentifierQueryParam) == 32 {
scene, _ = qb.FindByChecksum(ctx, sceneIdentifierQueryParam) scenes, _ = qb.FindByChecksum(ctx, sceneIdentifierQueryParam)
} else { } else {
scene, _ = qb.FindByOSHash(ctx, sceneIdentifierQueryParam) scenes, _ = qb.FindByOSHash(ctx, sceneIdentifierQueryParam)
}
if len(scenes) > 0 {
scene = scenes[0]
} }
} else { } else {
scene, _ = qb.Find(ctx, sceneID) scene, _ = qb.Find(ctx, sceneID)

View File

@@ -75,9 +75,15 @@ func Start() error {
txnManager := manager.GetInstance().Repository txnManager := manager.GetInstance().Repository
pluginCache := manager.GetInstance().PluginCache pluginCache := manager.GetInstance().PluginCache
sceneService := manager.GetInstance().SceneService
imageService := manager.GetInstance().ImageService
galleryService := manager.GetInstance().GalleryService
resolver := &Resolver{ resolver := &Resolver{
txnManager: txnManager, txnManager: txnManager,
repository: txnManager, repository: txnManager,
sceneService: sceneService,
imageService: imageService,
galleryService: galleryService,
hookExecutor: pluginCache, hookExecutor: pluginCache,
} }
@@ -125,6 +131,7 @@ func Start() error {
r.Mount("/scene", sceneRoutes{ r.Mount("/scene", sceneRoutes{
txnManager: txnManager, txnManager: txnManager,
sceneFinder: txnManager.Scene, sceneFinder: txnManager.Scene,
captionFinder: txnManager.File,
sceneMarkerFinder: txnManager.SceneMarker, sceneMarkerFinder: txnManager.SceneMarker,
tagFinder: txnManager.Tag, tagFinder: txnManager.Tag,
}.Routes()) }.Routes())

View File

@@ -1,6 +1,12 @@
package api package api
import "math" import (
"fmt"
"math"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
// An enum https://golang.org/ref/spec#Iota // An enum https://golang.org/ref/spec#Iota
const ( const (
@@ -17,3 +23,41 @@ func handleFloat64(v float64) *float64 {
return &v return &v
} }
func handleFloat64Value(v float64) float64 {
if math.IsInf(v, 0) || math.IsNaN(v) {
return 0
}
return v
}
func translateUpdateIDs(strIDs []string, mode models.RelationshipUpdateMode) (*models.UpdateIDs, error) {
ids, err := stringslice.StringSliceToIntSlice(strIDs)
if err != nil {
return nil, fmt.Errorf("converting ids [%v]: %w", strIDs, err)
}
return &models.UpdateIDs{
IDs: ids,
Mode: mode,
}, nil
}
func translateSceneMovieIDs(input BulkUpdateIds) (*models.UpdateMovieIDs, error) {
ids, err := stringslice.StringSliceToIntSlice(input.Ids)
if err != nil {
return nil, fmt.Errorf("converting ids [%v]: %w", input.Ids, err)
}
ret := &models.UpdateMovieIDs{
Mode: input.Mode,
}
for _, id := range ids {
ret.Movies = append(ret.Movies, models.MoviesScenes{
MovieID: id,
})
}
return ret, nil
}

View File

@@ -1,8 +1,9 @@
package urlbuilders package urlbuilders
import ( import (
"github.com/stashapp/stash/pkg/models"
"strconv" "strconv"
"github.com/stashapp/stash/pkg/models"
) )
type ImageURLBuilder struct { type ImageURLBuilder struct {
@@ -15,7 +16,7 @@ func NewImageURLBuilder(baseURL string, image *models.Image) ImageURLBuilder {
return ImageURLBuilder{ return ImageURLBuilder{
BaseURL: baseURL, BaseURL: baseURL,
ImageID: strconv.Itoa(image.ID), ImageID: strconv.Itoa(image.ID),
UpdatedAt: strconv.FormatInt(image.UpdatedAt.Timestamp.Unix(), 10), UpdatedAt: strconv.FormatInt(image.UpdatedAt.Unix(), 10),
} }
} }

View File

@@ -9,25 +9,30 @@ import (
) )
func getGalleryFileTagger(s *models.Gallery, cache *match.Cache) tagger { func getGalleryFileTagger(s *models.Gallery, cache *match.Cache) tagger {
var path string
if s.Path() != "" {
path = s.Path()
}
// only trim the extension if gallery is file-based // only trim the extension if gallery is file-based
trimExt := s.Zip trimExt := s.PrimaryFile() != nil
return tagger{ return tagger{
ID: s.ID, ID: s.ID,
Type: "gallery", Type: "gallery",
Name: s.GetTitle(), Name: s.GetTitle(),
Path: s.Path.String, Path: path,
trimExt: trimExt, trimExt: trimExt,
cache: cache, cache: cache,
} }
} }
// GalleryPerformers tags the provided gallery with performers whose name matches the gallery's path. // GalleryPerformers tags the provided gallery with performers whose name matches the gallery's path.
func GalleryPerformers(ctx context.Context, s *models.Gallery, rw gallery.PerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error { func GalleryPerformers(ctx context.Context, s *models.Gallery, rw gallery.PartialUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
t := getGalleryFileTagger(s, cache) t := getGalleryFileTagger(s, cache)
return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) { return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) {
return gallery.AddPerformer(ctx, rw, subjectID, otherID) return gallery.AddPerformer(ctx, rw, s, otherID)
}) })
} }
@@ -35,7 +40,7 @@ func GalleryPerformers(ctx context.Context, s *models.Gallery, rw gallery.Perfor
// //
// Gallerys will not be tagged if studio is already set. // Gallerys will not be tagged if studio is already set.
func GalleryStudios(ctx context.Context, s *models.Gallery, rw GalleryFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error { func GalleryStudios(ctx context.Context, s *models.Gallery, rw GalleryFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error {
if s.StudioID.Valid { if s.StudioID != nil {
// don't modify // don't modify
return nil return nil
} }
@@ -43,15 +48,15 @@ func GalleryStudios(ctx context.Context, s *models.Gallery, rw GalleryFinderUpda
t := getGalleryFileTagger(s, cache) t := getGalleryFileTagger(s, cache)
return t.tagStudios(ctx, studioReader, func(subjectID, otherID int) (bool, error) { return t.tagStudios(ctx, studioReader, func(subjectID, otherID int) (bool, error) {
return addGalleryStudio(ctx, rw, subjectID, otherID) return addGalleryStudio(ctx, rw, s, otherID)
}) })
} }
// GalleryTags tags the provided gallery with tags whose name matches the gallery's path. // GalleryTags tags the provided gallery with tags whose name matches the gallery's path.
func GalleryTags(ctx context.Context, s *models.Gallery, rw gallery.TagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error { func GalleryTags(ctx context.Context, s *models.Gallery, rw gallery.PartialUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
t := getGalleryFileTagger(s, cache) t := getGalleryFileTagger(s, cache)
return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) { return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) {
return gallery.AddTag(ctx, rw, subjectID, otherID) return gallery.AddTag(ctx, rw, s, otherID)
}) })
} }

View File

@@ -4,6 +4,7 @@ import (
"context" "context"
"testing" "testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks" "github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@@ -44,13 +45,21 @@ func TestGalleryPerformers(t *testing.T) {
mockPerformerReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once() mockPerformerReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once()
if test.Matches { if test.Matches {
mockGalleryReader.On("GetPerformerIDs", testCtx, galleryID).Return(nil, nil).Once() mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
mockGalleryReader.On("UpdatePerformers", testCtx, galleryID, []int{performerID}).Return(nil).Once() PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
gallery := models.Gallery{ gallery := models.Gallery{
ID: galleryID, ID: galleryID,
Path: models.NullString(test.Path), Files: []file.File{
&file.BaseFile{
Path: test.Path,
},
},
} }
err := GalleryPerformers(testCtx, &gallery, mockGalleryReader, mockPerformerReader, nil) err := GalleryPerformers(testCtx, &gallery, mockGalleryReader, mockPerformerReader, nil)
@@ -65,7 +74,7 @@ func TestGalleryStudios(t *testing.T) {
const galleryID = 1 const galleryID = 1
const studioName = "studio name" const studioName = "studio name"
const studioID = 2 var studioID = 2
studio := models.Studio{ studio := models.Studio{
ID: studioID, ID: studioID,
Name: models.NullString(studioName), Name: models.NullString(studioName),
@@ -84,17 +93,19 @@ func TestGalleryStudios(t *testing.T) {
doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockGalleryReader *mocks.GalleryReaderWriter, test pathTestTable) { doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockGalleryReader *mocks.GalleryReaderWriter, test pathTestTable) {
if test.Matches { if test.Matches {
mockGalleryReader.On("Find", testCtx, galleryID).Return(&models.Gallery{}, nil).Once() expectedStudioID := studioID
expectedStudioID := models.NullInt64(studioID) mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
mockGalleryReader.On("UpdatePartial", testCtx, models.GalleryPartial{ StudioID: models.NewOptionalInt(expectedStudioID),
ID: galleryID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once() }).Return(nil, nil).Once()
} }
gallery := models.Gallery{ gallery := models.Gallery{
ID: galleryID, ID: galleryID,
Path: models.NullString(test.Path), Files: []file.File{
&file.BaseFile{
Path: test.Path,
},
},
} }
err := GalleryStudios(testCtx, &gallery, mockGalleryReader, mockStudioReader, nil) err := GalleryStudios(testCtx, &gallery, mockGalleryReader, mockStudioReader, nil)
@@ -157,13 +168,21 @@ func TestGalleryTags(t *testing.T) {
doTest := func(mockTagReader *mocks.TagReaderWriter, mockGalleryReader *mocks.GalleryReaderWriter, test pathTestTable) { doTest := func(mockTagReader *mocks.TagReaderWriter, mockGalleryReader *mocks.GalleryReaderWriter, test pathTestTable) {
if test.Matches { if test.Matches {
mockGalleryReader.On("GetTagIDs", testCtx, galleryID).Return(nil, nil).Once() mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
mockGalleryReader.On("UpdateTags", testCtx, galleryID, []int{tagID}).Return(nil).Once() TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
gallery := models.Gallery{ gallery := models.Gallery{
ID: galleryID, ID: galleryID,
Path: models.NullString(test.Path), Files: []file.File{
&file.BaseFile{
Path: test.Path,
},
},
} }
err := GalleryTags(testCtx, &gallery, mockGalleryReader, mockTagReader, nil) err := GalleryTags(testCtx, &gallery, mockGalleryReader, mockTagReader, nil)

View File

@@ -13,17 +13,17 @@ func getImageFileTagger(s *models.Image, cache *match.Cache) tagger {
ID: s.ID, ID: s.ID,
Type: "image", Type: "image",
Name: s.GetTitle(), Name: s.GetTitle(),
Path: s.Path, Path: s.Path(),
cache: cache, cache: cache,
} }
} }
// ImagePerformers tags the provided image with performers whose name matches the image's path. // ImagePerformers tags the provided image with performers whose name matches the image's path.
func ImagePerformers(ctx context.Context, s *models.Image, rw image.PerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error { func ImagePerformers(ctx context.Context, s *models.Image, rw image.PartialUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
t := getImageFileTagger(s, cache) t := getImageFileTagger(s, cache)
return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) { return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) {
return image.AddPerformer(ctx, rw, subjectID, otherID) return image.AddPerformer(ctx, rw, s, otherID)
}) })
} }
@@ -31,7 +31,7 @@ func ImagePerformers(ctx context.Context, s *models.Image, rw image.PerformerUpd
// //
// Images will not be tagged if studio is already set. // Images will not be tagged if studio is already set.
func ImageStudios(ctx context.Context, s *models.Image, rw ImageFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error { func ImageStudios(ctx context.Context, s *models.Image, rw ImageFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error {
if s.StudioID.Valid { if s.StudioID != nil {
// don't modify // don't modify
return nil return nil
} }
@@ -39,15 +39,15 @@ func ImageStudios(ctx context.Context, s *models.Image, rw ImageFinderUpdater, s
t := getImageFileTagger(s, cache) t := getImageFileTagger(s, cache)
return t.tagStudios(ctx, studioReader, func(subjectID, otherID int) (bool, error) { return t.tagStudios(ctx, studioReader, func(subjectID, otherID int) (bool, error) {
return addImageStudio(ctx, rw, subjectID, otherID) return addImageStudio(ctx, rw, s, otherID)
}) })
} }
// ImageTags tags the provided image with tags whose name matches the image's path. // ImageTags tags the provided image with tags whose name matches the image's path.
func ImageTags(ctx context.Context, s *models.Image, rw image.TagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error { func ImageTags(ctx context.Context, s *models.Image, rw image.PartialUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
t := getImageFileTagger(s, cache) t := getImageFileTagger(s, cache)
return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) { return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) {
return image.AddTag(ctx, rw, subjectID, otherID) return image.AddTag(ctx, rw, s, otherID)
}) })
} }

View File

@@ -3,6 +3,7 @@ package autotag
import ( import (
"testing" "testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks" "github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@@ -11,6 +12,14 @@ import (
const imageExt = "jpg" const imageExt = "jpg"
func makeImageFile(p string) *file.ImageFile {
return &file.ImageFile{
BaseFile: &file.BaseFile{
Path: p,
},
}
}
func TestImagePerformers(t *testing.T) { func TestImagePerformers(t *testing.T) {
t.Parallel() t.Parallel()
@@ -41,13 +50,17 @@ func TestImagePerformers(t *testing.T) {
mockPerformerReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once() mockPerformerReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once()
if test.Matches { if test.Matches {
mockImageReader.On("GetPerformerIDs", testCtx, imageID).Return(nil, nil).Once() mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
mockImageReader.On("UpdatePerformers", testCtx, imageID, []int{performerID}).Return(nil).Once() PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
image := models.Image{ image := models.Image{
ID: imageID, ID: imageID,
Path: test.Path, Files: []*file.ImageFile{makeImageFile(test.Path)},
} }
err := ImagePerformers(testCtx, &image, mockImageReader, mockPerformerReader, nil) err := ImagePerformers(testCtx, &image, mockImageReader, mockPerformerReader, nil)
@@ -62,7 +75,7 @@ func TestImageStudios(t *testing.T) {
const imageID = 1 const imageID = 1
const studioName = "studio name" const studioName = "studio name"
const studioID = 2 var studioID = 2
studio := models.Studio{ studio := models.Studio{
ID: studioID, ID: studioID,
Name: models.NullString(studioName), Name: models.NullString(studioName),
@@ -81,17 +94,15 @@ func TestImageStudios(t *testing.T) {
doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockImageReader *mocks.ImageReaderWriter, test pathTestTable) { doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockImageReader *mocks.ImageReaderWriter, test pathTestTable) {
if test.Matches { if test.Matches {
mockImageReader.On("Find", testCtx, imageID).Return(&models.Image{}, nil).Once() expectedStudioID := studioID
expectedStudioID := models.NullInt64(studioID) mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
mockImageReader.On("Update", testCtx, models.ImagePartial{ StudioID: models.NewOptionalInt(expectedStudioID),
ID: imageID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once() }).Return(nil, nil).Once()
} }
image := models.Image{ image := models.Image{
ID: imageID, ID: imageID,
Path: test.Path, Files: []*file.ImageFile{makeImageFile(test.Path)},
} }
err := ImageStudios(testCtx, &image, mockImageReader, mockStudioReader, nil) err := ImageStudios(testCtx, &image, mockImageReader, mockStudioReader, nil)
@@ -154,13 +165,17 @@ func TestImageTags(t *testing.T) {
doTest := func(mockTagReader *mocks.TagReaderWriter, mockImageReader *mocks.ImageReaderWriter, test pathTestTable) { doTest := func(mockTagReader *mocks.TagReaderWriter, mockImageReader *mocks.ImageReaderWriter, test pathTestTable) {
if test.Matches { if test.Matches {
mockImageReader.On("GetTagIDs", testCtx, imageID).Return(nil, nil).Once() mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
mockImageReader.On("UpdateTags", testCtx, imageID, []int{tagID}).Return(nil).Once() TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
image := models.Image{ image := models.Image{
ID: imageID, ID: imageID,
Path: test.Path, Files: []*file.ImageFile{makeImageFile(test.Path)},
} }
err := ImageTags(testCtx, &image, mockImageReader, mockTagReader, nil) err := ImageTags(testCtx, &image, mockImageReader, mockTagReader, nil)

View File

@@ -8,15 +8,19 @@ import (
"database/sql" "database/sql"
"fmt" "fmt"
"os" "os"
"path/filepath"
"testing" "testing"
"github.com/stashapp/stash/pkg/hash/md5" "github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sqlite" "github.com/stashapp/stash/pkg/sqlite"
"github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/txn"
_ "github.com/golang-migrate/migrate/v4/database/sqlite3" _ "github.com/golang-migrate/migrate/v4/database/sqlite3"
_ "github.com/golang-migrate/migrate/v4/source/file" _ "github.com/golang-migrate/migrate/v4/source/file"
// necessary to register custom migrations
_ "github.com/stashapp/stash/pkg/sqlite/migrations"
) )
const testName = "Foo's Bar" const testName = "Foo's Bar"
@@ -28,6 +32,8 @@ const existingStudioGalleryName = testName + ".dontChangeStudio.mp4"
var existingStudioID int var existingStudioID int
const expectedMatchTitle = "expected match"
var db *sqlite.Database var db *sqlite.Database
var r models.Repository var r models.Repository
@@ -53,7 +59,7 @@ func runTests(m *testing.M) int {
f.Close() f.Close()
databaseFile := f.Name() databaseFile := f.Name()
db = &sqlite.Database{} db = sqlite.NewDatabase()
if err := db.Open(databaseFile); err != nil { if err := db.Open(databaseFile); err != nil {
panic(fmt.Sprintf("Could not initialize database: %s", err.Error())) panic(fmt.Sprintf("Could not initialize database: %s", err.Error()))
} }
@@ -117,187 +123,354 @@ func createTag(ctx context.Context, qb models.TagWriter) error {
return nil return nil
} }
func createScenes(ctx context.Context, sqb models.SceneReaderWriter) error { func createScenes(ctx context.Context, sqb models.SceneReaderWriter, folderStore file.FolderStore, fileStore file.Store) error {
// create the scenes // create the scenes
scenePatterns, falseScenePatterns := generateTestPaths(testName, sceneExt) scenePatterns, falseScenePatterns := generateTestPaths(testName, sceneExt)
for _, fn := range scenePatterns { for _, fn := range scenePatterns {
err := createScene(ctx, sqb, makeScene(fn, true)) f, err := createSceneFile(ctx, fn, folderStore, fileStore)
if err != nil { if err != nil {
return err return err
} }
const expectedResult = true
if err := createScene(ctx, sqb, makeScene(expectedResult), f); err != nil {
return err
} }
}
for _, fn := range falseScenePatterns { for _, fn := range falseScenePatterns {
err := createScene(ctx, sqb, makeScene(fn, false)) f, err := createSceneFile(ctx, fn, folderStore, fileStore)
if err != nil { if err != nil {
return err return err
} }
const expectedResult = false
if err := createScene(ctx, sqb, makeScene(expectedResult), f); err != nil {
return err
}
} }
// add organized scenes // add organized scenes
for _, fn := range scenePatterns { for _, fn := range scenePatterns {
s := makeScene("organized"+fn, false) f, err := createSceneFile(ctx, "organized"+fn, folderStore, fileStore)
s.Organized = true
err := createScene(ctx, sqb, s)
if err != nil { if err != nil {
return err return err
} }
const expectedResult = false
s := makeScene(expectedResult)
s.Organized = true
if err := createScene(ctx, sqb, s, f); err != nil {
return err
}
} }
// create scene with existing studio io // create scene with existing studio io
studioScene := makeScene(existingStudioSceneName, true) f, err := createSceneFile(ctx, existingStudioSceneName, folderStore, fileStore)
studioScene.StudioID = sql.NullInt64{Valid: true, Int64: int64(existingStudioID)}
err := createScene(ctx, sqb, studioScene)
if err != nil { if err != nil {
return err return err
} }
s := &models.Scene{
Title: expectedMatchTitle,
URL: existingStudioSceneName,
StudioID: &existingStudioID,
}
if err := createScene(ctx, sqb, s, f); err != nil {
return err
}
return nil return nil
} }
func makeScene(name string, expectedResult bool) *models.Scene { func makeScene(expectedResult bool) *models.Scene {
scene := &models.Scene{ s := &models.Scene{}
Checksum: sql.NullString{String: md5.FromString(name), Valid: true},
Path: name,
}
// if expectedResult is true then we expect it to match, set the title accordingly // if expectedResult is true then we expect it to match, set the title accordingly
if expectedResult { if expectedResult {
scene.Title = sql.NullString{Valid: true, String: name} s.Title = expectedMatchTitle
} }
return scene return s
} }
func createScene(ctx context.Context, sqb models.SceneWriter, scene *models.Scene) error { func createSceneFile(ctx context.Context, name string, folderStore file.FolderStore, fileStore file.Store) (*file.VideoFile, error) {
_, err := sqb.Create(ctx, *scene) folderPath := filepath.Dir(name)
basename := filepath.Base(name)
folder, err := getOrCreateFolder(ctx, folderStore, folderPath)
if err != nil {
return nil, err
}
folderID := folder.ID
f := &file.VideoFile{
BaseFile: &file.BaseFile{
Basename: basename,
ParentFolderID: folderID,
},
}
if err := fileStore.Create(ctx, f); err != nil {
return nil, err
}
return f, nil
}
func getOrCreateFolder(ctx context.Context, folderStore file.FolderStore, folderPath string) (*file.Folder, error) {
f, err := folderStore.FindByPath(ctx, folderPath)
if err != nil {
return nil, fmt.Errorf("getting folder by path: %w", err)
}
if f != nil {
return f, nil
}
var parentID file.FolderID
dir := filepath.Dir(folderPath)
if dir != "." {
parent, err := getOrCreateFolder(ctx, folderStore, dir)
if err != nil {
return nil, err
}
parentID = parent.ID
}
f = &file.Folder{
Path: folderPath,
}
if parentID != 0 {
f.ParentFolderID = &parentID
}
if err := folderStore.Create(ctx, f); err != nil {
return nil, fmt.Errorf("creating folder: %w", err)
}
return f, nil
}
func createScene(ctx context.Context, sqb models.SceneWriter, s *models.Scene, f *file.VideoFile) error {
err := sqb.Create(ctx, s, []file.ID{f.ID})
if err != nil { if err != nil {
return fmt.Errorf("Failed to create scene with name '%s': %s", scene.Path, err.Error()) return fmt.Errorf("Failed to create scene with path '%s': %s", f.Path, err.Error())
} }
return nil return nil
} }
func createImages(ctx context.Context, sqb models.ImageReaderWriter) error { func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore file.FolderStore, fileStore file.Store) error {
// create the images // create the images
imagePatterns, falseImagePatterns := generateTestPaths(testName, imageExt) imagePatterns, falseImagePatterns := generateTestPaths(testName, imageExt)
for _, fn := range imagePatterns { for _, fn := range imagePatterns {
err := createImage(ctx, sqb, makeImage(fn, true)) f, err := createImageFile(ctx, fn, folderStore, fileStore)
if err != nil { if err != nil {
return err return err
} }
const expectedResult = true
if err := createImage(ctx, w, makeImage(expectedResult), f); err != nil {
return err
}
} }
for _, fn := range falseImagePatterns { for _, fn := range falseImagePatterns {
err := createImage(ctx, sqb, makeImage(fn, false)) f, err := createImageFile(ctx, fn, folderStore, fileStore)
if err != nil { if err != nil {
return err return err
} }
const expectedResult = false
if err := createImage(ctx, w, makeImage(expectedResult), f); err != nil {
return err
}
} }
// add organized images // add organized images
for _, fn := range imagePatterns { for _, fn := range imagePatterns {
s := makeImage("organized"+fn, false) f, err := createImageFile(ctx, "organized"+fn, folderStore, fileStore)
s.Organized = true
err := createImage(ctx, sqb, s)
if err != nil { if err != nil {
return err return err
} }
const expectedResult = false
s := makeImage(expectedResult)
s.Organized = true
if err := createImage(ctx, w, s, f); err != nil {
return err
}
} }
// create image with existing studio io // create image with existing studio io
studioImage := makeImage(existingStudioImageName, true) f, err := createImageFile(ctx, existingStudioImageName, folderStore, fileStore)
studioImage.StudioID = sql.NullInt64{Valid: true, Int64: int64(existingStudioID)}
err := createImage(ctx, sqb, studioImage)
if err != nil { if err != nil {
return err return err
} }
s := &models.Image{
Title: existingStudioImageName,
StudioID: &existingStudioID,
}
if err := createImage(ctx, w, s, f); err != nil {
return err
}
return nil return nil
} }
func makeImage(name string, expectedResult bool) *models.Image { func createImageFile(ctx context.Context, name string, folderStore file.FolderStore, fileStore file.Store) (*file.ImageFile, error) {
image := &models.Image{ folderPath := filepath.Dir(name)
Checksum: md5.FromString(name), basename := filepath.Base(name)
Path: name,
folder, err := getOrCreateFolder(ctx, folderStore, folderPath)
if err != nil {
return nil, err
} }
folderID := folder.ID
f := &file.ImageFile{
BaseFile: &file.BaseFile{
Basename: basename,
ParentFolderID: folderID,
},
}
if err := fileStore.Create(ctx, f); err != nil {
return nil, err
}
return f, nil
}
func makeImage(expectedResult bool) *models.Image {
o := &models.Image{}
// if expectedResult is true then we expect it to match, set the title accordingly // if expectedResult is true then we expect it to match, set the title accordingly
if expectedResult { if expectedResult {
image.Title = sql.NullString{Valid: true, String: name} o.Title = expectedMatchTitle
} }
return image return o
} }
func createImage(ctx context.Context, sqb models.ImageWriter, image *models.Image) error { func createImage(ctx context.Context, w models.ImageWriter, o *models.Image, f *file.ImageFile) error {
_, err := sqb.Create(ctx, *image) err := w.Create(ctx, &models.ImageCreateInput{
Image: o,
FileIDs: []file.ID{f.ID},
})
if err != nil { if err != nil {
return fmt.Errorf("Failed to create image with name '%s': %s", image.Path, err.Error()) return fmt.Errorf("Failed to create image with path '%s': %s", f.Path, err.Error())
} }
return nil return nil
} }
func createGalleries(ctx context.Context, sqb models.GalleryReaderWriter) error { func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderStore file.FolderStore, fileStore file.Store) error {
// create the galleries // create the galleries
galleryPatterns, falseGalleryPatterns := generateTestPaths(testName, galleryExt) galleryPatterns, falseGalleryPatterns := generateTestPaths(testName, galleryExt)
for _, fn := range galleryPatterns { for _, fn := range galleryPatterns {
err := createGallery(ctx, sqb, makeGallery(fn, true)) f, err := createGalleryFile(ctx, fn, folderStore, fileStore)
if err != nil { if err != nil {
return err return err
} }
const expectedResult = true
if err := createGallery(ctx, w, makeGallery(expectedResult), f); err != nil {
return err
}
} }
for _, fn := range falseGalleryPatterns { for _, fn := range falseGalleryPatterns {
err := createGallery(ctx, sqb, makeGallery(fn, false)) f, err := createGalleryFile(ctx, fn, folderStore, fileStore)
if err != nil { if err != nil {
return err return err
} }
const expectedResult = false
if err := createGallery(ctx, w, makeGallery(expectedResult), f); err != nil {
return err
}
} }
// add organized galleries // add organized galleries
for _, fn := range galleryPatterns { for _, fn := range galleryPatterns {
s := makeGallery("organized"+fn, false) f, err := createGalleryFile(ctx, "organized"+fn, folderStore, fileStore)
s.Organized = true
err := createGallery(ctx, sqb, s)
if err != nil { if err != nil {
return err return err
} }
const expectedResult = false
s := makeGallery(expectedResult)
s.Organized = true
if err := createGallery(ctx, w, s, f); err != nil {
return err
}
} }
// create gallery with existing studio io // create gallery with existing studio io
studioGallery := makeGallery(existingStudioGalleryName, true) f, err := createGalleryFile(ctx, existingStudioGalleryName, folderStore, fileStore)
studioGallery.StudioID = sql.NullInt64{Valid: true, Int64: int64(existingStudioID)}
err := createGallery(ctx, sqb, studioGallery)
if err != nil { if err != nil {
return err return err
} }
s := &models.Gallery{
Title: existingStudioGalleryName,
StudioID: &existingStudioID,
}
if err := createGallery(ctx, w, s, f); err != nil {
return err
}
return nil return nil
} }
func makeGallery(name string, expectedResult bool) *models.Gallery { func createGalleryFile(ctx context.Context, name string, folderStore file.FolderStore, fileStore file.Store) (*file.BaseFile, error) {
gallery := &models.Gallery{ folderPath := filepath.Dir(name)
Checksum: md5.FromString(name), basename := filepath.Base(name)
Path: models.NullString(name),
folder, err := getOrCreateFolder(ctx, folderStore, folderPath)
if err != nil {
return nil, err
} }
folderID := folder.ID
f := &file.BaseFile{
Basename: basename,
ParentFolderID: folderID,
}
if err := fileStore.Create(ctx, f); err != nil {
return nil, err
}
return f, nil
}
func makeGallery(expectedResult bool) *models.Gallery {
o := &models.Gallery{}
// if expectedResult is true then we expect it to match, set the title accordingly // if expectedResult is true then we expect it to match, set the title accordingly
if expectedResult { if expectedResult {
gallery.Title = sql.NullString{Valid: true, String: name} o.Title = expectedMatchTitle
} }
return gallery return o
} }
func createGallery(ctx context.Context, sqb models.GalleryWriter, gallery *models.Gallery) error { func createGallery(ctx context.Context, w models.GalleryWriter, o *models.Gallery, f *file.BaseFile) error {
_, err := sqb.Create(ctx, *gallery) err := w.Create(ctx, o, []file.ID{f.ID})
if err != nil { if err != nil {
return fmt.Errorf("Failed to create gallery with name '%s': %s", gallery.Path.String, err.Error()) return fmt.Errorf("Failed to create gallery with path '%s': %s", f.Path, err.Error())
} }
return nil return nil
@@ -332,17 +505,17 @@ func populateDB() error {
return err return err
} }
err = createScenes(ctx, r.Scene) err = createScenes(ctx, r.Scene, r.Folder, r.File)
if err != nil { if err != nil {
return err return err
} }
err = createImages(ctx, r.Image) err = createImages(ctx, r.Image, r.Folder, r.File)
if err != nil { if err != nil {
return err return err
} }
err = createGalleries(ctx, r.Gallery) err = createGalleries(ctx, r.Gallery, r.Folder, r.File)
if err != nil { if err != nil {
return err return err
} }
@@ -391,10 +564,10 @@ func TestParsePerformerScenes(t *testing.T) {
} }
// title is only set on scenes where we expect performer to be set // title is only set on scenes where we expect performer to be set
if scene.Title.String == scene.Path && len(performers) == 0 { if scene.Title == expectedMatchTitle && len(performers) == 0 {
t.Errorf("Did not set performer '%s' for path '%s'", testName, scene.Path) t.Errorf("Did not set performer '%s' for path '%s'", testName, scene.Path())
} else if scene.Title.String != scene.Path && len(performers) > 0 { } else if scene.Title != expectedMatchTitle && len(performers) > 0 {
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, scene.Path) t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, scene.Path())
} }
} }
@@ -435,21 +608,21 @@ func TestParseStudioScenes(t *testing.T) {
for _, scene := range scenes { for _, scene := range scenes {
// check for existing studio id scene first // check for existing studio id scene first
if scene.Path == existingStudioSceneName { if scene.URL == existingStudioSceneName {
if scene.StudioID.Int64 != int64(existingStudioID) { if scene.StudioID == nil || *scene.StudioID != existingStudioID {
t.Error("Incorrectly overwrote studio ID for scene with existing studio ID") t.Error("Incorrectly overwrote studio ID for scene with existing studio ID")
} }
} else { } else {
// title is only set on scenes where we expect studio to be set // title is only set on scenes where we expect studio to be set
if scene.Title.String == scene.Path { if scene.Title == expectedMatchTitle {
if !scene.StudioID.Valid { if scene.StudioID == nil {
t.Errorf("Did not set studio '%s' for path '%s'", testName, scene.Path) t.Errorf("Did not set studio '%s' for path '%s'", testName, scene.Path())
} else if scene.StudioID.Int64 != int64(studios[1].ID) { } else if scene.StudioID != nil && *scene.StudioID != studios[1].ID {
t.Errorf("Incorrect studio id %d set for path '%s'", scene.StudioID.Int64, scene.Path) t.Errorf("Incorrect studio id %d set for path '%s'", scene.StudioID, scene.Path())
} }
} else if scene.Title.String != scene.Path && scene.StudioID.Int64 == int64(studios[1].ID) { } else if scene.Title != expectedMatchTitle && scene.StudioID != nil && *scene.StudioID == studios[1].ID {
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, scene.Path) t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, scene.Path())
} }
} }
} }
@@ -499,10 +672,10 @@ func TestParseTagScenes(t *testing.T) {
} }
// title is only set on scenes where we expect tag to be set // title is only set on scenes where we expect tag to be set
if scene.Title.String == scene.Path && len(tags) == 0 { if scene.Title == expectedMatchTitle && len(tags) == 0 {
t.Errorf("Did not set tag '%s' for path '%s'", testName, scene.Path) t.Errorf("Did not set tag '%s' for path '%s'", testName, scene.Path())
} else if scene.Title.String != scene.Path && len(tags) > 0 { } else if (scene.Title != expectedMatchTitle) && len(tags) > 0 {
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, scene.Path) t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, scene.Path())
} }
} }
@@ -546,10 +719,11 @@ func TestParsePerformerImages(t *testing.T) {
} }
// title is only set on images where we expect performer to be set // title is only set on images where we expect performer to be set
if image.Title.String == image.Path && len(performers) == 0 { expectedMatch := image.Title == expectedMatchTitle || image.Title == existingStudioImageName
t.Errorf("Did not set performer '%s' for path '%s'", testName, image.Path) if expectedMatch && len(performers) == 0 {
} else if image.Title.String != image.Path && len(performers) > 0 { t.Errorf("Did not set performer '%s' for path '%s'", testName, image.Path())
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, image.Path) } else if !expectedMatch && len(performers) > 0 {
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, image.Path())
} }
} }
@@ -590,21 +764,21 @@ func TestParseStudioImages(t *testing.T) {
for _, image := range images { for _, image := range images {
// check for existing studio id image first // check for existing studio id image first
if image.Path == existingStudioImageName { if image.Title == existingStudioImageName {
if image.StudioID.Int64 != int64(existingStudioID) { if *image.StudioID != existingStudioID {
t.Error("Incorrectly overwrote studio ID for image with existing studio ID") t.Error("Incorrectly overwrote studio ID for image with existing studio ID")
} }
} else { } else {
// title is only set on images where we expect studio to be set // title is only set on images where we expect studio to be set
if image.Title.String == image.Path { if image.Title == expectedMatchTitle {
if !image.StudioID.Valid { if image.StudioID == nil {
t.Errorf("Did not set studio '%s' for path '%s'", testName, image.Path) t.Errorf("Did not set studio '%s' for path '%s'", testName, image.Path())
} else if image.StudioID.Int64 != int64(studios[1].ID) { } else if *image.StudioID != studios[1].ID {
t.Errorf("Incorrect studio id %d set for path '%s'", image.StudioID.Int64, image.Path) t.Errorf("Incorrect studio id %d set for path '%s'", *image.StudioID, image.Path())
} }
} else if image.Title.String != image.Path && image.StudioID.Int64 == int64(studios[1].ID) { } else if image.Title != expectedMatchTitle && image.StudioID != nil && *image.StudioID == studios[1].ID {
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, image.Path) t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, image.Path())
} }
} }
} }
@@ -654,10 +828,11 @@ func TestParseTagImages(t *testing.T) {
} }
// title is only set on images where we expect performer to be set // title is only set on images where we expect performer to be set
if image.Title.String == image.Path && len(tags) == 0 { expectedMatch := image.Title == expectedMatchTitle || image.Title == existingStudioImageName
t.Errorf("Did not set tag '%s' for path '%s'", testName, image.Path) if expectedMatch && len(tags) == 0 {
} else if image.Title.String != image.Path && len(tags) > 0 { t.Errorf("Did not set tag '%s' for path '%s'", testName, image.Path())
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, image.Path) } else if !expectedMatch && len(tags) > 0 {
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, image.Path())
} }
} }
@@ -701,10 +876,11 @@ func TestParsePerformerGalleries(t *testing.T) {
} }
// title is only set on galleries where we expect performer to be set // title is only set on galleries where we expect performer to be set
if gallery.Title.String == gallery.Path.String && len(performers) == 0 { expectedMatch := gallery.Title == expectedMatchTitle || gallery.Title == existingStudioGalleryName
t.Errorf("Did not set performer '%s' for path '%s'", testName, gallery.Path.String) if expectedMatch && len(performers) == 0 {
} else if gallery.Title.String != gallery.Path.String && len(performers) > 0 { t.Errorf("Did not set performer '%s' for path '%s'", testName, gallery.Path())
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, gallery.Path.String) } else if !expectedMatch && len(performers) > 0 {
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, gallery.Path())
} }
} }
@@ -745,21 +921,21 @@ func TestParseStudioGalleries(t *testing.T) {
for _, gallery := range galleries { for _, gallery := range galleries {
// check for existing studio id gallery first // check for existing studio id gallery first
if gallery.Path.String == existingStudioGalleryName { if gallery.Title == existingStudioGalleryName {
if gallery.StudioID.Int64 != int64(existingStudioID) { if *gallery.StudioID != existingStudioID {
t.Error("Incorrectly overwrote studio ID for gallery with existing studio ID") t.Error("Incorrectly overwrote studio ID for gallery with existing studio ID")
} }
} else { } else {
// title is only set on galleries where we expect studio to be set // title is only set on galleries where we expect studio to be set
if gallery.Title.String == gallery.Path.String { if gallery.Title == expectedMatchTitle {
if !gallery.StudioID.Valid { if gallery.StudioID == nil {
t.Errorf("Did not set studio '%s' for path '%s'", testName, gallery.Path.String) t.Errorf("Did not set studio '%s' for path '%s'", testName, gallery.Path())
} else if gallery.StudioID.Int64 != int64(studios[1].ID) { } else if *gallery.StudioID != studios[1].ID {
t.Errorf("Incorrect studio id %d set for path '%s'", gallery.StudioID.Int64, gallery.Path.String) t.Errorf("Incorrect studio id %d set for path '%s'", *gallery.StudioID, gallery.Path())
} }
} else if gallery.Title.String != gallery.Path.String && gallery.StudioID.Int64 == int64(studios[1].ID) { } else if gallery.Title != expectedMatchTitle && (gallery.StudioID != nil && *gallery.StudioID == studios[1].ID) {
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, gallery.Path.String) t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, gallery.Path())
} }
} }
} }
@@ -809,10 +985,11 @@ func TestParseTagGalleries(t *testing.T) {
} }
// title is only set on galleries where we expect performer to be set // title is only set on galleries where we expect performer to be set
if gallery.Title.String == gallery.Path.String && len(tags) == 0 { expectedMatch := gallery.Title == expectedMatchTitle || gallery.Title == existingStudioGalleryName
t.Errorf("Did not set tag '%s' for path '%s'", testName, gallery.Path.String) if expectedMatch && len(tags) == 0 {
} else if gallery.Title.String != gallery.Path.String && len(tags) > 0 { t.Errorf("Did not set tag '%s' for path '%s'", testName, gallery.Path())
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, gallery.Path.String) } else if !expectedMatch && len(tags) > 0 {
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, gallery.Path())
} }
} }

View File

@@ -12,17 +12,17 @@ import (
type SceneQueryPerformerUpdater interface { type SceneQueryPerformerUpdater interface {
scene.Queryer scene.Queryer
scene.PerformerUpdater scene.PartialUpdater
} }
type ImageQueryPerformerUpdater interface { type ImageQueryPerformerUpdater interface {
image.Queryer image.Queryer
image.PerformerUpdater image.PartialUpdater
} }
type GalleryQueryPerformerUpdater interface { type GalleryQueryPerformerUpdater interface {
gallery.Queryer gallery.Queryer
gallery.PerformerUpdater gallery.PartialUpdater
} }
func getPerformerTagger(p *models.Performer, cache *match.Cache) tagger { func getPerformerTagger(p *models.Performer, cache *match.Cache) tagger {
@@ -38,8 +38,8 @@ func getPerformerTagger(p *models.Performer, cache *match.Cache) tagger {
func PerformerScenes(ctx context.Context, p *models.Performer, paths []string, rw SceneQueryPerformerUpdater, cache *match.Cache) error { func PerformerScenes(ctx context.Context, p *models.Performer, paths []string, rw SceneQueryPerformerUpdater, cache *match.Cache) error {
t := getPerformerTagger(p, cache) t := getPerformerTagger(p, cache)
return t.tagScenes(ctx, paths, rw, func(subjectID, otherID int) (bool, error) { return t.tagScenes(ctx, paths, rw, func(o *models.Scene) (bool, error) {
return scene.AddPerformer(ctx, rw, otherID, subjectID) return scene.AddPerformer(ctx, rw, o, p.ID)
}) })
} }
@@ -47,8 +47,8 @@ func PerformerScenes(ctx context.Context, p *models.Performer, paths []string, r
func PerformerImages(ctx context.Context, p *models.Performer, paths []string, rw ImageQueryPerformerUpdater, cache *match.Cache) error { func PerformerImages(ctx context.Context, p *models.Performer, paths []string, rw ImageQueryPerformerUpdater, cache *match.Cache) error {
t := getPerformerTagger(p, cache) t := getPerformerTagger(p, cache)
return t.tagImages(ctx, paths, rw, func(subjectID, otherID int) (bool, error) { return t.tagImages(ctx, paths, rw, func(i *models.Image) (bool, error) {
return image.AddPerformer(ctx, rw, otherID, subjectID) return image.AddPerformer(ctx, rw, i, p.ID)
}) })
} }
@@ -56,7 +56,7 @@ func PerformerImages(ctx context.Context, p *models.Performer, paths []string, r
func PerformerGalleries(ctx context.Context, p *models.Performer, paths []string, rw GalleryQueryPerformerUpdater, cache *match.Cache) error { func PerformerGalleries(ctx context.Context, p *models.Performer, paths []string, rw GalleryQueryPerformerUpdater, cache *match.Cache) error {
t := getPerformerTagger(p, cache) t := getPerformerTagger(p, cache)
return t.tagGalleries(ctx, paths, rw, func(subjectID, otherID int) (bool, error) { return t.tagGalleries(ctx, paths, rw, func(o *models.Gallery) (bool, error) {
return gallery.AddPerformer(ctx, rw, otherID, subjectID) return gallery.AddPerformer(ctx, rw, o, p.ID)
}) })
} }

View File

@@ -3,6 +3,7 @@ package autotag
import ( import (
"testing" "testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks" "github.com/stashapp/stash/pkg/models/mocks"
@@ -48,7 +49,13 @@ func testPerformerScenes(t *testing.T, performerName, expectedRegex string) {
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
scenes = append(scenes, &models.Scene{ scenes = append(scenes, &models.Scene{
ID: i + 1, ID: i + 1,
Files: []*file.VideoFile{
{
BaseFile: &file.BaseFile{
Path: p, Path: p,
},
},
},
}) })
} }
@@ -77,8 +84,12 @@ func testPerformerScenes(t *testing.T, performerName, expectedRegex string) {
for i := range matchingPaths { for i := range matchingPaths {
sceneID := i + 1 sceneID := i + 1
mockSceneReader.On("GetPerformerIDs", testCtx, sceneID).Return(nil, nil).Once() mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
mockSceneReader.On("UpdatePerformers", testCtx, sceneID, []int{performerID}).Return(nil).Once() PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
err := PerformerScenes(testCtx, &performer, nil, mockSceneReader, nil) err := PerformerScenes(testCtx, &performer, nil, mockSceneReader, nil)
@@ -123,7 +134,7 @@ func testPerformerImages(t *testing.T, performerName, expectedRegex string) {
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
images = append(images, &models.Image{ images = append(images, &models.Image{
ID: i + 1, ID: i + 1,
Path: p, Files: []*file.ImageFile{makeImageFile(p)},
}) })
} }
@@ -152,8 +163,12 @@ func testPerformerImages(t *testing.T, performerName, expectedRegex string) {
for i := range matchingPaths { for i := range matchingPaths {
imageID := i + 1 imageID := i + 1
mockImageReader.On("GetPerformerIDs", testCtx, imageID).Return(nil, nil).Once() mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
mockImageReader.On("UpdatePerformers", testCtx, imageID, []int{performerID}).Return(nil).Once() PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
err := PerformerImages(testCtx, &performer, nil, mockImageReader, nil) err := PerformerImages(testCtx, &performer, nil, mockImageReader, nil)
@@ -196,9 +211,14 @@ func testPerformerGalleries(t *testing.T, performerName, expectedRegex string) {
var galleries []*models.Gallery var galleries []*models.Gallery
matchingPaths, falsePaths := generateTestPaths(performerName, galleryExt) matchingPaths, falsePaths := generateTestPaths(performerName, galleryExt)
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
v := p
galleries = append(galleries, &models.Gallery{ galleries = append(galleries, &models.Gallery{
ID: i + 1, ID: i + 1,
Path: models.NullString(p), Files: []file.File{
&file.BaseFile{
Path: v,
},
},
}) })
} }
@@ -226,8 +246,12 @@ func testPerformerGalleries(t *testing.T, performerName, expectedRegex string) {
for i := range matchingPaths { for i := range matchingPaths {
galleryID := i + 1 galleryID := i + 1
mockGalleryReader.On("GetPerformerIDs", testCtx, galleryID).Return(nil, nil).Once() mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
mockGalleryReader.On("UpdatePerformers", testCtx, galleryID, []int{performerID}).Return(nil).Once() PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
err := PerformerGalleries(testCtx, &performer, nil, mockGalleryReader, nil) err := PerformerGalleries(testCtx, &performer, nil, mockGalleryReader, nil)

View File

@@ -13,17 +13,17 @@ func getSceneFileTagger(s *models.Scene, cache *match.Cache) tagger {
ID: s.ID, ID: s.ID,
Type: "scene", Type: "scene",
Name: s.GetTitle(), Name: s.GetTitle(),
Path: s.Path, Path: s.Path(),
cache: cache, cache: cache,
} }
} }
// ScenePerformers tags the provided scene with performers whose name matches the scene's path. // ScenePerformers tags the provided scene with performers whose name matches the scene's path.
func ScenePerformers(ctx context.Context, s *models.Scene, rw scene.PerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error { func ScenePerformers(ctx context.Context, s *models.Scene, rw scene.PartialUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
t := getSceneFileTagger(s, cache) t := getSceneFileTagger(s, cache)
return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) { return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) {
return scene.AddPerformer(ctx, rw, subjectID, otherID) return scene.AddPerformer(ctx, rw, s, otherID)
}) })
} }
@@ -31,7 +31,7 @@ func ScenePerformers(ctx context.Context, s *models.Scene, rw scene.PerformerUpd
// //
// Scenes will not be tagged if studio is already set. // Scenes will not be tagged if studio is already set.
func SceneStudios(ctx context.Context, s *models.Scene, rw SceneFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error { func SceneStudios(ctx context.Context, s *models.Scene, rw SceneFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error {
if s.StudioID.Valid { if s.StudioID != nil {
// don't modify // don't modify
return nil return nil
} }
@@ -39,15 +39,15 @@ func SceneStudios(ctx context.Context, s *models.Scene, rw SceneFinderUpdater, s
t := getSceneFileTagger(s, cache) t := getSceneFileTagger(s, cache)
return t.tagStudios(ctx, studioReader, func(subjectID, otherID int) (bool, error) { return t.tagStudios(ctx, studioReader, func(subjectID, otherID int) (bool, error) {
return addSceneStudio(ctx, rw, subjectID, otherID) return addSceneStudio(ctx, rw, s, otherID)
}) })
} }
// SceneTags tags the provided scene with tags whose name matches the scene's path. // SceneTags tags the provided scene with tags whose name matches the scene's path.
func SceneTags(ctx context.Context, s *models.Scene, rw scene.TagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error { func SceneTags(ctx context.Context, s *models.Scene, rw scene.PartialUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
t := getSceneFileTagger(s, cache) t := getSceneFileTagger(s, cache)
return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) { return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) {
return scene.AddTag(ctx, rw, subjectID, otherID) return scene.AddTag(ctx, rw, s, otherID)
}) })
} }

View File

@@ -5,6 +5,7 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks" "github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@@ -176,15 +177,26 @@ func TestScenePerformers(t *testing.T) {
mockPerformerReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil) mockPerformerReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil)
mockPerformerReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once() mockPerformerReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once()
if test.Matches {
mockSceneReader.On("GetPerformerIDs", testCtx, sceneID).Return(nil, nil).Once()
mockSceneReader.On("UpdatePerformers", testCtx, sceneID, []int{performerID}).Return(nil).Once()
}
scene := models.Scene{ scene := models.Scene{
ID: sceneID, ID: sceneID,
Files: []*file.VideoFile{
{
BaseFile: &file.BaseFile{
Path: test.Path, Path: test.Path,
},
},
},
} }
if test.Matches {
mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
}
err := ScenePerformers(testCtx, &scene, mockSceneReader, mockPerformerReader, nil) err := ScenePerformers(testCtx, &scene, mockSceneReader, mockPerformerReader, nil)
assert.Nil(err) assert.Nil(err)
@@ -196,9 +208,11 @@ func TestScenePerformers(t *testing.T) {
func TestSceneStudios(t *testing.T) { func TestSceneStudios(t *testing.T) {
t.Parallel() t.Parallel()
const sceneID = 1 var (
const studioName = "studio name" sceneID = 1
const studioID = 2 studioName = "studio name"
studioID = 2
)
studio := models.Studio{ studio := models.Studio{
ID: studioID, ID: studioID,
Name: models.NullString(studioName), Name: models.NullString(studioName),
@@ -217,17 +231,21 @@ func TestSceneStudios(t *testing.T) {
doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockSceneReader *mocks.SceneReaderWriter, test pathTestTable) { doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockSceneReader *mocks.SceneReaderWriter, test pathTestTable) {
if test.Matches { if test.Matches {
mockSceneReader.On("Find", testCtx, sceneID).Return(&models.Scene{}, nil).Once() expectedStudioID := studioID
expectedStudioID := models.NullInt64(studioID) mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
mockSceneReader.On("Update", testCtx, models.ScenePartial{ StudioID: models.NewOptionalInt(expectedStudioID),
ID: sceneID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once() }).Return(nil, nil).Once()
} }
scene := models.Scene{ scene := models.Scene{
ID: sceneID, ID: sceneID,
Files: []*file.VideoFile{
{
BaseFile: &file.BaseFile{
Path: test.Path, Path: test.Path,
},
},
},
} }
err := SceneStudios(testCtx, &scene, mockSceneReader, mockStudioReader, nil) err := SceneStudios(testCtx, &scene, mockSceneReader, mockStudioReader, nil)
@@ -290,13 +308,23 @@ func TestSceneTags(t *testing.T) {
doTest := func(mockTagReader *mocks.TagReaderWriter, mockSceneReader *mocks.SceneReaderWriter, test pathTestTable) { doTest := func(mockTagReader *mocks.TagReaderWriter, mockSceneReader *mocks.SceneReaderWriter, test pathTestTable) {
if test.Matches { if test.Matches {
mockSceneReader.On("GetTagIDs", testCtx, sceneID).Return(nil, nil).Once() mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
mockSceneReader.On("UpdateTags", testCtx, sceneID, []int{tagID}).Return(nil).Once() TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
scene := models.Scene{ scene := models.Scene{
ID: sceneID, ID: sceneID,
Files: []*file.VideoFile{
{
BaseFile: &file.BaseFile{
Path: test.Path, Path: test.Path,
},
},
},
} }
err := SceneTags(testCtx, &scene, mockSceneReader, mockTagReader, nil) err := SceneTags(testCtx, &scene, mockSceneReader, mockTagReader, nil)

View File

@@ -2,7 +2,6 @@ package autotag
import ( import (
"context" "context"
"database/sql"
"github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
@@ -11,73 +10,52 @@ import (
"github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/scene"
) )
func addSceneStudio(ctx context.Context, sceneWriter SceneFinderUpdater, sceneID, studioID int) (bool, error) { func addSceneStudio(ctx context.Context, sceneWriter scene.PartialUpdater, o *models.Scene, studioID int) (bool, error) {
// don't set if already set // don't set if already set
scene, err := sceneWriter.Find(ctx, sceneID) if o.StudioID != nil {
if err != nil {
return false, err
}
if scene.StudioID.Valid {
return false, nil return false, nil
} }
// set the studio id // set the studio id
s := sql.NullInt64{Int64: int64(studioID), Valid: true}
scenePartial := models.ScenePartial{ scenePartial := models.ScenePartial{
ID: sceneID, StudioID: models.NewOptionalInt(studioID),
StudioID: &s,
} }
if _, err := sceneWriter.Update(ctx, scenePartial); err != nil { if _, err := sceneWriter.UpdatePartial(ctx, o.ID, scenePartial); err != nil {
return false, err return false, err
} }
return true, nil return true, nil
} }
func addImageStudio(ctx context.Context, imageWriter ImageFinderUpdater, imageID, studioID int) (bool, error) { func addImageStudio(ctx context.Context, imageWriter image.PartialUpdater, i *models.Image, studioID int) (bool, error) {
// don't set if already set // don't set if already set
image, err := imageWriter.Find(ctx, imageID) if i.StudioID != nil {
if err != nil {
return false, err
}
if image.StudioID.Valid {
return false, nil return false, nil
} }
// set the studio id // set the studio id
s := sql.NullInt64{Int64: int64(studioID), Valid: true}
imagePartial := models.ImagePartial{ imagePartial := models.ImagePartial{
ID: imageID, StudioID: models.NewOptionalInt(studioID),
StudioID: &s,
} }
if _, err := imageWriter.Update(ctx, imagePartial); err != nil { if _, err := imageWriter.UpdatePartial(ctx, i.ID, imagePartial); err != nil {
return false, err return false, err
} }
return true, nil return true, nil
} }
func addGalleryStudio(ctx context.Context, galleryWriter GalleryFinderUpdater, galleryID, studioID int) (bool, error) { func addGalleryStudio(ctx context.Context, galleryWriter GalleryFinderUpdater, o *models.Gallery, studioID int) (bool, error) {
// don't set if already set // don't set if already set
gallery, err := galleryWriter.Find(ctx, galleryID) if o.StudioID != nil {
if err != nil {
return false, err
}
if gallery.StudioID.Valid {
return false, nil return false, nil
} }
// set the studio id // set the studio id
s := sql.NullInt64{Int64: int64(studioID), Valid: true}
galleryPartial := models.GalleryPartial{ galleryPartial := models.GalleryPartial{
ID: galleryID, StudioID: models.NewOptionalInt(studioID),
StudioID: &s,
} }
if _, err := galleryWriter.UpdatePartial(ctx, galleryPartial); err != nil { if _, err := galleryWriter.UpdatePartial(ctx, o.ID, galleryPartial); err != nil {
return false, err return false, err
} }
return true, nil return true, nil
@@ -104,8 +82,7 @@ func getStudioTagger(p *models.Studio, aliases []string, cache *match.Cache) []t
type SceneFinderUpdater interface { type SceneFinderUpdater interface {
scene.Queryer scene.Queryer
Find(ctx context.Context, id int) (*models.Scene, error) scene.PartialUpdater
Update(ctx context.Context, updatedScene models.ScenePartial) (*models.Scene, error)
} }
// StudioScenes searches for scenes whose path matches the provided studio name and tags the scene with the studio, if studio is not already set on the scene. // StudioScenes searches for scenes whose path matches the provided studio name and tags the scene with the studio, if studio is not already set on the scene.
@@ -113,8 +90,8 @@ func StudioScenes(ctx context.Context, p *models.Studio, paths []string, aliases
t := getStudioTagger(p, aliases, cache) t := getStudioTagger(p, aliases, cache)
for _, tt := range t { for _, tt := range t {
if err := tt.tagScenes(ctx, paths, rw, func(subjectID, otherID int) (bool, error) { if err := tt.tagScenes(ctx, paths, rw, func(o *models.Scene) (bool, error) {
return addSceneStudio(ctx, rw, otherID, subjectID) return addSceneStudio(ctx, rw, o, p.ID)
}); err != nil { }); err != nil {
return err return err
} }
@@ -126,7 +103,7 @@ func StudioScenes(ctx context.Context, p *models.Studio, paths []string, aliases
type ImageFinderUpdater interface { type ImageFinderUpdater interface {
image.Queryer image.Queryer
Find(ctx context.Context, id int) (*models.Image, error) Find(ctx context.Context, id int) (*models.Image, error)
Update(ctx context.Context, updatedImage models.ImagePartial) (*models.Image, error) UpdatePartial(ctx context.Context, id int, partial models.ImagePartial) (*models.Image, error)
} }
// StudioImages searches for images whose path matches the provided studio name and tags the image with the studio, if studio is not already set on the image. // StudioImages searches for images whose path matches the provided studio name and tags the image with the studio, if studio is not already set on the image.
@@ -134,8 +111,8 @@ func StudioImages(ctx context.Context, p *models.Studio, paths []string, aliases
t := getStudioTagger(p, aliases, cache) t := getStudioTagger(p, aliases, cache)
for _, tt := range t { for _, tt := range t {
if err := tt.tagImages(ctx, paths, rw, func(subjectID, otherID int) (bool, error) { if err := tt.tagImages(ctx, paths, rw, func(i *models.Image) (bool, error) {
return addImageStudio(ctx, rw, otherID, subjectID) return addImageStudio(ctx, rw, i, p.ID)
}); err != nil { }); err != nil {
return err return err
} }
@@ -146,8 +123,8 @@ func StudioImages(ctx context.Context, p *models.Studio, paths []string, aliases
type GalleryFinderUpdater interface { type GalleryFinderUpdater interface {
gallery.Queryer gallery.Queryer
gallery.PartialUpdater
Find(ctx context.Context, id int) (*models.Gallery, error) Find(ctx context.Context, id int) (*models.Gallery, error)
UpdatePartial(ctx context.Context, updatedGallery models.GalleryPartial) (*models.Gallery, error)
} }
// StudioGalleries searches for galleries whose path matches the provided studio name and tags the gallery with the studio, if studio is not already set on the gallery. // StudioGalleries searches for galleries whose path matches the provided studio name and tags the gallery with the studio, if studio is not already set on the gallery.
@@ -155,8 +132,8 @@ func StudioGalleries(ctx context.Context, p *models.Studio, paths []string, alia
t := getStudioTagger(p, aliases, cache) t := getStudioTagger(p, aliases, cache)
for _, tt := range t { for _, tt := range t {
if err := tt.tagGalleries(ctx, paths, rw, func(subjectID, otherID int) (bool, error) { if err := tt.tagGalleries(ctx, paths, rw, func(o *models.Gallery) (bool, error) {
return addGalleryStudio(ctx, rw, otherID, subjectID) return addGalleryStudio(ctx, rw, o, p.ID)
}); err != nil { }); err != nil {
return err return err
} }

View File

@@ -3,6 +3,7 @@ package autotag
import ( import (
"testing" "testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks" "github.com/stashapp/stash/pkg/models/mocks"
@@ -72,7 +73,7 @@ func testStudioScenes(t *testing.T, tc testStudioCase) {
mockSceneReader := &mocks.SceneReaderWriter{} mockSceneReader := &mocks.SceneReaderWriter{}
const studioID = 2 var studioID = 2
var aliases []string var aliases []string
@@ -88,7 +89,13 @@ func testStudioScenes(t *testing.T, tc testStudioCase) {
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
scenes = append(scenes, &models.Scene{ scenes = append(scenes, &models.Scene{
ID: i + 1, ID: i + 1,
Files: []*file.VideoFile{
{
BaseFile: &file.BaseFile{
Path: p, Path: p,
},
},
},
}) })
} }
@@ -134,11 +141,9 @@ func testStudioScenes(t *testing.T, tc testStudioCase) {
for i := range matchingPaths { for i := range matchingPaths {
sceneID := i + 1 sceneID := i + 1
mockSceneReader.On("Find", testCtx, sceneID).Return(&models.Scene{}, nil).Once() expectedStudioID := studioID
expectedStudioID := models.NullInt64(studioID) mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
mockSceneReader.On("Update", testCtx, models.ScenePartial{ StudioID: models.NewOptionalInt(expectedStudioID),
ID: sceneID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once() }).Return(nil, nil).Once()
} }
@@ -166,7 +171,7 @@ func testStudioImages(t *testing.T, tc testStudioCase) {
mockImageReader := &mocks.ImageReaderWriter{} mockImageReader := &mocks.ImageReaderWriter{}
const studioID = 2 var studioID = 2
var aliases []string var aliases []string
@@ -181,7 +186,7 @@ func testStudioImages(t *testing.T, tc testStudioCase) {
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
images = append(images, &models.Image{ images = append(images, &models.Image{
ID: i + 1, ID: i + 1,
Path: p, Files: []*file.ImageFile{makeImageFile(p)},
}) })
} }
@@ -226,11 +231,9 @@ func testStudioImages(t *testing.T, tc testStudioCase) {
for i := range matchingPaths { for i := range matchingPaths {
imageID := i + 1 imageID := i + 1
mockImageReader.On("Find", testCtx, imageID).Return(&models.Image{}, nil).Once() expectedStudioID := studioID
expectedStudioID := models.NullInt64(studioID) mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
mockImageReader.On("Update", testCtx, models.ImagePartial{ StudioID: models.NewOptionalInt(expectedStudioID),
ID: imageID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once() }).Return(nil, nil).Once()
} }
@@ -257,7 +260,7 @@ func testStudioGalleries(t *testing.T, tc testStudioCase) {
aliasRegex := tc.aliasRegex aliasRegex := tc.aliasRegex
mockGalleryReader := &mocks.GalleryReaderWriter{} mockGalleryReader := &mocks.GalleryReaderWriter{}
const studioID = 2 var studioID = 2
var aliases []string var aliases []string
@@ -270,9 +273,14 @@ func testStudioGalleries(t *testing.T, tc testStudioCase) {
var galleries []*models.Gallery var galleries []*models.Gallery
matchingPaths, falsePaths := generateTestPaths(testPathName, galleryExt) matchingPaths, falsePaths := generateTestPaths(testPathName, galleryExt)
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
v := p
galleries = append(galleries, &models.Gallery{ galleries = append(galleries, &models.Gallery{
ID: i + 1, ID: i + 1,
Path: models.NullString(p), Files: []file.File{
&file.BaseFile{
Path: v,
},
},
}) })
} }
@@ -316,11 +324,9 @@ func testStudioGalleries(t *testing.T, tc testStudioCase) {
for i := range matchingPaths { for i := range matchingPaths {
galleryID := i + 1 galleryID := i + 1
mockGalleryReader.On("Find", testCtx, galleryID).Return(&models.Gallery{}, nil).Once() expectedStudioID := studioID
expectedStudioID := models.NullInt64(studioID) mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
mockGalleryReader.On("UpdatePartial", testCtx, models.GalleryPartial{ StudioID: models.NewOptionalInt(expectedStudioID),
ID: galleryID,
StudioID: &expectedStudioID,
}).Return(nil, nil).Once() }).Return(nil, nil).Once()
} }

View File

@@ -12,17 +12,17 @@ import (
type SceneQueryTagUpdater interface { type SceneQueryTagUpdater interface {
scene.Queryer scene.Queryer
scene.TagUpdater scene.PartialUpdater
} }
type ImageQueryTagUpdater interface { type ImageQueryTagUpdater interface {
image.Queryer image.Queryer
image.TagUpdater image.PartialUpdater
} }
type GalleryQueryTagUpdater interface { type GalleryQueryTagUpdater interface {
gallery.Queryer gallery.Queryer
gallery.TagUpdater gallery.PartialUpdater
} }
func getTagTaggers(p *models.Tag, aliases []string, cache *match.Cache) []tagger { func getTagTaggers(p *models.Tag, aliases []string, cache *match.Cache) []tagger {
@@ -50,8 +50,8 @@ func TagScenes(ctx context.Context, p *models.Tag, paths []string, aliases []str
t := getTagTaggers(p, aliases, cache) t := getTagTaggers(p, aliases, cache)
for _, tt := range t { for _, tt := range t {
if err := tt.tagScenes(ctx, paths, rw, func(subjectID, otherID int) (bool, error) { if err := tt.tagScenes(ctx, paths, rw, func(o *models.Scene) (bool, error) {
return scene.AddTag(ctx, rw, otherID, subjectID) return scene.AddTag(ctx, rw, o, p.ID)
}); err != nil { }); err != nil {
return err return err
} }
@@ -64,8 +64,8 @@ func TagImages(ctx context.Context, p *models.Tag, paths []string, aliases []str
t := getTagTaggers(p, aliases, cache) t := getTagTaggers(p, aliases, cache)
for _, tt := range t { for _, tt := range t {
if err := tt.tagImages(ctx, paths, rw, func(subjectID, otherID int) (bool, error) { if err := tt.tagImages(ctx, paths, rw, func(i *models.Image) (bool, error) {
return image.AddTag(ctx, rw, otherID, subjectID) return image.AddTag(ctx, rw, i, p.ID)
}); err != nil { }); err != nil {
return err return err
} }
@@ -78,8 +78,8 @@ func TagGalleries(ctx context.Context, p *models.Tag, paths []string, aliases []
t := getTagTaggers(p, aliases, cache) t := getTagTaggers(p, aliases, cache)
for _, tt := range t { for _, tt := range t {
if err := tt.tagGalleries(ctx, paths, rw, func(subjectID, otherID int) (bool, error) { if err := tt.tagGalleries(ctx, paths, rw, func(o *models.Gallery) (bool, error) {
return gallery.AddTag(ctx, rw, otherID, subjectID) return gallery.AddTag(ctx, rw, o, p.ID)
}); err != nil { }); err != nil {
return err return err
} }

View File

@@ -3,6 +3,7 @@ package autotag
import ( import (
"testing" "testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks" "github.com/stashapp/stash/pkg/models/mocks"
@@ -88,7 +89,13 @@ func testTagScenes(t *testing.T, tc testTagCase) {
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
scenes = append(scenes, &models.Scene{ scenes = append(scenes, &models.Scene{
ID: i + 1, ID: i + 1,
Files: []*file.VideoFile{
{
BaseFile: &file.BaseFile{
Path: p, Path: p,
},
},
},
}) })
} }
@@ -133,8 +140,12 @@ func testTagScenes(t *testing.T, tc testTagCase) {
for i := range matchingPaths { for i := range matchingPaths {
sceneID := i + 1 sceneID := i + 1
mockSceneReader.On("GetTagIDs", testCtx, sceneID).Return(nil, nil).Once() mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
mockSceneReader.On("UpdateTags", testCtx, sceneID, []int{tagID}).Return(nil).Once() TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
err := TagScenes(testCtx, &tag, nil, aliases, mockSceneReader, nil) err := TagScenes(testCtx, &tag, nil, aliases, mockSceneReader, nil)
@@ -176,7 +187,7 @@ func testTagImages(t *testing.T, tc testTagCase) {
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
images = append(images, &models.Image{ images = append(images, &models.Image{
ID: i + 1, ID: i + 1,
Path: p, Files: []*file.ImageFile{makeImageFile(p)},
}) })
} }
@@ -221,8 +232,13 @@ func testTagImages(t *testing.T, tc testTagCase) {
for i := range matchingPaths { for i := range matchingPaths {
imageID := i + 1 imageID := i + 1
mockImageReader.On("GetTagIDs", testCtx, imageID).Return(nil, nil).Once()
mockImageReader.On("UpdateTags", testCtx, imageID, []int{tagID}).Return(nil).Once() mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
err := TagImages(testCtx, &tag, nil, aliases, mockImageReader, nil) err := TagImages(testCtx, &tag, nil, aliases, mockImageReader, nil)
@@ -262,9 +278,14 @@ func testTagGalleries(t *testing.T, tc testTagCase) {
var galleries []*models.Gallery var galleries []*models.Gallery
matchingPaths, falsePaths := generateTestPaths(testPathName, "mp4") matchingPaths, falsePaths := generateTestPaths(testPathName, "mp4")
for i, p := range append(matchingPaths, falsePaths...) { for i, p := range append(matchingPaths, falsePaths...) {
v := p
galleries = append(galleries, &models.Gallery{ galleries = append(galleries, &models.Gallery{
ID: i + 1, ID: i + 1,
Path: models.NullString(p), Files: []file.File{
&file.BaseFile{
Path: v,
},
},
}) })
} }
@@ -308,8 +329,14 @@ func testTagGalleries(t *testing.T, tc testTagCase) {
for i := range matchingPaths { for i := range matchingPaths {
galleryID := i + 1 galleryID := i + 1
mockGalleryReader.On("GetTagIDs", testCtx, galleryID).Return(nil, nil).Once()
mockGalleryReader.On("UpdateTags", testCtx, galleryID, []int{tagID}).Return(nil).Once() mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
} }
err := TagGalleries(testCtx, &tag, nil, aliases, mockGalleryReader, nil) err := TagGalleries(testCtx, &tag, nil, aliases, mockGalleryReader, nil)

View File

@@ -21,6 +21,7 @@ import (
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/match"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/scene"
) )
@@ -35,6 +36,9 @@ type tagger struct {
} }
type addLinkFunc func(subjectID, otherID int) (bool, error) type addLinkFunc func(subjectID, otherID int) (bool, error)
type addImageLinkFunc func(o *models.Image) (bool, error)
type addGalleryLinkFunc func(o *models.Gallery) (bool, error)
type addSceneLinkFunc func(o *models.Scene) (bool, error)
func (t *tagger) addError(otherType, otherName string, err error) error { func (t *tagger) addError(otherType, otherName string, err error) error {
return fmt.Errorf("error adding %s '%s' to %s '%s': %s", otherType, otherName, t.Type, t.Name, err.Error()) return fmt.Errorf("error adding %s '%s' to %s '%s': %s", otherType, otherName, t.Type, t.Name, err.Error())
@@ -107,14 +111,14 @@ func (t *tagger) tagTags(ctx context.Context, tagReader match.TagAutoTagQueryer,
return nil return nil
} }
func (t *tagger) tagScenes(ctx context.Context, paths []string, sceneReader scene.Queryer, addFunc addLinkFunc) error { func (t *tagger) tagScenes(ctx context.Context, paths []string, sceneReader scene.Queryer, addFunc addSceneLinkFunc) error {
others, err := match.PathToScenes(ctx, t.Name, paths, sceneReader) others, err := match.PathToScenes(ctx, t.Name, paths, sceneReader)
if err != nil { if err != nil {
return err return err
} }
for _, p := range others { for _, p := range others {
added, err := addFunc(t.ID, p.ID) added, err := addFunc(p)
if err != nil { if err != nil {
return t.addError("scene", p.GetTitle(), err) return t.addError("scene", p.GetTitle(), err)
@@ -128,14 +132,14 @@ func (t *tagger) tagScenes(ctx context.Context, paths []string, sceneReader scen
return nil return nil
} }
func (t *tagger) tagImages(ctx context.Context, paths []string, imageReader image.Queryer, addFunc addLinkFunc) error { func (t *tagger) tagImages(ctx context.Context, paths []string, imageReader image.Queryer, addFunc addImageLinkFunc) error {
others, err := match.PathToImages(ctx, t.Name, paths, imageReader) others, err := match.PathToImages(ctx, t.Name, paths, imageReader)
if err != nil { if err != nil {
return err return err
} }
for _, p := range others { for _, p := range others {
added, err := addFunc(t.ID, p.ID) added, err := addFunc(p)
if err != nil { if err != nil {
return t.addError("image", p.GetTitle(), err) return t.addError("image", p.GetTitle(), err)
@@ -149,14 +153,14 @@ func (t *tagger) tagImages(ctx context.Context, paths []string, imageReader imag
return nil return nil
} }
func (t *tagger) tagGalleries(ctx context.Context, paths []string, galleryReader gallery.Queryer, addFunc addLinkFunc) error { func (t *tagger) tagGalleries(ctx context.Context, paths []string, galleryReader gallery.Queryer, addFunc addGalleryLinkFunc) error {
others, err := match.PathToGalleries(ctx, t.Name, paths, galleryReader) others, err := match.PathToGalleries(ctx, t.Name, paths, galleryReader)
if err != nil { if err != nil {
return err return err
} }
for _, p := range others { for _, p := range others {
added, err := addFunc(t.ID, p.ID) added, err := addFunc(p)
if err != nil { if err != nil {
return t.addError("gallery", p.GetTitle(), err) return t.addError("gallery", p.GetTitle(), err)

View File

@@ -108,9 +108,18 @@ func sceneToContainer(scene *models.Scene, parent string, host string) interface
} }
mimeType := "video/mp4" mimeType := "video/mp4"
size, _ := strconv.Atoi(scene.Size.String) var (
size int
bitrate uint
duration int64
)
duration := int64(scene.Duration.Float64) f := scene.PrimaryFile()
if f != nil {
size = int(f.Size)
bitrate = uint(f.BitRate)
duration = int64(f.Duration)
}
item.Res = append(item.Res, upnpav.Resource{ item.Res = append(item.Res, upnpav.Resource{
URL: (&url.URL{ URL: (&url.URL{
@@ -124,8 +133,7 @@ func sceneToContainer(scene *models.Scene, parent string, host string) interface
ProtocolInfo: fmt.Sprintf("http-get:*:%s:%s", mimeType, dlna.ContentFeatures{ ProtocolInfo: fmt.Sprintf("http-get:*:%s:%s", mimeType, dlna.ContentFeatures{
SupportRange: true, SupportRange: true,
}.String()), }.String()),
Bitrate: uint(scene.Bitrate.Int64), Bitrate: bitrate,
// TODO - make %d:%02d:%02d string
Duration: formatDurationSexagesimal(time.Duration(duration) * time.Second), Duration: formatDurationSexagesimal(time.Duration(duration) * time.Second),
Size: uint64(size), Size: uint64(size),
// Resolution: resolution, // Resolution: resolution,
@@ -370,7 +378,7 @@ func (me *contentDirectoryService) handleBrowseMetadata(obj object, host string)
// http://upnp.org/specs/av/UPnP-av-ContentDirectory-v1-Service.pdf // http://upnp.org/specs/av/UPnP-av-ContentDirectory-v1-Service.pdf
// maximum update ID is 2**32, then rolls back to 0 // maximum update ID is 2**32, then rolls back to 0
const maxUpdateID int64 = 1 << 32 const maxUpdateID int64 = 1 << 32
updateID = fmt.Sprint(scene.UpdatedAt.Timestamp.Unix() % maxUpdateID) updateID = fmt.Sprint(scene.UpdatedAt.Unix() % maxUpdateID)
} else { } else {
return nil, upnp.Errorf(upnpav.NoSuchObjectErrorCode, "scene not found") return nil, upnp.Errorf(upnpav.NoSuchObjectErrorCode, "scene not found")
} }

View File

@@ -2,7 +2,6 @@ package identify
import ( import (
"context" "context"
"database/sql"
"fmt" "fmt"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
@@ -129,10 +128,7 @@ func (t *SceneIdentifier) getSceneUpdater(ctx context.Context, s *models.Scene,
} }
if studioID != nil { if studioID != nil {
ret.Partial.StudioID = &sql.NullInt64{ ret.Partial.StudioID = models.NewOptionalInt(*studioID)
Int64: *studioID,
Valid: true,
}
} }
ignoreMale := false ignoreMale := false
@@ -143,20 +139,38 @@ func (t *SceneIdentifier) getSceneUpdater(ctx context.Context, s *models.Scene,
} }
} }
ret.PerformerIDs, err = rel.performers(ctx, ignoreMale) performerIDs, err := rel.performers(ctx, ignoreMale)
if err != nil { if err != nil {
return nil, err return nil, err
} }
if performerIDs != nil {
ret.Partial.PerformerIDs = &models.UpdateIDs{
IDs: performerIDs,
Mode: models.RelationshipUpdateModeSet,
}
}
ret.TagIDs, err = rel.tags(ctx) tagIDs, err := rel.tags(ctx)
if err != nil { if err != nil {
return nil, err return nil, err
} }
if tagIDs != nil {
ret.Partial.TagIDs = &models.UpdateIDs{
IDs: tagIDs,
Mode: models.RelationshipUpdateModeSet,
}
}
ret.StashIDs, err = rel.stashIDs(ctx) stashIDs, err := rel.stashIDs(ctx)
if err != nil { if err != nil {
return nil, err return nil, err
} }
if stashIDs != nil {
ret.Partial.StashIDs = &models.UpdateStashIDs{
StashIDs: stashIDs,
Mode: models.RelationshipUpdateModeSet,
}
}
setCoverImage := false setCoverImage := false
for _, o := range options { for _, o := range options {
@@ -198,8 +212,8 @@ func (t *SceneIdentifier) modifyScene(ctx context.Context, txnManager txn.Manage
as := "" as := ""
title := updater.Partial.Title title := updater.Partial.Title
if title != nil { if title.Ptr() != nil {
as = fmt.Sprintf(" as %s", title.String) as = fmt.Sprintf(" as %s", title.Value)
} }
logger.Infof("Successfully identified %s%s using %s", s.Path, as, result.source.Name) logger.Infof("Successfully identified %s%s using %s", s.Path, as, result.source.Name)
@@ -233,37 +247,33 @@ func getFieldOptions(options []MetadataOptions) map[string]*FieldOptions {
} }
func getScenePartial(scene *models.Scene, scraped *scraper.ScrapedScene, fieldOptions map[string]*FieldOptions, setOrganized bool) models.ScenePartial { func getScenePartial(scene *models.Scene, scraped *scraper.ScrapedScene, fieldOptions map[string]*FieldOptions, setOrganized bool) models.ScenePartial {
partial := models.ScenePartial{ partial := models.ScenePartial{}
ID: scene.ID,
}
if scraped.Title != nil && scene.Title.String != *scraped.Title { if scraped.Title != nil && (scene.Title != *scraped.Title) {
if shouldSetSingleValueField(fieldOptions["title"], scene.Title.String != "") { if shouldSetSingleValueField(fieldOptions["title"], scene.Title != "") {
partial.Title = models.NullStringPtr(*scraped.Title) partial.Title = models.NewOptionalString(*scraped.Title)
} }
} }
if scraped.Date != nil && scene.Date.String != *scraped.Date { if scraped.Date != nil && (scene.Date == nil || scene.Date.String() != *scraped.Date) {
if shouldSetSingleValueField(fieldOptions["date"], scene.Date.Valid) { if shouldSetSingleValueField(fieldOptions["date"], scene.Date != nil) {
partial.Date = &models.SQLiteDate{ d := models.NewDate(*scraped.Date)
String: *scraped.Date, partial.Date = models.NewOptionalDate(d)
Valid: true,
} }
} }
} if scraped.Details != nil && (scene.Details != *scraped.Details) {
if scraped.Details != nil && scene.Details.String != *scraped.Details { if shouldSetSingleValueField(fieldOptions["details"], scene.Details != "") {
if shouldSetSingleValueField(fieldOptions["details"], scene.Details.String != "") { partial.Details = models.NewOptionalString(*scraped.Details)
partial.Details = models.NullStringPtr(*scraped.Details)
} }
} }
if scraped.URL != nil && scene.URL.String != *scraped.URL { if scraped.URL != nil && (scene.URL != *scraped.URL) {
if shouldSetSingleValueField(fieldOptions["url"], scene.URL.String != "") { if shouldSetSingleValueField(fieldOptions["url"], scene.URL != "") {
partial.URL = models.NullStringPtr(*scraped.URL) partial.URL = models.NewOptionalString(*scraped.URL)
} }
} }
if setOrganized && !scene.Organized { if setOrganized && !scene.Organized {
// just reuse the boolean since we know it's true // just reuse the boolean since we know it's true
partial.Organized = &setOrganized partial.Organized = models.NewOptionalBool(setOrganized)
} }
return partial return partial

View File

@@ -74,12 +74,12 @@ func TestSceneIdentifier_Identify(t *testing.T) {
mockSceneReaderWriter := &mocks.SceneReaderWriter{} mockSceneReaderWriter := &mocks.SceneReaderWriter{}
mockSceneReaderWriter.On("Update", testCtx, mock.MatchedBy(func(partial models.ScenePartial) bool { mockSceneReaderWriter.On("UpdatePartial", testCtx, mock.MatchedBy(func(id int) bool {
return partial.ID != errUpdateID return id == errUpdateID
})).Return(nil, nil) }), mock.Anything).Return(nil, errors.New("update error"))
mockSceneReaderWriter.On("Update", testCtx, mock.MatchedBy(func(partial models.ScenePartial) bool { mockSceneReaderWriter.On("UpdatePartial", testCtx, mock.MatchedBy(func(id int) bool {
return partial.ID == errUpdateID return id != errUpdateID
})).Return(nil, errors.New("update error")) }), mock.Anything).Return(nil, nil)
tests := []struct { tests := []struct {
name string name string
@@ -245,26 +245,26 @@ func Test_getFieldOptions(t *testing.T) {
func Test_getScenePartial(t *testing.T) { func Test_getScenePartial(t *testing.T) {
var ( var (
originalTitle = "originalTitle" originalTitle = "originalTitle"
originalDate = "originalDate" originalDate = "2001-01-01"
originalDetails = "originalDetails" originalDetails = "originalDetails"
originalURL = "originalURL" originalURL = "originalURL"
) )
var ( var (
scrapedTitle = "scrapedTitle" scrapedTitle = "scrapedTitle"
scrapedDate = "scrapedDate" scrapedDate = "2002-02-02"
scrapedDetails = "scrapedDetails" scrapedDetails = "scrapedDetails"
scrapedURL = "scrapedURL" scrapedURL = "scrapedURL"
) )
originalDateObj := models.NewDate(originalDate)
scrapedDateObj := models.NewDate(scrapedDate)
originalScene := &models.Scene{ originalScene := &models.Scene{
Title: models.NullString(originalTitle), Title: originalTitle,
Date: models.SQLiteDate{ Date: &originalDateObj,
String: originalDate, Details: originalDetails,
Valid: true, URL: originalURL,
},
Details: models.NullString(originalDetails),
URL: models.NullString(originalURL),
} }
organisedScene := *originalScene organisedScene := *originalScene
@@ -273,13 +273,10 @@ func Test_getScenePartial(t *testing.T) {
emptyScene := &models.Scene{} emptyScene := &models.Scene{}
postPartial := models.ScenePartial{ postPartial := models.ScenePartial{
Title: models.NullStringPtr(scrapedTitle), Title: models.NewOptionalString(scrapedTitle),
Date: &models.SQLiteDate{ Date: models.NewOptionalDate(scrapedDateObj),
String: scrapedDate, Details: models.NewOptionalString(scrapedDetails),
Valid: true, URL: models.NewOptionalString(scrapedURL),
},
Details: models.NullStringPtr(scrapedDetails),
URL: models.NullStringPtr(scrapedURL),
} }
scrapedScene := &scraper.ScrapedScene{ scrapedScene := &scraper.ScrapedScene{
@@ -387,7 +384,7 @@ func Test_getScenePartial(t *testing.T) {
true, true,
}, },
models.ScenePartial{ models.ScenePartial{
Organized: &setOrganised, Organized: models.NewOptionalBool(setOrganised),
}, },
}, },
{ {

View File

@@ -13,7 +13,7 @@ import (
type PerformerCreator interface { type PerformerCreator interface {
Create(ctx context.Context, newPerformer models.Performer) (*models.Performer, error) Create(ctx context.Context, newPerformer models.Performer) (*models.Performer, error)
UpdateStashIDs(ctx context.Context, performerID int, stashIDs []models.StashID) error UpdateStashIDs(ctx context.Context, performerID int, stashIDs []*models.StashID) error
} }
func getPerformerID(ctx context.Context, endpoint string, w PerformerCreator, p *models.ScrapedPerformer, createMissing bool) (*int, error) { func getPerformerID(ctx context.Context, endpoint string, w PerformerCreator, p *models.ScrapedPerformer, createMissing bool) (*int, error) {
@@ -39,7 +39,7 @@ func createMissingPerformer(ctx context.Context, endpoint string, w PerformerCre
} }
if endpoint != "" && p.RemoteSiteID != nil { if endpoint != "" && p.RemoteSiteID != nil {
if err := w.UpdateStashIDs(ctx, created.ID, []models.StashID{ if err := w.UpdateStashIDs(ctx, created.ID, []*models.StashID{
{ {
Endpoint: endpoint, Endpoint: endpoint,
StashID: *p.RemoteSiteID, StashID: *p.RemoteSiteID,

View File

@@ -141,13 +141,13 @@ func Test_createMissingPerformer(t *testing.T) {
return p.Name.String == invalidName return p.Name.String == invalidName
})).Return(nil, errors.New("error creating performer")) })).Return(nil, errors.New("error creating performer"))
mockPerformerReaderWriter.On("UpdateStashIDs", testCtx, performerID, []models.StashID{ mockPerformerReaderWriter.On("UpdateStashIDs", testCtx, performerID, []*models.StashID{
{ {
Endpoint: invalidEndpoint, Endpoint: invalidEndpoint,
StashID: remoteSiteID, StashID: remoteSiteID,
}, },
}).Return(errors.New("error updating stash ids")) }).Return(errors.New("error updating stash ids"))
mockPerformerReaderWriter.On("UpdateStashIDs", testCtx, performerID, []models.StashID{ mockPerformerReaderWriter.On("UpdateStashIDs", testCtx, performerID, []*models.StashID{
{ {
Endpoint: validEndpoint, Endpoint: validEndpoint,
StashID: remoteSiteID, StashID: remoteSiteID,

View File

@@ -16,9 +16,6 @@ import (
) )
type SceneReaderUpdater interface { type SceneReaderUpdater interface {
GetPerformerIDs(ctx context.Context, sceneID int) ([]int, error)
GetTagIDs(ctx context.Context, sceneID int) ([]int, error)
GetStashIDs(ctx context.Context, sceneID int) ([]*models.StashID, error)
GetCover(ctx context.Context, sceneID int) ([]byte, error) GetCover(ctx context.Context, sceneID int) ([]byte, error)
scene.Updater scene.Updater
} }
@@ -37,7 +34,7 @@ type sceneRelationships struct {
fieldOptions map[string]*FieldOptions fieldOptions map[string]*FieldOptions
} }
func (g sceneRelationships) studio(ctx context.Context) (*int64, error) { func (g sceneRelationships) studio(ctx context.Context) (*int, error) {
existingID := g.scene.StudioID existingID := g.scene.StudioID
fieldStrategy := g.fieldOptions["studio"] fieldStrategy := g.fieldOptions["studio"]
createMissing := fieldStrategy != nil && utils.IsTrue(fieldStrategy.CreateMissing) createMissing := fieldStrategy != nil && utils.IsTrue(fieldStrategy.CreateMissing)
@@ -45,19 +42,19 @@ func (g sceneRelationships) studio(ctx context.Context) (*int64, error) {
scraped := g.result.result.Studio scraped := g.result.result.Studio
endpoint := g.result.source.RemoteSite endpoint := g.result.source.RemoteSite
if scraped == nil || !shouldSetSingleValueField(fieldStrategy, existingID.Valid) { if scraped == nil || !shouldSetSingleValueField(fieldStrategy, existingID != nil) {
return nil, nil return nil, nil
} }
if scraped.StoredID != nil { if scraped.StoredID != nil {
// existing studio, just set it // existing studio, just set it
studioID, err := strconv.ParseInt(*scraped.StoredID, 10, 64) studioID, err := strconv.Atoi(*scraped.StoredID)
if err != nil { if err != nil {
return nil, fmt.Errorf("error converting studio ID %s: %w", *scraped.StoredID, err) return nil, fmt.Errorf("error converting studio ID %s: %w", *scraped.StoredID, err)
} }
// only return value if different to current // only return value if different to current
if existingID.Int64 != studioID { if existingID == nil || *existingID != studioID {
return &studioID, nil return &studioID, nil
} }
} else if createMissing { } else if createMissing {
@@ -85,10 +82,7 @@ func (g sceneRelationships) performers(ctx context.Context, ignoreMale bool) ([]
endpoint := g.result.source.RemoteSite endpoint := g.result.source.RemoteSite
var performerIDs []int var performerIDs []int
originalPerformerIDs, err := g.sceneReader.GetPerformerIDs(ctx, g.scene.ID) originalPerformerIDs := g.scene.PerformerIDs
if err != nil {
return nil, fmt.Errorf("error getting scene performers: %w", err)
}
if strategy == FieldStrategyMerge { if strategy == FieldStrategyMerge {
// add to existing // add to existing
@@ -135,10 +129,7 @@ func (g sceneRelationships) tags(ctx context.Context) ([]int, error) {
} }
var tagIDs []int var tagIDs []int
originalTagIDs, err := g.sceneReader.GetTagIDs(ctx, target.ID) originalTagIDs := target.TagIDs
if err != nil {
return nil, fmt.Errorf("error getting scene tags: %w", err)
}
if strategy == FieldStrategyMerge { if strategy == FieldStrategyMerge {
// add to existing // add to existing
@@ -194,21 +185,13 @@ func (g sceneRelationships) stashIDs(ctx context.Context) ([]models.StashID, err
strategy = fieldStrategy.Strategy strategy = fieldStrategy.Strategy
} }
var originalStashIDs []models.StashID
var stashIDs []models.StashID var stashIDs []models.StashID
stashIDPtrs, err := g.sceneReader.GetStashIDs(ctx, target.ID) originalStashIDs := target.StashIDs
if err != nil {
return nil, fmt.Errorf("error getting scene tag: %w", err)
}
// convert existing to non-pointer types
for _, stashID := range stashIDPtrs {
originalStashIDs = append(originalStashIDs, *stashID)
}
if strategy == FieldStrategyMerge { if strategy == FieldStrategyMerge {
// add to existing // add to existing
stashIDs = originalStashIDs // make a copy so we don't modify the original
stashIDs = append(stashIDs, originalStashIDs...)
} }
for i, stashID := range stashIDs { for i, stashID := range stashIDs {

View File

@@ -16,7 +16,7 @@ import (
func Test_sceneRelationships_studio(t *testing.T) { func Test_sceneRelationships_studio(t *testing.T) {
validStoredID := "1" validStoredID := "1"
var validStoredIDInt int64 = 1 var validStoredIDInt = 1
invalidStoredID := "invalidStoredID" invalidStoredID := "invalidStoredID"
createMissing := true createMissing := true
@@ -39,7 +39,7 @@ func Test_sceneRelationships_studio(t *testing.T) {
scene *models.Scene scene *models.Scene
fieldOptions *FieldOptions fieldOptions *FieldOptions
result *models.ScrapedStudio result *models.ScrapedStudio
want *int64 want *int
wantErr bool wantErr bool
}{ }{
{ {
@@ -75,7 +75,7 @@ func Test_sceneRelationships_studio(t *testing.T) {
{ {
"same stored id", "same stored id",
&models.Scene{ &models.Scene{
StudioID: models.NullInt64(validStoredIDInt), StudioID: &validStoredIDInt,
}, },
defaultOptions, defaultOptions,
&models.ScrapedStudio{ &models.ScrapedStudio{
@@ -156,19 +156,25 @@ func Test_sceneRelationships_performers(t *testing.T) {
Strategy: FieldStrategyMerge, Strategy: FieldStrategyMerge,
} }
mockSceneReaderWriter := &mocks.SceneReaderWriter{} emptyScene := &models.Scene{
mockSceneReaderWriter.On("GetPerformerIDs", testCtx, sceneID).Return(nil, nil) ID: sceneID,
mockSceneReaderWriter.On("GetPerformerIDs", testCtx, sceneWithPerformerID).Return([]int{existingPerformerID}, nil) }
mockSceneReaderWriter.On("GetPerformerIDs", testCtx, errSceneID).Return(nil, errors.New("error getting IDs"))
sceneWithPerformer := &models.Scene{
ID: sceneWithPerformerID,
PerformerIDs: []int{
existingPerformerID,
},
}
tr := sceneRelationships{ tr := sceneRelationships{
sceneReader: mockSceneReaderWriter, sceneReader: &mocks.SceneReaderWriter{},
fieldOptions: make(map[string]*FieldOptions), fieldOptions: make(map[string]*FieldOptions),
} }
tests := []struct { tests := []struct {
name string name string
sceneID int sceneID *models.Scene
fieldOptions *FieldOptions fieldOptions *FieldOptions
scraped []*models.ScrapedPerformer scraped []*models.ScrapedPerformer
ignoreMale bool ignoreMale bool
@@ -177,7 +183,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
}{ }{
{ {
"ignore", "ignore",
sceneID, emptyScene,
&FieldOptions{ &FieldOptions{
Strategy: FieldStrategyIgnore, Strategy: FieldStrategyIgnore,
}, },
@@ -192,27 +198,16 @@ func Test_sceneRelationships_performers(t *testing.T) {
}, },
{ {
"none", "none",
sceneID, emptyScene,
defaultOptions, defaultOptions,
[]*models.ScrapedPerformer{}, []*models.ScrapedPerformer{},
false, false,
nil, nil,
false, false,
}, },
{
"error getting ids",
errSceneID,
defaultOptions,
[]*models.ScrapedPerformer{
{},
},
false,
nil,
true,
},
{ {
"merge existing", "merge existing",
sceneWithPerformerID, sceneWithPerformer,
defaultOptions, defaultOptions,
[]*models.ScrapedPerformer{ []*models.ScrapedPerformer{
{ {
@@ -226,7 +221,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
}, },
{ {
"merge add", "merge add",
sceneWithPerformerID, sceneWithPerformer,
defaultOptions, defaultOptions,
[]*models.ScrapedPerformer{ []*models.ScrapedPerformer{
{ {
@@ -240,7 +235,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
}, },
{ {
"ignore male", "ignore male",
sceneID, emptyScene,
defaultOptions, defaultOptions,
[]*models.ScrapedPerformer{ []*models.ScrapedPerformer{
{ {
@@ -255,7 +250,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
}, },
{ {
"overwrite", "overwrite",
sceneWithPerformerID, sceneWithPerformer,
&FieldOptions{ &FieldOptions{
Strategy: FieldStrategyOverwrite, Strategy: FieldStrategyOverwrite,
}, },
@@ -271,7 +266,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
}, },
{ {
"ignore male (not male)", "ignore male (not male)",
sceneWithPerformerID, sceneWithPerformer,
&FieldOptions{ &FieldOptions{
Strategy: FieldStrategyOverwrite, Strategy: FieldStrategyOverwrite,
}, },
@@ -288,7 +283,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
}, },
{ {
"error getting tag ID", "error getting tag ID",
sceneID, emptyScene,
&FieldOptions{ &FieldOptions{
Strategy: FieldStrategyOverwrite, Strategy: FieldStrategyOverwrite,
CreateMissing: &createMissing, CreateMissing: &createMissing,
@@ -306,9 +301,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
} }
for _, tt := range tests { for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) { t.Run(tt.name, func(t *testing.T) {
tr.scene = &models.Scene{ tr.scene = tt.sceneID
ID: tt.sceneID,
}
tr.fieldOptions["performers"] = tt.fieldOptions tr.fieldOptions["performers"] = tt.fieldOptions
tr.result = &scrapeResult{ tr.result = &scrapeResult{
result: &scraper.ScrapedScene{ result: &scraper.ScrapedScene{
@@ -347,11 +340,19 @@ func Test_sceneRelationships_tags(t *testing.T) {
Strategy: FieldStrategyMerge, Strategy: FieldStrategyMerge,
} }
emptyScene := &models.Scene{
ID: sceneID,
}
sceneWithTag := &models.Scene{
ID: sceneWithTagID,
TagIDs: []int{
existingID,
},
}
mockSceneReaderWriter := &mocks.SceneReaderWriter{} mockSceneReaderWriter := &mocks.SceneReaderWriter{}
mockTagReaderWriter := &mocks.TagReaderWriter{} mockTagReaderWriter := &mocks.TagReaderWriter{}
mockSceneReaderWriter.On("GetTagIDs", testCtx, sceneID).Return(nil, nil)
mockSceneReaderWriter.On("GetTagIDs", testCtx, sceneWithTagID).Return([]int{existingID}, nil)
mockSceneReaderWriter.On("GetTagIDs", testCtx, errSceneID).Return(nil, errors.New("error getting IDs"))
mockTagReaderWriter.On("Create", testCtx, mock.MatchedBy(func(p models.Tag) bool { mockTagReaderWriter.On("Create", testCtx, mock.MatchedBy(func(p models.Tag) bool {
return p.Name == validName return p.Name == validName
@@ -370,7 +371,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
sceneID int scene *models.Scene
fieldOptions *FieldOptions fieldOptions *FieldOptions
scraped []*models.ScrapedTag scraped []*models.ScrapedTag
want []int want []int
@@ -378,7 +379,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
}{ }{
{ {
"ignore", "ignore",
sceneID, emptyScene,
&FieldOptions{ &FieldOptions{
Strategy: FieldStrategyIgnore, Strategy: FieldStrategyIgnore,
}, },
@@ -392,25 +393,15 @@ func Test_sceneRelationships_tags(t *testing.T) {
}, },
{ {
"none", "none",
sceneID, emptyScene,
defaultOptions, defaultOptions,
[]*models.ScrapedTag{}, []*models.ScrapedTag{},
nil, nil,
false, false,
}, },
{
"error getting ids",
errSceneID,
defaultOptions,
[]*models.ScrapedTag{
{},
},
nil,
true,
},
{ {
"merge existing", "merge existing",
sceneWithTagID, sceneWithTag,
defaultOptions, defaultOptions,
[]*models.ScrapedTag{ []*models.ScrapedTag{
{ {
@@ -423,7 +414,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
}, },
{ {
"merge add", "merge add",
sceneWithTagID, sceneWithTag,
defaultOptions, defaultOptions,
[]*models.ScrapedTag{ []*models.ScrapedTag{
{ {
@@ -436,7 +427,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
}, },
{ {
"overwrite", "overwrite",
sceneWithTagID, sceneWithTag,
&FieldOptions{ &FieldOptions{
Strategy: FieldStrategyOverwrite, Strategy: FieldStrategyOverwrite,
}, },
@@ -451,7 +442,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
}, },
{ {
"error getting tag ID", "error getting tag ID",
sceneID, emptyScene,
&FieldOptions{ &FieldOptions{
Strategy: FieldStrategyOverwrite, Strategy: FieldStrategyOverwrite,
}, },
@@ -466,7 +457,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
}, },
{ {
"create missing", "create missing",
sceneID, emptyScene,
&FieldOptions{ &FieldOptions{
Strategy: FieldStrategyOverwrite, Strategy: FieldStrategyOverwrite,
CreateMissing: &createMissing, CreateMissing: &createMissing,
@@ -481,7 +472,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
}, },
{ {
"error creating", "error creating",
sceneID, emptyScene,
&FieldOptions{ &FieldOptions{
Strategy: FieldStrategyOverwrite, Strategy: FieldStrategyOverwrite,
CreateMissing: &createMissing, CreateMissing: &createMissing,
@@ -497,9 +488,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
} }
for _, tt := range tests { for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) { t.Run(tt.name, func(t *testing.T) {
tr.scene = &models.Scene{ tr.scene = tt.scene
ID: tt.sceneID,
}
tr.fieldOptions["tags"] = tt.fieldOptions tr.fieldOptions["tags"] = tt.fieldOptions
tr.result = &scrapeResult{ tr.result = &scrapeResult{
result: &scraper.ScrapedScene{ result: &scraper.ScrapedScene{
@@ -536,15 +525,21 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
Strategy: FieldStrategyMerge, Strategy: FieldStrategyMerge,
} }
mockSceneReaderWriter := &mocks.SceneReaderWriter{} emptyScene := &models.Scene{
mockSceneReaderWriter.On("GetStashIDs", testCtx, sceneID).Return(nil, nil) ID: sceneID,
mockSceneReaderWriter.On("GetStashIDs", testCtx, sceneWithStashID).Return([]*models.StashID{ }
sceneWithStashIDs := &models.Scene{
ID: sceneWithStashID,
StashIDs: []models.StashID{
{ {
StashID: remoteSiteID, StashID: remoteSiteID,
Endpoint: existingEndpoint, Endpoint: existingEndpoint,
}, },
}, nil) },
mockSceneReaderWriter.On("GetStashIDs", testCtx, errSceneID).Return(nil, errors.New("error getting IDs")) }
mockSceneReaderWriter := &mocks.SceneReaderWriter{}
tr := sceneRelationships{ tr := sceneRelationships{
sceneReader: mockSceneReaderWriter, sceneReader: mockSceneReaderWriter,
@@ -553,7 +548,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
sceneID int scene *models.Scene
fieldOptions *FieldOptions fieldOptions *FieldOptions
endpoint string endpoint string
remoteSiteID *string remoteSiteID *string
@@ -562,7 +557,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
}{ }{
{ {
"ignore", "ignore",
sceneID, emptyScene,
&FieldOptions{ &FieldOptions{
Strategy: FieldStrategyIgnore, Strategy: FieldStrategyIgnore,
}, },
@@ -573,7 +568,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
}, },
{ {
"no endpoint", "no endpoint",
sceneID, emptyScene,
defaultOptions, defaultOptions,
"", "",
&remoteSiteID, &remoteSiteID,
@@ -582,25 +577,16 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
}, },
{ {
"no site id", "no site id",
sceneID, emptyScene,
defaultOptions, defaultOptions,
newEndpoint, newEndpoint,
nil, nil,
nil, nil,
false, false,
}, },
{
"error getting ids",
errSceneID,
defaultOptions,
newEndpoint,
&remoteSiteID,
nil,
true,
},
{ {
"merge existing", "merge existing",
sceneWithStashID, sceneWithStashIDs,
defaultOptions, defaultOptions,
existingEndpoint, existingEndpoint,
&remoteSiteID, &remoteSiteID,
@@ -609,7 +595,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
}, },
{ {
"merge existing new value", "merge existing new value",
sceneWithStashID, sceneWithStashIDs,
defaultOptions, defaultOptions,
existingEndpoint, existingEndpoint,
&newRemoteSiteID, &newRemoteSiteID,
@@ -623,7 +609,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
}, },
{ {
"merge add", "merge add",
sceneWithStashID, sceneWithStashIDs,
defaultOptions, defaultOptions,
newEndpoint, newEndpoint,
&newRemoteSiteID, &newRemoteSiteID,
@@ -641,7 +627,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
}, },
{ {
"overwrite", "overwrite",
sceneWithStashID, sceneWithStashIDs,
&FieldOptions{ &FieldOptions{
Strategy: FieldStrategyOverwrite, Strategy: FieldStrategyOverwrite,
}, },
@@ -657,7 +643,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
}, },
{ {
"overwrite same", "overwrite same",
sceneWithStashID, sceneWithStashIDs,
&FieldOptions{ &FieldOptions{
Strategy: FieldStrategyOverwrite, Strategy: FieldStrategyOverwrite,
}, },
@@ -669,9 +655,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
} }
for _, tt := range tests { for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) { t.Run(tt.name, func(t *testing.T) {
tr.scene = &models.Scene{ tr.scene = tt.scene
ID: tt.sceneID,
}
tr.fieldOptions["stash_ids"] = tt.fieldOptions tr.fieldOptions["stash_ids"] = tt.fieldOptions
tr.result = &scrapeResult{ tr.result = &scrapeResult{
source: ScraperSource{ source: ScraperSource{
@@ -688,7 +672,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
return return
} }
if !reflect.DeepEqual(got, tt.want) { if !reflect.DeepEqual(got, tt.want) {
t.Errorf("sceneRelationships.stashIDs() = %v, want %v", got, tt.want) t.Errorf("sceneRelationships.stashIDs() = %+v, want %+v", got, tt.want)
} }
}) })
} }

View File

@@ -12,17 +12,17 @@ import (
type StudioCreator interface { type StudioCreator interface {
Create(ctx context.Context, newStudio models.Studio) (*models.Studio, error) Create(ctx context.Context, newStudio models.Studio) (*models.Studio, error)
UpdateStashIDs(ctx context.Context, studioID int, stashIDs []models.StashID) error UpdateStashIDs(ctx context.Context, studioID int, stashIDs []*models.StashID) error
} }
func createMissingStudio(ctx context.Context, endpoint string, w StudioCreator, studio *models.ScrapedStudio) (*int64, error) { func createMissingStudio(ctx context.Context, endpoint string, w StudioCreator, studio *models.ScrapedStudio) (*int, error) {
created, err := w.Create(ctx, scrapedToStudioInput(studio)) created, err := w.Create(ctx, scrapedToStudioInput(studio))
if err != nil { if err != nil {
return nil, fmt.Errorf("error creating studio: %w", err) return nil, fmt.Errorf("error creating studio: %w", err)
} }
if endpoint != "" && studio.RemoteSiteID != nil { if endpoint != "" && studio.RemoteSiteID != nil {
if err := w.UpdateStashIDs(ctx, created.ID, []models.StashID{ if err := w.UpdateStashIDs(ctx, created.ID, []*models.StashID{
{ {
Endpoint: endpoint, Endpoint: endpoint,
StashID: *studio.RemoteSiteID, StashID: *studio.RemoteSiteID,
@@ -32,8 +32,7 @@ func createMissingStudio(ctx context.Context, endpoint string, w StudioCreator,
} }
} }
createdID := int64(created.ID) return &created.ID, nil
return &createdID, nil
} }
func scrapedToStudioInput(studio *models.ScrapedStudio) models.Studio { func scrapedToStudioInput(studio *models.ScrapedStudio) models.Studio {

View File

@@ -18,7 +18,6 @@ func Test_createMissingStudio(t *testing.T) {
validName := "validName" validName := "validName"
invalidName := "invalidName" invalidName := "invalidName"
createdID := 1 createdID := 1
createdID64 := int64(createdID)
repo := mocks.NewTxnRepository() repo := mocks.NewTxnRepository()
mockStudioReaderWriter := repo.Studio.(*mocks.StudioReaderWriter) mockStudioReaderWriter := repo.Studio.(*mocks.StudioReaderWriter)
@@ -31,13 +30,13 @@ func Test_createMissingStudio(t *testing.T) {
return p.Name.String == invalidName return p.Name.String == invalidName
})).Return(nil, errors.New("error creating performer")) })).Return(nil, errors.New("error creating performer"))
mockStudioReaderWriter.On("UpdateStashIDs", testCtx, createdID, []models.StashID{ mockStudioReaderWriter.On("UpdateStashIDs", testCtx, createdID, []*models.StashID{
{ {
Endpoint: invalidEndpoint, Endpoint: invalidEndpoint,
StashID: remoteSiteID, StashID: remoteSiteID,
}, },
}).Return(errors.New("error updating stash ids")) }).Return(errors.New("error updating stash ids"))
mockStudioReaderWriter.On("UpdateStashIDs", testCtx, createdID, []models.StashID{ mockStudioReaderWriter.On("UpdateStashIDs", testCtx, createdID, []*models.StashID{
{ {
Endpoint: validEndpoint, Endpoint: validEndpoint,
StashID: remoteSiteID, StashID: remoteSiteID,
@@ -51,7 +50,7 @@ func Test_createMissingStudio(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
args args args args
want *int64 want *int
wantErr bool wantErr bool
}{ }{
{ {
@@ -62,7 +61,7 @@ func Test_createMissingStudio(t *testing.T) {
Name: validName, Name: validName,
}, },
}, },
&createdID64, &createdID,
false, false,
}, },
{ {
@@ -85,7 +84,7 @@ func Test_createMissingStudio(t *testing.T) {
RemoteSiteID: &remoteSiteID, RemoteSiteID: &remoteSiteID,
}, },
}, },
&createdID64, &createdID,
false, false,
}, },
{ {
@@ -109,7 +108,7 @@ func Test_createMissingStudio(t *testing.T) {
return return
} }
if !reflect.DeepEqual(got, tt.want) { if !reflect.DeepEqual(got, tt.want) {
t.Errorf("createMissingStudio() = %v, want %v", got, tt.want) t.Errorf("createMissingStudio() = %d, want %d", got, tt.want)
} }
}) })
} }

View File

@@ -2,7 +2,6 @@ package manager
import ( import (
"context" "context"
"database/sql"
"errors" "errors"
"path/filepath" "path/filepath"
"regexp" "regexp"
@@ -239,8 +238,9 @@ type sceneHolder struct {
func newSceneHolder(scene *models.Scene) *sceneHolder { func newSceneHolder(scene *models.Scene) *sceneHolder {
sceneCopy := models.Scene{ sceneCopy := models.Scene{
ID: scene.ID, ID: scene.ID,
Checksum: scene.Checksum, Files: scene.Files,
Path: scene.Path, // Checksum: scene.Checksum,
// Path: scene.Path,
} }
ret := sceneHolder{ ret := sceneHolder{
scene: scene, scene: scene,
@@ -307,11 +307,9 @@ func (h *sceneHolder) setDate(field *parserField, value string) {
// ensure the date is valid // ensure the date is valid
// only set if new value is different from the old // only set if new value is different from the old
if validateDate(fullDate) && h.scene.Date.String != fullDate { if validateDate(fullDate) && h.scene.Date != nil && h.scene.Date.String() != fullDate {
h.result.Date = models.SQLiteDate{ d := models.NewDate(fullDate)
String: fullDate, h.result.Date = &d
Valid: true,
}
} }
} }
@@ -337,24 +335,17 @@ func (h *sceneHolder) setField(field parserField, value interface{}) {
switch field.field { switch field.field {
case "title": case "title":
h.result.Title = sql.NullString{ v := value.(string)
String: value.(string), h.result.Title = v
Valid: true,
}
case "date": case "date":
if validateDate(value.(string)) { if validateDate(value.(string)) {
h.result.Date = models.SQLiteDate{ d := models.NewDate(value.(string))
String: value.(string), h.result.Date = &d
Valid: true,
}
} }
case "rating": case "rating":
rating, _ := strconv.Atoi(value.(string)) rating, _ := strconv.Atoi(value.(string))
if validateRating(rating) { if validateRating(rating) {
h.result.Rating = sql.NullInt64{ h.result.Rating = &rating
Int64: int64(rating),
Valid: true,
}
} }
case "performer": case "performer":
// add performer to list // add performer to list
@@ -394,9 +385,9 @@ func (m parseMapper) parse(scene *models.Scene) *sceneHolder {
// scene path in the match. Otherwise, use the default behaviour of just // scene path in the match. Otherwise, use the default behaviour of just
// the file's basename // the file's basename
// must be double \ because of the regex escaping // must be double \ because of the regex escaping
filename := filepath.Base(scene.Path) filename := filepath.Base(scene.Path())
if strings.Contains(m.regexString, `\\`) || strings.Contains(m.regexString, "/") { if strings.Contains(m.regexString, `\\`) || strings.Contains(m.regexString, "/") {
filename = scene.Path filename = scene.Path()
} }
result := m.regex.FindStringSubmatch(filename) result := m.regex.FindStringSubmatch(filename)
@@ -694,8 +685,8 @@ func (p *SceneFilenameParser) setMovies(ctx context.Context, qb MovieNameFinder,
} }
func (p *SceneFilenameParser) setParserResult(ctx context.Context, repo SceneFilenameParserRepository, h sceneHolder, result *SceneParserResult) { func (p *SceneFilenameParser) setParserResult(ctx context.Context, repo SceneFilenameParserRepository, h sceneHolder, result *SceneParserResult) {
if h.result.Title.Valid { if h.result.Title != "" {
title := h.result.Title.String title := h.result.Title
title = p.replaceWhitespaceCharacters(title) title = p.replaceWhitespaceCharacters(title)
if p.ParserInput.CapitalizeTitle != nil && *p.ParserInput.CapitalizeTitle { if p.ParserInput.CapitalizeTitle != nil && *p.ParserInput.CapitalizeTitle {
@@ -705,13 +696,13 @@ func (p *SceneFilenameParser) setParserResult(ctx context.Context, repo SceneFil
result.Title = &title result.Title = &title
} }
if h.result.Date.Valid { if h.result.Date != nil {
result.Date = &h.result.Date.String dateStr := h.result.Date.String()
result.Date = &dateStr
} }
if h.result.Rating.Valid { if h.result.Rating != nil {
rating := int(h.result.Rating.Int64) result.Rating = h.result.Rating
result.Rating = &rating
} }
if len(h.performers) > 0 { if len(h.performers) > 0 {
@@ -725,5 +716,4 @@ func (p *SceneFilenameParser) setParserResult(ctx context.Context, repo SceneFil
if len(h.movies) > 0 { if len(h.movies) > 0 {
p.setMovies(ctx, repo.Movie, h, result) p.setMovies(ctx, repo.Movie, h, result)
} }
} }

View File

@@ -0,0 +1,88 @@
package manager
import (
"errors"
"fmt"
"io"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/hash/oshash"
)
// fingerprintCalculator computes file fingerprints (oshash and/or MD5)
// during scanning. Which fingerprints are produced for a given file depends
// on its type and on the MD5 setting read from Config
// (see CalculateFingerprints).
type fingerprintCalculator struct {
	Config *config.Instance
}
// calculateOshash computes the oshash fingerprint of f using the reader
// obtained from o. The oshash algorithm requires random access to the file
// contents, so the opened reader must implement io.ReadSeeker; an error is
// returned otherwise.
func (c *fingerprintCalculator) calculateOshash(f *file.BaseFile, o file.Opener) (*file.Fingerprint, error) {
	r, err := o.Open()
	if err != nil {
		return nil, fmt.Errorf("opening file: %w", err)
	}

	defer r.Close()

	rs, ok := r.(io.ReadSeeker)
	if !ok {
		// the actual requirement is seekability, not closability - the
		// previous message ("non-readcloser") was misleading
		return nil, errors.New("cannot calculate oshash for non-seekable file")
	}

	hash, err := oshash.FromReader(rs, f.Size)
	if err != nil {
		return nil, fmt.Errorf("calculating oshash: %w", err)
	}

	return &file.Fingerprint{
		Type:        file.FingerprintTypeOshash,
		Fingerprint: hash,
	}, nil
}
// calculateMD5 computes the MD5 checksum fingerprint of the file opened
// via o.
func (c *fingerprintCalculator) calculateMD5(o file.Opener) (*file.Fingerprint, error) {
	src, err := o.Open()
	if err != nil {
		return nil, fmt.Errorf("opening file: %w", err)
	}
	defer src.Close()

	sum, err := md5.FromReader(src)
	if err != nil {
		return nil, fmt.Errorf("calculating md5: %w", err)
	}

	fp := file.Fingerprint{
		Type:        file.FingerprintTypeMD5,
		Fingerprint: sum,
	}
	return &fp, nil
}
// CalculateFingerprints returns the fingerprints to store for f. Video
// files always get an oshash fingerprint; an MD5 fingerprint is added for
// non-video files unconditionally, and for video files only when MD5
// calculation is enabled in the configuration.
func (c *fingerprintCalculator) CalculateFingerprints(f *file.BaseFile, o file.Opener) ([]file.Fingerprint, error) {
	var fingerprints []file.Fingerprint

	wantMD5 := true

	if isVideo(f.Basename) {
		// oshash comes first for video files
		oshashFP, err := c.calculateOshash(f, o)
		if err != nil {
			return nil, err
		}
		fingerprints = append(fingerprints, *oshashFP)

		// for videos, MD5 is optional and controlled by configuration
		wantMD5 = c.Config.IsCalculateMD5()
	}

	if wantMD5 {
		md5FP, err := c.calculateMD5(o)
		if err != nil {
			return nil, err
		}
		fingerprints = append(fingerprints, *md5FP)
	}

	return fingerprints, nil
}

View File

@@ -8,10 +8,11 @@ import (
) )
func DeleteGalleryFile(gallery *models.Gallery) { func DeleteGalleryFile(gallery *models.Gallery) {
if gallery.Path.Valid { path := gallery.Path()
err := os.Remove(gallery.Path.String) if path != "" {
err := os.Remove(path)
if err != nil { if err != nil {
logger.Warnf("Could not delete file %s: %s", gallery.Path.String, err.Error()) logger.Warnf("Could not delete file %s: %s", path, err.Error())
} }
} }
} }

View File

@@ -14,7 +14,7 @@ import (
) )
type InteractiveHeatmapSpeedGenerator struct { type InteractiveHeatmapSpeedGenerator struct {
InteractiveSpeed int64 InteractiveSpeed int
Funscript Script Funscript Script
FunscriptPath string FunscriptPath string
HeatmapPath string HeatmapPath string
@@ -175,7 +175,7 @@ func (g *InteractiveHeatmapSpeedGenerator) RenderHeatmap() error {
return err return err
} }
func (funscript *Script) CalculateMedian() int64 { func (funscript *Script) CalculateMedian() int {
sort.Slice(funscript.Actions, func(i, j int) bool { sort.Slice(funscript.Actions, func(i, j int) bool {
return funscript.Actions[i].Speed < funscript.Actions[j].Speed return funscript.Actions[i].Speed < funscript.Actions[j].Speed
}) })
@@ -183,10 +183,10 @@ func (funscript *Script) CalculateMedian() int64 {
mNumber := len(funscript.Actions) / 2 mNumber := len(funscript.Actions) / 2
if len(funscript.Actions)%2 != 0 { if len(funscript.Actions)%2 != 0 {
return int64(funscript.Actions[mNumber].Speed) return int(funscript.Actions[mNumber].Speed)
} }
return int64((funscript.Actions[mNumber-1].Speed + funscript.Actions[mNumber].Speed) / 2) return int((funscript.Actions[mNumber-1].Speed + funscript.Actions[mNumber].Speed) / 2)
} }
func (gt GradientTable) GetInterpolatedColorFor(t float64) colorful.Color { func (gt GradientTable) GetInterpolatedColorFor(t float64) colorful.Color {

View File

@@ -1,59 +0,0 @@
package manager
import (
"archive/zip"
"strings"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/logger"
)
// walkGalleryZip opens the zip archive at path and invokes walkFunc for
// each image entry it contains. Directory entries, macOS resource-fork
// entries ("__MACOSX"), non-image files, and files matching the configured
// image-exclusion patterns are skipped. Walking stops at the first error
// returned by walkFunc.
func walkGalleryZip(path string, walkFunc func(file *zip.File) error) error {
	zr, err := zip.OpenReader(path)
	if err != nil {
		return err
	}
	defer zr.Close()

	excludeImgRegex := generateRegexps(config.GetInstance().GetImageExcludes())

	for _, entry := range zr.File {
		switch {
		case entry.FileInfo().IsDir():
			continue
		case strings.Contains(entry.Name, "__MACOSX"):
			continue
		case !isImage(entry.Name):
			continue
		case matchFileRegex(file.ZipFile(path, entry).Path(), excludeImgRegex):
			continue
		}

		if err := walkFunc(entry); err != nil {
			return err
		}
	}

	return nil
}
// countImagesInZip returns the number of image entries in the zip archive
// at path, applying the same filtering as walkGalleryZip. Errors during the
// walk are logged as warnings and the count accumulated so far is returned.
func countImagesInZip(path string) int {
	count := 0

	if err := walkGalleryZip(path, func(*zip.File) error {
		count++
		return nil
	}); err != nil {
		logger.Warnf("Error while walking gallery zip: %v", err)
	}

	return count
}

View File

@@ -18,17 +18,27 @@ import (
"github.com/stashapp/stash/internal/log" "github.com/stashapp/stash/internal/log"
"github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/file"
file_image "github.com/stashapp/stash/pkg/file/image"
"github.com/stashapp/stash/pkg/file/video"
"github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/job"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/paths" "github.com/stashapp/stash/pkg/models/paths"
"github.com/stashapp/stash/pkg/plugin" "github.com/stashapp/stash/pkg/plugin"
"github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/scene/generate"
"github.com/stashapp/stash/pkg/scraper" "github.com/stashapp/stash/pkg/scraper"
"github.com/stashapp/stash/pkg/session" "github.com/stashapp/stash/pkg/session"
"github.com/stashapp/stash/pkg/sqlite" "github.com/stashapp/stash/pkg/sqlite"
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
"github.com/stashapp/stash/ui" "github.com/stashapp/stash/ui"
// register custom migrations
_ "github.com/stashapp/stash/pkg/sqlite/migrations"
) )
type SystemStatus struct { type SystemStatus struct {
@@ -115,7 +125,14 @@ type Manager struct {
DLNAService *dlna.Service DLNAService *dlna.Service
Database *sqlite.Database Database *sqlite.Database
Repository models.Repository Repository Repository
SceneService SceneService
ImageService ImageService
GalleryService GalleryService
Scanner *file.Scanner
Cleaner *file.Cleaner
scanSubs *subscriptionManager scanSubs *subscriptionManager
} }
@@ -150,7 +167,7 @@ func initialize() error {
l := initLog() l := initLog()
initProfiling(cfg.GetCPUProfilePath()) initProfiling(cfg.GetCPUProfilePath())
db := &sqlite.Database{} db := sqlite.NewDatabase()
instance = &Manager{ instance = &Manager{
Config: cfg, Config: cfg,
@@ -160,23 +177,28 @@ func initialize() error {
PluginCache: plugin.NewCache(cfg), PluginCache: plugin.NewCache(cfg),
Database: db, Database: db,
Repository: models.Repository{ Repository: sqliteRepository(db),
TxnManager: db,
Gallery: sqlite.GalleryReaderWriter,
Image: sqlite.ImageReaderWriter,
Movie: sqlite.MovieReaderWriter,
Performer: sqlite.PerformerReaderWriter,
Scene: sqlite.SceneReaderWriter,
SceneMarker: sqlite.SceneMarkerReaderWriter,
ScrapedItem: sqlite.ScrapedItemReaderWriter,
Studio: sqlite.StudioReaderWriter,
Tag: sqlite.TagReaderWriter,
SavedFilter: sqlite.SavedFilterReaderWriter,
},
scanSubs: &subscriptionManager{}, scanSubs: &subscriptionManager{},
} }
instance.SceneService = &scene.Service{
File: db.File,
Repository: db.Scene,
MarkerDestroyer: instance.Repository.SceneMarker,
}
instance.ImageService = &image.Service{
File: db.File,
Repository: db.Image,
}
instance.GalleryService = &gallery.Service{
Repository: db.Gallery,
ImageFinder: db.Image,
ImageService: instance.ImageService,
}
instance.JobManager = initJobManager() instance.JobManager = initJobManager()
sceneServer := SceneServer{ sceneServer := SceneServer{
@@ -200,13 +222,15 @@ func initialize() error {
} }
if err != nil { if err != nil {
panic(fmt.Sprintf("error initializing configuration: %s", err.Error())) return fmt.Errorf("error initializing configuration: %w", err)
} else if err := instance.PostInit(ctx); err != nil { }
if err := instance.PostInit(ctx); err != nil {
var migrationNeededErr *sqlite.MigrationNeededError var migrationNeededErr *sqlite.MigrationNeededError
if errors.As(err, &migrationNeededErr) { if errors.As(err, &migrationNeededErr) {
logger.Warn(err.Error()) logger.Warn(err.Error())
} else { } else {
panic(err) return err
} }
} }
@@ -228,6 +252,9 @@ func initialize() error {
logger.Warnf("could not initialize FFMPEG subsystem: %v", err) logger.Warnf("could not initialize FFMPEG subsystem: %v", err)
} }
instance.Scanner = makeScanner(db, instance.PluginCache)
instance.Cleaner = makeCleaner(db, instance.PluginCache)
// if DLNA is enabled, start it now // if DLNA is enabled, start it now
if instance.Config.GetDLNADefaultEnabled() { if instance.Config.GetDLNADefaultEnabled() {
if err := instance.DLNAService.Start(nil); err != nil { if err := instance.DLNAService.Start(nil); err != nil {
@@ -238,6 +265,71 @@ func initialize() error {
return nil return nil
} }
// videoFileFilter reports whether f's basename has a configured video
// file extension (see isVideo).
func videoFileFilter(f file.File) bool {
	return isVideo(f.Base().Basename)
}
// imageFileFilter reports whether f's basename has a configured image
// file extension (see isImage).
func imageFileFilter(f file.File) bool {
	return isImage(f.Base().Basename)
}
// galleryFileFilter reports whether f's basename has a configured gallery
// (zip) file extension (see isZip).
func galleryFileFilter(f file.File) bool {
	return isZip(f.Base().Basename)
}
// coverGenerator generates scene cover screenshots. It is stateless and
// reads its encoder, lock manager and path configuration from the global
// manager instance.
type coverGenerator struct {
}
// GenerateCover writes a cover screenshot for scene, taken from the video
// file f, using the globally configured encoder, lock manager and scene
// paths. The output location is derived from the scene's hash under the
// configured file-naming algorithm.
func (g *coverGenerator) GenerateCover(ctx context.Context, scene *models.Scene, f *file.VideoFile) error {
	hash := scene.GetHash(instance.Config.GetVideoFileNamingAlgorithm())

	gen := generate.Generator{
		Encoder:     instance.FFMPEG,
		LockManager: instance.ReadLockManager,
		ScenePaths:  instance.Paths.Scene,
	}

	return gen.Screenshot(ctx, f.Path, hash, f.Width, f.Duration, generate.ScreenshotOptions{})
}
// makeScanner constructs the file scanner used by the scan job, wiring the
// sqlite-backed file/folder stores, the video and image metadata decorators,
// and the fingerprint calculator.
//
// The plugin cache parameter is currently unused; it is blank-named to make
// that explicit while preserving the call signature for existing callers.
func makeScanner(db *sqlite.Database, _ *plugin.Cache) *file.Scanner {
	return &file.Scanner{
		Repository: file.Repository{
			Manager:          db,
			DatabaseProvider: db,
			Store:            db.File,
			FolderStore:      db.Folder,
		},
		FileDecorators: []file.Decorator{
			// probe video metadata for files with a video extension
			&file.FilteredDecorator{
				Decorator: &video.Decorator{
					FFProbe: instance.FFProbe,
				},
				Filter: file.FilterFunc(videoFileFilter),
			},
			// decorate image files with image metadata
			&file.FilteredDecorator{
				Decorator: &file_image.Decorator{},
				Filter:    file.FilterFunc(imageFileFilter),
			},
		},
		FingerprintCalculator: &fingerprintCalculator{Config: instance.Config},
		FS:                    &file.OsFS{},
	}
}
// makeCleaner constructs the file cleaner used by the clean job, backed by
// the sqlite file/folder stores and the OS filesystem.
//
// The plugin cache parameter is currently unused; it is blank-named to make
// that explicit while preserving the call signature for existing callers.
func makeCleaner(db *sqlite.Database, _ *plugin.Cache) *file.Cleaner {
	return &file.Cleaner{
		FS: &file.OsFS{},
		Repository: file.Repository{
			Manager:          db,
			DatabaseProvider: db,
			Store:            db.File,
			FolderStore:      db.Folder,
		},
		Handlers: []file.CleanHandler{
			&cleanHandler{},
		},
	}
}
func initJobManager() *job.Manager { func initJobManager() *job.Manager {
ret := job.NewManager() ret := job.NewManager()
@@ -370,9 +462,13 @@ func (s *Manager) PostInit(ctx context.Context) error {
if err := fsutil.EmptyDir(instance.Paths.Generated.Downloads); err != nil { if err := fsutil.EmptyDir(instance.Paths.Generated.Downloads); err != nil {
logger.Warnf("could not empty Downloads directory: %v", err) logger.Warnf("could not empty Downloads directory: %v", err)
} }
if err := fsutil.EnsureDir(instance.Paths.Generated.Tmp); err != nil {
logger.Warnf("could not create Tmp directory: %v", err)
} else {
if err := fsutil.EmptyDir(instance.Paths.Generated.Tmp); err != nil { if err := fsutil.EmptyDir(instance.Paths.Generated.Tmp); err != nil {
logger.Warnf("could not empty Tmp directory: %v", err) logger.Warnf("could not empty Tmp directory: %v", err)
} }
}
}, deleteTimeout, func(done chan struct{}) { }, deleteTimeout, func(done chan struct{}) {
logger.Info("Please wait. Deleting temporary files...") // print logger.Info("Please wait. Deleting temporary files...") // print
<-done // and wait for deletion <-done // and wait for deletion
@@ -526,6 +622,8 @@ func (s *Manager) Setup(ctx context.Context, input SetupInput) error {
return fmt.Errorf("error initializing FFMPEG subsystem: %v", err) return fmt.Errorf("error initializing FFMPEG subsystem: %v", err)
} }
instance.Scanner = makeScanner(instance.Database, instance.PluginCache)
return nil return nil
} }

View File

@@ -13,18 +13,13 @@ import (
"github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/job"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
) )
func isGallery(pathname string) bool { func isZip(pathname string) bool {
gExt := config.GetInstance().GetGalleryExtensions() gExt := config.GetInstance().GetGalleryExtensions()
return fsutil.MatchExtension(pathname, gExt) return fsutil.MatchExtension(pathname, gExt)
} }
func isCaptions(pathname string) bool {
return fsutil.MatchExtension(pathname, scene.CaptionExts)
}
func isVideo(pathname string) bool { func isVideo(pathname string) bool {
vidExt := config.GetInstance().GetVideoExtensions() vidExt := config.GetInstance().GetVideoExtensions()
return fsutil.MatchExtension(pathname, vidExt) return fsutil.MatchExtension(pathname, vidExt)
@@ -36,13 +31,15 @@ func isImage(pathname string) bool {
} }
func getScanPaths(inputPaths []string) []*config.StashConfig { func getScanPaths(inputPaths []string) []*config.StashConfig {
stashPaths := config.GetInstance().GetStashPaths()
if len(inputPaths) == 0 { if len(inputPaths) == 0 {
return config.GetInstance().GetStashPaths() return stashPaths
} }
var ret []*config.StashConfig var ret []*config.StashConfig
for _, p := range inputPaths { for _, p := range inputPaths {
s := getStashFromDirPath(p) s := getStashFromDirPath(stashPaths, p)
if s == nil { if s == nil {
logger.Warnf("%s is not in the configured stash paths", p) logger.Warnf("%s is not in the configured stash paths", p)
continue continue
@@ -84,7 +81,7 @@ func (s *Manager) Scan(ctx context.Context, input ScanMetadataInput) (int, error
} }
scanJob := ScanJob{ scanJob := ScanJob{
txnManager: s.Repository, scanner: s.Scanner,
input: input, input: input,
subscriptions: s.scanSubs, subscriptions: s.scanSubs,
} }
@@ -237,7 +234,10 @@ type CleanMetadataInput struct {
func (s *Manager) Clean(ctx context.Context, input CleanMetadataInput) int { func (s *Manager) Clean(ctx context.Context, input CleanMetadataInput) int {
j := cleanJob{ j := cleanJob{
cleaner: s.Cleaner,
txnManager: s.Repository, txnManager: s.Repository,
sceneService: s.SceneService,
imageService: s.ImageService,
input: input, input: input,
scanSubs: s.scanSubs, scanSubs: s.scanSubs,
} }

View File

@@ -0,0 +1,93 @@
package manager
import (
"context"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/sqlite"
"github.com/stashapp/stash/pkg/txn"
)
// ImageReaderWriter combines the image model read/write operations with
// the find/create/update operations required by the image service.
type ImageReaderWriter interface {
	models.ImageReaderWriter
	image.FinderCreatorUpdater
}

// GalleryReaderWriter combines the gallery model read/write operations
// with the find/create/update operations required by the gallery service.
type GalleryReaderWriter interface {
	models.GalleryReaderWriter
	gallery.FinderCreatorUpdater
}

// SceneReaderWriter combines the scene model read/write operations with
// the create/update operations required by the scene service.
type SceneReaderWriter interface {
	models.SceneReaderWriter
	scene.CreatorUpdater
}

// FileReaderWriter extends the file store/finder with filtered queries
// and video caption lookup.
type FileReaderWriter interface {
	file.Store
	file.Finder

	// Query returns files matching the given query options.
	Query(ctx context.Context, options models.FileQueryOptions) (*models.FileQueryResult, error)

	// GetCaptions returns the video captions associated with the given file.
	GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error)
}

// FolderReaderWriter extends the folder store with lookup by folder ID.
type FolderReaderWriter interface {
	file.FolderStore

	// Find returns the folder with the given ID.
	Find(ctx context.Context, id file.FolderID) (*file.Folder, error)
}
// Repository aggregates the data stores for every model type together with
// the transaction manager that scopes operations on them. It is the
// manager-level view of the data layer.
type Repository struct {
	models.TxnManager

	File        FileReaderWriter
	Folder      FolderReaderWriter
	Gallery     GalleryReaderWriter
	Image       ImageReaderWriter
	Movie       models.MovieReaderWriter
	Performer   models.PerformerReaderWriter
	Scene       SceneReaderWriter
	SceneMarker models.SceneMarkerReaderWriter
	ScrapedItem models.ScrapedItemReaderWriter
	Studio      models.StudioReaderWriter
	Tag         models.TagReaderWriter
	SavedFilter models.SavedFilterReaderWriter
}
// WithTxn executes fn inside a transaction scoped by r's transaction
// manager.
func (r *Repository) WithTxn(ctx context.Context, fn txn.TxnFunc) error {
	return txn.WithTxn(ctx, r, fn)
}
// sqliteRepository builds a Repository backed by the given sqlite database,
// wiring each model store to its sqlite implementation. File, folder,
// gallery, image and scene stores come directly from the database; the
// remaining stores come from its transaction repository.
func sqliteRepository(d *sqlite.Database) Repository {
	r := d.TxnRepository()

	return Repository{
		TxnManager:  r,
		File:        d.File,
		Folder:      d.Folder,
		Gallery:     d.Gallery,
		Image:       d.Image,
		Scene:       d.Scene,
		Movie:       r.Movie,
		Performer:   r.Performer,
		SceneMarker: r.SceneMarker,
		ScrapedItem: r.ScrapedItem,
		Studio:      r.Studio,
		Tag:         r.Tag,
		SavedFilter: r.SavedFilter,
	}
}
// SceneService destroys scenes, optionally deleting generated content and
// the underlying files.
type SceneService interface {
	Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile bool) error
}

// ImageService destroys images, optionally deleting generated content and
// the underlying files.
type ImageService interface {
	Destroy(ctx context.Context, image *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error
}

// GalleryService destroys galleries, optionally deleting generated content
// and the underlying files, returning the images affected by the
// destruction.
type GalleryService interface {
	Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error)
}

View File

@@ -38,7 +38,7 @@ func (c *StreamRequestContext) Cancel() {
} }
func KillRunningStreams(scene *models.Scene, fileNamingAlgo models.HashAlgorithm) { func KillRunningStreams(scene *models.Scene, fileNamingAlgo models.HashAlgorithm) {
instance.ReadLockManager.Cancel(scene.Path) instance.ReadLockManager.Cancel(scene.Path())
sceneHash := scene.GetHash(fileNamingAlgo) sceneHash := scene.GetHash(fileNamingAlgo)
@@ -62,7 +62,7 @@ type SceneServer struct {
func (s *SceneServer) StreamSceneDirect(scene *models.Scene, w http.ResponseWriter, r *http.Request) { func (s *SceneServer) StreamSceneDirect(scene *models.Scene, w http.ResponseWriter, r *http.Request) {
fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm() fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm()
filepath := GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.GetHash(fileNamingAlgo)) filepath := GetInstance().Paths.Scene.GetStreamPath(scene.Path(), scene.GetHash(fileNamingAlgo))
streamRequestCtx := NewStreamRequestContext(w, r) streamRequestCtx := NewStreamRequestContext(w, r)
// #2579 - hijacking and closing the connection here causes video playback to fail in Safari // #2579 - hijacking and closing the connection here causes video playback to fail in Safari

View File

@@ -11,17 +11,18 @@ import (
func GetSceneFileContainer(scene *models.Scene) (ffmpeg.Container, error) { func GetSceneFileContainer(scene *models.Scene) (ffmpeg.Container, error) {
var container ffmpeg.Container var container ffmpeg.Container
if scene.Format.Valid { format := scene.Format()
container = ffmpeg.Container(scene.Format.String) if format != "" {
container = ffmpeg.Container(format)
} else { // container isn't in the DB } else { // container isn't in the DB
// shouldn't happen, fallback to ffprobe // shouldn't happen, fallback to ffprobe
ffprobe := GetInstance().FFProbe ffprobe := GetInstance().FFProbe
tmpVideoFile, err := ffprobe.NewVideoFile(scene.Path) tmpVideoFile, err := ffprobe.NewVideoFile(scene.Path())
if err != nil { if err != nil {
return ffmpeg.Container(""), fmt.Errorf("error reading video file: %v", err) return ffmpeg.Container(""), fmt.Errorf("error reading video file: %v", err)
} }
return ffmpeg.MatchContainer(tmpVideoFile.Container, scene.Path) return ffmpeg.MatchContainer(tmpVideoFile.Container, scene.Path())
} }
return container, nil return container, nil
@@ -32,7 +33,7 @@ func includeSceneStreamPath(scene *models.Scene, streamingResolution models.Stre
// resolution // resolution
convertedRes := models.ResolutionEnum(streamingResolution) convertedRes := models.ResolutionEnum(streamingResolution)
minResolution := int64(convertedRes.GetMinResolution()) minResolution := convertedRes.GetMinResolution()
sceneResolution := scene.GetMinResolution() sceneResolution := scene.GetMinResolution()
// don't include if scene resolution is smaller than the streamingResolution // don't include if scene resolution is smaller than the streamingResolution
@@ -47,7 +48,7 @@ func includeSceneStreamPath(scene *models.Scene, streamingResolution models.Stre
// convert StreamingResolutionEnum to ResolutionEnum // convert StreamingResolutionEnum to ResolutionEnum
maxStreamingResolution := models.ResolutionEnum(maxStreamingTranscodeSize) maxStreamingResolution := models.ResolutionEnum(maxStreamingTranscodeSize)
return int64(maxStreamingResolution.GetMinResolution()) >= minResolution return maxStreamingResolution.GetMinResolution() >= minResolution
} }
type SceneStreamEndpoint struct { type SceneStreamEndpoint struct {
@@ -79,8 +80,8 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreami
// direct stream should only apply when the audio codec is supported // direct stream should only apply when the audio codec is supported
audioCodec := ffmpeg.MissingUnsupported audioCodec := ffmpeg.MissingUnsupported
if scene.AudioCodec.Valid { if scene.AudioCodec() != "" {
audioCodec = ffmpeg.ProbeAudioCodec(scene.AudioCodec.String) audioCodec = ffmpeg.ProbeAudioCodec(scene.AudioCodec())
} }
// don't care if we can't get the container // don't care if we can't get the container

View File

@@ -19,7 +19,7 @@ import (
) )
type autoTagJob struct { type autoTagJob struct {
txnManager models.Repository txnManager Repository
input AutoTagMetadataInput input AutoTagMetadataInput
cache match.Cache cache match.Cache
@@ -165,13 +165,13 @@ func (j *autoTagJob) autoTagPerformers(ctx context.Context, progress *job.Progre
if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error { if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error {
r := j.txnManager r := j.txnManager
if err := autotag.PerformerScenes(ctx, performer, paths, r.Scene, &j.cache); err != nil { if err := autotag.PerformerScenes(ctx, performer, paths, r.Scene, &j.cache); err != nil {
return err return fmt.Errorf("processing scenes: %w", err)
} }
if err := autotag.PerformerImages(ctx, performer, paths, r.Image, &j.cache); err != nil { if err := autotag.PerformerImages(ctx, performer, paths, r.Image, &j.cache); err != nil {
return err return fmt.Errorf("processing images: %w", err)
} }
if err := autotag.PerformerGalleries(ctx, performer, paths, r.Gallery, &j.cache); err != nil { if err := autotag.PerformerGalleries(ctx, performer, paths, r.Gallery, &j.cache); err != nil {
return err return fmt.Errorf("processing galleries: %w", err)
} }
return nil return nil
@@ -241,17 +241,17 @@ func (j *autoTagJob) autoTagStudios(ctx context.Context, progress *job.Progress,
if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error { if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error {
aliases, err := r.Studio.GetAliases(ctx, studio.ID) aliases, err := r.Studio.GetAliases(ctx, studio.ID)
if err != nil { if err != nil {
return err return fmt.Errorf("getting studio aliases: %w", err)
} }
if err := autotag.StudioScenes(ctx, studio, paths, aliases, r.Scene, &j.cache); err != nil { if err := autotag.StudioScenes(ctx, studio, paths, aliases, r.Scene, &j.cache); err != nil {
return err return fmt.Errorf("processing scenes: %w", err)
} }
if err := autotag.StudioImages(ctx, studio, paths, aliases, r.Image, &j.cache); err != nil { if err := autotag.StudioImages(ctx, studio, paths, aliases, r.Image, &j.cache); err != nil {
return err return fmt.Errorf("processing images: %w", err)
} }
if err := autotag.StudioGalleries(ctx, studio, paths, aliases, r.Gallery, &j.cache); err != nil { if err := autotag.StudioGalleries(ctx, studio, paths, aliases, r.Gallery, &j.cache); err != nil {
return err return fmt.Errorf("processing galleries: %w", err)
} }
return nil return nil
@@ -315,17 +315,17 @@ func (j *autoTagJob) autoTagTags(ctx context.Context, progress *job.Progress, pa
if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error { if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error {
aliases, err := r.Tag.GetAliases(ctx, tag.ID) aliases, err := r.Tag.GetAliases(ctx, tag.ID)
if err != nil { if err != nil {
return err return fmt.Errorf("getting tag aliases: %w", err)
} }
if err := autotag.TagScenes(ctx, tag, paths, aliases, r.Scene, &j.cache); err != nil { if err := autotag.TagScenes(ctx, tag, paths, aliases, r.Scene, &j.cache); err != nil {
return err return fmt.Errorf("processing scenes: %w", err)
} }
if err := autotag.TagImages(ctx, tag, paths, aliases, r.Image, &j.cache); err != nil { if err := autotag.TagImages(ctx, tag, paths, aliases, r.Image, &j.cache); err != nil {
return err return fmt.Errorf("processing images: %w", err)
} }
if err := autotag.TagGalleries(ctx, tag, paths, aliases, r.Gallery, &j.cache); err != nil { if err := autotag.TagGalleries(ctx, tag, paths, aliases, r.Gallery, &j.cache); err != nil {
return err return fmt.Errorf("processing galleries: %w", err)
} }
return nil return nil
@@ -351,7 +351,7 @@ type autoTagFilesTask struct {
tags bool tags bool
progress *job.Progress progress *job.Progress
txnManager models.Repository txnManager Repository
cache *match.Cache cache *match.Cache
} }
@@ -431,7 +431,7 @@ func (t *autoTagFilesTask) makeGalleryFilter() *models.GalleryFilterType {
return ret return ret
} }
func (t *autoTagFilesTask) getCount(ctx context.Context, r models.Repository) (int, error) { func (t *autoTagFilesTask) getCount(ctx context.Context, r Repository) (int, error) {
pp := 0 pp := 0
findFilter := &models.FindFilterType{ findFilter := &models.FindFilterType{
PerPage: &pp, PerPage: &pp,
@@ -445,7 +445,7 @@ func (t *autoTagFilesTask) getCount(ctx context.Context, r models.Repository) (i
SceneFilter: t.makeSceneFilter(), SceneFilter: t.makeSceneFilter(),
}) })
if err != nil { if err != nil {
return 0, err return 0, fmt.Errorf("getting scene count: %w", err)
} }
sceneCount := sceneResults.Count sceneCount := sceneResults.Count
@@ -458,20 +458,20 @@ func (t *autoTagFilesTask) getCount(ctx context.Context, r models.Repository) (i
ImageFilter: t.makeImageFilter(), ImageFilter: t.makeImageFilter(),
}) })
if err != nil { if err != nil {
return 0, err return 0, fmt.Errorf("getting image count: %w", err)
} }
imageCount := imageResults.Count imageCount := imageResults.Count
_, galleryCount, err := r.Gallery.Query(ctx, t.makeGalleryFilter(), findFilter) _, galleryCount, err := r.Gallery.Query(ctx, t.makeGalleryFilter(), findFilter)
if err != nil { if err != nil {
return 0, err return 0, fmt.Errorf("getting gallery count: %w", err)
} }
return sceneCount + imageCount + galleryCount, nil return sceneCount + imageCount + galleryCount, nil
} }
func (t *autoTagFilesTask) processScenes(ctx context.Context, r models.Repository) error { func (t *autoTagFilesTask) processScenes(ctx context.Context, r Repository) error {
if job.IsCancelled(ctx) { if job.IsCancelled(ctx) {
return nil return nil
} }
@@ -483,9 +483,13 @@ func (t *autoTagFilesTask) processScenes(ctx context.Context, r models.Repositor
more := true more := true
for more { for more {
scenes, err := scene.Query(ctx, r.Scene, sceneFilter, findFilter) var scenes []*models.Scene
if err != nil { if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
var err error
scenes, err = scene.Query(ctx, r.Scene, sceneFilter, findFilter)
return err return err
}); err != nil {
return fmt.Errorf("querying scenes: %w", err)
} }
for _, ss := range scenes { for _, ss := range scenes {
@@ -524,7 +528,7 @@ func (t *autoTagFilesTask) processScenes(ctx context.Context, r models.Repositor
return nil return nil
} }
func (t *autoTagFilesTask) processImages(ctx context.Context, r models.Repository) error { func (t *autoTagFilesTask) processImages(ctx context.Context, r Repository) error {
if job.IsCancelled(ctx) { if job.IsCancelled(ctx) {
return nil return nil
} }
@@ -536,9 +540,13 @@ func (t *autoTagFilesTask) processImages(ctx context.Context, r models.Repositor
more := true more := true
for more { for more {
images, err := image.Query(ctx, r.Image, imageFilter, findFilter) var images []*models.Image
if err != nil { if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
var err error
images, err = image.Query(ctx, r.Image, imageFilter, findFilter)
return err return err
}); err != nil {
return fmt.Errorf("querying images: %w", err)
} }
for _, ss := range images { for _, ss := range images {
@@ -577,7 +585,7 @@ func (t *autoTagFilesTask) processImages(ctx context.Context, r models.Repositor
return nil return nil
} }
func (t *autoTagFilesTask) processGalleries(ctx context.Context, r models.Repository) error { func (t *autoTagFilesTask) processGalleries(ctx context.Context, r Repository) error {
if job.IsCancelled(ctx) { if job.IsCancelled(ctx) {
return nil return nil
} }
@@ -589,9 +597,13 @@ func (t *autoTagFilesTask) processGalleries(ctx context.Context, r models.Reposi
more := true more := true
for more { for more {
galleries, _, err := r.Gallery.Query(ctx, galleryFilter, findFilter) var galleries []*models.Gallery
if err != nil { if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
var err error
galleries, _, err = r.Gallery.Query(ctx, galleryFilter, findFilter)
return err return err
}); err != nil {
return fmt.Errorf("querying galleries: %w", err)
} }
for _, ss := range galleries { for _, ss := range galleries {
@@ -639,36 +651,39 @@ func (t *autoTagFilesTask) process(ctx context.Context) {
} }
t.progress.SetTotal(total) t.progress.SetTotal(total)
logger.Infof("Starting autotag of %d files", total) logger.Infof("Starting autotag of %d files", total)
return nil
}); err != nil {
logger.Errorf("error getting count for autotag task: %v", err)
return
}
logger.Info("Autotagging scenes...") logger.Info("Autotagging scenes...")
if err := t.processScenes(ctx, r); err != nil { if err := t.processScenes(ctx, r); err != nil {
return err logger.Errorf("error processing scenes: %w", err)
return
} }
logger.Info("Autotagging images...") logger.Info("Autotagging images...")
if err := t.processImages(ctx, r); err != nil { if err := t.processImages(ctx, r); err != nil {
return err logger.Errorf("error processing images: %w", err)
return
} }
logger.Info("Autotagging galleries...") logger.Info("Autotagging galleries...")
if err := t.processGalleries(ctx, r); err != nil { if err := t.processGalleries(ctx, r); err != nil {
return err logger.Errorf("error processing galleries: %w", err)
return
} }
if job.IsCancelled(ctx) { if job.IsCancelled(ctx) {
logger.Info("Stopping due to user request") logger.Info("Stopping due to user request")
} }
return nil
}); err != nil {
logger.Error(err.Error())
}
} }
type autoTagSceneTask struct { type autoTagSceneTask struct {
txnManager models.Repository txnManager Repository
scene *models.Scene scene *models.Scene
performers bool performers bool
@@ -684,17 +699,17 @@ func (t *autoTagSceneTask) Start(ctx context.Context, wg *sync.WaitGroup) {
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error { if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
if t.performers { if t.performers {
if err := autotag.ScenePerformers(ctx, t.scene, r.Scene, r.Performer, t.cache); err != nil { if err := autotag.ScenePerformers(ctx, t.scene, r.Scene, r.Performer, t.cache); err != nil {
return fmt.Errorf("error tagging scene performers for %s: %v", t.scene.Path, err) return fmt.Errorf("error tagging scene performers for %s: %v", t.scene.Path(), err)
} }
} }
if t.studios { if t.studios {
if err := autotag.SceneStudios(ctx, t.scene, r.Scene, r.Studio, t.cache); err != nil { if err := autotag.SceneStudios(ctx, t.scene, r.Scene, r.Studio, t.cache); err != nil {
return fmt.Errorf("error tagging scene studio for %s: %v", t.scene.Path, err) return fmt.Errorf("error tagging scene studio for %s: %v", t.scene.Path(), err)
} }
} }
if t.tags { if t.tags {
if err := autotag.SceneTags(ctx, t.scene, r.Scene, r.Tag, t.cache); err != nil { if err := autotag.SceneTags(ctx, t.scene, r.Scene, r.Tag, t.cache); err != nil {
return fmt.Errorf("error tagging scene tags for %s: %v", t.scene.Path, err) return fmt.Errorf("error tagging scene tags for %s: %v", t.scene.Path(), err)
} }
} }
@@ -705,7 +720,7 @@ func (t *autoTagSceneTask) Start(ctx context.Context, wg *sync.WaitGroup) {
} }
type autoTagImageTask struct { type autoTagImageTask struct {
txnManager models.Repository txnManager Repository
image *models.Image image *models.Image
performers bool performers bool
@@ -721,17 +736,17 @@ func (t *autoTagImageTask) Start(ctx context.Context, wg *sync.WaitGroup) {
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error { if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
if t.performers { if t.performers {
if err := autotag.ImagePerformers(ctx, t.image, r.Image, r.Performer, t.cache); err != nil { if err := autotag.ImagePerformers(ctx, t.image, r.Image, r.Performer, t.cache); err != nil {
return fmt.Errorf("error tagging image performers for %s: %v", t.image.Path, err) return fmt.Errorf("error tagging image performers for %s: %v", t.image.Path(), err)
} }
} }
if t.studios { if t.studios {
if err := autotag.ImageStudios(ctx, t.image, r.Image, r.Studio, t.cache); err != nil { if err := autotag.ImageStudios(ctx, t.image, r.Image, r.Studio, t.cache); err != nil {
return fmt.Errorf("error tagging image studio for %s: %v", t.image.Path, err) return fmt.Errorf("error tagging image studio for %s: %v", t.image.Path(), err)
} }
} }
if t.tags { if t.tags {
if err := autotag.ImageTags(ctx, t.image, r.Image, r.Tag, t.cache); err != nil { if err := autotag.ImageTags(ctx, t.image, r.Image, r.Tag, t.cache); err != nil {
return fmt.Errorf("error tagging image tags for %s: %v", t.image.Path, err) return fmt.Errorf("error tagging image tags for %s: %v", t.image.Path(), err)
} }
} }
@@ -742,7 +757,7 @@ func (t *autoTagImageTask) Start(ctx context.Context, wg *sync.WaitGroup) {
} }
type autoTagGalleryTask struct { type autoTagGalleryTask struct {
txnManager models.Repository txnManager Repository
gallery *models.Gallery gallery *models.Gallery
performers bool performers bool
@@ -758,17 +773,17 @@ func (t *autoTagGalleryTask) Start(ctx context.Context, wg *sync.WaitGroup) {
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error { if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
if t.performers { if t.performers {
if err := autotag.GalleryPerformers(ctx, t.gallery, r.Gallery, r.Performer, t.cache); err != nil { if err := autotag.GalleryPerformers(ctx, t.gallery, r.Gallery, r.Performer, t.cache); err != nil {
return fmt.Errorf("error tagging gallery performers for %s: %v", t.gallery.Path.String, err) return fmt.Errorf("error tagging gallery performers for %s: %v", t.gallery.Path(), err)
} }
} }
if t.studios { if t.studios {
if err := autotag.GalleryStudios(ctx, t.gallery, r.Gallery, r.Studio, t.cache); err != nil { if err := autotag.GalleryStudios(ctx, t.gallery, r.Gallery, r.Studio, t.cache); err != nil {
return fmt.Errorf("error tagging gallery studio for %s: %v", t.gallery.Path.String, err) return fmt.Errorf("error tagging gallery studio for %s: %v", t.gallery.Path(), err)
} }
} }
if t.tags { if t.tags {
if err := autotag.GalleryTags(ctx, t.gallery, r.Gallery, r.Tag, t.cache); err != nil { if err := autotag.GalleryTags(ctx, t.gallery, r.Gallery, r.Tag, t.cache); err != nil {
return fmt.Errorf("error tagging gallery tags for %s: %v", t.gallery.Path.String, err) return fmt.Errorf("error tagging gallery tags for %s: %v", t.gallery.Path(), err)
} }
} }

View File

@@ -3,61 +3,45 @@ package manager
import ( import (
"context" "context"
"fmt" "fmt"
"io/fs"
"path/filepath" "path/filepath"
"time"
"github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/job"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin" "github.com/stashapp/stash/pkg/plugin"
"github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/scene"
) )
type cleaner interface {
Clean(ctx context.Context, options file.CleanOptions, progress *job.Progress)
}
type cleanJob struct { type cleanJob struct {
txnManager models.Repository cleaner cleaner
txnManager Repository
input CleanMetadataInput input CleanMetadataInput
sceneService SceneService
imageService ImageService
scanSubs *subscriptionManager scanSubs *subscriptionManager
} }
func (j *cleanJob) Execute(ctx context.Context, progress *job.Progress) { func (j *cleanJob) Execute(ctx context.Context, progress *job.Progress) {
logger.Infof("Starting cleaning of tracked files") logger.Infof("Starting cleaning of tracked files")
start := time.Now()
if j.input.DryRun { if j.input.DryRun {
logger.Infof("Running in Dry Mode") logger.Infof("Running in Dry Mode")
} }
r := j.txnManager j.cleaner.Clean(ctx, file.CleanOptions{
Paths: j.input.Paths,
if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error { DryRun: j.input.DryRun,
total, err := j.getCount(ctx, r) PathFilter: newCleanFilter(instance.Config),
if err != nil { }, progress)
return fmt.Errorf("error getting count: %w", err)
}
progress.SetTotal(total)
if job.IsCancelled(ctx) {
return nil
}
if err := j.processScenes(ctx, progress, r.Scene); err != nil {
return fmt.Errorf("error cleaning scenes: %w", err)
}
if err := j.processImages(ctx, progress, r.Image); err != nil {
return fmt.Errorf("error cleaning images: %w", err)
}
if err := j.processGalleries(ctx, progress, r.Gallery, r.Image); err != nil {
return fmt.Errorf("error cleaning galleries: %w", err)
}
return nil
}); err != nil {
logger.Error(err.Error())
return
}
if job.IsCancelled(ctx) { if job.IsCancelled(ctx) {
logger.Info("Stopping due to user request") logger.Info("Stopping due to user request")
@@ -65,303 +49,91 @@ func (j *cleanJob) Execute(ctx context.Context, progress *job.Progress) {
} }
j.scanSubs.notify() j.scanSubs.notify()
logger.Info("Finished Cleaning") elapsed := time.Since(start)
logger.Info(fmt.Sprintf("Finished Cleaning (%s)", elapsed))
} }
func (j *cleanJob) getCount(ctx context.Context, r models.Repository) (int, error) { type cleanFilter struct {
sceneFilter := scene.PathsFilter(j.input.Paths) scanFilter
sceneResult, err := r.Scene.Query(ctx, models.SceneQueryOptions{ }
QueryOptions: models.QueryOptions{
Count: true, func newCleanFilter(c *config.Instance) *cleanFilter {
return &cleanFilter{
scanFilter: scanFilter{
stashPaths: c.GetStashPaths(),
generatedPath: c.GetGeneratedPath(),
vidExt: c.GetVideoExtensions(),
imgExt: c.GetImageExtensions(),
zipExt: c.GetGalleryExtensions(),
videoExcludeRegex: generateRegexps(c.GetExcludes()),
imageExcludeRegex: generateRegexps(c.GetImageExcludes()),
}, },
SceneFilter: sceneFilter,
})
if err != nil {
return 0, err
}
imageCount, err := r.Image.QueryCount(ctx, image.PathsFilter(j.input.Paths), nil)
if err != nil {
return 0, err
}
galleryCount, err := r.Gallery.QueryCount(ctx, gallery.PathsFilter(j.input.Paths), nil)
if err != nil {
return 0, err
}
return sceneResult.Count + imageCount + galleryCount, nil
}
func (j *cleanJob) processScenes(ctx context.Context, progress *job.Progress, qb scene.Queryer) error {
batchSize := 1000
findFilter := models.BatchFindFilter(batchSize)
sceneFilter := scene.PathsFilter(j.input.Paths)
sort := "path"
findFilter.Sort = &sort
var toDelete []int
more := true
for more {
if job.IsCancelled(ctx) {
return nil
}
scenes, err := scene.Query(ctx, qb, sceneFilter, findFilter)
if err != nil {
return fmt.Errorf("error querying for scenes: %w", err)
}
for _, scene := range scenes {
progress.ExecuteTask(fmt.Sprintf("Assessing scene %s for clean", scene.Path), func() {
if j.shouldCleanScene(scene) {
toDelete = append(toDelete, scene.ID)
} else {
// increment progress, no further processing
progress.Increment()
}
})
}
if len(scenes) != batchSize {
more = false
} else {
*findFilter.Page++
} }
} }
if j.input.DryRun && len(toDelete) > 0 { func (f *cleanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) bool {
// add progress for scenes that would've been deleted
progress.AddProcessed(len(toDelete))
}
fileNamingAlgorithm := instance.Config.GetVideoFileNamingAlgorithm()
if !j.input.DryRun && len(toDelete) > 0 {
progress.ExecuteTask(fmt.Sprintf("Cleaning %d scenes", len(toDelete)), func() {
for _, sceneID := range toDelete {
if job.IsCancelled(ctx) {
return
}
j.deleteScene(ctx, fileNamingAlgorithm, sceneID)
progress.Increment()
}
})
}
return nil
}
func (j *cleanJob) processGalleries(ctx context.Context, progress *job.Progress, qb gallery.Queryer, iqb models.ImageReader) error {
batchSize := 1000
findFilter := models.BatchFindFilter(batchSize)
galleryFilter := gallery.PathsFilter(j.input.Paths)
sort := "path"
findFilter.Sort = &sort
var toDelete []int
more := true
for more {
if job.IsCancelled(ctx) {
return nil
}
galleries, _, err := qb.Query(ctx, galleryFilter, findFilter)
if err != nil {
return fmt.Errorf("error querying for galleries: %w", err)
}
for _, gallery := range galleries {
progress.ExecuteTask(fmt.Sprintf("Assessing gallery %s for clean", gallery.GetTitle()), func() {
if j.shouldCleanGallery(ctx, gallery, iqb) {
toDelete = append(toDelete, gallery.ID)
} else {
// increment progress, no further processing
progress.Increment()
}
})
}
if len(galleries) != batchSize {
more = false
} else {
*findFilter.Page++
}
}
if j.input.DryRun && len(toDelete) > 0 {
// add progress for galleries that would've been deleted
progress.AddProcessed(len(toDelete))
}
if !j.input.DryRun && len(toDelete) > 0 {
progress.ExecuteTask(fmt.Sprintf("Cleaning %d galleries", len(toDelete)), func() {
for _, galleryID := range toDelete {
if job.IsCancelled(ctx) {
return
}
j.deleteGallery(ctx, galleryID)
progress.Increment()
}
})
}
return nil
}
func (j *cleanJob) processImages(ctx context.Context, progress *job.Progress, qb image.Queryer) error {
batchSize := 1000
findFilter := models.BatchFindFilter(batchSize)
imageFilter := image.PathsFilter(j.input.Paths)
// performance consideration: order by path since default ordering by
// title is slow
sortBy := "path"
findFilter.Sort = &sortBy
var toDelete []int
more := true
for more {
if job.IsCancelled(ctx) {
return nil
}
images, err := image.Query(ctx, qb, imageFilter, findFilter)
if err != nil {
return fmt.Errorf("error querying for images: %w", err)
}
for _, image := range images {
progress.ExecuteTask(fmt.Sprintf("Assessing image %s for clean", image.Path), func() {
if j.shouldCleanImage(image) {
toDelete = append(toDelete, image.ID)
} else {
// increment progress, no further processing
progress.Increment()
}
})
}
if len(images) != batchSize {
more = false
} else {
*findFilter.Page++
}
}
if j.input.DryRun && len(toDelete) > 0 {
// add progress for images that would've been deleted
progress.AddProcessed(len(toDelete))
}
if !j.input.DryRun && len(toDelete) > 0 {
progress.ExecuteTask(fmt.Sprintf("Cleaning %d images", len(toDelete)), func() {
for _, imageID := range toDelete {
if job.IsCancelled(ctx) {
return
}
j.deleteImage(ctx, imageID)
progress.Increment()
}
})
}
return nil
}
func (j *cleanJob) shouldClean(path string) bool {
// use image.FileExists for zip file checking
fileExists := image.FileExists(path)
// #1102 - clean anything in generated path // #1102 - clean anything in generated path
generatedPath := config.GetInstance().GetGeneratedPath() generatedPath := f.generatedPath
if !fileExists || getStashFromPath(path) == nil || fsutil.IsPathInDir(generatedPath, path) {
logger.Infof("File not found. Marking to clean: \"%s\"", path)
return true
}
return false var stash *config.StashConfig
} fileOrFolder := "File"
func (j *cleanJob) shouldCleanScene(s *models.Scene) bool { if info.IsDir() {
if j.shouldClean(s.Path) { fileOrFolder = "Folder"
return true stash = getStashFromDirPath(f.stashPaths, path)
}
stash := getStashFromPath(s.Path)
if stash.ExcludeVideo {
logger.Infof("File in stash library that excludes video. Marking to clean: \"%s\"", s.Path)
return true
}
config := config.GetInstance()
if !fsutil.MatchExtension(s.Path, config.GetVideoExtensions()) {
logger.Infof("File extension does not match video extensions. Marking to clean: \"%s\"", s.Path)
return true
}
if matchFile(s.Path, config.GetExcludes()) {
logger.Infof("File matched regex. Marking to clean: \"%s\"", s.Path)
return true
}
return false
}
func (j *cleanJob) shouldCleanGallery(ctx context.Context, g *models.Gallery, qb models.ImageReader) bool {
// never clean manually created galleries
if !g.Path.Valid {
return false
}
path := g.Path.String
if j.shouldClean(path) {
return true
}
stash := getStashFromPath(path)
if stash.ExcludeImage {
logger.Infof("File in stash library that excludes images. Marking to clean: \"%s\"", path)
return true
}
config := config.GetInstance()
if g.Zip {
if !fsutil.MatchExtension(path, config.GetGalleryExtensions()) {
logger.Infof("File extension does not match gallery extensions. Marking to clean: \"%s\"", path)
return true
}
if countImagesInZip(path) == 0 {
logger.Infof("Gallery has 0 images. Marking to clean: \"%s\"", path)
return true
}
} else { } else {
// folder-based - delete if it has no images stash = getStashFromPath(f.stashPaths, path)
count, err := qb.CountByGalleryID(ctx, g.ID) }
if err != nil {
logger.Warnf("Error trying to count gallery images for %q: %v", path, err) if stash == nil {
logger.Infof("%s not in any stash library directories. Marking to clean: \"%s\"", fileOrFolder, path)
return false return false
} }
if count == 0 { if fsutil.IsPathInDir(generatedPath, path) {
logger.Infof("%s is in generated path. Marking to clean: \"%s\"", fileOrFolder, path)
return false
}
if info.IsDir() {
return !f.shouldCleanFolder(path, stash)
}
return !f.shouldCleanFile(path, info, stash)
}
func (f *cleanFilter) shouldCleanFolder(path string, s *config.StashConfig) bool {
// only delete folders where it is excluded from everything
pathExcludeTest := path + string(filepath.Separator)
if (s.ExcludeVideo || matchFileRegex(pathExcludeTest, f.videoExcludeRegex)) && (s.ExcludeImage || matchFileRegex(pathExcludeTest, f.imageExcludeRegex)) {
logger.Infof("Folder is excluded from both video and image. Marking to clean: \"%s\"", path)
return true
}
return false
}
func (f *cleanFilter) shouldCleanFile(path string, info fs.FileInfo, stash *config.StashConfig) bool {
switch {
case info.IsDir() || fsutil.MatchExtension(path, f.zipExt):
return f.shouldCleanGallery(path, stash)
case fsutil.MatchExtension(path, f.vidExt):
return f.shouldCleanVideoFile(path, stash)
case fsutil.MatchExtension(path, f.imgExt):
return f.shouldCleanImage(path, stash)
default:
logger.Infof("File extension does not match any media extensions. Marking to clean: \"%s\"", path)
return true return true
} }
} }
if matchFile(path, config.GetImageExcludes()) { func (f *cleanFilter) shouldCleanVideoFile(path string, stash *config.StashConfig) bool {
if stash.ExcludeVideo {
logger.Infof("File in stash library that excludes video. Marking to clean: \"%s\"", path)
return true
}
if matchFileRegex(path, f.videoExcludeRegex) {
logger.Infof("File matched regex. Marking to clean: \"%s\"", path) logger.Infof("File matched regex. Marking to clean: \"%s\"", path)
return true return true
} }
@@ -369,141 +141,186 @@ func (j *cleanJob) shouldCleanGallery(ctx context.Context, g *models.Gallery, qb
return false return false
} }
func (j *cleanJob) shouldCleanImage(s *models.Image) bool { func (f *cleanFilter) shouldCleanGallery(path string, stash *config.StashConfig) bool {
if j.shouldClean(s.Path) {
return true
}
stash := getStashFromPath(s.Path)
if stash.ExcludeImage { if stash.ExcludeImage {
logger.Infof("File in stash library that excludes images. Marking to clean: \"%s\"", s.Path) logger.Infof("File in stash library that excludes images. Marking to clean: \"%s\"", path)
return true return true
} }
config := config.GetInstance() if matchFileRegex(path, f.imageExcludeRegex) {
if !fsutil.MatchExtension(s.Path, config.GetImageExtensions()) { logger.Infof("File matched regex. Marking to clean: \"%s\"", path)
logger.Infof("File extension does not match image extensions. Marking to clean: \"%s\"", s.Path)
return true
}
if matchFile(s.Path, config.GetImageExcludes()) {
logger.Infof("File matched regex. Marking to clean: \"%s\"", s.Path)
return true return true
} }
return false return false
} }
func (j *cleanJob) deleteScene(ctx context.Context, fileNamingAlgorithm models.HashAlgorithm, sceneID int) { func (f *cleanFilter) shouldCleanImage(path string, stash *config.StashConfig) bool {
fileNamingAlgo := GetInstance().Config.GetVideoFileNamingAlgorithm() if stash.ExcludeImage {
logger.Infof("File in stash library that excludes images. Marking to clean: \"%s\"", path)
return true
}
fileDeleter := &scene.FileDeleter{ if matchFileRegex(path, f.imageExcludeRegex) {
Deleter: *file.NewDeleter(), logger.Infof("File matched regex. Marking to clean: \"%s\"", path)
return true
}
return false
}
type cleanHandler struct {
PluginCache *plugin.Cache
}
func (h *cleanHandler) HandleFile(ctx context.Context, fileDeleter *file.Deleter, fileID file.ID) error {
if err := h.deleteRelatedScenes(ctx, fileDeleter, fileID); err != nil {
return err
}
if err := h.deleteRelatedGalleries(ctx, fileID); err != nil {
return err
}
if err := h.deleteRelatedImages(ctx, fileDeleter, fileID); err != nil {
return err
}
return nil
}
func (h *cleanHandler) HandleFolder(ctx context.Context, fileDeleter *file.Deleter, folderID file.FolderID) error {
return h.deleteRelatedFolderGalleries(ctx, folderID)
}
func (h *cleanHandler) deleteRelatedScenes(ctx context.Context, fileDeleter *file.Deleter, fileID file.ID) error {
mgr := GetInstance()
sceneQB := mgr.Database.Scene
scenes, err := sceneQB.FindByFileID(ctx, fileID)
if err != nil {
return err
}
fileNamingAlgo := mgr.Config.GetVideoFileNamingAlgorithm()
sceneFileDeleter := &scene.FileDeleter{
Deleter: fileDeleter,
FileNamingAlgo: fileNamingAlgo, FileNamingAlgo: fileNamingAlgo,
Paths: GetInstance().Paths, Paths: mgr.Paths,
} }
var s *models.Scene
if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error {
repo := j.txnManager
qb := repo.Scene
var err error for _, scene := range scenes {
s, err = qb.Find(ctx, sceneID) // only delete if the scene has no other files
if len(scene.Files) <= 1 {
logger.Infof("Deleting scene %q since it has no other related files", scene.GetTitle())
if err := mgr.SceneService.Destroy(ctx, scene, sceneFileDeleter, true, false); err != nil {
return err
}
checksum := scene.Checksum()
oshash := scene.OSHash()
mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, scene.ID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{
Checksum: checksum,
OSHash: oshash,
Path: scene.Path(),
}, nil)
}
}
return nil
}
func (h *cleanHandler) deleteRelatedGalleries(ctx context.Context, fileID file.ID) error {
mgr := GetInstance()
qb := mgr.Database.Gallery
galleries, err := qb.FindByFileID(ctx, fileID)
if err != nil { if err != nil {
return err return err
} }
return scene.Destroy(ctx, s, repo.Scene, repo.SceneMarker, fileDeleter, true, false) for _, g := range galleries {
}); err != nil { // only delete if the gallery has no other files
fileDeleter.Rollback() if len(g.Files) <= 1 {
logger.Infof("Deleting gallery %q since it has no other related files", g.GetTitle())
logger.Errorf("Error deleting scene from database: %s", err.Error()) if err := qb.Destroy(ctx, g.ID); err != nil {
return return err
} }
// perform the post-commit actions mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, g.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
fileDeleter.Commit() Checksum: g.Checksum(),
Path: g.Path(),
GetInstance().PluginCache.ExecutePostHooks(ctx, sceneID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{
Checksum: s.Checksum.String,
OSHash: s.OSHash.String,
Path: s.Path,
}, nil) }, nil)
} }
}
func (j *cleanJob) deleteGallery(ctx context.Context, galleryID int) { return nil
var g *models.Gallery }
if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error { func (h *cleanHandler) deleteRelatedFolderGalleries(ctx context.Context, folderID file.FolderID) error {
qb := j.txnManager.Gallery mgr := GetInstance()
qb := mgr.Database.Gallery
var err error galleries, err := qb.FindByFolderID(ctx, folderID)
g, err = qb.Find(ctx, galleryID)
if err != nil { if err != nil {
return err return err
} }
return qb.Destroy(ctx, galleryID) for _, g := range galleries {
}); err != nil { logger.Infof("Deleting folder-based gallery %q since the folder no longer exists", g.GetTitle())
logger.Errorf("Error deleting gallery from database: %s", err.Error()) if err := qb.Destroy(ctx, g.ID); err != nil {
return return err
} }
GetInstance().PluginCache.ExecutePostHooks(ctx, galleryID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{ mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, g.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
Checksum: g.Checksum, Checksum: g.Checksum(),
Path: g.Path.String, Path: g.Path(),
}, nil) }, nil)
} }
func (j *cleanJob) deleteImage(ctx context.Context, imageID int) { return nil
fileDeleter := &image.FileDeleter{ }
Deleter: *file.NewDeleter(),
func (h *cleanHandler) deleteRelatedImages(ctx context.Context, fileDeleter *file.Deleter, fileID file.ID) error {
mgr := GetInstance()
imageQB := mgr.Database.Image
images, err := imageQB.FindByFileID(ctx, fileID)
if err != nil {
return err
}
imageFileDeleter := &image.FileDeleter{
Deleter: fileDeleter,
Paths: GetInstance().Paths, Paths: GetInstance().Paths,
} }
var i *models.Image for _, i := range images {
if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error { if len(i.Files) <= 1 {
qb := j.txnManager.Image logger.Infof("Deleting image %q since it has no other related files", i.GetTitle())
if err := mgr.ImageService.Destroy(ctx, i, imageFileDeleter, true, false); err != nil {
var err error
i, err = qb.Find(ctx, imageID)
if err != nil {
return err return err
} }
if i == nil { mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, i.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
return fmt.Errorf("image not found: %d", imageID) Checksum: i.Checksum(),
} Path: i.Path(),
return image.Destroy(ctx, i, qb, fileDeleter, true, false)
}); err != nil {
fileDeleter.Rollback()
logger.Errorf("Error deleting image from database: %s", err.Error())
return
}
// perform the post-commit actions
fileDeleter.Commit()
GetInstance().PluginCache.ExecutePostHooks(ctx, imageID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
Checksum: i.Checksum,
Path: i.Path,
}, nil) }, nil)
} }
}
func getStashFromPath(pathToCheck string) *config.StashConfig { return nil
for _, s := range config.GetInstance().GetStashPaths() { }
if fsutil.IsPathInDir(s.Path, filepath.Dir(pathToCheck)) {
return s func getStashFromPath(stashes []*config.StashConfig, pathToCheck string) *config.StashConfig {
for _, f := range stashes {
if fsutil.IsPathInDir(f.Path, filepath.Dir(pathToCheck)) {
return f
} }
} }
return nil return nil
} }
func getStashFromDirPath(pathToCheck string) *config.StashConfig { func getStashFromDirPath(stashes []*config.StashConfig, pathToCheck string) *config.StashConfig {
for _, s := range config.GetInstance().GetStashPaths() { for _, f := range stashes {
if fsutil.IsPathInDir(s.Path, pathToCheck) { if fsutil.IsPathInDir(f.Path, pathToCheck) {
return s return f
} }
} }
return nil return nil

View File

@@ -32,7 +32,7 @@ import (
) )
type ExportTask struct { type ExportTask struct {
txnManager models.Repository txnManager Repository
full bool full bool
baseDir string baseDir string
@@ -286,7 +286,7 @@ func (t *ExportTask) zipFile(fn, outDir string, z *zip.Writer) error {
return nil return nil
} }
func (t *ExportTask) populateMovieScenes(ctx context.Context, repo models.Repository) { func (t *ExportTask) populateMovieScenes(ctx context.Context, repo Repository) {
reader := repo.Movie reader := repo.Movie
sceneReader := repo.Scene sceneReader := repo.Scene
@@ -316,7 +316,7 @@ func (t *ExportTask) populateMovieScenes(ctx context.Context, repo models.Reposi
} }
} }
func (t *ExportTask) populateGalleryImages(ctx context.Context, repo models.Repository) { func (t *ExportTask) populateGalleryImages(ctx context.Context, repo Repository) {
reader := repo.Gallery reader := repo.Gallery
imageReader := repo.Image imageReader := repo.Image
@@ -346,7 +346,7 @@ func (t *ExportTask) populateGalleryImages(ctx context.Context, repo models.Repo
} }
} }
func (t *ExportTask) ExportScenes(ctx context.Context, workers int, repo models.Repository) { func (t *ExportTask) ExportScenes(ctx context.Context, workers int, repo Repository) {
var scenesWg sync.WaitGroup var scenesWg sync.WaitGroup
sceneReader := repo.Scene sceneReader := repo.Scene
@@ -380,7 +380,7 @@ func (t *ExportTask) ExportScenes(ctx context.Context, workers int, repo models.
if (i % 100) == 0 { // make progress easier to read if (i % 100) == 0 { // make progress easier to read
logger.Progressf("[scenes] %d of %d", index, len(scenes)) logger.Progressf("[scenes] %d of %d", index, len(scenes))
} }
t.Mappings.Scenes = append(t.Mappings.Scenes, jsonschema.PathNameMapping{Path: scene.Path, Checksum: scene.GetHash(t.fileNamingAlgorithm)}) t.Mappings.Scenes = append(t.Mappings.Scenes, jsonschema.PathNameMapping{Path: scene.Path(), Checksum: scene.GetHash(t.fileNamingAlgorithm)})
jobCh <- scene // feed workers jobCh <- scene // feed workers
} }
@@ -390,7 +390,7 @@ func (t *ExportTask) ExportScenes(ctx context.Context, workers int, repo models.
logger.Infof("[scenes] export complete in %s. %d workers used.", time.Since(startTime), workers) logger.Infof("[scenes] export complete in %s. %d workers used.", time.Since(startTime), workers)
} }
func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Scene, repo models.Repository, t *ExportTask) { func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Scene, repo Repository, t *ExportTask) {
defer wg.Done() defer wg.Done()
sceneReader := repo.Scene sceneReader := repo.Scene
studioReader := repo.Studio studioReader := repo.Studio
@@ -443,15 +443,15 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
continue continue
} }
newSceneJSON.Movies, err = scene.GetSceneMoviesJSON(ctx, movieReader, sceneReader, s) newSceneJSON.Movies, err = scene.GetSceneMoviesJSON(ctx, movieReader, s)
if err != nil { if err != nil {
logger.Errorf("[scenes] <%s> error getting scene movies JSON: %s", sceneHash, err.Error()) logger.Errorf("[scenes] <%s> error getting scene movies JSON: %s", sceneHash, err.Error())
continue continue
} }
if t.includeDependencies { if t.includeDependencies {
if s.StudioID.Valid { if s.StudioID != nil {
t.studios.IDs = intslice.IntAppendUnique(t.studios.IDs, int(s.StudioID.Int64)) t.studios.IDs = intslice.IntAppendUnique(t.studios.IDs, *s.StudioID)
} }
t.galleries.IDs = intslice.IntAppendUniques(t.galleries.IDs, gallery.GetIDs(galleries)) t.galleries.IDs = intslice.IntAppendUniques(t.galleries.IDs, gallery.GetIDs(galleries))
@@ -463,7 +463,7 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
} }
t.tags.IDs = intslice.IntAppendUniques(t.tags.IDs, tagIDs) t.tags.IDs = intslice.IntAppendUniques(t.tags.IDs, tagIDs)
movieIDs, err := scene.GetDependentMovieIDs(ctx, sceneReader, s) movieIDs, err := scene.GetDependentMovieIDs(ctx, s)
if err != nil { if err != nil {
logger.Errorf("[scenes] <%s> error getting scene movies: %s", sceneHash, err.Error()) logger.Errorf("[scenes] <%s> error getting scene movies: %s", sceneHash, err.Error())
continue continue
@@ -484,7 +484,7 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
} }
} }
func (t *ExportTask) ExportImages(ctx context.Context, workers int, repo models.Repository) { func (t *ExportTask) ExportImages(ctx context.Context, workers int, repo Repository) {
var imagesWg sync.WaitGroup var imagesWg sync.WaitGroup
imageReader := repo.Image imageReader := repo.Image
@@ -518,7 +518,7 @@ func (t *ExportTask) ExportImages(ctx context.Context, workers int, repo models.
if (i % 100) == 0 { // make progress easier to read if (i % 100) == 0 { // make progress easier to read
logger.Progressf("[images] %d of %d", index, len(images)) logger.Progressf("[images] %d of %d", index, len(images))
} }
t.Mappings.Images = append(t.Mappings.Images, jsonschema.PathNameMapping{Path: image.Path, Checksum: image.Checksum}) t.Mappings.Images = append(t.Mappings.Images, jsonschema.PathNameMapping{Path: image.Path(), Checksum: image.Checksum()})
jobCh <- image // feed workers jobCh <- image // feed workers
} }
@@ -528,7 +528,7 @@ func (t *ExportTask) ExportImages(ctx context.Context, workers int, repo models.
logger.Infof("[images] export complete in %s. %d workers used.", time.Since(startTime), workers) logger.Infof("[images] export complete in %s. %d workers used.", time.Since(startTime), workers)
} }
func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Image, repo models.Repository, t *ExportTask) { func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Image, repo Repository, t *ExportTask) {
defer wg.Done() defer wg.Done()
studioReader := repo.Studio studioReader := repo.Studio
galleryReader := repo.Gallery galleryReader := repo.Gallery
@@ -536,7 +536,7 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
tagReader := repo.Tag tagReader := repo.Tag
for s := range jobChan { for s := range jobChan {
imageHash := s.Checksum imageHash := s.Checksum()
newImageJSON := image.ToBasicJSON(s) newImageJSON := image.ToBasicJSON(s)
@@ -572,8 +572,8 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
newImageJSON.Tags = tag.GetNames(tags) newImageJSON.Tags = tag.GetNames(tags)
if t.includeDependencies { if t.includeDependencies {
if s.StudioID.Valid { if s.StudioID != nil {
t.studios.IDs = intslice.IntAppendUnique(t.studios.IDs, int(s.StudioID.Int64)) t.studios.IDs = intslice.IntAppendUnique(t.studios.IDs, *s.StudioID)
} }
t.galleries.IDs = intslice.IntAppendUniques(t.galleries.IDs, gallery.GetIDs(imageGalleries)) t.galleries.IDs = intslice.IntAppendUniques(t.galleries.IDs, gallery.GetIDs(imageGalleries))
@@ -594,12 +594,12 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
func (t *ExportTask) getGalleryChecksums(galleries []*models.Gallery) (ret []string) { func (t *ExportTask) getGalleryChecksums(galleries []*models.Gallery) (ret []string) {
for _, g := range galleries { for _, g := range galleries {
ret = append(ret, g.Checksum) ret = append(ret, g.Checksum())
} }
return return
} }
func (t *ExportTask) ExportGalleries(ctx context.Context, workers int, repo models.Repository) { func (t *ExportTask) ExportGalleries(ctx context.Context, workers int, repo Repository) {
var galleriesWg sync.WaitGroup var galleriesWg sync.WaitGroup
reader := repo.Gallery reader := repo.Gallery
@@ -634,10 +634,13 @@ func (t *ExportTask) ExportGalleries(ctx context.Context, workers int, repo mode
logger.Progressf("[galleries] %d of %d", index, len(galleries)) logger.Progressf("[galleries] %d of %d", index, len(galleries))
} }
title := gallery.Title
path := gallery.Path()
t.Mappings.Galleries = append(t.Mappings.Galleries, jsonschema.PathNameMapping{ t.Mappings.Galleries = append(t.Mappings.Galleries, jsonschema.PathNameMapping{
Path: gallery.Path.String, Path: path,
Name: gallery.Title.String, Name: title,
Checksum: gallery.Checksum, Checksum: gallery.Checksum(),
}) })
jobCh <- gallery jobCh <- gallery
} }
@@ -648,14 +651,14 @@ func (t *ExportTask) ExportGalleries(ctx context.Context, workers int, repo mode
logger.Infof("[galleries] export complete in %s. %d workers used.", time.Since(startTime), workers) logger.Infof("[galleries] export complete in %s. %d workers used.", time.Since(startTime), workers)
} }
func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Gallery, repo models.Repository, t *ExportTask) { func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Gallery, repo Repository, t *ExportTask) {
defer wg.Done() defer wg.Done()
studioReader := repo.Studio studioReader := repo.Studio
performerReader := repo.Performer performerReader := repo.Performer
tagReader := repo.Tag tagReader := repo.Tag
for g := range jobChan { for g := range jobChan {
galleryHash := g.Checksum galleryHash := g.Checksum()
newGalleryJSON, err := gallery.ToBasicJSON(g) newGalleryJSON, err := gallery.ToBasicJSON(g)
if err != nil { if err != nil {
@@ -686,8 +689,8 @@ func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *mode
newGalleryJSON.Tags = tag.GetNames(tags) newGalleryJSON.Tags = tag.GetNames(tags)
if t.includeDependencies { if t.includeDependencies {
if g.StudioID.Valid { if g.StudioID != nil {
t.studios.IDs = intslice.IntAppendUnique(t.studios.IDs, int(g.StudioID.Int64)) t.studios.IDs = intslice.IntAppendUnique(t.studios.IDs, *g.StudioID)
} }
t.tags.IDs = intslice.IntAppendUniques(t.tags.IDs, tag.GetIDs(tags)) t.tags.IDs = intslice.IntAppendUniques(t.tags.IDs, tag.GetIDs(tags))
@@ -705,7 +708,7 @@ func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *mode
} }
} }
func (t *ExportTask) ExportPerformers(ctx context.Context, workers int, repo models.Repository) { func (t *ExportTask) ExportPerformers(ctx context.Context, workers int, repo Repository) {
var performersWg sync.WaitGroup var performersWg sync.WaitGroup
reader := repo.Performer reader := repo.Performer
@@ -745,7 +748,7 @@ func (t *ExportTask) ExportPerformers(ctx context.Context, workers int, repo mod
logger.Infof("[performers] export complete in %s. %d workers used.", time.Since(startTime), workers) logger.Infof("[performers] export complete in %s. %d workers used.", time.Since(startTime), workers)
} }
func (t *ExportTask) exportPerformer(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Performer, repo models.Repository) { func (t *ExportTask) exportPerformer(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Performer, repo Repository) {
defer wg.Done() defer wg.Done()
performerReader := repo.Performer performerReader := repo.Performer
@@ -783,7 +786,7 @@ func (t *ExportTask) exportPerformer(ctx context.Context, wg *sync.WaitGroup, jo
} }
} }
func (t *ExportTask) ExportStudios(ctx context.Context, workers int, repo models.Repository) { func (t *ExportTask) ExportStudios(ctx context.Context, workers int, repo Repository) {
var studiosWg sync.WaitGroup var studiosWg sync.WaitGroup
reader := repo.Studio reader := repo.Studio
@@ -824,7 +827,7 @@ func (t *ExportTask) ExportStudios(ctx context.Context, workers int, repo models
logger.Infof("[studios] export complete in %s. %d workers used.", time.Since(startTime), workers) logger.Infof("[studios] export complete in %s. %d workers used.", time.Since(startTime), workers)
} }
func (t *ExportTask) exportStudio(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Studio, repo models.Repository) { func (t *ExportTask) exportStudio(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Studio, repo Repository) {
defer wg.Done() defer wg.Done()
studioReader := repo.Studio studioReader := repo.Studio
@@ -848,7 +851,7 @@ func (t *ExportTask) exportStudio(ctx context.Context, wg *sync.WaitGroup, jobCh
} }
} }
func (t *ExportTask) ExportTags(ctx context.Context, workers int, repo models.Repository) { func (t *ExportTask) ExportTags(ctx context.Context, workers int, repo Repository) {
var tagsWg sync.WaitGroup var tagsWg sync.WaitGroup
reader := repo.Tag reader := repo.Tag
@@ -892,7 +895,7 @@ func (t *ExportTask) ExportTags(ctx context.Context, workers int, repo models.Re
logger.Infof("[tags] export complete in %s. %d workers used.", time.Since(startTime), workers) logger.Infof("[tags] export complete in %s. %d workers used.", time.Since(startTime), workers)
} }
func (t *ExportTask) exportTag(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Tag, repo models.Repository) { func (t *ExportTask) exportTag(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Tag, repo Repository) {
defer wg.Done() defer wg.Done()
tagReader := repo.Tag tagReader := repo.Tag
@@ -919,7 +922,7 @@ func (t *ExportTask) exportTag(ctx context.Context, wg *sync.WaitGroup, jobChan
} }
} }
func (t *ExportTask) ExportMovies(ctx context.Context, workers int, repo models.Repository) { func (t *ExportTask) ExportMovies(ctx context.Context, workers int, repo Repository) {
var moviesWg sync.WaitGroup var moviesWg sync.WaitGroup
reader := repo.Movie reader := repo.Movie
@@ -960,7 +963,7 @@ func (t *ExportTask) ExportMovies(ctx context.Context, workers int, repo models.
logger.Infof("[movies] export complete in %s. %d workers used.", time.Since(startTime), workers) logger.Infof("[movies] export complete in %s. %d workers used.", time.Since(startTime), workers)
} }
func (t *ExportTask) exportMovie(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Movie, repo models.Repository) { func (t *ExportTask) exportMovie(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Movie, repo Repository) {
defer wg.Done() defer wg.Done()
movieReader := repo.Movie movieReader := repo.Movie
@@ -993,7 +996,7 @@ func (t *ExportTask) exportMovie(ctx context.Context, wg *sync.WaitGroup, jobCha
} }
} }
func (t *ExportTask) ExportScrapedItems(ctx context.Context, repo models.Repository) { func (t *ExportTask) ExportScrapedItems(ctx context.Context, repo Repository) {
qb := repo.ScrapedItem qb := repo.ScrapedItem
sqb := repo.Studio sqb := repo.Studio
scrapedItems, err := qb.All(ctx) scrapedItems, err := qb.All(ctx)

View File

@@ -2,7 +2,6 @@ package manager
import ( import (
"context" "context"
"errors"
"fmt" "fmt"
"time" "time"
@@ -54,7 +53,7 @@ type GeneratePreviewOptionsInput struct {
const generateQueueSize = 200000 const generateQueueSize = 200000
type GenerateJob struct { type GenerateJob struct {
txnManager models.Repository txnManager Repository
input GenerateMetadataInput input GenerateMetadataInput
overwrite bool overwrite bool
@@ -192,20 +191,20 @@ func (j *GenerateJob) queueTasks(ctx context.Context, g *generate.Generator, que
findFilter := models.BatchFindFilter(batchSize) findFilter := models.BatchFindFilter(batchSize)
if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error {
for more := true; more; { for more := true; more; {
if job.IsCancelled(ctx) { if job.IsCancelled(ctx) {
return context.Canceled return totals
} }
scenes, err := scene.Query(ctx, j.txnManager.Scene, nil, findFilter) scenes, err := scene.Query(ctx, j.txnManager.Scene, nil, findFilter)
if err != nil { if err != nil {
return err logger.Errorf("Error encountered queuing files to scan: %s", err.Error())
return totals
} }
for _, ss := range scenes { for _, ss := range scenes {
if job.IsCancelled(ctx) { if job.IsCancelled(ctx) {
return context.Canceled return totals
} }
j.queueSceneJobs(ctx, g, ss, queue, &totals) j.queueSceneJobs(ctx, g, ss, queue, &totals)
@@ -218,13 +217,6 @@ func (j *GenerateJob) queueTasks(ctx context.Context, g *generate.Generator, que
} }
} }
return nil
}); err != nil {
if !errors.Is(err, context.Canceled) {
logger.Errorf("Error encountered queuing files to scan: %s", err.Error())
}
}
return totals return totals
} }
@@ -351,10 +343,13 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator,
} }
if utils.IsTrue(j.input.Phashes) { if utils.IsTrue(j.input.Phashes) {
// generate for all files in scene
for _, f := range scene.Files {
task := &GeneratePhashTask{ task := &GeneratePhashTask{
Scene: *scene, File: f,
fileNamingAlgorithm: j.fileNamingAlgo, fileNamingAlgorithm: j.fileNamingAlgo,
txnManager: j.txnManager, txnManager: j.txnManager,
fileUpdater: j.txnManager.File,
Overwrite: j.overwrite, Overwrite: j.overwrite,
} }
@@ -364,6 +359,7 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator,
queue <- task queue <- task
} }
} }
}
if utils.IsTrue(j.input.InteractiveHeatmapsSpeeds) { if utils.IsTrue(j.input.InteractiveHeatmapsSpeeds) {
task := &GenerateInteractiveHeatmapSpeedTask{ task := &GenerateInteractiveHeatmapSpeedTask{

View File

@@ -2,24 +2,23 @@ package manager
import ( import (
"context" "context"
"database/sql"
"fmt" "fmt"
"github.com/stashapp/stash/pkg/file/video"
"github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
) )
type GenerateInteractiveHeatmapSpeedTask struct { type GenerateInteractiveHeatmapSpeedTask struct {
Scene models.Scene Scene models.Scene
Overwrite bool Overwrite bool
fileNamingAlgorithm models.HashAlgorithm fileNamingAlgorithm models.HashAlgorithm
TxnManager models.Repository TxnManager Repository
} }
func (t *GenerateInteractiveHeatmapSpeedTask) GetDescription() string { func (t *GenerateInteractiveHeatmapSpeedTask) GetDescription() string {
return fmt.Sprintf("Generating heatmap and speed for %s", t.Scene.Path) return fmt.Sprintf("Generating heatmap and speed for %s", t.Scene.Path())
} }
func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) { func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
@@ -28,7 +27,7 @@ func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
} }
videoChecksum := t.Scene.GetHash(t.fileNamingAlgorithm) videoChecksum := t.Scene.GetHash(t.fileNamingAlgorithm)
funscriptPath := scene.GetFunscriptPath(t.Scene.Path) funscriptPath := video.GetFunscriptPath(t.Scene.Path())
heatmapPath := instance.Paths.Scene.GetInteractiveHeatmapPath(videoChecksum) heatmapPath := instance.Paths.Scene.GetInteractiveHeatmapPath(videoChecksum)
generator := NewInteractiveHeatmapSpeedGenerator(funscriptPath, heatmapPath) generator := NewInteractiveHeatmapSpeedGenerator(funscriptPath, heatmapPath)
@@ -40,30 +39,13 @@ func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
return return
} }
median := sql.NullInt64{ median := generator.InteractiveSpeed
Int64: generator.InteractiveSpeed,
Valid: true,
}
var s *models.Scene
if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error { if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
var err error primaryFile := t.Scene.PrimaryFile()
s, err = t.TxnManager.Scene.FindByPath(ctx, t.Scene.Path) primaryFile.InteractiveSpeed = &median
return err qb := t.TxnManager.File
}); err != nil { return qb.Update(ctx, primaryFile)
logger.Error(err.Error())
return
}
if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
qb := t.TxnManager.Scene
scenePartial := models.ScenePartial{
ID: s.ID,
InteractiveSpeed: &median,
}
_, err := qb.Update(ctx, scenePartial)
return err
}); err != nil { }); err != nil {
logger.Error(err.Error()) logger.Error(err.Error())
} }
@@ -71,7 +53,8 @@ func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
} }
func (t *GenerateInteractiveHeatmapSpeedTask) shouldGenerate() bool { func (t *GenerateInteractiveHeatmapSpeedTask) shouldGenerate() bool {
if !t.Scene.Interactive { primaryFile := t.Scene.PrimaryFile()
if primaryFile == nil || !primaryFile.Interactive {
return false return false
} }
sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm) sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)

View File

@@ -13,7 +13,7 @@ import (
) )
type GenerateMarkersTask struct { type GenerateMarkersTask struct {
TxnManager models.Repository TxnManager Repository
Scene *models.Scene Scene *models.Scene
Marker *models.SceneMarker Marker *models.SceneMarker
Overwrite bool Overwrite bool
@@ -27,7 +27,7 @@ type GenerateMarkersTask struct {
func (t *GenerateMarkersTask) GetDescription() string { func (t *GenerateMarkersTask) GetDescription() string {
if t.Scene != nil { if t.Scene != nil {
return fmt.Sprintf("Generating markers for %s", t.Scene.Path) return fmt.Sprintf("Generating markers for %s", t.Scene.Path())
} else if t.Marker != nil { } else if t.Marker != nil {
return fmt.Sprintf("Generating marker preview for marker ID %d", t.Marker.ID) return fmt.Sprintf("Generating marker preview for marker ID %d", t.Marker.ID)
} }
@@ -57,7 +57,7 @@ func (t *GenerateMarkersTask) Start(ctx context.Context) {
} }
ffprobe := instance.FFProbe ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path) videoFile, err := ffprobe.NewVideoFile(t.Scene.Path())
if err != nil { if err != nil {
logger.Errorf("error reading video file: %s", err.Error()) logger.Errorf("error reading video file: %s", err.Error())
return return
@@ -83,7 +83,7 @@ func (t *GenerateMarkersTask) generateSceneMarkers(ctx context.Context) {
} }
ffprobe := instance.FFProbe ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path) videoFile, err := ffprobe.NewVideoFile(t.Scene.Path())
if err != nil { if err != nil {
logger.Errorf("error reading video file: %s", err.Error()) logger.Errorf("error reading video file: %s", err.Error())
return return
@@ -133,13 +133,9 @@ func (t *GenerateMarkersTask) generateMarker(videoFile *ffmpeg.VideoFile, scene
func (t *GenerateMarkersTask) markersNeeded(ctx context.Context) int { func (t *GenerateMarkersTask) markersNeeded(ctx context.Context) int {
markers := 0 markers := 0
var sceneMarkers []*models.SceneMarker sceneMarkers, err := t.TxnManager.SceneMarker.FindBySceneID(ctx, t.Scene.ID)
if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error { if err != nil {
var err error logger.Errorf("error finding scene markers: %s", err.Error())
sceneMarkers, err = t.TxnManager.SceneMarker.FindBySceneID(ctx, t.Scene.ID)
return err
}); err != nil {
logger.Errorf("errror finding scene markers: %s", err.Error())
return 0 return 0
} }

View File

@@ -2,23 +2,25 @@ package manager
import ( import (
"context" "context"
"database/sql"
"fmt" "fmt"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/hash/videophash" "github.com/stashapp/stash/pkg/hash/videophash"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/txn"
) )
type GeneratePhashTask struct { type GeneratePhashTask struct {
Scene models.Scene File *file.VideoFile
Overwrite bool Overwrite bool
fileNamingAlgorithm models.HashAlgorithm fileNamingAlgorithm models.HashAlgorithm
txnManager models.Repository txnManager txn.Manager
fileUpdater file.Updater
} }
func (t *GeneratePhashTask) GetDescription() string { func (t *GeneratePhashTask) GetDescription() string {
return fmt.Sprintf("Generating phash for %s", t.Scene.Path) return fmt.Sprintf("Generating phash for %s", t.File.Path)
} }
func (t *GeneratePhashTask) Start(ctx context.Context) { func (t *GeneratePhashTask) Start(ctx context.Context) {
@@ -26,34 +28,27 @@ func (t *GeneratePhashTask) Start(ctx context.Context) {
return return
} }
ffprobe := instance.FFProbe hash, err := videophash.Generate(instance.FFMPEG, t.File)
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path)
if err != nil {
logger.Errorf("error reading video file: %s", err.Error())
return
}
hash, err := videophash.Generate(instance.FFMPEG, videoFile)
if err != nil { if err != nil {
logger.Errorf("error generating phash: %s", err.Error()) logger.Errorf("error generating phash: %s", err.Error())
logErrorOutput(err) logErrorOutput(err)
return return
} }
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error { if err := txn.WithTxn(ctx, t.txnManager, func(ctx context.Context) error {
qb := t.txnManager.Scene qb := t.fileUpdater
hashValue := sql.NullInt64{Int64: int64(*hash), Valid: true} hashValue := int64(*hash)
scenePartial := models.ScenePartial{ t.File.Fingerprints = t.File.Fingerprints.AppendUnique(file.Fingerprint{
ID: t.Scene.ID, Type: file.FingerprintTypePhash,
Phash: &hashValue, Fingerprint: hashValue,
} })
_, err := qb.Update(ctx, scenePartial)
return err return qb.Update(ctx, t.File)
}); err != nil { }); err != nil {
logger.Error(err.Error()) logger.Error(err.Error())
} }
} }
func (t *GeneratePhashTask) shouldGenerate() bool { func (t *GeneratePhashTask) shouldGenerate() bool {
return t.Overwrite || !t.Scene.Phash.Valid return t.Overwrite || t.File.Fingerprints.Get(file.FingerprintTypePhash) == nil
} }

View File

@@ -23,7 +23,7 @@ type GeneratePreviewTask struct {
} }
func (t *GeneratePreviewTask) GetDescription() string { func (t *GeneratePreviewTask) GetDescription() string {
return fmt.Sprintf("Generating preview for %s", t.Scene.Path) return fmt.Sprintf("Generating preview for %s", t.Scene.Path())
} }
func (t *GeneratePreviewTask) Start(ctx context.Context) { func (t *GeneratePreviewTask) Start(ctx context.Context) {
@@ -32,7 +32,7 @@ func (t *GeneratePreviewTask) Start(ctx context.Context) {
} }
ffprobe := instance.FFProbe ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path) videoFile, err := ffprobe.NewVideoFile(t.Scene.Path())
if err != nil { if err != nil {
logger.Errorf("error reading video file: %v", err) logger.Errorf("error reading video file: %v", err)
return return
@@ -55,7 +55,7 @@ func (t *GeneratePreviewTask) Start(ctx context.Context) {
} }
func (t GeneratePreviewTask) generateVideo(videoChecksum string, videoDuration float64) error { func (t GeneratePreviewTask) generateVideo(videoChecksum string, videoDuration float64) error {
videoFilename := t.Scene.Path videoFilename := t.Scene.Path()
if err := t.generator.PreviewVideo(context.TODO(), videoFilename, videoDuration, videoChecksum, t.Options, true); err != nil { if err := t.generator.PreviewVideo(context.TODO(), videoFilename, videoDuration, videoChecksum, t.Options, true); err != nil {
logger.Warnf("[generator] failed generating scene preview, trying fallback") logger.Warnf("[generator] failed generating scene preview, trying fallback")
@@ -68,7 +68,7 @@ func (t GeneratePreviewTask) generateVideo(videoChecksum string, videoDuration f
} }
func (t GeneratePreviewTask) generateWebp(videoChecksum string) error { func (t GeneratePreviewTask) generateWebp(videoChecksum string) error {
videoFilename := t.Scene.Path videoFilename := t.Scene.Path()
return t.generator.PreviewWebp(context.TODO(), videoFilename, videoChecksum) return t.generator.PreviewWebp(context.TODO(), videoFilename, videoChecksum)
} }

View File

@@ -5,7 +5,6 @@ import (
"fmt" "fmt"
"io" "io"
"os" "os"
"time"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
@@ -17,11 +16,11 @@ type GenerateScreenshotTask struct {
Scene models.Scene Scene models.Scene
ScreenshotAt *float64 ScreenshotAt *float64
fileNamingAlgorithm models.HashAlgorithm fileNamingAlgorithm models.HashAlgorithm
txnManager models.Repository txnManager Repository
} }
func (t *GenerateScreenshotTask) Start(ctx context.Context) { func (t *GenerateScreenshotTask) Start(ctx context.Context) {
scenePath := t.Scene.Path scenePath := t.Scene.Path()
ffprobe := instance.FFProbe ffprobe := instance.FFProbe
probeResult, err := ffprobe.NewVideoFile(scenePath) probeResult, err := ffprobe.NewVideoFile(scenePath)
@@ -76,11 +75,7 @@ func (t *GenerateScreenshotTask) Start(ctx context.Context) {
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error { if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
qb := t.txnManager.Scene qb := t.txnManager.Scene
updatedTime := time.Now() updatedScene := models.NewScenePartial()
updatedScene := models.ScenePartial{
ID: t.Scene.ID,
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
}
if err := scene.SetScreenshot(instance.Paths, checksum, coverImageData); err != nil { if err := scene.SetScreenshot(instance.Paths, checksum, coverImageData); err != nil {
return fmt.Errorf("error writing screenshot: %v", err) return fmt.Errorf("error writing screenshot: %v", err)
@@ -92,7 +87,7 @@ func (t *GenerateScreenshotTask) Start(ctx context.Context) {
} }
// update the scene with the update date // update the scene with the update date
_, err = qb.Update(ctx, updatedScene) _, err = qb.UpdatePartial(ctx, t.Scene.ID, updatedScene)
if err != nil { if err != nil {
return fmt.Errorf("error updating scene: %v", err) return fmt.Errorf("error updating scene: %v", err)
} }

View File

@@ -16,7 +16,7 @@ type GenerateSpriteTask struct {
} }
func (t *GenerateSpriteTask) GetDescription() string { func (t *GenerateSpriteTask) GetDescription() string {
return fmt.Sprintf("Generating sprites for %s", t.Scene.Path) return fmt.Sprintf("Generating sprites for %s", t.Scene.Path())
} }
func (t *GenerateSpriteTask) Start(ctx context.Context) { func (t *GenerateSpriteTask) Start(ctx context.Context) {
@@ -25,7 +25,7 @@ func (t *GenerateSpriteTask) Start(ctx context.Context) {
} }
ffprobe := instance.FFProbe ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path) videoFile, err := ffprobe.NewVideoFile(t.Scene.Path())
if err != nil { if err != nil {
logger.Errorf("error reading video file: %s", err.Error()) logger.Errorf("error reading video file: %s", err.Error())
return return

View File

@@ -51,7 +51,8 @@ func (j *IdentifyJob) Execute(ctx context.Context, progress *job.Progress) {
// if scene ids provided, use those // if scene ids provided, use those
// otherwise, batch query for all scenes - ordering by path // otherwise, batch query for all scenes - ordering by path
if err := txn.WithTxn(ctx, instance.Repository, func(ctx context.Context) error { // don't use a transaction to query scenes
if err := txn.WithDatabase(ctx, instance.Repository, func(ctx context.Context) error {
if len(j.input.SceneIDs) == 0 { if len(j.input.SceneIDs) == 0 {
return j.identifyAllScenes(ctx, sources) return j.identifyAllScenes(ctx, sources)
} }
@@ -130,7 +131,7 @@ func (j *IdentifyJob) identifyScene(ctx context.Context, s *models.Scene, source
} }
var taskError error var taskError error
j.progress.ExecuteTask("Identifying "+s.Path, func() { j.progress.ExecuteTask("Identifying "+s.Path(), func() {
task := identify.SceneIdentifier{ task := identify.SceneIdentifier{
SceneReaderUpdater: instance.Repository.Scene, SceneReaderUpdater: instance.Repository.Scene,
StudioCreator: instance.Repository.Studio, StudioCreator: instance.Repository.Studio,
@@ -139,7 +140,7 @@ func (j *IdentifyJob) identifyScene(ctx context.Context, s *models.Scene, source
DefaultOptions: j.input.Options, DefaultOptions: j.input.Options,
Sources: sources, Sources: sources,
ScreenshotSetter: &scene.PathsScreenshotSetter{ ScreenshotSetter: &scene.PathsCoverSetter{
Paths: instance.Paths, Paths: instance.Paths,
FileNamingAlgorithm: instance.Config.GetVideoFileNamingAlgorithm(), FileNamingAlgorithm: instance.Config.GetVideoFileNamingAlgorithm(),
}, },
@@ -150,7 +151,7 @@ func (j *IdentifyJob) identifyScene(ctx context.Context, s *models.Scene, source
}) })
if taskError != nil { if taskError != nil {
logger.Errorf("Error encountered identifying %s: %v", s.Path, taskError) logger.Errorf("Error encountered identifying %s: %v", s.Path(), taskError)
} }
j.progress.Increment() j.progress.Increment()

View File

@@ -28,7 +28,7 @@ import (
) )
type ImportTask struct { type ImportTask struct {
txnManager models.Repository txnManager Repository
json jsonUtils json jsonUtils
BaseDir string BaseDir string

View File

@@ -14,13 +14,13 @@ type MigrateHashTask struct {
// Start starts the task. // Start starts the task.
func (t *MigrateHashTask) Start() { func (t *MigrateHashTask) Start() {
if !t.Scene.OSHash.Valid || !t.Scene.Checksum.Valid { if t.Scene.OSHash() == "" || t.Scene.Checksum() == "" {
// nothing to do // nothing to do
return return
} }
oshash := t.Scene.OSHash.String oshash := t.Scene.OSHash()
checksum := t.Scene.Checksum.String checksum := t.Scene.Checksum()
oldHash := oshash oldHash := oshash
newHash := checksum newHash := checksum

View File

@@ -4,327 +4,279 @@ import (
"context" "context"
"errors" "errors"
"fmt" "fmt"
"os" "io/fs"
"path/filepath" "path/filepath"
"regexp"
"time" "time"
"github.com/remeh/sizedwaitgroup"
"github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/file/video"
"github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/job"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/scene/generate" "github.com/stashapp/stash/pkg/scene/generate"
"github.com/stashapp/stash/pkg/utils"
) )
const scanQueueSize = 200000 type scanner interface {
Scan(ctx context.Context, handlers []file.Handler, options file.ScanOptions, progressReporter file.ProgressReporter)
}
type ScanJob struct { type ScanJob struct {
txnManager models.Repository scanner scanner
input ScanMetadataInput input ScanMetadataInput
subscriptions *subscriptionManager subscriptions *subscriptionManager
} }
type scanFile struct {
path string
info os.FileInfo
caseSensitiveFs bool
}
func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) { func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) {
input := j.input input := j.input
paths := getScanPaths(input.Paths)
if job.IsCancelled(ctx) { if job.IsCancelled(ctx) {
logger.Info("Stopping due to user request") logger.Info("Stopping due to user request")
return return
} }
sp := getScanPaths(input.Paths)
paths := make([]string, len(sp))
for i, p := range sp {
paths[i] = p.Path
}
start := time.Now() start := time.Now()
config := config.GetInstance()
parallelTasks := config.GetParallelTasksWithAutoDetection()
logger.Infof("Scan started with %d parallel tasks", parallelTasks) const taskQueueSize = 200000
taskQueue := job.NewTaskQueue(ctx, progress, taskQueueSize, instance.Config.GetParallelTasksWithAutoDetection())
fileQueue := make(chan scanFile, scanQueueSize) j.scanner.Scan(ctx, getScanHandlers(j.input, taskQueue, progress), file.ScanOptions{
go func() { Paths: paths,
total, newFiles := j.queueFiles(ctx, paths, fileQueue, parallelTasks) ScanFilters: []file.PathFilter{newScanFilter(instance.Config)},
ZipFileExtensions: instance.Config.GetGalleryExtensions(),
ParallelTasks: instance.Config.GetParallelTasksWithAutoDetection(),
}, progress)
if !job.IsCancelled(ctx) { taskQueue.Close()
progress.SetTotal(total)
logger.Infof("Finished counting files. Total files to scan: %d, %d new files found", total, newFiles)
}
}()
wg := sizedwaitgroup.New(parallelTasks)
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
calculateMD5 := config.IsCalculateMD5()
var err error
var galleries []string
mutexManager := utils.NewMutexManager()
for f := range fileQueue {
if job.IsCancelled(ctx) { if job.IsCancelled(ctx) {
break logger.Info("Stopping due to user request")
} return
if isGallery(f.path) {
galleries = append(galleries, f.path)
}
if err := instance.Paths.Generated.EnsureTmpDir(); err != nil {
logger.Warnf("couldn't create temporary directory: %v", err)
}
wg.Add()
task := ScanTask{
TxnManager: j.txnManager,
file: file.FSFile(f.path, f.info),
UseFileMetadata: input.UseFileMetadata,
StripFileExtension: input.StripFileExtension,
fileNamingAlgorithm: fileNamingAlgo,
calculateMD5: calculateMD5,
GeneratePreview: input.ScanGeneratePreviews,
GenerateImagePreview: input.ScanGenerateImagePreviews,
GenerateSprite: input.ScanGenerateSprites,
GeneratePhash: input.ScanGeneratePhashes,
GenerateThumbnails: input.ScanGenerateThumbnails,
progress: progress,
CaseSensitiveFs: f.caseSensitiveFs,
mutexManager: mutexManager,
}
go func() {
task.Start(ctx)
wg.Done()
progress.Increment()
}()
}
wg.Wait()
if err := instance.Paths.Generated.EmptyTmpDir(); err != nil {
logger.Warnf("couldn't empty temporary directory: %v", err)
} }
elapsed := time.Since(start) elapsed := time.Since(start)
logger.Info(fmt.Sprintf("Scan finished (%s)", elapsed)) logger.Info(fmt.Sprintf("Scan finished (%s)", elapsed))
if job.IsCancelled(ctx) {
logger.Info("Stopping due to user request")
return
}
if err != nil {
return
}
progress.ExecuteTask("Associating galleries", func() {
for _, path := range galleries {
wg.Add()
task := ScanTask{
TxnManager: j.txnManager,
file: file.FSFile(path, nil), // hopefully info is not needed
UseFileMetadata: false,
}
go task.associateGallery(ctx, &wg)
wg.Wait()
}
logger.Info("Finished gallery association")
})
j.subscriptions.notify() j.subscriptions.notify()
} }
func (j *ScanJob) queueFiles(ctx context.Context, paths []*config.StashConfig, scanQueue chan<- scanFile, parallelTasks int) (total int, newFiles int) { type scanFilter struct {
defer close(scanQueue) stashPaths []*config.StashConfig
generatedPath string
var minModTime time.Time vidExt []string
if j.input.Filter != nil && j.input.Filter.MinModTime != nil { imgExt []string
minModTime = *j.input.Filter.MinModTime zipExt []string
videoExcludeRegex []*regexp.Regexp
imageExcludeRegex []*regexp.Regexp
} }
wg := sizedwaitgroup.New(parallelTasks) func newScanFilter(c *config.Instance) *scanFilter {
return &scanFilter{
for _, sp := range paths { stashPaths: c.GetStashPaths(),
csFs, er := fsutil.IsFsPathCaseSensitive(sp.Path) generatedPath: c.GetGeneratedPath(),
if er != nil { vidExt: c.GetVideoExtensions(),
logger.Warnf("Cannot determine fs case sensitivity: %s", er.Error()) imgExt: c.GetImageExtensions(),
zipExt: c.GetGalleryExtensions(),
videoExcludeRegex: generateRegexps(c.GetExcludes()),
imageExcludeRegex: generateRegexps(c.GetImageExcludes()),
}
} }
err := walkFilesToScan(sp, func(path string, info os.FileInfo, err error) error { func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) bool {
// check stop if fsutil.IsPathInDir(f.generatedPath, path) {
if job.IsCancelled(ctx) { return false
return context.Canceled
} }
// exit early on cutoff isVideoFile := fsutil.MatchExtension(path, f.vidExt)
if info.Mode().IsRegular() && info.ModTime().Before(minModTime) { isImageFile := fsutil.MatchExtension(path, f.imgExt)
return nil isZipFile := fsutil.MatchExtension(path, f.zipExt)
// handle caption files
if fsutil.MatchExtension(path, video.CaptionExts) {
// we don't include caption files in the file scan, but we do need
// to handle them
video.AssociateCaptions(ctx, path, instance.Repository, instance.Database.File, instance.Database.File)
return false
} }
wg.Add() if !info.IsDir() && !isVideoFile && !isImageFile && !isZipFile {
return false
go func() {
defer wg.Done()
// #1756 - skip zero length files and directories
if info.IsDir() {
return
} }
if info.Size() == 0 { // #1756 - skip zero length files
if !info.IsDir() && info.Size() == 0 {
logger.Infof("Skipping zero-length file: %s", path) logger.Infof("Skipping zero-length file: %s", path)
return return false
} }
total++ s := getStashFromDirPath(f.stashPaths, path)
if !j.doesPathExist(ctx, path) {
newFiles++
}
scanQueue <- scanFile{
path: path,
info: info,
caseSensitiveFs: csFs,
}
}()
return nil
})
wg.Wait()
if err != nil && !errors.Is(err, context.Canceled) {
logger.Errorf("Error encountered queuing files to scan: %s", err.Error())
return
}
}
return
}
func (j *ScanJob) doesPathExist(ctx context.Context, path string) bool {
config := config.GetInstance()
vidExt := config.GetVideoExtensions()
imgExt := config.GetImageExtensions()
gExt := config.GetGalleryExtensions()
ret := false
txnErr := j.txnManager.WithTxn(ctx, func(ctx context.Context) error {
r := j.txnManager
switch {
case fsutil.MatchExtension(path, gExt):
g, _ := r.Gallery.FindByPath(ctx, path)
if g != nil {
ret = true
}
case fsutil.MatchExtension(path, vidExt):
s, _ := r.Scene.FindByPath(ctx, path)
if s != nil {
ret = true
}
case fsutil.MatchExtension(path, imgExt):
i, _ := r.Image.FindByPath(ctx, path)
if i != nil {
ret = true
}
}
return nil
})
if txnErr != nil {
logger.Warnf("error checking if file exists in database: %v", txnErr)
}
return ret
}
type ScanTask struct {
TxnManager models.Repository
file file.SourceFile
UseFileMetadata bool
StripFileExtension bool
calculateMD5 bool
fileNamingAlgorithm models.HashAlgorithm
GenerateSprite bool
GeneratePhash bool
GeneratePreview bool
GenerateImagePreview bool
GenerateThumbnails bool
zipGallery *models.Gallery
progress *job.Progress
CaseSensitiveFs bool
mutexManager *utils.MutexManager
}
func (t *ScanTask) Start(ctx context.Context) {
var s *models.Scene
path := t.file.Path()
t.progress.ExecuteTask("Scanning "+path, func() {
switch {
case isGallery(path):
t.scanGallery(ctx)
case isVideo(path):
s = t.scanScene(ctx)
case isImage(path):
t.scanImage(ctx)
case isCaptions(path):
t.associateCaptions(ctx)
}
})
if s == nil { if s == nil {
return return false
} }
// Handle the case of a scene // shortcut: skip the directory entirely if it matches both exclusion patterns
iwg := sizedwaitgroup.New(2) // add a trailing separator so that it correctly matches against patterns like path/.*
pathExcludeTest := path + string(filepath.Separator)
if (s.ExcludeVideo || matchFileRegex(pathExcludeTest, f.videoExcludeRegex)) && (s.ExcludeImage || matchFileRegex(pathExcludeTest, f.imageExcludeRegex)) {
return false
}
if t.GenerateSprite { if isVideoFile && (s.ExcludeVideo || matchFileRegex(path, f.videoExcludeRegex)) {
iwg.Add() return false
} else if (isImageFile || isZipFile) && s.ExcludeImage || matchFileRegex(path, f.imageExcludeRegex) {
return false
}
go t.progress.ExecuteTask(fmt.Sprintf("Generating sprites for %s", path), func() { return true
}
type scanConfig struct {
isGenerateThumbnails bool
}
func (c *scanConfig) GetCreateGalleriesFromFolders() bool {
return instance.Config.GetCreateGalleriesFromFolders()
}
func (c *scanConfig) IsGenerateThumbnails() bool {
return c.isGenerateThumbnails
}
func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progress *job.Progress) []file.Handler {
db := instance.Database
pluginCache := instance.PluginCache
return []file.Handler{
&file.FilteredHandler{
Filter: file.FilterFunc(imageFileFilter),
Handler: &image.ScanHandler{
CreatorUpdater: db.Image,
GalleryFinder: db.Gallery,
ThumbnailGenerator: &imageThumbnailGenerator{},
ScanConfig: &scanConfig{
isGenerateThumbnails: options.ScanGenerateThumbnails,
},
PluginCache: pluginCache,
},
},
&file.FilteredHandler{
Filter: file.FilterFunc(galleryFileFilter),
Handler: &gallery.ScanHandler{
CreatorUpdater: db.Gallery,
SceneFinderUpdater: db.Scene,
PluginCache: pluginCache,
},
},
&file.FilteredHandler{
Filter: file.FilterFunc(videoFileFilter),
Handler: &scene.ScanHandler{
CreatorUpdater: db.Scene,
PluginCache: pluginCache,
CoverGenerator: &coverGenerator{},
ScanGenerator: &sceneGenerators{
input: options,
taskQueue: taskQueue,
progress: progress,
},
},
},
}
}
type imageThumbnailGenerator struct{}
func (g *imageThumbnailGenerator) GenerateThumbnail(ctx context.Context, i *models.Image, f *file.ImageFile) error {
thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum(), models.DefaultGthumbWidth)
exists, _ := fsutil.FileExists(thumbPath)
if exists {
return nil
}
if f.Height <= models.DefaultGthumbWidth && f.Width <= models.DefaultGthumbWidth {
return nil
}
logger.Debugf("Generating thumbnail for %s", f.Path)
encoder := image.NewThumbnailEncoder(instance.FFMPEG)
data, err := encoder.GetThumbnail(f, models.DefaultGthumbWidth)
if err != nil {
// don't log for animated images
if !errors.Is(err, image.ErrNotSupportedForThumbnail) {
return fmt.Errorf("getting thumbnail for image %s: %w", f.Path, err)
}
return nil
}
err = fsutil.WriteFile(thumbPath, data)
if err != nil {
return fmt.Errorf("writing thumbnail for image %s: %w", f.Path, err)
}
return nil
}
type sceneGenerators struct {
input ScanMetadataInput
taskQueue *job.TaskQueue
progress *job.Progress
}
func (g *sceneGenerators) Generate(ctx context.Context, s *models.Scene, f *file.VideoFile) error {
const overwrite = false
progress := g.progress
t := g.input
path := f.Path
config := instance.Config
fileNamingAlgorithm := config.GetVideoFileNamingAlgorithm()
if t.ScanGenerateSprites {
progress.AddTotal(1)
g.taskQueue.Add(fmt.Sprintf("Generating sprites for %s", path), func(ctx context.Context) {
taskSprite := GenerateSpriteTask{ taskSprite := GenerateSpriteTask{
Scene: *s, Scene: *s,
Overwrite: false, Overwrite: overwrite,
fileNamingAlgorithm: t.fileNamingAlgorithm, fileNamingAlgorithm: fileNamingAlgorithm,
} }
taskSprite.Start(ctx) taskSprite.Start(ctx)
iwg.Done() progress.Increment()
}) })
} }
if t.GeneratePhash { if t.ScanGeneratePhashes {
iwg.Add() progress.AddTotal(1)
g.taskQueue.Add(fmt.Sprintf("Generating phash for %s", path), func(ctx context.Context) {
go t.progress.ExecuteTask(fmt.Sprintf("Generating phash for %s", path), func() {
taskPhash := GeneratePhashTask{ taskPhash := GeneratePhashTask{
Scene: *s, File: f,
fileNamingAlgorithm: t.fileNamingAlgorithm, fileNamingAlgorithm: fileNamingAlgorithm,
txnManager: t.TxnManager, txnManager: instance.Database,
fileUpdater: instance.Database.File,
Overwrite: overwrite,
} }
taskPhash.Start(ctx) taskPhash.Start(ctx)
iwg.Done() progress.Increment()
}) })
} }
if t.GeneratePreview { if t.ScanGeneratePreviews {
iwg.Add() progress.AddTotal(1)
g.taskQueue.Add(fmt.Sprintf("Generating preview for %s", path), func(ctx context.Context) {
go t.progress.ExecuteTask(fmt.Sprintf("Generating preview for %s", path), func() {
options := getGeneratePreviewOptions(GeneratePreviewOptionsInput{}) options := getGeneratePreviewOptions(GeneratePreviewOptionsInput{})
const overwrite = false
g := &generate.Generator{ g := &generate.Generator{
Encoder: instance.FFMPEG, Encoder: instance.FFMPEG,
@@ -336,73 +288,16 @@ func (t *ScanTask) Start(ctx context.Context) {
taskPreview := GeneratePreviewTask{ taskPreview := GeneratePreviewTask{
Scene: *s, Scene: *s,
ImagePreview: t.GenerateImagePreview, ImagePreview: t.ScanGenerateImagePreviews,
Options: options, Options: options,
Overwrite: overwrite, Overwrite: overwrite,
fileNamingAlgorithm: t.fileNamingAlgorithm, fileNamingAlgorithm: fileNamingAlgorithm,
generator: g, generator: g,
} }
taskPreview.Start(ctx) taskPreview.Start(ctx)
iwg.Done() progress.Increment()
}) })
} }
iwg.Wait()
}
func walkFilesToScan(s *config.StashConfig, f filepath.WalkFunc) error {
config := config.GetInstance()
vidExt := config.GetVideoExtensions()
imgExt := config.GetImageExtensions()
gExt := config.GetGalleryExtensions()
capExt := scene.CaptionExts
excludeVidRegex := generateRegexps(config.GetExcludes())
excludeImgRegex := generateRegexps(config.GetImageExcludes())
// don't scan zip images directly
if file.IsZipPath(s.Path) {
logger.Warnf("Cannot rescan zip image %s. Rescan zip gallery instead.", s.Path)
return nil return nil
} }
generatedPath := config.GetGeneratedPath()
return fsutil.SymWalk(s.Path, func(path string, info os.FileInfo, err error) error {
if err != nil {
logger.Warnf("error scanning %s: %s", path, err.Error())
return nil
}
if info.IsDir() {
// #1102 - ignore files in generated path
if fsutil.IsPathInDir(generatedPath, path) {
return filepath.SkipDir
}
// shortcut: skip the directory entirely if it matches both exclusion patterns
// add a trailing separator so that it correctly matches against patterns like path/.*
pathExcludeTest := path + string(filepath.Separator)
if (s.ExcludeVideo || matchFileRegex(pathExcludeTest, excludeVidRegex)) && (s.ExcludeImage || matchFileRegex(pathExcludeTest, excludeImgRegex)) {
return filepath.SkipDir
}
return nil
}
if !s.ExcludeVideo && fsutil.MatchExtension(path, vidExt) && !matchFileRegex(path, excludeVidRegex) {
return f(path, info, err)
}
if !s.ExcludeImage {
if (fsutil.MatchExtension(path, imgExt) || fsutil.MatchExtension(path, gExt)) && !matchFileRegex(path, excludeImgRegex) {
return f(path, info, err)
}
}
if fsutil.MatchExtension(path, capExt) {
return f(path, info, err)
}
return nil
})
}

View File

@@ -1,170 +1,160 @@
package manager package manager
import ( // func (t *ScanTask) scanGallery(ctx context.Context) {
"archive/zip" // var g *models.Gallery
"context" // path := t.file.Path()
"fmt" // images := 0
"path/filepath" // scanImages := false
"strings"
"github.com/remeh/sizedwaitgroup" // if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
"github.com/stashapp/stash/internal/manager/config" // var err error
"github.com/stashapp/stash/pkg/file" // g, err = t.TxnManager.Gallery.FindByPath(ctx, path)
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
)
func (t *ScanTask) scanGallery(ctx context.Context) { // if g != nil && err == nil {
var g *models.Gallery // images, err = t.TxnManager.Image.CountByGalleryID(ctx, g.ID)
path := t.file.Path() // if err != nil {
images := 0 // return fmt.Errorf("error getting images for zip gallery %s: %s", path, err.Error())
scanImages := false // }
// }
if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error { // return err
var err error // }); err != nil {
g, err = t.TxnManager.Gallery.FindByPath(ctx, path) // logger.Error(err.Error())
// return
// }
if g != nil && err == nil { // scanner := gallery.Scanner{
images, err = t.TxnManager.Image.CountByGalleryID(ctx, g.ID) // Scanner: gallery.FileScanner(&file.FSHasher{}),
if err != nil { // ImageExtensions: instance.Config.GetImageExtensions(),
return fmt.Errorf("error getting images for zip gallery %s: %s", path, err.Error()) // StripFileExtension: t.StripFileExtension,
} // CaseSensitiveFs: t.CaseSensitiveFs,
} // CreatorUpdater: t.TxnManager.Gallery,
// Paths: instance.Paths,
// PluginCache: instance.PluginCache,
// MutexManager: t.mutexManager,
// }
return err // var err error
}); err != nil { // if g != nil {
logger.Error(err.Error()) // g, scanImages, err = scanner.ScanExisting(ctx, g, t.file)
return // if err != nil {
} // logger.Error(err.Error())
// return
// }
scanner := gallery.Scanner{ // // scan the zip files if the gallery has no images
Scanner: gallery.FileScanner(&file.FSHasher{}), // scanImages = scanImages || images == 0
ImageExtensions: instance.Config.GetImageExtensions(), // } else {
StripFileExtension: t.StripFileExtension, // g, scanImages, err = scanner.ScanNew(ctx, t.file)
CaseSensitiveFs: t.CaseSensitiveFs, // if err != nil {
CreatorUpdater: t.TxnManager.Gallery, // logger.Error(err.Error())
Paths: instance.Paths, // }
PluginCache: instance.PluginCache, // }
MutexManager: t.mutexManager,
}
var err error // if g != nil {
if g != nil { // if scanImages {
g, scanImages, err = scanner.ScanExisting(ctx, g, t.file) // t.scanZipImages(ctx, g)
if err != nil { // } else {
logger.Error(err.Error()) // // in case thumbnails have been deleted, regenerate them
return // t.regenerateZipImages(ctx, g)
} // }
// }
// scan the zip files if the gallery has no images // }
scanImages = scanImages || images == 0
} else {
g, scanImages, err = scanner.ScanNew(ctx, t.file)
if err != nil {
logger.Error(err.Error())
}
}
if g != nil {
if scanImages {
t.scanZipImages(ctx, g)
} else {
// in case thumbnails have been deleted, regenerate them
t.regenerateZipImages(ctx, g)
}
}
}
// associates a gallery to a scene with the same basename // associates a gallery to a scene with the same basename
func (t *ScanTask) associateGallery(ctx context.Context, wg *sizedwaitgroup.SizedWaitGroup) { // func (t *ScanTask) associateGallery(ctx context.Context, wg *sizedwaitgroup.SizedWaitGroup) {
path := t.file.Path() // path := t.file.Path()
if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error { // if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
r := t.TxnManager // r := t.TxnManager
qb := r.Gallery // qb := r.Gallery
sqb := r.Scene // sqb := r.Scene
g, err := qb.FindByPath(ctx, path) // g, err := qb.FindByPath(ctx, path)
if err != nil { // if err != nil {
return err // return err
} // }
if g == nil { // if g == nil {
// associate is run after scan is finished // // associate is run after scan is finished
// should only happen if gallery is a directory or an io error occurs during hashing // // should only happen if gallery is a directory or an io error occurs during hashing
logger.Warnf("associate: gallery %s not found in DB", path) // logger.Warnf("associate: gallery %s not found in DB", path)
return nil // return nil
} // }
basename := strings.TrimSuffix(path, filepath.Ext(path)) // basename := strings.TrimSuffix(path, filepath.Ext(path))
var relatedFiles []string // var relatedFiles []string
vExt := config.GetInstance().GetVideoExtensions() // vExt := config.GetInstance().GetVideoExtensions()
// make a list of media files that can be related to the gallery // // make a list of media files that can be related to the gallery
for _, ext := range vExt { // for _, ext := range vExt {
related := basename + "." + ext // related := basename + "." + ext
// exclude gallery extensions from the related files // // exclude gallery extensions from the related files
if !isGallery(related) { // if !isGallery(related) {
relatedFiles = append(relatedFiles, related) // relatedFiles = append(relatedFiles, related)
} // }
} // }
for _, scenePath := range relatedFiles { // for _, scenePath := range relatedFiles {
scene, _ := sqb.FindByPath(ctx, scenePath) // scene, _ := sqb.FindByPath(ctx, scenePath)
// found related Scene // // found related Scene
if scene != nil { // if scene != nil {
sceneGalleries, _ := sqb.FindByGalleryID(ctx, g.ID) // check if gallery is already associated to the scene // sceneGalleries, _ := sqb.FindByGalleryID(ctx, g.ID) // check if gallery is already associated to the scene
isAssoc := false // isAssoc := false
for _, sg := range sceneGalleries { // for _, sg := range sceneGalleries {
if scene.ID == sg.ID { // if scene.ID == sg.ID {
isAssoc = true // isAssoc = true
break // break
} // }
} // }
if !isAssoc { // if !isAssoc {
logger.Infof("associate: Gallery %s is related to scene: %d", path, scene.ID) // logger.Infof("associate: Gallery %s is related to scene: %d", path, scene.ID)
if err := sqb.UpdateGalleries(ctx, scene.ID, []int{g.ID}); err != nil { // if _, err := sqb.UpdatePartial(ctx, scene.ID, models.ScenePartial{
return err // GalleryIDs: &models.UpdateIDs{
} // IDs: []int{g.ID},
} // Mode: models.RelationshipUpdateModeAdd,
} // },
} // }); err != nil {
return nil // return err
}); err != nil { // }
logger.Error(err.Error()) // }
} // }
wg.Done() // }
} // return nil
// }); err != nil {
// logger.Error(err.Error())
// }
// wg.Done()
// }
func (t *ScanTask) scanZipImages(ctx context.Context, zipGallery *models.Gallery) { // func (t *ScanTask) scanZipImages(ctx context.Context, zipGallery *models.Gallery) {
err := walkGalleryZip(zipGallery.Path.String, func(f *zip.File) error { // err := walkGalleryZip(*zipGallery.Path, func(f *zip.File) error {
// copy this task and change the filename // // copy this task and change the filename
subTask := *t // subTask := *t
// filepath is the zip file and the internal file name, separated by a null byte // // filepath is the zip file and the internal file name, separated by a null byte
subTask.file = file.ZipFile(zipGallery.Path.String, f) // subTask.file = file.ZipFile(*zipGallery.Path, f)
subTask.zipGallery = zipGallery // subTask.zipGallery = zipGallery
// run the subtask and wait for it to complete // // run the subtask and wait for it to complete
subTask.Start(ctx) // subTask.Start(ctx)
return nil // return nil
}) // })
if err != nil { // if err != nil {
logger.Warnf("failed to scan zip file images for %s: %s", zipGallery.Path.String, err.Error()) // logger.Warnf("failed to scan zip file images for %s: %s", *zipGallery.Path, err.Error())
} // }
} // }
func (t *ScanTask) regenerateZipImages(ctx context.Context, zipGallery *models.Gallery) { // func (t *ScanTask) regenerateZipImages(ctx context.Context, zipGallery *models.Gallery) {
var images []*models.Image // var images []*models.Image
if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error { // if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
iqb := t.TxnManager.Image // iqb := t.TxnManager.Image
var err error // var err error
images, err = iqb.FindByGalleryID(ctx, zipGallery.ID) // images, err = iqb.FindByGalleryID(ctx, zipGallery.ID)
return err // return err
}); err != nil { // }); err != nil {
logger.Warnf("failed to find gallery images: %s", err.Error()) // logger.Warnf("failed to find gallery images: %s", err.Error())
return // return
} // }
for _, img := range images { // for _, img := range images {
t.generateThumbnail(img) // t.generateThumbnail(img)
} // }
} // }

View File

@@ -1,184 +1,179 @@
package manager package manager
import ( // import (
"context" // "context"
"database/sql" // "errors"
"errors" // "os/exec"
"os/exec" // "path/filepath"
"path/filepath" // "time"
"time"
"github.com/stashapp/stash/internal/manager/config" // "github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/file" // "github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/fsutil" // "github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/gallery" // "github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/hash/md5" // "github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/image" // "github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/logger" // "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" // "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin" // "github.com/stashapp/stash/pkg/plugin"
) // )
func (t *ScanTask) scanImage(ctx context.Context) { // func (t *ScanTask) scanImage(ctx context.Context) {
var i *models.Image // var i *models.Image
path := t.file.Path() // path := t.file.Path()
if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error { // if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
var err error // var err error
i, err = t.TxnManager.Image.FindByPath(ctx, path) // i, err = t.TxnManager.Image.FindByPath(ctx, path)
return err // return err
}); err != nil { // }); err != nil {
logger.Error(err.Error()) // logger.Error(err.Error())
return // return
} // }
scanner := image.Scanner{ // scanner := image.Scanner{
Scanner: image.FileScanner(&file.FSHasher{}), // Scanner: image.FileScanner(&file.FSHasher{}),
StripFileExtension: t.StripFileExtension, // StripFileExtension: t.StripFileExtension,
TxnManager: t.TxnManager, // TxnManager: t.TxnManager,
CreatorUpdater: t.TxnManager.Image, // CreatorUpdater: t.TxnManager.Image,
CaseSensitiveFs: t.CaseSensitiveFs, // CaseSensitiveFs: t.CaseSensitiveFs,
Paths: GetInstance().Paths, // Paths: GetInstance().Paths,
PluginCache: instance.PluginCache, // PluginCache: instance.PluginCache,
MutexManager: t.mutexManager, // MutexManager: t.mutexManager,
} // }
var err error // var err error
if i != nil { // if i != nil {
i, err = scanner.ScanExisting(ctx, i, t.file) // i, err = scanner.ScanExisting(ctx, i, t.file)
if err != nil { // if err != nil {
logger.Error(err.Error()) // logger.Error(err.Error())
return // return
} // }
} else { // } else {
i, err = scanner.ScanNew(ctx, t.file) // i, err = scanner.ScanNew(ctx, t.file)
if err != nil { // if err != nil {
logger.Error(err.Error()) // logger.Error(err.Error())
return // return
} // }
if i != nil { // if i != nil {
if t.zipGallery != nil { // if t.zipGallery != nil {
// associate with gallery // // associate with gallery
if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error { // if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
return gallery.AddImage(ctx, t.TxnManager.Gallery, t.zipGallery.ID, i.ID) // return gallery.AddImage(ctx, t.TxnManager.Gallery, t.zipGallery.ID, i.ID)
}); err != nil { // }); err != nil {
logger.Error(err.Error()) // logger.Error(err.Error())
return // return
} // }
} else if config.GetInstance().GetCreateGalleriesFromFolders() { // } else if config.GetInstance().GetCreateGalleriesFromFolders() {
// create gallery from folder or associate with existing gallery // // create gallery from folder or associate with existing gallery
logger.Infof("Associating image %s with folder gallery", i.Path) // logger.Infof("Associating image %s with folder gallery", i.Path)
var galleryID int // var galleryID int
var isNewGallery bool // var isNewGallery bool
if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error { // if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
var err error // var err error
galleryID, isNewGallery, err = t.associateImageWithFolderGallery(ctx, i.ID, t.TxnManager.Gallery) // galleryID, isNewGallery, err = t.associateImageWithFolderGallery(ctx, i.ID, t.TxnManager.Gallery)
return err // return err
}); err != nil { // }); err != nil {
logger.Error(err.Error()) // logger.Error(err.Error())
return // return
} // }
if isNewGallery { // if isNewGallery {
GetInstance().PluginCache.ExecutePostHooks(ctx, galleryID, plugin.GalleryCreatePost, nil, nil) // GetInstance().PluginCache.ExecutePostHooks(ctx, galleryID, plugin.GalleryCreatePost, nil, nil)
} // }
} // }
} // }
} // }
if i != nil { // if i != nil {
t.generateThumbnail(i) // t.generateThumbnail(i)
} // }
} // }
type GalleryImageAssociator interface { // type GalleryImageAssociator interface {
FindByPath(ctx context.Context, path string) (*models.Gallery, error) // FindByPath(ctx context.Context, path string) (*models.Gallery, error)
Create(ctx context.Context, newGallery models.Gallery) (*models.Gallery, error) // Create(ctx context.Context, newGallery *models.Gallery) error
gallery.ImageUpdater // gallery.ImageUpdater
} // }
func (t *ScanTask) associateImageWithFolderGallery(ctx context.Context, imageID int, qb GalleryImageAssociator) (galleryID int, isNew bool, err error) { // func (t *ScanTask) associateImageWithFolderGallery(ctx context.Context, imageID int, qb GalleryImageAssociator) (galleryID int, isNew bool, err error) {
// find a gallery with the path specified // // find a gallery with the path specified
path := filepath.Dir(t.file.Path()) // path := filepath.Dir(t.file.Path())
var g *models.Gallery // var g *models.Gallery
g, err = qb.FindByPath(ctx, path) // g, err = qb.FindByPath(ctx, path)
if err != nil { // if err != nil {
return // return
} // }
if g == nil { // if g == nil {
checksum := md5.FromString(path) // checksum := md5.FromString(path)
// create the gallery // // create the gallery
currentTime := time.Now() // currentTime := time.Now()
newGallery := models.Gallery{ // title := fsutil.GetNameFromPath(path, false)
Checksum: checksum,
Path: sql.NullString{
String: path,
Valid: true,
},
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
Title: sql.NullString{
String: fsutil.GetNameFromPath(path, false),
Valid: true,
},
}
logger.Infof("Creating gallery for folder %s", path) // g = &models.Gallery{
g, err = qb.Create(ctx, newGallery) // Checksum: checksum,
if err != nil { // Path: &path,
return 0, false, err // CreatedAt: currentTime,
} // UpdatedAt: currentTime,
// Title: title,
// }
isNew = true // logger.Infof("Creating gallery for folder %s", path)
} // err = qb.Create(ctx, g)
// if err != nil {
// return 0, false, err
// }
// associate image with gallery // isNew = true
err = gallery.AddImage(ctx, qb, g.ID, imageID) // }
galleryID = g.ID
return
}
func (t *ScanTask) generateThumbnail(i *models.Image) { // // associate image with gallery
if !t.GenerateThumbnails { // err = gallery.AddImage(ctx, qb, g.ID, imageID)
return // galleryID = g.ID
} // return
// }
thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum, models.DefaultGthumbWidth) // func (t *ScanTask) generateThumbnail(i *models.Image) {
exists, _ := fsutil.FileExists(thumbPath) // if !t.GenerateThumbnails {
if exists { // return
return // }
}
config, _, err := image.DecodeSourceImage(i) // thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum, models.DefaultGthumbWidth)
if err != nil { // exists, _ := fsutil.FileExists(thumbPath)
logger.Errorf("error reading image %s: %s", i.Path, err.Error()) // if exists {
return // return
} // }
if config.Height > models.DefaultGthumbWidth || config.Width > models.DefaultGthumbWidth { // config, _, err := image.DecodeSourceImage(i)
encoder := image.NewThumbnailEncoder(instance.FFMPEG) // if err != nil {
data, err := encoder.GetThumbnail(i, models.DefaultGthumbWidth) // logger.Errorf("error reading image %s: %s", i.Path, err.Error())
// return
// }
if err != nil { // if config.Height > models.DefaultGthumbWidth || config.Width > models.DefaultGthumbWidth {
// don't log for animated images // encoder := image.NewThumbnailEncoder(instance.FFMPEG)
if !errors.Is(err, image.ErrNotSupportedForThumbnail) { // data, err := encoder.GetThumbnail(i, models.DefaultGthumbWidth)
logger.Errorf("error getting thumbnail for image %s: %s", i.Path, err.Error())
var exitErr *exec.ExitError // if err != nil {
if errors.As(err, &exitErr) { // // don't log for animated images
logger.Errorf("stderr: %s", string(exitErr.Stderr)) // if !errors.Is(err, image.ErrNotSupportedForThumbnail) {
} // logger.Errorf("error getting thumbnail for image %s: %s", i.Path, err.Error())
}
return
}
err = fsutil.WriteFile(thumbPath, data) // var exitErr *exec.ExitError
if err != nil { // if errors.As(err, &exitErr) {
logger.Errorf("error writing thumbnail for image %s: %s", i.Path, err) // logger.Errorf("stderr: %s", string(exitErr.Stderr))
} // }
} // }
} // return
// }
// err = fsutil.WriteFile(thumbPath, data)
// if err != nil {
// logger.Errorf("error writing thumbnail for image %s: %s", i.Path, err)
// }
// }
// }

View File

@@ -1,129 +1,116 @@
package manager package manager
import ( // type sceneScreenshotter struct {
"context" // g *generate.Generator
"path/filepath" // }
"github.com/stashapp/stash/internal/manager/config" // func (ss *sceneScreenshotter) GenerateScreenshot(ctx context.Context, probeResult *ffmpeg.VideoFile, hash string) error {
"github.com/stashapp/stash/pkg/ffmpeg" // return ss.g.Screenshot(ctx, probeResult.Path, hash, probeResult.Width, probeResult.Duration, generate.ScreenshotOptions{})
"github.com/stashapp/stash/pkg/file" // }
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/scene/generate"
)
type sceneScreenshotter struct { // func (ss *sceneScreenshotter) GenerateThumbnail(ctx context.Context, probeResult *ffmpeg.VideoFile, hash string) error {
g *generate.Generator // return ss.g.Thumbnail(ctx, probeResult.Path, hash, probeResult.Duration, generate.ScreenshotOptions{})
} // }
func (ss *sceneScreenshotter) GenerateScreenshot(ctx context.Context, probeResult *ffmpeg.VideoFile, hash string) error { // func (t *ScanTask) scanScene(ctx context.Context) *models.Scene {
return ss.g.Screenshot(ctx, probeResult.Path, hash, probeResult.Width, probeResult.Duration, generate.ScreenshotOptions{}) // logError := func(err error) *models.Scene {
} // logger.Error(err.Error())
// return nil
// }
func (ss *sceneScreenshotter) GenerateThumbnail(ctx context.Context, probeResult *ffmpeg.VideoFile, hash string) error { // var retScene *models.Scene
return ss.g.Thumbnail(ctx, probeResult.Path, hash, probeResult.Duration, generate.ScreenshotOptions{}) // var s *models.Scene
}
func (t *ScanTask) scanScene(ctx context.Context) *models.Scene { // if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
logError := func(err error) *models.Scene { // var err error
logger.Error(err.Error()) // s, err = t.TxnManager.Scene.FindByPath(ctx, t.file.Path())
return nil // return err
} // }); err != nil {
// logger.Error(err.Error())
// return nil
// }
var retScene *models.Scene // g := &generate.Generator{
var s *models.Scene // Encoder: instance.FFMPEG,
// LockManager: instance.ReadLockManager,
// ScenePaths: instance.Paths.Scene,
// }
if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error { // scanner := scene.Scanner{
var err error // Scanner: scene.FileScanner(&file.FSHasher{}, t.fileNamingAlgorithm, t.calculateMD5),
s, err = t.TxnManager.Scene.FindByPath(ctx, t.file.Path()) // StripFileExtension: t.StripFileExtension,
return err // FileNamingAlgorithm: t.fileNamingAlgorithm,
}); err != nil { // TxnManager: t.TxnManager,
logger.Error(err.Error()) // CreatorUpdater: t.TxnManager.Scene,
return nil // Paths: GetInstance().Paths,
} // CaseSensitiveFs: t.CaseSensitiveFs,
// Screenshotter: &sceneScreenshotter{
// g: g,
// },
// VideoFileCreator: &instance.FFProbe,
// PluginCache: instance.PluginCache,
// MutexManager: t.mutexManager,
// UseFileMetadata: t.UseFileMetadata,
// }
g := &generate.Generator{ // if s != nil {
Encoder: instance.FFMPEG, // if err := scanner.ScanExisting(ctx, s, t.file); err != nil {
LockManager: instance.ReadLockManager, // return logError(err)
ScenePaths: instance.Paths.Scene, // }
}
scanner := scene.Scanner{ // return nil
Scanner: scene.FileScanner(&file.FSHasher{}, t.fileNamingAlgorithm, t.calculateMD5), // }
StripFileExtension: t.StripFileExtension,
FileNamingAlgorithm: t.fileNamingAlgorithm,
TxnManager: t.TxnManager,
CreatorUpdater: t.TxnManager.Scene,
Paths: GetInstance().Paths,
CaseSensitiveFs: t.CaseSensitiveFs,
Screenshotter: &sceneScreenshotter{
g: g,
},
VideoFileCreator: &instance.FFProbe,
PluginCache: instance.PluginCache,
MutexManager: t.mutexManager,
UseFileMetadata: t.UseFileMetadata,
}
if s != nil { // var err error
if err := scanner.ScanExisting(ctx, s, t.file); err != nil { // retScene, err = scanner.ScanNew(ctx, t.file)
return logError(err) // if err != nil {
} // return logError(err)
// }
return nil // return retScene
} // }
var err error
retScene, err = scanner.ScanNew(ctx, t.file)
if err != nil {
return logError(err)
}
return retScene
}
// associates captions to scene/s with the same basename // associates captions to scene/s with the same basename
func (t *ScanTask) associateCaptions(ctx context.Context) { // func (t *ScanTask) associateCaptions(ctx context.Context) {
vExt := config.GetInstance().GetVideoExtensions() // vExt := config.GetInstance().GetVideoExtensions()
captionPath := t.file.Path() // captionPath := t.file.Path()
captionLang := scene.GetCaptionsLangFromPath(captionPath) // captionLang := scene.GetCaptionsLangFromPath(captionPath)
relatedFiles := scene.GenerateCaptionCandidates(captionPath, vExt) // relatedFiles := scene.GenerateCaptionCandidates(captionPath, vExt)
if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error { // if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
var err error // var err error
sqb := t.TxnManager.Scene // sqb := t.TxnManager.Scene
for _, scenePath := range relatedFiles { // for _, scenePath := range relatedFiles {
s, er := sqb.FindByPath(ctx, scenePath) // s, er := sqb.FindByPath(ctx, scenePath)
if er != nil { // if er != nil {
logger.Errorf("Error searching for scene %s: %v", scenePath, er) // logger.Errorf("Error searching for scene %s: %v", scenePath, er)
continue // continue
} // }
if s != nil { // found related Scene // if s != nil { // found related Scene
logger.Debugf("Matched captions to scene %s", s.Path) // logger.Debugf("Matched captions to scene %s", s.Path)
captions, er := sqb.GetCaptions(ctx, s.ID) // captions, er := sqb.GetCaptions(ctx, s.ID)
if er == nil { // if er == nil {
fileExt := filepath.Ext(captionPath) // fileExt := filepath.Ext(captionPath)
ext := fileExt[1:] // ext := fileExt[1:]
if !scene.IsLangInCaptions(captionLang, ext, captions) { // only update captions if language code is not present // if !scene.IsLangInCaptions(captionLang, ext, captions) { // only update captions if language code is not present
newCaption := &models.SceneCaption{ // newCaption := &models.SceneCaption{
LanguageCode: captionLang, // LanguageCode: captionLang,
Filename: filepath.Base(captionPath), // Filename: filepath.Base(captionPath),
CaptionType: ext, // CaptionType: ext,
} // }
captions = append(captions, newCaption) // captions = append(captions, newCaption)
er = sqb.UpdateCaptions(ctx, s.ID, captions) // er = sqb.UpdateCaptions(ctx, s.ID, captions)
if er == nil { // if er == nil {
logger.Debugf("Updated captions for scene %s. Added %s", s.Path, captionLang) // logger.Debugf("Updated captions for scene %s. Added %s", s.Path, captionLang)
} // }
} // }
} // }
} // }
} // }
return err // return err
}); err != nil { // }); err != nil {
logger.Error(err.Error()) // logger.Error(err.Error())
} // }
} // }

View File

@@ -166,7 +166,7 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag(ctx context.Context) {
_, err := r.Performer.Update(ctx, partial) _, err := r.Performer.Update(ctx, partial)
if !t.refresh { if !t.refresh {
err = r.Performer.UpdateStashIDs(ctx, t.performer.ID, []models.StashID{ err = r.Performer.UpdateStashIDs(ctx, t.performer.ID, []*models.StashID{
{ {
Endpoint: t.box.Endpoint, Endpoint: t.box.Endpoint,
StashID: *performer.RemoteSiteID, StashID: *performer.RemoteSiteID,
@@ -231,7 +231,7 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag(ctx context.Context) {
return err return err
} }
err = r.Performer.UpdateStashIDs(ctx, createdPerformer.ID, []models.StashID{ err = r.Performer.UpdateStashIDs(ctx, createdPerformer.ID, []*models.StashID{
{ {
Endpoint: t.box.Endpoint, Endpoint: t.box.Endpoint,
StashID: *performer.RemoteSiteID, StashID: *performer.RemoteSiteID,

View File

@@ -23,7 +23,7 @@ type GenerateTranscodeTask struct {
} }
func (t *GenerateTranscodeTask) GetDescription() string { func (t *GenerateTranscodeTask) GetDescription() string {
return fmt.Sprintf("Generating transcode for %s", t.Scene.Path) return fmt.Sprintf("Generating transcode for %s", t.Scene.Path())
} }
func (t *GenerateTranscodeTask) Start(ctc context.Context) { func (t *GenerateTranscodeTask) Start(ctc context.Context) {
@@ -42,10 +42,15 @@ func (t *GenerateTranscodeTask) Start(ctc context.Context) {
return return
} }
videoCodec := t.Scene.VideoCodec.String var videoCodec string
if t.Scene.VideoCodec() != "" {
videoCodec = t.Scene.VideoCodec()
}
audioCodec := ffmpeg.MissingUnsupported audioCodec := ffmpeg.MissingUnsupported
if t.Scene.AudioCodec.Valid { if t.Scene.AudioCodec() != "" {
audioCodec = ffmpeg.ProbeAudioCodec(t.Scene.AudioCodec.String) audioCodec = ffmpeg.ProbeAudioCodec(t.Scene.AudioCodec())
} }
if !t.Force && ffmpeg.IsStreamable(videoCodec, audioCodec, container) == nil { if !t.Force && ffmpeg.IsStreamable(videoCodec, audioCodec, container) == nil {
@@ -54,7 +59,7 @@ func (t *GenerateTranscodeTask) Start(ctc context.Context) {
// TODO - move transcode generation logic elsewhere // TODO - move transcode generation logic elsewhere
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path) videoFile, err := ffprobe.NewVideoFile(t.Scene.Path())
if err != nil { if err != nil {
logger.Errorf("[transcode] error reading video file: %s", err.Error()) logger.Errorf("[transcode] error reading video file: %s", err.Error())
return return
@@ -104,15 +109,18 @@ func (t *GenerateTranscodeTask) isTranscodeNeeded() bool {
return true return true
} }
videoCodec := t.Scene.VideoCodec.String var videoCodec string
if t.Scene.VideoCodec() != "" {
videoCodec = t.Scene.VideoCodec()
}
container := "" container := ""
audioCodec := ffmpeg.MissingUnsupported audioCodec := ffmpeg.MissingUnsupported
if t.Scene.AudioCodec.Valid { if t.Scene.AudioCodec() != "" {
audioCodec = ffmpeg.ProbeAudioCodec(t.Scene.AudioCodec.String) audioCodec = ffmpeg.ProbeAudioCodec(t.Scene.AudioCodec())
} }
if t.Scene.Format.Valid { if t.Scene.Format() != "" {
container = t.Scene.Format.String container = t.Scene.Format()
} }
if ffmpeg.IsStreamable(videoCodec, audioCodec, ffmpeg.Container(container)) == nil { if ffmpeg.IsStreamable(videoCodec, audioCodec, ffmpeg.Container(container)) == nil {

View File

@@ -167,6 +167,9 @@ func parse(filePath string, probeJSON *FFProbeJSON) (*VideoFile, error) {
} else { } else {
framerate, _ = strconv.ParseFloat(videoStream.AvgFrameRate, 64) framerate, _ = strconv.ParseFloat(videoStream.AvgFrameRate, 64)
} }
if math.IsNaN(framerate) {
framerate = 0
}
result.FrameRate = math.Round(framerate*100) / 100 result.FrameRate = math.Round(framerate*100) / 100
if rotate, err := strconv.ParseInt(videoStream.Tags.Rotate, 10, 64); err == nil && rotate != 180 { if rotate, err := strconv.ParseInt(videoStream.Tags.Rotate, 10, 64); err == nil && rotate != 180 {
result.Width = videoStream.Height result.Width = videoStream.Height

411
pkg/file/clean.go Normal file
View File

@@ -0,0 +1,411 @@
package file
import (
"context"
"errors"
"fmt"
"io/fs"
"github.com/stashapp/stash/pkg/job"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/txn"
)
// Cleaner scans through stored file and folder instances and removes those that are no longer present on disk.
type Cleaner struct {
	FS         FS
	Repository Repository

	// Handlers are fired for each file/folder as it is deleted.
	Handlers []CleanHandler
}

// cleanJob carries the per-run state of a single clean operation.
type cleanJob struct {
	*Cleaner

	progress *job.Progress
	options  CleanOptions
}

// CleanOptions provides options for cleaning files and folders.
type CleanOptions struct {
	Paths []string

	// Do a dry run. Don't delete any files
	DryRun bool

	// PathFilter are used to determine if a file should be included.
	// Excluded files are marked for cleaning.
	PathFilter PathFilter
}
// Clean starts the clean process.
// It builds a cleanJob from the options and progress reporter and runs it;
// any error from the job is logged rather than returned.
func (s *Cleaner) Clean(ctx context.Context, options CleanOptions, progress *job.Progress) {
	j := &cleanJob{
		Cleaner:  s,
		progress: progress,
		options:  options,
	}

	if err := j.execute(ctx); err != nil {
		// logger.Errorf is Printf-style: %w is only meaningful in fmt.Errorf,
		// so use %v here (go vet's printf check flags %w in non-wrapping calls).
		logger.Errorf("error cleaning files: %v", err)
		return
	}
}
// fileOrFolder stores either a file ID or a folder ID; exactly one of the two
// fields is set, the other stays at its zero value.
type fileOrFolder struct {
	fileID   ID
	folderID FolderID
}

// deleteSet accumulates files and folders flagged for deletion. It preserves
// insertion order and maps each ID to its path (used for logging on delete).
type deleteSet struct {
	orderedList []fileOrFolder
	fileIDSet   map[ID]string
	folderIDSet map[FolderID]string
}

// newDeleteSet returns an empty deleteSet with its maps initialised.
func newDeleteSet() deleteSet {
	return deleteSet{
		fileIDSet:   make(map[ID]string),
		folderIDSet: make(map[FolderID]string),
	}
}
// add flags the given file id/path for deletion. Adding the same id more than
// once has no effect.
func (s *deleteSet) add(id ID, path string) {
	if s.has(id) {
		return
	}

	s.orderedList = append(s.orderedList, fileOrFolder{fileID: id})
	s.fileIDSet[id] = path
}

// has reports whether the file id has already been flagged for deletion.
func (s *deleteSet) has(id ID) bool {
	_, found := s.fileIDSet[id]
	return found
}

// addFolder flags the given folder id/path for deletion. Adding the same id
// more than once has no effect.
func (s *deleteSet) addFolder(id FolderID, path string) {
	if s.hasFolder(id) {
		return
	}

	s.orderedList = append(s.orderedList, fileOrFolder{folderID: id})
	s.folderIDSet[id] = path
}

// hasFolder reports whether the folder id has already been flagged for deletion.
func (s *deleteSet) hasFolder(id FolderID) bool {
	_, found := s.folderIDSet[id]
	return found
}

// len returns the total number of flagged files and folders.
func (s *deleteSet) len() int {
	return len(s.orderedList)
}
// execute runs the full clean job: counts candidates up-front (so the
// progress bar can be definite), assesses files then folders, and finally
// deletes everything flagged — unless this is a dry run.
func (j *cleanJob) execute(ctx context.Context) error {
	progress := j.progress
	toDelete := newDeleteSet()

	var (
		fileCount   int
		folderCount int
	)

	// count files and folders in a single read transaction
	if err := txn.WithTxn(ctx, j.Repository, func(ctx context.Context) error {
		var err error
		fileCount, err = j.Repository.CountAllInPaths(ctx, j.options.Paths)
		if err != nil {
			return err
		}

		folderCount, err = j.Repository.FolderStore.CountAllInPaths(ctx, j.options.Paths)
		if err != nil {
			return err
		}

		return nil
	}); err != nil {
		return err
	}

	progress.AddTotal(fileCount + folderCount)
	progress.Definite()

	if err := j.assessFiles(ctx, &toDelete); err != nil {
		return err
	}

	if err := j.assessFolders(ctx, &toDelete); err != nil {
		return err
	}

	if j.options.DryRun && toDelete.len() > 0 {
		// add progress for files that would've been deleted
		progress.AddProcessed(toDelete.len())
		return nil
	}

	progress.ExecuteTask(fmt.Sprintf("Cleaning %d files and folders", toDelete.len()), func() {
		for _, ff := range toDelete.orderedList {
			if job.IsCancelled(ctx) {
				return
			}

			// each entry holds either a fileID or a folderID (see deleteSet)
			if ff.fileID != 0 {
				j.deleteFile(ctx, ff.fileID, toDelete.fileIDSet[ff.fileID])
			}
			if ff.folderID != 0 {
				j.deleteFolder(ctx, ff.folderID, toDelete.folderIDSet[ff.folderID])
			}
			progress.Increment()
		}
	})

	return nil
}
// assessFiles pages through the stored files under the configured paths in
// batches of 1000 and flags those that should be cleaned into toDelete.
// The whole walk happens inside a single transaction.
func (j *cleanJob) assessFiles(ctx context.Context, toDelete *deleteSet) error {
	const batchSize = 1000
	offset := 0
	progress := j.progress
	more := true

	if err := txn.WithTxn(ctx, j.Repository, func(ctx context.Context) error {
		for more {
			if job.IsCancelled(ctx) {
				return nil
			}

			files, err := j.Repository.FindAllInPaths(ctx, j.options.Paths, batchSize, offset)
			if err != nil {
				return fmt.Errorf("error querying for files: %w", err)
			}

			for _, f := range files {
				path := f.Base().Path
				// err is captured by the closure below; reset it each iteration
				err = nil
				fileID := f.Base().ID

				// short-cut, don't assess if already added
				if toDelete.has(fileID) {
					continue
				}

				progress.ExecuteTask(fmt.Sprintf("Assessing file %s for clean", path), func() {
					if j.shouldClean(ctx, f) {
						err = j.flagFileForDelete(ctx, toDelete, f)
					} else {
						// increment progress, no further processing
						progress.Increment()
					}
				})
				if err != nil {
					return err
				}
			}

			// a short batch means we have reached the end
			if len(files) != batchSize {
				more = false
			} else {
				offset += batchSize
			}
		}

		return nil
	}); err != nil {
		return err
	}

	return nil
}
// flagFileForDelete adds the file to the toDelete set. Files and folders
// contained within it (i.e. when it is a zip file) are flagged first, so they
// appear in the ordered delete list ahead of the containing file.
func (j *cleanJob) flagFileForDelete(ctx context.Context, toDelete *deleteSet, f File) error {
	// add contained files first
	containedFiles, err := j.Repository.FindByZipFileID(ctx, f.Base().ID)
	if err != nil {
		return fmt.Errorf("error finding contained files for %q: %w", f.Base().Path, err)
	}

	for _, cf := range containedFiles {
		logger.Infof("Marking contained file %q to clean", cf.Base().Path)
		toDelete.add(cf.Base().ID, cf.Base().Path)
	}

	// add contained folders as well
	containedFolders, err := j.Repository.FolderStore.FindByZipFileID(ctx, f.Base().ID)
	if err != nil {
		return fmt.Errorf("error finding contained folders for %q: %w", f.Base().Path, err)
	}

	for _, cf := range containedFolders {
		logger.Infof("Marking contained folder %q to clean", cf.Path)
		toDelete.addFolder(cf.ID, cf.Path)
	}

	toDelete.add(f.Base().ID, f.Base().Path)

	return nil
}
// assessFolders pages through the stored folders under the configured paths
// in batches of 1000 and flags those that should be cleaned into toDelete.
// The whole walk happens inside a single transaction.
func (j *cleanJob) assessFolders(ctx context.Context, toDelete *deleteSet) error {
	const batchSize = 1000
	offset := 0
	progress := j.progress
	more := true

	if err := txn.WithTxn(ctx, j.Repository, func(ctx context.Context) error {
		for more {
			if job.IsCancelled(ctx) {
				return nil
			}

			folders, err := j.Repository.FolderStore.FindAllInPaths(ctx, j.options.Paths, batchSize, offset)
			if err != nil {
				return fmt.Errorf("error querying for folders: %w", err)
			}

			for _, f := range folders {
				path := f.Path
				folderID := f.ID

				// short-cut, don't assess if already added
				if toDelete.hasFolder(folderID) {
					continue
				}

				// err is captured by the closure below; reset it each iteration
				err = nil
				progress.ExecuteTask(fmt.Sprintf("Assessing folder %s for clean", path), func() {
					if j.shouldCleanFolder(ctx, f) {
						if err = j.flagFolderForDelete(ctx, toDelete, f); err != nil {
							return
						}
					} else {
						// increment progress, no further processing
						progress.Increment()
					}
				})
				if err != nil {
					return err
				}
			}

			// a short batch means we have reached the end
			if len(folders) != batchSize {
				more = false
			} else {
				offset += batchSize
			}
		}

		return nil
	}); err != nil {
		return err
	}

	return nil
}
// flagFolderForDelete adds the folder to the toDelete set.
// The error return is currently always nil.
func (j *cleanJob) flagFolderForDelete(ctx context.Context, toDelete *deleteSet, folder *Folder) error {
	// it is possible that child folders may be included while parent folders are not
	// so we need to check child folders separately
	toDelete.addFolder(folder.ID, folder.Path)

	return nil
}
// shouldClean reports whether the file should be removed from the database:
// either it no longer exists on disk, or the path filter rejects it.
func (j *cleanJob) shouldClean(ctx context.Context, f File) bool {
	fp := f.Base().Path

	fi, statErr := f.Base().Info(j.FS)
	switch {
	case statErr != nil && !errors.Is(statErr, fs.ErrNotExist):
		// unexpected stat failure - play it safe and keep the file
		logger.Errorf("error getting file info for %q, not cleaning: %v", fp, statErr)
		return false
	case fi == nil:
		// no info means the file is gone from disk
		logger.Infof("File not found. Marking to clean: \"%s\"", fp)
		return true
	}

	// a rejection by the path filter means the file should be cleaned;
	// don't log anything - assume filter will have logged the reason
	return !j.options.PathFilter.Accept(ctx, fp, fi)
}

// shouldCleanFolder reports whether the folder should be removed from the
// database: either it no longer exists on disk, or the path filter rejects it.
func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *Folder) bool {
	fp := f.Path

	fi, statErr := f.Info(j.FS)
	switch {
	case statErr != nil && !errors.Is(statErr, fs.ErrNotExist):
		// unexpected stat failure - play it safe and keep the folder
		logger.Errorf("error getting folder info for %q, not cleaning: %v", fp, statErr)
		return false
	case fi == nil:
		// no info means the folder is gone from disk
		logger.Infof("Folder not found. Marking to clean: \"%s\"", fp)
		return true
	}

	// a rejection by the path filter means the folder should be cleaned;
	// don't log anything - assume filter will have logged the reason
	return !j.options.PathFilter.Accept(ctx, fp, fi)
}
// deleteFile removes the file record from the database within a transaction,
// firing the registered clean handlers first. Errors are logged, not returned.
// fn is the file's path, used only for logging.
func (j *cleanJob) deleteFile(ctx context.Context, fileID ID, fn string) {
	// delete associated objects
	fileDeleter := NewDeleter()
	if err := txn.WithTxn(ctx, j.Repository, func(ctx context.Context) error {
		// commit/rollback the pending filesystem deletions with the transaction
		fileDeleter.RegisterHooks(ctx, j.Repository)

		if err := j.fireHandlers(ctx, fileDeleter, fileID); err != nil {
			return err
		}

		return j.Repository.Destroy(ctx, fileID)
	}); err != nil {
		logger.Errorf("Error deleting file %q from database: %s", fn, err.Error())
		return
	}
}

// deleteFolder removes the folder record from the database within a
// transaction, firing the registered clean handlers first. Errors are logged,
// not returned. fn is the folder's path, used only for logging.
func (j *cleanJob) deleteFolder(ctx context.Context, folderID FolderID, fn string) {
	// delete associated objects
	fileDeleter := NewDeleter()
	if err := txn.WithTxn(ctx, j.Repository, func(ctx context.Context) error {
		// commit/rollback the pending filesystem deletions with the transaction
		fileDeleter.RegisterHooks(ctx, j.Repository)

		if err := j.fireFolderHandlers(ctx, fileDeleter, folderID); err != nil {
			return err
		}

		return j.Repository.FolderStore.Destroy(ctx, folderID)
	}); err != nil {
		logger.Errorf("Error deleting folder %q from database: %s", fn, err.Error())
		return
	}
}
// fireHandlers invokes HandleFile on each registered clean handler for the
// given file, stopping at and returning the first error.
func (j *cleanJob) fireHandlers(ctx context.Context, fileDeleter *Deleter, fileID ID) error {
	for _, h := range j.Handlers {
		if err := h.HandleFile(ctx, fileDeleter, fileID); err != nil {
			return err
		}
	}

	return nil
}

// fireFolderHandlers invokes HandleFolder on each registered clean handler for
// the given folder, stopping at and returning the first error.
func (j *cleanJob) fireFolderHandlers(ctx context.Context, fileDeleter *Deleter, folderID FolderID) error {
	for _, h := range j.Handlers {
		if err := h.HandleFolder(ctx, fileDeleter, folderID); err != nil {
			return err
		}
	}

	return nil
}

View File

@@ -1,12 +1,14 @@
package file package file
import ( import (
"context"
"errors" "errors"
"fmt" "fmt"
"io/fs" "io/fs"
"os" "os"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/txn"
) )
const deleteFileSuffix = ".delete" const deleteFileSuffix = ".delete"
@@ -66,6 +68,19 @@ func NewDeleter() *Deleter {
} }
} }
// RegisterHooks registers post-commit and post-rollback hooks on mgr:
// the Deleter's pending deletions are finalised (Commit) when the
// transaction commits, and reverted (Rollback) when it rolls back.
func (d *Deleter) RegisterHooks(ctx context.Context, mgr txn.Manager) {
	mgr.AddPostCommitHook(ctx, func(ctx context.Context) error {
		d.Commit()
		return nil
	})

	mgr.AddPostRollbackHook(ctx, func(ctx context.Context) error {
		d.Rollback()
		return nil
	})
}
// Files designates files to be deleted. Each file marked will be renamed to add // Files designates files to be deleted. Each file marked will be renamed to add
// a `.delete` suffix. An error is returned if a file could not be renamed. // a `.delete` suffix. An error is returned if a file could not be renamed.
// Note that if an error is returned, then some files may be left renamed. // Note that if an error is returned, then some files may be left renamed.
@@ -159,3 +174,17 @@ func (d *Deleter) renameForDelete(path string) error {
func (d *Deleter) renameForRestore(path string) error { func (d *Deleter) renameForRestore(path string) error {
return d.RenamerRemover.Rename(path+deleteFileSuffix, path) return d.RenamerRemover.Rename(path+deleteFileSuffix, path)
} }
// Destroy removes the file record via destroyer, and, if deleteFile is true,
// also marks the physical file for deletion via fileDeleter.
func Destroy(ctx context.Context, destroyer Destroyer, f File, fileDeleter *Deleter, deleteFile bool) error {
	if err := destroyer.Destroy(ctx, f.Base().ID); err != nil {
		return err
	}

	if deleteFile {
		if err := fileDeleter.Files([]string{f.Base().Path}); err != nil {
			return err
		}
	}

	return nil
}

View File

@@ -1,31 +1,205 @@
package file package file
import ( import (
"context"
"io" "io"
"io/fs" "io/fs"
"os" "net/http"
"strconv"
"time"
"github.com/stashapp/stash/pkg/logger"
) )
type fsFile struct { // ID represents an ID of a file.
path string type ID int32
info fs.FileInfo
func (i ID) String() string {
return strconv.Itoa(int(i))
} }
func (f *fsFile) Open() (io.ReadCloser, error) { // DirEntry represents a file or directory in the file system.
return os.Open(f.path) type DirEntry struct {
ZipFileID *ID `json:"zip_file_id"`
// transient - not persisted
// only guaranteed to have id, path and basename set
ZipFile File
ModTime time.Time `json:"mod_time"`
} }
func (f *fsFile) Path() string { func (e *DirEntry) info(fs FS, path string) (fs.FileInfo, error) {
return f.path if e.ZipFile != nil {
zipPath := e.ZipFile.Base().Path
zfs, err := fs.OpenZip(zipPath)
if err != nil {
return nil, err
}
defer zfs.Close()
fs = zfs
}
// else assume os file
ret, err := fs.Lstat(path)
return ret, err
} }
func (f *fsFile) FileInfo() fs.FileInfo { // File represents a file in the file system.
return f.info type File interface {
Base() *BaseFile
SetFingerprints(fp []Fingerprint)
Open(fs FS) (io.ReadCloser, error)
} }
func FSFile(path string, info fs.FileInfo) SourceFile { // BaseFile represents a file in the file system.
return &fsFile{ type BaseFile struct {
path: path, ID ID `json:"id"`
info: info,
DirEntry
// resolved from parent folder and basename only - not stored in DB
Path string `json:"path"`
Basename string `json:"basename"`
ParentFolderID FolderID `json:"parent_folder_id"`
Fingerprints Fingerprints `json:"fingerprints"`
Size int64 `json:"size"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
}
// SetFingerprints sets the fingerprints of the file.
// If a fingerprint of the same type already exists, it is overwritten.
func (f *BaseFile) SetFingerprints(fp []Fingerprint) {
for _, v := range fp {
f.SetFingerprint(v)
} }
} }
// SetFingerprint sets the fingerprint of the file.
// If a fingerprint of the same type already exists, it is overwritten.
func (f *BaseFile) SetFingerprint(fp Fingerprint) {
for i, existing := range f.Fingerprints {
if existing.Type == fp.Type {
f.Fingerprints[i] = fp
return
}
}
f.Fingerprints = append(f.Fingerprints, fp)
}
// Base is used to fulfil the File interface.
func (f *BaseFile) Base() *BaseFile {
return f
}
func (f *BaseFile) Open(fs FS) (io.ReadCloser, error) {
if f.ZipFile != nil {
zipPath := f.ZipFile.Base().Path
zfs, err := fs.OpenZip(zipPath)
if err != nil {
return nil, err
}
return zfs.OpenOnly(f.Path)
}
return fs.Open(f.Path)
}
func (f *BaseFile) Info(fs FS) (fs.FileInfo, error) {
return f.info(fs, f.Path)
}
func (f *BaseFile) Serve(fs FS, w http.ResponseWriter, r *http.Request) {
w.Header().Add("Cache-Control", "max-age=604800000") // 1 Week
reader, err := f.Open(fs)
if err != nil {
// assume not found
http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
return
}
defer reader.Close()
rsc, ok := reader.(io.ReadSeeker)
if !ok {
// fallback to direct copy
data, err := io.ReadAll(reader)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
if k, err := w.Write(data); err != nil {
logger.Warnf("failure while serving image (wrote %v bytes out of %v): %v", k, len(data), err)
}
return
}
http.ServeContent(w, r, f.Basename, f.ModTime, rsc)
}
type Finder interface {
Find(ctx context.Context, id ...ID) ([]File, error)
}
// Getter provides methods to find Files.
type Getter interface {
FindByPath(ctx context.Context, path string) (File, error)
FindByFingerprint(ctx context.Context, fp Fingerprint) ([]File, error)
FindByZipFileID(ctx context.Context, zipFileID ID) ([]File, error)
FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]File, error)
}
type Counter interface {
CountAllInPaths(ctx context.Context, p []string) (int, error)
}
// Creator provides methods to create Files.
type Creator interface {
Create(ctx context.Context, f File) error
}
// Updater provides methods to update Files.
type Updater interface {
Update(ctx context.Context, f File) error
}
type Destroyer interface {
Destroy(ctx context.Context, id ID) error
}
// Store provides methods to find, create and update Files.
type Store interface {
Getter
Counter
Creator
Updater
Destroyer
}
// Decorator wraps the Decorate method to add additional functionality while scanning files.
type Decorator interface {
Decorate(ctx context.Context, fs FS, f File) (File, error)
}
type FilteredDecorator struct {
Decorator
Filter
}
// Decorate runs the decorator if the filter accepts the file.
func (d *FilteredDecorator) Decorate(ctx context.Context, fs FS, f File) (File, error) {
if d.Accept(f) {
return d.Decorator.Decorate(ctx, fs, f)
}
return f, nil
}

43
pkg/file/fingerprint.go Normal file
View File

@@ -0,0 +1,43 @@
package file
var (
	FingerprintTypeOshash = "oshash"
	FingerprintTypeMD5    = "md5"
	FingerprintTypePhash  = "phash"
)

// Fingerprint represents a fingerprint of a file.
type Fingerprint struct {
	Type        string
	Fingerprint interface{}
}

// Fingerprints is a list of file fingerprints.
type Fingerprints []Fingerprint

// Get returns the fingerprint value of the given type, or nil if no
// fingerprint of that type is present.
func (f Fingerprints) Get(fpType string) interface{} {
	for i := range f {
		if f[i].Type == fpType {
			return f[i].Fingerprint
		}
	}

	return nil
}

// AppendUnique appends a fingerprint to the list if a Fingerprint of the same type does not already exist in the list. If one does, then it is updated with o's Fingerprint value.
func (f Fingerprints) AppendUnique(o Fingerprint) Fingerprints {
	for i := range f {
		if f[i].Type == o.Type {
			// same type already present: overwrite in place
			f[i] = o
			return f
		}
	}

	return append(f, o)
}
// FingerprintCalculator calculates a fingerprint for the provided file.
// See the FingerprintType variables for the known fingerprint types.
type FingerprintCalculator interface {
	CalculateFingerprints(f *BaseFile, o Opener) ([]Fingerprint, error)
}

66
pkg/file/folder.go Normal file
View File

@@ -0,0 +1,66 @@
package file
import (
"context"
"io/fs"
"strconv"
"time"
)
// FolderID represents an ID of a folder.
type FolderID int32

// String converts the ID to its decimal string representation.
func (i FolderID) String() string {
	return strconv.FormatInt(int64(i), 10)
}
// Folder represents a folder in the file system.
type Folder struct {
	ID FolderID `json:"id"`
	DirEntry
	Path           string    `json:"path"`
	ParentFolderID *FolderID `json:"parent_folder_id"`
	CreatedAt      time.Time `json:"created_at"`
	UpdatedAt      time.Time `json:"updated_at"`
}

// Info returns the file info for the folder's path, resolved via the embedded
// DirEntry (which also handles entries inside zip files).
func (f *Folder) Info(fs FS) (fs.FileInfo, error) {
	return f.info(fs, f.Path)
}

// FolderGetter provides methods to find Folders.
type FolderGetter interface {
	FindByPath(ctx context.Context, path string) (*Folder, error)
	FindByZipFileID(ctx context.Context, zipFileID ID) ([]*Folder, error)
	FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*Folder, error)
	FindByParentFolderID(ctx context.Context, parentFolderID FolderID) ([]*Folder, error)
}

// FolderCounter provides a method to count Folders under the given paths.
type FolderCounter interface {
	CountAllInPaths(ctx context.Context, p []string) (int, error)
}

// FolderCreator provides methods to create Folders.
type FolderCreator interface {
	Create(ctx context.Context, f *Folder) error
}

// FolderUpdater provides methods to update Folders.
type FolderUpdater interface {
	Update(ctx context.Context, f *Folder) error
}

// FolderDestroyer provides a method to destroy a Folder by its ID.
type FolderDestroyer interface {
	Destroy(ctx context.Context, id FolderID) error
}

// FolderStore provides methods to find, create and update Folders.
type FolderStore interface {
	FolderGetter
	FolderCounter
	FolderCreator
	FolderUpdater
	FolderDestroyer
}

48
pkg/file/fs.go Normal file
View File

@@ -0,0 +1,48 @@
package file
import (
"io"
"io/fs"
"os"
)
// Opener provides an interface to open a file.
type Opener interface {
	Open() (io.ReadCloser, error)
}

// fsOpener opens a fixed named file from an FS.
type fsOpener struct {
	fs   FS
	name string
}

// Open opens the named file from the underlying FS.
func (o *fsOpener) Open() (io.ReadCloser, error) {
	return o.fs.Open(o.name)
}

// FS represents a file system.
type FS interface {
	Lstat(name string) (fs.FileInfo, error)
	Open(name string) (fs.ReadDirFile, error)
	OpenZip(name string) (*ZipFS, error)
}

// OsFS is a file system backed by the OS.
type OsFS struct{}

// Lstat returns file info for the named file without following symlinks.
func (f *OsFS) Lstat(name string) (fs.FileInfo, error) {
	return os.Lstat(name)
}

// Open opens the named file for reading.
func (f *OsFS) Open(name string) (fs.ReadDirFile, error) {
	return os.Open(name)
}

// OpenZip opens the named file as a zip file system.
func (f *OsFS) OpenZip(name string) (*ZipFS, error) {
	info, err := f.Lstat(name)
	if err != nil {
		return nil, err
	}

	return newZipFS(f, name, info)
}

53
pkg/file/handler.go Normal file
View File

@@ -0,0 +1,53 @@
package file
import (
"context"
"io/fs"
)
// PathFilter provides a filter function for paths.
type PathFilter interface {
	Accept(ctx context.Context, path string, info fs.FileInfo) bool
}

// PathFilterFunc adapts a plain function to a path-accepting filter.
// NOTE(review): this Accept signature (path only) does not match
// PathFilter.Accept (ctx, path, info), so PathFilterFunc does not satisfy
// PathFilter — confirm whether this adapter is still used or is dead code.
type PathFilterFunc func(path string) bool

func (pff PathFilterFunc) Accept(path string) bool {
	return pff(path)
}

// Filter provides a filter function for Files.
type Filter interface {
	Accept(f File) bool
}

// FilterFunc adapts a plain function to the Filter interface.
type FilterFunc func(f File) bool

// Accept calls the wrapped function.
func (ff FilterFunc) Accept(f File) bool {
	return ff(f)
}

// Handler provides a handler for Files.
type Handler interface {
	Handle(ctx context.Context, f File) error
}

// FilteredHandler is a Handler runs only if the filter accepts the file.
type FilteredHandler struct {
	Handler
	Filter
}

// Handle runs the handler if the filter accepts the file.
func (h *FilteredHandler) Handle(ctx context.Context, f File) error {
	if h.Accept(f) {
		return h.Handler.Handle(ctx, f)
	}
	return nil
}

// CleanHandler provides a handler for cleaning Files and Folders.
type CleanHandler interface {
	HandleFile(ctx context.Context, fileDeleter *Deleter, fileID ID) error
	HandleFolder(ctx context.Context, fileDeleter *Deleter, folderID FolderID) error
}

View File

@@ -1,18 +0,0 @@
package file
import (
"io"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/hash/oshash"
)
type FSHasher struct{}
func (h *FSHasher) OSHash(src io.ReadSeeker, size int64) (string, error) {
return oshash.FromReader(src, size)
}
func (h *FSHasher) MD5(src io.Reader) (string, error) {
return md5.FromReader(src)
}

39
pkg/file/image/scan.go Normal file
View File

@@ -0,0 +1,39 @@
package image
import (
"context"
"fmt"
"image"
_ "image/gif"
_ "image/jpeg"
_ "image/png"
"github.com/stashapp/stash/pkg/file"
_ "golang.org/x/image/webp"
)
// Decorator adds image specific fields to a File.
type Decorator struct {
}

// Decorate decodes the image header of f and returns a file.ImageFile
// wrapping the base file with the detected format and dimensions.
// On open/decode failure it returns the original f along with the error.
func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file.File, error) {
	base := f.Base()
	r, err := fs.Open(base.Path)
	if err != nil {
		return f, fmt.Errorf("reading image file %q: %w", base.Path, err)
	}
	defer r.Close()

	// DecodeConfig reads only the header; supported formats are those
	// registered via the blank imports (gif/jpeg/png) plus webp.
	c, format, err := image.DecodeConfig(r)
	if err != nil {
		return f, fmt.Errorf("decoding image file %q: %w", base.Path, err)
	}

	return &file.ImageFile{
		BaseFile: base,
		Format:   format,
		Width:    c.Width,
		Height:   c.Height,
	}, nil
}

9
pkg/file/image_file.go Normal file
View File

@@ -0,0 +1,9 @@
package file
// ImageFile is an extension of BaseFile to represent image files.
type ImageFile struct {
	*BaseFile

	// Format is the decoded image format name.
	Format string `json:"format"`
	// Width and Height are the image dimensions in pixels.
	Width  int `json:"width"`
	Height int `json:"height"`
}

View File

@@ -1,190 +1,845 @@
package file package file
import ( import (
"context"
"errors"
"fmt" "fmt"
"io"
"io/fs" "io/fs"
"os" "path/filepath"
"strconv" "strings"
"sync"
"time" "time"
"github.com/remeh/sizedwaitgroup"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn"
) )
type SourceFile interface { const scanQueueSize = 200000
Open() (io.ReadCloser, error)
Path() string // Repository provides access to storage methods for files and folders.
FileInfo() fs.FileInfo type Repository struct {
} txn.Manager
txn.DatabaseProvider
type FileBased interface { Store
File() models.File
} FolderStore FolderStore
type Hasher interface {
OSHash(src io.ReadSeeker, size int64) (string, error)
MD5(src io.Reader) (string, error)
}
type Scanned struct {
Old *models.File
New *models.File
}
// FileUpdated returns true if both old and new files are present and not equal.
func (s Scanned) FileUpdated() bool {
if s.Old == nil || s.New == nil {
return false
}
return !s.Old.Equal(*s.New)
}
// ContentsChanged returns true if both old and new files are present and the file content is different.
func (s Scanned) ContentsChanged() bool {
if s.Old == nil || s.New == nil {
return false
}
if s.Old.Checksum != s.New.Checksum {
return true
}
if s.Old.OSHash != s.New.OSHash {
return true
}
return false
} }
// Scanner scans files into the database.
//
// The scan process works using two goroutines. The first walks through the provided paths
// in the filesystem. It runs each directory entry through the provided ScanFilters. If none
// of the filter Accept methods return true, then the file/directory is ignored.
// Any folders found are handled immediately. Files inside zip files are also handled immediately.
// All other files encountered are sent to the second goroutine queue.
//
// Folders are handled by checking if the folder exists in the database, by its full path.
// If a folder entry already exists, then its mod time is updated (if applicable).
// If the folder does not exist in the database, then a new folder entry its created.
//
// Files are handled by first querying for the file by its path. If the file entry exists in the
// database, then the mod time is compared to the value in the database. If the mod time is different
// then file is marked as updated - it recalculates any fingerprints and fires decorators, then
// the file entry is updated and any applicable handlers are fired.
//
// If the file entry does not exist in the database, then fingerprints are calculated for the file.
// It then determines if the file is a rename of an existing file by querying for file entries with
// the same fingerprint. If any are found, it checks each to see if any are missing in the file
// system. If one is, then the file is treated as renamed and its path is updated. If none are missing,
// or many are, then the file is treated as a new file.
//
// If the file is not a renamed file, then the decorators are fired and the file is created, then
// the applicable handlers are fired.
type Scanner struct { type Scanner struct {
Hasher Hasher FS FS
Repository Repository
FingerprintCalculator FingerprintCalculator
CalculateMD5 bool // FileDecorators are applied to files as they are scanned.
CalculateOSHash bool FileDecorators []Decorator
} }
func (o Scanner) ScanExisting(existing FileBased, file SourceFile) (h *Scanned, err error) { // ProgressReporter is used to report progress of the scan.
info := file.FileInfo() type ProgressReporter interface {
h = &Scanned{} AddTotal(total int)
Increment()
Definite()
ExecuteTask(description string, fn func())
}
existingFile := existing.File() type scanJob struct {
h.Old = &existingFile *Scanner
updatedFile := existingFile // handlers are called after a file has been scanned.
h.New = &updatedFile handlers []Handler
// update existing data if needed ProgressReports ProgressReporter
options ScanOptions
startTime time.Time
fileQueue chan scanFile
dbQueue chan func(ctx context.Context) error
retryList []scanFile
retrying bool
folderPathToID sync.Map
zipPathToID sync.Map
count int
txnMutex sync.Mutex
}
// ScanOptions provides options for scanning files.
type ScanOptions struct {
	// Paths is the list of root paths walked by the scan.
	Paths []string

	// ZipFileExtensions is a list of file extensions that are considered zip files.
	// Extension does not include the . character.
	ZipFileExtensions []string

	// ScanFilters are used to determine if a file should be scanned.
	// An entry is scanned if any filter accepts it; with no filters,
	// everything is accepted.
	ScanFilters []PathFilter

	// ParallelTasks is the number of queue workers used to process
	// files. Values below 1 are treated as 1.
	ParallelTasks int
}
// Scan starts the scanning process, walking and processing the paths in
// options. Scanned files are passed to the given handlers, and progress
// is reported through progressReporter.
func (s *Scanner) Scan(ctx context.Context, handlers []Handler, options ScanOptions, progressReporter ProgressReporter) {
	j := scanJob{
		options:         options,
		ProgressReports: progressReporter,
		handlers:        handlers,
		Scanner:         s,
	}

	j.execute(ctx)
}
// scanFile couples a file entry being scanned with the file system and
// fs.FileInfo it was discovered from.
type scanFile struct {
	*BaseFile
	// fs is the file system the file was found on (may be a zip FS).
	fs FS
	// info is the walk-time file info for the entry.
	info fs.FileInfo
	// zipFile is the containing zip file, or nil when the file is not
	// inside a zip.
	zipFile *scanFile
}
// withTxn runs fn inside a database transaction. Transactions are
// serialised via txnMutex so that only one runs at a time.
func (s *scanJob) withTxn(ctx context.Context, fn func(ctx context.Context) error) error {
	// get exclusive access to the database
	s.txnMutex.Lock()
	defer s.txnMutex.Unlock()
	return txn.WithTxn(ctx, s.Repository, fn)
}
// withDB runs fn with database access but without opening a
// transaction, for operations that do not need transactional writes.
func (s *scanJob) withDB(ctx context.Context, fn func(ctx context.Context) error) error {
	return txn.WithDatabase(ctx, s.Repository, fn)
}
// execute runs the scan job: one goroutine walks the configured paths
// and queues files, another drains the database-operation queue, while
// this goroutine processes the file queue with parallel workers. It
// returns once scanning and all queued database operations finish, or
// earlier on error/cancellation.
func (s *scanJob) execute(ctx context.Context) {
	paths := s.options.Paths
	logger.Infof("scanning %d paths", len(paths))
	s.startTime = time.Now()

	s.fileQueue = make(chan scanFile, scanQueueSize)
	s.dbQueue = make(chan func(ctx context.Context) error, scanQueueSize)

	// producer: walk paths and fill fileQueue. queueFiles closes the
	// queue when the walk completes.
	go func() {
		if err := s.queueFiles(ctx, paths); err != nil {
			if errors.Is(err, context.Canceled) {
				return
			}

			logger.Errorf("error queuing files for scan: %v", err)
			return
		}

		logger.Infof("Finished adding files to queue. %d files queued", s.count)
	}()

	// consumer: apply queued database operations until dbQueue is
	// closed by processQueue.
	done := make(chan struct{}, 1)

	go func() {
		if err := s.processDBOperations(ctx); err != nil {
			if errors.Is(err, context.Canceled) {
				return
			}

			logger.Errorf("error processing database operations for scan: %v", err)
		}

		close(done)
	}()

	if err := s.processQueue(ctx); err != nil {
		if errors.Is(err, context.Canceled) {
			return
		}

		logger.Errorf("error scanning files: %v", err)
		return
	}

	// wait for database operations to complete
	<-done
}
// queueFiles walks each of the given root paths, pushing discovered
// files onto the file queue via the walk callback. The file queue is
// closed before returning regardless of error, and the progress
// reporter (when configured) is switched to definite mode with the
// final queued count. Returns the first walk error encountered.
func (s *scanJob) queueFiles(ctx context.Context, paths []string) error {
	var err error

	walk := func() {
		for _, p := range paths {
			err = symWalk(s.FS, p, s.queueFileFunc(ctx, s.FS, nil))
			if err != nil {
				return
			}
		}
	}

	// ProgressReports may be nil - it is nil-checked below - so guard
	// the ExecuteTask call to avoid a nil interface method call.
	if s.ProgressReports != nil {
		s.ProgressReports.ExecuteTask("Walking directory tree", walk)
	} else {
		walk()
	}

	close(s.fileQueue)

	if s.ProgressReports != nil {
		s.ProgressReports.AddTotal(s.count)
		s.ProgressReports.Definite()
	}

	return err
}
// queueFileFunc returns the fs.WalkDirFunc used to process entries
// discovered under f. Folders are handled immediately; files inside a
// zip (zipFile != nil) are also handled immediately; all other files
// are pushed onto the file queue for the parallel workers.
func (s *scanJob) queueFileFunc(ctx context.Context, f FS, zipFile *scanFile) fs.WalkDirFunc {
	return func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}

		// stop walking promptly on cancellation
		if err = ctx.Err(); err != nil {
			return err
		}

		info, err := d.Info()
		if err != nil {
			return fmt.Errorf("reading info for %q: %w", path, err)
		}

		// apply the configured scan filters; skip whole directories
		// that are rejected
		if !s.acceptEntry(ctx, path, info) {
			if info.IsDir() {
				return fs.SkipDir
			}

			return nil
		}

		ff := scanFile{
			BaseFile: &BaseFile{
				DirEntry: DirEntry{
					ModTime: modTime(info),
				},
				Path:     path,
				Basename: filepath.Base(path),
				Size:     info.Size(),
			},
			fs:   f,
			info: info,
			// there is no guarantee that the zip file has been scanned
			// so we can't just plug in the id.
			zipFile: zipFile,
		}

		if info.IsDir() {
			// handle folders immediately
			if err := s.handleFolder(ctx, ff); err != nil {
				logger.Errorf("error processing %q: %v", path, err)
				// skip the directory since we won't be able to process the files anyway
				return fs.SkipDir
			}

			return nil
		}

		// if zip file is present, we handle immediately
		if zipFile != nil {
			s.ProgressReports.ExecuteTask("Scanning "+path, func() {
				if err := s.handleFile(ctx, ff); err != nil {
					logger.Errorf("error processing %q: %v", path, err)
					// don't return an error, just skip the file
				}
			})

			return nil
		}

		s.fileQueue <- ff
		s.count++

		return nil
	}
}
// acceptEntry reports whether the entry at path should be scanned.
// Entries are accepted when no scan filters are configured, or when at
// least one configured filter accepts them.
func (s *scanJob) acceptEntry(ctx context.Context, path string, info fs.FileInfo) bool {
	// no filters means accept everything
	if len(s.options.ScanFilters) == 0 {
		return true
	}

	for _, filter := range s.options.ScanFilters {
		// a single accepting filter is sufficient
		if filter.Accept(ctx, path, info) {
			return true
		}
	}

	return false
}
// scanZipFile opens the zip file f and walks its contents through the
// same walk callback used for directories; entries inside the zip are
// handled inline rather than queued. Zip files that cannot be opened
// for random access (errNotReaderAt) are silently skipped.
func (s *scanJob) scanZipFile(ctx context.Context, f scanFile) error {
	zipFS, err := f.fs.OpenZip(f.Path)
	if err != nil {
		if errors.Is(err, errNotReaderAt) {
			// can't walk the zip file
			// just return
			return nil
		}

		return err
	}

	defer zipFS.Close()

	return symWalk(zipFS, f.Path, s.queueFileFunc(ctx, zipFS, &f))
}
// processQueue consumes the file queue with up to ParallelTasks
// concurrent workers, then retries files that were deferred to the
// retry list (e.g. files whose parent folder had not been created yet),
// and finally closes the database-operation queue.
func (s *scanJob) processQueue(ctx context.Context) error {
	parallelTasks := s.options.ParallelTasks
	if parallelTasks < 1 {
		parallelTasks = 1
	}

	wg := sizedwaitgroup.New(parallelTasks)

	for f := range s.fileQueue {
		if err := ctx.Err(); err != nil {
			return err
		}

		wg.Add()
		// capture the loop variable for the goroutine
		ff := f
		go func() {
			defer wg.Done()
			s.processQueueItem(ctx, ff)
		}()
	}

	wg.Wait()

	// NOTE(review): retryList is appended to from worker goroutines
	// without a lock (see onNewFile) — confirm this is synchronised
	// elsewhere or guard it.
	s.retrying = true
	for _, f := range s.retryList {
		if err := ctx.Err(); err != nil {
			return err
		}

		wg.Add()
		ff := f
		go func() {
			defer wg.Done()
			s.processQueueItem(ctx, ff)
		}()
	}

	wg.Wait()

	// signal processDBOperations that no further operations will arrive
	close(s.dbQueue)

	return nil
}
// incrementProgress increments the scan progress counter, if a
// progress reporter is configured.
func (s *scanJob) incrementProgress() {
	if s.ProgressReports != nil {
		s.ProgressReports.Increment()
	}
}
// processDBOperations drains the database-operation queue, running each
// queued operation in its own transaction, until the queue is closed or
// the context is cancelled.
func (s *scanJob) processDBOperations(ctx context.Context) error {
	for fn := range s.dbQueue {
		if err := ctx.Err(); err != nil {
			return err
		}

		// NOTE(review): errors from individual operations are discarded
		// here — confirm they are logged within fn itself.
		_ = s.withTxn(ctx, fn)
	}

	return nil
}
// processQueueItem scans a single queued entry, dispatching to the
// folder or file handler as appropriate. Errors are logged, not
// returned, so one failing entry does not abort the scan. The work is
// reported as a task when a progress reporter is configured.
func (s *scanJob) processQueueItem(ctx context.Context, f scanFile) {
	task := func() {
		var err error
		if f.info.IsDir() {
			err = s.handleFolder(ctx, f)
		} else {
			err = s.handleFile(ctx, f)
		}

		if err != nil {
			logger.Errorf("error processing %q: %v", f.Path, err)
		}
	}

	// ProgressReports may be nil (other call sites nil-check it), so
	// guard the ExecuteTask call to avoid a nil interface method call.
	if s.ProgressReports != nil {
		s.ProgressReports.ExecuteTask("Scanning "+f.Path, task)
	} else {
		task()
	}
}
// getFolderID resolves the folder ID for path, consulting the
// in-memory path cache before falling back to the folder store.
// Returns (nil, nil) when no folder exists for the path.
func (s *scanJob) getFolderID(ctx context.Context, path string) (*FolderID, error) {
	// check the folder cache first
	if cached, ok := s.folderPathToID.Load(path); ok {
		id := cached.(FolderID)
		return &id, nil
	}

	folder, err := s.Repository.FolderStore.FindByPath(ctx, path)
	switch {
	case err != nil:
		return nil, err
	case folder == nil:
		return nil, nil
	}

	// cache for subsequent lookups
	s.folderPathToID.Store(path, folder.ID)
	return &folder.ID, nil
}
// getZipFileID resolves the file ID of the given zip file. It returns
// (nil, nil) when zipFile is nil, uses the ID already on the scanFile
// when set, then consults the in-memory cache, and finally queries the
// repository. A zip file missing from the database is an error.
func (s *scanJob) getZipFileID(ctx context.Context, zipFile *scanFile) (*ID, error) {
	if zipFile == nil {
		return nil, nil
	}

	if zipFile.ID != 0 {
		return &zipFile.ID, nil
	}

	zipPath := zipFile.Path

	// check the cache before hitting the repository
	if cached, ok := s.zipPathToID.Load(zipPath); ok {
		id := cached.(ID)
		return &id, nil
	}

	found, err := s.Repository.FindByPath(ctx, zipPath)
	if err != nil {
		return nil, fmt.Errorf("getting zip file ID for %q: %w", zipPath, err)
	}
	if found == nil {
		return nil, fmt.Errorf("zip file %q doesn't exist in database", zipPath)
	}

	id := found.Base().ID
	s.zipPathToID.Store(zipPath, id)
	return &id, nil
}
// handleFolder creates or updates the data-store entry for the given
// folder within a single transaction, and caches its path -> ID mapping
// for later lookups. Progress is incremented whether or not the folder
// is processed successfully.
func (s *scanJob) handleFolder(ctx context.Context, file scanFile) error {
	path := file.Path

	return s.withTxn(ctx, func(ctx context.Context) error {
		defer s.incrementProgress()

		// determine if folder already exists in data store (by path)
		f, err := s.Repository.FolderStore.FindByPath(ctx, path)
		if err != nil {
			return fmt.Errorf("checking for existing folder %q: %w", path, err)
		}

		// if folder not exists, create it
		if f == nil {
			f, err = s.onNewFolder(ctx, file)
		} else {
			f, err = s.onExistingFolder(ctx, file, f)
		}

		if err != nil {
			return err
		}

		if f != nil {
			s.folderPathToID.Store(f.Path, f.ID)
		}

		return nil
	})
}
// onNewFolder creates a new folder entry for file, resolving the
// containing zip file ID and parent folder ID before persisting. It
// returns the created folder.
func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*Folder, error) {
	now := time.Now()

	toCreate := &Folder{
		DirEntry: DirEntry{
			ModTime: file.ModTime,
		},
		Path:      file.Path,
		CreatedAt: now,
		UpdatedAt: now,
	}

	zipFileID, err := s.getZipFileID(ctx, file.zipFile)
	if err != nil {
		return nil, err
	}

	if zipFileID != nil {
		toCreate.ZipFileID = zipFileID
	}

	dir := filepath.Dir(file.Path)
	if dir != "." {
		parentFolderID, err := s.getFolderID(ctx, dir)
		if err != nil {
			return nil, fmt.Errorf("getting parent folder %q: %w", dir, err)
		}

		// if parent folder doesn't exist, assume it's a top-level folder
		// this may not be true if we're using multiple goroutines
		if parentFolderID != nil {
			toCreate.ParentFolderID = parentFolderID
		}
	}

	logger.Infof("%s doesn't exist. Creating new folder entry...", file.Path)
	if err := s.Repository.FolderStore.Create(ctx, toCreate); err != nil {
		return nil, fmt.Errorf("creating folder %q: %w", file.Path, err)
	}

	return toCreate, nil
}
// onExistingFolder refreshes a folder already present in the store,
// persisting the new modification time when it differs from the stored
// value. The (possibly updated) folder is returned.
func (s *scanJob) onExistingFolder(ctx context.Context, f scanFile, existing *Folder) (*Folder, error) {
	// nothing to do if the mod time is unchanged
	if f.ModTime.Equal(existing.ModTime) {
		return existing, nil
	}

	// update entry in store
	existing.ModTime = f.ModTime
	if err := s.Repository.FolderStore.Update(ctx, existing); err != nil {
		return nil, fmt.Errorf("updating folder %q: %w", f.Path, err)
	}

	return existing, nil
}
func modTime(info fs.FileInfo) time.Time {
// truncate to seconds, since we don't store beyond that in the database // truncate to seconds, since we don't store beyond that in the database
updatedFile.FileModTime = info.ModTime().Truncate(time.Second) return info.ModTime().Truncate(time.Second)
updatedFile.Size = strconv.FormatInt(info.Size(), 10) }
modTimeChanged := !existingFile.FileModTime.Equal(updatedFile.FileModTime) func (s *scanJob) handleFile(ctx context.Context, f scanFile) error {
var ff File
// don't use a transaction to check if new or existing
if err := s.withDB(ctx, func(ctx context.Context) error {
// determine if file already exists in data store
var err error
ff, err = s.Repository.FindByPath(ctx, f.Path)
if err != nil {
return fmt.Errorf("checking for existing file %q: %w", f.Path, err)
}
// regenerate hash(es) if missing or file mod time changed if ff == nil {
if _, err = o.generateHashes(&updatedFile, file, modTimeChanged); err != nil { ff, err = s.onNewFile(ctx, f)
return err
}
ff, err = s.onExistingFile(ctx, f, ff)
return err
}); err != nil {
return err
}
if ff != nil && s.isZipFile(f.info.Name()) {
f.BaseFile = ff.Base()
if err := s.scanZipFile(ctx, f); err != nil {
logger.Errorf("Error scanning zip file %q: %v", f.Path, err)
}
}
return nil
}
// isZipFile reports whether path has one of the configured zip file
// extensions, compared case-insensitively.
func (s *scanJob) isZipFile(path string) bool {
	got := filepath.Ext(path)

	for _, want := range s.options.ZipFileExtensions {
		// configured extensions do not include the leading dot
		if strings.EqualFold("."+want, got) {
			return true
		}
	}

	return false
}
func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) {
now := time.Now()
baseFile := f.BaseFile
path := baseFile.Path
baseFile.CreatedAt = now
baseFile.UpdatedAt = now
// find the parent folder
parentFolderID, err := s.getFolderID(ctx, filepath.Dir(path))
if err != nil {
return nil, fmt.Errorf("getting parent folder for %q: %w", path, err)
}
if parentFolderID == nil {
// if parent folder doesn't exist, assume it's not yet created
// add this file to the queue to be created later
if s.retrying {
// if we're retrying and the folder still doesn't exist, then it's a problem
s.incrementProgress()
return nil, fmt.Errorf("parent folder for %q doesn't exist", path)
}
s.retryList = append(s.retryList, f)
return nil, nil
}
baseFile.ParentFolderID = *parentFolderID
zipFileID, err := s.getZipFileID(ctx, f.zipFile)
if err != nil {
s.incrementProgress()
return nil, err return nil, err
} }
// notify of changes as needed if zipFileID != nil {
// object exists, no further processing required baseFile.ZipFileID = zipFileID
return
} }
func (o Scanner) ScanNew(file SourceFile) (*models.File, error) { fp, err := s.calculateFingerprints(f.fs, baseFile, path)
info := file.FileInfo() if err != nil {
sizeStr := strconv.FormatInt(info.Size(), 10) s.incrementProgress()
modTime := info.ModTime()
f := models.File{
Path: file.Path(),
Size: sizeStr,
FileModTime: modTime,
}
if _, err := o.generateHashes(&f, file, true); err != nil {
return nil, err return nil, err
} }
return &f, nil baseFile.SetFingerprints(fp)
}
// generateHashes regenerates and sets the hashes in the provided File. // determine if the file is renamed from an existing file in the store
// It will not recalculate unless specified. renamed, err := s.handleRename(ctx, baseFile, fp)
func (o Scanner) generateHashes(f *models.File, file SourceFile, regenerate bool) (changed bool, err error) {
existing := *f
var src io.ReadCloser
if o.CalculateOSHash && (regenerate || f.OSHash == "") {
logger.Infof("Calculating oshash for %s ...", f.Path)
size := file.FileInfo().Size()
// #2196 for symlinks
// get the size of the actual file, not the symlink
if file.FileInfo().Mode()&os.ModeSymlink == os.ModeSymlink {
fi, err := os.Stat(f.Path)
if err != nil { if err != nil {
return false, err s.incrementProgress()
} return nil, err
logger.Debugf("File <%s> is symlink. Size changed from <%d> to <%d>", f.Path, size, fi.Size())
size = fi.Size()
} }
src, err = file.Open() if renamed != nil {
return renamed, nil
}
file, err := s.fireDecorators(ctx, f.fs, baseFile)
if err != nil { if err != nil {
return false, err s.incrementProgress()
} return nil, err
defer src.Close()
seekSrc, valid := src.(io.ReadSeeker)
if !valid {
return false, fmt.Errorf("invalid source file type: %s", file.Path())
} }
// regenerate hash // if not renamed, queue file for creation
var oshash string if err := s.queueDBOperation(ctx, path, func(ctx context.Context) error {
oshash, err = o.Hasher.OSHash(seekSrc, size) logger.Infof("%s doesn't exist. Creating new file entry...", path)
if err := s.Repository.Create(ctx, file); err != nil {
return fmt.Errorf("creating file %q: %w", path, err)
}
if err := s.fireHandlers(ctx, file); err != nil {
return err
}
return nil
}); err != nil {
return nil, err
}
return file, nil
}
// queueDBOperation schedules fn to be run in its own transaction by the
// database-operation consumer. Operations on zip files are instead run
// immediately, so the zip's database entry exists before its contents
// are scanned (getZipFileID requires it).
func (s *scanJob) queueDBOperation(ctx context.Context, path string, fn func(ctx context.Context) error) error {
	// perform immediately if it is a zip file
	if s.isZipFile(path) {
		return s.withTxn(ctx, fn)
	}

	s.dbQueue <- fn
	return nil
}
func (s *scanJob) fireDecorators(ctx context.Context, fs FS, f File) (File, error) {
for _, h := range s.FileDecorators {
var err error
f, err = h.Decorate(ctx, fs, f)
if err != nil { if err != nil {
return false, fmt.Errorf("error generating oshash for %s: %w", file.Path(), err) return f, err
}
} }
f.OSHash = oshash return f, nil
}
// reset reader to start of file func (s *scanJob) fireHandlers(ctx context.Context, f File) error {
_, err = seekSrc.Seek(0, io.SeekStart) for _, h := range s.handlers {
if err := h.Handle(ctx, f); err != nil {
return err
}
}
return nil
}
func (s *scanJob) calculateFingerprints(fs FS, f *BaseFile, path string) ([]Fingerprint, error) {
logger.Infof("Calculating fingerprints for %s ...", path)
// calculate primary fingerprint for the file
fp, err := s.FingerprintCalculator.CalculateFingerprints(f, &fsOpener{
fs: fs,
name: path,
})
if err != nil { if err != nil {
return false, fmt.Errorf("error seeking to start of file in %s: %w", file.Path(), err) return nil, fmt.Errorf("calculating fingerprint for file %q: %w", path, err)
}
return fp, nil
}
func appendFileUnique(v []File, toAdd []File) []File {
for _, f := range toAdd {
found := false
id := f.Base().ID
for _, vv := range v {
if vv.Base().ID == id {
found = true
break
} }
} }
// always generate if MD5 is nil if !found {
// only regenerate MD5 if: v = append(v, f)
// - OSHash was not calculated, or }
// - existing OSHash is different to generated one }
// or if it was different to the previous version
if o.CalculateMD5 && (f.Checksum == "" || (regenerate && (!o.CalculateOSHash || existing.OSHash != f.OSHash))) {
logger.Infof("Calculating checksum for %s...", f.Path)
if src == nil { return v
src, err = file.Open() }
func (s *scanJob) getFileFS(f *BaseFile) (FS, error) {
if f.ZipFile == nil {
return s.FS, nil
}
fs, err := s.getFileFS(f.ZipFile.Base())
if err != nil { if err != nil {
return false, err return nil, err
}
defer src.Close()
} }
// regenerate checksum zipPath := f.ZipFile.Base().Path
var checksum string return fs.OpenZip(zipPath)
checksum, err = o.Hasher.MD5(src) }
func (s *scanJob) handleRename(ctx context.Context, f *BaseFile, fp []Fingerprint) (File, error) {
var others []File
for _, tfp := range fp {
thisOthers, err := s.Repository.FindByFingerprint(ctx, tfp)
if err != nil { if err != nil {
return return nil, fmt.Errorf("getting files by fingerprint %v: %w", tfp, err)
} }
f.Checksum = checksum others = appendFileUnique(others, thisOthers)
} }
changed = (o.CalculateOSHash && (f.OSHash != existing.OSHash)) || (o.CalculateMD5 && (f.Checksum != existing.Checksum)) var missing []File
return for _, other := range others {
// if file does not exist, then update it to the new path
// TODO - handle #1426 scenario
fs, err := s.getFileFS(other.Base())
if err != nil {
return nil, fmt.Errorf("getting FS for %q: %w", other.Base().Path, err)
}
if _, err := fs.Lstat(other.Base().Path); err != nil {
missing = append(missing, other)
}
}
n := len(missing)
switch {
case n == 1:
// assume does not exist, update existing file
other := missing[0]
otherBase := other.Base()
logger.Infof("%s moved to %s. Updating path...", otherBase.Path, f.Path)
f.ID = otherBase.ID
f.CreatedAt = otherBase.CreatedAt
f.Fingerprints = otherBase.Fingerprints
*otherBase = *f
if err := s.queueDBOperation(ctx, f.Path, func(ctx context.Context) error {
if err := s.Repository.Update(ctx, other); err != nil {
return fmt.Errorf("updating file for rename %q: %w", f.Path, err)
}
return nil
}); err != nil {
return nil, err
}
return other, nil
case n > 1:
// multiple candidates
// TODO - mark all as missing and just create a new file
return nil, nil
}
return nil, nil
}
// returns a file only if it was updated
func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File) (File, error) {
base := existing.Base()
path := base.Path
fileModTime := f.ModTime
updated := !fileModTime.Equal(base.ModTime)
if !updated {
s.incrementProgress()
return nil, nil
}
logger.Infof("%s has been updated: rescanning", path)
base.ModTime = fileModTime
base.Size = f.Size
base.UpdatedAt = time.Now()
// calculate and update fingerprints for the file
fp, err := s.calculateFingerprints(f.fs, base, path)
if err != nil {
s.incrementProgress()
return nil, err
}
existing.SetFingerprints(fp)
existing, err = s.fireDecorators(ctx, f.fs, existing)
if err != nil {
s.incrementProgress()
return nil, err
}
// queue file for update
if err := s.queueDBOperation(ctx, path, func(ctx context.Context) error {
if err := s.Repository.Update(ctx, existing); err != nil {
return fmt.Errorf("updating file %q: %w", path, err)
}
if err := s.fireHandlers(ctx, existing); err != nil {
return err
}
return nil
}); err != nil {
return nil, err
}
return existing, nil
} }

View File

@@ -1,14 +1,18 @@
package scene package video
import ( import (
"context"
"fmt"
"os" "os"
"path/filepath" "path/filepath"
"strings" "strings"
"golang.org/x/text/language"
"github.com/asticode/go-astisub" "github.com/asticode/go-astisub"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/txn"
"golang.org/x/text/language"
) )
var CaptionExts = []string{"vtt", "srt"} // in a case where vtt and srt files are both provided prioritize vtt file due to native support var CaptionExts = []string{"vtt", "srt"} // in a case where vtt and srt files are both provided prioritize vtt file due to native support
@@ -46,7 +50,7 @@ func IsValidLanguage(lang string) bool {
// IsLangInCaptions returns true if lang is present // IsLangInCaptions returns true if lang is present
// in the captions // in the captions
func IsLangInCaptions(lang string, ext string, captions []*models.SceneCaption) bool { func IsLangInCaptions(lang string, ext string, captions []*models.VideoCaption) bool {
for _, caption := range captions { for _, caption := range captions {
if lang == caption.LanguageCode && ext == caption.CaptionType { if lang == caption.LanguageCode && ext == caption.CaptionType {
return true return true
@@ -55,41 +59,8 @@ func IsLangInCaptions(lang string, ext string, captions []*models.SceneCaption)
return false return false
} }
// GenerateCaptionCandidates generates a list of filenames with exts as extensions
// that can associated with the caption
func GenerateCaptionCandidates(captionPath string, exts []string) []string {
var candidates []string
basename := strings.TrimSuffix(captionPath, filepath.Ext(captionPath)) // caption filename without the extension
// a caption file can be something like scene_filename.srt or scene_filename.en.srt
// if a language code is present and valid remove it from the basename
languageExt := filepath.Ext(basename)
if len(languageExt) > 2 && IsValidLanguage(languageExt[1:]) {
basename = strings.TrimSuffix(basename, languageExt)
}
for _, ext := range exts {
candidates = append(candidates, basename+"."+ext)
}
return candidates
}
// GetCaptionsLangFromPath returns the language code from a given captions path
// If no valid language is present LangUknown is returned
func GetCaptionsLangFromPath(captionPath string) string {
langCode := LangUnknown
basename := strings.TrimSuffix(captionPath, filepath.Ext(captionPath)) // caption filename without the extension
languageExt := filepath.Ext(basename)
if len(languageExt) > 2 && IsValidLanguage(languageExt[1:]) {
langCode = languageExt[1:]
}
return langCode
}
// CleanCaptions removes non existent/accessible language codes from captions // CleanCaptions removes non existent/accessible language codes from captions
func CleanCaptions(scenePath string, captions []*models.SceneCaption) (cleanedCaptions []*models.SceneCaption, changed bool) { func CleanCaptions(scenePath string, captions []*models.VideoCaption) (cleanedCaptions []*models.VideoCaption, changed bool) {
changed = false changed = false
for _, caption := range captions { for _, caption := range captions {
found := false found := false
@@ -104,3 +75,76 @@ func CleanCaptions(scenePath string, captions []*models.SceneCaption) (cleanedCa
} }
return return
} }
// getCaptionPrefix returns the prefix used to search for video files
// matching the given caption path: the path with its extension removed
// — and, for names like "scene.en.srt", any valid language code removed
// as well — followed by a trailing ".".
func getCaptionPrefix(captionPath string) string {
	// strip the caption extension
	stem := strings.TrimSuffix(captionPath, filepath.Ext(captionPath))

	// drop an embedded language code (e.g. ".en") when present and valid
	if langExt := filepath.Ext(stem); len(langExt) > 2 && IsValidLanguage(langExt[1:]) {
		stem = strings.TrimSuffix(stem, langExt)
	}

	return stem + "."
}
// GetCaptionsLangFromPath returns the language code from a given captions path
// If no valid language is present LangUknown is returned
func getCaptionsLangFromPath(captionPath string) string {
langCode := LangUnknown
basename := strings.TrimSuffix(captionPath, filepath.Ext(captionPath)) // caption filename without the extension
languageExt := filepath.Ext(basename)
if len(languageExt) > 2 && IsValidLanguage(languageExt[1:]) {
langCode = languageExt[1:]
}
return langCode
}
// CaptionUpdater reads and writes the captions associated with a file.
type CaptionUpdater interface {
	// GetCaptions returns the captions currently associated with fileID.
	GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error)
	// UpdateCaptions replaces the captions associated with fileID.
	UpdateCaptions(ctx context.Context, fileID file.ID, captions []*models.VideoCaption) error
}
// AssociateCaptions associates the caption file at captionPath with a
// video file sharing the same basename, adding a caption entry for the
// caption's language/type unless one is already present. Errors are
// logged rather than returned.
func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manager, fqb file.Getter, w CaptionUpdater) {
	captionLang := getCaptionsLangFromPath(captionPath)
	captionPrefix := getCaptionPrefix(captionPath)

	if err := txn.WithTxn(ctx, txnMgr, func(ctx context.Context) error {
		f, err := fqb.FindByPath(ctx, captionPrefix+"*")
		if err != nil {
			return fmt.Errorf("searching for scene %s: %w", captionPrefix, err)
		}
		if f == nil {
			// no related file found; nothing to do
			return nil
		}

		fileID := f.Base().ID
		path := f.Base().Path
		logger.Debugf("Matched captions to file %s", path)

		// propagate errors instead of silently swallowing them as the
		// previous `if er == nil` chains did
		captions, err := w.GetCaptions(ctx, fileID)
		if err != nil {
			return fmt.Errorf("getting captions for file %s: %w", path, err)
		}

		ext := filepath.Ext(captionPath)[1:]

		// only update captions if language code is not already present
		if IsLangInCaptions(captionLang, ext, captions) {
			return nil
		}

		captions = append(captions, &models.VideoCaption{
			LanguageCode: captionLang,
			Filename:     filepath.Base(captionPath),
			CaptionType:  ext,
		})

		if err := w.UpdateCaptions(ctx, fileID, captions); err != nil {
			return fmt.Errorf("updating captions for file %s: %w", path, err)
		}
		logger.Debugf("Updated captions for file %s. Added %s", path, captionLang)

		return nil
	}); err != nil {
		logger.Error(err.Error())
	}
}

View File

@@ -0,0 +1,53 @@
package video
import (
"testing"
"github.com/stretchr/testify/assert"
)
// testCase is a single caption-path fixture shared by the tests below.
type testCase struct {
	// captionPath is the input caption file path.
	captionPath string
	// expectedLang is the result expected from getCaptionsLangFromPath.
	expectedLang string
	// expectedResult is the prefix expected from getCaptionPrefix.
	expectedResult string
}
// testCases exercises getCaptionPrefix (expectedResult) and
// getCaptionsLangFromPath (expectedLang) over unix- and windows-style
// paths, with valid, invalid, and absent language codes.
var testCases = []testCase{
	{
		captionPath:    "/stash/video.vtt",
		expectedLang:   LangUnknown,
		expectedResult: "/stash/video.",
	},
	{
		captionPath:    "/stash/video.en.vtt",
		expectedLang:   "en",
		expectedResult: "/stash/video.", // lang code valid, remove en part
	},
	{
		captionPath:    "/stash/video.test.srt",
		expectedLang:   LangUnknown,
		expectedResult: "/stash/video.test.", // no lang code/lang code invalid test should remain
	},
	{
		captionPath:    "C:\\videos\\video.fr.srt",
		expectedLang:   "fr",
		expectedResult: "C:\\videos\\video.",
	},
	{
		captionPath:    "C:\\videos\\video.xx.srt",
		expectedLang:   LangUnknown,
		expectedResult: "C:\\videos\\video.xx.", // no lang code/lang code invalid xx should remain
	},
}
// TestGenerateCaptionCandidates checks the caption prefix derived from
// each fixture path (exercises getCaptionPrefix).
func TestGenerateCaptionCandidates(t *testing.T) {
	for _, tc := range testCases {
		got := getCaptionPrefix(tc.captionPath)
		assert.Equal(t, tc.expectedResult, got)
	}
}
// TestGetCaptionsLangFromPath checks the language code derived from
// each fixture path.
func TestGetCaptionsLangFromPath(t *testing.T) {
	for _, tc := range testCases {
		got := getCaptionsLangFromPath(tc.captionPath)
		assert.Equal(t, tc.expectedLang, got)
	}
}

View File

@@ -1,4 +1,4 @@
package scene package video
import ( import (
"path/filepath" "path/filepath"

Some files were not shown because too many files have changed in this diff Show More