Mirror of https://github.com/stashapp/stash.git
Config Tweaks
Using viper for config management. Added configuration endpoint.
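
For context on the approach: viper keeps settings in an in-memory instance, reads them with typed getters, and persists them on demand. A minimal standalone sketch of that pattern — the key names, default value, and ./config.yaml path below are illustrative assumptions, not taken from this commit:

    package main

    import (
        "fmt"

        "github.com/spf13/viper"
    )

    func main() {
        viper.SetConfigName("config") // look for config.* in the search paths
        viper.AddConfigPath(".")
        viper.SetDefault("database", "stash-go.sqlite")

        // A missing file is fine on first run; defaults still apply.
        if err := viper.ReadInConfig(); err != nil {
            fmt.Println("no config file yet, using defaults")
        }

        viper.Set("stash", []string{"/media"}) // in-memory only until written
        fmt.Println(viper.GetStringSlice("stash"), viper.GetString("database"))

        // WriteConfigAs persists the current settings to an explicit path.
        if err := viper.WriteConfigAs("./config.yaml"); err != nil {
            fmt.Println("write failed:", err)
        }
    }

The commit wires the same calls into a new pkg/manager/config package and an initConfig() helper, shown in the diffs below.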
go.mod (2 additions)

@@ -5,6 +5,7 @@ require (
     github.com/PuerkitoBio/goquery v1.5.0
     github.com/bmatcuk/doublestar v1.1.1
     github.com/disintegration/imaging v1.6.0
+    github.com/fsnotify/fsnotify v1.4.7
     github.com/go-chi/chi v4.0.1+incompatible
     github.com/gobuffalo/packr/v2 v2.0.0-rc.15
     github.com/golang-migrate/migrate/v4 v4.2.2
@@ -15,6 +16,7 @@ require (
     github.com/rs/cors v1.6.0
     github.com/sirupsen/logrus v1.3.0
     github.com/spf13/afero v1.2.0 // indirect
+    github.com/spf13/viper v1.3.2
     github.com/vektah/gqlparser v1.1.0
     golang.org/x/image v0.0.0-20190118043309-183bebdce1b2 // indirect
 )
go.sum (10 additions)

@@ -63,6 +63,7 @@ github.com/edsrzf/mmap-go v0.0.0-20170320065105-0bce6a688712/go.mod h1:YO35OhQPt
 github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
 github.com/fatih/structs v1.0.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
 github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
+github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
 github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
 github.com/fsouza/fake-gcs-server v1.3.0/go.mod h1:Lq+43m2znsXfDKHnQMfdA0HpYYAEJsfizsbpk5k3TLo=
 github.com/go-chi/chi v3.3.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ=
@@ -285,6 +286,7 @@ github.com/h2non/filetype v1.0.6/go.mod h1:isekKqOuhMj+s/7r3rIeTErIRy4Rub5uBWHfv
 github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4=
 github.com/hashicorp/golang-lru v0.5.0 h1:CL2msUPvZTLb5O648aiLNJw3hnBxN2+1Jq8rCOH9wdo=
 github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
+github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
 github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
 github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
 github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
@@ -318,6 +320,7 @@ github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kshvakov/clickhouse v1.3.4/go.mod h1:DMzX7FxRymoNkVgizH0DWAL8Cur7wHLgx3MUnGwJqpE=
 github.com/lib/pq v1.0.0 h1:X5PMW56eZitiTeO7tKzZxFCSpbFZJtkMMooicw2us9A=
 github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
+github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY=
 github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
 github.com/markbates/deplist v1.0.4/go.mod h1:gRRbPbbuA8TmMiRvaOzUlRfzfjeCCBqX2A6arxN01MM=
 github.com/markbates/deplist v1.0.5/go.mod h1:gRRbPbbuA8TmMiRvaOzUlRfzfjeCCBqX2A6arxN01MM=
@@ -349,6 +352,7 @@ github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/le
 github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
 github.com/mitchellh/mapstructure v0.0.0-20180203102830-a4e142e9c047/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
 github.com/mitchellh/mapstructure v1.0.0/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
+github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=
 github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
 github.com/mongodb/mongo-go-driver v0.1.0/go.mod h1:NK/HWDIIZkaYsnYa0hmtP443T5ELr0KDecmIioVuuyU=
 github.com/monoculum/formam v0.0.0-20180901015400-4e68be1d79ba/go.mod h1:RKgILGEJq24YyJ2ban8EO0RUVSJlF1pGsEvoLEACr/Q=
@@ -363,6 +367,7 @@ github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zM
 github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74=
 github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
 github.com/openzipkin/zipkin-go v0.1.1/go.mod h1:NtoC/o8u3JlF1lSlyPNswIbeQH9bJTmOf0Erfk+hxe8=
+github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=
 github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
 github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
@@ -403,17 +408,22 @@ github.com/smartystreets/goconvey v0.0.0-20180222194500-ef6db91d284a/go.mod h1:X
 github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d/go.mod h1:UdhH50NIW0fCiwBSr0co2m7BnFLdv4fQTgdqdJTHFeE=
 github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e/go.mod h1:HuIsMU8RRBOtsCgI77wP899iHVBQpCmg4ErYMZB+2IA=
 github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
+github.com/spf13/afero v1.2.0 h1:O9FblXGxoTc51M+cqr74Bm2Tmt4PvkA5iu/j8HrkNuY=
 github.com/spf13/afero v1.2.0/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
 github.com/spf13/cast v1.2.0/go.mod h1:r2rcYCSwa1IExKTDiTfzaxqT2FNHs8hODu4LnUfgKEg=
+github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8=
 github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
 github.com/spf13/cobra v0.0.3 h1:ZlrZ4XsMRm04Fr5pSFxBgfND2EBVa1nLpiy1stUsX/8=
 github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
+github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk=
 github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
 github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
 github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
 github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
 github.com/spf13/viper v1.2.1/go.mod h1:P4AexN0a+C9tGAnUFNwDMYYZv3pjFuvmeiMyKRaNVlI=
 github.com/spf13/viper v1.3.1/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
+github.com/spf13/viper v1.3.2 h1:VUFqw5KcqRf7i70GOzW7N+Q7+gxVBkSSqiXB12+JQ4M=
+github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
main.go (5 changes)

@@ -4,14 +4,15 @@ import (
     "github.com/stashapp/stash/pkg/api"
     "github.com/stashapp/stash/pkg/database"
     "github.com/stashapp/stash/pkg/manager"
+    "github.com/stashapp/stash/pkg/manager/config"

     _ "github.com/golang-migrate/migrate/v4/database/sqlite3"
     _ "github.com/golang-migrate/migrate/v4/source/file"
 )

 func main() {
-    managerInstance := manager.Initialize()
-    database.Initialize(managerInstance.StaticPaths.DatabaseFile)
+    manager.Initialize()
+    database.Initialize(config.GetDatabasePath())
     api.Start()
     blockForever()
 }
File diff suppressed because one or more lines are too long
pkg/api/resolver_query_configure.go (new file, 37 lines)

@@ -0,0 +1,37 @@
+package api
+
+import (
+    "context"
+    "fmt"
+    "github.com/stashapp/stash/pkg/manager/config"
+    "github.com/stashapp/stash/pkg/models"
+    "github.com/stashapp/stash/pkg/utils"
+)
+
+func (r *queryResolver) ConfigureGeneral(ctx context.Context, input *models.ConfigGeneralInput) (models.ConfigGeneralResult, error) {
+    if input == nil {
+        return makeConfigGeneralResult(), fmt.Errorf("nil input")
+    }
+
+    if len(input.Stashes) > 0 {
+        for _, stashPath := range input.Stashes {
+            exists, err := utils.DirExists(stashPath)
+            if !exists {
+                return makeConfigGeneralResult(), err
+            }
+        }
+        config.Set(config.Stash, input.Stashes)
+    }
+
+    if err := config.Write(); err != nil {
+        return makeConfigGeneralResult(), err
+    }
+
+    return makeConfigGeneralResult(), nil
+}
+
+func makeConfigGeneralResult() models.ConfigGeneralResult {
+    return models.ConfigGeneralResult{
+        Stashes: config.GetStashPaths(),
+    }
+}
@@ -12,8 +12,7 @@ import (
     "github.com/gorilla/websocket"
     "github.com/rs/cors"
     "github.com/stashapp/stash/pkg/logger"
-    "github.com/stashapp/stash/pkg/manager"
-    "github.com/stashapp/stash/pkg/manager/jsonschema"
+    "github.com/stashapp/stash/pkg/manager/config"
     "github.com/stashapp/stash/pkg/models"
     "github.com/stashapp/stash/pkg/utils"
     "net/http"
@@ -29,6 +28,7 @@ const httpsPort = "9999"

 var certsBox *packr.Box
 var uiBox *packr.Box
+
 //var legacyUiBox *packr.Box
 var setupUIBox *packr.Box

@@ -99,41 +99,44 @@ func Start() {
             http.Error(w, fmt.Sprintf("error: %s", err), 500)
         }
         stash := filepath.Clean(r.Form.Get("stash"))
+        generated := filepath.Clean(r.Form.Get("generated"))
         metadata := filepath.Clean(r.Form.Get("metadata"))
         cache := filepath.Clean(r.Form.Get("cache"))
         //downloads := filepath.Clean(r.Form.Get("downloads")) // TODO
         downloads := filepath.Join(metadata, "downloads")

-        exists, _ := utils.FileExists(stash)
-        fileInfo, _ := os.Stat(stash)
-        if !exists || !fileInfo.IsDir() {
+        exists, _ := utils.DirExists(stash)
+        if !exists || stash == "." {
             http.Error(w, fmt.Sprintf("the stash path either doesn't exist, or is not a directory <%s>. Go back and try again.", stash), 500)
             return
         }

-        exists, _ = utils.FileExists(metadata)
-        fileInfo, _ = os.Stat(metadata)
-        if !exists || !fileInfo.IsDir() {
+        exists, _ = utils.DirExists(generated)
+        if !exists || generated == "." {
+            http.Error(w, fmt.Sprintf("the generated path either doesn't exist, or is not a directory <%s>. Go back and try again.", generated), 500)
+            return
+        }
+
+        exists, _ = utils.DirExists(metadata)
+        if !exists || metadata == "." {
             http.Error(w, fmt.Sprintf("the metadata path either doesn't exist, or is not a directory <%s> Go back and try again.", metadata), 500)
             return
         }

-        exists, _ = utils.FileExists(cache)
-        fileInfo, _ = os.Stat(cache)
-        if !exists || !fileInfo.IsDir() {
+        exists, _ = utils.DirExists(cache)
+        if !exists || cache == "." {
             http.Error(w, fmt.Sprintf("the cache path either doesn't exist, or is not a directory <%s> Go back and try again.", cache), 500)
             return
         }

         _ = os.Mkdir(downloads, 0755)

-        config := &jsonschema.Config{
-            Stash:     stash,
-            Metadata:  metadata,
-            Cache:     cache,
-            Downloads: downloads,
-        }
-        if err := manager.GetInstance().SaveConfig(config); err != nil {
+        config.Set(config.Stash, stash)
+        config.Set(config.Generated, generated)
+        config.Set(config.Metadata, metadata)
+        config.Set(config.Cache, cache)
+        config.Set(config.Downloads, downloads)
+        if err := config.Write(); err != nil {
             http.Error(w, fmt.Sprintf("there was an error saving the config file: %s", err), 500)
             return
         }
@@ -220,7 +223,7 @@ func ConfigCheckMiddleware(next http.Handler) http.Handler {
     return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
         ext := path.Ext(r.URL.Path)
         shouldRedirect := ext == "" && r.Method == "GET" && r.URL.Path != "/init"
-        if !manager.HasValidConfig() && shouldRedirect {
+        if !config.IsValid() && shouldRedirect {
             if !strings.HasPrefix(r.URL.Path, "/setup") {
                 http.Redirect(w, r, "/setup", 301)
                 return
pkg/manager/config/config.go (new file, 47 lines)

@@ -0,0 +1,47 @@
+package config
+
+import (
+    "github.com/spf13/viper"
+)
+
+const Stash = "stash"
+const Cache = "cache"
+const Generated = "generated"
+const Metadata = "metadata"
+const Downloads = "downloads"
+
+const Database = "database"
+
+func Set(key string, value interface{}) {
+    viper.Set(key, value)
+}
+
+func Write() error {
+    return viper.WriteConfig()
+}
+
+func GetStashPaths() []string {
+    return viper.GetStringSlice(Stash)
+}
+
+func GetCachePath() string {
+    return viper.GetString(Cache)
+}
+
+func GetGeneratedPath() string {
+    return viper.GetString(Generated)
+}
+
+func GetMetadataPath() string {
+    return viper.GetString(Metadata)
+}
+
+func GetDatabasePath() string {
+    return viper.GetString(Database)
+}
+
+func IsValid() bool {
+    setPaths := viper.IsSet(Stash) && viper.IsSet(Cache) && viper.IsSet(Generated) && viper.IsSet(Metadata)
+    // TODO: check valid paths
+    return setPaths
+}
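
The new package is a thin veneer over viper's global instance, so a caller mutates a key, persists, and reads it back without threading a config struct through. A hypothetical caller as a sketch — saveStashDirs is an invented name, and it assumes initConfig() in the manager package has already pointed viper at its config file:

    package main

    import (
        "fmt"

        "github.com/stashapp/stash/pkg/manager/config"
    )

    func saveStashDirs(dirs []string) error {
        config.Set(config.Stash, dirs) // only touches the in-memory viper instance
        if err := config.Write(); err != nil {
            return err // viper needs a known config file location to write back to
        }
        return nil
    }

    func main() {
        if err := saveStashDirs([]string{"/media/library"}); err != nil {
            fmt.Println("could not save config:", err)
            return
        }
        fmt.Println("stash paths:", config.GetStashPaths())
    }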
@@ -43,7 +43,7 @@ func NewPreviewGenerator(videoFile ffmpeg.VideoFile, videoFilename string, image
 func (g *PreviewGenerator) Generate() error {
     instance.Paths.Generated.EmptyTmpDir()
     logger.Infof("[generator] generating scene preview for %s", g.Info.VideoFile.Path)
-    encoder := ffmpeg.NewEncoder(instance.StaticPaths.FFMPEG)
+    encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)

     if err := g.generateConcatFile(); err != nil {
         return err
@@ -49,7 +49,7 @@ func NewSpriteGenerator(videoFile ffmpeg.VideoFile, imageOutputPath string, vttO

 func (g *SpriteGenerator) Generate() error {
     instance.Paths.Generated.EmptyTmpDir()
-    encoder := ffmpeg.NewEncoder(instance.StaticPaths.FFMPEG)
+    encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)

     if err := g.generateSpriteImage(&encoder); err != nil {
         return err
@@ -1,38 +0,0 @@
-package jsonschema
-
-import (
-    "encoding/json"
-    "fmt"
-    "github.com/stashapp/stash/pkg/logger"
-    "os"
-)
-
-type Config struct {
-    Stash     string `json:"stash"`
-    Metadata  string `json:"metadata"`
-    // Generated string `json:"generated"` // TODO: Generated directory instead of metadata
-    Cache     string `json:"cache"`
-    Downloads string `json:"downloads"`
-}
-
-func LoadConfigFile(file string) *Config {
-    var config Config
-    configFile, err := os.Open(file)
-    defer configFile.Close()
-    if err != nil {
-        logger.Error(err.Error())
-    }
-    jsonParser := json.NewDecoder(configFile)
-    parseError := jsonParser.Decode(&config)
-    if parseError != nil {
-        logger.Errorf("config file parse error (ignore on first launch): %s", parseError)
-    }
-    return &config
-}
-
-func SaveConfigFile(filePath string, config *Config) error {
-    if config == nil {
-        return fmt.Errorf("config must not be nil")
-    }
-    return marshalToFile(filePath, config)
-}
@@ -1,9 +1,12 @@
 package manager

 import (
+    "fmt"
+    "github.com/fsnotify/fsnotify"
+    "github.com/spf13/viper"
     "github.com/stashapp/stash/pkg/ffmpeg"
     "github.com/stashapp/stash/pkg/logger"
-    "github.com/stashapp/stash/pkg/manager/jsonschema"
+    "github.com/stashapp/stash/pkg/manager/config"
     "github.com/stashapp/stash/pkg/manager/paths"
     "github.com/stashapp/stash/pkg/utils"
     "sync"
@@ -12,8 +15,10 @@ import (
 type singleton struct {
     Status JobStatus
     Paths  *paths.Paths
-    StaticPaths *paths.StaticPathsType
     JSON   *jsonUtils
+
+    FFMPEGPath  string
+    FFProbePath string
 }

 var instance *singleton
@@ -26,16 +31,15 @@ func GetInstance() *singleton {

 func Initialize() *singleton {
     once.Do(func() {
-        _ = utils.EnsureDir(paths.StaticPaths.ConfigDirectory)
-        configFile := jsonschema.LoadConfigFile(paths.StaticPaths.ConfigFile)
+        _ = utils.EnsureDir(paths.GetConfigDirectory())
+        initConfig()
         instance = &singleton{
             Status: Idle,
-            Paths:  paths.NewPaths(configFile),
-            StaticPaths: &paths.StaticPaths,
+            Paths:  paths.NewPaths(),
             JSON:   &jsonUtils{},
         }

-        instance.refreshConfig(configFile)
+        instance.refreshConfig()

         initFFMPEG()
     })
@@ -43,11 +47,45 @@ func Initialize() *singleton {
     return instance
 }

+func initConfig() {
+    // The config file is called config. Leave off the file extension.
+    viper.SetConfigName("config")
+
+    viper.AddConfigPath("$HOME/.stash") // Look for the config in the home directory
+    viper.AddConfigPath(".")            // Look for config in the working directory
+
+    viper.SetDefault(config.Database, paths.GetDefaultDatabaseFilePath())
+
+    // Set generated to the metadata path for backwards compat
+    if !viper.IsSet(config.Generated) {
+        viper.SetDefault(config.Generated, viper.GetString(config.Metadata))
+    }
+
+    err := viper.ReadInConfig() // Find and read the config file
+    if err != nil {             // Handle errors reading the config file
+        _ = utils.Touch(paths.GetDefaultConfigFilePath())
+        if err = viper.ReadInConfig(); err != nil {
+            panic(err)
+        }
+    }
+
+    // Watch for changes
+    viper.WatchConfig()
+    viper.OnConfigChange(func(e fsnotify.Event) {
+        fmt.Println("Config file changed:", e.Name)
+        instance.refreshConfig()
+    })
+
+    //viper.Set("stash", []string{"/", "/stuff"})
+    //viper.WriteConfig()
+}
+
 func initFFMPEG() {
-    ffmpegPath, ffprobePath := ffmpeg.GetPaths(instance.StaticPaths.ConfigDirectory)
+    configDirectory := paths.GetConfigDirectory()
+    ffmpegPath, ffprobePath := ffmpeg.GetPaths(configDirectory)
     if ffmpegPath == "" || ffprobePath == "" {
         logger.Infof("couldn't find FFMPEG, attempting to download it")
-        if err := ffmpeg.Download(instance.StaticPaths.ConfigDirectory); err != nil {
+        if err := ffmpeg.Download(configDirectory); err != nil {
             msg := `Unable to locate / automatically download FFMPEG

 Check the readme for download links.
@@ -55,40 +93,18 @@ The FFMPEG and FFProbe binaries should be placed in %s

 The error was: %s
 `
-            logger.Fatalf(msg, instance.StaticPaths.ConfigDirectory, err)
+            logger.Fatalf(msg, configDirectory, err)
         }
     }

-    instance.StaticPaths.FFMPEG = ffmpegPath
-    instance.StaticPaths.FFProbe = ffprobePath
+    // TODO: is this valid after download?
+    instance.FFMPEGPath = ffmpegPath
+    instance.FFProbePath = ffprobePath
 }

-func HasValidConfig() bool {
-    configFileExists, _ := utils.FileExists(instance.StaticPaths.ConfigFile) // TODO: Verify JSON is correct
-    if configFileExists && instance.Paths.Config != nil {
-        return true
-    }
-    return false
-}
-
-func (s *singleton) SaveConfig(config *jsonschema.Config) error {
-    if err := jsonschema.SaveConfigFile(s.StaticPaths.ConfigFile, config); err != nil {
-        return err
-    }
-
-    // Reload the config
-    s.refreshConfig(config)
-
-    return nil
-}
-
-func (s *singleton) refreshConfig(config *jsonschema.Config) {
-    if config == nil {
-        config = jsonschema.LoadConfigFile(s.StaticPaths.ConfigFile)
-    }
-    s.Paths = paths.NewPaths(config)
-
-    if HasValidConfig() {
+func (s *singleton) refreshConfig() {
+    s.Paths = paths.NewPaths()
+    if config.IsValid() {
         _ = utils.EnsureDir(s.Paths.Generated.Screenshots)
         _ = utils.EnsureDir(s.Paths.Generated.Vtt)
         _ = utils.EnsureDir(s.Paths.Generated.Markers)
@@ -3,6 +3,7 @@ package manager
 import (
     "github.com/bmatcuk/doublestar"
     "github.com/stashapp/stash/pkg/logger"
+    "github.com/stashapp/stash/pkg/manager/config"
     "github.com/stashapp/stash/pkg/models"
     "github.com/stashapp/stash/pkg/utils"
     "path/filepath"
@@ -18,12 +19,16 @@ func (s *singleton) Scan() {
     go func() {
         defer s.returnToIdleState()

-        globPath := filepath.Join(s.Paths.Config.Stash, "**/*.{zip,m4v,mp4,mov,wmv}")
-        globResults, _ := doublestar.Glob(globPath)
-        logger.Infof("Starting scan of %d files", len(globResults))
+        var results []string
+        for _, path := range config.GetStashPaths() {
+            globPath := filepath.Join(path, "**/*.{zip,m4v,mp4,mov,wmv}")
+            globResults, _ := doublestar.Glob(globPath)
+            results = append(results, globResults...)
+        }
+        logger.Infof("Starting scan of %d files", len(results))

         var wg sync.WaitGroup
-        for _, path := range globResults {
+        for _, path := range results {
             wg.Add(1)
             task := ScanTask{FilePath: path}
             go task.Start(&wg)
@@ -1,11 +1,11 @@
 package paths

 import (
-    "github.com/stashapp/stash/pkg/manager/jsonschema"
+    "os/user"
+    "path/filepath"
 )

 type Paths struct {
-    Config    *jsonschema.Config
     Generated *generatedPaths
     JSON      *jsonPaths

@@ -14,14 +14,33 @@ type Paths struct {
     SceneMarkers *sceneMarkerPaths
 }

-func NewPaths(config *jsonschema.Config) *Paths {
+func NewPaths() *Paths {
     p := Paths{}
-    p.Config = config
-    p.Generated = newGeneratedPaths(p)
-    p.JSON = newJSONPaths(p)
+    p.Generated = newGeneratedPaths()
+    p.JSON = newJSONPaths()

-    p.Gallery = newGalleryPaths(p.Config)
+    p.Gallery = newGalleryPaths()
     p.Scene = newScenePaths(p)
     p.SceneMarkers = newSceneMarkerPaths(p)
     return &p
 }
+
+func GetHomeDirectory() string {
+    currentUser, err := user.Current()
+    if err != nil {
+        panic(err)
+    }
+    return currentUser.HomeDir
+}
+
+func GetConfigDirectory() string {
+    return filepath.Join(GetHomeDirectory(), ".stash")
+}
+
+func GetDefaultDatabaseFilePath() string {
+    return filepath.Join(GetConfigDirectory(), "stash-go.sqlite")
+}
+
+func GetDefaultConfigFilePath() string {
+    return filepath.Join(GetConfigDirectory(), "config.yml")
+}
@@ -1,24 +1,20 @@
 package paths

 import (
-    "github.com/stashapp/stash/pkg/manager/jsonschema"
+    "github.com/stashapp/stash/pkg/manager/config"
     "path/filepath"
 )

-type galleryPaths struct {
-    config *jsonschema.Config
-}
+type galleryPaths struct{}

-func newGalleryPaths(c *jsonschema.Config) *galleryPaths {
-    gp := galleryPaths{}
-    gp.config = c
-    return &gp
+func newGalleryPaths() *galleryPaths {
+    return &galleryPaths{}
 }

 func (gp *galleryPaths) GetExtractedPath(checksum string) string {
-    return filepath.Join(gp.config.Cache, checksum)
+    return filepath.Join(config.GetCachePath(), checksum)
 }

 func (gp *galleryPaths) GetExtractedFilePath(checksum string, fileName string) string {
-    return filepath.Join(gp.config.Cache, checksum, fileName)
+    return filepath.Join(config.GetCachePath(), checksum, fileName)
 }
@@ -1,6 +1,7 @@
 package paths

 import (
+    "github.com/stashapp/stash/pkg/manager/config"
     "github.com/stashapp/stash/pkg/utils"
     "path/filepath"
 )
@@ -13,13 +14,13 @@ type generatedPaths struct {
     Tmp string
 }

-func newGeneratedPaths(p Paths) *generatedPaths {
+func newGeneratedPaths() *generatedPaths {
     gp := generatedPaths{}
-    gp.Screenshots = filepath.Join(p.Config.Metadata, "screenshots")
-    gp.Vtt = filepath.Join(p.Config.Metadata, "vtt")
-    gp.Markers = filepath.Join(p.Config.Metadata, "markers")
-    gp.Transcodes = filepath.Join(p.Config.Metadata, "transcodes")
-    gp.Tmp = filepath.Join(p.Config.Metadata, "tmp")
+    gp.Screenshots = filepath.Join(config.GetGeneratedPath(), "screenshots")
+    gp.Vtt = filepath.Join(config.GetGeneratedPath(), "vtt")
+    gp.Markers = filepath.Join(config.GetGeneratedPath(), "markers")
+    gp.Transcodes = filepath.Join(config.GetGeneratedPath(), "transcodes")
+    gp.Tmp = filepath.Join(config.GetGeneratedPath(), "tmp")
     return &gp
 }
@@ -1,6 +1,7 @@
 package paths

 import (
+    "github.com/stashapp/stash/pkg/manager/config"
     "path/filepath"
 )

@@ -14,14 +15,14 @@ type jsonPaths struct {
     Studios string
 }

-func newJSONPaths(p Paths) *jsonPaths {
+func newJSONPaths() *jsonPaths {
     jp := jsonPaths{}
-    jp.MappingsFile = filepath.Join(p.Config.Metadata, "mappings.json")
-    jp.ScrapedFile = filepath.Join(p.Config.Metadata, "scraped.json")
-    jp.Performers = filepath.Join(p.Config.Metadata, "performers")
-    jp.Scenes = filepath.Join(p.Config.Metadata, "scenes")
-    jp.Galleries = filepath.Join(p.Config.Metadata, "galleries")
-    jp.Studios = filepath.Join(p.Config.Metadata, "studios")
+    jp.MappingsFile = filepath.Join(config.GetMetadataPath(), "mappings.json")
+    jp.ScrapedFile = filepath.Join(config.GetMetadataPath(), "scraped.json")
+    jp.Performers = filepath.Join(config.GetMetadataPath(), "performers")
+    jp.Scenes = filepath.Join(config.GetMetadataPath(), "scenes")
+    jp.Galleries = filepath.Join(config.GetMetadataPath(), "galleries")
+    jp.Studios = filepath.Join(config.GetMetadataPath(), "studios")
     return &jp
 }
@@ -1,44 +0,0 @@
-package paths
-
-import (
-    "os"
-    "os/user"
-    "path/filepath"
-)
-
-type StaticPathsType struct {
-    ExecutionDirectory string
-    ConfigDirectory    string
-    ConfigFile         string
-    DatabaseFile       string
-
-    FFMPEG  string
-    FFProbe string
-}
-
-var StaticPaths = StaticPathsType{
-    ExecutionDirectory: getExecutionDirectory(),
-    ConfigDirectory:    getConfigDirectory(),
-    ConfigFile:         filepath.Join(getConfigDirectory(), "config.json"),
-    DatabaseFile:       filepath.Join(getConfigDirectory(), "stash-go.sqlite"),
-}
-
-func getExecutionDirectory() string {
-    ex, err := os.Executable()
-    if err != nil {
-        panic(err)
-    }
-    return filepath.Dir(ex)
-}
-
-func getHomeDirectory() string {
-    currentUser, err := user.Current()
-    if err != nil {
-        panic(err)
-    }
-    return currentUser.HomeDir
-}
-
-func getConfigDirectory() string {
-    return filepath.Join(getHomeDirectory(), ".stash")
-}
@@ -25,7 +25,7 @@ func (t *GenerateMarkersTask) Start(wg *sync.WaitGroup) {
         return
     }

-    videoFile, err := ffmpeg.NewVideoFile(instance.StaticPaths.FFProbe, t.Scene.Path)
+    videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path)
     if err != nil {
         logger.Errorf("error reading video file: %s", err.Error())
         return
@@ -35,7 +35,7 @@ func (t *GenerateMarkersTask) Start(wg *sync.WaitGroup) {
     markersFolder := filepath.Join(instance.Paths.Generated.Markers, t.Scene.Checksum)
     _ = utils.EnsureDir(markersFolder)

-    encoder := ffmpeg.NewEncoder(instance.StaticPaths.FFMPEG)
+    encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)
     for i, sceneMarker := range sceneMarkers {
         index := i + 1
         logger.Progressf("[generator] <%s> scene marker %d of %d", t.Scene.Checksum, index, len(sceneMarkers))
@@ -21,7 +21,7 @@ func (t *GeneratePreviewTask) Start(wg *sync.WaitGroup) {
         return
     }

-    videoFile, err := ffmpeg.NewVideoFile(instance.StaticPaths.FFProbe, t.Scene.Path)
+    videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path)
     if err != nil {
         logger.Errorf("error reading video file: %s", err.Error())
         return
@@ -19,7 +19,7 @@ func (t *GenerateSpriteTask) Start(wg *sync.WaitGroup) {
         return
     }

-    videoFile, err := ffmpeg.NewVideoFile(instance.StaticPaths.FFProbe, t.Scene.Path)
+    videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path)
     if err != nil {
         logger.Errorf("error reading video file: %s", err.Error())
         return
@@ -6,6 +6,7 @@ import (
     "github.com/jmoiron/sqlx"
     "github.com/stashapp/stash/pkg/database"
     "github.com/stashapp/stash/pkg/logger"
+    "github.com/stashapp/stash/pkg/manager/config"
     "github.com/stashapp/stash/pkg/manager/jsonschema"
     "github.com/stashapp/stash/pkg/models"
     "github.com/stashapp/stash/pkg/utils"
@@ -33,7 +34,7 @@ func (t *ImportTask) Start(wg *sync.WaitGroup) {
     }
     t.Scraped = scraped

-    database.Reset(instance.StaticPaths.DatabaseFile)
+    database.Reset(config.GetDatabasePath())

     ctx := context.TODO()
@@ -70,7 +70,7 @@ func (t *ScanTask) scanGallery() {
 }

 func (t *ScanTask) scanScene() {
-    videoFile, err := ffmpeg.NewVideoFile(instance.StaticPaths.FFProbe, t.FilePath)
+    videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.FilePath)
     if err != nil {
         logger.Error(err.Error())
         return
@@ -142,7 +142,7 @@ func (t *ScanTask) makeScreenshots(probeResult ffmpeg.VideoFile, checksum string
 }

 func (t *ScanTask) makeScreenshot(probeResult ffmpeg.VideoFile, outputPath string, quality int, width int) {
-    encoder := ffmpeg.NewEncoder(instance.StaticPaths.FFMPEG)
+    encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)
     options := ffmpeg.ScreenshotOptions{
         OutputPath: outputPath,
         Quality:    quality,
@@ -26,7 +26,7 @@ func (t *GenerateTranscodeTask) Start(wg *sync.WaitGroup) {
     logger.Infof("[transcode] <%s> scene has codec %s", t.Scene.Checksum, t.Scene.VideoCodec.String)

-    videoFile, err := ffmpeg.NewVideoFile(instance.StaticPaths.FFProbe, t.Scene.Path)
+    videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path)
     if err != nil {
         logger.Errorf("[transcode] error reading video file: %s", err.Error())
         return
@@ -36,7 +36,7 @@ func (t *GenerateTranscodeTask) Start(wg *sync.WaitGroup) {
     options := ffmpeg.TranscodeOptions{
         OutputPath: outputPath,
     }
-    encoder := ffmpeg.NewEncoder(instance.StaticPaths.FFMPEG)
+    encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)
     encoder.Transcode(*videoFile, options)
     if err := os.Rename(outputPath, instance.Paths.Scene.GetTranscodePath(t.Scene.Checksum)); err != nil {
         logger.Errorf("[transcode] error generating transcode: %s", err.Error())
@@ -47,6 +47,10 @@ type DirectiveRoot struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type ComplexityRoot struct {
|
type ComplexityRoot struct {
|
||||||
|
ConfigGeneralResult struct {
|
||||||
|
Stashes func(childComplexity int) int
|
||||||
|
}
|
||||||
|
|
||||||
FindGalleriesResultType struct {
|
FindGalleriesResultType struct {
|
||||||
Count func(childComplexity int) int
|
Count func(childComplexity int) int
|
||||||
Galleries func(childComplexity int) int
|
Galleries func(childComplexity int) int
|
||||||
@@ -149,6 +153,7 @@ type ComplexityRoot struct {
|
|||||||
SceneMarkerTags func(childComplexity int, scene_id string) int
|
SceneMarkerTags func(childComplexity int, scene_id string) int
|
||||||
ScrapeFreeones func(childComplexity int, performer_name string) int
|
ScrapeFreeones func(childComplexity int, performer_name string) int
|
||||||
ScrapeFreeonesPerformerList func(childComplexity int, query string) int
|
ScrapeFreeonesPerformerList func(childComplexity int, query string) int
|
||||||
|
ConfigureGeneral func(childComplexity int, input *ConfigGeneralInput) int
|
||||||
MetadataImport func(childComplexity int) int
|
MetadataImport func(childComplexity int) int
|
||||||
MetadataExport func(childComplexity int) int
|
MetadataExport func(childComplexity int) int
|
||||||
MetadataScan func(childComplexity int) int
|
MetadataScan func(childComplexity int) int
|
||||||
@@ -322,6 +327,7 @@ type QueryResolver interface {
|
|||||||
SceneMarkerTags(ctx context.Context, scene_id string) ([]SceneMarkerTag, error)
|
SceneMarkerTags(ctx context.Context, scene_id string) ([]SceneMarkerTag, error)
|
||||||
ScrapeFreeones(ctx context.Context, performer_name string) (*ScrapedPerformer, error)
|
ScrapeFreeones(ctx context.Context, performer_name string) (*ScrapedPerformer, error)
|
||||||
ScrapeFreeonesPerformerList(ctx context.Context, query string) ([]string, error)
|
ScrapeFreeonesPerformerList(ctx context.Context, query string) ([]string, error)
|
||||||
|
ConfigureGeneral(ctx context.Context, input *ConfigGeneralInput) (ConfigGeneralResult, error)
|
||||||
MetadataImport(ctx context.Context) (string, error)
|
MetadataImport(ctx context.Context) (string, error)
|
||||||
MetadataExport(ctx context.Context) (string, error)
|
MetadataExport(ctx context.Context) (string, error)
|
||||||
MetadataScan(ctx context.Context) (string, error)
|
MetadataScan(ctx context.Context) (string, error)
|
||||||
@@ -1060,6 +1066,34 @@ func (e *executableSchema) field_Query_scrapeFreeonesPerformerList_args(ctx cont
|
|||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (e *executableSchema) field_Query_configureGeneral_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
|
||||||
|
args := map[string]interface{}{}
|
||||||
|
var arg0 *ConfigGeneralInput
|
||||||
|
if tmp, ok := rawArgs["input"]; ok {
|
||||||
|
var err error
|
||||||
|
var ptr1 ConfigGeneralInput
|
||||||
|
if tmp != nil {
|
||||||
|
ptr1, err = UnmarshalConfigGeneralInput(tmp)
|
||||||
|
arg0 = &ptr1
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if arg0 != nil {
|
||||||
|
var err error
|
||||||
|
arg0, err = e.ConfigGeneralInputMiddleware(ctx, arg0)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
args["input"] = arg0
|
||||||
|
return args, nil
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
func (e *executableSchema) field_Query___type_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
|
func (e *executableSchema) field_Query___type_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) {
|
||||||
args := map[string]interface{}{}
|
args := map[string]interface{}{}
|
||||||
var arg0 string
|
var arg0 string
|
||||||
@@ -1118,6 +1152,13 @@ func (e *executableSchema) Schema() *ast.Schema {
|
|||||||
func (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int, bool) {
|
func (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int, bool) {
|
||||||
switch typeName + "." + field {
|
switch typeName + "." + field {
|
||||||
|
|
||||||
|
case "ConfigGeneralResult.stashes":
|
||||||
|
if e.complexity.ConfigGeneralResult.Stashes == nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
return e.complexity.ConfigGeneralResult.Stashes(childComplexity), true
|
||||||
|
|
||||||
case "FindGalleriesResultType.count":
|
case "FindGalleriesResultType.count":
|
||||||
if e.complexity.FindGalleriesResultType.Count == nil {
|
if e.complexity.FindGalleriesResultType.Count == nil {
|
||||||
break
|
break
|
||||||
@@ -1755,6 +1796,18 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
|
|||||||
|
|
||||||
return e.complexity.Query.ScrapeFreeonesPerformerList(childComplexity, args["query"].(string)), true
|
return e.complexity.Query.ScrapeFreeonesPerformerList(childComplexity, args["query"].(string)), true
|
||||||
|
|
||||||
|
case "Query.configureGeneral":
|
||||||
|
if e.complexity.Query.ConfigureGeneral == nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
args, err := e.field_Query_configureGeneral_args(context.TODO(), rawArgs)
|
||||||
|
if err != nil {
|
||||||
|
return 0, false
|
||||||
|
}
|
||||||
|
|
||||||
|
return e.complexity.Query.ConfigureGeneral(childComplexity, args["input"].(*ConfigGeneralInput)), true
|
||||||
|
|
||||||
case "Query.metadataImport":
|
case "Query.metadataImport":
|
||||||
if e.complexity.Query.MetadataImport == nil {
|
if e.complexity.Query.MetadataImport == nil {
|
||||||
break
|
break
|
||||||
@@ -2383,6 +2436,64 @@ type executionContext struct {
|
|||||||
*executableSchema
|
*executableSchema
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var configGeneralResultImplementors = []string{"ConfigGeneralResult"}
|
||||||
|
|
||||||
|
// nolint: gocyclo, errcheck, gas, goconst
|
||||||
|
func (ec *executionContext) _ConfigGeneralResult(ctx context.Context, sel ast.SelectionSet, obj *ConfigGeneralResult) graphql.Marshaler {
|
||||||
|
fields := graphql.CollectFields(ctx, sel, configGeneralResultImplementors)
|
||||||
|
|
||||||
|
out := graphql.NewFieldSet(fields)
|
||||||
|
invalid := false
|
||||||
|
for i, field := range fields {
|
||||||
|
switch field.Name {
|
||||||
|
case "__typename":
|
||||||
|
out.Values[i] = graphql.MarshalString("ConfigGeneralResult")
|
||||||
|
case "stashes":
|
||||||
|
out.Values[i] = ec._ConfigGeneralResult_stashes(ctx, field, obj)
|
||||||
|
default:
|
||||||
|
panic("unknown field " + strconv.Quote(field.Name))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
out.Dispatch()
|
||||||
|
if invalid {
|
||||||
|
return graphql.Null
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
// nolint: vetshadow
|
||||||
|
func (ec *executionContext) _ConfigGeneralResult_stashes(ctx context.Context, field graphql.CollectedField, obj *ConfigGeneralResult) graphql.Marshaler {
|
||||||
|
ctx = ec.Tracer.StartFieldExecution(ctx, field)
|
||||||
|
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
|
||||||
|
rctx := &graphql.ResolverContext{
|
||||||
|
Object: "ConfigGeneralResult",
|
||||||
|
Field: field,
|
||||||
|
Args: nil,
|
||||||
|
}
|
||||||
|
ctx = graphql.WithResolverContext(ctx, rctx)
|
||||||
|
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
|
||||||
|
resTmp := ec.FieldMiddleware(ctx, obj, func(rctx context.Context) (interface{}, error) {
|
||||||
|
ctx = rctx // use context from middleware stack in children
|
||||||
|
return obj.Stashes, nil
|
||||||
|
})
|
||||||
|
if resTmp == nil {
|
||||||
|
return graphql.Null
|
||||||
|
}
|
||||||
|
res := resTmp.([]string)
|
||||||
|
rctx.Result = res
|
||||||
|
ctx = ec.Tracer.StartFieldChildExecution(ctx)
|
||||||
|
|
||||||
|
arr1 := make(graphql.Array, len(res))
|
||||||
|
|
||||||
|
for idx1 := range res {
|
||||||
|
arr1[idx1] = func() graphql.Marshaler {
|
||||||
|
return graphql.MarshalString(res[idx1])
|
||||||
|
}()
|
||||||
|
}
|
||||||
|
|
||||||
|
return arr1
|
||||||
|
}
|
||||||
|
|
||||||
var findGalleriesResultTypeImplementors = []string{"FindGalleriesResultType"}
|
var findGalleriesResultTypeImplementors = []string{"FindGalleriesResultType"}
|
||||||
|
|
||||||
// nolint: gocyclo, errcheck, gas, goconst
|
// nolint: gocyclo, errcheck, gas, goconst
|
||||||
@@ -4824,6 +4935,15 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr
|
|||||||
}
|
}
|
||||||
return res
|
return res
|
||||||
})
|
})
|
||||||
|
case "configureGeneral":
|
||||||
|
field := field
|
||||||
|
out.Concurrently(i, func() (res graphql.Marshaler) {
|
||||||
|
res = ec._Query_configureGeneral(ctx, field)
|
||||||
|
if res == graphql.Null {
|
||||||
|
invalid = true
|
||||||
|
}
|
||||||
|
return res
|
||||||
|
})
|
||||||
case "metadataImport":
|
case "metadataImport":
|
||||||
field := field
|
field := field
|
||||||
out.Concurrently(i, func() (res graphql.Marshaler) {
|
out.Concurrently(i, func() (res graphql.Marshaler) {
|
||||||
@@ -5712,6 +5832,41 @@ func (ec *executionContext) _Query_scrapeFreeonesPerformerList(ctx context.Conte
|
|||||||
return arr1
|
return arr1
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// nolint: vetshadow
|
||||||
|
func (ec *executionContext) _Query_configureGeneral(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
|
||||||
|
ctx = ec.Tracer.StartFieldExecution(ctx, field)
|
||||||
|
defer func() { ec.Tracer.EndFieldExecution(ctx) }()
|
||||||
|
rctx := &graphql.ResolverContext{
|
||||||
|
Object: "Query",
|
||||||
|
Field: field,
|
||||||
|
Args: nil,
|
||||||
|
}
|
||||||
|
ctx = graphql.WithResolverContext(ctx, rctx)
|
||||||
|
rawArgs := field.ArgumentMap(ec.Variables)
|
||||||
|
args, err := ec.field_Query_configureGeneral_args(ctx, rawArgs)
|
||||||
|
if err != nil {
|
||||||
|
ec.Error(ctx, err)
|
||||||
|
return graphql.Null
|
||||||
|
}
|
||||||
|
rctx.Args = args
|
||||||
|
ctx = ec.Tracer.StartFieldResolverExecution(ctx, rctx)
|
||||||
|
resTmp := ec.FieldMiddleware(ctx, nil, func(rctx context.Context) (interface{}, error) {
|
||||||
|
ctx = rctx // use context from middleware stack in children
|
||||||
|
return ec.resolvers.Query().ConfigureGeneral(rctx, args["input"].(*ConfigGeneralInput))
|
||||||
|
})
|
||||||
|
if resTmp == nil {
|
||||||
|
if !ec.HasError(rctx) {
|
||||||
|
ec.Errorf(ctx, "must not be null")
|
||||||
|
}
|
||||||
|
return graphql.Null
|
||||||
|
}
|
||||||
|
res := resTmp.(ConfigGeneralResult)
|
||||||
|
rctx.Result = res
|
||||||
|
ctx = ec.Tracer.StartFieldChildExecution(ctx)
|
||||||
|
|
||||||
|
return ec._ConfigGeneralResult(ctx, field.Selections, &res)
|
||||||
|
}
|
||||||
|
|
||||||
// nolint: vetshadow
|
// nolint: vetshadow
|
||||||
func (ec *executionContext) _Query_metadataImport(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
|
func (ec *executionContext) _Query_metadataImport(ctx context.Context, field graphql.CollectedField) graphql.Marshaler {
|
||||||
ctx = ec.Tracer.StartFieldExecution(ctx, field)
|
ctx = ec.Tracer.StartFieldExecution(ctx, field)
|
||||||
@@ -10227,6 +10382,40 @@ func (ec *executionContext) ___Type_ofType(ctx context.Context, field graphql.Co
 	return ec.___Type(ctx, field.Selections, res)
 }
 
+func UnmarshalConfigGeneralInput(v interface{}) (ConfigGeneralInput, error) {
+	var it ConfigGeneralInput
+	var asMap = v.(map[string]interface{})
+
+	for k, v := range asMap {
+		switch k {
+		case "stashes":
+			var err error
+			var rawIf1 []interface{}
+			if v != nil {
+				if tmp1, ok := v.([]interface{}); ok {
+					rawIf1 = tmp1
+				} else {
+					rawIf1 = []interface{}{v}
+				}
+			}
+			it.Stashes = make([]string, len(rawIf1))
+			for idx1 := range rawIf1 {
+				it.Stashes[idx1], err = graphql.UnmarshalString(rawIf1[idx1])
+			}
+			if err != nil {
+				return it, err
+			}
+		}
+	}
+
+	return it, nil
+}
+
+func (e *executableSchema) ConfigGeneralInputMiddleware(ctx context.Context, obj *ConfigGeneralInput) (*ConfigGeneralInput, error) {
+
+	return obj, nil
+}
+
 func UnmarshalFindFilterType(v interface{}) (FindFilterType, error) {
 	var it FindFilterType
 	var asMap = v.(map[string]interface{})
@@ -11757,6 +11946,20 @@ input SceneFilterType {
   performer_id: ID
 }
 
+#######################################
+# Config
+#######################################
+
+input ConfigGeneralInput {
+  """Array of file paths to content"""
+  stashes: [String!]
+}
+
+type ConfigGeneralResult {
+  """Array of file paths to content"""
+  stashes: [String!]
+}
+
 #############
 # Root Schema
 #############
@@ -11807,6 +12010,9 @@ type Query {
   """Scrape a list of performers from a query"""
   scrapeFreeonesPerformerList(query: String!): [String!]!
 
+  # Config
+  configureGeneral(input: ConfigGeneralInput): ConfigGeneralResult!
+
   # Metadata
 
   """Start an import. Returns the job ID"""
@@ -8,6 +8,16 @@ import (
 	"strconv"
 )
 
+type ConfigGeneralInput struct {
+	// Array of file paths to content
+	Stashes []string `json:"stashes"`
+}
+
+type ConfigGeneralResult struct {
+	// Array of file paths to content
+	Stashes []string `json:"stashes"`
+}
+
 type FindFilterType struct {
 	Q *string `json:"q"`
 	Page *int `json:"page"`
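The two structs above are the Go models behind the new configureGeneral field. The commit message says configuration is now managed with viper, so a resolver for this field would plausibly validate the submitted paths and persist them through viper. The following is a minimal, self-contained sketch of that idea; the function name, the "stash" config key, and the config file location are illustrative assumptions, not code from this commit:

package main

import (
	"fmt"
	"os"

	"github.com/spf13/viper"
)

// ConfigGeneralInput mirrors the generated model added in this commit.
type ConfigGeneralInput struct {
	Stashes []string `json:"stashes"`
}

// ConfigGeneralResult mirrors the generated model added in this commit.
type ConfigGeneralResult struct {
	Stashes []string `json:"stashes"`
}

// configureGeneral validates each submitted directory, stores the list under
// an assumed "stash" key, and writes the config file so the change persists.
func configureGeneral(input *ConfigGeneralInput) (ConfigGeneralResult, error) {
	if input != nil && len(input.Stashes) > 0 {
		for _, dir := range input.Stashes {
			info, err := os.Stat(dir)
			if err != nil || !info.IsDir() {
				return ConfigGeneralResult{}, fmt.Errorf("path either doesn't exist, or is not a directory <%s>", dir)
			}
		}
		viper.Set("stash", input.Stashes)
		if err := viper.WriteConfig(); err != nil {
			return ConfigGeneralResult{}, err
		}
	}
	return ConfigGeneralResult{Stashes: viper.GetStringSlice("stash")}, nil
}

func main() {
	viper.SetConfigFile("config.yml") // assumed path, purely for the sketch
	_ = viper.ReadInConfig()          // ignore "file not found" for the demo
	res, err := configureGeneral(&ConfigGeneralInput{Stashes: []string{os.TempDir()}})
	fmt.Println(res.Stashes, err)
}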
@@ -1,6 +1,7 @@
 package utils
 
 import (
+	"fmt"
 	"github.com/h2non/filetype"
 	"github.com/h2non/filetype/types"
 	"os"
@@ -28,6 +29,27 @@ func FileExists(path string) (bool, error) {
 	}
 }
 
+func DirExists(path string) (bool, error) {
+	exists, _ := FileExists(path)
+	fileInfo, _ := os.Stat(path)
+	if !exists || !fileInfo.IsDir() {
+		return false, fmt.Errorf("path either doesn't exist, or is not a directory <%s>", path)
+	}
+	return true, nil
+}
+
+func Touch(path string) error {
+	var _, err = os.Stat(path)
+	if os.IsNotExist(err) {
+		var file, err = os.Create(path)
+		if err != nil {
+			return err
+		}
+		defer file.Close()
+	}
+	return nil
+}
+
 func EnsureDir(path string) error {
 	exists, err := FileExists(path)
 	if !exists {
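DirExists and Touch round out the existing FileExists/EnsureDir helpers and are the kind of primitives a config endpoint needs when checking stash paths and creating a config file on first run. A small usage sketch follows; the import path and the .stash/config.yml location are assumptions made for illustration:

package main

import (
	"fmt"
	"path/filepath"

	"github.com/stashapp/stash/pkg/utils" // assumed package path for these helpers
)

func main() {
	configDir := filepath.Join(".", ".stash") // hypothetical config directory
	if err := utils.EnsureDir(configDir); err != nil {
		fmt.Println("cannot create config dir:", err)
		return
	}

	// Touch creates the file only if it does not exist yet, so it is safe to
	// call on every startup before the config is read.
	configFile := filepath.Join(configDir, "config.yml")
	if err := utils.Touch(configFile); err != nil {
		fmt.Println("cannot create config file:", err)
		return
	}

	// DirExists reports an error describing whether the path is missing or
	// not a directory, which makes it suitable for validating stash paths.
	if _, err := utils.DirExists(configDir); err != nil {
		fmt.Println(err)
	}
}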
@@ -1,393 +0,0 @@
-# Querys
-
-query FindScenes($filter: FindFilterType, $scene_filter: SceneFilterType, $scene_ids: [Int!]) {
-  findScenes(filter: $filter, scene_filter: $scene_filter, scene_ids: $scene_ids) {
-    count
-    scenes {
-      ...SlimSceneData
-    }
-  }
-}
-
-query FindScene($id: ID!, $checksum: String) {
-  findScene(id: $id, checksum: $checksum) {
-    ...SceneData
-  }
-
-  sceneMarkerTags(scene_id: $id) {
-    tag {
-      id
-      name
-    }
-    scene_markers {
-      ...SceneMarkerData
-    }
-  }
-}
-
-query FindSceneMarkers($filter: FindFilterType, $scene_marker_filter: SceneMarkerFilterType) {
-  findSceneMarkers(filter: $filter, scene_marker_filter: $scene_marker_filter) {
-    count
-    scene_markers {
-      ...SceneMarkerData
-    }
-  }
-}
-
-query SceneWall($q: String) {
-  sceneWall(q: $q) {
-    ...SceneData
-  }
-}
-
-query MarkerWall($q: String) {
-  markerWall(q: $q) {
-    ...SceneMarkerData
-  }
-}
-
-query FindPerformers($filter: FindFilterType, $performer_filter: PerformerFilterType) {
-  findPerformers(filter: $filter, performer_filter: $performer_filter) {
-    count
-    performers {
-      ...PerformerData
-    }
-  }
-}
-
-query FindPerformer($id: ID!) {
-  findPerformer(id: $id) {
-    ...PerformerData
-  }
-}
-
-query FindStudios($filter: FindFilterType) {
-  findStudios(filter: $filter) {
-    count
-    studios {
-      ...StudioData
-    }
-  }
-}
-
-query FindStudio($id: ID!) {
-  findStudio(id: $id) {
-    ...StudioData
-  }
-}
-
-query FindGalleries($filter: FindFilterType) {
-  findGalleries(filter: $filter) {
-    count
-    galleries {
-      ...GalleryData
-    }
-  }
-}
-
-query FindGallery($id: ID!) {
-  findGallery(id: $id) {
-    ...GalleryData
-  }
-}
-
-query FindTag($id: ID!) {
-  findTag(id: $id) {
-    ...TagData
-  }
-}
-
-query MarkerStrings($q: String, $sort: String) {
-  markerStrings(q: $q, sort: $sort) {
-    id
-    count
-    title
-  }
-}
-
-query ScrapeFreeones($performer_name: String!) {
-  scrapeFreeones(performer_name: $performer_name) {
-    name
-    url
-    twitter
-    instagram
-    birthdate
-    ethnicity
-    country
-    eye_color
-    height
-    measurements
-    fake_tits
-    career_length
-    tattoos
-    piercings
-    aliases
-  }
-}
-
-query ScrapeFreeonesPerformers($q: String!) {
-  scrapeFreeonesPerformerList(query: $q)
-}
-
-query AllTags {
-  allTags {
-    ...TagData
-  }
-}
-
-query AllPerformersForFilter {
-  allPerformers {
-    ...SlimPerformerData
-  }
-}
-
-query AllStudiosForFilter {
-  allStudios {
-    ...SlimStudioData
-  }
-}
-
-query AllTagsForFilter {
-  allTags {
-    id
-    name
-  }
-}
-
-query ValidGalleriesForScene($scene_id: ID!) {
-  validGalleriesForScene(scene_id: $scene_id) {
-    id
-    path
-  }
-}
-
-query Stats {
-  stats {
-    scene_count,
-    gallery_count,
-    performer_count,
-    studio_count,
-    tag_count
-  }
-}
-
-
-# Mutations
-
-
-mutation SceneUpdate(
-  $id: ID!,
-  $title: String,
-  $details: String,
-  $url: String,
-  $date: String,
-  $rating: Int,
-  $studio_id: ID,
-  $gallery_id: ID,
-  $performer_ids: [ID!] = [],
-  $tag_ids: [ID!] = []) {
-
-  sceneUpdate(input: {
-    id: $id,
-    title: $title,
-    details: $details,
-    url: $url,
-    date: $date,
-    rating: $rating,
-    studio_id: $studio_id,
-    gallery_id: $gallery_id,
-    performer_ids: $performer_ids,
-    tag_ids: $tag_ids
-  }) {
-    ...SceneData
-  }
-}
-
-mutation PerformerCreate(
-  $name: String,
-  $url: String,
-  $birthdate: String,
-  $ethnicity: String,
-  $country: String,
-  $eye_color: String,
-  $height: String,
-  $measurements: String,
-  $fake_tits: String,
-  $career_length: String,
-  $tattoos: String,
-  $piercings: String,
-  $aliases: String,
-  $twitter: String,
-  $instagram: String,
-  $favorite: Boolean,
-  $image: String!) {
-
-  performerCreate(input: {
-    name: $name,
-    url: $url,
-    birthdate: $birthdate,
-    ethnicity: $ethnicity,
-    country: $country,
-    eye_color: $eye_color,
-    height: $height,
-    measurements: $measurements,
-    fake_tits: $fake_tits,
-    career_length: $career_length,
-    tattoos: $tattoos,
-    piercings: $piercings,
-    aliases: $aliases,
-    twitter: $twitter,
-    instagram: $instagram,
-    favorite: $favorite,
-    image: $image
-  }) {
-    ...PerformerData
-  }
-}
-
-mutation PerformerUpdate(
-  $id: ID!,
-  $name: String,
-  $url: String,
-  $birthdate: String,
-  $ethnicity: String,
-  $country: String,
-  $eye_color: String,
-  $height: String,
-  $measurements: String,
-  $fake_tits: String,
-  $career_length: String,
-  $tattoos: String,
-  $piercings: String,
-  $aliases: String,
-  $twitter: String,
-  $instagram: String,
-  $favorite: Boolean,
-  $image: String) {
-
-  performerUpdate(input: {
-    id: $id,
-    name: $name,
-    url: $url,
-    birthdate: $birthdate,
-    ethnicity: $ethnicity,
-    country: $country,
-    eye_color: $eye_color,
-    height: $height,
-    measurements: $measurements,
-    fake_tits: $fake_tits,
-    career_length: $career_length,
-    tattoos: $tattoos,
-    piercings: $piercings,
-    aliases: $aliases,
-    twitter: $twitter,
-    instagram: $instagram,
-    favorite: $favorite,
-    image: $image
-  }) {
-    ...PerformerData
-  }
-}
-
-mutation StudioCreate(
-  $name: String!,
-  $url: String,
-  $image: String!) {
-
-  studioCreate(input: { name: $name, url: $url, image: $image }) {
-    ...StudioData
-  }
-}
-
-mutation StudioUpdate(
-  $id: ID!
-  $name: String,
-  $url: String,
-  $image: String) {
-
-  studioUpdate(input: { id: $id, name: $name, url: $url, image: $image }) {
-    ...StudioData
-  }
-}
-
-mutation TagCreate($name: String!) {
-  tagCreate(input: { name: $name }) {
-    ...TagData
-  }
-}
-
-mutation TagDestroy($id: ID!) {
-  tagDestroy(input: { id: $id })
-}
-
-mutation TagUpdate($id: ID!, $name: String!) {
-  tagUpdate(input: { id: $id, name: $name }) {
-    ...TagData
-  }
-}
-
-mutation SceneMarkerCreate(
-  $title: String!,
-  $seconds: Float!,
-  $scene_id: ID!,
-  $primary_tag_id: ID!,
-  $tag_ids: [ID!] = []) {
-
-  sceneMarkerCreate(input: {
-    title: $title,
-    seconds: $seconds,
-    scene_id: $scene_id,
-    primary_tag_id: $primary_tag_id,
-    tag_ids: $tag_ids
-  }) {
-    ...SceneMarkerData
-  }
-}
-
-mutation SceneMarkerUpdate(
-  $id: ID!,
-  $title: String!,
-  $seconds: Float!,
-  $scene_id: ID!,
-  $primary_tag_id: ID!,
-  $tag_ids: [ID!] = []) {
-
-  sceneMarkerUpdate(input: {
-    id: $id,
-    title: $title,
-    seconds: $seconds,
-    scene_id: $scene_id,
-    primary_tag_id: $primary_tag_id,
-    tag_ids: $tag_ids
-  }) {
-    ...SceneMarkerData
-  }
-}
-
-mutation SceneMarkerDestroy($id: ID!) {
-  sceneMarkerDestroy(id: $id)
-}
-
-query MetadataImport {
-  metadataImport
-}
-
-query MetadataExport {
-  metadataExport
-}
-
-query MetadataScan {
-  metadataScan
-}
-
-query MetadataGenerate {
-  metadataGenerate
-}
-
-query MetadataClean {
-  metadataClean
-}
-
-subscription MetadataUpdate {
-  metadataUpdate
-}
85 schema/documents/mutations/performer.graphql Normal file
@@ -0,0 +1,85 @@
+mutation PerformerCreate(
+  $name: String,
+  $url: String,
+  $birthdate: String,
+  $ethnicity: String,
+  $country: String,
+  $eye_color: String,
+  $height: String,
+  $measurements: String,
+  $fake_tits: String,
+  $career_length: String,
+  $tattoos: String,
+  $piercings: String,
+  $aliases: String,
+  $twitter: String,
+  $instagram: String,
+  $favorite: Boolean,
+  $image: String!) {
+
+  performerCreate(input: {
+    name: $name,
+    url: $url,
+    birthdate: $birthdate,
+    ethnicity: $ethnicity,
+    country: $country,
+    eye_color: $eye_color,
+    height: $height,
+    measurements: $measurements,
+    fake_tits: $fake_tits,
+    career_length: $career_length,
+    tattoos: $tattoos,
+    piercings: $piercings,
+    aliases: $aliases,
+    twitter: $twitter,
+    instagram: $instagram,
+    favorite: $favorite,
+    image: $image
+  }) {
+    ...PerformerData
+  }
+}
+
+mutation PerformerUpdate(
+  $id: ID!,
+  $name: String,
+  $url: String,
+  $birthdate: String,
+  $ethnicity: String,
+  $country: String,
+  $eye_color: String,
+  $height: String,
+  $measurements: String,
+  $fake_tits: String,
+  $career_length: String,
+  $tattoos: String,
+  $piercings: String,
+  $aliases: String,
+  $twitter: String,
+  $instagram: String,
+  $favorite: Boolean,
+  $image: String) {
+
+  performerUpdate(input: {
+    id: $id,
+    name: $name,
+    url: $url,
+    birthdate: $birthdate,
+    ethnicity: $ethnicity,
+    country: $country,
+    eye_color: $eye_color,
+    height: $height,
+    measurements: $measurements,
+    fake_tits: $fake_tits,
+    career_length: $career_length,
+    tattoos: $tattoos,
+    piercings: $piercings,
+    aliases: $aliases,
+    twitter: $twitter,
+    instagram: $instagram,
+    favorite: $favorite,
+    image: $image
+  }) {
+    ...PerformerData
+  }
+}
41 schema/documents/mutations/scene-marker.graphql Normal file
@@ -0,0 +1,41 @@
+mutation SceneMarkerCreate(
+  $title: String!,
+  $seconds: Float!,
+  $scene_id: ID!,
+  $primary_tag_id: ID!,
+  $tag_ids: [ID!] = []) {
+
+  sceneMarkerCreate(input: {
+    title: $title,
+    seconds: $seconds,
+    scene_id: $scene_id,
+    primary_tag_id: $primary_tag_id,
+    tag_ids: $tag_ids
+  }) {
+    ...SceneMarkerData
+  }
+}
+
+mutation SceneMarkerUpdate(
+  $id: ID!,
+  $title: String!,
+  $seconds: Float!,
+  $scene_id: ID!,
+  $primary_tag_id: ID!,
+  $tag_ids: [ID!] = []) {
+
+  sceneMarkerUpdate(input: {
+    id: $id,
+    title: $title,
+    seconds: $seconds,
+    scene_id: $scene_id,
+    primary_tag_id: $primary_tag_id,
+    tag_ids: $tag_ids
+  }) {
+    ...SceneMarkerData
+  }
+}
+
+mutation SceneMarkerDestroy($id: ID!) {
+  sceneMarkerDestroy(id: $id)
+}
27 schema/documents/mutations/scene.graphql Normal file
@@ -0,0 +1,27 @@
+mutation SceneUpdate(
+  $id: ID!,
+  $title: String,
+  $details: String,
+  $url: String,
+  $date: String,
+  $rating: Int,
+  $studio_id: ID,
+  $gallery_id: ID,
+  $performer_ids: [ID!] = [],
+  $tag_ids: [ID!] = []) {
+
+  sceneUpdate(input: {
+    id: $id,
+    title: $title,
+    details: $details,
+    url: $url,
+    date: $date,
+    rating: $rating,
+    studio_id: $studio_id,
+    gallery_id: $gallery_id,
+    performer_ids: $performer_ids,
+    tag_ids: $tag_ids
+  }) {
+    ...SceneData
+  }
+}
20 schema/documents/mutations/studio.graphql Normal file
@@ -0,0 +1,20 @@
+mutation StudioCreate(
+  $name: String!,
+  $url: String,
+  $image: String!) {
+
+  studioCreate(input: { name: $name, url: $url, image: $image }) {
+    ...StudioData
+  }
+}
+
+mutation StudioUpdate(
+  $id: ID!
+  $name: String,
+  $url: String,
+  $image: String) {
+
+  studioUpdate(input: { id: $id, name: $name, url: $url, image: $image }) {
+    ...StudioData
+  }
+}
15 schema/documents/mutations/tag.graphql Normal file
@@ -0,0 +1,15 @@
+mutation TagCreate($name: String!) {
+  tagCreate(input: { name: $name }) {
+    ...TagData
+  }
+}
+
+mutation TagDestroy($id: ID!) {
+  tagDestroy(input: { id: $id })
+}
+
+mutation TagUpdate($id: ID!, $name: String!) {
+  tagUpdate(input: { id: $id, name: $name }) {
+    ...TagData
+  }
+}
14 schema/documents/queries/gallery.graphql Normal file
@@ -0,0 +1,14 @@
+query FindGalleries($filter: FindFilterType) {
+  findGalleries(filter: $filter) {
+    count
+    galleries {
+      ...GalleryData
+    }
+  }
+}
+
+query FindGallery($id: ID!) {
+  findGallery(id: $id) {
+    ...GalleryData
+  }
+}
11 schema/documents/queries/legacy.graphql Normal file
@@ -0,0 +1,11 @@
+query SceneWall($q: String) {
+  sceneWall(q: $q) {
+    ...SceneData
+  }
+}
+
+query MarkerWall($q: String) {
+  markerWall(q: $q) {
+    ...SceneMarkerData
+  }
+}
55 schema/documents/queries/misc.graphql Normal file
@@ -0,0 +1,55 @@
+query FindTag($id: ID!) {
+  findTag(id: $id) {
+    ...TagData
+  }
+}
+
+query MarkerStrings($q: String, $sort: String) {
+  markerStrings(q: $q, sort: $sort) {
+    id
+    count
+    title
+  }
+}
+
+query AllTags {
+  allTags {
+    ...TagData
+  }
+}
+
+query AllPerformersForFilter {
+  allPerformers {
+    ...SlimPerformerData
+  }
+}
+
+query AllStudiosForFilter {
+  allStudios {
+    ...SlimStudioData
+  }
+}
+
+query AllTagsForFilter {
+  allTags {
+    id
+    name
+  }
+}
+
+query ValidGalleriesForScene($scene_id: ID!) {
+  validGalleriesForScene(scene_id: $scene_id) {
+    id
+    path
+  }
+}
+
+query Stats {
+  stats {
+    scene_count,
+    gallery_count,
+    performer_count,
+    studio_count,
+    tag_count
+  }
+}
14 schema/documents/queries/performer.graphql Normal file
@@ -0,0 +1,14 @@
+query FindPerformers($filter: FindFilterType, $performer_filter: PerformerFilterType) {
+  findPerformers(filter: $filter, performer_filter: $performer_filter) {
+    count
+    performers {
+      ...PerformerData
+    }
+  }
+}
+
+query FindPerformer($id: ID!) {
+  findPerformer(id: $id) {
+    ...PerformerData
+  }
+}
8 schema/documents/queries/scene-marker.graphql Normal file
@@ -0,0 +1,8 @@
+query FindSceneMarkers($filter: FindFilterType, $scene_marker_filter: SceneMarkerFilterType) {
+  findSceneMarkers(filter: $filter, scene_marker_filter: $scene_marker_filter) {
+    count
+    scene_markers {
+      ...SceneMarkerData
+    }
+  }
+}
24 schema/documents/queries/scene.graphql Normal file
@@ -0,0 +1,24 @@
+query FindScenes($filter: FindFilterType, $scene_filter: SceneFilterType, $scene_ids: [Int!]) {
+  findScenes(filter: $filter, scene_filter: $scene_filter, scene_ids: $scene_ids) {
+    count
+    scenes {
+      ...SlimSceneData
+    }
+  }
+}
+
+query FindScene($id: ID!, $checksum: String) {
+  findScene(id: $id, checksum: $checksum) {
+    ...SceneData
+  }
+
+  sceneMarkerTags(scene_id: $id) {
+    tag {
+      id
+      name
+    }
+    scene_markers {
+      ...SceneMarkerData
+    }
+  }
+}
23 schema/documents/queries/scrapers/freeones.graphql Normal file
@@ -0,0 +1,23 @@
+query ScrapeFreeones($performer_name: String!) {
+  scrapeFreeones(performer_name: $performer_name) {
+    name
+    url
+    twitter
+    instagram
+    birthdate
+    ethnicity
+    country
+    eye_color
+    height
+    measurements
+    fake_tits
+    career_length
+    tattoos
+    piercings
+    aliases
+  }
+}
+
+query ScrapeFreeonesPerformers($q: String!) {
+  scrapeFreeonesPerformerList(query: $q)
+}
@@ -0,0 +1,3 @@
+query ConfigureGeneral($input: ConfigGeneralInput!) {
+  configureGeneral(input: $input)
+}
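The three-line document above is the client-side operation for the new endpoint. Any GraphQL client can exercise it once the server is running; below is a rough Go sketch that posts the same operation over HTTP. The server address and port are placeholders, and a { stashes } selection set is added here because the field returns an object type:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Based on the ConfigureGeneral document added in this commit.
	payload := map[string]interface{}{
		"query": `query ConfigureGeneral($input: ConfigGeneralInput!) {
  configureGeneral(input: $input) { stashes }
}`,
		"variables": map[string]interface{}{
			"input": map[string]interface{}{
				"stashes": []string{"/data/videos"}, // example path
			},
		},
	}
	body, _ := json.Marshal(payload)

	// Host and port are placeholders; use whatever address the server binds to.
	resp, err := http.Post("http://localhost:9999/graphql", "application/json", bytes.NewReader(body))
	if err != nil {
		fmt.Println(err)
		return
	}
	defer resp.Body.Close()

	var out map[string]interface{}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(out)
}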
19 schema/documents/queries/settings/metadata.graphql Normal file
@@ -0,0 +1,19 @@
+query MetadataImport {
+  metadataImport
+}
+
+query MetadataExport {
+  metadataExport
+}
+
+query MetadataScan {
+  metadataScan
+}
+
+query MetadataGenerate {
+  metadataGenerate
+}
+
+query MetadataClean {
+  metadataClean
+}
14 schema/documents/queries/studio.graphql Normal file
@@ -0,0 +1,14 @@
+query FindStudios($filter: FindFilterType) {
+  findStudios(filter: $filter) {
+    count
+    studios {
+      ...StudioData
+    }
+  }
+}
+
+query FindStudio($id: ID!) {
+  findStudio(id: $id) {
+    ...StudioData
+  }
+}
3 schema/documents/subscriptions.graphql Normal file
@@ -0,0 +1,3 @@
+subscription MetadataUpdate {
+  metadataUpdate
+}
@@ -372,6 +372,20 @@ input SceneFilterType {
   performer_id: ID
 }
 
+#######################################
+# Config
+#######################################
+
+input ConfigGeneralInput {
+  """Array of file paths to content"""
+  stashes: [String!]
+}
+
+type ConfigGeneralResult {
+  """Array of file paths to content"""
+  stashes: [String!]
+}
+
 #############
 # Root Schema
 #############
@@ -422,6 +436,9 @@ type Query {
   """Scrape a list of performers from a query"""
   scrapeFreeonesPerformerList(query: String!): [String!]!
 
+  # Config
+  configureGeneral(input: ConfigGeneralInput): ConfigGeneralResult!
+
   # Metadata
 
   """Start an import. Returns the job ID"""
@@ -16,7 +16,10 @@
 <label for="stash">Where is your porn located (mp4, wmv, zip, etc)?</label>
 <input name="stash" type="text" placeholder="EX: C:\videos (Windows) or /User/StashApp/Videos (macOS / Linux)" />
 
-<label for="metadata">Where would you like to save metadata? Metadata includes generated videos / images and backup JSON files.</label>
+<label for="generated">In order to provide previews Stash generates images and videos. This also includes transcodes for unsupported file formats. Where would you like to save generated files?</label>
+<input name="generated" type="text" placeholder="EX: C:\stash\generated (Windows) or /User/StashApp/stash/generated (macOS / Linux)" />
+
+<label for="metadata">Where would you like to save metadata? Metadata is stored as JSON files and can be created using the export button in settings.</label>
 <input name="metadata" type="text" placeholder="EX: C:\stash\metadata (Windows) or /User/StashApp/stash/metadata (macOS / Linux)" />
 
 <label for="cache">Where do you want to Stash to save cache / temporary files it might need to create?</label>