Mirror of https://github.com/stashapp/stash.git (synced 2025-12-18 04:44:37 +03:00)
Moved everything out of internal
@@ -1,13 +0,0 @@
package manager

type JobStatus int

const (
	Idle     JobStatus = 0
	Import   JobStatus = 1
	Export   JobStatus = 2
	Scan     JobStatus = 3
	Generate JobStatus = 4
	Clean    JobStatus = 5
	Scrape   JobStatus = 6
)
@@ -1,45 +0,0 @@
package manager

import "github.com/stashapp/stash/internal/manager/jsonschema"

type jsonUtils struct{}

func (jp *jsonUtils) getMappings() (*jsonschema.Mappings, error) {
	return jsonschema.LoadMappingsFile(instance.Paths.JSON.MappingsFile)
}

func (jp *jsonUtils) saveMappings(mappings *jsonschema.Mappings) error {
	return jsonschema.SaveMappingsFile(instance.Paths.JSON.MappingsFile, mappings)
}

func (jp *jsonUtils) getScraped() ([]jsonschema.ScrapedItem, error) {
	return jsonschema.LoadScrapedFile(instance.Paths.JSON.ScrapedFile)
}

func (jp *jsonUtils) saveScraped(scraped []jsonschema.ScrapedItem) error {
	return jsonschema.SaveScrapedFile(instance.Paths.JSON.ScrapedFile, scraped)
}

func (jp *jsonUtils) getPerformer(checksum string) (*jsonschema.Performer, error) {
	return jsonschema.LoadPerformerFile(instance.Paths.JSON.PerformerJSONPath(checksum))
}

func (jp *jsonUtils) savePerformer(checksum string, performer *jsonschema.Performer) error {
	return jsonschema.SavePerformerFile(instance.Paths.JSON.PerformerJSONPath(checksum), performer)
}

func (jp *jsonUtils) getStudio(checksum string) (*jsonschema.Studio, error) {
	return jsonschema.LoadStudioFile(instance.Paths.JSON.StudioJSONPath(checksum))
}

func (jp *jsonUtils) saveStudio(checksum string, studio *jsonschema.Studio) error {
	return jsonschema.SaveStudioFile(instance.Paths.JSON.StudioJSONPath(checksum), studio)
}

func (jp *jsonUtils) getScene(checksum string) (*jsonschema.Scene, error) {
	return jsonschema.LoadSceneFile(instance.Paths.JSON.SceneJSONPath(checksum))
}

func (jp *jsonUtils) saveScene(checksum string, scene *jsonschema.Scene) error {
	return jsonschema.SaveSceneFile(instance.Paths.JSON.SceneJSONPath(checksum), scene)
}
@@ -1,28 +0,0 @@
package jsonschema

import (
	"encoding/json"
	"github.com/stashapp/stash/internal/logger"
	"os"
)

type Config struct {
	Stash    string `json:"stash"`
	Metadata string `json:"metadata"`
	// Generated string `json:"generated"` // TODO: Generated directory instead of metadata
	Cache     string `json:"cache"`
	Downloads string `json:"downloads"`
}

func LoadConfigFile(file string) *Config {
	var config Config
	configFile, err := os.Open(file)
	if err != nil {
		logger.Error(err.Error())
	}
	defer configFile.Close()
	jsonParser := json.NewDecoder(configFile)
	parseError := jsonParser.Decode(&config)
	if parseError != nil {
		panic(parseError)
	}
	return &config
}
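As a point of reference, here is a minimal sketch of the config file shape LoadConfigFile expects, inferred from the struct tags above. The paths in this sample are invented placeholders; the real file lives at ~/.stash/config.json per the fixedPaths file later in this commit.

package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors the Config struct above; the field names come from its json tags.
type Config struct {
	Stash     string `json:"stash"`
	Metadata  string `json:"metadata"`
	Cache     string `json:"cache"`
	Downloads string `json:"downloads"`
}

func main() {
	// Hypothetical contents of a config.json.
	raw := []byte(`{
		"stash": "/media/videos",
		"metadata": "/home/user/.stash/metadata",
		"cache": "/home/user/.stash/cache",
		"downloads": "/home/user/.stash/downloads"
	}`)

	var config Config
	if err := json.Unmarshal(raw, &config); err != nil {
		panic(err)
	}
	fmt.Println(config.Stash, config.Metadata)
}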
@@ -1,46 +0,0 @@
package jsonschema

import (
	"encoding/json"
	"fmt"
	"os"
)

type NameMapping struct {
	Name     string `json:"name"`
	Checksum string `json:"checksum"`
}

type PathMapping struct {
	Path     string `json:"path"`
	Checksum string `json:"checksum"`
}

type Mappings struct {
	Performers []NameMapping `json:"performers"`
	Studios    []NameMapping `json:"studios"`
	Galleries  []PathMapping `json:"galleries"`
	Scenes     []PathMapping `json:"scenes"`
}

func LoadMappingsFile(filePath string) (*Mappings, error) {
	var mappings Mappings
	file, err := os.Open(filePath)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	jsonParser := json.NewDecoder(file)
	err = jsonParser.Decode(&mappings)
	if err != nil {
		return nil, err
	}
	return &mappings, nil
}

func SaveMappingsFile(filePath string, mappings *Mappings) error {
	if mappings == nil {
		return fmt.Errorf("mappings must not be nil")
	}
	return marshalToFile(filePath, mappings)
}
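For illustration, a standalone sketch of the mappings.json shape these types serialize to; the names, paths, and checksums here are invented sample data, and real checksums are file hashes.

package main

import (
	"encoding/json"
	"os"
)

type NameMapping struct {
	Name     string `json:"name"`
	Checksum string `json:"checksum"`
}

type PathMapping struct {
	Path     string `json:"path"`
	Checksum string `json:"checksum"`
}

type Mappings struct {
	Performers []NameMapping `json:"performers"`
	Studios    []NameMapping `json:"studios"`
	Galleries  []PathMapping `json:"galleries"`
	Scenes     []PathMapping `json:"scenes"`
}

func main() {
	m := Mappings{
		Performers: []NameMapping{{Name: "Example Performer", Checksum: "c0ffee"}},
		Scenes:     []PathMapping{{Path: "/media/videos/example.mp4", Checksum: "deadbeef"}},
	}

	enc := json.NewEncoder(os.Stdout)
	enc.SetIndent("", " ") // same indent setting as the encode helper later in this commit
	_ = enc.Encode(m)
}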
@@ -1,49 +0,0 @@
package jsonschema

import (
	"encoding/json"
	"fmt"
	"os"
)

type Performer struct {
	Name         string `json:"name,omitempty"`
	Url          string `json:"url,omitempty"`
	Twitter      string `json:"twitter,omitempty"`
	Instagram    string `json:"instagram,omitempty"`
	Birthdate    string `json:"birthdate,omitempty"`
	Ethnicity    string `json:"ethnicity,omitempty"`
	Country      string `json:"country,omitempty"`
	EyeColor     string `json:"eye_color,omitempty"`
	Height       string `json:"height,omitempty"`
	Measurements string `json:"measurements,omitempty"`
	FakeTits     string `json:"fake_tits,omitempty"`
	CareerLength string `json:"career_length,omitempty"`
	Tattoos      string `json:"tattoos,omitempty"`
	Piercings    string `json:"piercings,omitempty"`
	Aliases      string `json:"aliases,omitempty"`
	Favorite     bool   `json:"favorite,omitempty"`
	Image        string `json:"image,omitempty"`
}

func LoadPerformerFile(filePath string) (*Performer, error) {
	var performer Performer
	file, err := os.Open(filePath)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	jsonParser := json.NewDecoder(file)
	err = jsonParser.Decode(&performer)
	if err != nil {
		return nil, err
	}
	return &performer, nil
}

func SavePerformerFile(filePath string, performer *Performer) error {
	if performer == nil {
		return fmt.Errorf("performer must not be nil")
	}
	return marshalToFile(filePath, performer)
}
@@ -1,61 +0,0 @@
package jsonschema

import (
	"encoding/json"
	"fmt"
	"os"
)

type SceneMarker struct {
	Title      string   `json:"title,omitempty"`
	Seconds    string   `json:"seconds,omitempty"`
	PrimaryTag string   `json:"primary_tag,omitempty"`
	Tags       []string `json:"tags,omitempty"`
}

type SceneFile struct {
	Size       string `json:"size"`
	Duration   string `json:"duration"`
	VideoCodec string `json:"video_codec"`
	AudioCodec string `json:"audio_codec"`
	Width      int    `json:"width"`
	Height     int    `json:"height"`
	Framerate  string `json:"framerate"`
	Bitrate    int    `json:"bitrate"`
}

type Scene struct {
	Title      string        `json:"title,omitempty"`
	Studio     string        `json:"studio,omitempty"`
	Url        string        `json:"url,omitempty"`
	Date       string        `json:"date,omitempty"`
	Rating     int           `json:"rating,omitempty"`
	Details    string        `json:"details,omitempty"`
	Gallery    string        `json:"gallery,omitempty"`
	Performers []string      `json:"performers,omitempty"`
	Tags       []string      `json:"tags,omitempty"`
	Markers    []SceneMarker `json:"markers,omitempty"`
	File       *SceneFile    `json:"file,omitempty"`
}

func LoadSceneFile(filePath string) (*Scene, error) {
	var scene Scene
	file, err := os.Open(filePath)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	jsonParser := json.NewDecoder(file)
	err = jsonParser.Decode(&scene)
	if err != nil {
		return nil, err
	}
	return &scene, nil
}

func SaveSceneFile(filePath string, scene *Scene) error {
	if scene == nil {
		return fmt.Errorf("scene must not be nil")
	}
	return marshalToFile(filePath, scene)
}
@@ -1,46 +0,0 @@
package jsonschema

import (
	"encoding/json"
	"fmt"
	"os"
)

type ScrapedItem struct {
	Title           string    `json:"title,omitempty"`
	Description     string    `json:"description,omitempty"`
	Url             string    `json:"url,omitempty"`
	Date            string    `json:"date,omitempty"`
	Rating          string    `json:"rating,omitempty"`
	Tags            string    `json:"tags,omitempty"`
	Models          string    `json:"models,omitempty"`
	Episode         int       `json:"episode,omitempty"`
	GalleryFilename string    `json:"gallery_filename,omitempty"`
	GalleryUrl      string    `json:"gallery_url,omitempty"`
	VideoFilename   string    `json:"video_filename,omitempty"`
	VideoUrl        string    `json:"video_url,omitempty"`
	Studio          string    `json:"studio,omitempty"`
	UpdatedAt       RailsTime `json:"updated_at,omitempty"`
}

func LoadScrapedFile(filePath string) ([]ScrapedItem, error) {
	var scraped []ScrapedItem
	file, err := os.Open(filePath)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	jsonParser := json.NewDecoder(file)
	err = jsonParser.Decode(&scraped)
	if err != nil {
		return nil, err
	}
	return scraped, nil
}

func SaveScrapedFile(filePath string, scrapedItems []ScrapedItem) error {
	if scrapedItems == nil {
		return fmt.Errorf("scraped items must not be nil")
	}
	return marshalToFile(filePath, scrapedItems)
}
@@ -1,35 +0,0 @@
package jsonschema

import (
	"encoding/json"
	"fmt"
	"os"
)

type Studio struct {
	Name  string `json:"name,omitempty"`
	Url   string `json:"url,omitempty"`
	Image string `json:"image,omitempty"`
}

func LoadStudioFile(filePath string) (*Studio, error) {
	var studio Studio
	file, err := os.Open(filePath)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	jsonParser := json.NewDecoder(file)
	err = jsonParser.Decode(&studio)
	if err != nil {
		return nil, err
	}
	return &studio, nil
}

func SaveStudioFile(filePath string, studio *Studio) error {
	if studio == nil {
		return fmt.Errorf("studio must not be nil")
	}
	return marshalToFile(filePath, studio)
}
@@ -1,37 +0,0 @@
package jsonschema

import (
	"fmt"
	"strings"
	"time"
)

type RailsTime struct {
	time.Time
}

const railsTimeLayout = "2006-01-02 15:04:05 MST"

func (ct *RailsTime) UnmarshalJSON(b []byte) (err error) {
	s := strings.Trim(string(b), "\"")
	if s == "null" {
		ct.Time = time.Time{}
		return
	}
	ct.Time, err = time.Parse(railsTimeLayout, s)
	if err != nil {
		ct.Time, err = time.Parse(time.RFC3339, s)
	}
	return
}

func (ct *RailsTime) MarshalJSON() ([]byte, error) {
	if ct.Time.UnixNano() == nilTime {
		return []byte("null"), nil
	}
	return []byte(fmt.Sprintf("\"%s\"", ct.Time.Format(time.RFC3339))), nil
}

func (ct *RailsTime) IsSet() bool {
	return ct.UnixNano() != nilTime
}
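A self-contained sketch of the two timestamp formats this unmarshaller accepts, using the same layout constant; the space-separated form matches the Rails-style strings that the scraped-item export (see the "TODO keeping ruby format" note later in this commit) carries through. The timestamps below are invented.

package main

import (
	"fmt"
	"strings"
	"time"
)

const railsTimeLayout = "2006-01-02 15:04:05 MST"

type RailsTime struct{ time.Time }

func (ct *RailsTime) UnmarshalJSON(b []byte) (err error) {
	s := strings.Trim(string(b), "\"")
	if s == "null" {
		ct.Time = time.Time{}
		return
	}
	// Try the Rails layout first, then fall back to RFC 3339.
	ct.Time, err = time.Parse(railsTimeLayout, s)
	if err != nil {
		ct.Time, err = time.Parse(time.RFC3339, s)
	}
	return
}

func main() {
	var t1, t2 RailsTime
	_ = t1.UnmarshalJSON([]byte(`"2019-02-14 18:30:00 UTC"`)) // Rails-style input
	_ = t2.UnmarshalJSON([]byte(`"2019-02-14T18:30:00Z"`))    // RFC 3339 fallback
	fmt.Println(t1.Time.Equal(t2.Time))                       // true: same instant
}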
@@ -1,39 +0,0 @@
package jsonschema

import (
	"bytes"
	"encoding/json"
	"io/ioutil"
	"time"
)

var nilTime = (time.Time{}).UnixNano()

func CompareJSON(a interface{}, b interface{}) bool {
	aBuf, _ := encode(a)
	bBuf, _ := encode(b)
	return bytes.Equal(aBuf, bBuf)
}

func marshalToFile(filePath string, j interface{}) error {
	data, err := encode(j)
	if err != nil {
		return err
	}
	if err := ioutil.WriteFile(filePath, data, 0755); err != nil {
		return err
	}
	return nil
}

func encode(j interface{}) ([]byte, error) {
	buffer := &bytes.Buffer{}
	encoder := json.NewEncoder(buffer)
	encoder.SetEscapeHTML(false)
	encoder.SetIndent("", " ")
	if err := encoder.Encode(j); err != nil {
		return nil, err
	}
	// Strip the newline at the end of the file
	return bytes.TrimRight(buffer.Bytes(), "\n"), nil
}
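The export task later in this commit uses CompareJSON as a dirty check: re-encode both the on-disk value and the freshly built one, and skip the write when the bytes match. A minimal standalone sketch of that pattern, with copies of the two helpers above and an invented Studio value:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

func encode(j interface{}) ([]byte, error) {
	buffer := &bytes.Buffer{}
	encoder := json.NewEncoder(buffer)
	encoder.SetEscapeHTML(false)
	encoder.SetIndent("", " ")
	if err := encoder.Encode(j); err != nil {
		return nil, err
	}
	return bytes.TrimRight(buffer.Bytes(), "\n"), nil
}

func CompareJSON(a interface{}, b interface{}) bool {
	aBuf, _ := encode(a)
	bBuf, _ := encode(b)
	return bytes.Equal(aBuf, bBuf)
}

type Studio struct {
	Name string `json:"name,omitempty"`
	Url  string `json:"url,omitempty"`
}

func main() {
	onDisk := Studio{Name: "Example", Url: "https://example.com"}
	rebuilt := Studio{Name: "Example", Url: "https://example.com"}
	// true: identical encodings, so an export loop would skip the write
	fmt.Println(CompareJSON(onDisk, rebuilt))
}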
@@ -1,94 +0,0 @@
package manager

import (
	"github.com/bmatcuk/doublestar"
	"github.com/stashapp/stash/internal/logger"
	"github.com/stashapp/stash/internal/manager/paths"
	"path/filepath"
	"sync"
)

type singleton struct {
	Status JobStatus
	Paths  *paths.Paths
	JSON   *jsonUtils
}

var instance *singleton
var once sync.Once

func GetInstance() *singleton {
	Initialize()
	return instance
}

func Initialize() *singleton {
	once.Do(func() {
		instance = &singleton{
			Status: Idle,
			Paths:  paths.RefreshPaths(),
			JSON:   &jsonUtils{},
		}
	})

	return instance
}

func (s *singleton) Scan() {
	if s.Status != Idle {
		return
	}
	s.Status = Scan

	go func() {
		defer s.returnToIdleState()

		globPath := filepath.Join(s.Paths.Config.Stash, "**/*.{zip,m4v,mp4,mov,wmv}")
		globResults, _ := doublestar.Glob(globPath)
		logger.Infof("Starting scan of %d files", len(globResults))

		var wg sync.WaitGroup
		for _, path := range globResults {
			wg.Add(1)
			task := ScanTask{FilePath: path}
			go task.Start(&wg)
			wg.Wait()
		}
	}()
}

func (s *singleton) Import() {
	if s.Status != Idle {
		return
	}
	s.Status = Import

	go func() {
		defer s.returnToIdleState()

		var wg sync.WaitGroup
		wg.Add(1)
		task := ImportTask{}
		go task.Start(&wg)
		wg.Wait()
	}()
}

func (s *singleton) Export() {
	if s.Status != Idle {
		return
	}
	s.Status = Export

	go func() {
		defer s.returnToIdleState()

		var wg sync.WaitGroup
		wg.Add(1)
		task := ExportTask{}
		go task.Start(&wg)
		wg.Wait()
	}()
}

func (s *singleton) returnToIdleState() {
	if r := recover(); r != nil {
		logger.Info("recovered from ", r)
	}

	s.Status = Idle
}
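A rough, self-contained sketch of the Status gate these methods share: a job only starts from Idle, and the deferred recovery path always restores Idle even if the job panics. As in the original, the status check is unsynchronized, so this is illustrative rather than race-free; the sleep stands in for real work.

package main

import (
	"fmt"
	"sync"
	"time"
)

type JobStatus int

const (
	Idle JobStatus = 0
	Scan JobStatus = 3
)

type manager struct{ Status JobStatus }

// Mirrors Scan/Import/Export above: bail out unless Idle, flip the status,
// do the work on a goroutine, and restore Idle on the way out.
func (m *manager) runScan(wg *sync.WaitGroup) {
	if m.Status != Idle {
		wg.Done()
		return
	}
	m.Status = Scan

	go func() {
		defer wg.Done()
		defer func() {
			if r := recover(); r != nil {
				fmt.Println("recovered from", r)
			}
			m.Status = Idle
		}()
		time.Sleep(10 * time.Millisecond) // stand-in for the real scan work
	}()
}

func main() {
	m := &manager{Status: Idle}
	var wg sync.WaitGroup
	wg.Add(2)
	m.runScan(&wg) // starts the job
	m.runScan(&wg) // ignored: status is no longer Idle
	wg.Wait()
	fmt.Println("final status:", m.Status) // 0 (Idle)
}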
@@ -1,64 +0,0 @@
package paths

import (
	"github.com/stashapp/stash/internal/manager/jsonschema"
	"github.com/stashapp/stash/internal/utils"
	"os"
	"os/user"
	"path/filepath"
)

type Paths struct {
	FixedPaths *fixedPaths
	Config     *jsonschema.Config
	Generated  *generatedPaths
	JSON       *jsonPaths

	Gallery      *galleryPaths
	Scene        *scenePaths
	SceneMarkers *sceneMarkerPaths
}

func RefreshPaths() *Paths {
	fp := newFixedPaths()
	ensureConfigFile(fp)
	return newPaths(fp)
}

func newPaths(fp *fixedPaths) *Paths {
	p := Paths{}
	p.FixedPaths = fp
	p.Config = jsonschema.LoadConfigFile(p.FixedPaths.ConfigFile)
	p.Generated = newGeneratedPaths(p)
	p.JSON = newJSONPaths(p)

	p.Gallery = newGalleryPaths(p.Config)
	p.Scene = newScenePaths(p)
	p.SceneMarkers = newSceneMarkerPaths(p)
	return &p
}

func getExecutionDirectory() string {
	ex, err := os.Executable()
	if err != nil {
		panic(err)
	}
	return filepath.Dir(ex)
}

func getHomeDirectory() string {
	currentUser, err := user.Current()
	if err != nil {
		panic(err)
	}
	return currentUser.HomeDir
}

func ensureConfigFile(fp *fixedPaths) {
	configFileExists, _ := utils.FileExists(fp.ConfigFile) // TODO: Verify JSON is correct. Pass verified
	if configFileExists {
		return
	}

	panic("No config file found")
}
@@ -1,66 +0,0 @@
package paths

import (
	"fmt"
	"github.com/stashapp/stash/internal/utils"
	"path/filepath"
	"runtime"
	"strings"
)

type fixedPaths struct {
	ExecutionDirectory string
	ConfigDirectory    string
	ConfigFile         string
	DatabaseFile       string

	FFMPEG  string
	FFProbe string
}

func newFixedPaths() *fixedPaths {
	fp := fixedPaths{}
	fp.ExecutionDirectory = getExecutionDirectory()
	fp.ConfigDirectory = filepath.Join(getHomeDirectory(), ".stash")
	fp.ConfigFile = filepath.Join(fp.ConfigDirectory, "config.json")
	fp.DatabaseFile = filepath.Join(fp.ConfigDirectory, "stash-go.sqlite")

	ffmpegDirectories := []string{fp.ExecutionDirectory, fp.ConfigDirectory}
	ffmpegFileName := func() string {
		if runtime.GOOS == "windows" {
			return "ffmpeg.exe"
		}
		return "ffmpeg"
	}()
	ffprobeFileName := func() string {
		if runtime.GOOS == "windows" {
			return "ffprobe.exe"
		}
		return "ffprobe"
	}()
	for _, directory := range ffmpegDirectories {
		ffmpegPath := filepath.Join(directory, ffmpegFileName)
		ffprobePath := filepath.Join(directory, ffprobeFileName)
		if exists, _ := utils.FileExists(ffmpegPath); exists {
			fp.FFMPEG = ffmpegPath
		}
		if exists, _ := utils.FileExists(ffprobePath); exists {
			fp.FFProbe = ffprobePath
		}
	}

	errorText := fmt.Sprintf(
		"FFMPEG or FFProbe not found. Place it in one of the following folders:\n\n%s",
		strings.Join(ffmpegDirectories, ","),
	)
	if exists, _ := utils.FileExists(fp.FFMPEG); !exists {
		panic(errorText)
	}
	if exists, _ := utils.FileExists(fp.FFProbe); !exists {
		panic(errorText)
	}

	return &fp
}
@@ -1,24 +0,0 @@
package paths

import (
	"github.com/stashapp/stash/internal/manager/jsonschema"
	"path/filepath"
)

type galleryPaths struct {
	config *jsonschema.Config
}

func newGalleryPaths(c *jsonschema.Config) *galleryPaths {
	gp := galleryPaths{}
	gp.config = c
	return &gp
}

func (gp *galleryPaths) GetExtractedPath(checksum string) string {
	return filepath.Join(gp.config.Cache, checksum)
}

func (gp *galleryPaths) GetExtractedFilePath(checksum string, fileName string) string {
	return filepath.Join(gp.config.Cache, checksum, fileName)
}
@@ -1,45 +0,0 @@
package paths

import (
	"github.com/stashapp/stash/internal/utils"
	"path/filepath"
)

type generatedPaths struct {
	Screenshots string
	Vtt         string
	Markers     string
	Transcodes  string
	Tmp         string
}

func newGeneratedPaths(p Paths) *generatedPaths {
	gp := generatedPaths{}
	gp.Screenshots = filepath.Join(p.Config.Metadata, "screenshots")
	gp.Vtt = filepath.Join(p.Config.Metadata, "vtt")
	gp.Markers = filepath.Join(p.Config.Metadata, "markers")
	gp.Transcodes = filepath.Join(p.Config.Metadata, "transcodes")
	gp.Tmp = filepath.Join(p.Config.Metadata, "tmp")

	_ = utils.EnsureDir(gp.Screenshots)
	_ = utils.EnsureDir(gp.Vtt)
	_ = utils.EnsureDir(gp.Markers)
	_ = utils.EnsureDir(gp.Transcodes)
	return &gp
}

func (gp *generatedPaths) GetTmpPath(fileName string) string {
	return filepath.Join(gp.Tmp, fileName)
}

func (gp *generatedPaths) EnsureTmpDir() {
	_ = utils.EnsureDir(gp.Tmp)
}

func (gp *generatedPaths) EmptyTmpDir() {
	_ = utils.EmptyDir(gp.Tmp)
}

func (gp *generatedPaths) RemoveTmpDir() {
	_ = utils.RemoveDir(gp.Tmp)
}
@@ -1,44 +0,0 @@
package paths

import (
	"github.com/stashapp/stash/internal/utils"
	"path/filepath"
)

type jsonPaths struct {
	MappingsFile string
	ScrapedFile  string

	Performers string
	Scenes     string
	Galleries  string
	Studios    string
}

func newJSONPaths(p Paths) *jsonPaths {
	jp := jsonPaths{}
	jp.MappingsFile = filepath.Join(p.Config.Metadata, "mappings.json")
	jp.ScrapedFile = filepath.Join(p.Config.Metadata, "scraped.json")
	jp.Performers = filepath.Join(p.Config.Metadata, "performers")
	jp.Scenes = filepath.Join(p.Config.Metadata, "scenes")
	jp.Galleries = filepath.Join(p.Config.Metadata, "galleries")
	jp.Studios = filepath.Join(p.Config.Metadata, "studios")

	_ = utils.EnsureDir(jp.Performers)
	_ = utils.EnsureDir(jp.Scenes)
	_ = utils.EnsureDir(jp.Galleries)
	_ = utils.EnsureDir(jp.Studios)
	return &jp
}

func (jp *jsonPaths) PerformerJSONPath(checksum string) string {
	return filepath.Join(jp.Performers, checksum+".json")
}

func (jp *jsonPaths) SceneJSONPath(checksum string) string {
	return filepath.Join(jp.Scenes, checksum+".json")
}

func (jp *jsonPaths) StudioJSONPath(checksum string) string {
	return filepath.Join(jp.Studios, checksum+".json")
}
@@ -1,24 +0,0 @@
package paths

import (
	"path/filepath"
	"strconv"
)

type sceneMarkerPaths struct {
	generated generatedPaths
}

func newSceneMarkerPaths(p Paths) *sceneMarkerPaths {
	sp := sceneMarkerPaths{}
	sp.generated = *p.Generated
	return &sp
}

func (sp *sceneMarkerPaths) GetStreamPath(checksum string, seconds int) string {
	return filepath.Join(sp.generated.Markers, checksum, strconv.Itoa(seconds)+".mp4")
}

func (sp *sceneMarkerPaths) GetStreamPreviewImagePath(checksum string, seconds int) string {
	return filepath.Join(sp.generated.Markers, checksum, strconv.Itoa(seconds)+".webp")
}
@@ -1,54 +0,0 @@
package paths

import (
	"github.com/stashapp/stash/internal/utils"
	"path/filepath"
)

type scenePaths struct {
	generated generatedPaths
}

func newScenePaths(p Paths) *scenePaths {
	sp := scenePaths{}
	sp.generated = *p.Generated
	return &sp
}

func (sp *scenePaths) GetScreenshotPath(checksum string) string {
	return filepath.Join(sp.generated.Screenshots, checksum+".jpg")
}

func (sp *scenePaths) GetThumbnailScreenshotPath(checksum string) string {
	return filepath.Join(sp.generated.Screenshots, checksum+".thumb.jpg")
}

func (sp *scenePaths) GetTranscodePath(checksum string) string {
	return filepath.Join(sp.generated.Transcodes, checksum+".mp4")
}

func (sp *scenePaths) GetStreamPath(scenePath string, checksum string) string {
	transcodePath := sp.GetTranscodePath(checksum)
	transcodeExists, _ := utils.FileExists(transcodePath)
	if transcodeExists {
		return transcodePath
	}
	return scenePath
}

func (sp *scenePaths) GetStreamPreviewPath(checksum string) string {
	return filepath.Join(sp.generated.Screenshots, checksum+".mp4")
}

func (sp *scenePaths) GetStreamPreviewImagePath(checksum string) string {
	return filepath.Join(sp.generated.Screenshots, checksum+".webp")
}

func (sp *scenePaths) GetSpriteImageFilePath(checksum string) string {
	return filepath.Join(sp.generated.Vtt, checksum+"_sprite.jpg")
}

func (sp *scenePaths) GetSpriteVttFilePath(checksum string) string {
	return filepath.Join(sp.generated.Vtt, checksum+"_thumbs.vtt")
}
@@ -1,7 +0,0 @@
package manager

import "sync"

type Task interface {
	Start(wg *sync.WaitGroup)
}
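A toy Task implementation, showing the contract that ImportTask and ExportTask satisfy below: Start receives the WaitGroup and is responsible for calling Done itself. The printTask type is invented for illustration.

package main

import (
	"fmt"
	"sync"
)

type Task interface {
	Start(wg *sync.WaitGroup)
}

type printTask struct{ message string }

// Start signals completion itself rather than leaving it to the caller,
// matching how ImportTask.Start and ExportTask.Start call wg.Done().
func (t *printTask) Start(wg *sync.WaitGroup) {
	defer wg.Done()
	fmt.Println(t.message)
}

func main() {
	var wg sync.WaitGroup
	tasks := []Task{&printTask{"scan a"}, &printTask{"scan b"}}
	for _, task := range tasks {
		wg.Add(1)
		go task.Start(&wg)
	}
	wg.Wait()
}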
@@ -1,459 +0,0 @@
package manager

import (
	"context"
	"fmt"
	"github.com/stashapp/stash/internal/database"
	"github.com/stashapp/stash/internal/logger"
	"github.com/stashapp/stash/internal/manager/jsonschema"
	"github.com/stashapp/stash/internal/models"
	"github.com/stashapp/stash/internal/utils"
	"math"
	"strconv"
	"sync"
)

type ExportTask struct {
	Mappings *jsonschema.Mappings
	Scraped  []jsonschema.ScrapedItem
}

func (t *ExportTask) Start(wg *sync.WaitGroup) {
	// @manager.total = Scene.count + Gallery.count + Performer.count + Studio.count

	t.Mappings = &jsonschema.Mappings{}
	t.Scraped = []jsonschema.ScrapedItem{}

	ctx := context.TODO()

	t.ExportScenes(ctx)
	t.ExportGalleries(ctx)
	t.ExportPerformers(ctx)
	t.ExportStudios(ctx)

	if err := instance.JSON.saveMappings(t.Mappings); err != nil {
		logger.Errorf("[mappings] failed to save json: %s", err.Error())
	}

	t.ExportScrapedItems(ctx)

	wg.Done()
}

func (t *ExportTask) ExportScenes(ctx context.Context) {
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewSceneQueryBuilder()
	studioQB := models.NewStudioQueryBuilder()
	galleryQB := models.NewGalleryQueryBuilder()
	performerQB := models.NewPerformerQueryBuilder()
	tagQB := models.NewTagQueryBuilder()
	sceneMarkerQB := models.NewSceneMarkerQueryBuilder()
	scenes, err := qb.All()
	if err != nil {
		logger.Errorf("[scenes] failed to fetch all scenes: %s", err.Error())
	}

	logger.Info("[scenes] exporting")

	for i, scene := range scenes {
		index := i + 1
		logger.Progressf("[scenes] %d of %d", index, len(scenes))

		t.Mappings.Scenes = append(t.Mappings.Scenes, jsonschema.PathMapping{Path: scene.Path, Checksum: scene.Checksum})
		newSceneJSON := jsonschema.Scene{}

		var studioName string
		if scene.StudioID.Valid {
			studio, _ := studioQB.Find(int(scene.StudioID.Int64), tx)
			if studio != nil {
				studioName = studio.Name.String
			}
		}

		var galleryChecksum string
		gallery, _ := galleryQB.FindBySceneID(scene.ID, tx)
		if gallery != nil {
			galleryChecksum = gallery.Checksum
		}

		performers, _ := performerQB.FindBySceneID(scene.ID, tx)
		tags, _ := tagQB.FindBySceneID(scene.ID, tx)
		sceneMarkers, _ := sceneMarkerQB.FindBySceneID(scene.ID, tx)

		if scene.Title.Valid {
			newSceneJSON.Title = scene.Title.String
		}
		if studioName != "" {
			newSceneJSON.Studio = studioName
		}
		if scene.Url.Valid {
			newSceneJSON.Url = scene.Url.String
		}
		if scene.Date.Valid {
			newSceneJSON.Date = utils.GetYMDFromDatabaseDate(scene.Date.String)
		}
		if scene.Rating.Valid {
			newSceneJSON.Rating = int(scene.Rating.Int64)
		}
		if scene.Details.Valid {
			newSceneJSON.Details = scene.Details.String
		}
		if galleryChecksum != "" {
			newSceneJSON.Gallery = galleryChecksum
		}

		newSceneJSON.Performers = t.getPerformerNames(performers)
		newSceneJSON.Tags = t.getTagNames(tags)

		for _, sceneMarker := range sceneMarkers {
			var primaryTagID int
			if sceneMarker.PrimaryTagID.Valid {
				primaryTagID = int(sceneMarker.PrimaryTagID.Int64)
			}
			primaryTag, err := tagQB.Find(primaryTagID, tx)
			if err != nil {
				logger.Errorf("[scenes] <%s> invalid primary tag for scene marker: %s", scene.Checksum, err.Error())
				continue
			}
			sceneMarkerTags, err := tagQB.FindBySceneMarkerID(sceneMarker.ID, tx)
			if err != nil {
				logger.Errorf("[scenes] <%s> invalid tags for scene marker: %s", scene.Checksum, err.Error())
				continue
			}
			if sceneMarker.Title == "" || sceneMarker.Seconds == 0 || primaryTag.Name == "" {
				logger.Errorf("[scenes] invalid scene marker: %v", sceneMarker)
			}

			sceneMarkerJSON := jsonschema.SceneMarker{
				Title:      sceneMarker.Title,
				Seconds:    t.getDecimalString(sceneMarker.Seconds),
				PrimaryTag: primaryTag.Name,
				Tags:       t.getTagNames(sceneMarkerTags),
			}

			newSceneJSON.Markers = append(newSceneJSON.Markers, sceneMarkerJSON)
		}

		newSceneJSON.File = &jsonschema.SceneFile{}
		if scene.Size.Valid {
			newSceneJSON.File.Size = scene.Size.String
		}
		if scene.Duration.Valid {
			newSceneJSON.File.Duration = t.getDecimalString(scene.Duration.Float64)
		}
		if scene.VideoCodec.Valid {
			newSceneJSON.File.VideoCodec = scene.VideoCodec.String
		}
		if scene.AudioCodec.Valid {
			newSceneJSON.File.AudioCodec = scene.AudioCodec.String
		}
		if scene.Width.Valid {
			newSceneJSON.File.Width = int(scene.Width.Int64)
		}
		if scene.Height.Valid {
			newSceneJSON.File.Height = int(scene.Height.Int64)
		}
		if scene.Framerate.Valid {
			newSceneJSON.File.Framerate = t.getDecimalString(scene.Framerate.Float64)
		}
		if scene.Bitrate.Valid {
			newSceneJSON.File.Bitrate = int(scene.Bitrate.Int64)
		}

		sceneJSON, err := instance.JSON.getScene(scene.Checksum)
		if err != nil {
			logger.Errorf("[scenes] error reading scene json: %s", err.Error())
		}
		if sceneJSON != nil && jsonschema.CompareJSON(*sceneJSON, newSceneJSON) {
			continue
		}

		if err := instance.JSON.saveScene(scene.Checksum, &newSceneJSON); err != nil {
			logger.Errorf("[scenes] <%s> failed to save json: %s", scene.Checksum, err.Error())
		}
	}

	logger.Infof("[scenes] export complete")
}

func (t *ExportTask) ExportGalleries(ctx context.Context) {
	qb := models.NewGalleryQueryBuilder()
	galleries, err := qb.All()
	if err != nil {
		logger.Errorf("[galleries] failed to fetch all galleries: %s", err.Error())
	}

	logger.Info("[galleries] exporting")

	for i, gallery := range galleries {
		index := i + 1
		logger.Progressf("[galleries] %d of %d", index, len(galleries))
		t.Mappings.Galleries = append(t.Mappings.Galleries, jsonschema.PathMapping{Path: gallery.Path, Checksum: gallery.Checksum})
	}

	logger.Infof("[galleries] export complete")
}

func (t *ExportTask) ExportPerformers(ctx context.Context) {
	qb := models.NewPerformerQueryBuilder()
	performers, err := qb.All()
	if err != nil {
		logger.Errorf("[performers] failed to fetch all performers: %s", err.Error())
	}

	logger.Info("[performers] exporting")

	for i, performer := range performers {
		index := i + 1
		logger.Progressf("[performers] %d of %d", index, len(performers))

		t.Mappings.Performers = append(t.Mappings.Performers, jsonschema.NameMapping{Name: performer.Name.String, Checksum: performer.Checksum})

		newPerformerJSON := jsonschema.Performer{}

		if performer.Name.Valid {
			newPerformerJSON.Name = performer.Name.String
		}
		if performer.Url.Valid {
			newPerformerJSON.Url = performer.Url.String
		}
		if performer.Birthdate.Valid {
			newPerformerJSON.Birthdate = utils.GetYMDFromDatabaseDate(performer.Birthdate.String)
		}
		if performer.Ethnicity.Valid {
			newPerformerJSON.Ethnicity = performer.Ethnicity.String
		}
		if performer.Country.Valid {
			newPerformerJSON.Country = performer.Country.String
		}
		if performer.EyeColor.Valid {
			newPerformerJSON.EyeColor = performer.EyeColor.String
		}
		if performer.Height.Valid {
			newPerformerJSON.Height = performer.Height.String
		}
		if performer.Measurements.Valid {
			newPerformerJSON.Measurements = performer.Measurements.String
		}
		if performer.FakeTits.Valid {
			newPerformerJSON.FakeTits = performer.FakeTits.String
		}
		if performer.CareerLength.Valid {
			newPerformerJSON.CareerLength = performer.CareerLength.String
		}
		if performer.Tattoos.Valid {
			newPerformerJSON.Tattoos = performer.Tattoos.String
		}
		if performer.Piercings.Valid {
			newPerformerJSON.Piercings = performer.Piercings.String
		}
		if performer.Aliases.Valid {
			newPerformerJSON.Aliases = performer.Aliases.String
		}
		if performer.Twitter.Valid {
			newPerformerJSON.Twitter = performer.Twitter.String
		}
		if performer.Instagram.Valid {
			newPerformerJSON.Instagram = performer.Instagram.String
		}
		if performer.Favorite.Valid {
			newPerformerJSON.Favorite = performer.Favorite.Bool
		}

		newPerformerJSON.Image = utils.GetBase64StringFromData(performer.Image)

		performerJSON, err := instance.JSON.getPerformer(performer.Checksum)
		if err != nil {
			logger.Errorf("[performers] error reading performer json: %s", err.Error())
		}
		if performerJSON != nil && jsonschema.CompareJSON(*performerJSON, newPerformerJSON) {
			continue
		}

		if err := instance.JSON.savePerformer(performer.Checksum, &newPerformerJSON); err != nil {
			logger.Errorf("[performers] <%s> failed to save json: %s", performer.Checksum, err.Error())
		}
	}

	logger.Infof("[performers] export complete")
}

func (t *ExportTask) ExportStudios(ctx context.Context) {
	qb := models.NewStudioQueryBuilder()
	studios, err := qb.All()
	if err != nil {
		logger.Errorf("[studios] failed to fetch all studios: %s", err.Error())
	}

	logger.Info("[studios] exporting")

	for i, studio := range studios {
		index := i + 1
		logger.Progressf("[studios] %d of %d", index, len(studios))

		t.Mappings.Studios = append(t.Mappings.Studios, jsonschema.NameMapping{Name: studio.Name.String, Checksum: studio.Checksum})

		newStudioJSON := jsonschema.Studio{}

		if studio.Name.Valid {
			newStudioJSON.Name = studio.Name.String
		}
		if studio.Url.Valid {
			newStudioJSON.Url = studio.Url.String
		}

		newStudioJSON.Image = utils.GetBase64StringFromData(studio.Image)

		studioJSON, err := instance.JSON.getStudio(studio.Checksum)
		if err != nil {
			logger.Errorf("[studios] error reading studio json: %s", err.Error())
		}
		if studioJSON != nil && jsonschema.CompareJSON(*studioJSON, newStudioJSON) {
			continue
		}

		if err := instance.JSON.saveStudio(studio.Checksum, &newStudioJSON); err != nil {
			logger.Errorf("[studios] <%s> failed to save json: %s", studio.Checksum, err.Error())
		}
	}

	logger.Infof("[studios] export complete")
}

func (t *ExportTask) ExportScrapedItems(ctx context.Context) {
	tx := database.DB.MustBeginTx(ctx, nil)
	qb := models.NewScrapedItemQueryBuilder()
	sqb := models.NewStudioQueryBuilder()
	scrapedItems, err := qb.All()
	if err != nil {
		logger.Errorf("[scraped sites] failed to fetch all items: %s", err.Error())
	}

	logger.Info("[scraped sites] exporting")

	for i, scrapedItem := range scrapedItems {
		index := i + 1
		logger.Progressf("[scraped sites] %d of %d", index, len(scrapedItems))

		var studioName string
		if scrapedItem.StudioID.Valid {
			studio, _ := sqb.Find(int(scrapedItem.StudioID.Int64), tx)
			if studio != nil {
				studioName = studio.Name.String
			}
		}

		newScrapedItemJSON := jsonschema.ScrapedItem{}

		if scrapedItem.Title.Valid {
			newScrapedItemJSON.Title = scrapedItem.Title.String
		}
		if scrapedItem.Description.Valid {
			newScrapedItemJSON.Description = scrapedItem.Description.String
		}
		if scrapedItem.Url.Valid {
			newScrapedItemJSON.Url = scrapedItem.Url.String
		}
		if scrapedItem.Date.Valid {
			newScrapedItemJSON.Date = utils.GetYMDFromDatabaseDate(scrapedItem.Date.String)
		}
		if scrapedItem.Rating.Valid {
			newScrapedItemJSON.Rating = scrapedItem.Rating.String
		}
		if scrapedItem.Tags.Valid {
			newScrapedItemJSON.Tags = scrapedItem.Tags.String
		}
		if scrapedItem.Models.Valid {
			newScrapedItemJSON.Models = scrapedItem.Models.String
		}
		if scrapedItem.Episode.Valid {
			newScrapedItemJSON.Episode = int(scrapedItem.Episode.Int64)
		}
		if scrapedItem.GalleryFilename.Valid {
			newScrapedItemJSON.GalleryFilename = scrapedItem.GalleryFilename.String
		}
		if scrapedItem.GalleryUrl.Valid {
			newScrapedItemJSON.GalleryUrl = scrapedItem.GalleryUrl.String
		}
		if scrapedItem.VideoFilename.Valid {
			newScrapedItemJSON.VideoFilename = scrapedItem.VideoFilename.String
		}
		if scrapedItem.VideoUrl.Valid {
			newScrapedItemJSON.VideoUrl = scrapedItem.VideoUrl.String
		}

		newScrapedItemJSON.Studio = studioName
		updatedAt := jsonschema.RailsTime{Time: scrapedItem.UpdatedAt.Timestamp} // TODO keeping ruby format
		newScrapedItemJSON.UpdatedAt = updatedAt

		t.Scraped = append(t.Scraped, newScrapedItemJSON)
	}

	scrapedJSON, err := instance.JSON.getScraped()
	if err != nil {
		logger.Errorf("[scraped sites] error reading json: %s", err.Error())
	}
	if !jsonschema.CompareJSON(scrapedJSON, t.Scraped) {
		if err := instance.JSON.saveScraped(t.Scraped); err != nil {
			logger.Errorf("[scraped sites] failed to save json: %s", err.Error())
		}
	}

	logger.Infof("[scraped sites] export complete")
}

func (t *ExportTask) getPerformerNames(performers []models.Performer) []string {
	if len(performers) == 0 {
		return nil
	}

	var results []string
	for _, performer := range performers {
		if performer.Name.Valid {
			results = append(results, performer.Name.String)
		}
	}

	return results
}

func (t *ExportTask) getTagNames(tags []models.Tag) []string {
	if len(tags) == 0 {
		return nil
	}

	var results []string
	for _, tag := range tags {
		if tag.Name != "" {
			results = append(results, tag.Name)
		}
	}

	return results
}

func (t *ExportTask) getDecimalString(num float64) string {
	if num == 0 {
		return ""
	}

	precision := getPrecision(num)
	if precision == 0 {
		precision = 1
	}
	return fmt.Sprintf("%."+strconv.Itoa(precision)+"f", num)
}

func getPrecision(num float64) int {
	if num == 0 {
		return 0
	}

	e := 1.0
	p := 0
	for (math.Round(num*e) / e) != num {
		e *= 10
		p += 1
	}
	return p
}
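getPrecision counts decimal places by scaling by powers of ten until rounding at that scale reproduces the value exactly, and getDecimalString then formats with at least one decimal place. A standalone copy of both helpers with a few worked values:

package main

import (
	"fmt"
	"math"
	"strconv"
)

// Same logic as getPrecision above: multiply by 10 until rounding at that
// scale gives the number back exactly.
func getPrecision(num float64) int {
	if num == 0 {
		return 0
	}
	e := 1.0
	p := 0
	for (math.Round(num*e) / e) != num {
		e *= 10
		p += 1
	}
	return p
}

func getDecimalString(num float64) string {
	if num == 0 {
		return ""
	}
	precision := getPrecision(num)
	if precision == 0 {
		precision = 1
	}
	return fmt.Sprintf("%."+strconv.Itoa(precision)+"f", num)
}

func main() {
	fmt.Println(getDecimalString(30))    // "30.0": whole numbers keep one decimal
	fmt.Println(getDecimalString(29.97)) // "29.97"
	fmt.Println(getDecimalString(1.5))   // "1.5"
}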
@@ -1,626 +0,0 @@
|
||||
package manager
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/stashapp/stash/internal/database"
|
||||
"github.com/stashapp/stash/internal/logger"
|
||||
"github.com/stashapp/stash/internal/manager/jsonschema"
|
||||
"github.com/stashapp/stash/internal/models"
|
||||
"github.com/stashapp/stash/internal/utils"
|
||||
"strconv"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
type ImportTask struct {
|
||||
Mappings *jsonschema.Mappings
|
||||
Scraped []jsonschema.ScrapedItem
|
||||
}
|
||||
|
||||
func (t *ImportTask) Start(wg *sync.WaitGroup) {
|
||||
t.Mappings, _ = instance.JSON.getMappings()
|
||||
if t.Mappings == nil {
|
||||
panic("missing mappings json")
|
||||
}
|
||||
scraped, _ := instance.JSON.getScraped()
|
||||
if scraped == nil {
|
||||
logger.Warn("missing scraped json")
|
||||
}
|
||||
t.Scraped = scraped
|
||||
|
||||
database.Reset(instance.Paths.FixedPaths.DatabaseFile)
|
||||
|
||||
ctx := context.TODO()
|
||||
|
||||
t.ImportPerformers(ctx)
|
||||
t.ImportStudios(ctx)
|
||||
t.ImportGalleries(ctx)
|
||||
t.ImportTags(ctx)
|
||||
|
||||
t.ImportScrapedItems(ctx)
|
||||
t.ImportScenes(ctx)
|
||||
|
||||
wg.Done()
|
||||
}
|
||||
|
||||
func (t *ImportTask) ImportPerformers(ctx context.Context) {
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewPerformerQueryBuilder()
|
||||
|
||||
for i, mappingJSON := range t.Mappings.Performers {
|
||||
index := i + 1
|
||||
performerJSON, err := instance.JSON.getPerformer(mappingJSON.Checksum)
|
||||
if err != nil {
|
||||
logger.Errorf("[performers] failed to read json: %s", err.Error())
|
||||
continue
|
||||
}
|
||||
if mappingJSON.Checksum == "" || mappingJSON.Name == "" || performerJSON == nil { return }
|
||||
|
||||
logger.Progressf("[performers] %d of %d", index, len(t.Mappings.Performers))
|
||||
|
||||
// Process the base 64 encoded image string
|
||||
checksum, imageData, err := utils.ProcessBase64Image(performerJSON.Image)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
logger.Errorf("[performers] <%s> invalid image: %s", mappingJSON.Checksum, err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
// Populate a new performer from the input
|
||||
currentTime := time.Now()
|
||||
newPerformer := models.Performer{
|
||||
Image: imageData,
|
||||
Checksum: checksum,
|
||||
Favorite: sql.NullBool{ Bool: performerJSON.Favorite, Valid: true },
|
||||
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||
}
|
||||
|
||||
if performerJSON.Name != "" {
|
||||
newPerformer.Name = sql.NullString{ String: performerJSON.Name, Valid: true }
|
||||
}
|
||||
if performerJSON.Url != "" {
|
||||
newPerformer.Url = sql.NullString{ String: performerJSON.Url, Valid: true }
|
||||
}
|
||||
if performerJSON.Birthdate != "" {
|
||||
newPerformer.Birthdate = sql.NullString{ String: performerJSON.Birthdate, Valid: true }
|
||||
}
|
||||
if performerJSON.Ethnicity != "" {
|
||||
newPerformer.Ethnicity = sql.NullString{ String: performerJSON.Ethnicity, Valid: true }
|
||||
}
|
||||
if performerJSON.Country != "" {
|
||||
newPerformer.Country = sql.NullString{ String: performerJSON.Country, Valid: true }
|
||||
}
|
||||
if performerJSON.EyeColor != "" {
|
||||
newPerformer.EyeColor = sql.NullString{ String: performerJSON.EyeColor, Valid: true }
|
||||
}
|
||||
if performerJSON.Height != "" {
|
||||
newPerformer.Height = sql.NullString{ String: performerJSON.Height, Valid: true }
|
||||
}
|
||||
if performerJSON.Measurements != "" {
|
||||
newPerformer.Measurements = sql.NullString{ String: performerJSON.Measurements, Valid: true }
|
||||
}
|
||||
if performerJSON.FakeTits != "" {
|
||||
newPerformer.FakeTits = sql.NullString{ String: performerJSON.FakeTits, Valid: true }
|
||||
}
|
||||
if performerJSON.CareerLength != "" {
|
||||
newPerformer.CareerLength = sql.NullString{ String: performerJSON.CareerLength, Valid: true }
|
||||
}
|
||||
if performerJSON.Tattoos != "" {
|
||||
newPerformer.Tattoos = sql.NullString{ String: performerJSON.Tattoos, Valid: true }
|
||||
}
|
||||
if performerJSON.Piercings != "" {
|
||||
newPerformer.Piercings = sql.NullString{ String: performerJSON.Piercings, Valid: true }
|
||||
}
|
||||
if performerJSON.Aliases != "" {
|
||||
newPerformer.Aliases = sql.NullString{ String: performerJSON.Aliases, Valid: true }
|
||||
}
|
||||
if performerJSON.Twitter != "" {
|
||||
newPerformer.Twitter = sql.NullString{ String: performerJSON.Twitter, Valid: true }
|
||||
}
|
||||
if performerJSON.Instagram != "" {
|
||||
newPerformer.Instagram = sql.NullString{ String: performerJSON.Instagram, Valid: true }
|
||||
}
|
||||
|
||||
_, err = qb.Create(newPerformer, tx)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
logger.Errorf("[performers] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
logger.Info("[performers] importing")
|
||||
if err := tx.Commit(); err != nil {
|
||||
logger.Errorf("[performers] import failed to commit: %s", err.Error())
|
||||
}
|
||||
logger.Info("[performers] import complete")
|
||||
}
|
||||
|
||||
func (t *ImportTask) ImportStudios(ctx context.Context) {
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewStudioQueryBuilder()
|
||||
|
||||
for i, mappingJSON := range t.Mappings.Studios {
|
||||
index := i + 1
|
||||
studioJSON, err := instance.JSON.getStudio(mappingJSON.Checksum)
|
||||
if err != nil {
|
||||
logger.Errorf("[studios] failed to read json: %s", err.Error())
|
||||
continue
|
||||
}
|
||||
if mappingJSON.Checksum == "" || mappingJSON.Name == "" || studioJSON == nil { return }
|
||||
|
||||
logger.Progressf("[studios] %d of %d", index, len(t.Mappings.Studios))
|
||||
|
||||
// Process the base 64 encoded image string
|
||||
checksum, imageData, err := utils.ProcessBase64Image(studioJSON.Image)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
logger.Errorf("[studios] <%s> invalid image: %s", mappingJSON.Checksum, err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
// Populate a new studio from the input
|
||||
currentTime := time.Now()
|
||||
newStudio := models.Studio{
|
||||
Image: imageData,
|
||||
Checksum: checksum,
|
||||
Name: sql.NullString{ String: studioJSON.Name, Valid: true },
|
||||
Url: sql.NullString{ String: studioJSON.Url, Valid: true },
|
||||
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||
}
|
||||
|
||||
_, err = qb.Create(newStudio, tx)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
logger.Errorf("[studios] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
logger.Info("[studios] importing")
|
||||
if err := tx.Commit(); err != nil {
|
||||
logger.Errorf("[studios] import failed to commit: %s", err.Error())
|
||||
}
|
||||
logger.Info("[studios] import complete")
|
||||
}
|
||||
|
||||
func (t *ImportTask) ImportGalleries(ctx context.Context) {
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewGalleryQueryBuilder()
|
||||
|
||||
for i, mappingJSON := range t.Mappings.Galleries {
|
||||
index := i + 1
|
||||
if mappingJSON.Checksum == "" || mappingJSON.Path == "" { return }
|
||||
|
||||
logger.Progressf("[galleries] %d of %d", index, len(t.Mappings.Galleries))
|
||||
|
||||
// Populate a new gallery from the input
|
||||
currentTime := time.Now()
|
||||
newGallery := models.Gallery{
|
||||
Checksum: mappingJSON.Checksum,
|
||||
Path: mappingJSON.Path,
|
||||
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||
}
|
||||
|
||||
_, err := qb.Create(newGallery, tx)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
logger.Errorf("[galleries] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
logger.Info("[galleries] importing")
|
||||
if err := tx.Commit(); err != nil {
|
||||
logger.Errorf("[galleries] import failed to commit: %s", err.Error())
|
||||
}
|
||||
logger.Info("[galleries] import complete")
|
||||
}
|
||||
|
||||
func (t *ImportTask) ImportTags(ctx context.Context) {
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewTagQueryBuilder()
|
||||
|
||||
var tagNames []string
|
||||
|
||||
for i, mappingJSON := range t.Mappings.Scenes {
|
||||
index := i + 1
|
||||
if mappingJSON.Checksum == "" || mappingJSON.Path == "" {
|
||||
logger.Warn("[tags] scene mapping without checksum or path: ", mappingJSON)
|
||||
return
|
||||
}
|
||||
|
||||
logger.Progressf("[tags] %d of %d scenes", index, len(t.Mappings.Scenes))
|
||||
|
||||
sceneJSON, err := instance.JSON.getScene(mappingJSON.Checksum)
|
||||
if err != nil {
|
||||
logger.Infof("[tags] <%s> json parse failure: %s", mappingJSON.Checksum, err.Error())
|
||||
}
|
||||
// Return early if we are missing a json file.
|
||||
if sceneJSON == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
// Get the tags from the tags json if we have it
|
||||
if len(sceneJSON.Tags) > 0 {
|
||||
tagNames = append(tagNames, sceneJSON.Tags...)
|
||||
}
|
||||
|
||||
// Get the tags from the markers if we have marker json
|
||||
if len(sceneJSON.Markers) == 0 { continue }
|
||||
for _, markerJSON := range sceneJSON.Markers {
|
||||
if markerJSON.PrimaryTag != "" {
|
||||
tagNames = append(tagNames, markerJSON.PrimaryTag)
|
||||
}
|
||||
if len(markerJSON.Tags) > 0 {
|
||||
tagNames = append(tagNames, markerJSON.Tags...)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
uniqueTagNames := t.getUnique(tagNames)
|
||||
for _, tagName := range uniqueTagNames {
|
||||
currentTime := time.Now()
|
||||
newTag := models.Tag{
|
||||
Name: tagName,
|
||||
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||
}
|
||||
|
||||
_, err := qb.Create(newTag, tx)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
logger.Errorf("[tags] <%s> failed to create: %s", tagName, err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
logger.Info("[tags] importing")
|
||||
if err := tx.Commit(); err != nil {
|
||||
logger.Errorf("[tags] import failed to commit: %s", err.Error())
|
||||
}
|
||||
logger.Info("[tags] import complete")
|
||||
}
|
||||
|
||||
func (t *ImportTask) ImportScrapedItems(ctx context.Context) {
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewScrapedItemQueryBuilder()
|
||||
sqb := models.NewStudioQueryBuilder()
|
||||
currentTime := time.Now()
|
||||
|
||||
for i, mappingJSON := range t.Scraped {
|
||||
index := i + 1
|
||||
logger.Progressf("[scraped sites] %d of %d", index, len(t.Mappings.Scenes))
|
||||
|
||||
var updatedAt time.Time
|
||||
if currentTime.Location() != nil {
|
||||
updatedAt = mappingJSON.UpdatedAt.Time.In(currentTime.Location())
|
||||
} else {
|
||||
updatedAt = mappingJSON.UpdatedAt.Time
|
||||
}
|
||||
newScrapedItem := models.ScrapedItem{
|
||||
Title: sql.NullString{String: mappingJSON.Title, Valid: true},
|
||||
Description: sql.NullString{String: mappingJSON.Description, Valid: true},
|
||||
Url: sql.NullString{String: mappingJSON.Url, Valid: true},
|
||||
Date: sql.NullString{String: mappingJSON.Date, Valid: true},
|
||||
Rating: sql.NullString{String: mappingJSON.Rating, Valid: true},
|
||||
Tags: sql.NullString{String: mappingJSON.Tags, Valid: true},
|
||||
Models: sql.NullString{String: mappingJSON.Models, Valid: true},
|
||||
Episode: sql.NullInt64{Int64: int64(mappingJSON.Episode), Valid: true},
|
||||
GalleryFilename: sql.NullString{String: mappingJSON.GalleryFilename, Valid: true},
|
||||
GalleryUrl: sql.NullString{String: mappingJSON.GalleryUrl, Valid: true},
|
||||
VideoFilename: sql.NullString{String: mappingJSON.VideoFilename, Valid: true},
|
||||
VideoUrl: sql.NullString{String: mappingJSON.VideoUrl, Valid: true},
|
||||
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
||||
UpdatedAt: models.SQLiteTimestamp{Timestamp: updatedAt},
|
||||
}
|
||||
|
||||
studio, err := sqb.FindByName(mappingJSON.Studio, tx)
|
||||
if err != nil {
|
||||
logger.Errorf("[scraped sites] failed to fetch studio: %s", err.Error())
|
||||
}
|
||||
if studio != nil {
|
||||
newScrapedItem.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true}
|
||||
}
|
||||
|
||||
_, err = qb.Create(newScrapedItem, tx)
|
||||
if err != nil {
|
||||
logger.Errorf("[scraped sites] <%s> failed to create: %s", newScrapedItem.Title, err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
logger.Info("[scraped sites] importing")
|
||||
if err := tx.Commit(); err != nil {
|
||||
logger.Errorf("[scraped sites] import failed to commit: %s", err.Error())
|
||||
}
|
||||
logger.Info("[scraped sites] import complete")
|
||||
}
|
||||
|
||||
func (t *ImportTask) ImportScenes(ctx context.Context) {
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
currentTime := time.Now()
|
||||
|
||||
for i, mappingJSON := range t.Mappings.Scenes {
|
||||
index := i + 1
|
||||
if mappingJSON.Checksum == "" || mappingJSON.Path == "" {
|
||||
logger.Warn("[scenes] scene mapping without checksum or path: ", mappingJSON)
|
||||
return
|
||||
}
|
||||
|
||||
logger.Progressf("[scenes] %d of %d", index, len(t.Mappings.Scenes))
|
||||
|
||||
newScene := models.Scene{
|
||||
Checksum: mappingJSON.Checksum,
|
||||
Path: mappingJSON.Path,
|
||||
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||
}
|
||||
|
||||
sceneJSON, err := instance.JSON.getScene(mappingJSON.Checksum)
|
||||
if err != nil {
|
||||
logger.Infof("[scenes] <%s> json parse failure: %s", mappingJSON.Checksum, err.Error())
|
||||
continue
|
||||
}
|
||||
|
||||
// Populate scene fields
|
||||
if sceneJSON != nil {
|
||||
if sceneJSON.Title != "" {
|
||||
newScene.Title = sql.NullString{String: sceneJSON.Title, Valid: true}
|
||||
}
|
||||
if sceneJSON.Details != "" {
|
||||
newScene.Details = sql.NullString{String: sceneJSON.Details, Valid: true}
|
||||
}
|
||||
if sceneJSON.Url != "" {
|
||||
newScene.Url = sql.NullString{String: sceneJSON.Url, Valid: true}
|
||||
}
|
||||
if sceneJSON.Date != "" {
|
||||
newScene.Date = sql.NullString{String: sceneJSON.Date, Valid: true}
|
||||
}
|
||||
if sceneJSON.Rating != 0 {
|
||||
newScene.Rating = sql.NullInt64{Int64: int64(sceneJSON.Rating), Valid: true}
|
||||
}
|
||||
|
||||
if sceneJSON.File != nil {
|
||||
if sceneJSON.File.Size != "" {
|
||||
newScene.Size = sql.NullString{String: sceneJSON.File.Size, Valid: true}
|
||||
}
|
||||
if sceneJSON.File.Duration != "" {
|
||||
duration, _ := strconv.ParseFloat(sceneJSON.File.Duration, 64)
|
||||
newScene.Duration = sql.NullFloat64{Float64: duration, Valid: true}
|
||||
}
|
||||
if sceneJSON.File.VideoCodec != "" {
|
||||
newScene.VideoCodec = sql.NullString{String: sceneJSON.File.VideoCodec, Valid: true}
|
||||
}
|
||||
if sceneJSON.File.AudioCodec != "" {
|
||||
newScene.AudioCodec = sql.NullString{String: sceneJSON.File.AudioCodec, Valid: true}
|
||||
}
|
||||
if sceneJSON.File.Width != 0 {
|
||||
newScene.Width = sql.NullInt64{Int64: int64(sceneJSON.File.Width), Valid: true}
|
||||
}
|
||||
if sceneJSON.File.Height != 0 {
|
||||
newScene.Height = sql.NullInt64{Int64: int64(sceneJSON.File.Height), Valid: true}
|
||||
}
|
||||
if sceneJSON.File.Framerate != "" {
|
||||
framerate, _ := strconv.ParseFloat(sceneJSON.File.Framerate, 64)
|
||||
newScene.Framerate = sql.NullFloat64{Float64: framerate, Valid: true}
|
||||
}
|
||||
if sceneJSON.File.Bitrate != 0 {
|
||||
newScene.Bitrate = sql.NullInt64{Int64: int64(sceneJSON.File.Bitrate), Valid: true}
|
||||
}
|
||||
} else {
|
||||
// TODO: Get FFMPEG data?
|
||||
}
|
||||
}
|
||||
|
||||
// Populate the studio ID
|
||||
if sceneJSON.Studio != "" {
|
||||
sqb := models.NewStudioQueryBuilder()
|
||||
studio, err := sqb.FindByName(sceneJSON.Studio, tx)
|
||||
if err != nil {
|
||||
logger.Warn("[scenes] studio <%s> does not exist: %s", sceneJSON.Studio, err)
|
||||
} else {
|
||||
newScene.StudioID = sql.NullInt64{ Int64: int64(studio.ID), Valid: true }
|
||||
}
|
||||
}
|
||||
|
||||
		// Create the scene in the DB
		scene, err := qb.Create(newScene, tx)
		if err != nil {
			_ = tx.Rollback()
			logger.Errorf("[scenes] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
			return
		}
		if scene.ID == 0 {
			_ = tx.Rollback()
			logger.Errorf("[scenes] <%s> invalid id after scene creation", mappingJSON.Checksum)
			return
		}

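		// The gallery row carries the scene reference, so the link is written
		// by updating the gallery rather than the scene.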
		// Relate the scene to the gallery
		if sceneJSON.Gallery != "" {
			gqb := models.NewGalleryQueryBuilder()
			gallery, err := gqb.FindByChecksum(sceneJSON.Gallery, tx)
			if err != nil {
				logger.Warnf("[scenes] gallery <%s> does not exist: %s", sceneJSON.Gallery, err)
			} else {
				gallery.SceneID = sql.NullInt64{Int64: int64(scene.ID), Valid: true}
				if _, err := gqb.Update(*gallery, tx); err != nil {
					logger.Errorf("[scenes] <%s> failed to update gallery: %s", scene.Checksum, err.Error())
				}
			}
		}

		// Relate the scene to the performers
		if len(sceneJSON.Performers) > 0 {
			performers, err := t.getPerformers(sceneJSON.Performers, tx)
			if err != nil {
				logger.Warnf("[scenes] <%s> failed to fetch performers: %s", scene.Checksum, err)
			} else {
				var performerJoins []models.PerformersScenes
				for _, performer := range performers {
					join := models.PerformersScenes{
						PerformerID: performer.ID,
						SceneID:     scene.ID,
					}
					performerJoins = append(performerJoins, join)
				}
				if err := jqb.CreatePerformersScenes(performerJoins, tx); err != nil {
					logger.Errorf("[scenes] <%s> failed to associate performers: %s", scene.Checksum, err.Error())
				}
			}
		}

		// Relate the scene to the tags
		if len(sceneJSON.Tags) > 0 {
			tags, err := t.getTags(scene.Checksum, sceneJSON.Tags, tx)
			if err != nil {
				logger.Warnf("[scenes] <%s> failed to fetch tags: %s", scene.Checksum, err)
			} else {
				var tagJoins []models.ScenesTags
				for _, tag := range tags {
					join := models.ScenesTags{
						SceneID: scene.ID,
						TagID:   tag.ID,
					}
					tagJoins = append(tagJoins, join)
				}
				if err := jqb.CreateScenesTags(tagJoins, tx); err != nil {
					logger.Errorf("[scenes] <%s> failed to associate tags: %s", scene.Checksum, err.Error())
				}
			}
		}

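		// Markers are created per scene; each needs a primary tag plus any
		// number of extra tags, so creation and tagging happen inline here.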
		// Relate the scene to the scene markers
		if len(sceneJSON.Markers) > 0 {
			smqb := models.NewSceneMarkerQueryBuilder()
			tqb := models.NewTagQueryBuilder()
			for _, marker := range sceneJSON.Markers {
				seconds, _ := strconv.ParseFloat(marker.Seconds, 64)
				newSceneMarker := models.SceneMarker{
					Title:     marker.Title,
					Seconds:   seconds,
					SceneID:   sql.NullInt64{Int64: int64(scene.ID), Valid: true},
					CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
					UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
				}

				primaryTag, err := tqb.FindByName(marker.PrimaryTag, tx)
				if err != nil {
					logger.Errorf("[scenes] <%s> failed to find primary tag for marker: %s", scene.Checksum, err.Error())
				} else {
					newSceneMarker.PrimaryTagID = sql.NullInt64{Int64: int64(primaryTag.ID), Valid: true}
				}

				// Create the scene marker in the DB
				sceneMarker, err := smqb.Create(newSceneMarker, tx)
				if err != nil {
					logger.Warnf("[scenes] <%s> failed to create scene marker: %s", scene.Checksum, err.Error())
					continue
				}
				if sceneMarker.ID == 0 {
					logger.Warnf("[scenes] <%s> invalid scene marker id after scene marker creation", scene.Checksum)
					continue
				}

				// Get the scene marker tags and create the joins
				tags, err := t.getTags(scene.Checksum, marker.Tags, tx)
				if err != nil {
					logger.Warnf("[scenes] <%s> failed to fetch scene marker tags: %s", scene.Checksum, err)
				} else {
					var tagJoins []models.SceneMarkersTags
					for _, tag := range tags {
						join := models.SceneMarkersTags{
							SceneMarkerID: sceneMarker.ID,
							TagID:         tag.ID,
						}
						tagJoins = append(tagJoins, join)
					}
					if err := jqb.CreateSceneMarkersTags(tagJoins, tx); err != nil {
						logger.Errorf("[scenes] <%s> failed to associate scene marker tags: %s", scene.Checksum, err.Error())
					}
				}
			}
		}
	}

	logger.Info("[scenes] importing")
	if err := tx.Commit(); err != nil {
		logger.Errorf("[scenes] import failed to commit: %s", err.Error())
	}
	logger.Info("[scenes] import complete")
}

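// getPerformers resolves performer names to rows within the supplied
// transaction; names with no matching row are logged and skipped rather
// than failing the import.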
func (t *ImportTask) getPerformers(names []string, tx *sqlx.Tx) ([]models.Performer, error) {
	pqb := models.NewPerformerQueryBuilder()
	performers, err := pqb.FindByNames(names, tx)
	if err != nil {
		return nil, err
	}

	var pluckedNames []string
	for _, performer := range performers {
		if !performer.Name.Valid {
			continue
		}
		pluckedNames = append(pluckedNames, performer.Name.String)
	}

	missingPerformers := utils.StrFilter(names, func(name string) bool {
		return !utils.StrInclude(pluckedNames, name)
	})

	for _, missingPerformer := range missingPerformers {
		logger.Warnf("[scenes] performer %s does not exist", missingPerformer)
	}

	return performers, nil
}

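// getTags is the tag counterpart of getPerformers; sceneChecksum is only
// used to give the warning log some context.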
func (t *ImportTask) getTags(sceneChecksum string, names []string, tx *sqlx.Tx) ([]models.Tag, error) {
	tqb := models.NewTagQueryBuilder()
	tags, err := tqb.FindByNames(names, tx)
	if err != nil {
		return nil, err
	}

	var pluckedNames []string
	for _, tag := range tags {
		if tag.Name == "" {
			continue
		}
		pluckedNames = append(pluckedNames, tag.Name)
	}

	missingTags := utils.StrFilter(names, func(name string) bool {
		return !utils.StrInclude(pluckedNames, name)
	})

	for _, missingTag := range missingTags {
		logger.Warnf("[scenes] <%s> tag %s does not exist", sceneChecksum, missingTag)
	}

	return tags, nil
}

// https://www.reddit.com/r/golang/comments/5ia523/idiomatic_way_to_remove_duplicates_in_a_slice/db6qa2e
func (t *ImportTask) getUnique(s []string) []string {
	// In-place dedupe: keep the first occurrence of each string and compact
	// the slice, preserving order.
	seen := make(map[string]struct{}, len(s))
	j := 0
	for _, v := range s {
		if _, ok := seen[v]; ok {
			continue
		}
		seen[v] = struct{}{}
		s[j] = v
		j++
	}
	return s[:j]
}
@@ -1,164 +0,0 @@
package manager

import (
	"context"
	"database/sql"
	"path/filepath"
	"strconv"
	"sync"
	"time"

	"github.com/stashapp/stash/internal/database"
	"github.com/stashapp/stash/internal/ffmpeg"
	"github.com/stashapp/stash/internal/logger"
	"github.com/stashapp/stash/internal/models"
	"github.com/stashapp/stash/internal/utils"
)

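// ScanTask processes a single file path: zip archives are scanned as
// galleries, everything else as a scene.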
type ScanTask struct {
	FilePath string
}

func (t *ScanTask) Start(wg *sync.WaitGroup) {
	defer wg.Done() // signal completion even if a scan returns early

	if filepath.Ext(t.FilePath) == ".zip" {
		t.scanGallery()
	} else {
		t.scanScene()
	}
}

func (t *ScanTask) scanGallery() {
	qb := models.NewGalleryQueryBuilder()
	gallery, _ := qb.FindByPath(t.FilePath)
	if gallery != nil {
		// We already have this item in the database, keep going
		return
	}

	checksum, err := t.calculateChecksum()
	if err != nil {
		logger.Error(err.Error())
		return
	}

	ctx := context.TODO()
	tx := database.DB.MustBeginTx(ctx, nil)
	gallery, _ = qb.FindByChecksum(checksum, tx)
	if gallery != nil {
		logger.Infof("%s already exists. Updating path...", t.FilePath)
		gallery.Path = t.FilePath
		_, err = qb.Update(*gallery, tx)
	} else {
		logger.Infof("%s doesn't exist. Creating new item...", t.FilePath)
		currentTime := time.Now()
		newGallery := models.Gallery{
			Checksum:  checksum,
			Path:      t.FilePath,
			CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
			UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
		}
		_, err = qb.Create(newGallery, tx)
	}

	if err != nil {
		logger.Error(err.Error())
		_ = tx.Rollback()
	} else if err := tx.Commit(); err != nil {
		logger.Error(err.Error())
	}
}

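// scanScene probes the file with ffprobe up front, so a brand-new scene row
// can be created with its media metadata in a single pass.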
func (t *ScanTask) scanScene() {
	ffprobe := ffmpeg.NewFFProbe(instance.Paths.FixedPaths.FFProbe)
	ffprobeResult, err := ffprobe.ProbeVideo(t.FilePath)
	if err != nil {
		logger.Error(err.Error())
		return
	}

	qb := models.NewSceneQueryBuilder()
	scene, _ := qb.FindByPath(t.FilePath)
	if scene != nil {
		// We already have this item in the database, keep going
		return
	}

	checksum, err := t.calculateChecksum()
	if err != nil {
		logger.Error(err.Error())
		return
	}

	t.makeScreenshots(*ffprobeResult, checksum)

	scene, _ = qb.FindByChecksum(checksum)
	ctx := context.TODO()
	tx := database.DB.MustBeginTx(ctx, nil)
	if scene != nil {
		logger.Infof("%s already exists. Updating path...", t.FilePath)
		scene.Path = t.FilePath
		_, err = qb.Update(*scene, tx)
	} else {
		logger.Infof("%s doesn't exist. Creating new item...", t.FilePath)
		currentTime := time.Now()
		newScene := models.Scene{
			Checksum:   checksum,
			Path:       t.FilePath,
			Duration:   sql.NullFloat64{Float64: ffprobeResult.Duration, Valid: true},
			VideoCodec: sql.NullString{String: ffprobeResult.VideoCodec, Valid: true},
			AudioCodec: sql.NullString{String: ffprobeResult.AudioCodec, Valid: true},
			Width:      sql.NullInt64{Int64: int64(ffprobeResult.Width), Valid: true},
			Height:     sql.NullInt64{Int64: int64(ffprobeResult.Height), Valid: true},
			Framerate:  sql.NullFloat64{Float64: ffprobeResult.FrameRate, Valid: true},
			Bitrate:    sql.NullInt64{Int64: ffprobeResult.Bitrate, Valid: true},
			Size:       sql.NullString{String: strconv.Itoa(int(ffprobeResult.Size)), Valid: true},
			CreatedAt:  models.SQLiteTimestamp{Timestamp: currentTime},
			UpdatedAt:  models.SQLiteTimestamp{Timestamp: currentTime},
		}
		_, err = qb.Create(newScene, tx)
	}

	if err != nil {
		logger.Error(err.Error())
		_ = tx.Rollback()
	} else if err := tx.Commit(); err != nil {
		logger.Error(err.Error())
	}
}

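// makeScreenshots writes the thumbnail and full-size screenshot for a scene,
// skipping the work when both files already exist on disk.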
func (t *ScanTask) makeScreenshots(probeResult ffmpeg.FFProbeResult, checksum string) {
	thumbPath := instance.Paths.Scene.GetThumbnailScreenshotPath(checksum)
	normalPath := instance.Paths.Scene.GetScreenshotPath(checksum)

	thumbExists, _ := utils.FileExists(thumbPath)
	normalExists, _ := utils.FileExists(normalPath)
	if thumbExists && normalExists {
		logger.Debug("Screenshots already exist for this path... skipping")
		return
	}

	// A small 320px thumbnail plus a screenshot at the video's native width.
	t.makeScreenshot(probeResult, thumbPath, 5, 320)
	t.makeScreenshot(probeResult, normalPath, 2, probeResult.Width)
}

func (t *ScanTask) makeScreenshot(probeResult ffmpeg.FFProbeResult, outputPath string, quality int, width int) {
	encoder := ffmpeg.NewEncoder(instance.Paths.FixedPaths.FFMPEG)
	options := ffmpeg.ScreenshotOptions{
		OutputPath: outputPath,
		Quality:    quality,
		// Grab the frame at 20% of the video's duration.
		Time:  probeResult.Duration * 0.2,
		Width: width,
	}
	encoder.Screenshot(probeResult, options)
}

func (t *ScanTask) calculateChecksum() (string, error) {
	logger.Infof("%s not found. Calculating checksum...", t.FilePath)
	checksum, err := utils.MD5FromFilePath(t.FilePath)
	if err != nil {
		return "", err
	}
	logger.Debugf("Checksum calculated: %s", checksum)
	return checksum, nil
}
@@ -1,17 +0,0 @@
package manager

import "github.com/stashapp/stash/internal/utils"

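// IsStreamable returns true when the file's MIME type can be served to the
// player as-is; for any other type it checks whether a pre-generated
// transcode already exists for the scene.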
func IsStreamable(videoPath string, checksum string) (bool, error) {
	fileType, err := utils.FileType(videoPath)
	if err != nil {
		return false, err
	}

	switch fileType.MIME.Value {
	case "video/quicktime", "video/mp4", "video/webm", "video/x-m4v":
		return true, nil
	default:
		transcodePath := instance.Paths.Scene.GetTranscodePath(checksum)
		return utils.FileExists(transcodePath)
	}
}