From 8866670e531cb281f6d96088bc9d9d21a8ec2942 Mon Sep 17 00:00:00 2001 From: WithoutPants <53250216+WithoutPants@users.noreply.github.com> Date: Sun, 20 Sep 2020 18:36:02 +1000 Subject: [PATCH] Add partial import functionality (#812) --- graphql/documents/mutations/metadata.graphql | 4 + graphql/schema/schema.graphql | 7 +- graphql/schema/types/metadata.graphql | 20 + pkg/api/resolver_mutation_metadata.go | 10 + pkg/gallery/import.go | 72 ++ pkg/gallery/import_test.go | 147 +++ pkg/manager/import.go | 61 ++ pkg/manager/jsonschema/utils.go | 3 +- pkg/manager/manager_tasks.go | 8 +- pkg/manager/task_import.go | 878 +++++++----------- pkg/models/gallery.go | 24 +- pkg/models/join.go | 36 +- pkg/models/json_time.go | 21 +- pkg/models/mocks/GalleryReaderWriter.go | 92 ++ pkg/models/mocks/JoinReaderWriter.go | 84 ++ pkg/models/mocks/MovieReaderWriter.go | 129 +++ pkg/models/mocks/PerformerReaderWriter.go | 83 ++ pkg/models/mocks/SceneMarkerReaderWriter.go | 46 + pkg/models/mocks/SceneReaderWriter.go | 129 +++ pkg/models/mocks/StudioReaderWriter.go | 106 +++ pkg/models/mocks/TagReaderWriter.go | 106 +++ pkg/models/model_movie.go | 13 + pkg/models/model_performer.go | 14 + pkg/models/model_studio.go | 13 + pkg/models/model_tag.go | 11 + pkg/models/movie.go | 35 +- pkg/models/performer.go | 24 +- pkg/models/querybuilder_movies.go | 13 + pkg/models/querybuilder_scene.go | 13 + pkg/models/querybuilder_studio.go | 17 + pkg/models/scene.go | 35 +- pkg/models/scene_marker.go | 12 +- pkg/models/sqlite_date.go | 8 +- pkg/models/studio.go | 29 +- pkg/models/tag.go | 30 +- pkg/movie/import.go | 166 ++++ pkg/movie/import_test.go | 278 ++++++ pkg/performer/export_test.go | 22 +- pkg/performer/import.go | 144 +++ pkg/performer/import_test.go | 184 ++++ pkg/scene/import.go | 483 ++++++++++ pkg/scene/import_test.go | 761 +++++++++++++++ pkg/scene/marker_import.go | 125 +++ pkg/scene/marker_import_test.go | 210 +++++ pkg/studio/import.go | 143 +++ pkg/studio/import_test.go | 263 ++++++ 
pkg/tag/import.go | 85 ++ pkg/tag/import_test.go | 179 ++++ ui/v2.5/package.json | 2 + .../src/components/Changelog/versions/v040.md | 1 + .../SettingsTasksPanel/ImportDialog.tsx | 171 ++++ .../SettingsTasksPanel/SettingsTasksPanel.tsx | 34 +- ui/v2.5/src/components/Shared/Modal.tsx | 4 +- ui/v2.5/src/core/StashService.ts | 6 + ui/v2.5/src/core/createClient.ts | 5 +- ui/v2.5/yarn.lock | 55 +- 56 files changed, 5030 insertions(+), 624 deletions(-) create mode 100644 pkg/gallery/import.go create mode 100644 pkg/gallery/import_test.go create mode 100644 pkg/manager/import.go create mode 100644 pkg/movie/import.go create mode 100644 pkg/movie/import_test.go create mode 100644 pkg/performer/import.go create mode 100644 pkg/performer/import_test.go create mode 100644 pkg/scene/import.go create mode 100644 pkg/scene/import_test.go create mode 100644 pkg/scene/marker_import.go create mode 100644 pkg/scene/marker_import_test.go create mode 100644 pkg/studio/import.go create mode 100644 pkg/studio/import_test.go create mode 100644 pkg/tag/import.go create mode 100644 pkg/tag/import_test.go create mode 100644 ui/v2.5/src/components/Settings/SettingsTasksPanel/ImportDialog.tsx diff --git a/graphql/documents/mutations/metadata.graphql b/graphql/documents/mutations/metadata.graphql index b33a91eec..0b94728af 100644 --- a/graphql/documents/mutations/metadata.graphql +++ b/graphql/documents/mutations/metadata.graphql @@ -10,6 +10,10 @@ mutation ExportObjects($input: ExportObjectsInput!) { exportObjects(input: $input) } +mutation ImportObjects($input: ImportObjectsInput!) { + importObjects(input: $input) +} + mutation MetadataScan($input: ScanMetadataInput!) 
{ metadataScan(input: $input) } diff --git a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql index ac1e2d696..e0dfea489 100644 --- a/graphql/schema/schema.graphql +++ b/graphql/schema/schema.graphql @@ -164,9 +164,12 @@ type Mutation { """Returns a link to download the result""" exportObjects(input: ExportObjectsInput!): String - """Start an import. Returns the job ID""" + """Performs an incremental import. Returns the job ID""" + importObjects(input: ImportObjectsInput!): String! + + """Start an full import. Completely wipes the database and imports from the metadata directory. Returns the job ID""" metadataImport: String! - """Start an export. Returns the job ID""" + """Start a full export. Outputs to the metadata directory. Returns the job ID""" metadataExport: String! """Start a scan. Returns the job ID""" metadataScan(input: ScanMetadataInput!): String! diff --git a/graphql/schema/types/metadata.graphql b/graphql/schema/types/metadata.graphql index 5b7253530..41b952587 100644 --- a/graphql/schema/types/metadata.graphql +++ b/graphql/schema/types/metadata.graphql @@ -1,3 +1,5 @@ +scalar Upload + input GenerateMetadataInput { sprites: Boolean! previews: Boolean! @@ -65,3 +67,21 @@ input ExportObjectsInput { galleries: ExportObjectTypeInput includeDependencies: Boolean } + +enum ImportDuplicateEnum { + IGNORE + OVERWRITE + FAIL +} + +enum ImportMissingRefEnum { + IGNORE + FAIL + CREATE +} + +input ImportObjectsInput { + file: Upload! + duplicateBehaviour: ImportDuplicateEnum! + missingRefBehaviour: ImportMissingRefEnum! 
+} diff --git a/pkg/api/resolver_mutation_metadata.go b/pkg/api/resolver_mutation_metadata.go index 992712115..fa48b2287 100644 --- a/pkg/api/resolver_mutation_metadata.go +++ b/pkg/api/resolver_mutation_metadata.go @@ -19,6 +19,16 @@ func (r *mutationResolver) MetadataImport(ctx context.Context) (string, error) { return "todo", nil } +func (r *mutationResolver) ImportObjects(ctx context.Context, input models.ImportObjectsInput) (string, error) { + t := manager.CreateImportTask(config.GetVideoFileNamingAlgorithm(), input) + _, err := manager.GetInstance().RunSingleTask(t) + if err != nil { + return "", err + } + + return "todo", nil +} + func (r *mutationResolver) MetadataExport(ctx context.Context) (string, error) { manager.GetInstance().Export() return "todo", nil diff --git a/pkg/gallery/import.go b/pkg/gallery/import.go new file mode 100644 index 000000000..f671181fb --- /dev/null +++ b/pkg/gallery/import.go @@ -0,0 +1,72 @@ +package gallery + +import ( + "fmt" + "time" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" +) + +type Importer struct { + ReaderWriter models.GalleryReaderWriter + Input jsonschema.PathMapping + + gallery models.Gallery + imageData []byte +} + +func (i *Importer) PreImport() error { + currentTime := time.Now() + i.gallery = models.Gallery{ + Checksum: i.Input.Checksum, + Path: i.Input.Path, + CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + } + + return nil +} + +func (i *Importer) PostImport(id int) error { + return nil +} + +func (i *Importer) Name() string { + return i.Input.Path +} + +func (i *Importer) FindExistingID() (*int, error) { + existing, err := i.ReaderWriter.FindByPath(i.Name()) + if err != nil { + return nil, err + } + + if existing != nil { + id := existing.ID + return &id, nil + } + + return nil, nil +} + +func (i *Importer) Create() (*int, error) { + created, err := i.ReaderWriter.Create(i.gallery) + 
if err != nil { + return nil, fmt.Errorf("error creating gallery: %s", err.Error()) + } + + id := created.ID + return &id, nil +} + +func (i *Importer) Update(id int) error { + gallery := i.gallery + gallery.ID = id + _, err := i.ReaderWriter.Update(gallery) + if err != nil { + return fmt.Errorf("error updating existing gallery: %s", err.Error()) + } + + return nil +} diff --git a/pkg/gallery/import_test.go b/pkg/gallery/import_test.go new file mode 100644 index 000000000..29dc1cb8a --- /dev/null +++ b/pkg/gallery/import_test.go @@ -0,0 +1,147 @@ +package gallery + +import ( + "errors" + "testing" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stretchr/testify/assert" +) + +const ( + galleryPath = "galleryPath" + galleryPathErr = "galleryPathErr" + existingGalleryPath = "existingGalleryPath" + + galleryID = 1 + idErr = 2 + existingGalleryID = 100 +) + +func TestImporterName(t *testing.T) { + i := Importer{ + Input: jsonschema.PathMapping{ + Path: galleryPath, + }, + } + + assert.Equal(t, galleryPath, i.Name()) +} + +func TestImporterPreImport(t *testing.T) { + i := Importer{ + Input: jsonschema.PathMapping{ + Path: galleryPath, + }, + } + + err := i.PreImport() + assert.Nil(t, err) +} + +func TestImporterFindExistingID(t *testing.T) { + readerWriter := &mocks.GalleryReaderWriter{} + + i := Importer{ + ReaderWriter: readerWriter, + Input: jsonschema.PathMapping{ + Path: galleryPath, + }, + } + + errFindByPath := errors.New("FindByPath error") + readerWriter.On("FindByPath", galleryPath).Return(nil, nil).Once() + readerWriter.On("FindByPath", existingGalleryPath).Return(&models.Gallery{ + ID: existingGalleryID, + }, nil).Once() + readerWriter.On("FindByPath", galleryPathErr).Return(nil, errFindByPath).Once() + + id, err := i.FindExistingID() + assert.Nil(t, id) + assert.Nil(t, err) + + i.Input.Path = existingGalleryPath + id, err = i.FindExistingID() + 
assert.Equal(t, existingGalleryID, *id) + assert.Nil(t, err) + + i.Input.Path = galleryPathErr + id, err = i.FindExistingID() + assert.Nil(t, id) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestCreate(t *testing.T) { + readerWriter := &mocks.GalleryReaderWriter{} + + gallery := models.Gallery{ + Path: galleryPath, + } + + galleryErr := models.Gallery{ + Path: galleryPathErr, + } + + i := Importer{ + ReaderWriter: readerWriter, + gallery: gallery, + } + + errCreate := errors.New("Create error") + readerWriter.On("Create", gallery).Return(&models.Gallery{ + ID: galleryID, + }, nil).Once() + readerWriter.On("Create", galleryErr).Return(nil, errCreate).Once() + + id, err := i.Create() + assert.Equal(t, galleryID, *id) + assert.Nil(t, err) + + i.gallery = galleryErr + id, err = i.Create() + assert.Nil(t, id) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestUpdate(t *testing.T) { + readerWriter := &mocks.GalleryReaderWriter{} + + gallery := models.Gallery{ + Path: galleryPath, + } + + galleryErr := models.Gallery{ + Path: galleryPathErr, + } + + i := Importer{ + ReaderWriter: readerWriter, + gallery: gallery, + } + + errUpdate := errors.New("Update error") + + // id needs to be set for the mock input + gallery.ID = galleryID + readerWriter.On("Update", gallery).Return(nil, nil).Once() + + err := i.Update(galleryID) + assert.Nil(t, err) + + i.gallery = galleryErr + + // need to set id separately + galleryErr.ID = idErr + readerWriter.On("Update", galleryErr).Return(nil, errUpdate).Once() + + err = i.Update(idErr) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} diff --git a/pkg/manager/import.go b/pkg/manager/import.go new file mode 100644 index 000000000..d5f61bcf2 --- /dev/null +++ b/pkg/manager/import.go @@ -0,0 +1,61 @@ +package manager + +import ( + "fmt" + + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" +) + +type importer interface { + PreImport() error + 
PostImport(id int) error + Name() string + FindExistingID() (*int, error) + Create() (*int, error) + Update(id int) error +} + +func performImport(i importer, duplicateBehaviour models.ImportDuplicateEnum) error { + if err := i.PreImport(); err != nil { + return err + } + + // try to find an existing object with the same name + name := i.Name() + existing, err := i.FindExistingID() + if err != nil { + return fmt.Errorf("error finding existing objects: %s", err.Error()) + } + + var id int + + if existing != nil { + if duplicateBehaviour == models.ImportDuplicateEnumFail { + return fmt.Errorf("existing object with name '%s'", name) + } else if duplicateBehaviour == models.ImportDuplicateEnumIgnore { + logger.Info("Skipping existing object") + return nil + } + + // must be overwriting + id = *existing + if err := i.Update(id); err != nil { + return fmt.Errorf("error updating existing object: %s", err.Error()) + } + } else { + // creating + createdID, err := i.Create() + if err != nil { + return fmt.Errorf("error creating object: %s", err.Error()) + } + + id = *createdID + } + + if err := i.PostImport(id); err != nil { + return err + } + + return nil +} diff --git a/pkg/manager/jsonschema/utils.go b/pkg/manager/jsonschema/utils.go index cbe3cd3ad..921d68fec 100644 --- a/pkg/manager/jsonschema/utils.go +++ b/pkg/manager/jsonschema/utils.go @@ -2,10 +2,11 @@ package jsonschema import ( "bytes" - "github.com/json-iterator/go" "io/ioutil" "time" + + jsoniter "github.com/json-iterator/go" ) var nilTime = (time.Time{}).UnixNano() diff --git a/pkg/manager/manager_tasks.go b/pkg/manager/manager_tasks.go index f08c5cd3b..af47e2f63 100644 --- a/pkg/manager/manager_tasks.go +++ b/pkg/manager/manager_tasks.go @@ -153,7 +153,13 @@ func (s *singleton) Import() { var wg sync.WaitGroup wg.Add(1) - task := ImportTask{fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm()} + task := ImportTask{ + BaseDir: config.GetMetadataPath(), + Reset: true, + DuplicateBehaviour: 
models.ImportDuplicateEnumFail, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(), + } go task.Start(&wg) wg.Wait() }() diff --git a/pkg/manager/task_import.go b/pkg/manager/task_import.go index c2e1e4d47..d6da28523 100644 --- a/pkg/manager/task_import.go +++ b/pkg/manager/task_import.go @@ -1,42 +1,99 @@ package manager import ( + "archive/zip" "context" "database/sql" "fmt" - "strconv" + "io" + "os" + "path/filepath" "sync" "time" "github.com/jmoiron/sqlx" "github.com/stashapp/stash/pkg/database" + "github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/manager/jsonschema" "github.com/stashapp/stash/pkg/manager/paths" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/movie" + "github.com/stashapp/stash/pkg/performer" + "github.com/stashapp/stash/pkg/scene" + "github.com/stashapp/stash/pkg/studio" + "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/utils" ) type ImportTask struct { json jsonUtils - Mappings *jsonschema.Mappings - Scraped []jsonschema.ScrapedItem + BaseDir string + ZipFile io.Reader + Reset bool + DuplicateBehaviour models.ImportDuplicateEnum + MissingRefBehaviour models.ImportMissingRefEnum + + mappings *jsonschema.Mappings + scraped []jsonschema.ScrapedItem fileNamingAlgorithm models.HashAlgorithm } +func CreateImportTask(a models.HashAlgorithm, input models.ImportObjectsInput) *ImportTask { + return &ImportTask{ + ZipFile: input.File.File, + Reset: false, + DuplicateBehaviour: input.DuplicateBehaviour, + MissingRefBehaviour: input.MissingRefBehaviour, + fileNamingAlgorithm: a, + } +} + +func (t *ImportTask) GetStatus() JobStatus { + return Import +} + func (t *ImportTask) Start(wg *sync.WaitGroup) { defer wg.Done() - baseDir := config.GetMetadataPath() + if t.ZipFile != nil { + // unzip the file and defer remove the temp directory + var err 
error + t.BaseDir, err = instance.Paths.Generated.TempDir("import") + if err != nil { + logger.Errorf("error creating temporary directory for import: %s", err.Error()) + return + } - t.json = jsonUtils{ - json: *paths.GetJSONPaths(baseDir), + defer func() { + err := utils.RemoveDir(t.BaseDir) + if err != nil { + logger.Errorf("error removing directory %s: %s", t.BaseDir, err.Error()) + } + }() + + if err := t.unzipFile(); err != nil { + logger.Errorf("error unzipping provided file for import: %s", err.Error()) + return + } } - t.Mappings, _ = t.json.getMappings() - if t.Mappings == nil { + t.json = jsonUtils{ + json: *paths.GetJSONPaths(t.BaseDir), + } + + // set default behaviour if not provided + if !t.DuplicateBehaviour.IsValid() { + t.DuplicateBehaviour = models.ImportDuplicateEnumFail + } + if !t.MissingRefBehaviour.IsValid() { + t.MissingRefBehaviour = models.ImportMissingRefEnumFail + } + + t.mappings, _ = t.json.getMappings() + if t.mappings == nil { logger.Error("missing mappings json") return } @@ -44,13 +101,15 @@ func (t *ImportTask) Start(wg *sync.WaitGroup) { if scraped == nil { logger.Warn("missing scraped json") } - t.Scraped = scraped + t.scraped = scraped - err := database.Reset(config.GetDatabasePath()) + if t.Reset { + err := database.Reset(config.GetDatabasePath()) - if err != nil { - logger.Errorf("Error resetting database: %s", err.Error()) - return + if err != nil { + logger.Errorf("Error resetting database: %s", err.Error()) + return + } } ctx := context.TODO() @@ -65,140 +124,138 @@ func (t *ImportTask) Start(wg *sync.WaitGroup) { t.ImportScenes(ctx) } -func (t *ImportTask) ImportPerformers(ctx context.Context) { - tx := database.DB.MustBeginTx(ctx, nil) - qb := models.NewPerformerQueryBuilder() +func (t *ImportTask) unzipFile() error { + // copy the zip file to the temporary directory + tmpZip := filepath.Join(t.BaseDir, "import.zip") + out, err := os.Create(tmpZip) + if err != nil { + return err + } - for i, mappingJSON := range 
t.Mappings.Performers { + if _, err := io.Copy(out, t.ZipFile); err != nil { + out.Close() + return err + } + + out.Close() + + defer func() { + err := os.Remove(tmpZip) + if err != nil { + logger.Errorf("error removing temporary zip file %s: %s", tmpZip, err.Error()) + } + }() + + // now we can read the zip file + r, err := zip.OpenReader(tmpZip) + if err != nil { + return err + } + defer r.Close() + + for _, f := range r.File { + fn := filepath.Join(t.BaseDir, f.Name) + + if f.FileInfo().IsDir() { + os.MkdirAll(fn, os.ModePerm) + continue + } + + if err := os.MkdirAll(filepath.Dir(fn), os.ModePerm); err != nil { + return err + } + + o, err := os.OpenFile(fn, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()) + if err != nil { + return err + } + defer o.Close() + + i, err := f.Open() + if err != nil { + return err + } + defer i.Close() + + if _, err := io.Copy(o, i); err != nil { + return err + } + } + + return nil +} + +func (t *ImportTask) ImportPerformers(ctx context.Context) { + logger.Info("[performers] importing") + + for i, mappingJSON := range t.mappings.Performers { index := i + 1 performerJSON, err := t.json.getPerformer(mappingJSON.Checksum) if err != nil { logger.Errorf("[performers] failed to read json: %s", err.Error()) continue } - if mappingJSON.Checksum == "" || mappingJSON.Name == "" || performerJSON == nil { - return + + logger.Progressf("[performers] %d of %d", index, len(t.mappings.Performers)) + + tx := database.DB.MustBeginTx(ctx, nil) + readerWriter := models.NewPerformerReaderWriter(tx) + importer := &performer.Importer{ + ReaderWriter: readerWriter, + Input: *performerJSON, } - logger.Progressf("[performers] %d of %d", index, len(t.Mappings.Performers)) - - // generate checksum from performer name rather than image - checksum := utils.MD5FromString(performerJSON.Name) - - // Process the base 64 encoded image string - var imageData []byte - if len(performerJSON.Image) > 0 { - _, imageData, err = utils.ProcessBase64Image(performerJSON.Image) - 
if err != nil { - _ = tx.Rollback() - logger.Errorf("[performers] <%s> invalid image: %s", mappingJSON.Checksum, err.Error()) - return - } + if err := performImport(importer, t.DuplicateBehaviour); err != nil { + tx.Rollback() + logger.Errorf("[performers] <%s> failed to import: %s", mappingJSON.Checksum, err.Error()) + continue } - // Populate a new performer from the input - newPerformer := models.Performer{ - Checksum: checksum, - Favorite: sql.NullBool{Bool: performerJSON.Favorite, Valid: true}, - CreatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(performerJSON.CreatedAt)}, - UpdatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(performerJSON.UpdatedAt)}, - } - - if performerJSON.Name != "" { - newPerformer.Name = sql.NullString{String: performerJSON.Name, Valid: true} - } - if performerJSON.Gender != "" { - newPerformer.Gender = sql.NullString{String: performerJSON.Gender, Valid: true} - } - if performerJSON.URL != "" { - newPerformer.URL = sql.NullString{String: performerJSON.URL, Valid: true} - } - if performerJSON.Birthdate != "" { - newPerformer.Birthdate = models.SQLiteDate{String: performerJSON.Birthdate, Valid: true} - } - if performerJSON.Ethnicity != "" { - newPerformer.Ethnicity = sql.NullString{String: performerJSON.Ethnicity, Valid: true} - } - if performerJSON.Country != "" { - newPerformer.Country = sql.NullString{String: performerJSON.Country, Valid: true} - } - if performerJSON.EyeColor != "" { - newPerformer.EyeColor = sql.NullString{String: performerJSON.EyeColor, Valid: true} - } - if performerJSON.Height != "" { - newPerformer.Height = sql.NullString{String: performerJSON.Height, Valid: true} - } - if performerJSON.Measurements != "" { - newPerformer.Measurements = sql.NullString{String: performerJSON.Measurements, Valid: true} - } - if performerJSON.FakeTits != "" { - newPerformer.FakeTits = sql.NullString{String: performerJSON.FakeTits, Valid: true} - } - if performerJSON.CareerLength != "" { - 
newPerformer.CareerLength = sql.NullString{String: performerJSON.CareerLength, Valid: true} - } - if performerJSON.Tattoos != "" { - newPerformer.Tattoos = sql.NullString{String: performerJSON.Tattoos, Valid: true} - } - if performerJSON.Piercings != "" { - newPerformer.Piercings = sql.NullString{String: performerJSON.Piercings, Valid: true} - } - if performerJSON.Aliases != "" { - newPerformer.Aliases = sql.NullString{String: performerJSON.Aliases, Valid: true} - } - if performerJSON.Twitter != "" { - newPerformer.Twitter = sql.NullString{String: performerJSON.Twitter, Valid: true} - } - if performerJSON.Instagram != "" { - newPerformer.Instagram = sql.NullString{String: performerJSON.Instagram, Valid: true} - } - - createdPerformer, err := qb.Create(newPerformer, tx) - if err != nil { - _ = tx.Rollback() - logger.Errorf("[performers] <%s> failed to create: %s", mappingJSON.Checksum, err.Error()) - return - } - - // Add the performer image if set - if len(imageData) > 0 { - if err := qb.UpdatePerformerImage(createdPerformer.ID, imageData, tx); err != nil { - _ = tx.Rollback() - logger.Errorf("[performers] <%s> error setting performer image: %s", mappingJSON.Checksum, err.Error()) - return - } + if err := tx.Commit(); err != nil { + tx.Rollback() + logger.Errorf("[performers] <%s> import failed to commit: %s", mappingJSON.Checksum, err.Error()) } } - logger.Info("[performers] importing") - if err := tx.Commit(); err != nil { - logger.Errorf("[performers] import failed to commit: %s", err.Error()) - } logger.Info("[performers] import complete") } func (t *ImportTask) ImportStudios(ctx context.Context) { - tx := database.DB.MustBeginTx(ctx, nil) - pendingParent := make(map[string][]*jsonschema.Studio) - for i, mappingJSON := range t.Mappings.Studios { + logger.Info("[studios] importing") + + for i, mappingJSON := range t.mappings.Studios { index := i + 1 studioJSON, err := t.json.getStudio(mappingJSON.Checksum) if err != nil { logger.Errorf("[studios] failed to read 
json: %s", err.Error()) continue } - if mappingJSON.Checksum == "" || mappingJSON.Name == "" || studioJSON == nil { - return - } - logger.Progressf("[studios] %d of %d", index, len(t.Mappings.Studios)) + logger.Progressf("[studios] %d of %d", index, len(t.mappings.Studios)) + tx := database.DB.MustBeginTx(ctx, nil) + + // fail on missing parent studio to begin with if err := t.ImportStudio(studioJSON, pendingParent, tx); err != nil { tx.Rollback() + + if err == studio.ErrParentStudioNotExist { + // add to the pending parent list so that it is created after the parent + s := pendingParent[studioJSON.ParentStudio] + s = append(s, studioJSON) + pendingParent[studioJSON.ParentStudio] = s + continue + } + logger.Errorf("[studios] <%s> failed to create: %s", mappingJSON.Checksum, err.Error()) - return + continue + } + + if err := tx.Commit(); err != nil { + logger.Errorf("[studios] import failed to commit: %s", err.Error()) + continue } } @@ -208,84 +265,42 @@ func (t *ImportTask) ImportStudios(ctx context.Context) { for _, s := range pendingParent { for _, orphanStudioJSON := range s { + tx := database.DB.MustBeginTx(ctx, nil) + if err := t.ImportStudio(orphanStudioJSON, nil, tx); err != nil { tx.Rollback() logger.Errorf("[studios] <%s> failed to create: %s", orphanStudioJSON.Name, err.Error()) - return + continue + } + + if err := tx.Commit(); err != nil { + logger.Errorf("[studios] import failed to commit: %s", err.Error()) + continue } } } } - logger.Info("[studios] importing") - if err := tx.Commit(); err != nil { - logger.Errorf("[studios] import failed to commit: %s", err.Error()) - } logger.Info("[studios] import complete") } func (t *ImportTask) ImportStudio(studioJSON *jsonschema.Studio, pendingParent map[string][]*jsonschema.Studio, tx *sqlx.Tx) error { - qb := models.NewStudioQueryBuilder() - - // generate checksum from studio name rather than image - checksum := utils.MD5FromString(studioJSON.Name) - - // Process the base 64 encoded image string - var 
imageData []byte - var err error - if len(studioJSON.Image) > 0 { - _, imageData, err = utils.ProcessBase64Image(studioJSON.Image) - if err != nil { - return fmt.Errorf("invalid image: %s", err.Error()) - } + readerWriter := models.NewStudioReaderWriter(tx) + importer := &studio.Importer{ + ReaderWriter: readerWriter, + Input: *studioJSON, + MissingRefBehaviour: t.MissingRefBehaviour, } - // Populate a new studio from the input - newStudio := models.Studio{ - Checksum: checksum, - Name: sql.NullString{String: studioJSON.Name, Valid: true}, - URL: sql.NullString{String: studioJSON.URL, Valid: true}, - CreatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(studioJSON.CreatedAt)}, - UpdatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(studioJSON.UpdatedAt)}, + // first phase: return error if parent does not exist + if pendingParent != nil { + importer.MissingRefBehaviour = models.ImportMissingRefEnumFail } - // Populate the parent ID - if studioJSON.ParentStudio != "" { - studio, err := qb.FindByName(studioJSON.ParentStudio, tx, false) - if err != nil { - return fmt.Errorf("error finding studio by name <%s>: %s", studioJSON.ParentStudio, err.Error()) - } - - if studio == nil { - // its possible that the parent hasn't been created yet - // do it after it is created - if pendingParent == nil { - logger.Warnf("[studios] studio <%s> does not exist", studioJSON.ParentStudio) - } else { - // add to the pending parent list so that it is created after the parent - s := pendingParent[studioJSON.ParentStudio] - s = append(s, studioJSON) - pendingParent[studioJSON.ParentStudio] = s - - // skip - return nil - } - } else { - newStudio.ParentID = sql.NullInt64{Int64: int64(studio.ID), Valid: true} - } - } - - createdStudio, err := qb.Create(newStudio, tx) - if err != nil { + if err := performImport(importer, t.DuplicateBehaviour); err != nil { return err } - if len(imageData) > 0 { - if err := qb.UpdateStudioImage(createdStudio.ID, imageData, tx); err != 
nil { - return fmt.Errorf("error setting studio image: %s", err.Error()) - } - } - // now create the studios pending this studios creation s := pendingParent[studioJSON.Name] for _, childStudioJSON := range s { @@ -298,198 +313,114 @@ func (t *ImportTask) ImportStudio(studioJSON *jsonschema.Studio, pendingParent m // delete the entry from the map so that we know its not left over delete(pendingParent, studioJSON.Name) - return err + return nil } func (t *ImportTask) ImportMovies(ctx context.Context) { - tx := database.DB.MustBeginTx(ctx, nil) - qb := models.NewMovieQueryBuilder() + logger.Info("[movies] importing") - for i, mappingJSON := range t.Mappings.Movies { + for i, mappingJSON := range t.mappings.Movies { index := i + 1 movieJSON, err := t.json.getMovie(mappingJSON.Checksum) if err != nil { logger.Errorf("[movies] failed to read json: %s", err.Error()) continue } - if mappingJSON.Checksum == "" || mappingJSON.Name == "" || movieJSON == nil { - return + + logger.Progressf("[movies] %d of %d", index, len(t.mappings.Movies)) + + tx := database.DB.MustBeginTx(ctx, nil) + readerWriter := models.NewMovieReaderWriter(tx) + studioReaderWriter := models.NewStudioReaderWriter(tx) + + movieImporter := &movie.Importer{ + ReaderWriter: readerWriter, + StudioWriter: studioReaderWriter, + Input: *movieJSON, + MissingRefBehaviour: t.MissingRefBehaviour, } - logger.Progressf("[movies] %d of %d", index, len(t.Mappings.Movies)) - - // generate checksum from movie name rather than image - checksum := utils.MD5FromString(movieJSON.Name) - - // Process the base 64 encoded image string - var frontimageData []byte - var backimageData []byte - if len(movieJSON.FrontImage) > 0 { - _, frontimageData, err = utils.ProcessBase64Image(movieJSON.FrontImage) - if err != nil { - _ = tx.Rollback() - logger.Errorf("[movies] <%s> invalid front_image: %s", mappingJSON.Checksum, err.Error()) - return - } - } - if len(movieJSON.BackImage) > 0 { - _, backimageData, err = 
utils.ProcessBase64Image(movieJSON.BackImage) - if err != nil { - _ = tx.Rollback() - logger.Errorf("[movies] <%s> invalid back_image: %s", mappingJSON.Checksum, err.Error()) - return - } + if err := performImport(movieImporter, t.DuplicateBehaviour); err != nil { + tx.Rollback() + logger.Errorf("[movies] <%s> failed to import: %s", mappingJSON.Checksum, err.Error()) + continue } - // Populate a new movie from the input - newMovie := models.Movie{ - Checksum: checksum, - Name: sql.NullString{String: movieJSON.Name, Valid: true}, - Aliases: sql.NullString{String: movieJSON.Aliases, Valid: true}, - Date: models.SQLiteDate{String: movieJSON.Date, Valid: true}, - Director: sql.NullString{String: movieJSON.Director, Valid: true}, - Synopsis: sql.NullString{String: movieJSON.Synopsis, Valid: true}, - URL: sql.NullString{String: movieJSON.URL, Valid: true}, - CreatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(movieJSON.CreatedAt)}, - UpdatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(movieJSON.UpdatedAt)}, - } - - if movieJSON.Rating != 0 { - newMovie.Rating = sql.NullInt64{Int64: int64(movieJSON.Rating), Valid: true} - } - if movieJSON.Duration != 0 { - newMovie.Duration = sql.NullInt64{Int64: int64(movieJSON.Duration), Valid: true} - } - - // Populate the studio ID - if movieJSON.Studio != "" { - sqb := models.NewStudioQueryBuilder() - studio, err := sqb.FindByName(movieJSON.Studio, tx, false) - if err != nil { - logger.Warnf("[movies] error getting studio <%s>: %s", movieJSON.Studio, err.Error()) - } else if studio == nil { - logger.Warnf("[movies] studio <%s> does not exist", movieJSON.Studio) - } else { - newMovie.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true} - } - } - - createdMovie, err := qb.Create(newMovie, tx) - if err != nil { - _ = tx.Rollback() - logger.Errorf("[movies] <%s> failed to create: %s", mappingJSON.Checksum, err.Error()) - return - } - - // Add the movie images if set - if len(frontimageData) > 0 { - 
if err := qb.UpdateMovieImages(createdMovie.ID, frontimageData, backimageData, tx); err != nil { - _ = tx.Rollback() - logger.Errorf("[movies] <%s> error setting movie images: %s", mappingJSON.Checksum, err.Error()) - return - } + if err := tx.Commit(); err != nil { + tx.Rollback() + logger.Errorf("[movies] <%s> import failed to commit: %s", mappingJSON.Checksum, err.Error()) + continue } } - logger.Info("[movies] importing") - if err := tx.Commit(); err != nil { - logger.Errorf("[movies] import failed to commit: %s", err.Error()) - } logger.Info("[movies] import complete") } func (t *ImportTask) ImportGalleries(ctx context.Context) { - tx := database.DB.MustBeginTx(ctx, nil) - qb := models.NewGalleryQueryBuilder() - - for i, mappingJSON := range t.Mappings.Galleries { - index := i + 1 - if mappingJSON.Checksum == "" || mappingJSON.Path == "" { - return - } - - logger.Progressf("[galleries] %d of %d", index, len(t.Mappings.Galleries)) - - // Populate a new gallery from the input - currentTime := time.Now() - newGallery := models.Gallery{ - Checksum: mappingJSON.Checksum, - Path: mappingJSON.Path, - CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, - UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, - } - - _, err := qb.Create(newGallery, tx) - if err != nil { - _ = tx.Rollback() - logger.Errorf("[galleries] <%s> failed to create: %s", mappingJSON.Checksum, err.Error()) - return - } - } - logger.Info("[galleries] importing") - if err := tx.Commit(); err != nil { - logger.Errorf("[galleries] import failed to commit: %s", err.Error()) + + for i, mappingJSON := range t.mappings.Galleries { + index := i + 1 + + logger.Progressf("[galleries] %d of %d", index, len(t.mappings.Galleries)) + + tx := database.DB.MustBeginTx(ctx, nil) + readerWriter := models.NewGalleryReaderWriter(tx) + + galleryImporter := &gallery.Importer{ + ReaderWriter: readerWriter, + Input: mappingJSON, + } + + if err := performImport(galleryImporter, t.DuplicateBehaviour); err != nil 
{ + tx.Rollback() + logger.Errorf("[galleries] <%s> failed to import: %s", mappingJSON.Checksum, err.Error()) + continue + } + + if err := tx.Commit(); err != nil { + tx.Rollback() + logger.Errorf("[galleries] <%s> import failed to commit: %s", mappingJSON.Checksum, err.Error()) + continue + } } + logger.Info("[galleries] import complete") } func (t *ImportTask) ImportTags(ctx context.Context) { - tx := database.DB.MustBeginTx(ctx, nil) - qb := models.NewTagQueryBuilder() + logger.Info("[tags] importing") - for i, mappingJSON := range t.Mappings.Tags { + for i, mappingJSON := range t.mappings.Tags { index := i + 1 tagJSON, err := t.json.getTag(mappingJSON.Checksum) if err != nil { logger.Errorf("[tags] failed to read json: %s", err.Error()) continue } - if mappingJSON.Checksum == "" || mappingJSON.Name == "" || tagJSON == nil { - return + + logger.Progressf("[tags] %d of %d", index, len(t.mappings.Tags)) + + tx := database.DB.MustBeginTx(ctx, nil) + readerWriter := models.NewTagReaderWriter(tx) + + tagImporter := &tag.Importer{ + ReaderWriter: readerWriter, + Input: *tagJSON, } - logger.Progressf("[tags] %d of %d", index, len(t.Mappings.Tags)) - - // Process the base 64 encoded image string - var imageData []byte - if len(tagJSON.Image) > 0 { - _, imageData, err = utils.ProcessBase64Image(tagJSON.Image) - if err != nil { - _ = tx.Rollback() - logger.Errorf("[tags] <%s> invalid image: %s", mappingJSON.Checksum, err.Error()) - return - } + if err := performImport(tagImporter, t.DuplicateBehaviour); err != nil { + tx.Rollback() + logger.Errorf("[tags] <%s> failed to import: %s", mappingJSON.Checksum, err.Error()) + continue } - // Populate a new tag from the input - newTag := models.Tag{ - Name: tagJSON.Name, - CreatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(tagJSON.CreatedAt)}, - UpdatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(tagJSON.UpdatedAt)}, - } - - createdTag, err := qb.Create(newTag, tx) - if err != nil { - _ = 
tx.Rollback() - logger.Errorf("[tags] <%s> failed to create: %s", mappingJSON.Checksum, err.Error()) - return - } - - // Add the tag image if set - if len(imageData) > 0 { - if err := qb.UpdateTagImage(createdTag.ID, imageData, tx); err != nil { - _ = tx.Rollback() - logger.Errorf("[tags] <%s> error setting tag image: %s", mappingJSON.Checksum, err.Error()) - return - } + if err := tx.Commit(); err != nil { + tx.Rollback() + logger.Errorf("[tags] <%s> import failed to commit: %s", mappingJSON.Checksum, err.Error()) } } - logger.Info("[tags] importing") - if err := tx.Commit(); err != nil { - logger.Errorf("[tags] import failed to commit: %s", err.Error()) - } logger.Info("[tags] import complete") } @@ -499,9 +430,9 @@ func (t *ImportTask) ImportScrapedItems(ctx context.Context) { sqb := models.NewStudioQueryBuilder() currentTime := time.Now() - for i, mappingJSON := range t.Scraped { + for i, mappingJSON := range t.scraped { index := i + 1 - logger.Progressf("[scraped sites] %d of %d", index, len(t.Mappings.Scenes)) + logger.Progressf("[scraped sites] %d of %d", index, len(t.mappings.Scenes)) newScrapedItem := models.ScrapedItem{ Title: sql.NullString{String: mappingJSON.Title, Valid: true}, @@ -542,19 +473,12 @@ func (t *ImportTask) ImportScrapedItems(ctx context.Context) { } func (t *ImportTask) ImportScenes(ctx context.Context) { - tx := database.DB.MustBeginTx(ctx, nil) - qb := models.NewSceneQueryBuilder() - jqb := models.NewJoinsQueryBuilder() + logger.Info("[scenes] importing") - for i, mappingJSON := range t.Mappings.Scenes { + for i, mappingJSON := range t.mappings.Scenes { index := i + 1 - if mappingJSON.Checksum == "" || mappingJSON.Path == "" { - _ = tx.Rollback() - logger.Warn("[scenes] scene mapping without checksum or path: ", mappingJSON) - return - } - logger.Progressf("[scenes] %d of %d", index, len(t.Mappings.Scenes)) + logger.Progressf("[scenes] %d of %d", index, len(t.mappings.Scenes)) sceneJSON, err := t.json.getScene(mappingJSON.Checksum) if 
err != nil { @@ -564,244 +488,68 @@ func (t *ImportTask) ImportScenes(ctx context.Context) { sceneHash := mappingJSON.Checksum - newScene := models.Scene{ - Checksum: sql.NullString{String: sceneJSON.Checksum, Valid: sceneJSON.Checksum != ""}, - OSHash: sql.NullString{String: sceneJSON.OSHash, Valid: sceneJSON.OSHash != ""}, - Path: mappingJSON.Path, + tx := database.DB.MustBeginTx(ctx, nil) + readerWriter := models.NewSceneReaderWriter(tx) + tagWriter := models.NewTagReaderWriter(tx) + galleryWriter := models.NewGalleryReaderWriter(tx) + joinWriter := models.NewJoinReaderWriter(tx) + movieWriter := models.NewMovieReaderWriter(tx) + performerWriter := models.NewPerformerReaderWriter(tx) + studioWriter := models.NewStudioReaderWriter(tx) + markerWriter := models.NewSceneMarkerReaderWriter(tx) + + sceneImporter := &scene.Importer{ + ReaderWriter: readerWriter, + Input: *sceneJSON, + Path: mappingJSON.Path, + + FileNamingAlgorithm: t.fileNamingAlgorithm, + MissingRefBehaviour: t.MissingRefBehaviour, + + GalleryWriter: galleryWriter, + JoinWriter: joinWriter, + MovieWriter: movieWriter, + PerformerWriter: performerWriter, + StudioWriter: studioWriter, + TagWriter: tagWriter, } - // Process the base 64 encoded cover image string - var coverImageData []byte - if sceneJSON.Cover != "" { - _, coverImageData, err = utils.ProcessBase64Image(sceneJSON.Cover) - if err != nil { - logger.Warnf("[scenes] <%s> invalid cover image: %s", sceneHash, err.Error()) - } - if len(coverImageData) > 0 { - if err = SetSceneScreenshot(sceneHash, coverImageData); err != nil { - logger.Warnf("[scenes] <%s> failed to create cover image: %s", sceneHash, err.Error()) - } + if err := performImport(sceneImporter, t.DuplicateBehaviour); err != nil { + tx.Rollback() + logger.Errorf("[scenes] <%s> failed to import: %s", sceneHash, err.Error()) + continue + } - // write the cover image data after creating the scene + // import the scene markers + failedMarkers := false + for _, m := range 
sceneJSON.Markers { + markerImporter := &scene.MarkerImporter{ + SceneID: sceneImporter.ID, + Input: m, + MissingRefBehaviour: t.MissingRefBehaviour, + ReaderWriter: markerWriter, + JoinWriter: joinWriter, + TagWriter: tagWriter, + } + + if err := performImport(markerImporter, t.DuplicateBehaviour); err != nil { + failedMarkers = true + logger.Errorf("[scenes] <%s> failed to import markers: %s", sceneHash, err.Error()) + break } } - // Populate scene fields - if sceneJSON != nil { - if sceneJSON.Title != "" { - newScene.Title = sql.NullString{String: sceneJSON.Title, Valid: true} - } - if sceneJSON.Details != "" { - newScene.Details = sql.NullString{String: sceneJSON.Details, Valid: true} - } - if sceneJSON.URL != "" { - newScene.URL = sql.NullString{String: sceneJSON.URL, Valid: true} - } - if sceneJSON.Date != "" { - newScene.Date = models.SQLiteDate{String: sceneJSON.Date, Valid: true} - } - if sceneJSON.Rating != 0 { - newScene.Rating = sql.NullInt64{Int64: int64(sceneJSON.Rating), Valid: true} - } - - newScene.OCounter = sceneJSON.OCounter - newScene.CreatedAt = models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(sceneJSON.CreatedAt)} - newScene.UpdatedAt = models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(sceneJSON.UpdatedAt)} - - if sceneJSON.File != nil { - if sceneJSON.File.Size != "" { - newScene.Size = sql.NullString{String: sceneJSON.File.Size, Valid: true} - } - if sceneJSON.File.Duration != "" { - duration, _ := strconv.ParseFloat(sceneJSON.File.Duration, 64) - newScene.Duration = sql.NullFloat64{Float64: duration, Valid: true} - } - if sceneJSON.File.VideoCodec != "" { - newScene.VideoCodec = sql.NullString{String: sceneJSON.File.VideoCodec, Valid: true} - } - if sceneJSON.File.AudioCodec != "" { - newScene.AudioCodec = sql.NullString{String: sceneJSON.File.AudioCodec, Valid: true} - } - if sceneJSON.File.Format != "" { - newScene.Format = sql.NullString{String: sceneJSON.File.Format, Valid: true} - } - if sceneJSON.File.Width != 0 { - 
newScene.Width = sql.NullInt64{Int64: int64(sceneJSON.File.Width), Valid: true} - } - if sceneJSON.File.Height != 0 { - newScene.Height = sql.NullInt64{Int64: int64(sceneJSON.File.Height), Valid: true} - } - if sceneJSON.File.Framerate != "" { - framerate, _ := strconv.ParseFloat(sceneJSON.File.Framerate, 64) - newScene.Framerate = sql.NullFloat64{Float64: framerate, Valid: true} - } - if sceneJSON.File.Bitrate != 0 { - newScene.Bitrate = sql.NullInt64{Int64: int64(sceneJSON.File.Bitrate), Valid: true} - } - } else { - // TODO: Get FFMPEG data? - } + if failedMarkers { + tx.Rollback() + continue } - // Populate the studio ID - if sceneJSON.Studio != "" { - sqb := models.NewStudioQueryBuilder() - studio, err := sqb.FindByName(sceneJSON.Studio, tx, false) - if err != nil { - logger.Warnf("[scenes] error getting studio <%s>: %s", sceneJSON.Studio, err.Error()) - } else if studio == nil { - logger.Warnf("[scenes] studio <%s> does not exist", sceneJSON.Studio) - } else { - newScene.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true} - } - } - - // Create the scene in the DB - scene, err := qb.Create(newScene, tx) - if err != nil { - _ = tx.Rollback() - logger.Errorf("[scenes] <%s> failed to create: %s", sceneHash, err.Error()) - return - } - if scene.ID == 0 { - _ = tx.Rollback() - logger.Errorf("[scenes] <%s> invalid id after scene creation", sceneHash) - return - } - - // Add the scene cover if set - if len(coverImageData) > 0 { - if err := qb.UpdateSceneCover(scene.ID, coverImageData, tx); err != nil { - _ = tx.Rollback() - logger.Errorf("[scenes] <%s> error setting scene cover: %s", sceneHash, err.Error()) - return - } - } - - // Relate the scene to the gallery - if sceneJSON.Gallery != "" { - gqb := models.NewGalleryQueryBuilder() - gallery, err := gqb.FindByChecksum(sceneJSON.Gallery, tx) - if err != nil { - logger.Warnf("[scenes] gallery <%s> does not exist: %s", sceneJSON.Gallery, err.Error()) - } else { - gallery.SceneID = sql.NullInt64{Int64: 
int64(scene.ID), Valid: true} - _, err := gqb.Update(*gallery, tx) - if err != nil { - logger.Errorf("[scenes] <%s> failed to update gallery: %s", sceneHash, err.Error()) - } - } - } - - // Relate the scene to the performers - if len(sceneJSON.Performers) > 0 { - performers, err := t.getPerformers(sceneJSON.Performers, tx) - if err != nil { - logger.Warnf("[scenes] <%s> failed to fetch performers: %s", sceneHash, err.Error()) - } else { - var performerJoins []models.PerformersScenes - for _, performer := range performers { - join := models.PerformersScenes{ - PerformerID: performer.ID, - SceneID: scene.ID, - } - performerJoins = append(performerJoins, join) - } - if err := jqb.CreatePerformersScenes(performerJoins, tx); err != nil { - logger.Errorf("[scenes] <%s> failed to associate performers: %s", sceneHash, err.Error()) - } - } - } - - // Relate the scene to the movies - if len(sceneJSON.Movies) > 0 { - moviesScenes, err := t.getMoviesScenes(sceneJSON.Movies, scene.ID, tx) - if err != nil { - logger.Warnf("[scenes] <%s> failed to fetch movies: %s", sceneHash, err.Error()) - } else { - if err := jqb.CreateMoviesScenes(moviesScenes, tx); err != nil { - logger.Errorf("[scenes] <%s> failed to associate movies: %s", sceneHash, err.Error()) - } - } - } - - // Relate the scene to the tags - if len(sceneJSON.Tags) > 0 { - tags, err := t.getTags(sceneHash, sceneJSON.Tags, tx) - if err != nil { - logger.Warnf("[scenes] <%s> failed to fetch tags: %s", sceneHash, err.Error()) - } else { - var tagJoins []models.ScenesTags - for _, tag := range tags { - join := models.ScenesTags{ - SceneID: scene.ID, - TagID: tag.ID, - } - tagJoins = append(tagJoins, join) - } - if err := jqb.CreateScenesTags(tagJoins, tx); err != nil { - logger.Errorf("[scenes] <%s> failed to associate tags: %s", sceneHash, err.Error()) - } - } - } - - // Relate the scene to the scene markers - if len(sceneJSON.Markers) > 0 { - smqb := models.NewSceneMarkerQueryBuilder() - tqb := models.NewTagQueryBuilder() 
- for _, marker := range sceneJSON.Markers { - seconds, _ := strconv.ParseFloat(marker.Seconds, 64) - newSceneMarker := models.SceneMarker{ - Title: marker.Title, - Seconds: seconds, - SceneID: sql.NullInt64{Int64: int64(scene.ID), Valid: true}, - CreatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(marker.CreatedAt)}, - UpdatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(marker.UpdatedAt)}, - } - - primaryTag, err := tqb.FindByName(marker.PrimaryTag, tx, false) - if err != nil { - logger.Errorf("[scenes] <%s> failed to find primary tag for marker: %s", sceneHash, err.Error()) - } else { - newSceneMarker.PrimaryTagID = primaryTag.ID - } - - // Create the scene marker in the DB - sceneMarker, err := smqb.Create(newSceneMarker, tx) - if err != nil { - logger.Warnf("[scenes] <%s> failed to create scene marker: %s", sceneHash, err.Error()) - continue - } - if sceneMarker.ID == 0 { - logger.Warnf("[scenes] <%s> invalid scene marker id after scene marker creation", sceneHash) - continue - } - - // Get the scene marker tags and create the joins - tags, err := t.getTags(sceneHash, marker.Tags, tx) - if err != nil { - logger.Warnf("[scenes] <%s> failed to fetch scene marker tags: %s", sceneHash, err.Error()) - } else { - var tagJoins []models.SceneMarkersTags - for _, tag := range tags { - join := models.SceneMarkersTags{ - SceneMarkerID: sceneMarker.ID, - TagID: tag.ID, - } - tagJoins = append(tagJoins, join) - } - if err := jqb.CreateSceneMarkersTags(tagJoins, tx); err != nil { - logger.Errorf("[scenes] <%s> failed to associate scene marker tags: %s", sceneHash, err.Error()) - } - } - } + if err := tx.Commit(); err != nil { + tx.Rollback() + logger.Errorf("[scenes] <%s> import failed to commit: %s", sceneHash, err.Error()) } } - logger.Info("[scenes] importing") - if err := tx.Commit(); err != nil { - logger.Errorf("[scenes] import failed to commit: %s", err.Error()) - } logger.Info("[scenes] import complete") } diff --git 
a/pkg/models/gallery.go b/pkg/models/gallery.go index 14f191972..6ab87ac80 100644 --- a/pkg/models/gallery.go +++ b/pkg/models/gallery.go @@ -7,8 +7,8 @@ import ( type GalleryReader interface { // Find(id int) (*Gallery, error) FindMany(ids []int) ([]*Gallery, error) - // FindByChecksum(checksum string) (*Gallery, error) - // FindByPath(path string) (*Gallery, error) + FindByChecksum(checksum string) (*Gallery, error) + FindByPath(path string) (*Gallery, error) FindBySceneID(sceneID int) (*Gallery, error) // ValidGalleriesForScenePath(scenePath string) ([]*Gallery, error) // Count() (int, error) @@ -17,8 +17,8 @@ type GalleryReader interface { } type GalleryWriter interface { - // Create(newGallery Gallery) (*Gallery, error) - // Update(updatedGallery Gallery) (*Gallery, error) + Create(newGallery Gallery) (*Gallery, error) + Update(updatedGallery Gallery) (*Gallery, error) // Destroy(id int) error // ClearGalleryId(sceneID int) error } @@ -44,10 +44,26 @@ func (t *galleryReaderWriter) FindMany(ids []int) ([]*Gallery, error) { return t.qb.FindMany(ids) } +func (t *galleryReaderWriter) FindByChecksum(checksum string) (*Gallery, error) { + return t.qb.FindByChecksum(checksum, t.tx) +} + func (t *galleryReaderWriter) All() ([]*Gallery, error) { return t.qb.All() } +func (t *galleryReaderWriter) FindByPath(path string) (*Gallery, error) { + return t.qb.FindByPath(path) +} + func (t *galleryReaderWriter) FindBySceneID(sceneID int) (*Gallery, error) { return t.qb.FindBySceneID(sceneID, t.tx) } + +func (t *galleryReaderWriter) Create(newGallery Gallery) (*Gallery, error) { + return t.qb.Create(newGallery, t.tx) +} + +func (t *galleryReaderWriter) Update(updatedGallery Gallery) (*Gallery, error) { + return t.qb.Update(updatedGallery, t.tx) +} diff --git a/pkg/models/join.go b/pkg/models/join.go index 1ee2d2427..443dcf218 100644 --- a/pkg/models/join.go +++ b/pkg/models/join.go @@ -11,20 +11,20 @@ type JoinReader interface { } type JoinWriter interface { - // 
CreatePerformersScenes(newJoins []PerformersScenes) error + CreatePerformersScenes(newJoins []PerformersScenes) error // AddPerformerScene(sceneID int, performerID int) (bool, error) - // UpdatePerformersScenes(sceneID int, updatedJoins []PerformersScenes) error + UpdatePerformersScenes(sceneID int, updatedJoins []PerformersScenes) error // DestroyPerformersScenes(sceneID int) error - // CreateMoviesScenes(newJoins []MoviesScenes) error + CreateMoviesScenes(newJoins []MoviesScenes) error // AddMoviesScene(sceneID int, movieID int, sceneIdx *int) (bool, error) - // UpdateMoviesScenes(sceneID int, updatedJoins []MoviesScenes) error + UpdateMoviesScenes(sceneID int, updatedJoins []MoviesScenes) error // DestroyMoviesScenes(sceneID int) error // CreateScenesTags(newJoins []ScenesTags) error - // UpdateScenesTags(sceneID int, updatedJoins []ScenesTags) error + UpdateScenesTags(sceneID int, updatedJoins []ScenesTags) error // AddSceneTag(sceneID int, tagID int) (bool, error) // DestroyScenesTags(sceneID int) error // CreateSceneMarkersTags(newJoins []SceneMarkersTags) error - // UpdateSceneMarkersTags(sceneMarkerID int, updatedJoins []SceneMarkersTags) error + UpdateSceneMarkersTags(sceneMarkerID int, updatedJoins []SceneMarkersTags) error // DestroySceneMarkersTags(sceneMarkerID int, updatedJoins []SceneMarkersTags) error // DestroyScenesGalleries(sceneID int) error // DestroyScenesMarkers(sceneID int) error @@ -50,3 +50,27 @@ type joinReaderWriter struct { func (t *joinReaderWriter) GetSceneMovies(sceneID int) ([]MoviesScenes, error) { return t.qb.GetSceneMovies(sceneID, t.tx) } + +func (t *joinReaderWriter) CreatePerformersScenes(newJoins []PerformersScenes) error { + return t.qb.CreatePerformersScenes(newJoins, t.tx) +} + +func (t *joinReaderWriter) UpdatePerformersScenes(sceneID int, updatedJoins []PerformersScenes) error { + return t.qb.UpdatePerformersScenes(sceneID, updatedJoins, t.tx) +} + +func (t *joinReaderWriter) CreateMoviesScenes(newJoins []MoviesScenes) 
error { + return t.qb.CreateMoviesScenes(newJoins, t.tx) +} + +func (t *joinReaderWriter) UpdateMoviesScenes(sceneID int, updatedJoins []MoviesScenes) error { + return t.qb.UpdateMoviesScenes(sceneID, updatedJoins, t.tx) +} + +func (t *joinReaderWriter) UpdateScenesTags(sceneID int, updatedJoins []ScenesTags) error { + return t.qb.UpdateScenesTags(sceneID, updatedJoins, t.tx) +} + +func (t *joinReaderWriter) UpdateSceneMarkersTags(sceneMarkerID int, updatedJoins []SceneMarkersTags) error { + return t.qb.UpdateSceneMarkersTags(sceneMarkerID, updatedJoins, t.tx) +} diff --git a/pkg/models/json_time.go b/pkg/models/json_time.go index af3fe3308..344069338 100644 --- a/pkg/models/json_time.go +++ b/pkg/models/json_time.go @@ -2,11 +2,14 @@ package models import ( "fmt" - "github.com/stashapp/stash/pkg/utils" "strings" "time" + + "github.com/stashapp/stash/pkg/utils" ) +var currentLocation = time.Now().Location() + type JSONTime struct { time.Time } @@ -28,3 +31,19 @@ func (jt *JSONTime) MarshalJSON() ([]byte, error) { } return []byte(fmt.Sprintf("\"%s\"", jt.Time.Format(time.RFC3339))), nil } + +func (jt JSONTime) GetTime() time.Time { + if currentLocation != nil { + if jt.IsZero() { + return time.Now().In(currentLocation) + } else { + return jt.Time.In(currentLocation) + } + } else { + if jt.IsZero() { + return time.Now() + } else { + return jt.Time + } + } +} diff --git a/pkg/models/mocks/GalleryReaderWriter.go b/pkg/models/mocks/GalleryReaderWriter.go index 9ae1432c1..31c23fadf 100644 --- a/pkg/models/mocks/GalleryReaderWriter.go +++ b/pkg/models/mocks/GalleryReaderWriter.go @@ -35,6 +35,75 @@ func (_m *GalleryReaderWriter) All() ([]*models.Gallery, error) { return r0, r1 } +// Create provides a mock function with given fields: newGallery +func (_m *GalleryReaderWriter) Create(newGallery models.Gallery) (*models.Gallery, error) { + ret := _m.Called(newGallery) + + var r0 *models.Gallery + if rf, ok := ret.Get(0).(func(models.Gallery) *models.Gallery); ok { + r0 = 
rf(newGallery) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Gallery) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.Gallery) error); ok { + r1 = rf(newGallery) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByChecksum provides a mock function with given fields: checksum +func (_m *GalleryReaderWriter) FindByChecksum(checksum string) (*models.Gallery, error) { + ret := _m.Called(checksum) + + var r0 *models.Gallery + if rf, ok := ret.Get(0).(func(string) *models.Gallery); ok { + r0 = rf(checksum) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Gallery) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(checksum) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByPath provides a mock function with given fields: path +func (_m *GalleryReaderWriter) FindByPath(path string) (*models.Gallery, error) { + ret := _m.Called(path) + + var r0 *models.Gallery + if rf, ok := ret.Get(0).(func(string) *models.Gallery); ok { + r0 = rf(path) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Gallery) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(path) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindBySceneID provides a mock function with given fields: sceneID func (_m *GalleryReaderWriter) FindBySceneID(sceneID int) (*models.Gallery, error) { ret := _m.Called(sceneID) @@ -80,3 +149,26 @@ func (_m *GalleryReaderWriter) FindMany(ids []int) ([]*models.Gallery, error) { return r0, r1 } + +// Update provides a mock function with given fields: updatedGallery +func (_m *GalleryReaderWriter) Update(updatedGallery models.Gallery) (*models.Gallery, error) { + ret := _m.Called(updatedGallery) + + var r0 *models.Gallery + if rf, ok := ret.Get(0).(func(models.Gallery) *models.Gallery); ok { + r0 = rf(updatedGallery) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Gallery) + } + } + + var r1 
error + if rf, ok := ret.Get(1).(func(models.Gallery) error); ok { + r1 = rf(updatedGallery) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} diff --git a/pkg/models/mocks/JoinReaderWriter.go b/pkg/models/mocks/JoinReaderWriter.go index 451823fda..a3c94c1a4 100644 --- a/pkg/models/mocks/JoinReaderWriter.go +++ b/pkg/models/mocks/JoinReaderWriter.go @@ -12,6 +12,34 @@ type JoinReaderWriter struct { mock.Mock } +// CreateMoviesScenes provides a mock function with given fields: newJoins +func (_m *JoinReaderWriter) CreateMoviesScenes(newJoins []models.MoviesScenes) error { + ret := _m.Called(newJoins) + + var r0 error + if rf, ok := ret.Get(0).(func([]models.MoviesScenes) error); ok { + r0 = rf(newJoins) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// CreatePerformersScenes provides a mock function with given fields: newJoins +func (_m *JoinReaderWriter) CreatePerformersScenes(newJoins []models.PerformersScenes) error { + ret := _m.Called(newJoins) + + var r0 error + if rf, ok := ret.Get(0).(func([]models.PerformersScenes) error); ok { + r0 = rf(newJoins) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // GetSceneMovies provides a mock function with given fields: sceneID func (_m *JoinReaderWriter) GetSceneMovies(sceneID int) ([]models.MoviesScenes, error) { ret := _m.Called(sceneID) @@ -34,3 +62,59 @@ func (_m *JoinReaderWriter) GetSceneMovies(sceneID int) ([]models.MoviesScenes, return r0, r1 } + +// UpdateMoviesScenes provides a mock function with given fields: sceneID, updatedJoins +func (_m *JoinReaderWriter) UpdateMoviesScenes(sceneID int, updatedJoins []models.MoviesScenes) error { + ret := _m.Called(sceneID, updatedJoins) + + var r0 error + if rf, ok := ret.Get(0).(func(int, []models.MoviesScenes) error); ok { + r0 = rf(sceneID, updatedJoins) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdatePerformersScenes provides a mock function with given fields: sceneID, updatedJoins +func (_m *JoinReaderWriter) 
UpdatePerformersScenes(sceneID int, updatedJoins []models.PerformersScenes) error { + ret := _m.Called(sceneID, updatedJoins) + + var r0 error + if rf, ok := ret.Get(0).(func(int, []models.PerformersScenes) error); ok { + r0 = rf(sceneID, updatedJoins) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateSceneMarkersTags provides a mock function with given fields: sceneMarkerID, updatedJoins +func (_m *JoinReaderWriter) UpdateSceneMarkersTags(sceneMarkerID int, updatedJoins []models.SceneMarkersTags) error { + ret := _m.Called(sceneMarkerID, updatedJoins) + + var r0 error + if rf, ok := ret.Get(0).(func(int, []models.SceneMarkersTags) error); ok { + r0 = rf(sceneMarkerID, updatedJoins) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateScenesTags provides a mock function with given fields: sceneID, updatedJoins +func (_m *JoinReaderWriter) UpdateScenesTags(sceneID int, updatedJoins []models.ScenesTags) error { + ret := _m.Called(sceneID, updatedJoins) + + var r0 error + if rf, ok := ret.Get(0).(func(int, []models.ScenesTags) error); ok { + r0 = rf(sceneID, updatedJoins) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/pkg/models/mocks/MovieReaderWriter.go b/pkg/models/mocks/MovieReaderWriter.go index f0798afef..5521f25e4 100644 --- a/pkg/models/mocks/MovieReaderWriter.go +++ b/pkg/models/mocks/MovieReaderWriter.go @@ -35,6 +35,29 @@ func (_m *MovieReaderWriter) All() ([]*models.Movie, error) { return r0, r1 } +// Create provides a mock function with given fields: newMovie +func (_m *MovieReaderWriter) Create(newMovie models.Movie) (*models.Movie, error) { + ret := _m.Called(newMovie) + + var r0 *models.Movie + if rf, ok := ret.Get(0).(func(models.Movie) *models.Movie); ok { + r0 = rf(newMovie) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Movie) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.Movie) error); ok { + r1 = rf(newMovie) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // 
Find provides a mock function with given fields: id func (_m *MovieReaderWriter) Find(id int) (*models.Movie, error) { ret := _m.Called(id) @@ -58,6 +81,52 @@ func (_m *MovieReaderWriter) Find(id int) (*models.Movie, error) { return r0, r1 } +// FindByName provides a mock function with given fields: name, nocase +func (_m *MovieReaderWriter) FindByName(name string, nocase bool) (*models.Movie, error) { + ret := _m.Called(name, nocase) + + var r0 *models.Movie + if rf, ok := ret.Get(0).(func(string, bool) *models.Movie); ok { + r0 = rf(name, nocase) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Movie) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(string, bool) error); ok { + r1 = rf(name, nocase) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByNames provides a mock function with given fields: names, nocase +func (_m *MovieReaderWriter) FindByNames(names []string, nocase bool) ([]*models.Movie, error) { + ret := _m.Called(names, nocase) + + var r0 []*models.Movie + if rf, ok := ret.Get(0).(func([]string, bool) []*models.Movie); ok { + r0 = rf(names, nocase) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Movie) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func([]string, bool) error); ok { + r1 = rf(names, nocase) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindMany provides a mock function with given fields: ids func (_m *MovieReaderWriter) FindMany(ids []int) ([]*models.Movie, error) { ret := _m.Called(ids) @@ -126,3 +195,63 @@ func (_m *MovieReaderWriter) GetFrontImage(movieID int) ([]byte, error) { return r0, r1 } + +// Update provides a mock function with given fields: updatedMovie +func (_m *MovieReaderWriter) Update(updatedMovie models.MoviePartial) (*models.Movie, error) { + ret := _m.Called(updatedMovie) + + var r0 *models.Movie + if rf, ok := ret.Get(0).(func(models.MoviePartial) *models.Movie); ok { + r0 = rf(updatedMovie) + } else { + if ret.Get(0) != nil { + r0 = 
ret.Get(0).(*models.Movie) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.MoviePartial) error); ok { + r1 = rf(updatedMovie) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateFull provides a mock function with given fields: updatedMovie +func (_m *MovieReaderWriter) UpdateFull(updatedMovie models.Movie) (*models.Movie, error) { + ret := _m.Called(updatedMovie) + + var r0 *models.Movie + if rf, ok := ret.Get(0).(func(models.Movie) *models.Movie); ok { + r0 = rf(updatedMovie) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Movie) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.Movie) error); ok { + r1 = rf(updatedMovie) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateMovieImages provides a mock function with given fields: movieID, frontImage, backImage +func (_m *MovieReaderWriter) UpdateMovieImages(movieID int, frontImage []byte, backImage []byte) error { + ret := _m.Called(movieID, frontImage, backImage) + + var r0 error + if rf, ok := ret.Get(0).(func(int, []byte, []byte) error); ok { + r0 = rf(movieID, frontImage, backImage) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/pkg/models/mocks/PerformerReaderWriter.go b/pkg/models/mocks/PerformerReaderWriter.go index 04af0b26a..2f7db7bba 100644 --- a/pkg/models/mocks/PerformerReaderWriter.go +++ b/pkg/models/mocks/PerformerReaderWriter.go @@ -35,6 +35,52 @@ func (_m *PerformerReaderWriter) All() ([]*models.Performer, error) { return r0, r1 } +// Create provides a mock function with given fields: newPerformer +func (_m *PerformerReaderWriter) Create(newPerformer models.Performer) (*models.Performer, error) { + ret := _m.Called(newPerformer) + + var r0 *models.Performer + if rf, ok := ret.Get(0).(func(models.Performer) *models.Performer); ok { + r0 = rf(newPerformer) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Performer) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.Performer) 
error); ok { + r1 = rf(newPerformer) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByNames provides a mock function with given fields: names, nocase +func (_m *PerformerReaderWriter) FindByNames(names []string, nocase bool) ([]*models.Performer, error) { + ret := _m.Called(names, nocase) + + var r0 []*models.Performer + if rf, ok := ret.Get(0).(func([]string, bool) []*models.Performer); ok { + r0 = rf(names, nocase) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Performer) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func([]string, bool) error); ok { + r1 = rf(names, nocase) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindBySceneID provides a mock function with given fields: sceneID func (_m *PerformerReaderWriter) FindBySceneID(sceneID int) ([]*models.Performer, error) { ret := _m.Called(sceneID) @@ -126,3 +172,40 @@ func (_m *PerformerReaderWriter) GetPerformerImage(performerID int) ([]byte, err return r0, r1 } + +// Update provides a mock function with given fields: updatedPerformer +func (_m *PerformerReaderWriter) Update(updatedPerformer models.Performer) (*models.Performer, error) { + ret := _m.Called(updatedPerformer) + + var r0 *models.Performer + if rf, ok := ret.Get(0).(func(models.Performer) *models.Performer); ok { + r0 = rf(updatedPerformer) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Performer) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.Performer) error); ok { + r1 = rf(updatedPerformer) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdatePerformerImage provides a mock function with given fields: performerID, image +func (_m *PerformerReaderWriter) UpdatePerformerImage(performerID int, image []byte) error { + ret := _m.Called(performerID, image) + + var r0 error + if rf, ok := ret.Get(0).(func(int, []byte) error); ok { + r0 = rf(performerID, image) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git 
a/pkg/models/mocks/SceneMarkerReaderWriter.go b/pkg/models/mocks/SceneMarkerReaderWriter.go index d39e88f6f..df1b4f937 100644 --- a/pkg/models/mocks/SceneMarkerReaderWriter.go +++ b/pkg/models/mocks/SceneMarkerReaderWriter.go @@ -12,6 +12,29 @@ type SceneMarkerReaderWriter struct { mock.Mock } +// Create provides a mock function with given fields: newSceneMarker +func (_m *SceneMarkerReaderWriter) Create(newSceneMarker models.SceneMarker) (*models.SceneMarker, error) { + ret := _m.Called(newSceneMarker) + + var r0 *models.SceneMarker + if rf, ok := ret.Get(0).(func(models.SceneMarker) *models.SceneMarker); ok { + r0 = rf(newSceneMarker) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.SceneMarker) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.SceneMarker) error); ok { + r1 = rf(newSceneMarker) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindBySceneID provides a mock function with given fields: sceneID func (_m *SceneMarkerReaderWriter) FindBySceneID(sceneID int) ([]*models.SceneMarker, error) { ret := _m.Called(sceneID) @@ -34,3 +57,26 @@ func (_m *SceneMarkerReaderWriter) FindBySceneID(sceneID int) ([]*models.SceneMa return r0, r1 } + +// Update provides a mock function with given fields: updatedSceneMarker +func (_m *SceneMarkerReaderWriter) Update(updatedSceneMarker models.SceneMarker) (*models.SceneMarker, error) { + ret := _m.Called(updatedSceneMarker) + + var r0 *models.SceneMarker + if rf, ok := ret.Get(0).(func(models.SceneMarker) *models.SceneMarker); ok { + r0 = rf(updatedSceneMarker) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.SceneMarker) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.SceneMarker) error); ok { + r1 = rf(updatedSceneMarker) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} diff --git a/pkg/models/mocks/SceneReaderWriter.go b/pkg/models/mocks/SceneReaderWriter.go index a024e2523..eee27824b 100644 --- a/pkg/models/mocks/SceneReaderWriter.go +++ 
b/pkg/models/mocks/SceneReaderWriter.go @@ -35,6 +35,75 @@ func (_m *SceneReaderWriter) All() ([]*models.Scene, error) { return r0, r1 } +// Create provides a mock function with given fields: newScene +func (_m *SceneReaderWriter) Create(newScene models.Scene) (*models.Scene, error) { + ret := _m.Called(newScene) + + var r0 *models.Scene + if rf, ok := ret.Get(0).(func(models.Scene) *models.Scene); ok { + r0 = rf(newScene) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Scene) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.Scene) error); ok { + r1 = rf(newScene) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByChecksum provides a mock function with given fields: checksum +func (_m *SceneReaderWriter) FindByChecksum(checksum string) (*models.Scene, error) { + ret := _m.Called(checksum) + + var r0 *models.Scene + if rf, ok := ret.Get(0).(func(string) *models.Scene); ok { + r0 = rf(checksum) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Scene) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(checksum) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByOSHash provides a mock function with given fields: oshash +func (_m *SceneReaderWriter) FindByOSHash(oshash string) (*models.Scene, error) { + ret := _m.Called(oshash) + + var r0 *models.Scene + if rf, ok := ret.Get(0).(func(string) *models.Scene); ok { + r0 = rf(oshash) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Scene) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(oshash) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindMany provides a mock function with given fields: ids func (_m *SceneReaderWriter) FindMany(ids []int) ([]*models.Scene, error) { ret := _m.Called(ids) @@ -80,3 +149,63 @@ func (_m *SceneReaderWriter) GetSceneCover(sceneID int) ([]byte, error) { return r0, r1 } + +// Update provides a mock function with 
given fields: updatedScene +func (_m *SceneReaderWriter) Update(updatedScene models.ScenePartial) (*models.Scene, error) { + ret := _m.Called(updatedScene) + + var r0 *models.Scene + if rf, ok := ret.Get(0).(func(models.ScenePartial) *models.Scene); ok { + r0 = rf(updatedScene) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Scene) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.ScenePartial) error); ok { + r1 = rf(updatedScene) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateFull provides a mock function with given fields: updatedScene +func (_m *SceneReaderWriter) UpdateFull(updatedScene models.Scene) (*models.Scene, error) { + ret := _m.Called(updatedScene) + + var r0 *models.Scene + if rf, ok := ret.Get(0).(func(models.Scene) *models.Scene); ok { + r0 = rf(updatedScene) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Scene) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.Scene) error); ok { + r1 = rf(updatedScene) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateSceneCover provides a mock function with given fields: sceneID, cover +func (_m *SceneReaderWriter) UpdateSceneCover(sceneID int, cover []byte) error { + ret := _m.Called(sceneID, cover) + + var r0 error + if rf, ok := ret.Get(0).(func(int, []byte) error); ok { + r0 = rf(sceneID, cover) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/pkg/models/mocks/StudioReaderWriter.go b/pkg/models/mocks/StudioReaderWriter.go index 170d50435..cc51d0755 100644 --- a/pkg/models/mocks/StudioReaderWriter.go +++ b/pkg/models/mocks/StudioReaderWriter.go @@ -35,6 +35,29 @@ func (_m *StudioReaderWriter) All() ([]*models.Studio, error) { return r0, r1 } +// Create provides a mock function with given fields: newStudio +func (_m *StudioReaderWriter) Create(newStudio models.Studio) (*models.Studio, error) { + ret := _m.Called(newStudio) + + var r0 *models.Studio + if rf, ok := ret.Get(0).(func(models.Studio) 
*models.Studio); ok { + r0 = rf(newStudio) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Studio) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.Studio) error); ok { + r1 = rf(newStudio) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Find provides a mock function with given fields: id func (_m *StudioReaderWriter) Find(id int) (*models.Studio, error) { ret := _m.Called(id) @@ -58,6 +81,29 @@ func (_m *StudioReaderWriter) Find(id int) (*models.Studio, error) { return r0, r1 } +// FindByName provides a mock function with given fields: name, nocase +func (_m *StudioReaderWriter) FindByName(name string, nocase bool) (*models.Studio, error) { + ret := _m.Called(name, nocase) + + var r0 *models.Studio + if rf, ok := ret.Get(0).(func(string, bool) *models.Studio); ok { + r0 = rf(name, nocase) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Studio) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(string, bool) error); ok { + r1 = rf(name, nocase) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindMany provides a mock function with given fields: ids func (_m *StudioReaderWriter) FindMany(ids []int) ([]*models.Studio, error) { ret := _m.Called(ids) @@ -103,3 +149,63 @@ func (_m *StudioReaderWriter) GetStudioImage(studioID int) ([]byte, error) { return r0, r1 } + +// Update provides a mock function with given fields: updatedStudio +func (_m *StudioReaderWriter) Update(updatedStudio models.StudioPartial) (*models.Studio, error) { + ret := _m.Called(updatedStudio) + + var r0 *models.Studio + if rf, ok := ret.Get(0).(func(models.StudioPartial) *models.Studio); ok { + r0 = rf(updatedStudio) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Studio) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.StudioPartial) error); ok { + r1 = rf(updatedStudio) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateFull provides a mock function with given fields: 
updatedStudio +func (_m *StudioReaderWriter) UpdateFull(updatedStudio models.Studio) (*models.Studio, error) { + ret := _m.Called(updatedStudio) + + var r0 *models.Studio + if rf, ok := ret.Get(0).(func(models.Studio) *models.Studio); ok { + r0 = rf(updatedStudio) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Studio) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.Studio) error); ok { + r1 = rf(updatedStudio) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateStudioImage provides a mock function with given fields: studioID, image +func (_m *StudioReaderWriter) UpdateStudioImage(studioID int, image []byte) error { + ret := _m.Called(studioID, image) + + var r0 error + if rf, ok := ret.Get(0).(func(int, []byte) error); ok { + r0 = rf(studioID, image) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/pkg/models/mocks/TagReaderWriter.go b/pkg/models/mocks/TagReaderWriter.go index 1dabbaa32..43d8a299f 100644 --- a/pkg/models/mocks/TagReaderWriter.go +++ b/pkg/models/mocks/TagReaderWriter.go @@ -35,6 +35,29 @@ func (_m *TagReaderWriter) All() ([]*models.Tag, error) { return r0, r1 } +// Create provides a mock function with given fields: newTag +func (_m *TagReaderWriter) Create(newTag models.Tag) (*models.Tag, error) { + ret := _m.Called(newTag) + + var r0 *models.Tag + if rf, ok := ret.Get(0).(func(models.Tag) *models.Tag); ok { + r0 = rf(newTag) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Tag) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.Tag) error); ok { + r1 = rf(newTag) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Find provides a mock function with given fields: id func (_m *TagReaderWriter) Find(id int) (*models.Tag, error) { ret := _m.Called(id) @@ -58,6 +81,52 @@ func (_m *TagReaderWriter) Find(id int) (*models.Tag, error) { return r0, r1 } +// FindByName provides a mock function with given fields: name, nocase +func (_m *TagReaderWriter) 
FindByName(name string, nocase bool) (*models.Tag, error) { + ret := _m.Called(name, nocase) + + var r0 *models.Tag + if rf, ok := ret.Get(0).(func(string, bool) *models.Tag); ok { + r0 = rf(name, nocase) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Tag) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(string, bool) error); ok { + r1 = rf(name, nocase) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByNames provides a mock function with given fields: names, nocase +func (_m *TagReaderWriter) FindByNames(names []string, nocase bool) ([]*models.Tag, error) { + ret := _m.Called(names, nocase) + + var r0 []*models.Tag + if rf, ok := ret.Get(0).(func([]string, bool) []*models.Tag); ok { + r0 = rf(names, nocase) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Tag) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func([]string, bool) error); ok { + r1 = rf(names, nocase) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindBySceneID provides a mock function with given fields: sceneID func (_m *TagReaderWriter) FindBySceneID(sceneID int) ([]*models.Tag, error) { ret := _m.Called(sceneID) @@ -149,3 +218,40 @@ func (_m *TagReaderWriter) GetTagImage(tagID int) ([]byte, error) { return r0, r1 } + +// Update provides a mock function with given fields: updatedTag +func (_m *TagReaderWriter) Update(updatedTag models.Tag) (*models.Tag, error) { + ret := _m.Called(updatedTag) + + var r0 *models.Tag + if rf, ok := ret.Get(0).(func(models.Tag) *models.Tag); ok { + r0 = rf(updatedTag) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Tag) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.Tag) error); ok { + r1 = rf(updatedTag) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateTagImage provides a mock function with given fields: tagID, image +func (_m *TagReaderWriter) UpdateTagImage(tagID int, image []byte) error { + ret := _m.Called(tagID, image) + + var r0 error + 
if rf, ok := ret.Get(0).(func(int, []byte) error); ok { + r0 = rf(tagID, image) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/pkg/models/model_movie.go b/pkg/models/model_movie.go index bc9939b25..678ce2944 100644 --- a/pkg/models/model_movie.go +++ b/pkg/models/model_movie.go @@ -2,6 +2,9 @@ package models import ( "database/sql" + "time" + + "github.com/stashapp/stash/pkg/utils" ) type Movie struct { @@ -37,3 +40,13 @@ type MoviePartial struct { } var DefaultMovieImage = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAYAAABw4pVUAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAA3XAAAN1wFCKJt4AAAAB3RJTUUH4wgVBQsJl1CMZAAAASJJREFUeNrt3N0JwyAYhlEj3cj9R3Cm5rbkqtAP+qrnGaCYHPwJpLlaa++mmLpbAERAgAgIEAEBIiBABERAgAgIEAEBIiBABERAgAgIEAHZuVflj40x4i94zhk9vqsVvEq6AsQqMP1EjORx20OACAgQRRx7T+zzcFBxcjNDfoB4ntQqTm5Awo7MlqywZxcgYQ+RlqywJ3ozJAQCSBiEJSsQA0gYBpDAgAARECACAkRAgAgIEAERECACAmSjUv6eAOSB8m8YIGGzBUjYbAESBgMkbBkDEjZbgITBAClcxiqQvEoatreYIWEBASIgJ4Gkf11ntXH3nS9uxfGWfJ5J9hAgAgJEQAQEiIAAERAgAgJEQAQEiIAAERAgAgJEQAQEiL7qBuc6RKLHxr0CAAAAAElFTkSuQmCC" + +func NewMovie(name string) *Movie { + currentTime := time.Now() + return &Movie{ + Checksum: utils.MD5FromString(name), + Name: sql.NullString{String: name, Valid: true}, + CreatedAt: SQLiteTimestamp{Timestamp: currentTime}, + UpdatedAt: SQLiteTimestamp{Timestamp: currentTime}, + } +} diff --git a/pkg/models/model_performer.go b/pkg/models/model_performer.go index 12348bc75..26b033d61 100644 --- a/pkg/models/model_performer.go +++ b/pkg/models/model_performer.go @@ -2,6 +2,9 @@ package models import ( "database/sql" + "time" + + "github.com/stashapp/stash/pkg/utils" ) type Performer struct { @@ -27,3 +30,14 @@ type Performer struct { CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"` UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"` } + +func NewPerformer(name string) *Performer { + currentTime := time.Now() + return &Performer{ + Checksum: utils.MD5FromString(name), + Name: sql.NullString{String: 
name, Valid: true}, + Favorite: sql.NullBool{Bool: false, Valid: true}, + CreatedAt: SQLiteTimestamp{Timestamp: currentTime}, + UpdatedAt: SQLiteTimestamp{Timestamp: currentTime}, + } +} diff --git a/pkg/models/model_studio.go b/pkg/models/model_studio.go index 6880d3986..d3a4940ff 100644 --- a/pkg/models/model_studio.go +++ b/pkg/models/model_studio.go @@ -2,6 +2,9 @@ package models import ( "database/sql" + "time" + + "github.com/stashapp/stash/pkg/utils" ) type Studio struct { @@ -25,3 +28,13 @@ type StudioPartial struct { } var DefaultStudioImage = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAYAAABw4pVUAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAA3XAAAN1wFCKJt4AAAAB3RJTUUH4wgVBQsJl1CMZAAAASJJREFUeNrt3N0JwyAYhlEj3cj9R3Cm5rbkqtAP+qrnGaCYHPwJpLlaa++mmLpbAERAgAgIEAEBIiBABERAgAgIEAEBIiBABERAgAgIEAHZuVflj40x4i94zhk9vqsVvEq6AsQqMP1EjORx20OACAgQRRx7T+zzcFBxcjNDfoB4ntQqTm5Awo7MlqywZxcgYQ+RlqywJ3ozJAQCSBiEJSsQA0gYBpDAgAARECACAkRAgAgIEAERECACAmSjUv6eAOSB8m8YIGGzBUjYbAESBgMkbBkDEjZbgITBAClcxiqQvEoatreYIWEBASIgJ4Gkf11ntXH3nS9uxfGWfJ5J9hAgAgJEQAQEiIAAERAgAgJEQAQEiIAAERAgAgJEQAQEiL7qBuc6RKLHxr0CAAAAAElFTkSuQmCC" + +func NewStudio(name string) *Studio { + currentTime := time.Now() + return &Studio{ + Checksum: utils.MD5FromString(name), + Name: sql.NullString{String: name, Valid: true}, + CreatedAt: SQLiteTimestamp{Timestamp: currentTime}, + UpdatedAt: SQLiteTimestamp{Timestamp: currentTime}, + } +} diff --git a/pkg/models/model_tag.go b/pkg/models/model_tag.go index d62d83784..181f188a3 100644 --- a/pkg/models/model_tag.go +++ b/pkg/models/model_tag.go @@ -1,5 +1,7 @@ package models +import "time" + type Tag struct { ID int `db:"id" json:"id"` Name string `db:"name" json:"name"` // TODO make schema not null @@ -7,6 +9,15 @@ type Tag struct { UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"` } +func NewTag(name string) *Tag { + currentTime := time.Now() + return &Tag{ + Name: name, + CreatedAt: SQLiteTimestamp{Timestamp: currentTime}, + UpdatedAt: 
SQLiteTimestamp{Timestamp: currentTime}, + } +} + // Original Tag image from: https://fontawesome.com/icons/tag?style=solid // Modified to change color and rotate // Licensed under CC Attribution 4.0: https://fontawesome.com/license diff --git a/pkg/models/movie.go b/pkg/models/movie.go index 2ee6316ff..f4dd7ea89 100644 --- a/pkg/models/movie.go +++ b/pkg/models/movie.go @@ -8,8 +8,8 @@ type MovieReader interface { Find(id int) (*Movie, error) FindMany(ids []int) ([]*Movie, error) // FindBySceneID(sceneID int) ([]*Movie, error) - // FindByName(name string, nocase bool) (*Movie, error) - // FindByNames(names []string, nocase bool) ([]*Movie, error) + FindByName(name string, nocase bool) (*Movie, error) + FindByNames(names []string, nocase bool) ([]*Movie, error) All() ([]*Movie, error) // AllSlim() ([]*Movie, error) // Query(movieFilter *MovieFilterType, findFilter *FindFilterType) ([]*Movie, int) @@ -18,10 +18,11 @@ type MovieReader interface { } type MovieWriter interface { - // Create(newMovie Movie) (*Movie, error) - // Update(updatedMovie MoviePartial) (*Movie, error) + Create(newMovie Movie) (*Movie, error) + Update(updatedMovie MoviePartial) (*Movie, error) + UpdateFull(updatedMovie Movie) (*Movie, error) // Destroy(id string) error - // UpdateMovieImages(movieID int, frontImage []byte, backImage []byte) error + UpdateMovieImages(movieID int, frontImage []byte, backImage []byte) error // DestroyMovieImages(movieID int) error } @@ -50,6 +51,14 @@ func (t *movieReaderWriter) FindMany(ids []int) ([]*Movie, error) { return t.qb.FindMany(ids) } +func (t *movieReaderWriter) FindByName(name string, nocase bool) (*Movie, error) { + return t.qb.FindByName(name, t.tx, nocase) +} + +func (t *movieReaderWriter) FindByNames(names []string, nocase bool) ([]*Movie, error) { + return t.qb.FindByNames(names, t.tx, nocase) +} + func (t *movieReaderWriter) All() ([]*Movie, error) { return t.qb.All() } @@ -61,3 +70,19 @@ func (t *movieReaderWriter) GetFrontImage(movieID int) 
([]byte, error) { func (t *movieReaderWriter) GetBackImage(movieID int) ([]byte, error) { return t.qb.GetBackImage(movieID, t.tx) } + +func (t *movieReaderWriter) Create(newMovie Movie) (*Movie, error) { + return t.qb.Create(newMovie, t.tx) +} + +func (t *movieReaderWriter) Update(updatedMovie MoviePartial) (*Movie, error) { + return t.qb.Update(updatedMovie, t.tx) +} + +func (t *movieReaderWriter) UpdateFull(updatedMovie Movie) (*Movie, error) { + return t.qb.UpdateFull(updatedMovie, t.tx) +} + +func (t *movieReaderWriter) UpdateMovieImages(movieID int, frontImage []byte, backImage []byte) error { + return t.qb.UpdateMovieImages(movieID, frontImage, backImage, t.tx) +} diff --git a/pkg/models/performer.go b/pkg/models/performer.go index e98cc1908..b4e4eaddc 100644 --- a/pkg/models/performer.go +++ b/pkg/models/performer.go @@ -9,7 +9,7 @@ type PerformerReader interface { FindMany(ids []int) ([]*Performer, error) FindBySceneID(sceneID int) ([]*Performer, error) FindNamesBySceneID(sceneID int) ([]*Performer, error) - // FindByNames(names []string, nocase bool) ([]*Performer, error) + FindByNames(names []string, nocase bool) ([]*Performer, error) // Count() (int, error) All() ([]*Performer, error) // AllSlim() ([]*Performer, error) @@ -18,10 +18,10 @@ type PerformerReader interface { } type PerformerWriter interface { - // Create(newPerformer Performer) (*Performer, error) - // Update(updatedPerformer Performer) (*Performer, error) + Create(newPerformer Performer) (*Performer, error) + Update(updatedPerformer Performer) (*Performer, error) // Destroy(id string) error - // UpdatePerformerImage(performerID int, image []byte) error + UpdatePerformerImage(performerID int, image []byte) error // DestroyPerformerImage(performerID int) error } @@ -46,6 +46,10 @@ func (t *performerReaderWriter) FindMany(ids []int) ([]*Performer, error) { return t.qb.FindMany(ids) } +func (t *performerReaderWriter) FindByNames(names []string, nocase bool) ([]*Performer, error) { + return 
t.qb.FindByNames(names, t.tx, nocase) +} + func (t *performerReaderWriter) All() ([]*Performer, error) { return t.qb.All() } @@ -61,3 +65,15 @@ func (t *performerReaderWriter) FindBySceneID(id int) ([]*Performer, error) { func (t *performerReaderWriter) FindNamesBySceneID(sceneID int) ([]*Performer, error) { return t.qb.FindNameBySceneID(sceneID, t.tx) } + +func (t *performerReaderWriter) Create(newPerformer Performer) (*Performer, error) { + return t.qb.Create(newPerformer, t.tx) +} + +func (t *performerReaderWriter) Update(updatedPerformer Performer) (*Performer, error) { + return t.qb.Update(updatedPerformer, t.tx) +} + +func (t *performerReaderWriter) UpdatePerformerImage(performerID int, image []byte) error { + return t.qb.UpdatePerformerImage(performerID, image, t.tx) +} diff --git a/pkg/models/querybuilder_movies.go b/pkg/models/querybuilder_movies.go index f37b2a1e2..85eeb5708 100644 --- a/pkg/models/querybuilder_movies.go +++ b/pkg/models/querybuilder_movies.go @@ -49,6 +49,19 @@ func (qb *MovieQueryBuilder) Update(updatedMovie MoviePartial, tx *sqlx.Tx) (*Mo return qb.Find(updatedMovie.ID, tx) } +func (qb *MovieQueryBuilder) UpdateFull(updatedMovie Movie, tx *sqlx.Tx) (*Movie, error) { + ensureTx(tx) + _, err := tx.NamedExec( + `UPDATE movies SET `+SQLGenKeys(updatedMovie)+` WHERE movies.id = :id`, + updatedMovie, + ) + if err != nil { + return nil, err + } + + return qb.Find(updatedMovie.ID, tx) +} + func (qb *MovieQueryBuilder) Destroy(id string, tx *sqlx.Tx) error { // delete movie from movies_scenes diff --git a/pkg/models/querybuilder_scene.go b/pkg/models/querybuilder_scene.go index e2cbca00a..4f18ab7a5 100644 --- a/pkg/models/querybuilder_scene.go +++ b/pkg/models/querybuilder_scene.go @@ -93,6 +93,19 @@ func (qb *SceneQueryBuilder) Update(updatedScene ScenePartial, tx *sqlx.Tx) (*Sc return qb.find(updatedScene.ID, tx) } +func (qb *SceneQueryBuilder) UpdateFull(updatedScene Scene, tx *sqlx.Tx) (*Scene, error) { + ensureTx(tx) + _, err := 
tx.NamedExec( + `UPDATE scenes SET `+SQLGenKeys(updatedScene)+` WHERE scenes.id = :id`, + updatedScene, + ) + if err != nil { + return nil, err + } + + return qb.find(updatedScene.ID, tx) +} + func (qb *SceneQueryBuilder) IncrementOCounter(id int, tx *sqlx.Tx) (int, error) { ensureTx(tx) _, err := tx.Exec( diff --git a/pkg/models/querybuilder_studio.go b/pkg/models/querybuilder_studio.go index 1f3537459..e713215ac 100644 --- a/pkg/models/querybuilder_studio.go +++ b/pkg/models/querybuilder_studio.go @@ -53,6 +53,23 @@ func (qb *StudioQueryBuilder) Update(updatedStudio StudioPartial, tx *sqlx.Tx) ( return &ret, nil } +func (qb *StudioQueryBuilder) UpdateFull(updatedStudio Studio, tx *sqlx.Tx) (*Studio, error) { + ensureTx(tx) + _, err := tx.NamedExec( + `UPDATE studios SET `+SQLGenKeys(updatedStudio)+` WHERE studios.id = :id`, + updatedStudio, + ) + if err != nil { + return nil, err + } + + var ret Studio + if err := tx.Get(&ret, `SELECT * FROM studios WHERE id = ? LIMIT 1`, updatedStudio.ID); err != nil { + return nil, err + } + return &ret, nil +} + func (qb *StudioQueryBuilder) Destroy(id string, tx *sqlx.Tx) error { // remove studio from scenes _, err := tx.Exec("UPDATE scenes SET studio_id = null WHERE studio_id = ?", id) diff --git a/pkg/models/scene.go b/pkg/models/scene.go index 7d95714fb..60f282e41 100644 --- a/pkg/models/scene.go +++ b/pkg/models/scene.go @@ -7,8 +7,8 @@ import ( type SceneReader interface { // Find(id int) (*Scene, error) FindMany(ids []int) ([]*Scene, error) - // FindByChecksum(checksum string) (*Scene, error) - // FindByOSHash(oshash string) (*Scene, error) + FindByChecksum(checksum string) (*Scene, error) + FindByOSHash(oshash string) (*Scene, error) // FindByPath(path string) (*Scene, error) // FindByPerformerID(performerID int) ([]*Scene, error) // CountByPerformerID(performerID int) (int, error) @@ -30,8 +30,9 @@ type SceneReader interface { } type SceneWriter interface { - // Create(newScene Scene) (*Scene, error) - // 
Update(updatedScene ScenePartial) (*Scene, error) + Create(newScene Scene) (*Scene, error) + Update(updatedScene ScenePartial) (*Scene, error) + UpdateFull(updatedScene Scene) (*Scene, error) // IncrementOCounter(id int) (int, error) // DecrementOCounter(id int) (int, error) // ResetOCounter(id int) (int, error) @@ -39,7 +40,7 @@ type SceneWriter interface { // UpdateFormat(id int, format string) error // UpdateOSHash(id int, oshash string) error // UpdateChecksum(id int, checksum string) error - // UpdateSceneCover(sceneID int, cover []byte) error + UpdateSceneCover(sceneID int, cover []byte) error // DestroySceneCover(sceneID int) error } @@ -64,6 +65,14 @@ func (t *sceneReaderWriter) FindMany(ids []int) ([]*Scene, error) { return t.qb.FindMany(ids) } +func (t *sceneReaderWriter) FindByChecksum(checksum string) (*Scene, error) { + return t.qb.FindByChecksum(checksum) +} + +func (t *sceneReaderWriter) FindByOSHash(oshash string) (*Scene, error) { + return t.qb.FindByOSHash(oshash) +} + func (t *sceneReaderWriter) All() ([]*Scene, error) { return t.qb.All() } @@ -71,3 +80,19 @@ func (t *sceneReaderWriter) All() ([]*Scene, error) { func (t *sceneReaderWriter) GetSceneCover(sceneID int) ([]byte, error) { return t.qb.GetSceneCover(sceneID, t.tx) } + +func (t *sceneReaderWriter) Create(newScene Scene) (*Scene, error) { + return t.qb.Create(newScene, t.tx) +} + +func (t *sceneReaderWriter) Update(updatedScene ScenePartial) (*Scene, error) { + return t.qb.Update(updatedScene, t.tx) +} + +func (t *sceneReaderWriter) UpdateFull(updatedScene Scene) (*Scene, error) { + return t.qb.UpdateFull(updatedScene, t.tx) +} + +func (t *sceneReaderWriter) UpdateSceneCover(sceneID int, cover []byte) error { + return t.qb.UpdateSceneCover(sceneID, cover, t.tx) +} diff --git a/pkg/models/scene_marker.go b/pkg/models/scene_marker.go index 81958114e..530e00684 100644 --- a/pkg/models/scene_marker.go +++ b/pkg/models/scene_marker.go @@ -15,8 +15,8 @@ type SceneMarkerReader interface { } type 
SceneMarkerWriter interface { - // Create(newSceneMarker SceneMarker) (*SceneMarker, error) - // Update(updatedSceneMarker SceneMarker) (*SceneMarker, error) + Create(newSceneMarker SceneMarker) (*SceneMarker, error) + Update(updatedSceneMarker SceneMarker) (*SceneMarker, error) // Destroy(id string) error } @@ -40,3 +40,11 @@ type sceneMarkerReaderWriter struct { func (t *sceneMarkerReaderWriter) FindBySceneID(sceneID int) ([]*SceneMarker, error) { return t.qb.FindBySceneID(sceneID, t.tx) } + +func (t *sceneMarkerReaderWriter) Create(newSceneMarker SceneMarker) (*SceneMarker, error) { + return t.qb.Create(newSceneMarker, t.tx) +} + +func (t *sceneMarkerReaderWriter) Update(updatedSceneMarker SceneMarker) (*SceneMarker, error) { + return t.qb.Update(updatedSceneMarker, t.tx) +} diff --git a/pkg/models/sqlite_date.go b/pkg/models/sqlite_date.go index c3fb6d012..bd9ebf8cd 100644 --- a/pkg/models/sqlite_date.go +++ b/pkg/models/sqlite_date.go @@ -2,9 +2,10 @@ package models import ( "database/sql/driver" + "time" + "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/utils" - "time" ) type SQLiteDate struct { @@ -32,6 +33,11 @@ func (t *SQLiteDate) Scan(value interface{}) error { // Value implements the driver Valuer interface. 
func (t SQLiteDate) Value() (driver.Value, error) { + // handle empty string + if t.String == "" { + return "", nil + } + result, err := utils.ParseDateStringAsFormat(t.String, "2006-01-02") if err != nil { logger.Debugf("sqlite date conversion error: %s", err.Error()) diff --git a/pkg/models/studio.go b/pkg/models/studio.go index 460faa482..8916ad3a4 100644 --- a/pkg/models/studio.go +++ b/pkg/models/studio.go @@ -9,7 +9,7 @@ type StudioReader interface { FindMany(ids []int) ([]*Studio, error) // FindChildren(id int) ([]*Studio, error) // FindBySceneID(sceneID int) (*Studio, error) - // FindByName(name string, nocase bool) (*Studio, error) + FindByName(name string, nocase bool) (*Studio, error) // Count() (int, error) All() ([]*Studio, error) // AllSlim() ([]*Studio, error) @@ -18,10 +18,11 @@ type StudioReader interface { } type StudioWriter interface { - // Create(newStudio Studio) (*Studio, error) - // Update(updatedStudio StudioPartial) (*Studio, error) + Create(newStudio Studio) (*Studio, error) + Update(updatedStudio StudioPartial) (*Studio, error) + UpdateFull(updatedStudio Studio) (*Studio, error) // Destroy(id string) error - // UpdateStudioImage(studioID int, image []byte) error + UpdateStudioImage(studioID int, image []byte) error // DestroyStudioImage(studioID int) error } @@ -50,6 +51,10 @@ func (t *studioReaderWriter) FindMany(ids []int) ([]*Studio, error) { return t.qb.FindMany(ids) } +func (t *studioReaderWriter) FindByName(name string, nocase bool) (*Studio, error) { + return t.qb.FindByName(name, t.tx, nocase) +} + func (t *studioReaderWriter) All() ([]*Studio, error) { return t.qb.All() } @@ -57,3 +62,19 @@ func (t *studioReaderWriter) All() ([]*Studio, error) { func (t *studioReaderWriter) GetStudioImage(studioID int) ([]byte, error) { return t.qb.GetStudioImage(studioID, t.tx) } + +func (t *studioReaderWriter) Create(newStudio Studio) (*Studio, error) { + return t.qb.Create(newStudio, t.tx) +} + +func (t *studioReaderWriter) 
Update(updatedStudio StudioPartial) (*Studio, error) { + return t.qb.Update(updatedStudio, t.tx) +} + +func (t *studioReaderWriter) UpdateFull(updatedStudio Studio) (*Studio, error) { + return t.qb.UpdateFull(updatedStudio, t.tx) +} + +func (t *studioReaderWriter) UpdateStudioImage(studioID int, image []byte) error { + return t.qb.UpdateStudioImage(studioID, image, t.tx) +} diff --git a/pkg/models/tag.go b/pkg/models/tag.go index 98985c995..044b667e4 100644 --- a/pkg/models/tag.go +++ b/pkg/models/tag.go @@ -9,8 +9,8 @@ type TagReader interface { FindMany(ids []int) ([]*Tag, error) FindBySceneID(sceneID int) ([]*Tag, error) FindBySceneMarkerID(sceneMarkerID int) ([]*Tag, error) - // FindByName(name string, nocase bool) (*Tag, error) - // FindByNames(names []string, nocase bool) ([]*Tag, error) + FindByName(name string, nocase bool) (*Tag, error) + FindByNames(names []string, nocase bool) ([]*Tag, error) // Count() (int, error) All() ([]*Tag, error) // AllSlim() ([]*Tag, error) @@ -19,10 +19,10 @@ type TagReader interface { } type TagWriter interface { - // Create(newTag Tag) (*Tag, error) - // Update(updatedTag Tag) (*Tag, error) + Create(newTag Tag) (*Tag, error) + Update(updatedTag Tag) (*Tag, error) // Destroy(id string) error - // UpdateTagImage(tagID int, image []byte) error + UpdateTagImage(tagID int, image []byte) error // DestroyTagImage(tagID int) error } @@ -59,6 +59,14 @@ func (t *tagReaderWriter) FindBySceneMarkerID(sceneMarkerID int) ([]*Tag, error) return t.qb.FindBySceneMarkerID(sceneMarkerID, t.tx) } +func (t *tagReaderWriter) FindByName(name string, nocase bool) (*Tag, error) { + return t.qb.FindByName(name, t.tx, nocase) +} + +func (t *tagReaderWriter) FindByNames(names []string, nocase bool) ([]*Tag, error) { + return t.qb.FindByNames(names, t.tx, nocase) +} + func (t *tagReaderWriter) GetTagImage(tagID int) ([]byte, error) { return t.qb.GetTagImage(tagID, t.tx) } @@ -66,3 +74,15 @@ func (t *tagReaderWriter) GetTagImage(tagID int) ([]byte, error) 
{ func (t *tagReaderWriter) FindBySceneID(sceneID int) ([]*Tag, error) { return t.qb.FindBySceneID(sceneID, t.tx) } + +func (t *tagReaderWriter) Create(newTag Tag) (*Tag, error) { + return t.qb.Create(newTag, t.tx) +} + +func (t *tagReaderWriter) Update(updatedTag Tag) (*Tag, error) { + return t.qb.Update(updatedTag, t.tx) +} + +func (t *tagReaderWriter) UpdateTagImage(tagID int, image []byte) error { + return t.qb.UpdateTagImage(tagID, image, t.tx) +} diff --git a/pkg/movie/import.go b/pkg/movie/import.go new file mode 100644 index 000000000..7e1065df6 --- /dev/null +++ b/pkg/movie/import.go @@ -0,0 +1,166 @@ +package movie + +import ( + "database/sql" + "fmt" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) + +type Importer struct { + ReaderWriter models.MovieReaderWriter + StudioWriter models.StudioReaderWriter + Input jsonschema.Movie + MissingRefBehaviour models.ImportMissingRefEnum + + movie models.Movie + frontImageData []byte + backImageData []byte +} + +func (i *Importer) PreImport() error { + i.movie = i.movieJSONToMovie(i.Input) + + if err := i.populateStudio(); err != nil { + return err + } + + var err error + if len(i.Input.FrontImage) > 0 { + _, i.frontImageData, err = utils.ProcessBase64Image(i.Input.FrontImage) + if err != nil { + return fmt.Errorf("invalid front_image: %s", err.Error()) + } + } + if len(i.Input.BackImage) > 0 { + _, i.backImageData, err = utils.ProcessBase64Image(i.Input.BackImage) + if err != nil { + return fmt.Errorf("invalid back_image: %s", err.Error()) + } + } + + return nil +} + +func (i *Importer) movieJSONToMovie(movieJSON jsonschema.Movie) models.Movie { + checksum := utils.MD5FromString(movieJSON.Name) + + newMovie := models.Movie{ + Checksum: checksum, + Name: sql.NullString{String: movieJSON.Name, Valid: true}, + Aliases: sql.NullString{String: movieJSON.Aliases, Valid: true}, + Date: models.SQLiteDate{String: movieJSON.Date, 
Valid: true}, + Director: sql.NullString{String: movieJSON.Director, Valid: true}, + Synopsis: sql.NullString{String: movieJSON.Synopsis, Valid: true}, + URL: sql.NullString{String: movieJSON.URL, Valid: true}, + CreatedAt: models.SQLiteTimestamp{Timestamp: movieJSON.CreatedAt.GetTime()}, + UpdatedAt: models.SQLiteTimestamp{Timestamp: movieJSON.UpdatedAt.GetTime()}, + } + + if movieJSON.Rating != 0 { + newMovie.Rating = sql.NullInt64{Int64: int64(movieJSON.Rating), Valid: true} + } + + if movieJSON.Duration != 0 { + newMovie.Duration = sql.NullInt64{Int64: int64(movieJSON.Duration), Valid: true} + } + + return newMovie +} + +func (i *Importer) populateStudio() error { + if i.Input.Studio != "" { + studio, err := i.StudioWriter.FindByName(i.Input.Studio, false) + if err != nil { + return fmt.Errorf("error finding studio by name: %s", err.Error()) + } + + if studio == nil { + if i.MissingRefBehaviour == models.ImportMissingRefEnumFail { + return fmt.Errorf("movie studio '%s' not found", i.Input.Studio) + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumIgnore { + return nil + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { + studioID, err := i.createStudio(i.Input.Studio) + if err != nil { + return err + } + i.movie.StudioID = sql.NullInt64{ + Int64: int64(studioID), + Valid: true, + } + } + } else { + i.movie.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true} + } + } + + return nil +} + +func (i *Importer) createStudio(name string) (int, error) { + newStudio := *models.NewStudio(name) + + created, err := i.StudioWriter.Create(newStudio) + if err != nil { + return 0, err + } + + return created.ID, nil +} + +func (i *Importer) PostImport(id int) error { + if len(i.frontImageData) > 0 { + if err := i.ReaderWriter.UpdateMovieImages(id, i.frontImageData, i.backImageData); err != nil { + return fmt.Errorf("error setting movie images: %s", err.Error()) + } + } + + return nil +} + +func (i *Importer) Name() string { + return 
i.Input.Name +} + +func (i *Importer) FindExistingID() (*int, error) { + const nocase = false + existing, err := i.ReaderWriter.FindByName(i.Name(), nocase) + if err != nil { + return nil, err + } + + if existing != nil { + id := existing.ID + return &id, nil + } + + return nil, nil +} + +func (i *Importer) Create() (*int, error) { + created, err := i.ReaderWriter.Create(i.movie) + if err != nil { + return nil, fmt.Errorf("error creating movie: %s", err.Error()) + } + + id := created.ID + return &id, nil +} + +func (i *Importer) Update(id int) error { + movie := i.movie + movie.ID = id + _, err := i.ReaderWriter.UpdateFull(movie) + if err != nil { + return fmt.Errorf("error updating existing movie: %s", err.Error()) + } + + return nil +} diff --git a/pkg/movie/import_test.go b/pkg/movie/import_test.go new file mode 100644 index 000000000..afdc484d1 --- /dev/null +++ b/pkg/movie/import_test.go @@ -0,0 +1,278 @@ +package movie + +import ( + "errors" + "testing" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/models/modelstest" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +const invalidImage = "aW1hZ2VCeXRlcw&&" + +const ( + movieNameErr = "movieNameErr" + existingMovieName = "existingMovieName" + + existingMovieID = 100 + existingStudioID = 101 + + existingStudioName = "existingStudioName" + existingStudioErr = "existingStudioErr" + missingStudioName = "existingStudioName" + + errImageID = 3 +) + +func TestImporterName(t *testing.T) { + i := Importer{ + Input: jsonschema.Movie{ + Name: movieName, + }, + } + + assert.Equal(t, movieName, i.Name()) +} + +func TestImporterPreImport(t *testing.T) { + i := Importer{ + Input: jsonschema.Movie{ + Name: movieName, + FrontImage: invalidImage, + }, + } + + err := i.PreImport() + assert.NotNil(t, err) + + i.Input.FrontImage = frontImage + i.Input.BackImage = 
invalidImage + + err = i.PreImport() + assert.NotNil(t, err) + + i.Input.BackImage = "" + + err = i.PreImport() + assert.Nil(t, err) + + i.Input.BackImage = backImage + + err = i.PreImport() + assert.Nil(t, err) +} + +func TestImporterPreImportWithStudio(t *testing.T) { + studioReaderWriter := &mocks.StudioReaderWriter{} + + i := Importer{ + StudioWriter: studioReaderWriter, + Input: jsonschema.Movie{ + Name: movieName, + FrontImage: frontImage, + Studio: existingStudioName, + Rating: 5, + Duration: 10, + }, + } + + studioReaderWriter.On("FindByName", existingStudioName, false).Return(&models.Studio{ + ID: existingStudioID, + }, nil).Once() + studioReaderWriter.On("FindByName", existingStudioErr, false).Return(nil, errors.New("FindByName error")).Once() + + err := i.PreImport() + assert.Nil(t, err) + assert.Equal(t, int64(existingStudioID), i.movie.StudioID.Int64) + + i.Input.Studio = existingStudioErr + err = i.PreImport() + assert.NotNil(t, err) + + studioReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingStudio(t *testing.T) { + studioReaderWriter := &mocks.StudioReaderWriter{} + + i := Importer{ + StudioWriter: studioReaderWriter, + Input: jsonschema.Movie{ + Name: movieName, + FrontImage: frontImage, + Studio: missingStudioName, + }, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + } + + studioReaderWriter.On("FindByName", missingStudioName, false).Return(nil, nil).Times(3) + studioReaderWriter.On("Create", mock.AnythingOfType("models.Studio")).Return(&models.Studio{ + ID: existingStudioID, + }, nil) + + err := i.PreImport() + assert.NotNil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore + err = i.PreImport() + assert.Nil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumCreate + err = i.PreImport() + assert.Nil(t, err) + assert.Equal(t, int64(existingStudioID), i.movie.StudioID.Int64) + + studioReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingStudioCreateErr(t 
*testing.T) { + studioReaderWriter := &mocks.StudioReaderWriter{} + + i := Importer{ + StudioWriter: studioReaderWriter, + Input: jsonschema.Movie{ + Name: movieName, + FrontImage: frontImage, + Studio: missingStudioName, + }, + MissingRefBehaviour: models.ImportMissingRefEnumCreate, + } + + studioReaderWriter.On("FindByName", missingStudioName, false).Return(nil, nil).Once() + studioReaderWriter.On("Create", mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error")) + + err := i.PreImport() + assert.NotNil(t, err) +} + +func TestImporterPostImport(t *testing.T) { + readerWriter := &mocks.MovieReaderWriter{} + + i := Importer{ + ReaderWriter: readerWriter, + frontImageData: frontImageBytes, + backImageData: backImageBytes, + } + + updateMovieImageErr := errors.New("UpdateMovieImage error") + + readerWriter.On("UpdateMovieImages", movieID, frontImageBytes, backImageBytes).Return(nil).Once() + readerWriter.On("UpdateMovieImages", errImageID, frontImageBytes, backImageBytes).Return(updateMovieImageErr).Once() + + err := i.PostImport(movieID) + assert.Nil(t, err) + + err = i.PostImport(errImageID) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestImporterFindExistingID(t *testing.T) { + readerWriter := &mocks.MovieReaderWriter{} + + i := Importer{ + ReaderWriter: readerWriter, + Input: jsonschema.Movie{ + Name: movieName, + }, + } + + errFindByName := errors.New("FindByName error") + readerWriter.On("FindByName", movieName, false).Return(nil, nil).Once() + readerWriter.On("FindByName", existingMovieName, false).Return(&models.Movie{ + ID: existingMovieID, + }, nil).Once() + readerWriter.On("FindByName", movieNameErr, false).Return(nil, errFindByName).Once() + + id, err := i.FindExistingID() + assert.Nil(t, id) + assert.Nil(t, err) + + i.Input.Name = existingMovieName + id, err = i.FindExistingID() + assert.Equal(t, existingMovieID, *id) + assert.Nil(t, err) + + i.Input.Name = movieNameErr + id, err = i.FindExistingID() + 
assert.Nil(t, id) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestCreate(t *testing.T) { + readerWriter := &mocks.MovieReaderWriter{} + + movie := models.Movie{ + Name: modelstest.NullString(movieName), + } + + movieErr := models.Movie{ + Name: modelstest.NullString(movieNameErr), + } + + i := Importer{ + ReaderWriter: readerWriter, + movie: movie, + } + + errCreate := errors.New("Create error") + readerWriter.On("Create", movie).Return(&models.Movie{ + ID: movieID, + }, nil).Once() + readerWriter.On("Create", movieErr).Return(nil, errCreate).Once() + + id, err := i.Create() + assert.Equal(t, movieID, *id) + assert.Nil(t, err) + + i.movie = movieErr + id, err = i.Create() + assert.Nil(t, id) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestUpdate(t *testing.T) { + readerWriter := &mocks.MovieReaderWriter{} + + movie := models.Movie{ + Name: modelstest.NullString(movieName), + } + + movieErr := models.Movie{ + Name: modelstest.NullString(movieNameErr), + } + + i := Importer{ + ReaderWriter: readerWriter, + movie: movie, + } + + errUpdate := errors.New("Update error") + + // id needs to be set for the mock input + movie.ID = movieID + readerWriter.On("UpdateFull", movie).Return(nil, nil).Once() + + err := i.Update(movieID) + assert.Nil(t, err) + + i.movie = movieErr + + // need to set id separately + movieErr.ID = errImageID + readerWriter.On("UpdateFull", movieErr).Return(nil, errUpdate).Once() + + err = i.Update(errImageID) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} diff --git a/pkg/performer/export_test.go b/pkg/performer/export_test.go index d0f0dc08c..6df042341 100644 --- a/pkg/performer/export_test.go +++ b/pkg/performer/export_test.go @@ -8,6 +8,7 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/mocks" "github.com/stashapp/stash/pkg/models/modelstest" + "github.com/stashapp/stash/pkg/utils" "github.com/stretchr/testify/assert" "testing" @@ 
-46,13 +47,14 @@ var birthDate = models.SQLiteDate{ String: "2001-01-01", Valid: true, } -var createTime time.Time = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC) -var updateTime time.Time = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC) +var createTime time.Time = time.Date(2001, 01, 01, 0, 0, 0, 0, time.Local) +var updateTime time.Time = time.Date(2002, 01, 01, 0, 0, 0, 0, time.Local) -func createFullPerformer(id int) models.Performer { - return models.Performer{ +func createFullPerformer(id int, name string) *models.Performer { + return &models.Performer{ ID: id, - Name: modelstest.NullString(performerName), + Name: modelstest.NullString(name), + Checksum: utils.MD5FromString(name), URL: modelstest.NullString(url), Aliases: modelstest.NullString(aliases), Birthdate: birthDate, @@ -93,9 +95,9 @@ func createEmptyPerformer(id int) models.Performer { } } -func createFullJSONPerformer(image string) *jsonschema.Performer { +func createFullJSONPerformer(name string, image string) *jsonschema.Performer { return &jsonschema.Performer{ - Name: performerName, + Name: name, URL: url, Aliases: aliases, Birthdate: birthDate.String, @@ -144,8 +146,8 @@ var scenarios []testScenario func initTestTable() { scenarios = []testScenario{ testScenario{ - createFullPerformer(performerID), - createFullJSONPerformer(image), + *createFullPerformer(performerID, performerName), + createFullJSONPerformer(performerName, image), false, }, testScenario{ @@ -154,7 +156,7 @@ func initTestTable() { false, }, testScenario{ - createFullPerformer(errImageID), + *createFullPerformer(errImageID, performerName), nil, true, }, diff --git a/pkg/performer/import.go b/pkg/performer/import.go new file mode 100644 index 000000000..0dae51776 --- /dev/null +++ b/pkg/performer/import.go @@ -0,0 +1,144 @@ +package performer + +import ( + "database/sql" + "fmt" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) + +type Importer 
struct { + ReaderWriter models.PerformerReaderWriter + Input jsonschema.Performer + + performer models.Performer + imageData []byte +} + +func (i *Importer) PreImport() error { + i.performer = performerJSONToPerformer(i.Input) + + var err error + if len(i.Input.Image) > 0 { + _, i.imageData, err = utils.ProcessBase64Image(i.Input.Image) + if err != nil { + return fmt.Errorf("invalid image: %s", err.Error()) + } + } + + return nil +} + +func (i *Importer) PostImport(id int) error { + if len(i.imageData) > 0 { + if err := i.ReaderWriter.UpdatePerformerImage(id, i.imageData); err != nil { + return fmt.Errorf("error setting performer image: %s", err.Error()) + } + } + + return nil +} + +func (i *Importer) Name() string { + return i.Input.Name +} + +func (i *Importer) FindExistingID() (*int, error) { + const nocase = false + existing, err := i.ReaderWriter.FindByNames([]string{i.Name()}, nocase) + if err != nil { + return nil, err + } + + if len(existing) > 0 { + id := existing[0].ID + return &id, nil + } + + return nil, nil +} + +func (i *Importer) Create() (*int, error) { + created, err := i.ReaderWriter.Create(i.performer) + if err != nil { + return nil, fmt.Errorf("error creating performer: %s", err.Error()) + } + + id := created.ID + return &id, nil +} + +func (i *Importer) Update(id int) error { + performer := i.performer + performer.ID = id + _, err := i.ReaderWriter.Update(performer) + if err != nil { + return fmt.Errorf("error updating existing performer: %s", err.Error()) + } + + return nil +} + +func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Performer { + checksum := utils.MD5FromString(performerJSON.Name) + + newPerformer := models.Performer{ + Checksum: checksum, + Favorite: sql.NullBool{Bool: performerJSON.Favorite, Valid: true}, + CreatedAt: models.SQLiteTimestamp{Timestamp: performerJSON.CreatedAt.GetTime()}, + UpdatedAt: models.SQLiteTimestamp{Timestamp: performerJSON.UpdatedAt.GetTime()}, + } + + if performerJSON.Name != "" { 
+ newPerformer.Name = sql.NullString{String: performerJSON.Name, Valid: true} + } + if performerJSON.Gender != "" { + newPerformer.Gender = sql.NullString{String: performerJSON.Gender, Valid: true} + } + if performerJSON.URL != "" { + newPerformer.URL = sql.NullString{String: performerJSON.URL, Valid: true} + } + if performerJSON.Birthdate != "" { + newPerformer.Birthdate = models.SQLiteDate{String: performerJSON.Birthdate, Valid: true} + } + if performerJSON.Ethnicity != "" { + newPerformer.Ethnicity = sql.NullString{String: performerJSON.Ethnicity, Valid: true} + } + if performerJSON.Country != "" { + newPerformer.Country = sql.NullString{String: performerJSON.Country, Valid: true} + } + if performerJSON.EyeColor != "" { + newPerformer.EyeColor = sql.NullString{String: performerJSON.EyeColor, Valid: true} + } + if performerJSON.Height != "" { + newPerformer.Height = sql.NullString{String: performerJSON.Height, Valid: true} + } + if performerJSON.Measurements != "" { + newPerformer.Measurements = sql.NullString{String: performerJSON.Measurements, Valid: true} + } + if performerJSON.FakeTits != "" { + newPerformer.FakeTits = sql.NullString{String: performerJSON.FakeTits, Valid: true} + } + if performerJSON.CareerLength != "" { + newPerformer.CareerLength = sql.NullString{String: performerJSON.CareerLength, Valid: true} + } + if performerJSON.Tattoos != "" { + newPerformer.Tattoos = sql.NullString{String: performerJSON.Tattoos, Valid: true} + } + if performerJSON.Piercings != "" { + newPerformer.Piercings = sql.NullString{String: performerJSON.Piercings, Valid: true} + } + if performerJSON.Aliases != "" { + newPerformer.Aliases = sql.NullString{String: performerJSON.Aliases, Valid: true} + } + if performerJSON.Twitter != "" { + newPerformer.Twitter = sql.NullString{String: performerJSON.Twitter, Valid: true} + } + if performerJSON.Instagram != "" { + newPerformer.Instagram = sql.NullString{String: performerJSON.Instagram, Valid: true} + } + + return newPerformer +} 
diff --git a/pkg/performer/import_test.go b/pkg/performer/import_test.go new file mode 100644 index 000000000..d58f91265 --- /dev/null +++ b/pkg/performer/import_test.go @@ -0,0 +1,184 @@ +package performer + +import ( + "errors" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/models/modelstest" + "github.com/stashapp/stash/pkg/utils" + "github.com/stretchr/testify/assert" + + "testing" +) + +const invalidImage = "aW1hZ2VCeXRlcw&&" + +const ( + existingPerformerID = 100 + + existingPerformerName = "existingPerformerName" + performerNameErr = "performerNameErr" +) + +func TestImporterName(t *testing.T) { + i := Importer{ + Input: jsonschema.Performer{ + Name: performerName, + }, + } + + assert.Equal(t, performerName, i.Name()) +} + +func TestImporterPreImport(t *testing.T) { + i := Importer{ + Input: jsonschema.Performer{ + Name: performerName, + Image: invalidImage, + }, + } + + err := i.PreImport() + + assert.NotNil(t, err) + + i.Input = *createFullJSONPerformer(performerName, image) + + err = i.PreImport() + + assert.Nil(t, err) + expectedPerformer := *createFullPerformer(0, performerName) + expectedPerformer.Checksum = utils.MD5FromString(performerName) + assert.Equal(t, expectedPerformer, i.performer) +} + +func TestImporterPostImport(t *testing.T) { + readerWriter := &mocks.PerformerReaderWriter{} + + i := Importer{ + ReaderWriter: readerWriter, + imageData: imageBytes, + } + + updatePerformerImageErr := errors.New("UpdatePerformerImage error") + + readerWriter.On("UpdatePerformerImage", performerID, imageBytes).Return(nil).Once() + readerWriter.On("UpdatePerformerImage", errImageID, imageBytes).Return(updatePerformerImageErr).Once() + + err := i.PostImport(performerID) + assert.Nil(t, err) + + err = i.PostImport(errImageID) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestImporterFindExistingID(t 
*testing.T) { + readerWriter := &mocks.PerformerReaderWriter{} + + i := Importer{ + ReaderWriter: readerWriter, + Input: jsonschema.Performer{ + Name: performerName, + }, + } + + errFindByNames := errors.New("FindByNames error") + readerWriter.On("FindByNames", []string{performerName}, false).Return(nil, nil).Once() + readerWriter.On("FindByNames", []string{existingPerformerName}, false).Return([]*models.Performer{ + { + ID: existingPerformerID, + }, + }, nil).Once() + readerWriter.On("FindByNames", []string{performerNameErr}, false).Return(nil, errFindByNames).Once() + + id, err := i.FindExistingID() + assert.Nil(t, id) + assert.Nil(t, err) + + i.Input.Name = existingPerformerName + id, err = i.FindExistingID() + assert.Equal(t, existingPerformerID, *id) + assert.Nil(t, err) + + i.Input.Name = performerNameErr + id, err = i.FindExistingID() + assert.Nil(t, id) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestCreate(t *testing.T) { + readerWriter := &mocks.PerformerReaderWriter{} + + performer := models.Performer{ + Name: modelstest.NullString(performerName), + } + + performerErr := models.Performer{ + Name: modelstest.NullString(performerNameErr), + } + + i := Importer{ + ReaderWriter: readerWriter, + performer: performer, + } + + errCreate := errors.New("Create error") + readerWriter.On("Create", performer).Return(&models.Performer{ + ID: performerID, + }, nil).Once() + readerWriter.On("Create", performerErr).Return(nil, errCreate).Once() + + id, err := i.Create() + assert.Equal(t, performerID, *id) + assert.Nil(t, err) + + i.performer = performerErr + id, err = i.Create() + assert.Nil(t, id) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestUpdate(t *testing.T) { + readerWriter := &mocks.PerformerReaderWriter{} + + performer := models.Performer{ + Name: modelstest.NullString(performerName), + } + + performerErr := models.Performer{ + Name: modelstest.NullString(performerNameErr), + } + + i := Importer{ + 
ReaderWriter: readerWriter, + performer: performer, + } + + errUpdate := errors.New("Update error") + + // id needs to be set for the mock input + performer.ID = performerID + readerWriter.On("Update", performer).Return(nil, nil).Once() + + err := i.Update(performerID) + assert.Nil(t, err) + + i.performer = performerErr + + // need to set id separately + performerErr.ID = errImageID + readerWriter.On("Update", performerErr).Return(nil, errUpdate).Once() + + err = i.Update(errImageID) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} diff --git a/pkg/scene/import.go b/pkg/scene/import.go new file mode 100644 index 000000000..a843646c2 --- /dev/null +++ b/pkg/scene/import.go @@ -0,0 +1,483 @@ +package scene + +import ( + "database/sql" + "fmt" + "strconv" + "strings" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) + +type Importer struct { + ReaderWriter models.SceneReaderWriter + StudioWriter models.StudioReaderWriter + GalleryWriter models.GalleryReaderWriter + PerformerWriter models.PerformerReaderWriter + MovieWriter models.MovieReaderWriter + TagWriter models.TagReaderWriter + JoinWriter models.JoinReaderWriter + Input jsonschema.Scene + Path string + MissingRefBehaviour models.ImportMissingRefEnum + FileNamingAlgorithm models.HashAlgorithm + + ID int + scene models.Scene + gallery *models.Gallery + performers []*models.Performer + movies []models.MoviesScenes + tags []*models.Tag + coverImageData []byte +} + +func (i *Importer) PreImport() error { + i.scene = i.sceneJSONToScene(i.Input) + + if err := i.populateStudio(); err != nil { + return err + } + + if err := i.populateGallery(); err != nil { + return err + } + + if err := i.populatePerformers(); err != nil { + return err + } + + if err := i.populateTags(); err != nil { + return err + } + + if err := i.populateMovies(); err != nil { + return err + } + + var err error + if len(i.Input.Cover) > 0 { + _, 
i.coverImageData, err = utils.ProcessBase64Image(i.Input.Cover) + if err != nil { + return fmt.Errorf("invalid cover image: %s", err.Error()) + } + } + + return nil +} + +func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene { + newScene := models.Scene{ + Checksum: sql.NullString{String: sceneJSON.Checksum, Valid: sceneJSON.Checksum != ""}, + OSHash: sql.NullString{String: sceneJSON.OSHash, Valid: sceneJSON.OSHash != ""}, + Path: i.Path, + } + + if sceneJSON.Title != "" { + newScene.Title = sql.NullString{String: sceneJSON.Title, Valid: true} + } + if sceneJSON.Details != "" { + newScene.Details = sql.NullString{String: sceneJSON.Details, Valid: true} + } + if sceneJSON.URL != "" { + newScene.URL = sql.NullString{String: sceneJSON.URL, Valid: true} + } + if sceneJSON.Date != "" { + newScene.Date = models.SQLiteDate{String: sceneJSON.Date, Valid: true} + } + if sceneJSON.Rating != 0 { + newScene.Rating = sql.NullInt64{Int64: int64(sceneJSON.Rating), Valid: true} + } + + newScene.OCounter = sceneJSON.OCounter + newScene.CreatedAt = models.SQLiteTimestamp{Timestamp: sceneJSON.CreatedAt.GetTime()} + newScene.UpdatedAt = models.SQLiteTimestamp{Timestamp: sceneJSON.UpdatedAt.GetTime()} + + if sceneJSON.File != nil { + if sceneJSON.File.Size != "" { + newScene.Size = sql.NullString{String: sceneJSON.File.Size, Valid: true} + } + if sceneJSON.File.Duration != "" { + duration, _ := strconv.ParseFloat(sceneJSON.File.Duration, 64) + newScene.Duration = sql.NullFloat64{Float64: duration, Valid: true} + } + if sceneJSON.File.VideoCodec != "" { + newScene.VideoCodec = sql.NullString{String: sceneJSON.File.VideoCodec, Valid: true} + } + if sceneJSON.File.AudioCodec != "" { + newScene.AudioCodec = sql.NullString{String: sceneJSON.File.AudioCodec, Valid: true} + } + if sceneJSON.File.Format != "" { + newScene.Format = sql.NullString{String: sceneJSON.File.Format, Valid: true} + } + if sceneJSON.File.Width != 0 { + newScene.Width = sql.NullInt64{Int64: 
int64(sceneJSON.File.Width), Valid: true} + } + if sceneJSON.File.Height != 0 { + newScene.Height = sql.NullInt64{Int64: int64(sceneJSON.File.Height), Valid: true} + } + if sceneJSON.File.Framerate != "" { + framerate, _ := strconv.ParseFloat(sceneJSON.File.Framerate, 64) + newScene.Framerate = sql.NullFloat64{Float64: framerate, Valid: true} + } + if sceneJSON.File.Bitrate != 0 { + newScene.Bitrate = sql.NullInt64{Int64: int64(sceneJSON.File.Bitrate), Valid: true} + } + } + + return newScene +} + +func (i *Importer) populateStudio() error { + if i.Input.Studio != "" { + studio, err := i.StudioWriter.FindByName(i.Input.Studio, false) + if err != nil { + return fmt.Errorf("error finding studio by name: %s", err.Error()) + } + + if studio == nil { + if i.MissingRefBehaviour == models.ImportMissingRefEnumFail { + return fmt.Errorf("scene studio '%s' not found", i.Input.Studio) + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumIgnore { + return nil + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { + studioID, err := i.createStudio(i.Input.Studio) + if err != nil { + return err + } + i.scene.StudioID = sql.NullInt64{ + Int64: int64(studioID), + Valid: true, + } + } + } else { + i.scene.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true} + } + } + + return nil +} + +func (i *Importer) createStudio(name string) (int, error) { + newStudio := *models.NewStudio(name) + + created, err := i.StudioWriter.Create(newStudio) + if err != nil { + return 0, err + } + + return created.ID, nil +} + +func (i *Importer) populateGallery() error { + if i.Input.Gallery != "" { + gallery, err := i.GalleryWriter.FindByChecksum(i.Input.Gallery) + if err != nil { + return fmt.Errorf("error finding gallery: %s", err.Error()) + } + + if gallery == nil { + if i.MissingRefBehaviour == models.ImportMissingRefEnumFail { + return fmt.Errorf("scene gallery '%s' not found", i.Input.Studio) + } + + // we don't create galleries - just ignore + if 
i.MissingRefBehaviour == models.ImportMissingRefEnumIgnore || i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { + return nil + } + } else { + i.gallery = gallery + } + } + + return nil +} + +func (i *Importer) populatePerformers() error { + if len(i.Input.Performers) > 0 { + names := i.Input.Performers + performers, err := i.PerformerWriter.FindByNames(names, false) + if err != nil { + return err + } + + var pluckedNames []string + for _, performer := range performers { + if !performer.Name.Valid { + continue + } + pluckedNames = append(pluckedNames, performer.Name.String) + } + + missingPerformers := utils.StrFilter(names, func(name string) bool { + return !utils.StrInclude(pluckedNames, name) + }) + + if len(missingPerformers) > 0 { + if i.MissingRefBehaviour == models.ImportMissingRefEnumFail { + return fmt.Errorf("scene performers [%s] not found", strings.Join(missingPerformers, ", ")) + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { + createdPerformers, err := i.createPerformers(missingPerformers) + if err != nil { + return fmt.Errorf("error creating scene performers: %s", err.Error()) + } + + performers = append(performers, createdPerformers...) 
+ } + + // ignore if MissingRefBehaviour set to Ignore + } + + i.performers = performers + } + + return nil +} + +func (i *Importer) createPerformers(names []string) ([]*models.Performer, error) { + var ret []*models.Performer + for _, name := range names { + newPerformer := *models.NewPerformer(name) + + created, err := i.PerformerWriter.Create(newPerformer) + if err != nil { + return nil, err + } + + ret = append(ret, created) + } + + return ret, nil +} + +func (i *Importer) populateMovies() error { + if len(i.Input.Movies) > 0 { + for _, inputMovie := range i.Input.Movies { + movie, err := i.MovieWriter.FindByName(inputMovie.MovieName, false) + if err != nil { + return fmt.Errorf("error finding scene movie: %s", err.Error()) + } + + if movie == nil { + if i.MissingRefBehaviour == models.ImportMissingRefEnumFail { + return fmt.Errorf("scene movie [%s] not found", inputMovie.MovieName) + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { + movie, err = i.createMovie(inputMovie.MovieName) + if err != nil { + return fmt.Errorf("error creating scene movie: %s", err.Error()) + } + } + + // ignore if MissingRefBehaviour set to Ignore + if i.MissingRefBehaviour == models.ImportMissingRefEnumIgnore { + continue + } + } + + toAdd := models.MoviesScenes{ + MovieID: movie.ID, + } + + if inputMovie.SceneIndex != 0 { + toAdd.SceneIndex = sql.NullInt64{ + Int64: int64(inputMovie.SceneIndex), + Valid: true, + } + } + + i.movies = append(i.movies, toAdd) + } + } + + return nil +} + +func (i *Importer) createMovie(name string) (*models.Movie, error) { + newMovie := *models.NewMovie(name) + + created, err := i.MovieWriter.Create(newMovie) + if err != nil { + return nil, err + } + + return created, nil +} + +func (i *Importer) populateTags() error { + if len(i.Input.Tags) > 0 { + + tags, err := importTags(i.TagWriter, i.Input.Tags, i.MissingRefBehaviour) + if err != nil { + return err + } + + i.tags = tags + } + + return nil +} + +func (i *Importer) PostImport(id 
int) error { + if len(i.coverImageData) > 0 { + if err := i.ReaderWriter.UpdateSceneCover(id, i.coverImageData); err != nil { + return fmt.Errorf("error setting scene images: %s", err.Error()) + } + } + + if i.gallery != nil { + i.gallery.SceneID = sql.NullInt64{Int64: int64(id), Valid: true} + _, err := i.GalleryWriter.Update(*i.gallery) + if err != nil { + return fmt.Errorf("failed to update gallery: %s", err.Error()) + } + } + + if len(i.performers) > 0 { + var performerJoins []models.PerformersScenes + for _, performer := range i.performers { + join := models.PerformersScenes{ + PerformerID: performer.ID, + SceneID: id, + } + performerJoins = append(performerJoins, join) + } + if err := i.JoinWriter.UpdatePerformersScenes(id, performerJoins); err != nil { + return fmt.Errorf("failed to associate performers: %s", err.Error()) + } + } + + if len(i.movies) > 0 { + for index := range i.movies { + i.movies[index].SceneID = id + } + if err := i.JoinWriter.UpdateMoviesScenes(id, i.movies); err != nil { + return fmt.Errorf("failed to associate movies: %s", err.Error()) + } + } + + if len(i.tags) > 0 { + var tagJoins []models.ScenesTags + for _, tag := range i.tags { + join := models.ScenesTags{ + SceneID: id, + TagID: tag.ID, + } + tagJoins = append(tagJoins, join) + } + if err := i.JoinWriter.UpdateScenesTags(id, tagJoins); err != nil { + return fmt.Errorf("failed to associate tags: %s", err.Error()) + } + } + + return nil +} + +func (i *Importer) Name() string { + return i.Path +} + +func (i *Importer) FindExistingID() (*int, error) { + var existing *models.Scene + var err error + if i.FileNamingAlgorithm == models.HashAlgorithmMd5 { + existing, err = i.ReaderWriter.FindByChecksum(i.Input.Checksum) + } else if i.FileNamingAlgorithm == models.HashAlgorithmOshash { + existing, err = i.ReaderWriter.FindByOSHash(i.Input.OSHash) + } else { + panic("unknown file naming algorithm") + } + + if err != nil { + return nil, err + } + + if existing != nil { + id := existing.ID + 
return &id, nil + } + + return nil, nil +} + +func (i *Importer) Create() (*int, error) { + created, err := i.ReaderWriter.Create(i.scene) + if err != nil { + return nil, fmt.Errorf("error creating scene: %s", err.Error()) + } + + id := created.ID + i.ID = id + return &id, nil +} + +func (i *Importer) Update(id int) error { + scene := i.scene + scene.ID = id + i.ID = id + _, err := i.ReaderWriter.UpdateFull(scene) + if err != nil { + return fmt.Errorf("error updating existing scene: %s", err.Error()) + } + + return nil +} + +func importTags(tagWriter models.TagReaderWriter, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { + tags, err := tagWriter.FindByNames(names, false) + if err != nil { + return nil, err + } + + var pluckedNames []string + for _, tag := range tags { + pluckedNames = append(pluckedNames, tag.Name) + } + + missingTags := utils.StrFilter(names, func(name string) bool { + return !utils.StrInclude(pluckedNames, name) + }) + + if len(missingTags) > 0 { + if missingRefBehaviour == models.ImportMissingRefEnumFail { + return nil, fmt.Errorf("tags [%s] not found", strings.Join(missingTags, ", ")) + } + + if missingRefBehaviour == models.ImportMissingRefEnumCreate { + createdTags, err := createTags(tagWriter, missingTags) + if err != nil { + return nil, fmt.Errorf("error creating tags: %s", err.Error()) + } + + tags = append(tags, createdTags...) 
+ } + + // ignore if MissingRefBehaviour set to Ignore + } + + return tags, nil +} + +func createTags(tagWriter models.TagWriter, names []string) ([]*models.Tag, error) { + var ret []*models.Tag + for _, name := range names { + newTag := *models.NewTag(name) + + created, err := tagWriter.Create(newTag) + if err != nil { + return nil, err + } + + ret = append(ret, created) + } + + return ret, nil +} diff --git a/pkg/scene/import_test.go b/pkg/scene/import_test.go new file mode 100644 index 000000000..e43e0ff43 --- /dev/null +++ b/pkg/scene/import_test.go @@ -0,0 +1,761 @@ +package scene + +import ( + "errors" + "testing" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/models/modelstest" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +const invalidImage = "aW1hZ2VCeXRlcw&&" + +const ( + path = "path" + + sceneNameErr = "sceneNameErr" + existingSceneName = "existingSceneName" + + existingSceneID = 100 + existingStudioID = 101 + existingGalleryID = 102 + existingPerformerID = 103 + existingMovieID = 104 + existingTagID = 105 + + existingStudioName = "existingStudioName" + existingStudioErr = "existingStudioErr" + missingStudioName = "missingStudioName" + + existingGalleryChecksum = "existingGalleryChecksum" + existingGalleryErr = "existingGalleryErr" + missingGalleryChecksum = "missingGalleryChecksum" + + existingPerformerName = "existingPerformerName" + existingPerformerErr = "existingPerformerErr" + missingPerformerName = "missingPerformerName" + + existingMovieName = "existingMovieName" + existingMovieErr = "existingMovieErr" + missingMovieName = "missingMovieName" + + existingTagName = "existingTagName" + existingTagErr = "existingTagErr" + missingTagName = "missingTagName" + + errPerformersID = 200 + + missingChecksum = "missingChecksum" + missingOSHash = "missingOSHash" + errChecksum = "errChecksum" + 
errOSHash = "errOSHash" +) + +func TestImporterName(t *testing.T) { + i := Importer{ + Path: path, + Input: jsonschema.Scene{}, + } + + assert.Equal(t, path, i.Name()) +} + +func TestImporterPreImport(t *testing.T) { + i := Importer{ + Path: path, + Input: jsonschema.Scene{ + Cover: invalidImage, + }, + } + + err := i.PreImport() + assert.NotNil(t, err) + + i.Input.Cover = image + + err = i.PreImport() + assert.Nil(t, err) +} + +func TestImporterPreImportWithStudio(t *testing.T) { + studioReaderWriter := &mocks.StudioReaderWriter{} + + i := Importer{ + StudioWriter: studioReaderWriter, + Path: path, + Input: jsonschema.Scene{ + Studio: existingStudioName, + }, + } + + studioReaderWriter.On("FindByName", existingStudioName, false).Return(&models.Studio{ + ID: existingStudioID, + }, nil).Once() + studioReaderWriter.On("FindByName", existingStudioErr, false).Return(nil, errors.New("FindByName error")).Once() + + err := i.PreImport() + assert.Nil(t, err) + assert.Equal(t, int64(existingStudioID), i.scene.StudioID.Int64) + + i.Input.Studio = existingStudioErr + err = i.PreImport() + assert.NotNil(t, err) + + studioReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingStudio(t *testing.T) { + studioReaderWriter := &mocks.StudioReaderWriter{} + + i := Importer{ + Path: path, + StudioWriter: studioReaderWriter, + Input: jsonschema.Scene{ + Studio: missingStudioName, + }, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + } + + studioReaderWriter.On("FindByName", missingStudioName, false).Return(nil, nil).Times(3) + studioReaderWriter.On("Create", mock.AnythingOfType("models.Studio")).Return(&models.Studio{ + ID: existingStudioID, + }, nil) + + err := i.PreImport() + assert.NotNil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore + err = i.PreImport() + assert.Nil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumCreate + err = i.PreImport() + assert.Nil(t, err) + assert.Equal(t, int64(existingStudioID), 
i.scene.StudioID.Int64) + + studioReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) { + studioReaderWriter := &mocks.StudioReaderWriter{} + + i := Importer{ + StudioWriter: studioReaderWriter, + Path: path, + Input: jsonschema.Scene{ + Studio: missingStudioName, + }, + MissingRefBehaviour: models.ImportMissingRefEnumCreate, + } + + studioReaderWriter.On("FindByName", missingStudioName, false).Return(nil, nil).Once() + studioReaderWriter.On("Create", mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error")) + + err := i.PreImport() + assert.NotNil(t, err) +} + +func TestImporterPreImportWithGallery(t *testing.T) { + galleryReaderWriter := &mocks.GalleryReaderWriter{} + + i := Importer{ + GalleryWriter: galleryReaderWriter, + Path: path, + Input: jsonschema.Scene{ + Gallery: existingGalleryChecksum, + }, + } + + galleryReaderWriter.On("FindByChecksum", existingGalleryChecksum).Return(&models.Gallery{ + ID: existingGalleryID, + }, nil).Once() + galleryReaderWriter.On("FindByChecksum", existingGalleryErr).Return(nil, errors.New("FindByChecksum error")).Once() + + err := i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingGalleryID, i.gallery.ID) + + i.Input.Gallery = existingGalleryErr + err = i.PreImport() + assert.NotNil(t, err) + + galleryReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingGallery(t *testing.T) { + galleryReaderWriter := &mocks.GalleryReaderWriter{} + + i := Importer{ + Path: path, + GalleryWriter: galleryReaderWriter, + Input: jsonschema.Scene{ + Gallery: missingGalleryChecksum, + }, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + } + + galleryReaderWriter.On("FindByChecksum", missingGalleryChecksum).Return(nil, nil).Times(3) + + err := i.PreImport() + assert.NotNil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore + err = i.PreImport() + assert.Nil(t, err) + assert.Nil(t, i.gallery) + + 
i.MissingRefBehaviour = models.ImportMissingRefEnumCreate + err = i.PreImport() + assert.Nil(t, err) + assert.Nil(t, i.gallery) + + galleryReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithPerformer(t *testing.T) { + performerReaderWriter := &mocks.PerformerReaderWriter{} + + i := Importer{ + PerformerWriter: performerReaderWriter, + Path: path, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + Input: jsonschema.Scene{ + Performers: []string{ + existingPerformerName, + }, + }, + } + + performerReaderWriter.On("FindByNames", []string{existingPerformerName}, false).Return([]*models.Performer{ + { + ID: existingPerformerID, + Name: modelstest.NullString(existingPerformerName), + }, + }, nil).Once() + performerReaderWriter.On("FindByNames", []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once() + + err := i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingPerformerID, i.performers[0].ID) + + i.Input.Performers = []string{existingPerformerErr} + err = i.PreImport() + assert.NotNil(t, err) + + performerReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingPerformer(t *testing.T) { + performerReaderWriter := &mocks.PerformerReaderWriter{} + + i := Importer{ + Path: path, + PerformerWriter: performerReaderWriter, + Input: jsonschema.Scene{ + Performers: []string{ + missingPerformerName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + } + + performerReaderWriter.On("FindByNames", []string{missingPerformerName}, false).Return(nil, nil).Times(3) + performerReaderWriter.On("Create", mock.AnythingOfType("models.Performer")).Return(&models.Performer{ + ID: existingPerformerID, + }, nil) + + err := i.PreImport() + assert.NotNil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore + err = i.PreImport() + assert.Nil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumCreate + err = i.PreImport() + assert.Nil(t, err) + assert.Equal(t, 
existingPerformerID, i.performers[0].ID) + + performerReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) { + performerReaderWriter := &mocks.PerformerReaderWriter{} + + i := Importer{ + PerformerWriter: performerReaderWriter, + Path: path, + Input: jsonschema.Scene{ + Performers: []string{ + missingPerformerName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumCreate, + } + + performerReaderWriter.On("FindByNames", []string{missingPerformerName}, false).Return(nil, nil).Once() + performerReaderWriter.On("Create", mock.AnythingOfType("models.Performer")).Return(nil, errors.New("Create error")) + + err := i.PreImport() + assert.NotNil(t, err) +} + +func TestImporterPreImportWithMovie(t *testing.T) { + movieReaderWriter := &mocks.MovieReaderWriter{} + + i := Importer{ + MovieWriter: movieReaderWriter, + Path: path, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + Input: jsonschema.Scene{ + Movies: []jsonschema.SceneMovie{ + { + MovieName: existingMovieName, + SceneIndex: 1, + }, + }, + }, + } + + movieReaderWriter.On("FindByName", existingMovieName, false).Return(&models.Movie{ + ID: existingMovieID, + Name: modelstest.NullString(existingMovieName), + }, nil).Once() + movieReaderWriter.On("FindByName", existingMovieErr, false).Return(nil, errors.New("FindByName error")).Once() + + err := i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingMovieID, i.movies[0].MovieID) + + i.Input.Movies[0].MovieName = existingMovieErr + err = i.PreImport() + assert.NotNil(t, err) + + movieReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingMovie(t *testing.T) { + movieReaderWriter := &mocks.MovieReaderWriter{} + + i := Importer{ + Path: path, + MovieWriter: movieReaderWriter, + Input: jsonschema.Scene{ + Movies: []jsonschema.SceneMovie{ + { + MovieName: missingMovieName, + }, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + } + + 
movieReaderWriter.On("FindByName", missingMovieName, false).Return(nil, nil).Times(3) + movieReaderWriter.On("Create", mock.AnythingOfType("models.Movie")).Return(&models.Movie{ + ID: existingMovieID, + }, nil) + + err := i.PreImport() + assert.NotNil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore + err = i.PreImport() + assert.Nil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumCreate + err = i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingMovieID, i.movies[0].MovieID) + + movieReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingMovieCreateErr(t *testing.T) { + movieReaderWriter := &mocks.MovieReaderWriter{} + + i := Importer{ + MovieWriter: movieReaderWriter, + Path: path, + Input: jsonschema.Scene{ + Movies: []jsonschema.SceneMovie{ + { + MovieName: missingMovieName, + }, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumCreate, + } + + movieReaderWriter.On("FindByName", missingMovieName, false).Return(nil, nil).Once() + movieReaderWriter.On("Create", mock.AnythingOfType("models.Movie")).Return(nil, errors.New("Create error")) + + err := i.PreImport() + assert.NotNil(t, err) +} + +func TestImporterPreImportWithTag(t *testing.T) { + tagReaderWriter := &mocks.TagReaderWriter{} + + i := Importer{ + TagWriter: tagReaderWriter, + Path: path, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + Input: jsonschema.Scene{ + Tags: []string{ + existingTagName, + }, + }, + } + + tagReaderWriter.On("FindByNames", []string{existingTagName}, false).Return([]*models.Tag{ + { + ID: existingTagID, + Name: existingTagName, + }, + }, nil).Once() + tagReaderWriter.On("FindByNames", []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once() + + err := i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingTagID, i.tags[0].ID) + + i.Input.Tags = []string{existingTagErr} + err = i.PreImport() + assert.NotNil(t, err) + + tagReaderWriter.AssertExpectations(t) +} + 
+func TestImporterPreImportWithMissingTag(t *testing.T) { + tagReaderWriter := &mocks.TagReaderWriter{} + + i := Importer{ + Path: path, + TagWriter: tagReaderWriter, + Input: jsonschema.Scene{ + Tags: []string{ + missingTagName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + } + + tagReaderWriter.On("FindByNames", []string{missingTagName}, false).Return(nil, nil).Times(3) + tagReaderWriter.On("Create", mock.AnythingOfType("models.Tag")).Return(&models.Tag{ + ID: existingTagID, + }, nil) + + err := i.PreImport() + assert.NotNil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore + err = i.PreImport() + assert.Nil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumCreate + err = i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingTagID, i.tags[0].ID) + + tagReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { + tagReaderWriter := &mocks.TagReaderWriter{} + + i := Importer{ + TagWriter: tagReaderWriter, + Path: path, + Input: jsonschema.Scene{ + Tags: []string{ + missingTagName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumCreate, + } + + tagReaderWriter.On("FindByNames", []string{missingTagName}, false).Return(nil, nil).Once() + tagReaderWriter.On("Create", mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error")) + + err := i.PreImport() + assert.NotNil(t, err) +} + +func TestImporterPostImport(t *testing.T) { + readerWriter := &mocks.SceneReaderWriter{} + + i := Importer{ + ReaderWriter: readerWriter, + coverImageData: imageBytes, + } + + updateSceneImageErr := errors.New("UpdateSceneCover error") + + readerWriter.On("UpdateSceneCover", sceneID, imageBytes).Return(nil).Once() + readerWriter.On("UpdateSceneCover", errImageID, imageBytes).Return(updateSceneImageErr).Once() + + err := i.PostImport(sceneID) + assert.Nil(t, err) + + err = i.PostImport(errImageID) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) 
+} + +func TestImporterPostImportUpdateGallery(t *testing.T) { + galleryReaderWriter := &mocks.GalleryReaderWriter{} + + i := Importer{ + GalleryWriter: galleryReaderWriter, + gallery: &models.Gallery{ + ID: existingGalleryID, + }, + } + + updateErr := errors.New("Update error") + + updateArg := *i.gallery + updateArg.SceneID = modelstest.NullInt64(sceneID) + + galleryReaderWriter.On("Update", updateArg).Return(nil, nil).Once() + + updateArg.SceneID = modelstest.NullInt64(errGalleryID) + galleryReaderWriter.On("Update", updateArg).Return(nil, updateErr).Once() + + err := i.PostImport(sceneID) + assert.Nil(t, err) + + err = i.PostImport(errGalleryID) + assert.NotNil(t, err) + + galleryReaderWriter.AssertExpectations(t) +} + +func TestImporterPostImportUpdatePerformers(t *testing.T) { + joinReaderWriter := &mocks.JoinReaderWriter{} + + i := Importer{ + JoinWriter: joinReaderWriter, + performers: []*models.Performer{ + { + ID: existingPerformerID, + }, + }, + } + + updateErr := errors.New("UpdatePerformersScenes error") + + joinReaderWriter.On("UpdatePerformersScenes", sceneID, []models.PerformersScenes{ + { + PerformerID: existingPerformerID, + SceneID: sceneID, + }, + }).Return(nil).Once() + joinReaderWriter.On("UpdatePerformersScenes", errPerformersID, mock.AnythingOfType("[]models.PerformersScenes")).Return(updateErr).Once() + + err := i.PostImport(sceneID) + assert.Nil(t, err) + + err = i.PostImport(errPerformersID) + assert.NotNil(t, err) + + joinReaderWriter.AssertExpectations(t) +} + +func TestImporterPostImportUpdateMovies(t *testing.T) { + joinReaderWriter := &mocks.JoinReaderWriter{} + + i := Importer{ + JoinWriter: joinReaderWriter, + movies: []models.MoviesScenes{ + { + MovieID: existingMovieID, + }, + }, + } + + updateErr := errors.New("UpdateMoviesScenes error") + + joinReaderWriter.On("UpdateMoviesScenes", sceneID, []models.MoviesScenes{ + { + MovieID: existingMovieID, + SceneID: sceneID, + }, + }).Return(nil).Once() + 
joinReaderWriter.On("UpdateMoviesScenes", errMoviesID, mock.AnythingOfType("[]models.MoviesScenes")).Return(updateErr).Once() + + err := i.PostImport(sceneID) + assert.Nil(t, err) + + err = i.PostImport(errMoviesID) + assert.NotNil(t, err) + + joinReaderWriter.AssertExpectations(t) +} + +func TestImporterPostImportUpdateTags(t *testing.T) { + joinReaderWriter := &mocks.JoinReaderWriter{} + + i := Importer{ + JoinWriter: joinReaderWriter, + tags: []*models.Tag{ + { + ID: existingTagID, + }, + }, + } + + updateErr := errors.New("UpdateScenesTags error") + + joinReaderWriter.On("UpdateScenesTags", sceneID, []models.ScenesTags{ + { + TagID: existingTagID, + SceneID: sceneID, + }, + }).Return(nil).Once() + joinReaderWriter.On("UpdateScenesTags", errTagsID, mock.AnythingOfType("[]models.ScenesTags")).Return(updateErr).Once() + + err := i.PostImport(sceneID) + assert.Nil(t, err) + + err = i.PostImport(errTagsID) + assert.NotNil(t, err) + + joinReaderWriter.AssertExpectations(t) +} + +func TestImporterFindExistingID(t *testing.T) { + readerWriter := &mocks.SceneReaderWriter{} + + i := Importer{ + ReaderWriter: readerWriter, + Path: path, + Input: jsonschema.Scene{ + Checksum: missingChecksum, + OSHash: missingOSHash, + }, + FileNamingAlgorithm: models.HashAlgorithmMd5, + } + + expectedErr := errors.New("FindBy* error") + readerWriter.On("FindByChecksum", missingChecksum).Return(nil, nil).Once() + readerWriter.On("FindByChecksum", checksum).Return(&models.Scene{ + ID: existingSceneID, + }, nil).Once() + readerWriter.On("FindByChecksum", errChecksum).Return(nil, expectedErr).Once() + + readerWriter.On("FindByOSHash", missingOSHash).Return(nil, nil).Once() + readerWriter.On("FindByOSHash", oshash).Return(&models.Scene{ + ID: existingSceneID, + }, nil).Once() + readerWriter.On("FindByOSHash", errOSHash).Return(nil, expectedErr).Once() + + id, err := i.FindExistingID() + assert.Nil(t, id) + assert.Nil(t, err) + + i.Input.Checksum = checksum + id, err = i.FindExistingID() + 
assert.Equal(t, existingSceneID, *id) + assert.Nil(t, err) + + i.Input.Checksum = errChecksum + id, err = i.FindExistingID() + assert.Nil(t, id) + assert.NotNil(t, err) + + i.FileNamingAlgorithm = models.HashAlgorithmOshash + id, err = i.FindExistingID() + assert.Nil(t, id) + assert.Nil(t, err) + + i.Input.OSHash = oshash + id, err = i.FindExistingID() + assert.Equal(t, existingSceneID, *id) + assert.Nil(t, err) + + i.Input.OSHash = errOSHash + id, err = i.FindExistingID() + assert.Nil(t, id) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestCreate(t *testing.T) { + readerWriter := &mocks.SceneReaderWriter{} + + scene := models.Scene{ + Title: modelstest.NullString(title), + } + + sceneErr := models.Scene{ + Title: modelstest.NullString(sceneNameErr), + } + + i := Importer{ + ReaderWriter: readerWriter, + scene: scene, + } + + errCreate := errors.New("Create error") + readerWriter.On("Create", scene).Return(&models.Scene{ + ID: sceneID, + }, nil).Once() + readerWriter.On("Create", sceneErr).Return(nil, errCreate).Once() + + id, err := i.Create() + assert.Equal(t, sceneID, *id) + assert.Nil(t, err) + assert.Equal(t, sceneID, i.ID) + + i.scene = sceneErr + id, err = i.Create() + assert.Nil(t, id) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestUpdate(t *testing.T) { + readerWriter := &mocks.SceneReaderWriter{} + + scene := models.Scene{ + Title: modelstest.NullString(title), + } + + sceneErr := models.Scene{ + Title: modelstest.NullString(sceneNameErr), + } + + i := Importer{ + ReaderWriter: readerWriter, + scene: scene, + } + + errUpdate := errors.New("Update error") + + // id needs to be set for the mock input + scene.ID = sceneID + readerWriter.On("UpdateFull", scene).Return(nil, nil).Once() + + err := i.Update(sceneID) + assert.Nil(t, err) + assert.Equal(t, sceneID, i.ID) + + i.scene = sceneErr + + // need to set id separately + sceneErr.ID = errImageID + readerWriter.On("UpdateFull", sceneErr).Return(nil, 
errUpdate).Once() + + err = i.Update(errImageID) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} diff --git a/pkg/scene/marker_import.go b/pkg/scene/marker_import.go new file mode 100644 index 000000000..9a559b384 --- /dev/null +++ b/pkg/scene/marker_import.go @@ -0,0 +1,125 @@ +package scene + +import ( + "database/sql" + "fmt" + "strconv" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" +) + +type MarkerImporter struct { + SceneID int + ReaderWriter models.SceneMarkerReaderWriter + TagWriter models.TagReaderWriter + JoinWriter models.JoinReaderWriter + Input jsonschema.SceneMarker + MissingRefBehaviour models.ImportMissingRefEnum + + tags []*models.Tag + marker models.SceneMarker +} + +func (i *MarkerImporter) PreImport() error { + seconds, _ := strconv.ParseFloat(i.Input.Seconds, 64) + i.marker = models.SceneMarker{ + Title: i.Input.Title, + Seconds: seconds, + SceneID: sql.NullInt64{Int64: int64(i.SceneID), Valid: true}, + CreatedAt: models.SQLiteTimestamp{Timestamp: i.Input.CreatedAt.GetTime()}, + UpdatedAt: models.SQLiteTimestamp{Timestamp: i.Input.UpdatedAt.GetTime()}, + } + + if err := i.populateTags(); err != nil { + return err + } + + return nil +} + +func (i *MarkerImporter) populateTags() error { + // primary tag cannot be ignored + mrb := i.MissingRefBehaviour + if mrb == models.ImportMissingRefEnumIgnore { + mrb = models.ImportMissingRefEnumFail + } + + primaryTag, err := importTags(i.TagWriter, []string{i.Input.PrimaryTag}, mrb) + if err != nil { + return err + } + + i.marker.PrimaryTagID = primaryTag[0].ID + + if len(i.Input.Tags) > 0 { + tags, err := importTags(i.TagWriter, i.Input.Tags, i.MissingRefBehaviour) + if err != nil { + return err + } + + i.tags = tags + } + + return nil +} + +func (i *MarkerImporter) PostImport(id int) error { + if len(i.tags) > 0 { + var tagJoins []models.SceneMarkersTags + for _, tag := range i.tags { + join := models.SceneMarkersTags{ + SceneMarkerID: 
id, + TagID: tag.ID, + } + tagJoins = append(tagJoins, join) + } + if err := i.JoinWriter.UpdateSceneMarkersTags(id, tagJoins); err != nil { + return fmt.Errorf("failed to associate tags: %s", err.Error()) + } + } + + return nil +} + +func (i *MarkerImporter) Name() string { + return fmt.Sprintf("%s (%s)", i.Input.Title, i.Input.Seconds) +} + +func (i *MarkerImporter) FindExistingID() (*int, error) { + existingMarkers, err := i.ReaderWriter.FindBySceneID(i.SceneID) + + if err != nil { + return nil, err + } + + for _, m := range existingMarkers { + if m.Seconds == i.marker.Seconds { + id := m.ID + return &id, nil + } + } + + return nil, nil +} + +func (i *MarkerImporter) Create() (*int, error) { + created, err := i.ReaderWriter.Create(i.marker) + if err != nil { + return nil, fmt.Errorf("error creating marker: %s", err.Error()) + } + + id := created.ID + return &id, nil +} + +func (i *MarkerImporter) Update(id int) error { + marker := i.marker + marker.ID = id + _, err := i.ReaderWriter.Update(marker) + if err != nil { + return fmt.Errorf("error updating existing marker: %s", err.Error()) + } + + return nil +} diff --git a/pkg/scene/marker_import_test.go b/pkg/scene/marker_import_test.go new file mode 100644 index 000000000..23d0d7cf6 --- /dev/null +++ b/pkg/scene/marker_import_test.go @@ -0,0 +1,210 @@ +package scene + +import ( + "errors" + "testing" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +const ( + seconds = "5" + secondsFloat = 5.0 + errSceneID = 999 +) + +func TestMarkerImporterName(t *testing.T) { + i := MarkerImporter{ + Input: jsonschema.SceneMarker{ + Title: title, + Seconds: seconds, + }, + } + + assert.Equal(t, title+" (5)", i.Name()) +} + +func TestMarkerImporterPreImportWithTag(t *testing.T) { + tagReaderWriter := &mocks.TagReaderWriter{} + + i := MarkerImporter{ + 
TagWriter: tagReaderWriter, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + Input: jsonschema.SceneMarker{ + PrimaryTag: existingTagName, + }, + } + + tagReaderWriter.On("FindByNames", []string{existingTagName}, false).Return([]*models.Tag{ + { + ID: existingTagID, + Name: existingTagName, + }, + }, nil).Times(4) + tagReaderWriter.On("FindByNames", []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Times(2) + + err := i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingTagID, i.marker.PrimaryTagID) + + i.Input.PrimaryTag = existingTagErr + err = i.PreImport() + assert.NotNil(t, err) + + i.Input.PrimaryTag = existingTagName + i.Input.Tags = []string{ + existingTagName, + } + err = i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingTagID, i.tags[0].ID) + + i.Input.Tags[0] = existingTagErr + err = i.PreImport() + assert.NotNil(t, err) + + tagReaderWriter.AssertExpectations(t) +} + +func TestMarkerImporterPostImportUpdateTags(t *testing.T) { + joinReaderWriter := &mocks.JoinReaderWriter{} + + i := MarkerImporter{ + JoinWriter: joinReaderWriter, + tags: []*models.Tag{ + { + ID: existingTagID, + }, + }, + } + + updateErr := errors.New("UpdateSceneMarkersTags error") + + joinReaderWriter.On("UpdateSceneMarkersTags", sceneID, []models.SceneMarkersTags{ + { + TagID: existingTagID, + SceneMarkerID: sceneID, + }, + }).Return(nil).Once() + joinReaderWriter.On("UpdateSceneMarkersTags", errTagsID, mock.AnythingOfType("[]models.SceneMarkersTags")).Return(updateErr).Once() + + err := i.PostImport(sceneID) + assert.Nil(t, err) + + err = i.PostImport(errTagsID) + assert.NotNil(t, err) + + joinReaderWriter.AssertExpectations(t) +} + +func TestMarkerImporterFindExistingID(t *testing.T) { + readerWriter := &mocks.SceneMarkerReaderWriter{} + + i := MarkerImporter{ + ReaderWriter: readerWriter, + SceneID: sceneID, + marker: models.SceneMarker{ + Seconds: secondsFloat, + }, + } + + expectedErr := errors.New("FindBy* error") + 
readerWriter.On("FindBySceneID", sceneID).Return([]*models.SceneMarker{ + { + ID: existingSceneID, + Seconds: secondsFloat, + }, + }, nil).Times(2) + readerWriter.On("FindBySceneID", errSceneID).Return(nil, expectedErr).Once() + + id, err := i.FindExistingID() + assert.Equal(t, existingSceneID, *id) + assert.Nil(t, err) + + i.marker.Seconds++ + id, err = i.FindExistingID() + assert.Nil(t, id) + assert.Nil(t, err) + + i.SceneID = errSceneID + id, err = i.FindExistingID() + assert.Nil(t, id) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestMarkerImporterCreate(t *testing.T) { + readerWriter := &mocks.SceneMarkerReaderWriter{} + + scene := models.SceneMarker{ + Title: title, + } + + sceneErr := models.SceneMarker{ + Title: sceneNameErr, + } + + i := MarkerImporter{ + ReaderWriter: readerWriter, + marker: scene, + } + + errCreate := errors.New("Create error") + readerWriter.On("Create", scene).Return(&models.SceneMarker{ + ID: sceneID, + }, nil).Once() + readerWriter.On("Create", sceneErr).Return(nil, errCreate).Once() + + id, err := i.Create() + assert.Equal(t, sceneID, *id) + assert.Nil(t, err) + + i.marker = sceneErr + id, err = i.Create() + assert.Nil(t, id) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestMarkerImporterUpdate(t *testing.T) { + readerWriter := &mocks.SceneMarkerReaderWriter{} + + scene := models.SceneMarker{ + Title: title, + } + + sceneErr := models.SceneMarker{ + Title: sceneNameErr, + } + + i := MarkerImporter{ + ReaderWriter: readerWriter, + marker: scene, + } + + errUpdate := errors.New("Update error") + + // id needs to be set for the mock input + scene.ID = sceneID + readerWriter.On("Update", scene).Return(nil, nil).Once() + + err := i.Update(sceneID) + assert.Nil(t, err) + + i.marker = sceneErr + + // need to set id separately + sceneErr.ID = errImageID + readerWriter.On("Update", sceneErr).Return(nil, errUpdate).Once() + + err = i.Update(errImageID) + assert.NotNil(t, err) + + 
readerWriter.AssertExpectations(t) +} diff --git a/pkg/studio/import.go b/pkg/studio/import.go new file mode 100644 index 000000000..64924f475 --- /dev/null +++ b/pkg/studio/import.go @@ -0,0 +1,143 @@ +package studio + +import ( + "database/sql" + "errors" + "fmt" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) + +var ErrParentStudioNotExist = errors.New("parent studio does not exist") + +type Importer struct { + ReaderWriter models.StudioReaderWriter + Input jsonschema.Studio + MissingRefBehaviour models.ImportMissingRefEnum + + studio models.Studio + imageData []byte +} + +func (i *Importer) PreImport() error { + checksum := utils.MD5FromString(i.Input.Name) + + i.studio = models.Studio{ + Checksum: checksum, + Name: sql.NullString{String: i.Input.Name, Valid: true}, + URL: sql.NullString{String: i.Input.URL, Valid: true}, + CreatedAt: models.SQLiteTimestamp{Timestamp: i.Input.CreatedAt.GetTime()}, + UpdatedAt: models.SQLiteTimestamp{Timestamp: i.Input.UpdatedAt.GetTime()}, + } + + if err := i.populateParentStudio(); err != nil { + return err + } + + var err error + if len(i.Input.Image) > 0 { + _, i.imageData, err = utils.ProcessBase64Image(i.Input.Image) + if err != nil { + return fmt.Errorf("invalid image: %s", err.Error()) + } + } + + return nil +} + +func (i *Importer) populateParentStudio() error { + if i.Input.ParentStudio != "" { + studio, err := i.ReaderWriter.FindByName(i.Input.ParentStudio, false) + if err != nil { + return fmt.Errorf("error finding studio by name: %s", err.Error()) + } + + if studio == nil { + if i.MissingRefBehaviour == models.ImportMissingRefEnumFail { + return ErrParentStudioNotExist + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumIgnore { + return nil + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { + parentID, err := i.createParentStudio(i.Input.ParentStudio) + if err != nil { + return err + } + 
i.studio.ParentID = sql.NullInt64{ + Int64: int64(parentID), + Valid: true, + } + } + } else { + i.studio.ParentID = sql.NullInt64{Int64: int64(studio.ID), Valid: true} + } + } + + return nil +} + +func (i *Importer) createParentStudio(name string) (int, error) { + newStudio := *models.NewStudio(name) + + created, err := i.ReaderWriter.Create(newStudio) + if err != nil { + return 0, err + } + + return created.ID, nil +} + +func (i *Importer) PostImport(id int) error { + if len(i.imageData) > 0 { + if err := i.ReaderWriter.UpdateStudioImage(id, i.imageData); err != nil { + return fmt.Errorf("error setting studio image: %s", err.Error()) + } + } + + return nil +} + +func (i *Importer) Name() string { + return i.Input.Name +} + +func (i *Importer) FindExistingID() (*int, error) { + const nocase = false + existing, err := i.ReaderWriter.FindByName(i.Name(), nocase) + if err != nil { + return nil, err + } + + if existing != nil { + id := existing.ID + return &id, nil + } + + return nil, nil +} + +func (i *Importer) Create() (*int, error) { + created, err := i.ReaderWriter.Create(i.studio) + if err != nil { + return nil, fmt.Errorf("error creating studio: %s", err.Error()) + } + + id := created.ID + return &id, nil +} + +func (i *Importer) Update(id int) error { + studio := i.studio + studio.ID = id + _, err := i.ReaderWriter.UpdateFull(studio) + if err != nil { + return fmt.Errorf("error updating existing studio: %s", err.Error()) + } + + return nil +} diff --git a/pkg/studio/import_test.go b/pkg/studio/import_test.go new file mode 100644 index 000000000..bc71c8b10 --- /dev/null +++ b/pkg/studio/import_test.go @@ -0,0 +1,263 @@ +package studio + +import ( + "errors" + "testing" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/models/modelstest" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +const invalidImage = 
"aW1hZ2VCeXRlcw&&"
+
+const (
+	studioNameErr      = "studioNameErr"
+	existingStudioName = "existingStudioName"
+
+	existingStudioID = 100
+
+	existingParentStudioName = "existingParentStudioName"
+	existingParentStudioErr  = "existingParentStudioErr"
+	missingParentStudioName  = "missingParentStudioName"
+)
+
+func TestImporterName(t *testing.T) {
+	i := Importer{
+		Input: jsonschema.Studio{
+			Name: studioName,
+		},
+	}
+
+	assert.Equal(t, studioName, i.Name())
+}
+
+func TestImporterPreImport(t *testing.T) {
+	i := Importer{
+		Input: jsonschema.Studio{
+			Name:  studioName,
+			Image: invalidImage,
+		},
+	}
+
+	err := i.PreImport()
+
+	assert.NotNil(t, err)
+
+	i.Input.Image = image
+
+	err = i.PreImport()
+
+	assert.Nil(t, err)
+}
+
+func TestImporterPreImportWithParent(t *testing.T) {
+	readerWriter := &mocks.StudioReaderWriter{}
+
+	i := Importer{
+		ReaderWriter: readerWriter,
+		Input: jsonschema.Studio{
+			Name:         studioName,
+			Image:        image,
+			ParentStudio: existingParentStudioName,
+		},
+	}
+
+	readerWriter.On("FindByName", existingParentStudioName, false).Return(&models.Studio{
+		ID: existingStudioID,
+	}, nil).Once()
+	readerWriter.On("FindByName", existingParentStudioErr, false).Return(nil, errors.New("FindByName error")).Once()
+
+	err := i.PreImport()
+	assert.Nil(t, err)
+	assert.Equal(t, int64(existingStudioID), i.studio.ParentID.Int64)
+
+	i.Input.ParentStudio = existingParentStudioErr
+	err = i.PreImport()
+	assert.NotNil(t, err)
+
+	readerWriter.AssertExpectations(t)
+}
+
+func TestImporterPreImportWithMissingParent(t *testing.T) {
+	readerWriter := &mocks.StudioReaderWriter{}
+
+	i := Importer{
+		ReaderWriter: readerWriter,
+		Input: jsonschema.Studio{
+			Name:         studioName,
+			Image:        image,
+			ParentStudio: missingParentStudioName,
+		},
+		MissingRefBehaviour: models.ImportMissingRefEnumFail,
+	}
+
+	readerWriter.On("FindByName", missingParentStudioName, false).Return(nil, nil).Times(3)
+	readerWriter.On("Create", 
mock.AnythingOfType("models.Studio")).Return(&models.Studio{ + ID: existingStudioID, + }, nil) + + err := i.PreImport() + assert.NotNil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore + err = i.PreImport() + assert.Nil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumCreate + err = i.PreImport() + assert.Nil(t, err) + assert.Equal(t, int64(existingStudioID), i.studio.ParentID.Int64) + + readerWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingParentCreateErr(t *testing.T) { + readerWriter := &mocks.StudioReaderWriter{} + + i := Importer{ + ReaderWriter: readerWriter, + Input: jsonschema.Studio{ + Name: studioName, + Image: image, + ParentStudio: missingParentStudioName, + }, + MissingRefBehaviour: models.ImportMissingRefEnumCreate, + } + + readerWriter.On("FindByName", missingParentStudioName, false).Return(nil, nil).Once() + readerWriter.On("Create", mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error")) + + err := i.PreImport() + assert.NotNil(t, err) +} + +func TestImporterPostImport(t *testing.T) { + readerWriter := &mocks.StudioReaderWriter{} + + i := Importer{ + ReaderWriter: readerWriter, + imageData: imageBytes, + } + + updateStudioImageErr := errors.New("UpdateStudioImage error") + + readerWriter.On("UpdateStudioImage", studioID, imageBytes).Return(nil).Once() + readerWriter.On("UpdateStudioImage", errImageID, imageBytes).Return(updateStudioImageErr).Once() + + err := i.PostImport(studioID) + assert.Nil(t, err) + + err = i.PostImport(errImageID) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestImporterFindExistingID(t *testing.T) { + readerWriter := &mocks.StudioReaderWriter{} + + i := Importer{ + ReaderWriter: readerWriter, + Input: jsonschema.Studio{ + Name: studioName, + }, + } + + errFindByName := errors.New("FindByName error") + readerWriter.On("FindByName", studioName, false).Return(nil, nil).Once() + readerWriter.On("FindByName", 
existingStudioName, false).Return(&models.Studio{ + ID: existingStudioID, + }, nil).Once() + readerWriter.On("FindByName", studioNameErr, false).Return(nil, errFindByName).Once() + + id, err := i.FindExistingID() + assert.Nil(t, id) + assert.Nil(t, err) + + i.Input.Name = existingStudioName + id, err = i.FindExistingID() + assert.Equal(t, existingStudioID, *id) + assert.Nil(t, err) + + i.Input.Name = studioNameErr + id, err = i.FindExistingID() + assert.Nil(t, id) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestCreate(t *testing.T) { + readerWriter := &mocks.StudioReaderWriter{} + + studio := models.Studio{ + Name: modelstest.NullString(studioName), + } + + studioErr := models.Studio{ + Name: modelstest.NullString(studioNameErr), + } + + i := Importer{ + ReaderWriter: readerWriter, + studio: studio, + } + + errCreate := errors.New("Create error") + readerWriter.On("Create", studio).Return(&models.Studio{ + ID: studioID, + }, nil).Once() + readerWriter.On("Create", studioErr).Return(nil, errCreate).Once() + + id, err := i.Create() + assert.Equal(t, studioID, *id) + assert.Nil(t, err) + + i.studio = studioErr + id, err = i.Create() + assert.Nil(t, id) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestUpdate(t *testing.T) { + readerWriter := &mocks.StudioReaderWriter{} + + studio := models.Studio{ + Name: modelstest.NullString(studioName), + } + + studioErr := models.Studio{ + Name: modelstest.NullString(studioNameErr), + } + + i := Importer{ + ReaderWriter: readerWriter, + studio: studio, + } + + errUpdate := errors.New("Update error") + + // id needs to be set for the mock input + studio.ID = studioID + readerWriter.On("UpdateFull", studio).Return(nil, nil).Once() + + err := i.Update(studioID) + assert.Nil(t, err) + + i.studio = studioErr + + // need to set id separately + studioErr.ID = errImageID + readerWriter.On("UpdateFull", studioErr).Return(nil, errUpdate).Once() + + err = i.Update(errImageID) + 
assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} diff --git a/pkg/tag/import.go b/pkg/tag/import.go new file mode 100644 index 000000000..5985253e6 --- /dev/null +++ b/pkg/tag/import.go @@ -0,0 +1,85 @@ +package tag + +import ( + "fmt" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) + +type Importer struct { + ReaderWriter models.TagReaderWriter + Input jsonschema.Tag + + tag models.Tag + imageData []byte +} + +func (i *Importer) PreImport() error { + i.tag = models.Tag{ + Name: i.Input.Name, + CreatedAt: models.SQLiteTimestamp{Timestamp: i.Input.CreatedAt.GetTime()}, + UpdatedAt: models.SQLiteTimestamp{Timestamp: i.Input.UpdatedAt.GetTime()}, + } + + var err error + if len(i.Input.Image) > 0 { + _, i.imageData, err = utils.ProcessBase64Image(i.Input.Image) + if err != nil { + return fmt.Errorf("invalid image: %s", err.Error()) + } + } + + return nil +} + +func (i *Importer) PostImport(id int) error { + if len(i.imageData) > 0 { + if err := i.ReaderWriter.UpdateTagImage(id, i.imageData); err != nil { + return fmt.Errorf("error setting tag image: %s", err.Error()) + } + } + + return nil +} + +func (i *Importer) Name() string { + return i.Input.Name +} + +func (i *Importer) FindExistingID() (*int, error) { + const nocase = false + existing, err := i.ReaderWriter.FindByName(i.Name(), nocase) + if err != nil { + return nil, err + } + + if existing != nil { + id := existing.ID + return &id, nil + } + + return nil, nil +} + +func (i *Importer) Create() (*int, error) { + created, err := i.ReaderWriter.Create(i.tag) + if err != nil { + return nil, fmt.Errorf("error creating tag: %s", err.Error()) + } + + id := created.ID + return &id, nil +} + +func (i *Importer) Update(id int) error { + tag := i.tag + tag.ID = id + _, err := i.ReaderWriter.Update(tag) + if err != nil { + return fmt.Errorf("error updating existing tag: %s", err.Error()) + } + + return nil +} diff --git 
a/pkg/tag/import_test.go b/pkg/tag/import_test.go new file mode 100644 index 000000000..b99dd012c --- /dev/null +++ b/pkg/tag/import_test.go @@ -0,0 +1,179 @@ +package tag + +import ( + "errors" + "testing" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stretchr/testify/assert" +) + +const image = "aW1hZ2VCeXRlcw==" +const invalidImage = "aW1hZ2VCeXRlcw&&" + +var imageBytes = []byte("imageBytes") + +const ( + tagNameErr = "tagNameErr" + existingTagName = "existingTagName" + + existingTagID = 100 +) + +func TestImporterName(t *testing.T) { + i := Importer{ + Input: jsonschema.Tag{ + Name: tagName, + }, + } + + assert.Equal(t, tagName, i.Name()) +} + +func TestImporterPreImport(t *testing.T) { + i := Importer{ + Input: jsonschema.Tag{ + Name: tagName, + Image: invalidImage, + }, + } + + err := i.PreImport() + + assert.NotNil(t, err) + + i.Input.Image = image + + err = i.PreImport() + + assert.Nil(t, err) +} + +func TestImporterPostImport(t *testing.T) { + readerWriter := &mocks.TagReaderWriter{} + + i := Importer{ + ReaderWriter: readerWriter, + imageData: imageBytes, + } + + updateTagImageErr := errors.New("UpdateTagImage error") + + readerWriter.On("UpdateTagImage", tagID, imageBytes).Return(nil).Once() + readerWriter.On("UpdateTagImage", errImageID, imageBytes).Return(updateTagImageErr).Once() + + err := i.PostImport(tagID) + assert.Nil(t, err) + + err = i.PostImport(errImageID) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestImporterFindExistingID(t *testing.T) { + readerWriter := &mocks.TagReaderWriter{} + + i := Importer{ + ReaderWriter: readerWriter, + Input: jsonschema.Tag{ + Name: tagName, + }, + } + + errFindByName := errors.New("FindByName error") + readerWriter.On("FindByName", tagName, false).Return(nil, nil).Once() + readerWriter.On("FindByName", existingTagName, false).Return(&models.Tag{ + ID: existingTagID, + }, 
nil).Once() + readerWriter.On("FindByName", tagNameErr, false).Return(nil, errFindByName).Once() + + id, err := i.FindExistingID() + assert.Nil(t, id) + assert.Nil(t, err) + + i.Input.Name = existingTagName + id, err = i.FindExistingID() + assert.Equal(t, existingTagID, *id) + assert.Nil(t, err) + + i.Input.Name = tagNameErr + id, err = i.FindExistingID() + assert.Nil(t, id) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestCreate(t *testing.T) { + readerWriter := &mocks.TagReaderWriter{} + + tag := models.Tag{ + Name: tagName, + } + + tagErr := models.Tag{ + Name: tagNameErr, + } + + i := Importer{ + ReaderWriter: readerWriter, + tag: tag, + } + + errCreate := errors.New("Create error") + readerWriter.On("Create", tag).Return(&models.Tag{ + ID: tagID, + }, nil).Once() + readerWriter.On("Create", tagErr).Return(nil, errCreate).Once() + + id, err := i.Create() + assert.Equal(t, tagID, *id) + assert.Nil(t, err) + + i.tag = tagErr + id, err = i.Create() + assert.Nil(t, id) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestUpdate(t *testing.T) { + readerWriter := &mocks.TagReaderWriter{} + + tag := models.Tag{ + Name: tagName, + } + + tagErr := models.Tag{ + Name: tagNameErr, + } + + i := Importer{ + ReaderWriter: readerWriter, + tag: tag, + } + + errUpdate := errors.New("Update error") + + // id needs to be set for the mock input + tag.ID = tagID + readerWriter.On("Update", tag).Return(nil, nil).Once() + + err := i.Update(tagID) + assert.Nil(t, err) + + i.tag = tagErr + + // need to set id separately + tagErr.ID = errImageID + readerWriter.On("Update", tagErr).Return(nil, errUpdate).Once() + + err = i.Update(errImageID) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} diff --git a/ui/v2.5/package.json b/ui/v2.5/package.json index 9c72a67ce..44a302f04 100644 --- a/ui/v2.5/package.json +++ b/ui/v2.5/package.json @@ -31,7 +31,9 @@ "@fortawesome/free-regular-svg-icons": "^5.14.0", 
"@fortawesome/free-solid-svg-icons": "^5.14.0", "@fortawesome/react-fontawesome": "^0.1.11", + "@types/apollo-upload-client": "^14.1.0", "@types/mousetrap": "^1.6.3", + "apollo-upload-client": "^14.1.2", "axios": "0.20.0", "bootstrap": "^4.5.2", "classnames": "^2.2.6", diff --git a/ui/v2.5/src/components/Changelog/versions/v040.md b/ui/v2.5/src/components/Changelog/versions/v040.md index 88633c05d..1908b125b 100644 --- a/ui/v2.5/src/components/Changelog/versions/v040.md +++ b/ui/v2.5/src/components/Changelog/versions/v040.md @@ -1,4 +1,5 @@ ### ✨ New Features +* Add partial import from zip file. * Add selective scene export. ### 🎨 Improvements diff --git a/ui/v2.5/src/components/Settings/SettingsTasksPanel/ImportDialog.tsx b/ui/v2.5/src/components/Settings/SettingsTasksPanel/ImportDialog.tsx new file mode 100644 index 000000000..e68d71b31 --- /dev/null +++ b/ui/v2.5/src/components/Settings/SettingsTasksPanel/ImportDialog.tsx @@ -0,0 +1,171 @@ +import React, { useState } from "react"; +import { Form } from "react-bootstrap"; +import { mutateImportObjects } from "src/core/StashService"; +import { Modal } from "src/components/Shared"; +import * as GQL from "src/core/generated-graphql"; +import { useToast } from "src/hooks"; + +interface IImportDialogProps { + onClose: () => void; +} + +export const ImportDialog: React.FC = ( + props: IImportDialogProps +) => { + const [duplicateBehaviour, setDuplicateBehaviour] = useState( + duplicateHandlingToString(GQL.ImportDuplicateEnum.Ignore) + ); + + const [missingRefBehaviour, setMissingRefBehaviour] = useState( + missingRefHandlingToString(GQL.ImportMissingRefEnum.Fail) + ); + + const [file, setFile] = useState(); + + // Network state + const [isRunning, setIsRunning] = useState(false); + + const Toast = useToast(); + + function duplicateHandlingToString( + value: GQL.ImportDuplicateEnum | undefined + ) { + switch (value) { + case GQL.ImportDuplicateEnum.Fail: + return "Fail"; + case GQL.ImportDuplicateEnum.Ignore: + return 
"Ignore"; + case GQL.ImportDuplicateEnum.Overwrite: + return "Overwrite"; + } + return "Ignore"; + } + + function translateDuplicateHandling(value: string) { + switch (value) { + case "Fail": + return GQL.ImportDuplicateEnum.Fail; + case "Ignore": + return GQL.ImportDuplicateEnum.Ignore; + case "Overwrite": + return GQL.ImportDuplicateEnum.Overwrite; + } + + return GQL.ImportDuplicateEnum.Ignore; + } + + function missingRefHandlingToString( + value: GQL.ImportMissingRefEnum | undefined + ) { + switch (value) { + case GQL.ImportMissingRefEnum.Fail: + return "Fail"; + case GQL.ImportMissingRefEnum.Ignore: + return "Ignore"; + case GQL.ImportMissingRefEnum.Create: + return "Create"; + } + return "Fail"; + } + + function translateMissingRefHandling(value: string) { + switch (value) { + case "Fail": + return GQL.ImportMissingRefEnum.Fail; + case "Ignore": + return GQL.ImportMissingRefEnum.Ignore; + case "Create": + return GQL.ImportMissingRefEnum.Create; + } + + return GQL.ImportMissingRefEnum.Fail; + } + + function onFileChange(event: React.ChangeEvent) { + if ( + event.target.validity.valid && + event.target.files && + event.target.files.length > 0 + ) { + setFile(event.target.files[0]); + } + } + + async function onImport() { + try { + setIsRunning(true); + await mutateImportObjects({ + duplicateBehaviour: translateDuplicateHandling(duplicateBehaviour), + missingRefBehaviour: translateMissingRefHandling(missingRefBehaviour), + file, + }); + setIsRunning(false); + Toast.success({ content: "Started importing" }); + } catch (e) { + Toast.error(e); + } finally { + props.onClose(); + } + } + + return ( + { + onImport(); + }, + text: "Import", + }} + cancel={{ + onClick: () => props.onClose(), + text: "Cancel", + variant: "secondary", + }} + disabled={!file} + isRunning={isRunning} + > +
+
+ +
Import zip file
+ +
+ +
Duplicate object handling
+ ) => + setDuplicateBehaviour(e.currentTarget.value) + } + > + {Object.values(GQL.ImportDuplicateEnum).map((p) => ( + + ))} + +
+ + +
Missing reference handling
+ ) => + setMissingRefBehaviour(e.currentTarget.value) + } + > + {Object.values(GQL.ImportMissingRefEnum).map((p) => ( + + ))} + +
+
+
+
+ ); +}; diff --git a/ui/v2.5/src/components/Settings/SettingsTasksPanel/SettingsTasksPanel.tsx b/ui/v2.5/src/components/Settings/SettingsTasksPanel/SettingsTasksPanel.tsx index 7d5e273ca..c74556c5e 100644 --- a/ui/v2.5/src/components/Settings/SettingsTasksPanel/SettingsTasksPanel.tsx +++ b/ui/v2.5/src/components/Settings/SettingsTasksPanel/SettingsTasksPanel.tsx @@ -18,6 +18,7 @@ import { useToast } from "src/hooks"; import * as GQL from "src/core/generated-graphql"; import { Modal } from "src/components/Shared"; import { GenerateButton } from "./GenerateButton"; +import { ImportDialog } from "./ImportDialog"; type Plugin = Pick; type PluginTask = Pick; @@ -26,6 +27,7 @@ export const SettingsTasksPanel: React.FC = () => { const Toast = useToast(); const [isImportAlertOpen, setIsImportAlertOpen] = useState(false); const [isCleanAlertOpen, setIsCleanAlertOpen] = useState(false); + const [isImportDialogOpen, setIsImportDialogOpen] = useState(false); const [useFileMetadata, setUseFileMetadata] = useState(false); const [status, setStatus] = useState(""); const [progress, setProgress] = useState(0); @@ -135,6 +137,14 @@ export const SettingsTasksPanel: React.FC = () => { ); } + function renderImportDialog() { + if (!isImportDialogOpen) { + return; + } + + return setIsImportDialogOpen(false)} />; + } + async function onScan() { try { await mutateMetadataScan({ useFileMetadata }); @@ -256,6 +266,7 @@ export const SettingsTasksPanel: React.FC = () => { <> {renderImportAlert()} {renderCleanAlert()} + {renderImportDialog()}

Running Jobs

@@ -352,10 +363,11 @@ export const SettingsTasksPanel: React.FC = () => { }) } > - Export + Full Export - Export the database content into JSON format. + Exports the database content into JSON format in the metadata + directory. @@ -365,10 +377,24 @@ export const SettingsTasksPanel: React.FC = () => { variant="danger" onClick={() => setIsImportAlertOpen(true)} > - Import + Full Import - Import from exported JSON. This is a destructive action. + Import from exported JSON in the metadata directory. Wipes the + existing database. + + + + + + + Incremental import from a supplied export zip file. diff --git a/ui/v2.5/src/components/Shared/Modal.tsx b/ui/v2.5/src/components/Shared/Modal.tsx index 06a83f2ea..a9afee153 100644 --- a/ui/v2.5/src/components/Shared/Modal.tsx +++ b/ui/v2.5/src/components/Shared/Modal.tsx @@ -17,6 +17,7 @@ interface IModal { cancel?: IButton; accept?: IButton; isRunning?: boolean; + disabled?: boolean; modalProps?: ModalProps; } @@ -29,6 +30,7 @@ const ModalComponent: React.FC = ({ accept, onHide, isRunning, + disabled, modalProps, }) => ( @@ -51,7 +53,7 @@ const ModalComponent: React.FC = ({ "" )}