Mirror of https://github.com/stashapp/stash.git

Add partial import functionality (#812)

@@ -10,6 +10,10 @@ mutation ExportObjects($input: ExportObjectsInput!) {
   exportObjects(input: $input)
 }
 
+mutation ImportObjects($input: ImportObjectsInput!) {
+  importObjects(input: $input)
+}
+
 mutation MetadataScan($input: ScanMetadataInput!) {
   metadataScan(input: $input)
 }
@@ -164,9 +164,12 @@ type Mutation {
   """Returns a link to download the result"""
   exportObjects(input: ExportObjectsInput!): String
 
-  """Start an import. Returns the job ID"""
+  """Performs an incremental import. Returns the job ID"""
+  importObjects(input: ImportObjectsInput!): String!
+
+  """Start a full import. Completely wipes the database and imports from the metadata directory. Returns the job ID"""
   metadataImport: String!
-  """Start an export. Returns the job ID"""
+  """Start a full export. Outputs to the metadata directory. Returns the job ID"""
   metadataExport: String!
   """Start a scan. Returns the job ID"""
   metadataScan(input: ScanMetadataInput!): String!
@@ -1,3 +1,5 @@
+scalar Upload
+
 input GenerateMetadataInput {
   sprites: Boolean!
   previews: Boolean!
@@ -65,3 +67,21 @@ input ExportObjectsInput {
   galleries: ExportObjectTypeInput
   includeDependencies: Boolean
 }
+
+enum ImportDuplicateEnum {
+  IGNORE
+  OVERWRITE
+  FAIL
+}
+
+enum ImportMissingRefEnum {
+  IGNORE
+  FAIL
+  CREATE
+}
+
+input ImportObjectsInput {
+  file: Upload!
+  duplicateBehaviour: ImportDuplicateEnum!
+  missingRefBehaviour: ImportMissingRefEnum!
+}
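
For context, gqlgen turns the ImportObjectsInput definition above into a Go input type that the resolver below receives. The following is a hedged sketch of that generated type, not part of this commit; in particular, binding the Upload scalar to gqlgen's graphql.Upload type is an assumption.

// Hedged sketch of the generated models.ImportObjectsInput; field names
// follow gqlgen's conventions and the Upload binding is assumed.
package models

import "github.com/99designs/gqlgen/graphql"

type ImportObjectsInput struct {
    File                graphql.Upload       `json:"file"`
    DuplicateBehaviour  ImportDuplicateEnum  `json:"duplicateBehaviour"`
    MissingRefBehaviour ImportMissingRefEnum `json:"missingRefBehaviour"`
}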
@@ -19,6 +19,16 @@ func (r *mutationResolver) MetadataImport(ctx context.Context) (string, error) {
     return "todo", nil
 }
 
+func (r *mutationResolver) ImportObjects(ctx context.Context, input models.ImportObjectsInput) (string, error) {
+    t := manager.CreateImportTask(config.GetVideoFileNamingAlgorithm(), input)
+    _, err := manager.GetInstance().RunSingleTask(t)
+    if err != nil {
+        return "", err
+    }
+
+    return "todo", nil
+}
+
 func (r *mutationResolver) MetadataExport(ctx context.Context) (string, error) {
     manager.GetInstance().Export()
     return "todo", nil
pkg/gallery/import.go (new file, 72 lines)
@@ -0,0 +1,72 @@
package gallery

import (
    "fmt"
    "time"

    "github.com/stashapp/stash/pkg/manager/jsonschema"
    "github.com/stashapp/stash/pkg/models"
)

type Importer struct {
    ReaderWriter models.GalleryReaderWriter
    Input        jsonschema.PathMapping

    gallery   models.Gallery
    imageData []byte
}

func (i *Importer) PreImport() error {
    currentTime := time.Now()
    i.gallery = models.Gallery{
        Checksum:  i.Input.Checksum,
        Path:      i.Input.Path,
        CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
        UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
    }

    return nil
}

func (i *Importer) PostImport(id int) error {
    return nil
}

func (i *Importer) Name() string {
    return i.Input.Path
}

func (i *Importer) FindExistingID() (*int, error) {
    existing, err := i.ReaderWriter.FindByPath(i.Name())
    if err != nil {
        return nil, err
    }

    if existing != nil {
        id := existing.ID
        return &id, nil
    }

    return nil, nil
}

func (i *Importer) Create() (*int, error) {
    created, err := i.ReaderWriter.Create(i.gallery)
    if err != nil {
        return nil, fmt.Errorf("error creating gallery: %s", err.Error())
    }

    id := created.ID
    return &id, nil
}

func (i *Importer) Update(id int) error {
    gallery := i.gallery
    gallery.ID = id
    _, err := i.ReaderWriter.Update(gallery)
    if err != nil {
        return fmt.Errorf("error updating existing gallery: %s", err.Error())
    }

    return nil
}
pkg/gallery/import_test.go (new file, 147 lines)
@@ -0,0 +1,147 @@
package gallery

import (
    "errors"
    "testing"

    "github.com/stashapp/stash/pkg/manager/jsonschema"
    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/models/mocks"
    "github.com/stretchr/testify/assert"
)

const (
    galleryPath         = "galleryPath"
    galleryPathErr      = "galleryPathErr"
    existingGalleryPath = "existingGalleryPath"

    galleryID         = 1
    idErr             = 2
    existingGalleryID = 100
)

func TestImporterName(t *testing.T) {
    i := Importer{
        Input: jsonschema.PathMapping{
            Path: galleryPath,
        },
    }

    assert.Equal(t, galleryPath, i.Name())
}

func TestImporterPreImport(t *testing.T) {
    i := Importer{
        Input: jsonschema.PathMapping{
            Path: galleryPath,
        },
    }

    err := i.PreImport()
    assert.Nil(t, err)
}

func TestImporterFindExistingID(t *testing.T) {
    readerWriter := &mocks.GalleryReaderWriter{}

    i := Importer{
        ReaderWriter: readerWriter,
        Input: jsonschema.PathMapping{
            Path: galleryPath,
        },
    }

    errFindByPath := errors.New("FindByPath error")
    readerWriter.On("FindByPath", galleryPath).Return(nil, nil).Once()
    readerWriter.On("FindByPath", existingGalleryPath).Return(&models.Gallery{
        ID: existingGalleryID,
    }, nil).Once()
    readerWriter.On("FindByPath", galleryPathErr).Return(nil, errFindByPath).Once()

    id, err := i.FindExistingID()
    assert.Nil(t, id)
    assert.Nil(t, err)

    i.Input.Path = existingGalleryPath
    id, err = i.FindExistingID()
    assert.Equal(t, existingGalleryID, *id)
    assert.Nil(t, err)

    i.Input.Path = galleryPathErr
    id, err = i.FindExistingID()
    assert.Nil(t, id)
    assert.NotNil(t, err)

    readerWriter.AssertExpectations(t)
}

func TestCreate(t *testing.T) {
    readerWriter := &mocks.GalleryReaderWriter{}

    gallery := models.Gallery{
        Path: galleryPath,
    }

    galleryErr := models.Gallery{
        Path: galleryPathErr,
    }

    i := Importer{
        ReaderWriter: readerWriter,
        gallery:      gallery,
    }

    errCreate := errors.New("Create error")
    readerWriter.On("Create", gallery).Return(&models.Gallery{
        ID: galleryID,
    }, nil).Once()
    readerWriter.On("Create", galleryErr).Return(nil, errCreate).Once()

    id, err := i.Create()
    assert.Equal(t, galleryID, *id)
    assert.Nil(t, err)

    i.gallery = galleryErr
    id, err = i.Create()
    assert.Nil(t, id)
    assert.NotNil(t, err)

    readerWriter.AssertExpectations(t)
}

func TestUpdate(t *testing.T) {
    readerWriter := &mocks.GalleryReaderWriter{}

    gallery := models.Gallery{
        Path: galleryPath,
    }

    galleryErr := models.Gallery{
        Path: galleryPathErr,
    }

    i := Importer{
        ReaderWriter: readerWriter,
        gallery:      gallery,
    }

    errUpdate := errors.New("Update error")

    // id needs to be set for the mock input
    gallery.ID = galleryID
    readerWriter.On("Update", gallery).Return(nil, nil).Once()

    err := i.Update(galleryID)
    assert.Nil(t, err)

    i.gallery = galleryErr

    // need to set id separately
    galleryErr.ID = idErr
    readerWriter.On("Update", galleryErr).Return(nil, errUpdate).Once()

    err = i.Update(idErr)
    assert.NotNil(t, err)

    readerWriter.AssertExpectations(t)
}
pkg/manager/import.go (new file, 61 lines)
@@ -0,0 +1,61 @@
package manager

import (
    "fmt"

    "github.com/stashapp/stash/pkg/logger"
    "github.com/stashapp/stash/pkg/models"
)

type importer interface {
    PreImport() error
    PostImport(id int) error
    Name() string
    FindExistingID() (*int, error)
    Create() (*int, error)
    Update(id int) error
}

func performImport(i importer, duplicateBehaviour models.ImportDuplicateEnum) error {
    if err := i.PreImport(); err != nil {
        return err
    }

    // try to find an existing object with the same name
    name := i.Name()
    existing, err := i.FindExistingID()
    if err != nil {
        return fmt.Errorf("error finding existing objects: %s", err.Error())
    }

    var id int

    if existing != nil {
        if duplicateBehaviour == models.ImportDuplicateEnumFail {
            return fmt.Errorf("existing object with name '%s'", name)
        } else if duplicateBehaviour == models.ImportDuplicateEnumIgnore {
            logger.Info("Skipping existing object")
            return nil
        }

        // must be overwriting
        id = *existing
        if err := i.Update(id); err != nil {
            return fmt.Errorf("error updating existing object: %s", err.Error())
        }
    } else {
        // creating
        createdID, err := i.Create()
        if err != nil {
            return fmt.Errorf("error creating object: %s", err.Error())
        }

        id = *createdID
    }

    if err := i.PostImport(id); err != nil {
        return err
    }

    return nil
}
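
A minimal wiring sketch, not part of this commit: how a concrete importer such as gallery.Importer can be driven through performImport. The helper name importGallery and the way the reader/writer and path mapping are obtained (for example inside the import task's transaction) are assumptions; the types and functions it calls come from the diff above.

package manager

import (
    "github.com/stashapp/stash/pkg/gallery"
    "github.com/stashapp/stash/pkg/manager/jsonschema"
    "github.com/stashapp/stash/pkg/models"
)

// importGallery is illustrative: gallery.Importer satisfies the importer
// interface via its pointer-receiver methods, so performImport can apply
// the chosen duplicate behaviour (IGNORE, OVERWRITE or FAIL) uniformly.
func importGallery(readerWriter models.GalleryReaderWriter, mapping jsonschema.PathMapping, duplicateBehaviour models.ImportDuplicateEnum) error {
    i := &gallery.Importer{
        ReaderWriter: readerWriter,
        Input:        mapping,
    }

    // performImport calls PreImport, looks up an existing gallery by path,
    // then skips, overwrites or fails according to duplicateBehaviour, and
    // finally calls PostImport with the resulting ID.
    return performImport(i, duplicateBehaviour)
}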
@@ -2,10 +2,11 @@ package jsonschema
 
 import (
     "bytes"
-    "github.com/json-iterator/go"
     "io/ioutil"
     "time"
+
+    jsoniter "github.com/json-iterator/go"
 )
 
 var nilTime = (time.Time{}).UnixNano()
@@ -153,7 +153,13 @@ func (s *singleton) Import() {
 
         var wg sync.WaitGroup
         wg.Add(1)
-        task := ImportTask{fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm()}
+        task := ImportTask{
+            BaseDir:             config.GetMetadataPath(),
+            Reset:               true,
+            DuplicateBehaviour:  models.ImportDuplicateEnumFail,
+            MissingRefBehaviour: models.ImportMissingRefEnumFail,
+            fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(),
+        }
         go task.Start(&wg)
         wg.Wait()
     }()
(File diff suppressed because it is too large.)
@@ -7,8 +7,8 @@ import (
 type GalleryReader interface {
     // Find(id int) (*Gallery, error)
     FindMany(ids []int) ([]*Gallery, error)
-    // FindByChecksum(checksum string) (*Gallery, error)
+    FindByChecksum(checksum string) (*Gallery, error)
-    // FindByPath(path string) (*Gallery, error)
+    FindByPath(path string) (*Gallery, error)
     FindBySceneID(sceneID int) (*Gallery, error)
     // ValidGalleriesForScenePath(scenePath string) ([]*Gallery, error)
     // Count() (int, error)
@@ -17,8 +17,8 @@ type GalleryReader interface {
 }
 
 type GalleryWriter interface {
-    // Create(newGallery Gallery) (*Gallery, error)
+    Create(newGallery Gallery) (*Gallery, error)
-    // Update(updatedGallery Gallery) (*Gallery, error)
+    Update(updatedGallery Gallery) (*Gallery, error)
     // Destroy(id int) error
     // ClearGalleryId(sceneID int) error
 }
@@ -44,10 +44,26 @@ func (t *galleryReaderWriter) FindMany(ids []int) ([]*Gallery, error) {
     return t.qb.FindMany(ids)
 }
 
+func (t *galleryReaderWriter) FindByChecksum(checksum string) (*Gallery, error) {
+    return t.qb.FindByChecksum(checksum, t.tx)
+}
+
 func (t *galleryReaderWriter) All() ([]*Gallery, error) {
     return t.qb.All()
 }
 
+func (t *galleryReaderWriter) FindByPath(path string) (*Gallery, error) {
+    return t.qb.FindByPath(path)
+}
+
 func (t *galleryReaderWriter) FindBySceneID(sceneID int) (*Gallery, error) {
     return t.qb.FindBySceneID(sceneID, t.tx)
 }
+
+func (t *galleryReaderWriter) Create(newGallery Gallery) (*Gallery, error) {
+    return t.qb.Create(newGallery, t.tx)
+}
+
+func (t *galleryReaderWriter) Update(updatedGallery Gallery) (*Gallery, error) {
+    return t.qb.Update(updatedGallery, t.tx)
+}
@@ -11,20 +11,20 @@ type JoinReader interface {
 }
 
 type JoinWriter interface {
-    // CreatePerformersScenes(newJoins []PerformersScenes) error
+    CreatePerformersScenes(newJoins []PerformersScenes) error
     // AddPerformerScene(sceneID int, performerID int) (bool, error)
-    // UpdatePerformersScenes(sceneID int, updatedJoins []PerformersScenes) error
+    UpdatePerformersScenes(sceneID int, updatedJoins []PerformersScenes) error
     // DestroyPerformersScenes(sceneID int) error
-    // CreateMoviesScenes(newJoins []MoviesScenes) error
+    CreateMoviesScenes(newJoins []MoviesScenes) error
     // AddMoviesScene(sceneID int, movieID int, sceneIdx *int) (bool, error)
-    // UpdateMoviesScenes(sceneID int, updatedJoins []MoviesScenes) error
+    UpdateMoviesScenes(sceneID int, updatedJoins []MoviesScenes) error
     // DestroyMoviesScenes(sceneID int) error
     // CreateScenesTags(newJoins []ScenesTags) error
-    // UpdateScenesTags(sceneID int, updatedJoins []ScenesTags) error
+    UpdateScenesTags(sceneID int, updatedJoins []ScenesTags) error
     // AddSceneTag(sceneID int, tagID int) (bool, error)
     // DestroyScenesTags(sceneID int) error
     // CreateSceneMarkersTags(newJoins []SceneMarkersTags) error
-    // UpdateSceneMarkersTags(sceneMarkerID int, updatedJoins []SceneMarkersTags) error
+    UpdateSceneMarkersTags(sceneMarkerID int, updatedJoins []SceneMarkersTags) error
     // DestroySceneMarkersTags(sceneMarkerID int, updatedJoins []SceneMarkersTags) error
     // DestroyScenesGalleries(sceneID int) error
     // DestroyScenesMarkers(sceneID int) error
@@ -50,3 +50,27 @@ type joinReaderWriter struct {
 func (t *joinReaderWriter) GetSceneMovies(sceneID int) ([]MoviesScenes, error) {
     return t.qb.GetSceneMovies(sceneID, t.tx)
 }
+
+func (t *joinReaderWriter) CreatePerformersScenes(newJoins []PerformersScenes) error {
+    return t.qb.CreatePerformersScenes(newJoins, t.tx)
+}
+
+func (t *joinReaderWriter) UpdatePerformersScenes(sceneID int, updatedJoins []PerformersScenes) error {
+    return t.qb.UpdatePerformersScenes(sceneID, updatedJoins, t.tx)
+}
+
+func (t *joinReaderWriter) CreateMoviesScenes(newJoins []MoviesScenes) error {
+    return t.qb.CreateMoviesScenes(newJoins, t.tx)
+}
+
+func (t *joinReaderWriter) UpdateMoviesScenes(sceneID int, updatedJoins []MoviesScenes) error {
+    return t.qb.UpdateMoviesScenes(sceneID, updatedJoins, t.tx)
+}
+
+func (t *joinReaderWriter) UpdateScenesTags(sceneID int, updatedJoins []ScenesTags) error {
+    return t.qb.UpdateScenesTags(sceneID, updatedJoins, t.tx)
+}
+
+func (t *joinReaderWriter) UpdateSceneMarkersTags(sceneMarkerID int, updatedJoins []SceneMarkersTags) error {
+    return t.qb.UpdateSceneMarkersTags(sceneMarkerID, updatedJoins, t.tx)
+}
@@ -2,11 +2,14 @@ package models
 
 import (
     "fmt"
-    "github.com/stashapp/stash/pkg/utils"
     "strings"
     "time"
+
+    "github.com/stashapp/stash/pkg/utils"
 )
 
+var currentLocation = time.Now().Location()
+
 type JSONTime struct {
     time.Time
 }
@@ -28,3 +31,19 @@ func (jt *JSONTime) MarshalJSON() ([]byte, error) {
     }
     return []byte(fmt.Sprintf("\"%s\"", jt.Time.Format(time.RFC3339))), nil
 }
+
+func (jt JSONTime) GetTime() time.Time {
+    if currentLocation != nil {
+        if jt.IsZero() {
+            return time.Now().In(currentLocation)
+        } else {
+            return jt.Time.In(currentLocation)
+        }
+    } else {
+        if jt.IsZero() {
+            return time.Now()
+        } else {
+            return jt.Time
+        }
+    }
+}
@@ -35,6 +35,75 @@ func (_m *GalleryReaderWriter) All() ([]*models.Gallery, error) {
     return r0, r1
 }
 
+// Create provides a mock function with given fields: newGallery
+func (_m *GalleryReaderWriter) Create(newGallery models.Gallery) (*models.Gallery, error) {
+    ret := _m.Called(newGallery)
+
+    var r0 *models.Gallery
+    if rf, ok := ret.Get(0).(func(models.Gallery) *models.Gallery); ok {
+        r0 = rf(newGallery)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Gallery)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(models.Gallery) error); ok {
+        r1 = rf(newGallery)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
+// FindByChecksum provides a mock function with given fields: checksum
+func (_m *GalleryReaderWriter) FindByChecksum(checksum string) (*models.Gallery, error) {
+    ret := _m.Called(checksum)
+
+    var r0 *models.Gallery
+    if rf, ok := ret.Get(0).(func(string) *models.Gallery); ok {
+        r0 = rf(checksum)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Gallery)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(string) error); ok {
+        r1 = rf(checksum)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
+// FindByPath provides a mock function with given fields: path
+func (_m *GalleryReaderWriter) FindByPath(path string) (*models.Gallery, error) {
+    ret := _m.Called(path)
+
+    var r0 *models.Gallery
+    if rf, ok := ret.Get(0).(func(string) *models.Gallery); ok {
+        r0 = rf(path)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Gallery)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(string) error); ok {
+        r1 = rf(path)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
 // FindBySceneID provides a mock function with given fields: sceneID
 func (_m *GalleryReaderWriter) FindBySceneID(sceneID int) (*models.Gallery, error) {
     ret := _m.Called(sceneID)
@@ -80,3 +149,26 @@ func (_m *GalleryReaderWriter) FindMany(ids []int) ([]*models.Gallery, error) {
 
     return r0, r1
 }
+
+// Update provides a mock function with given fields: updatedGallery
+func (_m *GalleryReaderWriter) Update(updatedGallery models.Gallery) (*models.Gallery, error) {
+    ret := _m.Called(updatedGallery)
+
+    var r0 *models.Gallery
+    if rf, ok := ret.Get(0).(func(models.Gallery) *models.Gallery); ok {
+        r0 = rf(updatedGallery)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Gallery)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(models.Gallery) error); ok {
+        r1 = rf(updatedGallery)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
@@ -12,6 +12,34 @@ type JoinReaderWriter struct {
     mock.Mock
 }
 
+// CreateMoviesScenes provides a mock function with given fields: newJoins
+func (_m *JoinReaderWriter) CreateMoviesScenes(newJoins []models.MoviesScenes) error {
+    ret := _m.Called(newJoins)
+
+    var r0 error
+    if rf, ok := ret.Get(0).(func([]models.MoviesScenes) error); ok {
+        r0 = rf(newJoins)
+    } else {
+        r0 = ret.Error(0)
+    }
+
+    return r0
+}
+
+// CreatePerformersScenes provides a mock function with given fields: newJoins
+func (_m *JoinReaderWriter) CreatePerformersScenes(newJoins []models.PerformersScenes) error {
+    ret := _m.Called(newJoins)
+
+    var r0 error
+    if rf, ok := ret.Get(0).(func([]models.PerformersScenes) error); ok {
+        r0 = rf(newJoins)
+    } else {
+        r0 = ret.Error(0)
+    }
+
+    return r0
+}
+
 // GetSceneMovies provides a mock function with given fields: sceneID
 func (_m *JoinReaderWriter) GetSceneMovies(sceneID int) ([]models.MoviesScenes, error) {
     ret := _m.Called(sceneID)
@@ -34,3 +62,59 @@ func (_m *JoinReaderWriter) GetSceneMovies(sceneID int) ([]models.MoviesScenes,
 
     return r0, r1
 }
+
+// UpdateMoviesScenes provides a mock function with given fields: sceneID, updatedJoins
+func (_m *JoinReaderWriter) UpdateMoviesScenes(sceneID int, updatedJoins []models.MoviesScenes) error {
+    ret := _m.Called(sceneID, updatedJoins)
+
+    var r0 error
+    if rf, ok := ret.Get(0).(func(int, []models.MoviesScenes) error); ok {
+        r0 = rf(sceneID, updatedJoins)
+    } else {
+        r0 = ret.Error(0)
+    }
+
+    return r0
+}
+
+// UpdatePerformersScenes provides a mock function with given fields: sceneID, updatedJoins
+func (_m *JoinReaderWriter) UpdatePerformersScenes(sceneID int, updatedJoins []models.PerformersScenes) error {
+    ret := _m.Called(sceneID, updatedJoins)
+
+    var r0 error
+    if rf, ok := ret.Get(0).(func(int, []models.PerformersScenes) error); ok {
+        r0 = rf(sceneID, updatedJoins)
+    } else {
+        r0 = ret.Error(0)
+    }
+
+    return r0
+}
+
+// UpdateSceneMarkersTags provides a mock function with given fields: sceneMarkerID, updatedJoins
+func (_m *JoinReaderWriter) UpdateSceneMarkersTags(sceneMarkerID int, updatedJoins []models.SceneMarkersTags) error {
+    ret := _m.Called(sceneMarkerID, updatedJoins)
+
+    var r0 error
+    if rf, ok := ret.Get(0).(func(int, []models.SceneMarkersTags) error); ok {
+        r0 = rf(sceneMarkerID, updatedJoins)
+    } else {
+        r0 = ret.Error(0)
+    }
+
+    return r0
+}
+
+// UpdateScenesTags provides a mock function with given fields: sceneID, updatedJoins
+func (_m *JoinReaderWriter) UpdateScenesTags(sceneID int, updatedJoins []models.ScenesTags) error {
+    ret := _m.Called(sceneID, updatedJoins)
+
+    var r0 error
+    if rf, ok := ret.Get(0).(func(int, []models.ScenesTags) error); ok {
+        r0 = rf(sceneID, updatedJoins)
+    } else {
+        r0 = ret.Error(0)
+    }
+
+    return r0
+}
@@ -35,6 +35,29 @@ func (_m *MovieReaderWriter) All() ([]*models.Movie, error) {
     return r0, r1
 }
 
+// Create provides a mock function with given fields: newMovie
+func (_m *MovieReaderWriter) Create(newMovie models.Movie) (*models.Movie, error) {
+    ret := _m.Called(newMovie)
+
+    var r0 *models.Movie
+    if rf, ok := ret.Get(0).(func(models.Movie) *models.Movie); ok {
+        r0 = rf(newMovie)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Movie)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(models.Movie) error); ok {
+        r1 = rf(newMovie)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
 // Find provides a mock function with given fields: id
 func (_m *MovieReaderWriter) Find(id int) (*models.Movie, error) {
     ret := _m.Called(id)
@@ -58,6 +81,52 @@ func (_m *MovieReaderWriter) Find(id int) (*models.Movie, error) {
     return r0, r1
 }
 
+// FindByName provides a mock function with given fields: name, nocase
+func (_m *MovieReaderWriter) FindByName(name string, nocase bool) (*models.Movie, error) {
+    ret := _m.Called(name, nocase)
+
+    var r0 *models.Movie
+    if rf, ok := ret.Get(0).(func(string, bool) *models.Movie); ok {
+        r0 = rf(name, nocase)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Movie)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(string, bool) error); ok {
+        r1 = rf(name, nocase)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
+// FindByNames provides a mock function with given fields: names, nocase
+func (_m *MovieReaderWriter) FindByNames(names []string, nocase bool) ([]*models.Movie, error) {
+    ret := _m.Called(names, nocase)
+
+    var r0 []*models.Movie
+    if rf, ok := ret.Get(0).(func([]string, bool) []*models.Movie); ok {
+        r0 = rf(names, nocase)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).([]*models.Movie)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func([]string, bool) error); ok {
+        r1 = rf(names, nocase)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
 // FindMany provides a mock function with given fields: ids
 func (_m *MovieReaderWriter) FindMany(ids []int) ([]*models.Movie, error) {
     ret := _m.Called(ids)
@@ -126,3 +195,63 @@ func (_m *MovieReaderWriter) GetFrontImage(movieID int) ([]byte, error) {
 
     return r0, r1
 }
+
+// Update provides a mock function with given fields: updatedMovie
+func (_m *MovieReaderWriter) Update(updatedMovie models.MoviePartial) (*models.Movie, error) {
+    ret := _m.Called(updatedMovie)
+
+    var r0 *models.Movie
+    if rf, ok := ret.Get(0).(func(models.MoviePartial) *models.Movie); ok {
+        r0 = rf(updatedMovie)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Movie)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(models.MoviePartial) error); ok {
+        r1 = rf(updatedMovie)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
+// UpdateFull provides a mock function with given fields: updatedMovie
+func (_m *MovieReaderWriter) UpdateFull(updatedMovie models.Movie) (*models.Movie, error) {
+    ret := _m.Called(updatedMovie)
+
+    var r0 *models.Movie
+    if rf, ok := ret.Get(0).(func(models.Movie) *models.Movie); ok {
+        r0 = rf(updatedMovie)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Movie)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(models.Movie) error); ok {
+        r1 = rf(updatedMovie)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
+// UpdateMovieImages provides a mock function with given fields: movieID, frontImage, backImage
+func (_m *MovieReaderWriter) UpdateMovieImages(movieID int, frontImage []byte, backImage []byte) error {
+    ret := _m.Called(movieID, frontImage, backImage)
+
+    var r0 error
+    if rf, ok := ret.Get(0).(func(int, []byte, []byte) error); ok {
+        r0 = rf(movieID, frontImage, backImage)
+    } else {
+        r0 = ret.Error(0)
+    }
+
+    return r0
+}
@@ -35,6 +35,52 @@ func (_m *PerformerReaderWriter) All() ([]*models.Performer, error) {
     return r0, r1
 }
 
+// Create provides a mock function with given fields: newPerformer
+func (_m *PerformerReaderWriter) Create(newPerformer models.Performer) (*models.Performer, error) {
+    ret := _m.Called(newPerformer)
+
+    var r0 *models.Performer
+    if rf, ok := ret.Get(0).(func(models.Performer) *models.Performer); ok {
+        r0 = rf(newPerformer)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Performer)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(models.Performer) error); ok {
+        r1 = rf(newPerformer)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
+// FindByNames provides a mock function with given fields: names, nocase
+func (_m *PerformerReaderWriter) FindByNames(names []string, nocase bool) ([]*models.Performer, error) {
+    ret := _m.Called(names, nocase)
+
+    var r0 []*models.Performer
+    if rf, ok := ret.Get(0).(func([]string, bool) []*models.Performer); ok {
+        r0 = rf(names, nocase)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).([]*models.Performer)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func([]string, bool) error); ok {
+        r1 = rf(names, nocase)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
 // FindBySceneID provides a mock function with given fields: sceneID
 func (_m *PerformerReaderWriter) FindBySceneID(sceneID int) ([]*models.Performer, error) {
     ret := _m.Called(sceneID)
@@ -126,3 +172,40 @@ func (_m *PerformerReaderWriter) GetPerformerImage(performerID int) ([]byte, err
 
     return r0, r1
 }
+
+// Update provides a mock function with given fields: updatedPerformer
+func (_m *PerformerReaderWriter) Update(updatedPerformer models.Performer) (*models.Performer, error) {
+    ret := _m.Called(updatedPerformer)
+
+    var r0 *models.Performer
+    if rf, ok := ret.Get(0).(func(models.Performer) *models.Performer); ok {
+        r0 = rf(updatedPerformer)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Performer)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(models.Performer) error); ok {
+        r1 = rf(updatedPerformer)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
+// UpdatePerformerImage provides a mock function with given fields: performerID, image
+func (_m *PerformerReaderWriter) UpdatePerformerImage(performerID int, image []byte) error {
+    ret := _m.Called(performerID, image)
+
+    var r0 error
+    if rf, ok := ret.Get(0).(func(int, []byte) error); ok {
+        r0 = rf(performerID, image)
+    } else {
+        r0 = ret.Error(0)
+    }
+
+    return r0
+}
@@ -12,6 +12,29 @@ type SceneMarkerReaderWriter struct {
     mock.Mock
 }
 
+// Create provides a mock function with given fields: newSceneMarker
+func (_m *SceneMarkerReaderWriter) Create(newSceneMarker models.SceneMarker) (*models.SceneMarker, error) {
+    ret := _m.Called(newSceneMarker)
+
+    var r0 *models.SceneMarker
+    if rf, ok := ret.Get(0).(func(models.SceneMarker) *models.SceneMarker); ok {
+        r0 = rf(newSceneMarker)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.SceneMarker)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(models.SceneMarker) error); ok {
+        r1 = rf(newSceneMarker)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
 // FindBySceneID provides a mock function with given fields: sceneID
 func (_m *SceneMarkerReaderWriter) FindBySceneID(sceneID int) ([]*models.SceneMarker, error) {
     ret := _m.Called(sceneID)
@@ -34,3 +57,26 @@ func (_m *SceneMarkerReaderWriter) FindBySceneID(sceneID int) ([]*models.SceneMa
 
     return r0, r1
 }
+
+// Update provides a mock function with given fields: updatedSceneMarker
+func (_m *SceneMarkerReaderWriter) Update(updatedSceneMarker models.SceneMarker) (*models.SceneMarker, error) {
+    ret := _m.Called(updatedSceneMarker)
+
+    var r0 *models.SceneMarker
+    if rf, ok := ret.Get(0).(func(models.SceneMarker) *models.SceneMarker); ok {
+        r0 = rf(updatedSceneMarker)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.SceneMarker)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(models.SceneMarker) error); ok {
+        r1 = rf(updatedSceneMarker)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
@@ -35,6 +35,75 @@ func (_m *SceneReaderWriter) All() ([]*models.Scene, error) {
     return r0, r1
 }
 
+// Create provides a mock function with given fields: newScene
+func (_m *SceneReaderWriter) Create(newScene models.Scene) (*models.Scene, error) {
+    ret := _m.Called(newScene)
+
+    var r0 *models.Scene
+    if rf, ok := ret.Get(0).(func(models.Scene) *models.Scene); ok {
+        r0 = rf(newScene)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Scene)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(models.Scene) error); ok {
+        r1 = rf(newScene)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
+// FindByChecksum provides a mock function with given fields: checksum
+func (_m *SceneReaderWriter) FindByChecksum(checksum string) (*models.Scene, error) {
+    ret := _m.Called(checksum)
+
+    var r0 *models.Scene
+    if rf, ok := ret.Get(0).(func(string) *models.Scene); ok {
+        r0 = rf(checksum)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Scene)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(string) error); ok {
+        r1 = rf(checksum)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
+// FindByOSHash provides a mock function with given fields: oshash
+func (_m *SceneReaderWriter) FindByOSHash(oshash string) (*models.Scene, error) {
+    ret := _m.Called(oshash)
+
+    var r0 *models.Scene
+    if rf, ok := ret.Get(0).(func(string) *models.Scene); ok {
+        r0 = rf(oshash)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Scene)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(string) error); ok {
+        r1 = rf(oshash)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
 // FindMany provides a mock function with given fields: ids
 func (_m *SceneReaderWriter) FindMany(ids []int) ([]*models.Scene, error) {
     ret := _m.Called(ids)
@@ -80,3 +149,63 @@ func (_m *SceneReaderWriter) GetSceneCover(sceneID int) ([]byte, error) {
 
     return r0, r1
 }
+
+// Update provides a mock function with given fields: updatedScene
+func (_m *SceneReaderWriter) Update(updatedScene models.ScenePartial) (*models.Scene, error) {
+    ret := _m.Called(updatedScene)
+
+    var r0 *models.Scene
+    if rf, ok := ret.Get(0).(func(models.ScenePartial) *models.Scene); ok {
+        r0 = rf(updatedScene)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Scene)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(models.ScenePartial) error); ok {
+        r1 = rf(updatedScene)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
+// UpdateFull provides a mock function with given fields: updatedScene
+func (_m *SceneReaderWriter) UpdateFull(updatedScene models.Scene) (*models.Scene, error) {
+    ret := _m.Called(updatedScene)
+
+    var r0 *models.Scene
+    if rf, ok := ret.Get(0).(func(models.Scene) *models.Scene); ok {
+        r0 = rf(updatedScene)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Scene)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(models.Scene) error); ok {
+        r1 = rf(updatedScene)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
+// UpdateSceneCover provides a mock function with given fields: sceneID, cover
+func (_m *SceneReaderWriter) UpdateSceneCover(sceneID int, cover []byte) error {
+    ret := _m.Called(sceneID, cover)
+
+    var r0 error
+    if rf, ok := ret.Get(0).(func(int, []byte) error); ok {
+        r0 = rf(sceneID, cover)
+    } else {
+        r0 = ret.Error(0)
+    }
+
+    return r0
+}
@@ -35,6 +35,29 @@ func (_m *StudioReaderWriter) All() ([]*models.Studio, error) {
     return r0, r1
 }
 
+// Create provides a mock function with given fields: newStudio
+func (_m *StudioReaderWriter) Create(newStudio models.Studio) (*models.Studio, error) {
+    ret := _m.Called(newStudio)
+
+    var r0 *models.Studio
+    if rf, ok := ret.Get(0).(func(models.Studio) *models.Studio); ok {
+        r0 = rf(newStudio)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Studio)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(models.Studio) error); ok {
+        r1 = rf(newStudio)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
 // Find provides a mock function with given fields: id
 func (_m *StudioReaderWriter) Find(id int) (*models.Studio, error) {
     ret := _m.Called(id)
@@ -58,6 +81,29 @@ func (_m *StudioReaderWriter) Find(id int) (*models.Studio, error) {
     return r0, r1
 }
 
+// FindByName provides a mock function with given fields: name, nocase
+func (_m *StudioReaderWriter) FindByName(name string, nocase bool) (*models.Studio, error) {
+    ret := _m.Called(name, nocase)
+
+    var r0 *models.Studio
+    if rf, ok := ret.Get(0).(func(string, bool) *models.Studio); ok {
+        r0 = rf(name, nocase)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Studio)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(string, bool) error); ok {
+        r1 = rf(name, nocase)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
 // FindMany provides a mock function with given fields: ids
 func (_m *StudioReaderWriter) FindMany(ids []int) ([]*models.Studio, error) {
     ret := _m.Called(ids)
@@ -103,3 +149,63 @@ func (_m *StudioReaderWriter) GetStudioImage(studioID int) ([]byte, error) {
 
     return r0, r1
 }
+
+// Update provides a mock function with given fields: updatedStudio
+func (_m *StudioReaderWriter) Update(updatedStudio models.StudioPartial) (*models.Studio, error) {
+    ret := _m.Called(updatedStudio)
+
+    var r0 *models.Studio
+    if rf, ok := ret.Get(0).(func(models.StudioPartial) *models.Studio); ok {
+        r0 = rf(updatedStudio)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Studio)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(models.StudioPartial) error); ok {
+        r1 = rf(updatedStudio)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
+// UpdateFull provides a mock function with given fields: updatedStudio
+func (_m *StudioReaderWriter) UpdateFull(updatedStudio models.Studio) (*models.Studio, error) {
+    ret := _m.Called(updatedStudio)
+
+    var r0 *models.Studio
+    if rf, ok := ret.Get(0).(func(models.Studio) *models.Studio); ok {
+        r0 = rf(updatedStudio)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Studio)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(models.Studio) error); ok {
+        r1 = rf(updatedStudio)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
+// UpdateStudioImage provides a mock function with given fields: studioID, image
+func (_m *StudioReaderWriter) UpdateStudioImage(studioID int, image []byte) error {
+    ret := _m.Called(studioID, image)
+
+    var r0 error
+    if rf, ok := ret.Get(0).(func(int, []byte) error); ok {
+        r0 = rf(studioID, image)
+    } else {
+        r0 = ret.Error(0)
+    }
+
+    return r0
+}
@@ -35,6 +35,29 @@ func (_m *TagReaderWriter) All() ([]*models.Tag, error) {
     return r0, r1
 }
 
+// Create provides a mock function with given fields: newTag
+func (_m *TagReaderWriter) Create(newTag models.Tag) (*models.Tag, error) {
+    ret := _m.Called(newTag)
+
+    var r0 *models.Tag
+    if rf, ok := ret.Get(0).(func(models.Tag) *models.Tag); ok {
+        r0 = rf(newTag)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Tag)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(models.Tag) error); ok {
+        r1 = rf(newTag)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
 // Find provides a mock function with given fields: id
 func (_m *TagReaderWriter) Find(id int) (*models.Tag, error) {
     ret := _m.Called(id)
@@ -58,6 +81,52 @@ func (_m *TagReaderWriter) Find(id int) (*models.Tag, error) {
     return r0, r1
 }
 
+// FindByName provides a mock function with given fields: name, nocase
+func (_m *TagReaderWriter) FindByName(name string, nocase bool) (*models.Tag, error) {
+    ret := _m.Called(name, nocase)
+
+    var r0 *models.Tag
+    if rf, ok := ret.Get(0).(func(string, bool) *models.Tag); ok {
+        r0 = rf(name, nocase)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Tag)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(string, bool) error); ok {
+        r1 = rf(name, nocase)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
+// FindByNames provides a mock function with given fields: names, nocase
+func (_m *TagReaderWriter) FindByNames(names []string, nocase bool) ([]*models.Tag, error) {
+    ret := _m.Called(names, nocase)
+
+    var r0 []*models.Tag
+    if rf, ok := ret.Get(0).(func([]string, bool) []*models.Tag); ok {
+        r0 = rf(names, nocase)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).([]*models.Tag)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func([]string, bool) error); ok {
+        r1 = rf(names, nocase)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
 // FindBySceneID provides a mock function with given fields: sceneID
 func (_m *TagReaderWriter) FindBySceneID(sceneID int) ([]*models.Tag, error) {
     ret := _m.Called(sceneID)
@@ -149,3 +218,40 @@ func (_m *TagReaderWriter) GetTagImage(tagID int) ([]byte, error) {
 
     return r0, r1
 }
+
+// Update provides a mock function with given fields: updatedTag
+func (_m *TagReaderWriter) Update(updatedTag models.Tag) (*models.Tag, error) {
+    ret := _m.Called(updatedTag)
+
+    var r0 *models.Tag
+    if rf, ok := ret.Get(0).(func(models.Tag) *models.Tag); ok {
+        r0 = rf(updatedTag)
+    } else {
+        if ret.Get(0) != nil {
+            r0 = ret.Get(0).(*models.Tag)
+        }
+    }
+
+    var r1 error
+    if rf, ok := ret.Get(1).(func(models.Tag) error); ok {
+        r1 = rf(updatedTag)
+    } else {
+        r1 = ret.Error(1)
+    }
+
+    return r0, r1
+}
+
+// UpdateTagImage provides a mock function with given fields: tagID, image
+func (_m *TagReaderWriter) UpdateTagImage(tagID int, image []byte) error {
+    ret := _m.Called(tagID, image)
+
+    var r0 error
+    if rf, ok := ret.Get(0).(func(int, []byte) error); ok {
+        r0 = rf(tagID, image)
+    } else {
+        r0 = ret.Error(0)
+    }
+
+    return r0
+}
@@ -2,6 +2,9 @@ package models
 
 import (
     "database/sql"
+    "time"
+
+    "github.com/stashapp/stash/pkg/utils"
 )
 
 type Movie struct {
@@ -37,3 +40,13 @@ type MoviePartial struct {
 }
 
 var DefaultMovieImage = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAYAAABw4pVUAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAA3XAAAN1wFCKJt4AAAAB3RJTUUH4wgVBQsJl1CMZAAAASJJREFUeNrt3N0JwyAYhlEj3cj9R3Cm5rbkqtAP+qrnGaCYHPwJpLlaa++mmLpbAERAgAgIEAEBIiBABERAgAgIEAEBIiBABERAgAgIEAHZuVflj40x4i94zhk9vqsVvEq6AsQqMP1EjORx20OACAgQRRx7T+zzcFBxcjNDfoB4ntQqTm5Awo7MlqywZxcgYQ+RlqywJ3ozJAQCSBiEJSsQA0gYBpDAgAARECACAkRAgAgIEAERECACAmSjUv6eAOSB8m8YIGGzBUjYbAESBgMkbBkDEjZbgITBAClcxiqQvEoatreYIWEBASIgJ4Gkf11ntXH3nS9uxfGWfJ5J9hAgAgJEQAQEiIAAERAgAgJEQAQEiIAAERAgAgJEQAQEiL7qBuc6RKLHxr0CAAAAAElFTkSuQmCC"
+
+func NewMovie(name string) *Movie {
+    currentTime := time.Now()
+    return &Movie{
+        Checksum:  utils.MD5FromString(name),
+        Name:      sql.NullString{String: name, Valid: true},
+        CreatedAt: SQLiteTimestamp{Timestamp: currentTime},
+        UpdatedAt: SQLiteTimestamp{Timestamp: currentTime},
+    }
+}
@@ -2,6 +2,9 @@ package models
 
 import (
     "database/sql"
+    "time"
+
+    "github.com/stashapp/stash/pkg/utils"
 )
 
 type Performer struct {
@@ -27,3 +30,14 @@ type Performer struct {
     CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
     UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
 }
+
+func NewPerformer(name string) *Performer {
+    currentTime := time.Now()
+    return &Performer{
+        Checksum:  utils.MD5FromString(name),
+        Name:      sql.NullString{String: name, Valid: true},
+        Favorite:  sql.NullBool{Bool: false, Valid: true},
+        CreatedAt: SQLiteTimestamp{Timestamp: currentTime},
+        UpdatedAt: SQLiteTimestamp{Timestamp: currentTime},
+    }
+}
@@ -2,6 +2,9 @@ package models
 
 import (
     "database/sql"
+    "time"
+
+    "github.com/stashapp/stash/pkg/utils"
 )
 
 type Studio struct {
@@ -25,3 +28,13 @@ type StudioPartial struct {
 }
 
 var DefaultStudioImage = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAYAAABw4pVUAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAA3XAAAN1wFCKJt4AAAAB3RJTUUH4wgVBQsJl1CMZAAAASJJREFUeNrt3N0JwyAYhlEj3cj9R3Cm5rbkqtAP+qrnGaCYHPwJpLlaa++mmLpbAERAgAgIEAEBIiBABERAgAgIEAEBIiBABERAgAgIEAHZuVflj40x4i94zhk9vqsVvEq6AsQqMP1EjORx20OACAgQRRx7T+zzcFBxcjNDfoB4ntQqTm5Awo7MlqywZxcgYQ+RlqywJ3ozJAQCSBiEJSsQA0gYBpDAgAARECACAkRAgAgIEAERECACAmSjUv6eAOSB8m8YIGGzBUjYbAESBgMkbBkDEjZbgITBAClcxiqQvEoatreYIWEBASIgJ4Gkf11ntXH3nS9uxfGWfJ5J9hAgAgJEQAQEiIAAERAgAgJEQAQEiIAAERAgAgJEQAQEiL7qBuc6RKLHxr0CAAAAAElFTkSuQmCC"
+
+func NewStudio(name string) *Studio {
+    currentTime := time.Now()
+    return &Studio{
+        Checksum:  utils.MD5FromString(name),
+        Name:      sql.NullString{String: name, Valid: true},
+        CreatedAt: SQLiteTimestamp{Timestamp: currentTime},
+        UpdatedAt: SQLiteTimestamp{Timestamp: currentTime},
+    }
+}
@@ -1,5 +1,7 @@
 package models
 
+import "time"
+
 type Tag struct {
     ID        int    `db:"id" json:"id"`
     Name      string `db:"name" json:"name"` // TODO make schema not null
@@ -7,6 +9,15 @@ type Tag struct {
     UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
 }
 
+func NewTag(name string) *Tag {
+    currentTime := time.Now()
+    return &Tag{
+        Name:      name,
+        CreatedAt: SQLiteTimestamp{Timestamp: currentTime},
+        UpdatedAt: SQLiteTimestamp{Timestamp: currentTime},
+    }
+}
+
 // Original Tag image from: https://fontawesome.com/icons/tag?style=solid
 // Modified to change color and rotate
 // Licensed under CC Attribution 4.0: https://fontawesome.com/license
@@ -8,8 +8,8 @@ type MovieReader interface {
     Find(id int) (*Movie, error)
     FindMany(ids []int) ([]*Movie, error)
     // FindBySceneID(sceneID int) ([]*Movie, error)
-    // FindByName(name string, nocase bool) (*Movie, error)
-    // FindByNames(names []string, nocase bool) ([]*Movie, error)
+    FindByName(name string, nocase bool) (*Movie, error)
+    FindByNames(names []string, nocase bool) ([]*Movie, error)
     All() ([]*Movie, error)
     // AllSlim() ([]*Movie, error)
     // Query(movieFilter *MovieFilterType, findFilter *FindFilterType) ([]*Movie, int)
@@ -18,10 +18,11 @@ type MovieReader interface {
 }
 
 type MovieWriter interface {
-    // Create(newMovie Movie) (*Movie, error)
-    // Update(updatedMovie MoviePartial) (*Movie, error)
+    Create(newMovie Movie) (*Movie, error)
+    Update(updatedMovie MoviePartial) (*Movie, error)
+    UpdateFull(updatedMovie Movie) (*Movie, error)
     // Destroy(id string) error
-    // UpdateMovieImages(movieID int, frontImage []byte, backImage []byte) error
+    UpdateMovieImages(movieID int, frontImage []byte, backImage []byte) error
     // DestroyMovieImages(movieID int) error
 }
 
@@ -50,6 +51,14 @@ func (t *movieReaderWriter) FindMany(ids []int) ([]*Movie, error) {
     return t.qb.FindMany(ids)
 }
 
+func (t *movieReaderWriter) FindByName(name string, nocase bool) (*Movie, error) {
+    return t.qb.FindByName(name, t.tx, nocase)
+}
+
+func (t *movieReaderWriter) FindByNames(names []string, nocase bool) ([]*Movie, error) {
+    return t.qb.FindByNames(names, t.tx, nocase)
+}
+
 func (t *movieReaderWriter) All() ([]*Movie, error) {
     return t.qb.All()
 }
@@ -61,3 +70,19 @@ func (t *movieReaderWriter) GetFrontImage(movieID int) ([]byte, error) {
 func (t *movieReaderWriter) GetBackImage(movieID int) ([]byte, error) {
     return t.qb.GetBackImage(movieID, t.tx)
 }
+
+func (t *movieReaderWriter) Create(newMovie Movie) (*Movie, error) {
+    return t.qb.Create(newMovie, t.tx)
+}
+
+func (t *movieReaderWriter) Update(updatedMovie MoviePartial) (*Movie, error) {
+    return t.qb.Update(updatedMovie, t.tx)
+}
+
+func (t *movieReaderWriter) UpdateFull(updatedMovie Movie) (*Movie, error) {
+    return t.qb.UpdateFull(updatedMovie, t.tx)
+}
+
+func (t *movieReaderWriter) UpdateMovieImages(movieID int, frontImage []byte, backImage []byte) error {
+    return t.qb.UpdateMovieImages(movieID, frontImage, backImage, t.tx)
+}
@@ -9,7 +9,7 @@ type PerformerReader interface {
     FindMany(ids []int) ([]*Performer, error)
     FindBySceneID(sceneID int) ([]*Performer, error)
     FindNamesBySceneID(sceneID int) ([]*Performer, error)
-    // FindByNames(names []string, nocase bool) ([]*Performer, error)
+    FindByNames(names []string, nocase bool) ([]*Performer, error)
     // Count() (int, error)
     All() ([]*Performer, error)
     // AllSlim() ([]*Performer, error)
@@ -18,10 +18,10 @@ type PerformerReader interface {
 }
 
 type PerformerWriter interface {
-    // Create(newPerformer Performer) (*Performer, error)
-    // Update(updatedPerformer Performer) (*Performer, error)
+    Create(newPerformer Performer) (*Performer, error)
+    Update(updatedPerformer Performer) (*Performer, error)
     // Destroy(id string) error
-    // UpdatePerformerImage(performerID int, image []byte) error
+    UpdatePerformerImage(performerID int, image []byte) error
     // DestroyPerformerImage(performerID int) error
 }
 
@@ -46,6 +46,10 @@ func (t *performerReaderWriter) FindMany(ids []int) ([]*Performer, error) {
     return t.qb.FindMany(ids)
 }
 
+func (t *performerReaderWriter) FindByNames(names []string, nocase bool) ([]*Performer, error) {
+    return t.qb.FindByNames(names, t.tx, nocase)
+}
+
 func (t *performerReaderWriter) All() ([]*Performer, error) {
     return t.qb.All()
 }
@@ -61,3 +65,15 @@ func (t *performerReaderWriter) FindBySceneID(id int) ([]*Performer, error) {
 func (t *performerReaderWriter) FindNamesBySceneID(sceneID int) ([]*Performer, error) {
     return t.qb.FindNameBySceneID(sceneID, t.tx)
 }
+
+func (t *performerReaderWriter) Create(newPerformer Performer) (*Performer, error) {
+    return t.qb.Create(newPerformer, t.tx)
+}
+
+func (t *performerReaderWriter) Update(updatedPerformer Performer) (*Performer, error) {
+    return t.qb.Update(updatedPerformer, t.tx)
+}
+
+func (t *performerReaderWriter) UpdatePerformerImage(performerID int, image []byte) error {
+    return t.qb.UpdatePerformerImage(performerID, image, t.tx)
+}
@@ -49,6 +49,19 @@ func (qb *MovieQueryBuilder) Update(updatedMovie MoviePartial, tx *sqlx.Tx) (*Mo
     return qb.Find(updatedMovie.ID, tx)
 }
 
+func (qb *MovieQueryBuilder) UpdateFull(updatedMovie Movie, tx *sqlx.Tx) (*Movie, error) {
+    ensureTx(tx)
+    _, err := tx.NamedExec(
+        `UPDATE movies SET `+SQLGenKeys(updatedMovie)+` WHERE movies.id = :id`,
+        updatedMovie,
+    )
+    if err != nil {
+        return nil, err
+    }
+
+    return qb.Find(updatedMovie.ID, tx)
+}
+
 func (qb *MovieQueryBuilder) Destroy(id string, tx *sqlx.Tx) error {
     // delete movie from movies_scenes
@@ -93,6 +93,19 @@ func (qb *SceneQueryBuilder) Update(updatedScene ScenePartial, tx *sqlx.Tx) (*Sc
     return qb.find(updatedScene.ID, tx)
 }
 
+func (qb *SceneQueryBuilder) UpdateFull(updatedScene Scene, tx *sqlx.Tx) (*Scene, error) {
+    ensureTx(tx)
+    _, err := tx.NamedExec(
+        `UPDATE scenes SET `+SQLGenKeys(updatedScene)+` WHERE scenes.id = :id`,
+        updatedScene,
+    )
+    if err != nil {
+        return nil, err
+    }
+
+    return qb.find(updatedScene.ID, tx)
+}
+
 func (qb *SceneQueryBuilder) IncrementOCounter(id int, tx *sqlx.Tx) (int, error) {
     ensureTx(tx)
     _, err := tx.Exec(
@@ -53,6 +53,23 @@ func (qb *StudioQueryBuilder) Update(updatedStudio StudioPartial, tx *sqlx.Tx) (
     return &ret, nil
 }
 
+func (qb *StudioQueryBuilder) UpdateFull(updatedStudio Studio, tx *sqlx.Tx) (*Studio, error) {
+    ensureTx(tx)
+    _, err := tx.NamedExec(
+        `UPDATE studios SET `+SQLGenKeys(updatedStudio)+` WHERE studios.id = :id`,
+        updatedStudio,
+    )
+    if err != nil {
+        return nil, err
+    }
+
+    var ret Studio
+    if err := tx.Get(&ret, `SELECT * FROM studios WHERE id = ? LIMIT 1`, updatedStudio.ID); err != nil {
+        return nil, err
+    }
+    return &ret, nil
+}
+
 func (qb *StudioQueryBuilder) Destroy(id string, tx *sqlx.Tx) error {
     // remove studio from scenes
     _, err := tx.Exec("UPDATE scenes SET studio_id = null WHERE studio_id = ?", id)
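The UpdateFull methods added to the movie, scene and studio query builders above all follow the same shape: a named-parameter UPDATE over the generated column list (SQLGenKeys), then a re-read of the row inside the same transaction. Below is a minimal self-contained sketch of that shape using sqlx directly; the items table, the Item struct and the hard-coded SET clause are illustrative stand-ins for SQLGenKeys, not part of this diff.

package main

import (
    "log"

    "github.com/jmoiron/sqlx"
    _ "github.com/mattn/go-sqlite3"
)

// Item stands in for Movie/Scene/Studio: every db-tagged column is overwritten.
type Item struct {
    ID   int    `db:"id"`
    Name string `db:"name"`
}

// updateFull mirrors the UpdateFull pattern: full row update via NamedExec,
// then re-read the row in the same transaction so the caller gets fresh state.
func updateFull(tx *sqlx.Tx, updated Item) (*Item, error) {
    if _, err := tx.NamedExec(`UPDATE items SET name = :name WHERE items.id = :id`, updated); err != nil {
        return nil, err
    }

    var ret Item
    if err := tx.Get(&ret, `SELECT * FROM items WHERE id = ? LIMIT 1`, updated.ID); err != nil {
        return nil, err
    }
    return &ret, nil
}

func main() {
    db := sqlx.MustOpen("sqlite3", ":memory:")
    db.MustExec(`CREATE TABLE items (id INTEGER PRIMARY KEY, name TEXT)`)
    db.MustExec(`INSERT INTO items (id, name) VALUES (1, 'old')`)

    tx := db.MustBegin()
    item, err := updateFull(tx, Item{ID: 1, Name: "new"})
    if err != nil {
        log.Fatal(err)
    }
    if err := tx.Commit(); err != nil {
        log.Fatal(err)
    }
    log.Printf("updated: %+v", item)
}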
@@ -7,8 +7,8 @@ import (
 type SceneReader interface {
     // Find(id int) (*Scene, error)
     FindMany(ids []int) ([]*Scene, error)
-    // FindByChecksum(checksum string) (*Scene, error)
-    // FindByOSHash(oshash string) (*Scene, error)
+    FindByChecksum(checksum string) (*Scene, error)
+    FindByOSHash(oshash string) (*Scene, error)
     // FindByPath(path string) (*Scene, error)
     // FindByPerformerID(performerID int) ([]*Scene, error)
     // CountByPerformerID(performerID int) (int, error)
@@ -30,8 +30,9 @@ type SceneReader interface {
 }
 
 type SceneWriter interface {
-    // Create(newScene Scene) (*Scene, error)
-    // Update(updatedScene ScenePartial) (*Scene, error)
+    Create(newScene Scene) (*Scene, error)
+    Update(updatedScene ScenePartial) (*Scene, error)
+    UpdateFull(updatedScene Scene) (*Scene, error)
     // IncrementOCounter(id int) (int, error)
     // DecrementOCounter(id int) (int, error)
     // ResetOCounter(id int) (int, error)
@@ -39,7 +40,7 @@ type SceneWriter interface {
     // UpdateFormat(id int, format string) error
     // UpdateOSHash(id int, oshash string) error
     // UpdateChecksum(id int, checksum string) error
-    // UpdateSceneCover(sceneID int, cover []byte) error
+    UpdateSceneCover(sceneID int, cover []byte) error
     // DestroySceneCover(sceneID int) error
 }
 
@@ -64,6 +65,14 @@ func (t *sceneReaderWriter) FindMany(ids []int) ([]*Scene, error) {
     return t.qb.FindMany(ids)
 }
 
+func (t *sceneReaderWriter) FindByChecksum(checksum string) (*Scene, error) {
+    return t.qb.FindByChecksum(checksum)
+}
+
+func (t *sceneReaderWriter) FindByOSHash(oshash string) (*Scene, error) {
+    return t.qb.FindByOSHash(oshash)
+}
+
 func (t *sceneReaderWriter) All() ([]*Scene, error) {
     return t.qb.All()
 }
@@ -71,3 +80,19 @@ func (t *sceneReaderWriter) All() ([]*Scene, error) {
 func (t *sceneReaderWriter) GetSceneCover(sceneID int) ([]byte, error) {
     return t.qb.GetSceneCover(sceneID, t.tx)
 }
+
+func (t *sceneReaderWriter) Create(newScene Scene) (*Scene, error) {
+    return t.qb.Create(newScene, t.tx)
+}
+
+func (t *sceneReaderWriter) Update(updatedScene ScenePartial) (*Scene, error) {
+    return t.qb.Update(updatedScene, t.tx)
+}
+
+func (t *sceneReaderWriter) UpdateFull(updatedScene Scene) (*Scene, error) {
+    return t.qb.UpdateFull(updatedScene, t.tx)
+}
+
+func (t *sceneReaderWriter) UpdateSceneCover(sceneID int, cover []byte) error {
+    return t.qb.UpdateSceneCover(sceneID, cover, t.tx)
+}
@@ -15,8 +15,8 @@ type SceneMarkerReader interface {
 }
 
 type SceneMarkerWriter interface {
-    // Create(newSceneMarker SceneMarker) (*SceneMarker, error)
-    // Update(updatedSceneMarker SceneMarker) (*SceneMarker, error)
+    Create(newSceneMarker SceneMarker) (*SceneMarker, error)
+    Update(updatedSceneMarker SceneMarker) (*SceneMarker, error)
     // Destroy(id string) error
 }
 
@@ -40,3 +40,11 @@ type sceneMarkerReaderWriter struct {
 func (t *sceneMarkerReaderWriter) FindBySceneID(sceneID int) ([]*SceneMarker, error) {
     return t.qb.FindBySceneID(sceneID, t.tx)
 }
+
+func (t *sceneMarkerReaderWriter) Create(newSceneMarker SceneMarker) (*SceneMarker, error) {
+    return t.qb.Create(newSceneMarker, t.tx)
+}
+
+func (t *sceneMarkerReaderWriter) Update(updatedSceneMarker SceneMarker) (*SceneMarker, error) {
+    return t.qb.Update(updatedSceneMarker, t.tx)
+}
@@ -2,9 +2,10 @@ package models
 
 import (
     "database/sql/driver"
+    "time"
 
     "github.com/stashapp/stash/pkg/logger"
     "github.com/stashapp/stash/pkg/utils"
-    "time"
 )
 
 type SQLiteDate struct {
@@ -32,6 +33,11 @@ func (t *SQLiteDate) Scan(value interface{}) error {
 
 // Value implements the driver Valuer interface.
 func (t SQLiteDate) Value() (driver.Value, error) {
+    // handle empty string
+    if t.String == "" {
+        return "", nil
+    }
+
     result, err := utils.ParseDateStringAsFormat(t.String, "2006-01-02")
     if err != nil {
         logger.Debugf("sqlite date conversion error: %s", err.Error())
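The Value change above short-circuits empty dates so they are stored as an empty string instead of being run through date parsing. A small stand-in type illustrating the same Valuer behaviour (demoDate is hypothetical and omits the real type's reformatting via utils.ParseDateStringAsFormat):

package main

import (
    "database/sql/driver"
    "fmt"
)

// demoDate is a simplified stand-in for models.SQLiteDate.
type demoDate struct {
    String string
    Valid  bool
}

// Value implements driver.Valuer: empty dates become "" rather than a parse error.
func (t demoDate) Value() (driver.Value, error) {
    if t.String == "" {
        return "", nil
    }
    return t.String, nil
}

func main() {
    v, _ := demoDate{}.Value()
    fmt.Printf("%q\n", v) // ""
    v, _ = demoDate{String: "2001-01-01", Valid: true}.Value()
    fmt.Printf("%q\n", v) // "2001-01-01"
}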
@@ -9,7 +9,7 @@ type StudioReader interface {
     FindMany(ids []int) ([]*Studio, error)
     // FindChildren(id int) ([]*Studio, error)
     // FindBySceneID(sceneID int) (*Studio, error)
-    // FindByName(name string, nocase bool) (*Studio, error)
+    FindByName(name string, nocase bool) (*Studio, error)
     // Count() (int, error)
     All() ([]*Studio, error)
     // AllSlim() ([]*Studio, error)
@@ -18,10 +18,11 @@ type StudioReader interface {
 }
 
 type StudioWriter interface {
-    // Create(newStudio Studio) (*Studio, error)
-    // Update(updatedStudio StudioPartial) (*Studio, error)
+    Create(newStudio Studio) (*Studio, error)
+    Update(updatedStudio StudioPartial) (*Studio, error)
+    UpdateFull(updatedStudio Studio) (*Studio, error)
     // Destroy(id string) error
-    // UpdateStudioImage(studioID int, image []byte) error
+    UpdateStudioImage(studioID int, image []byte) error
     // DestroyStudioImage(studioID int) error
 }
 
@@ -50,6 +51,10 @@ func (t *studioReaderWriter) FindMany(ids []int) ([]*Studio, error) {
     return t.qb.FindMany(ids)
 }
 
+func (t *studioReaderWriter) FindByName(name string, nocase bool) (*Studio, error) {
+    return t.qb.FindByName(name, t.tx, nocase)
+}
+
 func (t *studioReaderWriter) All() ([]*Studio, error) {
     return t.qb.All()
 }
@@ -57,3 +62,19 @@ func (t *studioReaderWriter) All() ([]*Studio, error) {
 func (t *studioReaderWriter) GetStudioImage(studioID int) ([]byte, error) {
     return t.qb.GetStudioImage(studioID, t.tx)
 }
+
+func (t *studioReaderWriter) Create(newStudio Studio) (*Studio, error) {
+    return t.qb.Create(newStudio, t.tx)
+}
+
+func (t *studioReaderWriter) Update(updatedStudio StudioPartial) (*Studio, error) {
+    return t.qb.Update(updatedStudio, t.tx)
+}
+
+func (t *studioReaderWriter) UpdateFull(updatedStudio Studio) (*Studio, error) {
+    return t.qb.UpdateFull(updatedStudio, t.tx)
+}
+
+func (t *studioReaderWriter) UpdateStudioImage(studioID int, image []byte) error {
+    return t.qb.UpdateStudioImage(studioID, image, t.tx)
+}
@@ -9,8 +9,8 @@ type TagReader interface {
     FindMany(ids []int) ([]*Tag, error)
     FindBySceneID(sceneID int) ([]*Tag, error)
    FindBySceneMarkerID(sceneMarkerID int) ([]*Tag, error)
-    // FindByName(name string, nocase bool) (*Tag, error)
-    // FindByNames(names []string, nocase bool) ([]*Tag, error)
+    FindByName(name string, nocase bool) (*Tag, error)
+    FindByNames(names []string, nocase bool) ([]*Tag, error)
     // Count() (int, error)
     All() ([]*Tag, error)
     // AllSlim() ([]*Tag, error)
@@ -19,10 +19,10 @@ type TagReader interface {
 }
 
 type TagWriter interface {
-    // Create(newTag Tag) (*Tag, error)
-    // Update(updatedTag Tag) (*Tag, error)
+    Create(newTag Tag) (*Tag, error)
+    Update(updatedTag Tag) (*Tag, error)
     // Destroy(id string) error
-    // UpdateTagImage(tagID int, image []byte) error
+    UpdateTagImage(tagID int, image []byte) error
     // DestroyTagImage(tagID int) error
 }
 
@@ -59,6 +59,14 @@ func (t *tagReaderWriter) FindBySceneMarkerID(sceneMarkerID int) ([]*Tag, error)
     return t.qb.FindBySceneMarkerID(sceneMarkerID, t.tx)
 }
 
+func (t *tagReaderWriter) FindByName(name string, nocase bool) (*Tag, error) {
+    return t.qb.FindByName(name, t.tx, nocase)
+}
+
+func (t *tagReaderWriter) FindByNames(names []string, nocase bool) ([]*Tag, error) {
+    return t.qb.FindByNames(names, t.tx, nocase)
+}
+
 func (t *tagReaderWriter) GetTagImage(tagID int) ([]byte, error) {
     return t.qb.GetTagImage(tagID, t.tx)
 }
@@ -66,3 +74,15 @@ func (t *tagReaderWriter) GetTagImage(tagID int) ([]byte, error) {
 func (t *tagReaderWriter) FindBySceneID(sceneID int) ([]*Tag, error) {
     return t.qb.FindBySceneID(sceneID, t.tx)
 }
+
+func (t *tagReaderWriter) Create(newTag Tag) (*Tag, error) {
+    return t.qb.Create(newTag, t.tx)
+}
+
+func (t *tagReaderWriter) Update(updatedTag Tag) (*Tag, error) {
+    return t.qb.Update(updatedTag, t.tx)
+}
+
+func (t *tagReaderWriter) UpdateTagImage(tagID int, image []byte) error {
+    return t.qb.UpdateTagImage(tagID, image, t.tx)
+}
pkg/movie/import.go (new file, 166 lines)
@@ -0,0 +1,166 @@
package movie

import (
    "database/sql"
    "fmt"

    "github.com/stashapp/stash/pkg/manager/jsonschema"
    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/utils"
)

type Importer struct {
    ReaderWriter        models.MovieReaderWriter
    StudioWriter        models.StudioReaderWriter
    Input               jsonschema.Movie
    MissingRefBehaviour models.ImportMissingRefEnum

    movie          models.Movie
    frontImageData []byte
    backImageData  []byte
}

func (i *Importer) PreImport() error {
    i.movie = i.movieJSONToMovie(i.Input)

    if err := i.populateStudio(); err != nil {
        return err
    }

    var err error
    if len(i.Input.FrontImage) > 0 {
        _, i.frontImageData, err = utils.ProcessBase64Image(i.Input.FrontImage)
        if err != nil {
            return fmt.Errorf("invalid front_image: %s", err.Error())
        }
    }
    if len(i.Input.BackImage) > 0 {
        _, i.backImageData, err = utils.ProcessBase64Image(i.Input.BackImage)
        if err != nil {
            return fmt.Errorf("invalid back_image: %s", err.Error())
        }
    }

    return nil
}

func (i *Importer) movieJSONToMovie(movieJSON jsonschema.Movie) models.Movie {
    checksum := utils.MD5FromString(movieJSON.Name)

    newMovie := models.Movie{
        Checksum:  checksum,
        Name:      sql.NullString{String: movieJSON.Name, Valid: true},
        Aliases:   sql.NullString{String: movieJSON.Aliases, Valid: true},
        Date:      models.SQLiteDate{String: movieJSON.Date, Valid: true},
        Director:  sql.NullString{String: movieJSON.Director, Valid: true},
        Synopsis:  sql.NullString{String: movieJSON.Synopsis, Valid: true},
        URL:       sql.NullString{String: movieJSON.URL, Valid: true},
        CreatedAt: models.SQLiteTimestamp{Timestamp: movieJSON.CreatedAt.GetTime()},
        UpdatedAt: models.SQLiteTimestamp{Timestamp: movieJSON.UpdatedAt.GetTime()},
    }

    if movieJSON.Rating != 0 {
        newMovie.Rating = sql.NullInt64{Int64: int64(movieJSON.Rating), Valid: true}
    }

    if movieJSON.Duration != 0 {
        newMovie.Duration = sql.NullInt64{Int64: int64(movieJSON.Duration), Valid: true}
    }

    return newMovie
}

func (i *Importer) populateStudio() error {
    if i.Input.Studio != "" {
        studio, err := i.StudioWriter.FindByName(i.Input.Studio, false)
        if err != nil {
            return fmt.Errorf("error finding studio by name: %s", err.Error())
        }

        if studio == nil {
            if i.MissingRefBehaviour == models.ImportMissingRefEnumFail {
                return fmt.Errorf("movie studio '%s' not found", i.Input.Studio)
            }

            if i.MissingRefBehaviour == models.ImportMissingRefEnumIgnore {
                return nil
            }

            if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate {
                studioID, err := i.createStudio(i.Input.Studio)
                if err != nil {
                    return err
                }
                i.movie.StudioID = sql.NullInt64{
                    Int64: int64(studioID),
                    Valid: true,
                }
            }
        } else {
            i.movie.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true}
        }
    }

    return nil
}

func (i *Importer) createStudio(name string) (int, error) {
    newStudio := *models.NewStudio(name)

    created, err := i.StudioWriter.Create(newStudio)
    if err != nil {
        return 0, err
    }

    return created.ID, nil
}

func (i *Importer) PostImport(id int) error {
    if len(i.frontImageData) > 0 {
        if err := i.ReaderWriter.UpdateMovieImages(id, i.frontImageData, i.backImageData); err != nil {
            return fmt.Errorf("error setting movie images: %s", err.Error())
        }
    }

    return nil
}

func (i *Importer) Name() string {
    return i.Input.Name
}

func (i *Importer) FindExistingID() (*int, error) {
    const nocase = false
    existing, err := i.ReaderWriter.FindByName(i.Name(), nocase)
    if err != nil {
        return nil, err
    }

    if existing != nil {
        id := existing.ID
        return &id, nil
    }

    return nil, nil
}

func (i *Importer) Create() (*int, error) {
    created, err := i.ReaderWriter.Create(i.movie)
    if err != nil {
        return nil, fmt.Errorf("error creating movie: %s", err.Error())
    }

    id := created.ID
    return &id, nil
}

func (i *Importer) Update(id int) error {
    movie := i.movie
    movie.ID = id
    _, err := i.ReaderWriter.UpdateFull(movie)
    if err != nil {
        return fmt.Errorf("error updating existing movie: %s", err.Error())
    }

    return nil
}
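pkg/movie/import.go above defines the per-object importer contract used throughout this commit: PreImport resolves references and decodes images, FindExistingID detects duplicates, Create/Update write the object, and PostImport attaches image data. A rough sketch of how a caller could drive a prepared movie Importer follows; importutil is a hypothetical helper package, and the real orchestration lives in the manager import task, whose duplicate and missing-ref handling may differ.

// Package importutil only exists to show the call order, not to replace the
// manager's import task.
package importutil

import (
    "fmt"

    "github.com/stashapp/stash/pkg/movie"
)

// RunImport drives a prepared movie.Importer through the lifecycle defined in
// this commit: resolve references, detect duplicates, create or overwrite,
// then attach images. The duplicate policy is reduced to a single bool here.
func RunImport(i *movie.Importer, overwriteDuplicates bool) error {
    if err := i.PreImport(); err != nil {
        return err
    }

    existingID, err := i.FindExistingID()
    if err != nil {
        return err
    }

    var id int
    if existingID == nil {
        createdID, err := i.Create()
        if err != nil {
            return err
        }
        id = *createdID
    } else if overwriteDuplicates {
        if err := i.Update(*existingID); err != nil {
            return err
        }
        id = *existingID
    } else {
        return fmt.Errorf("movie '%s' already exists", i.Name())
    }

    return i.PostImport(id)
}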
pkg/movie/import_test.go (new file, 278 lines)
@@ -0,0 +1,278 @@
package movie

import (
    "errors"
    "testing"

    "github.com/stashapp/stash/pkg/manager/jsonschema"
    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/models/mocks"
    "github.com/stashapp/stash/pkg/models/modelstest"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/mock"
)

const invalidImage = "aW1hZ2VCeXRlcw&&"

const (
    movieNameErr      = "movieNameErr"
    existingMovieName = "existingMovieName"

    existingMovieID  = 100
    existingStudioID = 101

    existingStudioName = "existingStudioName"
    existingStudioErr  = "existingStudioErr"
    missingStudioName  = "existingStudioName"

    errImageID = 3
)

func TestImporterName(t *testing.T) {
    i := Importer{
        Input: jsonschema.Movie{
            Name: movieName,
        },
    }

    assert.Equal(t, movieName, i.Name())
}

func TestImporterPreImport(t *testing.T) {
    i := Importer{
        Input: jsonschema.Movie{
            Name:       movieName,
            FrontImage: invalidImage,
        },
    }

    err := i.PreImport()
    assert.NotNil(t, err)

    i.Input.FrontImage = frontImage
    i.Input.BackImage = invalidImage

    err = i.PreImport()
    assert.NotNil(t, err)

    i.Input.BackImage = ""

    err = i.PreImport()
    assert.Nil(t, err)

    i.Input.BackImage = backImage

    err = i.PreImport()
    assert.Nil(t, err)
}

func TestImporterPreImportWithStudio(t *testing.T) {
    studioReaderWriter := &mocks.StudioReaderWriter{}

    i := Importer{
        StudioWriter: studioReaderWriter,
        Input: jsonschema.Movie{
            Name:       movieName,
            FrontImage: frontImage,
            Studio:     existingStudioName,
            Rating:     5,
            Duration:   10,
        },
    }

    studioReaderWriter.On("FindByName", existingStudioName, false).Return(&models.Studio{
        ID: existingStudioID,
    }, nil).Once()
    studioReaderWriter.On("FindByName", existingStudioErr, false).Return(nil, errors.New("FindByName error")).Once()

    err := i.PreImport()
    assert.Nil(t, err)
    assert.Equal(t, int64(existingStudioID), i.movie.StudioID.Int64)

    i.Input.Studio = existingStudioErr
    err = i.PreImport()
    assert.NotNil(t, err)

    studioReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingStudio(t *testing.T) {
    studioReaderWriter := &mocks.StudioReaderWriter{}

    i := Importer{
        StudioWriter: studioReaderWriter,
        Input: jsonschema.Movie{
            Name:       movieName,
            FrontImage: frontImage,
            Studio:     missingStudioName,
        },
        MissingRefBehaviour: models.ImportMissingRefEnumFail,
    }

    studioReaderWriter.On("FindByName", missingStudioName, false).Return(nil, nil).Times(3)
    studioReaderWriter.On("Create", mock.AnythingOfType("models.Studio")).Return(&models.Studio{
        ID: existingStudioID,
    }, nil)

    err := i.PreImport()
    assert.NotNil(t, err)

    i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
    err = i.PreImport()
    assert.Nil(t, err)

    i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
    err = i.PreImport()
    assert.Nil(t, err)
    assert.Equal(t, int64(existingStudioID), i.movie.StudioID.Int64)

    studioReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) {
    studioReaderWriter := &mocks.StudioReaderWriter{}

    i := Importer{
        StudioWriter: studioReaderWriter,
        Input: jsonschema.Movie{
            Name:       movieName,
            FrontImage: frontImage,
            Studio:     missingStudioName,
        },
        MissingRefBehaviour: models.ImportMissingRefEnumCreate,
    }

    studioReaderWriter.On("FindByName", missingStudioName, false).Return(nil, nil).Once()
    studioReaderWriter.On("Create", mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error"))

    err := i.PreImport()
    assert.NotNil(t, err)
}

func TestImporterPostImport(t *testing.T) {
    readerWriter := &mocks.MovieReaderWriter{}

    i := Importer{
        ReaderWriter:   readerWriter,
        frontImageData: frontImageBytes,
        backImageData:  backImageBytes,
    }

    updateMovieImageErr := errors.New("UpdateMovieImage error")

    readerWriter.On("UpdateMovieImages", movieID, frontImageBytes, backImageBytes).Return(nil).Once()
    readerWriter.On("UpdateMovieImages", errImageID, frontImageBytes, backImageBytes).Return(updateMovieImageErr).Once()

    err := i.PostImport(movieID)
    assert.Nil(t, err)

    err = i.PostImport(errImageID)
    assert.NotNil(t, err)

    readerWriter.AssertExpectations(t)
}

func TestImporterFindExistingID(t *testing.T) {
    readerWriter := &mocks.MovieReaderWriter{}

    i := Importer{
        ReaderWriter: readerWriter,
        Input: jsonschema.Movie{
            Name: movieName,
        },
    }

    errFindByName := errors.New("FindByName error")
    readerWriter.On("FindByName", movieName, false).Return(nil, nil).Once()
    readerWriter.On("FindByName", existingMovieName, false).Return(&models.Movie{
        ID: existingMovieID,
    }, nil).Once()
    readerWriter.On("FindByName", movieNameErr, false).Return(nil, errFindByName).Once()

    id, err := i.FindExistingID()
    assert.Nil(t, id)
    assert.Nil(t, err)

    i.Input.Name = existingMovieName
    id, err = i.FindExistingID()
    assert.Equal(t, existingMovieID, *id)
    assert.Nil(t, err)

    i.Input.Name = movieNameErr
    id, err = i.FindExistingID()
    assert.Nil(t, id)
    assert.NotNil(t, err)

    readerWriter.AssertExpectations(t)
}

func TestCreate(t *testing.T) {
    readerWriter := &mocks.MovieReaderWriter{}

    movie := models.Movie{
        Name: modelstest.NullString(movieName),
    }

    movieErr := models.Movie{
        Name: modelstest.NullString(movieNameErr),
    }

    i := Importer{
        ReaderWriter: readerWriter,
        movie:        movie,
    }

    errCreate := errors.New("Create error")
    readerWriter.On("Create", movie).Return(&models.Movie{
        ID: movieID,
    }, nil).Once()
    readerWriter.On("Create", movieErr).Return(nil, errCreate).Once()

    id, err := i.Create()
    assert.Equal(t, movieID, *id)
    assert.Nil(t, err)

    i.movie = movieErr
    id, err = i.Create()
    assert.Nil(t, id)
    assert.NotNil(t, err)

    readerWriter.AssertExpectations(t)
}

func TestUpdate(t *testing.T) {
    readerWriter := &mocks.MovieReaderWriter{}

    movie := models.Movie{
        Name: modelstest.NullString(movieName),
    }

    movieErr := models.Movie{
        Name: modelstest.NullString(movieNameErr),
    }

    i := Importer{
        ReaderWriter: readerWriter,
        movie:        movie,
    }

    errUpdate := errors.New("Update error")

    // id needs to be set for the mock input
    movie.ID = movieID
    readerWriter.On("UpdateFull", movie).Return(nil, nil).Once()

    err := i.Update(movieID)
    assert.Nil(t, err)

    i.movie = movieErr

    // need to set id separately
    movieErr.ID = errImageID
    readerWriter.On("UpdateFull", movieErr).Return(nil, errUpdate).Once()

    err = i.Update(errImageID)
    assert.NotNil(t, err)

    readerWriter.AssertExpectations(t)
}
@@ -8,6 +8,7 @@ import (
     "github.com/stashapp/stash/pkg/models"
     "github.com/stashapp/stash/pkg/models/mocks"
     "github.com/stashapp/stash/pkg/models/modelstest"
+    "github.com/stashapp/stash/pkg/utils"
     "github.com/stretchr/testify/assert"
 
     "testing"
@@ -46,13 +47,14 @@ var birthDate = models.SQLiteDate{
     String: "2001-01-01",
     Valid:  true,
 }
-var createTime time.Time = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC)
-var updateTime time.Time = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC)
+var createTime time.Time = time.Date(2001, 01, 01, 0, 0, 0, 0, time.Local)
+var updateTime time.Time = time.Date(2002, 01, 01, 0, 0, 0, 0, time.Local)
 
-func createFullPerformer(id int) models.Performer {
-    return models.Performer{
+func createFullPerformer(id int, name string) *models.Performer {
+    return &models.Performer{
         ID:        id,
-        Name:      modelstest.NullString(performerName),
+        Name:      modelstest.NullString(name),
+        Checksum:  utils.MD5FromString(name),
         URL:       modelstest.NullString(url),
         Aliases:   modelstest.NullString(aliases),
         Birthdate: birthDate,
@@ -93,9 +95,9 @@ func createEmptyPerformer(id int) models.Performer {
     }
 }
 
-func createFullJSONPerformer(image string) *jsonschema.Performer {
+func createFullJSONPerformer(name string, image string) *jsonschema.Performer {
     return &jsonschema.Performer{
-        Name:      performerName,
+        Name:      name,
         URL:       url,
         Aliases:   aliases,
         Birthdate: birthDate.String,
@@ -144,8 +146,8 @@ var scenarios []testScenario
 func initTestTable() {
     scenarios = []testScenario{
         testScenario{
-            createFullPerformer(performerID),
-            createFullJSONPerformer(image),
+            *createFullPerformer(performerID, performerName),
+            createFullJSONPerformer(performerName, image),
             false,
         },
         testScenario{
@@ -154,7 +156,7 @@ func initTestTable() {
             false,
         },
         testScenario{
-            createFullPerformer(errImageID),
+            *createFullPerformer(errImageID, performerName),
             nil,
             true,
         },
pkg/performer/import.go (new file, 144 lines)
@@ -0,0 +1,144 @@
package performer

import (
    "database/sql"
    "fmt"

    "github.com/stashapp/stash/pkg/manager/jsonschema"
    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/utils"
)

type Importer struct {
    ReaderWriter models.PerformerReaderWriter
    Input        jsonschema.Performer

    performer models.Performer
    imageData []byte
}

func (i *Importer) PreImport() error {
    i.performer = performerJSONToPerformer(i.Input)

    var err error
    if len(i.Input.Image) > 0 {
        _, i.imageData, err = utils.ProcessBase64Image(i.Input.Image)
        if err != nil {
            return fmt.Errorf("invalid image: %s", err.Error())
        }
    }

    return nil
}

func (i *Importer) PostImport(id int) error {
    if len(i.imageData) > 0 {
        if err := i.ReaderWriter.UpdatePerformerImage(id, i.imageData); err != nil {
            return fmt.Errorf("error setting performer image: %s", err.Error())
        }
    }

    return nil
}

func (i *Importer) Name() string {
    return i.Input.Name
}

func (i *Importer) FindExistingID() (*int, error) {
    const nocase = false
    existing, err := i.ReaderWriter.FindByNames([]string{i.Name()}, nocase)
    if err != nil {
        return nil, err
    }

    if len(existing) > 0 {
        id := existing[0].ID
        return &id, nil
    }

    return nil, nil
}

func (i *Importer) Create() (*int, error) {
    created, err := i.ReaderWriter.Create(i.performer)
    if err != nil {
        return nil, fmt.Errorf("error creating performer: %s", err.Error())
    }

    id := created.ID
    return &id, nil
}

func (i *Importer) Update(id int) error {
    performer := i.performer
    performer.ID = id
    _, err := i.ReaderWriter.Update(performer)
    if err != nil {
        return fmt.Errorf("error updating existing performer: %s", err.Error())
    }

    return nil
}

func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Performer {
    checksum := utils.MD5FromString(performerJSON.Name)

    newPerformer := models.Performer{
        Checksum:  checksum,
        Favorite:  sql.NullBool{Bool: performerJSON.Favorite, Valid: true},
        CreatedAt: models.SQLiteTimestamp{Timestamp: performerJSON.CreatedAt.GetTime()},
        UpdatedAt: models.SQLiteTimestamp{Timestamp: performerJSON.UpdatedAt.GetTime()},
    }

    if performerJSON.Name != "" {
        newPerformer.Name = sql.NullString{String: performerJSON.Name, Valid: true}
    }
    if performerJSON.Gender != "" {
        newPerformer.Gender = sql.NullString{String: performerJSON.Gender, Valid: true}
    }
    if performerJSON.URL != "" {
        newPerformer.URL = sql.NullString{String: performerJSON.URL, Valid: true}
    }
    if performerJSON.Birthdate != "" {
        newPerformer.Birthdate = models.SQLiteDate{String: performerJSON.Birthdate, Valid: true}
    }
    if performerJSON.Ethnicity != "" {
        newPerformer.Ethnicity = sql.NullString{String: performerJSON.Ethnicity, Valid: true}
    }
    if performerJSON.Country != "" {
        newPerformer.Country = sql.NullString{String: performerJSON.Country, Valid: true}
    }
    if performerJSON.EyeColor != "" {
        newPerformer.EyeColor = sql.NullString{String: performerJSON.EyeColor, Valid: true}
    }
    if performerJSON.Height != "" {
        newPerformer.Height = sql.NullString{String: performerJSON.Height, Valid: true}
    }
    if performerJSON.Measurements != "" {
        newPerformer.Measurements = sql.NullString{String: performerJSON.Measurements, Valid: true}
    }
    if performerJSON.FakeTits != "" {
        newPerformer.FakeTits = sql.NullString{String: performerJSON.FakeTits, Valid: true}
    }
    if performerJSON.CareerLength != "" {
        newPerformer.CareerLength = sql.NullString{String: performerJSON.CareerLength, Valid: true}
    }
    if performerJSON.Tattoos != "" {
        newPerformer.Tattoos = sql.NullString{String: performerJSON.Tattoos, Valid: true}
    }
    if performerJSON.Piercings != "" {
        newPerformer.Piercings = sql.NullString{String: performerJSON.Piercings, Valid: true}
    }
    if performerJSON.Aliases != "" {
        newPerformer.Aliases = sql.NullString{String: performerJSON.Aliases, Valid: true}
    }
    if performerJSON.Twitter != "" {
        newPerformer.Twitter = sql.NullString{String: performerJSON.Twitter, Valid: true}
    }
    if performerJSON.Instagram != "" {
        newPerformer.Instagram = sql.NullString{String: performerJSON.Instagram, Valid: true}
    }

    return newPerformer
}
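performerJSONToPerformer above repeats one guard per optional field: only non-empty JSON strings become valid nullable columns, everything else stays NULL. A tiny self-contained illustration of that convention using database/sql directly (the helper name is illustrative only, not part of this diff):

package main

import (
    "database/sql"
    "fmt"
)

// optionalString mirrors the importer convention: empty JSON values stay NULL,
// anything else becomes a valid sql.NullString.
func optionalString(v string) sql.NullString {
    return sql.NullString{String: v, Valid: v != ""}
}

func main() {
    fmt.Printf("%+v\n", optionalString(""))      // {String: Valid:false}
    fmt.Printf("%+v\n", optionalString("Stash")) // {String:Stash Valid:true}
}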
pkg/performer/import_test.go (new file, 184 lines)
@@ -0,0 +1,184 @@
package performer

import (
    "errors"

    "github.com/stashapp/stash/pkg/manager/jsonschema"
    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/models/mocks"
    "github.com/stashapp/stash/pkg/models/modelstest"
    "github.com/stashapp/stash/pkg/utils"
    "github.com/stretchr/testify/assert"

    "testing"
)

const invalidImage = "aW1hZ2VCeXRlcw&&"

const (
    existingPerformerID = 100

    existingPerformerName = "existingPerformerName"
    performerNameErr      = "performerNameErr"
)

func TestImporterName(t *testing.T) {
    i := Importer{
        Input: jsonschema.Performer{
            Name: performerName,
        },
    }

    assert.Equal(t, performerName, i.Name())
}

func TestImporterPreImport(t *testing.T) {
    i := Importer{
        Input: jsonschema.Performer{
            Name:  performerName,
            Image: invalidImage,
        },
    }

    err := i.PreImport()

    assert.NotNil(t, err)

    i.Input = *createFullJSONPerformer(performerName, image)

    err = i.PreImport()

    assert.Nil(t, err)
    expectedPerformer := *createFullPerformer(0, performerName)
    expectedPerformer.Checksum = utils.MD5FromString(performerName)
    assert.Equal(t, expectedPerformer, i.performer)
}

func TestImporterPostImport(t *testing.T) {
    readerWriter := &mocks.PerformerReaderWriter{}

    i := Importer{
        ReaderWriter: readerWriter,
        imageData:    imageBytes,
    }

    updatePerformerImageErr := errors.New("UpdatePerformerImage error")

    readerWriter.On("UpdatePerformerImage", performerID, imageBytes).Return(nil).Once()
    readerWriter.On("UpdatePerformerImage", errImageID, imageBytes).Return(updatePerformerImageErr).Once()

    err := i.PostImport(performerID)
    assert.Nil(t, err)

    err = i.PostImport(errImageID)
    assert.NotNil(t, err)

    readerWriter.AssertExpectations(t)
}

func TestImporterFindExistingID(t *testing.T) {
    readerWriter := &mocks.PerformerReaderWriter{}

    i := Importer{
        ReaderWriter: readerWriter,
        Input: jsonschema.Performer{
            Name: performerName,
        },
    }

    errFindByNames := errors.New("FindByNames error")
    readerWriter.On("FindByNames", []string{performerName}, false).Return(nil, nil).Once()
    readerWriter.On("FindByNames", []string{existingPerformerName}, false).Return([]*models.Performer{
        {
            ID: existingPerformerID,
        },
    }, nil).Once()
    readerWriter.On("FindByNames", []string{performerNameErr}, false).Return(nil, errFindByNames).Once()

    id, err := i.FindExistingID()
    assert.Nil(t, id)
    assert.Nil(t, err)

    i.Input.Name = existingPerformerName
    id, err = i.FindExistingID()
    assert.Equal(t, existingPerformerID, *id)
    assert.Nil(t, err)

    i.Input.Name = performerNameErr
    id, err = i.FindExistingID()
    assert.Nil(t, id)
    assert.NotNil(t, err)

    readerWriter.AssertExpectations(t)
}

func TestCreate(t *testing.T) {
    readerWriter := &mocks.PerformerReaderWriter{}

    performer := models.Performer{
        Name: modelstest.NullString(performerName),
    }

    performerErr := models.Performer{
        Name: modelstest.NullString(performerNameErr),
    }

    i := Importer{
        ReaderWriter: readerWriter,
        performer:    performer,
    }

    errCreate := errors.New("Create error")
    readerWriter.On("Create", performer).Return(&models.Performer{
        ID: performerID,
    }, nil).Once()
    readerWriter.On("Create", performerErr).Return(nil, errCreate).Once()

    id, err := i.Create()
    assert.Equal(t, performerID, *id)
    assert.Nil(t, err)

    i.performer = performerErr
    id, err = i.Create()
    assert.Nil(t, id)
    assert.NotNil(t, err)

    readerWriter.AssertExpectations(t)
}

func TestUpdate(t *testing.T) {
    readerWriter := &mocks.PerformerReaderWriter{}

    performer := models.Performer{
        Name: modelstest.NullString(performerName),
    }

    performerErr := models.Performer{
        Name: modelstest.NullString(performerNameErr),
    }

    i := Importer{
        ReaderWriter: readerWriter,
        performer:    performer,
    }

    errUpdate := errors.New("Update error")

    // id needs to be set for the mock input
    performer.ID = performerID
    readerWriter.On("Update", performer).Return(nil, nil).Once()

    err := i.Update(performerID)
    assert.Nil(t, err)

    i.performer = performerErr

    // need to set id separately
    performerErr.ID = errImageID
    readerWriter.On("Update", performerErr).Return(nil, errUpdate).Once()

    err = i.Update(errImageID)
    assert.NotNil(t, err)

    readerWriter.AssertExpectations(t)
}
483
pkg/scene/import.go
Normal file
483
pkg/scene/import.go
Normal file
@@ -0,0 +1,483 @@
package scene

import (
	"database/sql"
	"fmt"
	"strconv"
	"strings"

	"github.com/stashapp/stash/pkg/manager/jsonschema"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/utils"
)

type Importer struct {
	ReaderWriter        models.SceneReaderWriter
	StudioWriter        models.StudioReaderWriter
	GalleryWriter       models.GalleryReaderWriter
	PerformerWriter     models.PerformerReaderWriter
	MovieWriter         models.MovieReaderWriter
	TagWriter           models.TagReaderWriter
	JoinWriter          models.JoinReaderWriter
	Input               jsonschema.Scene
	Path                string
	MissingRefBehaviour models.ImportMissingRefEnum
	FileNamingAlgorithm models.HashAlgorithm

	ID             int
	scene          models.Scene
	gallery        *models.Gallery
	performers     []*models.Performer
	movies         []models.MoviesScenes
	tags           []*models.Tag
	coverImageData []byte
}

func (i *Importer) PreImport() error {
	i.scene = i.sceneJSONToScene(i.Input)

	if err := i.populateStudio(); err != nil {
		return err
	}

	if err := i.populateGallery(); err != nil {
		return err
	}

	if err := i.populatePerformers(); err != nil {
		return err
	}

	if err := i.populateTags(); err != nil {
		return err
	}

	if err := i.populateMovies(); err != nil {
		return err
	}

	var err error
	if len(i.Input.Cover) > 0 {
		_, i.coverImageData, err = utils.ProcessBase64Image(i.Input.Cover)
		if err != nil {
			return fmt.Errorf("invalid cover image: %s", err.Error())
		}
	}

	return nil
}

func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene {
	newScene := models.Scene{
		Checksum: sql.NullString{String: sceneJSON.Checksum, Valid: sceneJSON.Checksum != ""},
		OSHash:   sql.NullString{String: sceneJSON.OSHash, Valid: sceneJSON.OSHash != ""},
		Path:     i.Path,
	}

	if sceneJSON.Title != "" {
		newScene.Title = sql.NullString{String: sceneJSON.Title, Valid: true}
	}
	if sceneJSON.Details != "" {
		newScene.Details = sql.NullString{String: sceneJSON.Details, Valid: true}
	}
	if sceneJSON.URL != "" {
		newScene.URL = sql.NullString{String: sceneJSON.URL, Valid: true}
	}
	if sceneJSON.Date != "" {
		newScene.Date = models.SQLiteDate{String: sceneJSON.Date, Valid: true}
	}
	if sceneJSON.Rating != 0 {
		newScene.Rating = sql.NullInt64{Int64: int64(sceneJSON.Rating), Valid: true}
	}

	newScene.OCounter = sceneJSON.OCounter
	newScene.CreatedAt = models.SQLiteTimestamp{Timestamp: sceneJSON.CreatedAt.GetTime()}
	newScene.UpdatedAt = models.SQLiteTimestamp{Timestamp: sceneJSON.UpdatedAt.GetTime()}

	if sceneJSON.File != nil {
		if sceneJSON.File.Size != "" {
			newScene.Size = sql.NullString{String: sceneJSON.File.Size, Valid: true}
		}
		if sceneJSON.File.Duration != "" {
			duration, _ := strconv.ParseFloat(sceneJSON.File.Duration, 64)
			newScene.Duration = sql.NullFloat64{Float64: duration, Valid: true}
		}
		if sceneJSON.File.VideoCodec != "" {
			newScene.VideoCodec = sql.NullString{String: sceneJSON.File.VideoCodec, Valid: true}
		}
		if sceneJSON.File.AudioCodec != "" {
			newScene.AudioCodec = sql.NullString{String: sceneJSON.File.AudioCodec, Valid: true}
		}
		if sceneJSON.File.Format != "" {
			newScene.Format = sql.NullString{String: sceneJSON.File.Format, Valid: true}
		}
		if sceneJSON.File.Width != 0 {
			newScene.Width = sql.NullInt64{Int64: int64(sceneJSON.File.Width), Valid: true}
		}
		if sceneJSON.File.Height != 0 {
			newScene.Height = sql.NullInt64{Int64: int64(sceneJSON.File.Height), Valid: true}
		}
		if sceneJSON.File.Framerate != "" {
			framerate, _ := strconv.ParseFloat(sceneJSON.File.Framerate, 64)
			newScene.Framerate = sql.NullFloat64{Float64: framerate, Valid: true}
		}
		if sceneJSON.File.Bitrate != 0 {
			newScene.Bitrate = sql.NullInt64{Int64: int64(sceneJSON.File.Bitrate), Valid: true}
		}
	}

	return newScene
}

func (i *Importer) populateStudio() error {
	if i.Input.Studio != "" {
		studio, err := i.StudioWriter.FindByName(i.Input.Studio, false)
		if err != nil {
			return fmt.Errorf("error finding studio by name: %s", err.Error())
		}

		if studio == nil {
			if i.MissingRefBehaviour == models.ImportMissingRefEnumFail {
				return fmt.Errorf("scene studio '%s' not found", i.Input.Studio)
			}

			if i.MissingRefBehaviour == models.ImportMissingRefEnumIgnore {
				return nil
			}

			if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate {
				studioID, err := i.createStudio(i.Input.Studio)
				if err != nil {
					return err
				}
				i.scene.StudioID = sql.NullInt64{
					Int64: int64(studioID),
					Valid: true,
				}
			}
		} else {
			i.scene.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true}
		}
	}

	return nil
}

func (i *Importer) createStudio(name string) (int, error) {
	newStudio := *models.NewStudio(name)

	created, err := i.StudioWriter.Create(newStudio)
	if err != nil {
		return 0, err
	}

	return created.ID, nil
}

func (i *Importer) populateGallery() error {
	if i.Input.Gallery != "" {
		gallery, err := i.GalleryWriter.FindByChecksum(i.Input.Gallery)
		if err != nil {
			return fmt.Errorf("error finding gallery: %s", err.Error())
		}

		if gallery == nil {
			if i.MissingRefBehaviour == models.ImportMissingRefEnumFail {
				return fmt.Errorf("scene gallery '%s' not found", i.Input.Studio)
			}

			// we don't create galleries - just ignore
			if i.MissingRefBehaviour == models.ImportMissingRefEnumIgnore || i.MissingRefBehaviour == models.ImportMissingRefEnumCreate {
				return nil
			}
		} else {
			i.gallery = gallery
		}
	}

	return nil
}

func (i *Importer) populatePerformers() error {
	if len(i.Input.Performers) > 0 {
		names := i.Input.Performers
		performers, err := i.PerformerWriter.FindByNames(names, false)
		if err != nil {
			return err
		}

		var pluckedNames []string
		for _, performer := range performers {
			if !performer.Name.Valid {
				continue
			}
			pluckedNames = append(pluckedNames, performer.Name.String)
		}

		missingPerformers := utils.StrFilter(names, func(name string) bool {
			return !utils.StrInclude(pluckedNames, name)
		})

		if len(missingPerformers) > 0 {
			if i.MissingRefBehaviour == models.ImportMissingRefEnumFail {
				return fmt.Errorf("scene performers [%s] not found", strings.Join(missingPerformers, ", "))
			}

			if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate {
				createdPerformers, err := i.createPerformers(missingPerformers)
				if err != nil {
					return fmt.Errorf("error creating scene performers: %s", err.Error())
				}

				performers = append(performers, createdPerformers...)
			}

			// ignore if MissingRefBehaviour set to Ignore
		}

		i.performers = performers
	}

	return nil
}

func (i *Importer) createPerformers(names []string) ([]*models.Performer, error) {
	var ret []*models.Performer
	for _, name := range names {
		newPerformer := *models.NewPerformer(name)

		created, err := i.PerformerWriter.Create(newPerformer)
		if err != nil {
			return nil, err
		}

		ret = append(ret, created)
	}

	return ret, nil
}

func (i *Importer) populateMovies() error {
	if len(i.Input.Movies) > 0 {
		for _, inputMovie := range i.Input.Movies {
			movie, err := i.MovieWriter.FindByName(inputMovie.MovieName, false)
			if err != nil {
				return fmt.Errorf("error finding scene movie: %s", err.Error())
			}

			if movie == nil {
				if i.MissingRefBehaviour == models.ImportMissingRefEnumFail {
					return fmt.Errorf("scene movie [%s] not found", inputMovie.MovieName)
				}

				if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate {
					movie, err = i.createMovie(inputMovie.MovieName)
					if err != nil {
						return fmt.Errorf("error creating scene movie: %s", err.Error())
					}
				}

				// ignore if MissingRefBehaviour set to Ignore
				if i.MissingRefBehaviour == models.ImportMissingRefEnumIgnore {
					continue
				}
			}

			toAdd := models.MoviesScenes{
				MovieID: movie.ID,
			}

			if inputMovie.SceneIndex != 0 {
				toAdd.SceneIndex = sql.NullInt64{
					Int64: int64(inputMovie.SceneIndex),
					Valid: true,
				}
			}

			i.movies = append(i.movies, toAdd)
		}
	}

	return nil
}

func (i *Importer) createMovie(name string) (*models.Movie, error) {
	newMovie := *models.NewMovie(name)

	created, err := i.MovieWriter.Create(newMovie)
	if err != nil {
		return nil, err
	}

	return created, nil
}

func (i *Importer) populateTags() error {
	if len(i.Input.Tags) > 0 {

		tags, err := importTags(i.TagWriter, i.Input.Tags, i.MissingRefBehaviour)
		if err != nil {
			return err
		}

		i.tags = tags
	}

	return nil
}

func (i *Importer) PostImport(id int) error {
	if len(i.coverImageData) > 0 {
		if err := i.ReaderWriter.UpdateSceneCover(id, i.coverImageData); err != nil {
			return fmt.Errorf("error setting scene images: %s", err.Error())
		}
	}

	if i.gallery != nil {
		i.gallery.SceneID = sql.NullInt64{Int64: int64(id), Valid: true}
		_, err := i.GalleryWriter.Update(*i.gallery)
		if err != nil {
			return fmt.Errorf("failed to update gallery: %s", err.Error())
		}
	}

	if len(i.performers) > 0 {
		var performerJoins []models.PerformersScenes
		for _, performer := range i.performers {
			join := models.PerformersScenes{
				PerformerID: performer.ID,
				SceneID:     id,
			}
			performerJoins = append(performerJoins, join)
		}
		if err := i.JoinWriter.UpdatePerformersScenes(id, performerJoins); err != nil {
			return fmt.Errorf("failed to associate performers: %s", err.Error())
		}
	}

	if len(i.movies) > 0 {
		for index := range i.movies {
			i.movies[index].SceneID = id
		}
		if err := i.JoinWriter.UpdateMoviesScenes(id, i.movies); err != nil {
			return fmt.Errorf("failed to associate movies: %s", err.Error())
		}
	}

	if len(i.tags) > 0 {
		var tagJoins []models.ScenesTags
		for _, tag := range i.tags {
			join := models.ScenesTags{
				SceneID: id,
				TagID:   tag.ID,
			}
			tagJoins = append(tagJoins, join)
		}
		if err := i.JoinWriter.UpdateScenesTags(id, tagJoins); err != nil {
			return fmt.Errorf("failed to associate tags: %s", err.Error())
		}
	}

	return nil
}

func (i *Importer) Name() string {
	return i.Path
}

func (i *Importer) FindExistingID() (*int, error) {
	var existing *models.Scene
	var err error
	if i.FileNamingAlgorithm == models.HashAlgorithmMd5 {
		existing, err = i.ReaderWriter.FindByChecksum(i.Input.Checksum)
	} else if i.FileNamingAlgorithm == models.HashAlgorithmOshash {
		existing, err = i.ReaderWriter.FindByOSHash(i.Input.OSHash)
	} else {
		panic("unknown file naming algorithm")
	}

	if err != nil {
		return nil, err
	}

	if existing != nil {
		id := existing.ID
		return &id, nil
	}

	return nil, nil
}

func (i *Importer) Create() (*int, error) {
	created, err := i.ReaderWriter.Create(i.scene)
	if err != nil {
		return nil, fmt.Errorf("error creating scene: %s", err.Error())
	}

	id := created.ID
	i.ID = id
	return &id, nil
}

func (i *Importer) Update(id int) error {
	scene := i.scene
	scene.ID = id
	i.ID = id
	_, err := i.ReaderWriter.UpdateFull(scene)
	if err != nil {
		return fmt.Errorf("error updating existing scene: %s", err.Error())
	}

	return nil
}

func importTags(tagWriter models.TagReaderWriter, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) {
	tags, err := tagWriter.FindByNames(names, false)
	if err != nil {
		return nil, err
	}

	var pluckedNames []string
	for _, tag := range tags {
		pluckedNames = append(pluckedNames, tag.Name)
	}

	missingTags := utils.StrFilter(names, func(name string) bool {
		return !utils.StrInclude(pluckedNames, name)
	})

	if len(missingTags) > 0 {
		if missingRefBehaviour == models.ImportMissingRefEnumFail {
			return nil, fmt.Errorf("tags [%s] not found", strings.Join(missingTags, ", "))
		}

		if missingRefBehaviour == models.ImportMissingRefEnumCreate {
			createdTags, err := createTags(tagWriter, missingTags)
			if err != nil {
				return nil, fmt.Errorf("error creating tags: %s", err.Error())
			}

			tags = append(tags, createdTags...)
		}

		// ignore if MissingRefBehaviour set to Ignore
	}

	return tags, nil
}

func createTags(tagWriter models.TagWriter, names []string) ([]*models.Tag, error) {
	var ret []*models.Tag
	for _, name := range names {
		newTag := *models.NewTag(name)

		created, err := tagWriter.Create(newTag)
		if err != nil {
			return nil, err
		}

		ret = append(ret, created)
	}

	return ret, nil
}
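The scene Importer above follows the same lifecycle as the other object importers in this change: PreImport resolves references, FindExistingID looks for a scene with the same hash, Create or Update persists it, and PostImport writes the joins. The sketch below shows how a caller might drive that lifecycle; the runImport helper and the exact duplicate-behaviour constant names are illustrative assumptions inferred from the ImportDuplicateEnum in the GraphQL schema, not code from this diff (the real orchestration lives in the manager's import task).

// Illustrative sketch only: drives one scene Importer through its lifecycle.
// runImport and its duplicate handling are assumptions; they are not part of this commit.
func runImport(i *Importer, duplicateBehaviour models.ImportDuplicateEnum) error {
	if err := i.PreImport(); err != nil {
		return err
	}

	existingID, err := i.FindExistingID()
	if err != nil {
		return err
	}

	var id int
	if existingID == nil {
		createdID, err := i.Create()
		if err != nil {
			return err
		}
		id = *createdID
	} else {
		switch duplicateBehaviour {
		case models.ImportDuplicateEnumIgnore:
			// leave the existing scene untouched
			return nil
		case models.ImportDuplicateEnumFail:
			return fmt.Errorf("scene %s already exists", i.Name())
		case models.ImportDuplicateEnumOverwrite:
			if err := i.Update(*existingID); err != nil {
				return err
			}
			id = *existingID
		}
	}

	// associate gallery, performers, movies and tags with the persisted scene
	return i.PostImport(id)
}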
761
pkg/scene/import_test.go
Normal file
@@ -0,0 +1,761 @@
package scene

import (
	"errors"
	"testing"

	"github.com/stashapp/stash/pkg/manager/jsonschema"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/models/mocks"
	"github.com/stashapp/stash/pkg/models/modelstest"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
)

const invalidImage = "aW1hZ2VCeXRlcw&&"

const (
	path = "path"

	sceneNameErr      = "sceneNameErr"
	existingSceneName = "existingSceneName"

	existingSceneID     = 100
	existingStudioID    = 101
	existingGalleryID   = 102
	existingPerformerID = 103
	existingMovieID     = 104
	existingTagID       = 105

	existingStudioName = "existingStudioName"
	existingStudioErr  = "existingStudioErr"
	missingStudioName  = "missingStudioName"

	existingGalleryChecksum = "existingGalleryChecksum"
	existingGalleryErr      = "existingGalleryErr"
	missingGalleryChecksum  = "missingGalleryChecksum"

	existingPerformerName = "existingPerformerName"
	existingPerformerErr  = "existingPerformerErr"
	missingPerformerName  = "missingPerformerName"

	existingMovieName = "existingMovieName"
	existingMovieErr  = "existingMovieErr"
	missingMovieName  = "missingMovieName"

	existingTagName = "existingTagName"
	existingTagErr  = "existingTagErr"
	missingTagName  = "missingTagName"

	errPerformersID = 200

	missingChecksum = "missingChecksum"
	missingOSHash   = "missingOSHash"
	errChecksum     = "errChecksum"
	errOSHash       = "errOSHash"
)

func TestImporterName(t *testing.T) {
	i := Importer{
		Path:  path,
		Input: jsonschema.Scene{},
	}

	assert.Equal(t, path, i.Name())
}

func TestImporterPreImport(t *testing.T) {
	i := Importer{
		Path: path,
		Input: jsonschema.Scene{
			Cover: invalidImage,
		},
	}

	err := i.PreImport()
	assert.NotNil(t, err)

	i.Input.Cover = image

	err = i.PreImport()
	assert.Nil(t, err)
}

func TestImporterPreImportWithStudio(t *testing.T) {
	studioReaderWriter := &mocks.StudioReaderWriter{}

	i := Importer{
		StudioWriter: studioReaderWriter,
		Path:         path,
		Input: jsonschema.Scene{
			Studio: existingStudioName,
		},
	}

	studioReaderWriter.On("FindByName", existingStudioName, false).Return(&models.Studio{
		ID: existingStudioID,
	}, nil).Once()
	studioReaderWriter.On("FindByName", existingStudioErr, false).Return(nil, errors.New("FindByName error")).Once()

	err := i.PreImport()
	assert.Nil(t, err)
	assert.Equal(t, int64(existingStudioID), i.scene.StudioID.Int64)

	i.Input.Studio = existingStudioErr
	err = i.PreImport()
	assert.NotNil(t, err)

	studioReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingStudio(t *testing.T) {
	studioReaderWriter := &mocks.StudioReaderWriter{}

	i := Importer{
		Path:         path,
		StudioWriter: studioReaderWriter,
		Input: jsonschema.Scene{
			Studio: missingStudioName,
		},
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
	}

	studioReaderWriter.On("FindByName", missingStudioName, false).Return(nil, nil).Times(3)
	studioReaderWriter.On("Create", mock.AnythingOfType("models.Studio")).Return(&models.Studio{
		ID: existingStudioID,
	}, nil)

	err := i.PreImport()
	assert.NotNil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
	err = i.PreImport()
	assert.Nil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
	err = i.PreImport()
	assert.Nil(t, err)
	assert.Equal(t, int64(existingStudioID), i.scene.StudioID.Int64)

	studioReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) {
	studioReaderWriter := &mocks.StudioReaderWriter{}

	i := Importer{
		StudioWriter: studioReaderWriter,
		Path:         path,
		Input: jsonschema.Scene{
			Studio: missingStudioName,
		},
		MissingRefBehaviour: models.ImportMissingRefEnumCreate,
	}

	studioReaderWriter.On("FindByName", missingStudioName, false).Return(nil, nil).Once()
	studioReaderWriter.On("Create", mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error"))

	err := i.PreImport()
	assert.NotNil(t, err)
}

func TestImporterPreImportWithGallery(t *testing.T) {
	galleryReaderWriter := &mocks.GalleryReaderWriter{}

	i := Importer{
		GalleryWriter: galleryReaderWriter,
		Path:          path,
		Input: jsonschema.Scene{
			Gallery: existingGalleryChecksum,
		},
	}

	galleryReaderWriter.On("FindByChecksum", existingGalleryChecksum).Return(&models.Gallery{
		ID: existingGalleryID,
	}, nil).Once()
	galleryReaderWriter.On("FindByChecksum", existingGalleryErr).Return(nil, errors.New("FindByChecksum error")).Once()

	err := i.PreImport()
	assert.Nil(t, err)
	assert.Equal(t, existingGalleryID, i.gallery.ID)

	i.Input.Gallery = existingGalleryErr
	err = i.PreImport()
	assert.NotNil(t, err)

	galleryReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingGallery(t *testing.T) {
	galleryReaderWriter := &mocks.GalleryReaderWriter{}

	i := Importer{
		Path:          path,
		GalleryWriter: galleryReaderWriter,
		Input: jsonschema.Scene{
			Gallery: missingGalleryChecksum,
		},
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
	}

	galleryReaderWriter.On("FindByChecksum", missingGalleryChecksum).Return(nil, nil).Times(3)

	err := i.PreImport()
	assert.NotNil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
	err = i.PreImport()
	assert.Nil(t, err)
	assert.Nil(t, i.gallery)

	i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
	err = i.PreImport()
	assert.Nil(t, err)
	assert.Nil(t, i.gallery)

	galleryReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithPerformer(t *testing.T) {
	performerReaderWriter := &mocks.PerformerReaderWriter{}

	i := Importer{
		PerformerWriter:     performerReaderWriter,
		Path:                path,
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
		Input: jsonschema.Scene{
			Performers: []string{
				existingPerformerName,
			},
		},
	}

	performerReaderWriter.On("FindByNames", []string{existingPerformerName}, false).Return([]*models.Performer{
		{
			ID:   existingPerformerID,
			Name: modelstest.NullString(existingPerformerName),
		},
	}, nil).Once()
	performerReaderWriter.On("FindByNames", []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once()

	err := i.PreImport()
	assert.Nil(t, err)
	assert.Equal(t, existingPerformerID, i.performers[0].ID)

	i.Input.Performers = []string{existingPerformerErr}
	err = i.PreImport()
	assert.NotNil(t, err)

	performerReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingPerformer(t *testing.T) {
	performerReaderWriter := &mocks.PerformerReaderWriter{}

	i := Importer{
		Path:            path,
		PerformerWriter: performerReaderWriter,
		Input: jsonschema.Scene{
			Performers: []string{
				missingPerformerName,
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
	}

	performerReaderWriter.On("FindByNames", []string{missingPerformerName}, false).Return(nil, nil).Times(3)
	performerReaderWriter.On("Create", mock.AnythingOfType("models.Performer")).Return(&models.Performer{
		ID: existingPerformerID,
	}, nil)

	err := i.PreImport()
	assert.NotNil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
	err = i.PreImport()
	assert.Nil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
	err = i.PreImport()
	assert.Nil(t, err)
	assert.Equal(t, existingPerformerID, i.performers[0].ID)

	performerReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) {
	performerReaderWriter := &mocks.PerformerReaderWriter{}

	i := Importer{
		PerformerWriter: performerReaderWriter,
		Path:            path,
		Input: jsonschema.Scene{
			Performers: []string{
				missingPerformerName,
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumCreate,
	}

	performerReaderWriter.On("FindByNames", []string{missingPerformerName}, false).Return(nil, nil).Once()
	performerReaderWriter.On("Create", mock.AnythingOfType("models.Performer")).Return(nil, errors.New("Create error"))

	err := i.PreImport()
	assert.NotNil(t, err)
}

func TestImporterPreImportWithMovie(t *testing.T) {
	movieReaderWriter := &mocks.MovieReaderWriter{}

	i := Importer{
		MovieWriter:         movieReaderWriter,
		Path:                path,
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
		Input: jsonschema.Scene{
			Movies: []jsonschema.SceneMovie{
				{
					MovieName:  existingMovieName,
					SceneIndex: 1,
				},
			},
		},
	}

	movieReaderWriter.On("FindByName", existingMovieName, false).Return(&models.Movie{
		ID:   existingMovieID,
		Name: modelstest.NullString(existingMovieName),
	}, nil).Once()
	movieReaderWriter.On("FindByName", existingMovieErr, false).Return(nil, errors.New("FindByName error")).Once()

	err := i.PreImport()
	assert.Nil(t, err)
	assert.Equal(t, existingMovieID, i.movies[0].MovieID)

	i.Input.Movies[0].MovieName = existingMovieErr
	err = i.PreImport()
	assert.NotNil(t, err)

	movieReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingMovie(t *testing.T) {
	movieReaderWriter := &mocks.MovieReaderWriter{}

	i := Importer{
		Path:        path,
		MovieWriter: movieReaderWriter,
		Input: jsonschema.Scene{
			Movies: []jsonschema.SceneMovie{
				{
					MovieName: missingMovieName,
				},
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
	}

	movieReaderWriter.On("FindByName", missingMovieName, false).Return(nil, nil).Times(3)
	movieReaderWriter.On("Create", mock.AnythingOfType("models.Movie")).Return(&models.Movie{
		ID: existingMovieID,
	}, nil)

	err := i.PreImport()
	assert.NotNil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
	err = i.PreImport()
	assert.Nil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
	err = i.PreImport()
	assert.Nil(t, err)
	assert.Equal(t, existingMovieID, i.movies[0].MovieID)

	movieReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingMovieCreateErr(t *testing.T) {
	movieReaderWriter := &mocks.MovieReaderWriter{}

	i := Importer{
		MovieWriter: movieReaderWriter,
		Path:        path,
		Input: jsonschema.Scene{
			Movies: []jsonschema.SceneMovie{
				{
					MovieName: missingMovieName,
				},
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumCreate,
	}

	movieReaderWriter.On("FindByName", missingMovieName, false).Return(nil, nil).Once()
	movieReaderWriter.On("Create", mock.AnythingOfType("models.Movie")).Return(nil, errors.New("Create error"))

	err := i.PreImport()
	assert.NotNil(t, err)
}

func TestImporterPreImportWithTag(t *testing.T) {
	tagReaderWriter := &mocks.TagReaderWriter{}

	i := Importer{
		TagWriter:           tagReaderWriter,
		Path:                path,
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
		Input: jsonschema.Scene{
			Tags: []string{
				existingTagName,
			},
		},
	}

	tagReaderWriter.On("FindByNames", []string{existingTagName}, false).Return([]*models.Tag{
		{
			ID:   existingTagID,
			Name: existingTagName,
		},
	}, nil).Once()
	tagReaderWriter.On("FindByNames", []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once()

	err := i.PreImport()
	assert.Nil(t, err)
	assert.Equal(t, existingTagID, i.tags[0].ID)

	i.Input.Tags = []string{existingTagErr}
	err = i.PreImport()
	assert.NotNil(t, err)

	tagReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingTag(t *testing.T) {
	tagReaderWriter := &mocks.TagReaderWriter{}

	i := Importer{
		Path:      path,
		TagWriter: tagReaderWriter,
		Input: jsonschema.Scene{
			Tags: []string{
				missingTagName,
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
	}

	tagReaderWriter.On("FindByNames", []string{missingTagName}, false).Return(nil, nil).Times(3)
	tagReaderWriter.On("Create", mock.AnythingOfType("models.Tag")).Return(&models.Tag{
		ID: existingTagID,
	}, nil)

	err := i.PreImport()
	assert.NotNil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
	err = i.PreImport()
	assert.Nil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
	err = i.PreImport()
	assert.Nil(t, err)
	assert.Equal(t, existingTagID, i.tags[0].ID)

	tagReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
	tagReaderWriter := &mocks.TagReaderWriter{}

	i := Importer{
		TagWriter: tagReaderWriter,
		Path:      path,
		Input: jsonschema.Scene{
			Tags: []string{
				missingTagName,
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumCreate,
	}

	tagReaderWriter.On("FindByNames", []string{missingTagName}, false).Return(nil, nil).Once()
	tagReaderWriter.On("Create", mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error"))

	err := i.PreImport()
	assert.NotNil(t, err)
}

func TestImporterPostImport(t *testing.T) {
	readerWriter := &mocks.SceneReaderWriter{}

	i := Importer{
		ReaderWriter:   readerWriter,
		coverImageData: imageBytes,
	}

	updateSceneImageErr := errors.New("UpdateSceneCover error")

	readerWriter.On("UpdateSceneCover", sceneID, imageBytes).Return(nil).Once()
	readerWriter.On("UpdateSceneCover", errImageID, imageBytes).Return(updateSceneImageErr).Once()

	err := i.PostImport(sceneID)
	assert.Nil(t, err)

	err = i.PostImport(errImageID)
	assert.NotNil(t, err)

	readerWriter.AssertExpectations(t)
}

func TestImporterPostImportUpdateGallery(t *testing.T) {
	galleryReaderWriter := &mocks.GalleryReaderWriter{}

	i := Importer{
		GalleryWriter: galleryReaderWriter,
		gallery: &models.Gallery{
			ID: existingGalleryID,
		},
	}

	updateErr := errors.New("Update error")

	updateArg := *i.gallery
	updateArg.SceneID = modelstest.NullInt64(sceneID)

	galleryReaderWriter.On("Update", updateArg).Return(nil, nil).Once()

	updateArg.SceneID = modelstest.NullInt64(errGalleryID)
	galleryReaderWriter.On("Update", updateArg).Return(nil, updateErr).Once()

	err := i.PostImport(sceneID)
	assert.Nil(t, err)

	err = i.PostImport(errGalleryID)
	assert.NotNil(t, err)

	galleryReaderWriter.AssertExpectations(t)
}

func TestImporterPostImportUpdatePerformers(t *testing.T) {
	joinReaderWriter := &mocks.JoinReaderWriter{}

	i := Importer{
		JoinWriter: joinReaderWriter,
		performers: []*models.Performer{
			{
				ID: existingPerformerID,
			},
		},
	}

	updateErr := errors.New("UpdatePerformersScenes error")

	joinReaderWriter.On("UpdatePerformersScenes", sceneID, []models.PerformersScenes{
		{
			PerformerID: existingPerformerID,
			SceneID:     sceneID,
		},
	}).Return(nil).Once()
	joinReaderWriter.On("UpdatePerformersScenes", errPerformersID, mock.AnythingOfType("[]models.PerformersScenes")).Return(updateErr).Once()

	err := i.PostImport(sceneID)
	assert.Nil(t, err)

	err = i.PostImport(errPerformersID)
	assert.NotNil(t, err)

	joinReaderWriter.AssertExpectations(t)
}

func TestImporterPostImportUpdateMovies(t *testing.T) {
	joinReaderWriter := &mocks.JoinReaderWriter{}

	i := Importer{
		JoinWriter: joinReaderWriter,
		movies: []models.MoviesScenes{
			{
				MovieID: existingMovieID,
			},
		},
	}

	updateErr := errors.New("UpdateMoviesScenes error")

	joinReaderWriter.On("UpdateMoviesScenes", sceneID, []models.MoviesScenes{
		{
			MovieID: existingMovieID,
			SceneID: sceneID,
		},
	}).Return(nil).Once()
	joinReaderWriter.On("UpdateMoviesScenes", errMoviesID, mock.AnythingOfType("[]models.MoviesScenes")).Return(updateErr).Once()

	err := i.PostImport(sceneID)
	assert.Nil(t, err)

	err = i.PostImport(errMoviesID)
	assert.NotNil(t, err)

	joinReaderWriter.AssertExpectations(t)
}

func TestImporterPostImportUpdateTags(t *testing.T) {
	joinReaderWriter := &mocks.JoinReaderWriter{}

	i := Importer{
		JoinWriter: joinReaderWriter,
		tags: []*models.Tag{
			{
				ID: existingTagID,
			},
		},
	}

	updateErr := errors.New("UpdateScenesTags error")

	joinReaderWriter.On("UpdateScenesTags", sceneID, []models.ScenesTags{
		{
			TagID:   existingTagID,
			SceneID: sceneID,
		},
	}).Return(nil).Once()
	joinReaderWriter.On("UpdateScenesTags", errTagsID, mock.AnythingOfType("[]models.ScenesTags")).Return(updateErr).Once()

	err := i.PostImport(sceneID)
	assert.Nil(t, err)

	err = i.PostImport(errTagsID)
	assert.NotNil(t, err)

	joinReaderWriter.AssertExpectations(t)
}

func TestImporterFindExistingID(t *testing.T) {
	readerWriter := &mocks.SceneReaderWriter{}

	i := Importer{
		ReaderWriter: readerWriter,
		Path:         path,
		Input: jsonschema.Scene{
			Checksum: missingChecksum,
			OSHash:   missingOSHash,
		},
		FileNamingAlgorithm: models.HashAlgorithmMd5,
	}

	expectedErr := errors.New("FindBy* error")
	readerWriter.On("FindByChecksum", missingChecksum).Return(nil, nil).Once()
	readerWriter.On("FindByChecksum", checksum).Return(&models.Scene{
		ID: existingSceneID,
	}, nil).Once()
	readerWriter.On("FindByChecksum", errChecksum).Return(nil, expectedErr).Once()

	readerWriter.On("FindByOSHash", missingOSHash).Return(nil, nil).Once()
	readerWriter.On("FindByOSHash", oshash).Return(&models.Scene{
		ID: existingSceneID,
	}, nil).Once()
	readerWriter.On("FindByOSHash", errOSHash).Return(nil, expectedErr).Once()

	id, err := i.FindExistingID()
	assert.Nil(t, id)
	assert.Nil(t, err)

	i.Input.Checksum = checksum
	id, err = i.FindExistingID()
	assert.Equal(t, existingSceneID, *id)
	assert.Nil(t, err)

	i.Input.Checksum = errChecksum
	id, err = i.FindExistingID()
	assert.Nil(t, id)
	assert.NotNil(t, err)

	i.FileNamingAlgorithm = models.HashAlgorithmOshash
	id, err = i.FindExistingID()
	assert.Nil(t, id)
	assert.Nil(t, err)

	i.Input.OSHash = oshash
	id, err = i.FindExistingID()
	assert.Equal(t, existingSceneID, *id)
	assert.Nil(t, err)

	i.Input.OSHash = errOSHash
	id, err = i.FindExistingID()
	assert.Nil(t, id)
	assert.NotNil(t, err)

	readerWriter.AssertExpectations(t)
}

func TestCreate(t *testing.T) {
	readerWriter := &mocks.SceneReaderWriter{}

	scene := models.Scene{
		Title: modelstest.NullString(title),
	}

	sceneErr := models.Scene{
		Title: modelstest.NullString(sceneNameErr),
	}

	i := Importer{
		ReaderWriter: readerWriter,
		scene:        scene,
	}

	errCreate := errors.New("Create error")
	readerWriter.On("Create", scene).Return(&models.Scene{
		ID: sceneID,
	}, nil).Once()
	readerWriter.On("Create", sceneErr).Return(nil, errCreate).Once()

	id, err := i.Create()
	assert.Equal(t, sceneID, *id)
	assert.Nil(t, err)
	assert.Equal(t, sceneID, i.ID)

	i.scene = sceneErr
	id, err = i.Create()
	assert.Nil(t, id)
	assert.NotNil(t, err)

	readerWriter.AssertExpectations(t)
}

func TestUpdate(t *testing.T) {
	readerWriter := &mocks.SceneReaderWriter{}

	scene := models.Scene{
		Title: modelstest.NullString(title),
	}

	sceneErr := models.Scene{
		Title: modelstest.NullString(sceneNameErr),
	}

	i := Importer{
		ReaderWriter: readerWriter,
		scene:        scene,
	}

	errUpdate := errors.New("Update error")

	// id needs to be set for the mock input
	scene.ID = sceneID
	readerWriter.On("UpdateFull", scene).Return(nil, nil).Once()

	err := i.Update(sceneID)
	assert.Nil(t, err)
	assert.Equal(t, sceneID, i.ID)

	i.scene = sceneErr

	// need to set id separately
	sceneErr.ID = errImageID
	readerWriter.On("UpdateFull", sceneErr).Return(nil, errUpdate).Once()

	err = i.Update(errImageID)
	assert.NotNil(t, err)

	readerWriter.AssertExpectations(t)
}
125
pkg/scene/marker_import.go
Normal file
@@ -0,0 +1,125 @@
package scene

import (
	"database/sql"
	"fmt"
	"strconv"

	"github.com/stashapp/stash/pkg/manager/jsonschema"
	"github.com/stashapp/stash/pkg/models"
)

type MarkerImporter struct {
	SceneID             int
	ReaderWriter        models.SceneMarkerReaderWriter
	TagWriter           models.TagReaderWriter
	JoinWriter          models.JoinReaderWriter
	Input               jsonschema.SceneMarker
	MissingRefBehaviour models.ImportMissingRefEnum

	tags   []*models.Tag
	marker models.SceneMarker
}

func (i *MarkerImporter) PreImport() error {
	seconds, _ := strconv.ParseFloat(i.Input.Seconds, 64)
	i.marker = models.SceneMarker{
		Title:     i.Input.Title,
		Seconds:   seconds,
		SceneID:   sql.NullInt64{Int64: int64(i.SceneID), Valid: true},
		CreatedAt: models.SQLiteTimestamp{Timestamp: i.Input.CreatedAt.GetTime()},
		UpdatedAt: models.SQLiteTimestamp{Timestamp: i.Input.UpdatedAt.GetTime()},
	}

	if err := i.populateTags(); err != nil {
		return err
	}

	return nil
}

func (i *MarkerImporter) populateTags() error {
	// primary tag cannot be ignored
	mrb := i.MissingRefBehaviour
	if mrb == models.ImportMissingRefEnumIgnore {
		mrb = models.ImportMissingRefEnumFail
	}

	primaryTag, err := importTags(i.TagWriter, []string{i.Input.PrimaryTag}, mrb)
	if err != nil {
		return err
	}

	i.marker.PrimaryTagID = primaryTag[0].ID

	if len(i.Input.Tags) > 0 {
		tags, err := importTags(i.TagWriter, i.Input.Tags, i.MissingRefBehaviour)
		if err != nil {
			return err
		}

		i.tags = tags
	}

	return nil
}

func (i *MarkerImporter) PostImport(id int) error {
	if len(i.tags) > 0 {
		var tagJoins []models.SceneMarkersTags
		for _, tag := range i.tags {
			join := models.SceneMarkersTags{
				SceneMarkerID: id,
				TagID:         tag.ID,
			}
			tagJoins = append(tagJoins, join)
		}
		if err := i.JoinWriter.UpdateSceneMarkersTags(id, tagJoins); err != nil {
			return fmt.Errorf("failed to associate tags: %s", err.Error())
		}
	}

	return nil
}

func (i *MarkerImporter) Name() string {
	return fmt.Sprintf("%s (%s)", i.Input.Title, i.Input.Seconds)
}

func (i *MarkerImporter) FindExistingID() (*int, error) {
	existingMarkers, err := i.ReaderWriter.FindBySceneID(i.SceneID)

	if err != nil {
		return nil, err
	}

	for _, m := range existingMarkers {
		if m.Seconds == i.marker.Seconds {
			id := m.ID
			return &id, nil
		}
	}

	return nil, nil
}

func (i *MarkerImporter) Create() (*int, error) {
	created, err := i.ReaderWriter.Create(i.marker)
	if err != nil {
		return nil, fmt.Errorf("error creating marker: %s", err.Error())
	}

	id := created.ID
	return &id, nil
}

func (i *MarkerImporter) Update(id int) error {
	marker := i.marker
	marker.ID = id
	_, err := i.ReaderWriter.Update(marker)
	if err != nil {
		return fmt.Errorf("error updating existing marker: %s", err.Error())
	}

	return nil
}
210
pkg/scene/marker_import_test.go
Normal file
@@ -0,0 +1,210 @@
package scene

import (
	"errors"
	"testing"

	"github.com/stashapp/stash/pkg/manager/jsonschema"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/models/mocks"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
)

const (
	seconds      = "5"
	secondsFloat = 5.0
	errSceneID   = 999
)

func TestMarkerImporterName(t *testing.T) {
	i := MarkerImporter{
		Input: jsonschema.SceneMarker{
			Title:   title,
			Seconds: seconds,
		},
	}

	assert.Equal(t, title+" (5)", i.Name())
}

func TestMarkerImporterPreImportWithTag(t *testing.T) {
	tagReaderWriter := &mocks.TagReaderWriter{}

	i := MarkerImporter{
		TagWriter:           tagReaderWriter,
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
		Input: jsonschema.SceneMarker{
			PrimaryTag: existingTagName,
		},
	}

	tagReaderWriter.On("FindByNames", []string{existingTagName}, false).Return([]*models.Tag{
		{
			ID:   existingTagID,
			Name: existingTagName,
		},
	}, nil).Times(4)
	tagReaderWriter.On("FindByNames", []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Times(2)

	err := i.PreImport()
	assert.Nil(t, err)
	assert.Equal(t, existingTagID, i.marker.PrimaryTagID)

	i.Input.PrimaryTag = existingTagErr
	err = i.PreImport()
	assert.NotNil(t, err)

	i.Input.PrimaryTag = existingTagName
	i.Input.Tags = []string{
		existingTagName,
	}
	err = i.PreImport()
	assert.Nil(t, err)
	assert.Equal(t, existingTagID, i.tags[0].ID)

	i.Input.Tags[0] = existingTagErr
	err = i.PreImport()
	assert.NotNil(t, err)

	tagReaderWriter.AssertExpectations(t)
}

func TestMarkerImporterPostImportUpdateTags(t *testing.T) {
	joinReaderWriter := &mocks.JoinReaderWriter{}

	i := MarkerImporter{
		JoinWriter: joinReaderWriter,
		tags: []*models.Tag{
			{
				ID: existingTagID,
			},
		},
	}

	updateErr := errors.New("UpdateSceneMarkersTags error")

	joinReaderWriter.On("UpdateSceneMarkersTags", sceneID, []models.SceneMarkersTags{
		{
			TagID:         existingTagID,
			SceneMarkerID: sceneID,
		},
	}).Return(nil).Once()
	joinReaderWriter.On("UpdateSceneMarkersTags", errTagsID, mock.AnythingOfType("[]models.SceneMarkersTags")).Return(updateErr).Once()

	err := i.PostImport(sceneID)
	assert.Nil(t, err)

	err = i.PostImport(errTagsID)
	assert.NotNil(t, err)

	joinReaderWriter.AssertExpectations(t)
}

func TestMarkerImporterFindExistingID(t *testing.T) {
	readerWriter := &mocks.SceneMarkerReaderWriter{}

	i := MarkerImporter{
		ReaderWriter: readerWriter,
		SceneID:      sceneID,
		marker: models.SceneMarker{
			Seconds: secondsFloat,
		},
	}

	expectedErr := errors.New("FindBy* error")
	readerWriter.On("FindBySceneID", sceneID).Return([]*models.SceneMarker{
		{
			ID:      existingSceneID,
			Seconds: secondsFloat,
		},
	}, nil).Times(2)
	readerWriter.On("FindBySceneID", errSceneID).Return(nil, expectedErr).Once()

	id, err := i.FindExistingID()
	assert.Equal(t, existingSceneID, *id)
	assert.Nil(t, err)

	i.marker.Seconds++
	id, err = i.FindExistingID()
	assert.Nil(t, id)
	assert.Nil(t, err)

	i.SceneID = errSceneID
	id, err = i.FindExistingID()
	assert.Nil(t, id)
	assert.NotNil(t, err)

	readerWriter.AssertExpectations(t)
}

func TestMarkerImporterCreate(t *testing.T) {
	readerWriter := &mocks.SceneMarkerReaderWriter{}

	scene := models.SceneMarker{
		Title: title,
	}

	sceneErr := models.SceneMarker{
		Title: sceneNameErr,
	}

	i := MarkerImporter{
		ReaderWriter: readerWriter,
		marker:       scene,
	}

	errCreate := errors.New("Create error")
	readerWriter.On("Create", scene).Return(&models.SceneMarker{
		ID: sceneID,
	}, nil).Once()
	readerWriter.On("Create", sceneErr).Return(nil, errCreate).Once()

	id, err := i.Create()
	assert.Equal(t, sceneID, *id)
	assert.Nil(t, err)

	i.marker = sceneErr
	id, err = i.Create()
	assert.Nil(t, id)
	assert.NotNil(t, err)

	readerWriter.AssertExpectations(t)
}

func TestMarkerImporterUpdate(t *testing.T) {
	readerWriter := &mocks.SceneMarkerReaderWriter{}

	scene := models.SceneMarker{
		Title: title,
	}

	sceneErr := models.SceneMarker{
		Title: sceneNameErr,
	}

	i := MarkerImporter{
		ReaderWriter: readerWriter,
		marker:       scene,
	}

	errUpdate := errors.New("Update error")

	// id needs to be set for the mock input
	scene.ID = sceneID
	readerWriter.On("Update", scene).Return(nil, nil).Once()

	err := i.Update(sceneID)
	assert.Nil(t, err)

	i.marker = sceneErr

	// need to set id separately
	sceneErr.ID = errImageID
	readerWriter.On("Update", sceneErr).Return(nil, errUpdate).Once()

	err = i.Update(errImageID)
	assert.NotNil(t, err)

	readerWriter.AssertExpectations(t)
}
143
pkg/studio/import.go
Normal file
@@ -0,0 +1,143 @@
package studio

import (
	"database/sql"
	"errors"
	"fmt"

	"github.com/stashapp/stash/pkg/manager/jsonschema"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/utils"
)

var ErrParentStudioNotExist = errors.New("parent studio does not exist")

type Importer struct {
	ReaderWriter        models.StudioReaderWriter
	Input               jsonschema.Studio
	MissingRefBehaviour models.ImportMissingRefEnum

	studio    models.Studio
	imageData []byte
}

func (i *Importer) PreImport() error {
	checksum := utils.MD5FromString(i.Input.Name)

	i.studio = models.Studio{
		Checksum:  checksum,
		Name:      sql.NullString{String: i.Input.Name, Valid: true},
		URL:       sql.NullString{String: i.Input.URL, Valid: true},
		CreatedAt: models.SQLiteTimestamp{Timestamp: i.Input.CreatedAt.GetTime()},
		UpdatedAt: models.SQLiteTimestamp{Timestamp: i.Input.UpdatedAt.GetTime()},
	}

	if err := i.populateParentStudio(); err != nil {
		return err
	}

	var err error
	if len(i.Input.Image) > 0 {
		_, i.imageData, err = utils.ProcessBase64Image(i.Input.Image)
		if err != nil {
			return fmt.Errorf("invalid image: %s", err.Error())
		}
	}

	return nil
}

func (i *Importer) populateParentStudio() error {
	if i.Input.ParentStudio != "" {
		studio, err := i.ReaderWriter.FindByName(i.Input.ParentStudio, false)
		if err != nil {
			return fmt.Errorf("error finding studio by name: %s", err.Error())
		}

		if studio == nil {
			if i.MissingRefBehaviour == models.ImportMissingRefEnumFail {
				return ErrParentStudioNotExist
			}

			if i.MissingRefBehaviour == models.ImportMissingRefEnumIgnore {
				return nil
			}

			if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate {
				parentID, err := i.createParentStudio(i.Input.ParentStudio)
				if err != nil {
					return err
				}
				i.studio.ParentID = sql.NullInt64{
					Int64: int64(parentID),
					Valid: true,
				}
			}
		} else {
			i.studio.ParentID = sql.NullInt64{Int64: int64(studio.ID), Valid: true}
		}
	}

	return nil
}

func (i *Importer) createParentStudio(name string) (int, error) {
	newStudio := *models.NewStudio(name)

	created, err := i.ReaderWriter.Create(newStudio)
	if err != nil {
		return 0, err
	}

	return created.ID, nil
}

func (i *Importer) PostImport(id int) error {
	if len(i.imageData) > 0 {
		if err := i.ReaderWriter.UpdateStudioImage(id, i.imageData); err != nil {
			return fmt.Errorf("error setting studio image: %s", err.Error())
		}
	}

	return nil
}

func (i *Importer) Name() string {
	return i.Input.Name
}

func (i *Importer) FindExistingID() (*int, error) {
	const nocase = false
	existing, err := i.ReaderWriter.FindByName(i.Name(), nocase)
	if err != nil {
		return nil, err
	}

	if existing != nil {
		id := existing.ID
		return &id, nil
	}

	return nil, nil
}

func (i *Importer) Create() (*int, error) {
	created, err := i.ReaderWriter.Create(i.studio)
	if err != nil {
		return nil, fmt.Errorf("error creating studio: %s", err.Error())
	}

	id := created.ID
	return &id, nil
}

func (i *Importer) Update(id int) error {
	studio := i.studio
	studio.ID = id
	_, err := i.ReaderWriter.UpdateFull(studio)
	if err != nil {
		return fmt.Errorf("error updating existing studio: %s", err.Error())
	}

	return nil
}
|
||||||
263
pkg/studio/import_test.go
Normal file
@@ -0,0 +1,263 @@
package studio

import (
	"errors"
	"testing"

	"github.com/stashapp/stash/pkg/manager/jsonschema"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/models/mocks"
	"github.com/stashapp/stash/pkg/models/modelstest"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
)

const invalidImage = "aW1hZ2VCeXRlcw&&"

const (
	studioNameErr      = "studioNameErr"
	existingStudioName = "existingTagName"

	existingStudioID = 100

	existingParentStudioName = "existingParentStudioName"
	existingParentStudioErr  = "existingParentStudioErr"
	missingParentStudioName  = "existingParentStudioName"
)

func TestImporterName(t *testing.T) {
	i := Importer{
		Input: jsonschema.Studio{
			Name: studioName,
		},
	}

	assert.Equal(t, studioName, i.Name())
}

func TestImporterPreImport(t *testing.T) {
	i := Importer{
		Input: jsonschema.Studio{
			Name:  studioName,
			Image: invalidImage,
		},
	}

	err := i.PreImport()

	assert.NotNil(t, err)

	i.Input.Image = image

	err = i.PreImport()

	assert.Nil(t, err)
}

func TestImporterPreImportWithParent(t *testing.T) {
	readerWriter := &mocks.StudioReaderWriter{}

	i := Importer{
		ReaderWriter: readerWriter,
		Input: jsonschema.Studio{
			Name:         studioName,
			Image:        image,
			ParentStudio: existingParentStudioName,
		},
	}

	readerWriter.On("FindByName", existingParentStudioName, false).Return(&models.Studio{
		ID: existingStudioID,
	}, nil).Once()
	readerWriter.On("FindByName", existingParentStudioErr, false).Return(nil, errors.New("FindByName error")).Once()

	err := i.PreImport()
	assert.Nil(t, err)
	assert.Equal(t, int64(existingStudioID), i.studio.ParentID.Int64)

	i.Input.ParentStudio = existingParentStudioErr
	err = i.PreImport()
	assert.NotNil(t, err)

	readerWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingParent(t *testing.T) {
	readerWriter := &mocks.StudioReaderWriter{}

	i := Importer{
		ReaderWriter: readerWriter,
		Input: jsonschema.Studio{
			Name:         studioName,
			Image:        image,
			ParentStudio: missingParentStudioName,
		},
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
	}

	readerWriter.On("FindByName", missingParentStudioName, false).Return(nil, nil).Times(3)
	readerWriter.On("Create", mock.AnythingOfType("models.Studio")).Return(&models.Studio{
		ID: existingStudioID,
	}, nil)

	err := i.PreImport()
	assert.NotNil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
	err = i.PreImport()
	assert.Nil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
	err = i.PreImport()
	assert.Nil(t, err)
	assert.Equal(t, int64(existingStudioID), i.studio.ParentID.Int64)

	readerWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingParentCreateErr(t *testing.T) {
	readerWriter := &mocks.StudioReaderWriter{}

	i := Importer{
		ReaderWriter: readerWriter,
		Input: jsonschema.Studio{
			Name:         studioName,
			Image:        image,
			ParentStudio: missingParentStudioName,
		},
		MissingRefBehaviour: models.ImportMissingRefEnumCreate,
	}

	readerWriter.On("FindByName", missingParentStudioName, false).Return(nil, nil).Once()
	readerWriter.On("Create", mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error"))

	err := i.PreImport()
	assert.NotNil(t, err)
}

func TestImporterPostImport(t *testing.T) {
	readerWriter := &mocks.StudioReaderWriter{}

	i := Importer{
		ReaderWriter: readerWriter,
		imageData:    imageBytes,
	}

	updateStudioImageErr := errors.New("UpdateStudioImage error")

	readerWriter.On("UpdateStudioImage", studioID, imageBytes).Return(nil).Once()
	readerWriter.On("UpdateStudioImage", errImageID, imageBytes).Return(updateStudioImageErr).Once()

	err := i.PostImport(studioID)
	assert.Nil(t, err)

	err = i.PostImport(errImageID)
	assert.NotNil(t, err)

	readerWriter.AssertExpectations(t)
}

func TestImporterFindExistingID(t *testing.T) {
	readerWriter := &mocks.StudioReaderWriter{}

	i := Importer{
		ReaderWriter: readerWriter,
		Input: jsonschema.Studio{
			Name: studioName,
		},
	}

	errFindByName := errors.New("FindByName error")
	readerWriter.On("FindByName", studioName, false).Return(nil, nil).Once()
	readerWriter.On("FindByName", existingStudioName, false).Return(&models.Studio{
		ID: existingStudioID,
	}, nil).Once()
	readerWriter.On("FindByName", studioNameErr, false).Return(nil, errFindByName).Once()

	id, err := i.FindExistingID()
	assert.Nil(t, id)
	assert.Nil(t, err)

	i.Input.Name = existingStudioName
	id, err = i.FindExistingID()
	assert.Equal(t, existingStudioID, *id)
	assert.Nil(t, err)

	i.Input.Name = studioNameErr
	id, err = i.FindExistingID()
	assert.Nil(t, id)
	assert.NotNil(t, err)

	readerWriter.AssertExpectations(t)
}

func TestCreate(t *testing.T) {
	readerWriter := &mocks.StudioReaderWriter{}

	studio := models.Studio{
		Name: modelstest.NullString(studioName),
	}

	studioErr := models.Studio{
		Name: modelstest.NullString(studioNameErr),
	}

	i := Importer{
		ReaderWriter: readerWriter,
		studio:       studio,
	}

	errCreate := errors.New("Create error")
	readerWriter.On("Create", studio).Return(&models.Studio{
		ID: studioID,
	}, nil).Once()
	readerWriter.On("Create", studioErr).Return(nil, errCreate).Once()

	id, err := i.Create()
	assert.Equal(t, studioID, *id)
	assert.Nil(t, err)

	i.studio = studioErr
	id, err = i.Create()
	assert.Nil(t, id)
	assert.NotNil(t, err)

	readerWriter.AssertExpectations(t)
}

func TestUpdate(t *testing.T) {
	readerWriter := &mocks.StudioReaderWriter{}

	studio := models.Studio{
		Name: modelstest.NullString(studioName),
	}

	studioErr := models.Studio{
		Name: modelstest.NullString(studioNameErr),
	}

	i := Importer{
		ReaderWriter: readerWriter,
		studio:       studio,
	}

	errUpdate := errors.New("Update error")

	// id needs to be set for the mock input
	studio.ID = studioID
	readerWriter.On("UpdateFull", studio).Return(nil, nil).Once()

	err := i.Update(studioID)
	assert.Nil(t, err)

	i.studio = studioErr

	// need to set id separately
	studioErr.ID = errImageID
	readerWriter.On("UpdateFull", studioErr).Return(nil, errUpdate).Once()

	err = i.Update(errImageID)
	assert.NotNil(t, err)

	readerWriter.AssertExpectations(t)
}
85
pkg/tag/import.go
Normal file
@@ -0,0 +1,85 @@
package tag

import (
	"fmt"

	"github.com/stashapp/stash/pkg/manager/jsonschema"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/utils"
)

type Importer struct {
	ReaderWriter models.TagReaderWriter
	Input        jsonschema.Tag

	tag       models.Tag
	imageData []byte
}

func (i *Importer) PreImport() error {
	i.tag = models.Tag{
		Name:      i.Input.Name,
		CreatedAt: models.SQLiteTimestamp{Timestamp: i.Input.CreatedAt.GetTime()},
		UpdatedAt: models.SQLiteTimestamp{Timestamp: i.Input.UpdatedAt.GetTime()},
	}

	var err error
	if len(i.Input.Image) > 0 {
		_, i.imageData, err = utils.ProcessBase64Image(i.Input.Image)
		if err != nil {
			return fmt.Errorf("invalid image: %s", err.Error())
		}
	}

	return nil
}

func (i *Importer) PostImport(id int) error {
	if len(i.imageData) > 0 {
		if err := i.ReaderWriter.UpdateTagImage(id, i.imageData); err != nil {
			return fmt.Errorf("error setting tag image: %s", err.Error())
		}
	}

	return nil
}

func (i *Importer) Name() string {
	return i.Input.Name
}

func (i *Importer) FindExistingID() (*int, error) {
	const nocase = false
	existing, err := i.ReaderWriter.FindByName(i.Name(), nocase)
	if err != nil {
		return nil, err
	}

	if existing != nil {
		id := existing.ID
		return &id, nil
	}

	return nil, nil
}

func (i *Importer) Create() (*int, error) {
	created, err := i.ReaderWriter.Create(i.tag)
	if err != nil {
		return nil, fmt.Errorf("error creating tag: %s", err.Error())
	}

	id := created.ID
	return &id, nil
}

func (i *Importer) Update(id int) error {
	tag := i.tag
	tag.ID = id
	_, err := i.ReaderWriter.Update(tag)
	if err != nil {
		return fmt.Errorf("error updating existing tag: %s", err.Error())
	}

	return nil
}
179
pkg/tag/import_test.go
Normal file
@@ -0,0 +1,179 @@
package tag

import (
	"errors"
	"testing"

	"github.com/stashapp/stash/pkg/manager/jsonschema"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/models/mocks"
	"github.com/stretchr/testify/assert"
)

const image = "aW1hZ2VCeXRlcw=="
const invalidImage = "aW1hZ2VCeXRlcw&&"

var imageBytes = []byte("imageBytes")

const (
	tagNameErr      = "tagNameErr"
	existingTagName = "existingTagName"

	existingTagID = 100
)

func TestImporterName(t *testing.T) {
	i := Importer{
		Input: jsonschema.Tag{
			Name: tagName,
		},
	}

	assert.Equal(t, tagName, i.Name())
}

func TestImporterPreImport(t *testing.T) {
	i := Importer{
		Input: jsonschema.Tag{
			Name:  tagName,
			Image: invalidImage,
		},
	}

	err := i.PreImport()

	assert.NotNil(t, err)

	i.Input.Image = image

	err = i.PreImport()

	assert.Nil(t, err)
}

func TestImporterPostImport(t *testing.T) {
	readerWriter := &mocks.TagReaderWriter{}

	i := Importer{
		ReaderWriter: readerWriter,
		imageData:    imageBytes,
	}

	updateTagImageErr := errors.New("UpdateTagImage error")

	readerWriter.On("UpdateTagImage", tagID, imageBytes).Return(nil).Once()
	readerWriter.On("UpdateTagImage", errImageID, imageBytes).Return(updateTagImageErr).Once()

	err := i.PostImport(tagID)
	assert.Nil(t, err)

	err = i.PostImport(errImageID)
	assert.NotNil(t, err)

	readerWriter.AssertExpectations(t)
}

func TestImporterFindExistingID(t *testing.T) {
	readerWriter := &mocks.TagReaderWriter{}

	i := Importer{
		ReaderWriter: readerWriter,
		Input: jsonschema.Tag{
			Name: tagName,
		},
	}

	errFindByName := errors.New("FindByName error")
	readerWriter.On("FindByName", tagName, false).Return(nil, nil).Once()
	readerWriter.On("FindByName", existingTagName, false).Return(&models.Tag{
		ID: existingTagID,
	}, nil).Once()
	readerWriter.On("FindByName", tagNameErr, false).Return(nil, errFindByName).Once()

	id, err := i.FindExistingID()
	assert.Nil(t, id)
	assert.Nil(t, err)

	i.Input.Name = existingTagName
	id, err = i.FindExistingID()
	assert.Equal(t, existingTagID, *id)
	assert.Nil(t, err)

	i.Input.Name = tagNameErr
	id, err = i.FindExistingID()
	assert.Nil(t, id)
	assert.NotNil(t, err)

	readerWriter.AssertExpectations(t)
}

func TestCreate(t *testing.T) {
	readerWriter := &mocks.TagReaderWriter{}

	tag := models.Tag{
		Name: tagName,
	}

	tagErr := models.Tag{
		Name: tagNameErr,
	}

	i := Importer{
		ReaderWriter: readerWriter,
		tag:          tag,
	}

	errCreate := errors.New("Create error")
	readerWriter.On("Create", tag).Return(&models.Tag{
		ID: tagID,
	}, nil).Once()
	readerWriter.On("Create", tagErr).Return(nil, errCreate).Once()

	id, err := i.Create()
	assert.Equal(t, tagID, *id)
	assert.Nil(t, err)

	i.tag = tagErr
	id, err = i.Create()
	assert.Nil(t, id)
	assert.NotNil(t, err)

	readerWriter.AssertExpectations(t)
}

func TestUpdate(t *testing.T) {
	readerWriter := &mocks.TagReaderWriter{}

	tag := models.Tag{
		Name: tagName,
	}

	tagErr := models.Tag{
		Name: tagNameErr,
	}

	i := Importer{
		ReaderWriter: readerWriter,
		tag:          tag,
	}

	errUpdate := errors.New("Update error")

	// id needs to be set for the mock input
	tag.ID = tagID
	readerWriter.On("Update", tag).Return(nil, nil).Once()

	err := i.Update(tagID)
	assert.Nil(t, err)

	i.tag = tagErr

	// need to set id separately
	tagErr.ID = errImageID
	readerWriter.On("Update", tagErr).Return(nil, errUpdate).Once()

	err = i.Update(errImageID)
	assert.NotNil(t, err)

	readerWriter.AssertExpectations(t)
}
@@ -31,7 +31,9 @@
     "@fortawesome/free-regular-svg-icons": "^5.14.0",
     "@fortawesome/free-solid-svg-icons": "^5.14.0",
     "@fortawesome/react-fontawesome": "^0.1.11",
+    "@types/apollo-upload-client": "^14.1.0",
     "@types/mousetrap": "^1.6.3",
+    "apollo-upload-client": "^14.1.2",
     "axios": "0.20.0",
     "bootstrap": "^4.5.2",
     "classnames": "^2.2.6",
@@ -1,4 +1,5 @@
 ### ✨ New Features
+* Add partial import from zip file.
 * Add selective scene export.
 
 ### 🎨 Improvements
@@ -0,0 +1,171 @@
import React, { useState } from "react";
import { Form } from "react-bootstrap";
import { mutateImportObjects } from "src/core/StashService";
import { Modal } from "src/components/Shared";
import * as GQL from "src/core/generated-graphql";
import { useToast } from "src/hooks";

interface IImportDialogProps {
  onClose: () => void;
}

export const ImportDialog: React.FC<IImportDialogProps> = (
  props: IImportDialogProps
) => {
  const [duplicateBehaviour, setDuplicateBehaviour] = useState<string>(
    duplicateHandlingToString(GQL.ImportDuplicateEnum.Ignore)
  );

  const [missingRefBehaviour, setMissingRefBehaviour] = useState<string>(
    missingRefHandlingToString(GQL.ImportMissingRefEnum.Fail)
  );

  const [file, setFile] = useState<File | undefined>();

  // Network state
  const [isRunning, setIsRunning] = useState(false);

  const Toast = useToast();

  function duplicateHandlingToString(
    value: GQL.ImportDuplicateEnum | undefined
  ) {
    switch (value) {
      case GQL.ImportDuplicateEnum.Fail:
        return "Fail";
      case GQL.ImportDuplicateEnum.Ignore:
        return "Ignore";
      case GQL.ImportDuplicateEnum.Overwrite:
        return "Overwrite";
    }
    return "Ignore";
  }

  function translateDuplicateHandling(value: string) {
    switch (value) {
      case "Fail":
        return GQL.ImportDuplicateEnum.Fail;
      case "Ignore":
        return GQL.ImportDuplicateEnum.Ignore;
      case "Overwrite":
        return GQL.ImportDuplicateEnum.Overwrite;
    }

    return GQL.ImportDuplicateEnum.Ignore;
  }

  function missingRefHandlingToString(
    value: GQL.ImportMissingRefEnum | undefined
  ) {
    switch (value) {
      case GQL.ImportMissingRefEnum.Fail:
        return "Fail";
      case GQL.ImportMissingRefEnum.Ignore:
        return "Ignore";
      case GQL.ImportMissingRefEnum.Create:
        return "Create";
    }
    return "Fail";
  }

  function translateMissingRefHandling(value: string) {
    switch (value) {
      case "Fail":
        return GQL.ImportMissingRefEnum.Fail;
      case "Ignore":
        return GQL.ImportMissingRefEnum.Ignore;
      case "Create":
        return GQL.ImportMissingRefEnum.Create;
    }

    return GQL.ImportMissingRefEnum.Fail;
  }

  function onFileChange(event: React.ChangeEvent<HTMLInputElement>) {
    if (
      event.target.validity.valid &&
      event.target.files &&
      event.target.files.length > 0
    ) {
      setFile(event.target.files[0]);
    }
  }

  async function onImport() {
    try {
      setIsRunning(true);
      await mutateImportObjects({
        duplicateBehaviour: translateDuplicateHandling(duplicateBehaviour),
        missingRefBehaviour: translateMissingRefHandling(missingRefBehaviour),
        file,
      });
      setIsRunning(false);
      Toast.success({ content: "Started importing" });
    } catch (e) {
      Toast.error(e);
    } finally {
      props.onClose();
    }
  }

  return (
    <Modal
      show
      icon="pencil-alt"
      header="Import"
      accept={{
        onClick: () => {
          onImport();
        },
        text: "Import",
      }}
      cancel={{
        onClick: () => props.onClose(),
        text: "Cancel",
        variant: "secondary",
      }}
      disabled={!file}
      isRunning={isRunning}
    >
      <div className="dialog-container">
        <Form>
          <Form.Group id="import-file">
            <h6>Import zip file</h6>
            <Form.File onChange={onFileChange} accept=".zip" />
          </Form.Group>
          <Form.Group id="duplicate-handling">
            <h6>Duplicate object handling</h6>
            <Form.Control
              className="w-auto input-control"
              as="select"
              value={duplicateBehaviour}
              onChange={(e: React.ChangeEvent<HTMLSelectElement>) =>
                setDuplicateBehaviour(e.currentTarget.value)
              }
            >
              {Object.values(GQL.ImportDuplicateEnum).map((p) => (
                <option key={p}>{duplicateHandlingToString(p)}</option>
              ))}
            </Form.Control>
          </Form.Group>

          <Form.Group id="missing-ref-handling">
            <h6>Missing reference handling</h6>
            <Form.Control
              className="w-auto input-control"
              as="select"
              value={missingRefBehaviour}
              onChange={(e: React.ChangeEvent<HTMLSelectElement>) =>
                setMissingRefBehaviour(e.currentTarget.value)
              }
            >
              {Object.values(GQL.ImportMissingRefEnum).map((p) => (
                <option key={p}>{missingRefHandlingToString(p)}</option>
              ))}
            </Form.Control>
          </Form.Group>
        </Form>
      </div>
    </Modal>
  );
};
@@ -18,6 +18,7 @@ import { useToast } from "src/hooks";
 import * as GQL from "src/core/generated-graphql";
 import { Modal } from "src/components/Shared";
 import { GenerateButton } from "./GenerateButton";
+import { ImportDialog } from "./ImportDialog";
 
 type Plugin = Pick<GQL.Plugin, "id">;
 type PluginTask = Pick<GQL.PluginTask, "name" | "description">;
@@ -26,6 +27,7 @@ export const SettingsTasksPanel: React.FC = () => {
   const Toast = useToast();
   const [isImportAlertOpen, setIsImportAlertOpen] = useState<boolean>(false);
   const [isCleanAlertOpen, setIsCleanAlertOpen] = useState<boolean>(false);
+  const [isImportDialogOpen, setIsImportDialogOpen] = useState<boolean>(false);
   const [useFileMetadata, setUseFileMetadata] = useState<boolean>(false);
   const [status, setStatus] = useState<string>("");
   const [progress, setProgress] = useState<number>(0);
@@ -135,6 +137,14 @@ export const SettingsTasksPanel: React.FC = () => {
     );
   }
 
+  function renderImportDialog() {
+    if (!isImportDialogOpen) {
+      return;
+    }
+
+    return <ImportDialog onClose={() => setIsImportDialogOpen(false)} />;
+  }
+
   async function onScan() {
     try {
       await mutateMetadataScan({ useFileMetadata });
@@ -256,6 +266,7 @@ export const SettingsTasksPanel: React.FC = () => {
     <>
       {renderImportAlert()}
       {renderCleanAlert()}
+      {renderImportDialog()}
 
       <h4>Running Jobs</h4>
 
@@ -352,10 +363,11 @@ export const SettingsTasksPanel: React.FC = () => {
             })
           }
         >
-          Export
+          Full Export
         </Button>
         <Form.Text className="text-muted">
-          Export the database content into JSON format.
+          Exports the database content into JSON format in the metadata
+          directory.
         </Form.Text>
       </Form.Group>
 
@@ -365,10 +377,24 @@ export const SettingsTasksPanel: React.FC = () => {
           variant="danger"
           onClick={() => setIsImportAlertOpen(true)}
         >
-          Import
+          Full Import
         </Button>
         <Form.Text className="text-muted">
-          Import from exported JSON. This is a destructive action.
+          Import from exported JSON in the metadata directory. Wipes the
+          existing database.
+        </Form.Text>
+      </Form.Group>
+
+      <Form.Group>
+        <Button
+          id="partial-import"
+          variant="danger"
+          onClick={() => setIsImportDialogOpen(true)}
+        >
+          Import from file
+        </Button>
+        <Form.Text className="text-muted">
+          Incremental import from a supplied export zip file.
         </Form.Text>
       </Form.Group>
 
@@ -17,6 +17,7 @@ interface IModal {
   cancel?: IButton;
   accept?: IButton;
   isRunning?: boolean;
+  disabled?: boolean;
   modalProps?: ModalProps;
 }
 
@@ -29,6 +30,7 @@ const ModalComponent: React.FC<IModal> = ({
   accept,
   onHide,
   isRunning,
+  disabled,
   modalProps,
 }) => (
   <Modal keyboard={false} onHide={onHide} show={show} {...modalProps}>
@@ -51,7 +53,7 @@ const ModalComponent: React.FC<IModal> = ({
         ""
       )}
       <Button
-        disabled={isRunning}
+        disabled={isRunning || disabled}
         variant={accept?.variant ?? "primary"}
         onClick={accept?.onClick}
       >
@@ -574,6 +574,12 @@ export const mutateMetadataImport = () =>
     mutation: GQL.MetadataImportDocument,
   });
 
+export const mutateImportObjects = (input: GQL.ImportObjectsInput) =>
+  client.mutate<GQL.ImportObjectsMutation>({
+    mutation: GQL.ImportObjectsDocument,
+    variables: { input },
+  });
+
 export const querySceneByPathRegex = (filter: GQL.FindFilterType) =>
   client.query<GQL.FindScenesByPathRegexQuery>({
     query: GQL.FindScenesByPathRegexDocument,
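For reference, a minimal usage sketch of the new mutateImportObjects helper (illustrative only, not part of this commit; importZip is a hypothetical name, and the File would normally come from a file input as in ImportDialog above):

// Hypothetical usage sketch of the helper added above.
import * as GQL from "src/core/generated-graphql";
import { mutateImportObjects } from "src/core/StashService";

export async function importZip(file: File) {
  // The selected zip travels as the GraphQL Upload scalar; the enum values
  // control duplicate and missing-reference handling on the server.
  await mutateImportObjects({
    file,
    duplicateBehaviour: GQL.ImportDuplicateEnum.Ignore,
    missingRefBehaviour: GQL.ImportMissingRefEnum.Create,
  });
}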
@@ -4,11 +4,11 @@ import {
   split,
   from,
   ServerError,
-  HttpLink,
 } from "@apollo/client";
 import { WebSocketLink } from "@apollo/client/link/ws";
 import { onError } from "@apollo/client/link/error";
 import { getMainDefinition } from "@apollo/client/utilities";
+import { createUploadLink } from "apollo-upload-client";
 
 export const getPlatformURL = (ws?: boolean) => {
   const platformUrl = new URL(window.location.origin);
@@ -39,7 +39,7 @@ export const createClient = () => {
   const url = `${platformUrl.toString().slice(0, -1)}/graphql`;
   const wsUrl = `${wsPlatformUrl.toString().slice(0, -1)}/graphql`;
 
-  const httpLink = new HttpLink({
+  const httpLink = createUploadLink({
     uri: url,
   });
 
@@ -67,6 +67,7 @@ export const createClient = () => {
     );
   },
   wsLink,
+  // @ts-ignore
   httpLink
 );
 
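The HttpLink-to-createUploadLink swap above is what lets the upload reach the server: apollo-upload-client's terminating link behaves like a plain HTTP link for ordinary operations and switches to a multipart/form-data request when it finds File or Blob values in the variables. A stripped-down sketch of that idea (illustrative only; the uri is a placeholder and this is not the project's actual client setup, and the // @ts-ignore mirrors the one added above, presumably to bridge a type mismatch between the two packages' ApolloLink declarations):

// Simplified sketch: an Apollo client whose terminating link supports uploads.
import { ApolloClient, InMemoryCache } from "@apollo/client";
import { createUploadLink } from "apollo-upload-client";

const client = new ApolloClient({
  cache: new InMemoryCache(),
  // @ts-ignore -- createUploadLink's typings use a different ApolloLink
  // declaration than @apollo/client's (assumed reason for the ignore above).
  link: createUploadLink({ uri: "/graphql" }),
});

export default client;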
@@ -2,6 +2,26 @@
 # yarn lockfile v1
 
 
+"@apollo/client@^3.1.3", "@apollo/client@^3.1.5":
+  version "3.2.0"
+  resolved "https://registry.yarnpkg.com/@apollo/client/-/client-3.2.0.tgz#d16ea4384a2126bf60e7d87b0a6c6df00382220b"
+  integrity sha512-6ISMYW9QpEykJAkN6ZZteTkXXwtYSPGbh+4iBZ478p/Eox1JOMGYlqosGgMGv2oduug9SnsR65y0iCAxKOFGiQ==
+  dependencies:
+    "@graphql-typed-document-node/core" "^3.0.0"
+    "@types/zen-observable" "^0.8.0"
+    "@wry/context" "^0.5.2"
+    "@wry/equality" "^0.2.0"
+    fast-json-stable-stringify "^2.0.0"
+    graphql-tag "^2.11.0"
+    hoist-non-react-statics "^3.3.2"
+    optimism "^0.12.1"
+    prop-types "^15.7.2"
+    symbol-observable "^2.0.0"
+    terser "^5.2.0"
+    ts-invariant "^0.4.4"
+    tslib "^1.10.0"
+    zen-observable "^0.8.14"
+
 "@apollo/client@^3.1.4":
   version "3.1.4"
   resolved "https://registry.yarnpkg.com/@apollo/client/-/client-3.1.4.tgz#2848a9f29619275df9af55966c4f5984e31cea6e"
@@ -1488,7 +1508,7 @@
   dependencies:
     regenerator-runtime "^0.13.2"
 
-"@babel/runtime@^7.10.2":
+"@babel/runtime@^7.10.2", "@babel/runtime@^7.11.2":
   version "7.11.2"
   resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.11.2.tgz#f549c13c754cc40b87644b9fa9f09a6a95fe0736"
   integrity sha512-TeWkU52so0mPtDcaCTxNBI/IHiz0pZgr8VEFqXFtZWpYD08ZB6FaSwVAS8MKRQAP3bYKiVjwysOJgMFY28o6Tw==
@@ -2186,6 +2206,11 @@
     is-promise "4.0.0"
     tslib "~2.0.1"
 
+"@graphql-typed-document-node/core@^3.0.0":
+  version "3.1.0"
+  resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.1.0.tgz#0eee6373e11418bfe0b5638f654df7a4ca6a3950"
+  integrity sha512-wYn6r8zVZyQJ6rQaALBEln5B1pzxb9shV5Ef97kTvn6yVGrqyXVnDqnU24MXnFubR+rZjBY9NWuxX3FB2sTsjg==
+
 "@hapi/address@2.x.x":
   version "2.1.4"
   resolved "https://registry.yarnpkg.com/@hapi/address/-/address-2.1.4.tgz#5d67ed43f3fd41a69d4b9ff7b56e7c0d1d0a81e5"
@@ -2843,6 +2868,15 @@
   resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82"
   integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==
 
+"@types/apollo-upload-client@^14.1.0":
+  version "14.1.0"
+  resolved "https://registry.yarnpkg.com/@types/apollo-upload-client/-/apollo-upload-client-14.1.0.tgz#21a57d7e3f29ff946ba51a53b3d7da46ddd21fbc"
+  integrity sha512-ZLvcEqu+l9qKGdrIpASt/A2WY1ghAC9L3qaoegkiBOccjxvQmWN9liZzVFiuHTuWseWpVbMklqbs/z+KEjll9Q==
+  dependencies:
+    "@apollo/client" "^3.1.3"
+    "@types/extract-files" "*"
+    graphql "^15.3.0"
+
 "@types/babel__core@^7.1.0":
   version "7.1.3"
   resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.3.tgz#e441ea7df63cd080dfcd02ab199e6d16a735fc30"
@@ -2912,6 +2946,11 @@
   resolved "https://registry.yarnpkg.com/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7"
   integrity sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g==
 
+"@types/extract-files@*":
+  version "8.1.0"
+  resolved "https://registry.yarnpkg.com/@types/extract-files/-/extract-files-8.1.0.tgz#4728440e1d92a6d1d11ac47f5a10e3f9ce47f044"
+  integrity sha512-ulxvlFU71yLVV3JxdBgryASAIp+aZQuQOpkhU1SznJlcWz0qsJCWHqdJqP6Lprs3blqGS5FH5GbBkU0977+Wew==
+
 "@types/fs-extra@^8.1.0":
   version "8.1.0"
   resolved "https://registry.yarnpkg.com/@types/fs-extra/-/fs-extra-8.1.0.tgz#1114834b53c3914806cd03b3304b37b3bd221a4d"
@@ -3719,6 +3758,15 @@ aphrodite@^0.5.0:
     asap "^2.0.3"
     inline-style-prefixer "^2.0.0"
 
+apollo-upload-client@^14.1.2:
+  version "14.1.2"
+  resolved "https://registry.yarnpkg.com/apollo-upload-client/-/apollo-upload-client-14.1.2.tgz#7a72b000f1cd67eaf8f12b4bda2796d0898c0dae"
+  integrity sha512-ozaW+4tnVz1rpfwiQwG3RCdCcZ93RV/37ZQbRnObcQ9mjb+zur58sGDPVg9Ef3fiujLmiE/Fe9kdgvIMA3VOjA==
+  dependencies:
+    "@apollo/client" "^3.1.5"
+    "@babel/runtime" "^7.11.2"
+    extract-files "^9.0.0"
+
 aproba@^1.0.3, aproba@^1.1.1:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a"
@@ -14607,6 +14655,11 @@ symbol-observable@^1.0.4, symbol-observable@^1.1.0, symbol-observable@^1.2.0:
   resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804"
   integrity sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ==
 
+symbol-observable@^2.0.0:
+  version "2.0.1"
+  resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-2.0.1.tgz#ce66c36a04ed0f3056e7293184749a6fdd7063ea"
+  integrity sha512-QrfHrrEUMadQCgMijc3YpfA4ncwgqGv58Xgvdu3JZVQB7iY7cAkiqobZEZbaA863jof8AdpR01CPnZ5UWeqZBQ==
+
 symbol-tree@^3.2.2:
   version "3.2.4"
   resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2"