Mirror of https://github.com/stashapp/stash.git (synced 2025-12-18 04:44:37 +03:00)
[Files Refactor] Import export fixup (#2763)
* Adjust json schema
* Remove mappings file from export
* Import file/folder support
* Update documentation
* Make gallery filenames unique
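As context for the schema change: exported galleries are no longer identified by a checksum but by a small reference object (folder path, zip file paths, or title). The sketch below illustrates that shape and the precedence order used by GetRefs in the diff; the JSON tags and the standalone program around the type are illustrative assumptions, not the actual schema definitions.

package main

import (
    "encoding/json"
    "fmt"
)

// GalleryRef is a minimal stand-in for jsonschema.GalleryRef as it appears in
// the diff below: a gallery is referenced by folder path, zip files, or title.
// The JSON tags here are assumed for illustration only.
type GalleryRef struct {
    FolderPath string   `json:"folder_path,omitempty"`
    ZipFiles   []string `json:"zip_files,omitempty"`
    Title      string   `json:"title,omitempty"`
}

// refFor mirrors the precedence used by GetRefs in the diff: prefer the
// folder path, then the zip file paths, and fall back to the title.
func refFor(folderPath string, zipFiles []string, title string) GalleryRef {
    switch {
    case folderPath != "":
        return GalleryRef{FolderPath: folderPath}
    case len(zipFiles) > 0:
        return GalleryRef{ZipFiles: zipFiles}
    default:
        return GalleryRef{Title: title}
    }
}

func main() {
    refs := []GalleryRef{
        refFor("/galleries/holiday", nil, "Holiday"),
        refFor("", []string{"/galleries/holiday.zip"}, "Holiday"),
        refFor("", nil, "Holiday"),
    }
    out, _ := json.MarshalIndent(refs, "", "  ")
    fmt.Println(string(out))
}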
@@ -13,14 +13,17 @@ import (
// does not convert the relationships to other objects.
func ToBasicJSON(gallery *models.Gallery) (*jsonschema.Gallery, error) {
newGalleryJSON := jsonschema.Gallery{
Title: gallery.Title,
URL: gallery.URL,
Details: gallery.Details,
CreatedAt: json.JSONTime{Time: gallery.CreatedAt},
UpdatedAt: json.JSONTime{Time: gallery.UpdatedAt},
FolderPath: gallery.FolderPath,
Title: gallery.Title,
URL: gallery.URL,
Details: gallery.Details,
CreatedAt: json.JSONTime{Time: gallery.CreatedAt},
UpdatedAt: json.JSONTime{Time: gallery.UpdatedAt},
}

newGalleryJSON.Path = gallery.Path()
for _, f := range gallery.Files {
newGalleryJSON.ZipFiles = append(newGalleryJSON.ZipFiles, f.Base().Path)
}

if gallery.Date != nil {
newGalleryJSON.Date = gallery.Date.String()
@@ -61,12 +64,22 @@ func GetIDs(galleries []*models.Gallery) []int {
return results
}

func GetChecksums(galleries []*models.Gallery) []string {
var results []string
func GetRefs(galleries []*models.Gallery) []jsonschema.GalleryRef {
var results []jsonschema.GalleryRef
for _, gallery := range galleries {
if gallery.Checksum() != "" {
results = append(results, gallery.Checksum())
toAdd := jsonschema.GalleryRef{}
switch {
case gallery.FolderPath != "":
toAdd.FolderPath = gallery.FolderPath
case len(gallery.Files) > 0:
for _, f := range gallery.Files {
toAdd.ZipFiles = append(toAdd.ZipFiles, f.Base().Path)
}
default:
toAdd.Title = gallery.Title
}

results = append(results, toAdd)
}

return results
@@ -1,171 +1,162 @@
package gallery

// import (
// "errors"
import (
"errors"

// "github.com/stashapp/stash/pkg/models"
// "github.com/stashapp/stash/pkg/models/json"
// "github.com/stashapp/stash/pkg/models/jsonschema"
// "github.com/stashapp/stash/pkg/models/mocks"
// "github.com/stretchr/testify/assert"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/json"
"github.com/stashapp/stash/pkg/models/jsonschema"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert"

// "testing"
// "time"
// )
"testing"
"time"
)

// const (
// galleryID = 1
const (
galleryID = 1

// studioID = 4
// missingStudioID = 5
// errStudioID = 6
studioID = 4
missingStudioID = 5
errStudioID = 6

// // noTagsID = 11
// )
// noTagsID = 11
)

// var (
// path = "path"
// isZip = true
// url = "url"
// checksum = "checksum"
// title = "title"
// date = "2001-01-01"
// dateObj = models.NewDate(date)
// rating = 5
// organized = true
// details = "details"
// )
var (
url = "url"
title = "title"
date = "2001-01-01"
dateObj = models.NewDate(date)
rating = 5
organized = true
details = "details"
)

// const (
// studioName = "studioName"
// )
const (
studioName = "studioName"
)

// var (
// createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC)
// updateTime = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC)
// )
var (
createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC)
updateTime = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC)
)

// func createFullGallery(id int) models.Gallery {
// return models.Gallery{
// ID: id,
// Path: &path,
// Zip: isZip,
// Title: title,
// Checksum: checksum,
// Date: &dateObj,
// Details: details,
// Rating: &rating,
// Organized: organized,
// URL: url,
// CreatedAt: createTime,
// UpdatedAt: updateTime,
// }
// }
func createFullGallery(id int) models.Gallery {
return models.Gallery{
ID: id,
Title: title,
Date: &dateObj,
Details: details,
Rating: &rating,
Organized: organized,
URL: url,
CreatedAt: createTime,
UpdatedAt: updateTime,
}
}

// func createFullJSONGallery() *jsonschema.Gallery {
// return &jsonschema.Gallery{
// Title: title,
// Path: path,
// Zip: isZip,
// Checksum: checksum,
// Date: date,
// Details: details,
// Rating: rating,
// Organized: organized,
// URL: url,
// CreatedAt: json.JSONTime{
// Time: createTime,
// },
// UpdatedAt: json.JSONTime{
// Time: updateTime,
// },
// }
// }
func createFullJSONGallery() *jsonschema.Gallery {
return &jsonschema.Gallery{
Title: title,
Date: date,
Details: details,
Rating: rating,
Organized: organized,
URL: url,
CreatedAt: json.JSONTime{
Time: createTime,
},
UpdatedAt: json.JSONTime{
Time: updateTime,
},
}
}

// type basicTestScenario struct {
// input models.Gallery
// expected *jsonschema.Gallery
// err bool
// }
type basicTestScenario struct {
input models.Gallery
expected *jsonschema.Gallery
err bool
}

// var scenarios = []basicTestScenario{
// {
// createFullGallery(galleryID),
// createFullJSONGallery(),
// false,
// },
// }
var scenarios = []basicTestScenario{
{
createFullGallery(galleryID),
createFullJSONGallery(),
false,
},
}

// func TestToJSON(t *testing.T) {
// for i, s := range scenarios {
// gallery := s.input
// json, err := ToBasicJSON(&gallery)
func TestToJSON(t *testing.T) {
for i, s := range scenarios {
gallery := s.input
json, err := ToBasicJSON(&gallery)

// switch {
// case !s.err && err != nil:
// t.Errorf("[%d] unexpected error: %s", i, err.Error())
// case s.err && err == nil:
// t.Errorf("[%d] expected error not returned", i)
// default:
// assert.Equal(t, s.expected, json, "[%d]", i)
// }
// }
// }
switch {
case !s.err && err != nil:
t.Errorf("[%d] unexpected error: %s", i, err.Error())
case s.err && err == nil:
t.Errorf("[%d] expected error not returned", i)
default:
assert.Equal(t, s.expected, json, "[%d]", i)
}
}
}

// func createStudioGallery(studioID int) models.Gallery {
// return models.Gallery{
// StudioID: &studioID,
// }
// }
func createStudioGallery(studioID int) models.Gallery {
return models.Gallery{
StudioID: &studioID,
}
}

// type stringTestScenario struct {
// input models.Gallery
// expected string
// err bool
// }
type stringTestScenario struct {
input models.Gallery
expected string
err bool
}

// var getStudioScenarios = []stringTestScenario{
// {
// createStudioGallery(studioID),
// studioName,
// false,
// },
// {
// createStudioGallery(missingStudioID),
// "",
// false,
// },
// {
// createStudioGallery(errStudioID),
// "",
// true,
// },
// }
var getStudioScenarios = []stringTestScenario{
{
createStudioGallery(studioID),
studioName,
false,
},
{
createStudioGallery(missingStudioID),
"",
false,
},
{
createStudioGallery(errStudioID),
"",
true,
},
}

// func TestGetStudioName(t *testing.T) {
// mockStudioReader := &mocks.StudioReaderWriter{}
func TestGetStudioName(t *testing.T) {
mockStudioReader := &mocks.StudioReaderWriter{}

// studioErr := errors.New("error getting image")
studioErr := errors.New("error getting image")

// mockStudioReader.On("Find", testCtx, studioID).Return(&models.Studio{
// Name: models.NullString(studioName),
// }, nil).Once()
// mockStudioReader.On("Find", testCtx, missingStudioID).Return(nil, nil).Once()
// mockStudioReader.On("Find", testCtx, errStudioID).Return(nil, studioErr).Once()
mockStudioReader.On("Find", testCtx, studioID).Return(&models.Studio{
Name: models.NullString(studioName),
}, nil).Once()
mockStudioReader.On("Find", testCtx, missingStudioID).Return(nil, nil).Once()
mockStudioReader.On("Find", testCtx, errStudioID).Return(nil, studioErr).Once()

// for i, s := range getStudioScenarios {
// gallery := s.input
// json, err := GetStudioName(testCtx, mockStudioReader, &gallery)
for i, s := range getStudioScenarios {
gallery := s.input
json, err := GetStudioName(testCtx, mockStudioReader, &gallery)

// switch {
// case !s.err && err != nil:
// t.Errorf("[%d] unexpected error: %s", i, err.Error())
// case s.err && err == nil:
// t.Errorf("[%d] expected error not returned", i)
// default:
// assert.Equal(t, s.expected, json, "[%d]", i)
// }
// }
switch {
case !s.err && err != nil:
t.Errorf("[%d] unexpected error: %s", i, err.Error())
case s.err && err == nil:
t.Errorf("[%d] expected error not returned", i)
default:
assert.Equal(t, s.expected, json, "[%d]", i)
}
}

// mockStudioReader.AssertExpectations(t)
// }
mockStudioReader.AssertExpectations(t)
}
@@ -3,8 +3,10 @@ package gallery
import (
"context"
"fmt"
"path/filepath"
"strings"

"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/jsonschema"
"github.com/stashapp/stash/pkg/performer"
@@ -18,6 +20,8 @@ type Importer struct {
StudioWriter studio.NameFinderCreator
PerformerWriter performer.NameFinderCreator
TagWriter tag.NameFinderCreator
FileFinder file.Getter
FolderFinder file.FolderGetter
Input jsonschema.Gallery
MissingRefBehaviour models.ImportMissingRefEnum

@@ -32,6 +36,10 @@ type FullCreatorUpdater interface {
func (i *Importer) PreImport(ctx context.Context) error {
i.gallery = i.galleryJSONToGallery(i.Input)

if err := i.populateFilesFolder(ctx); err != nil {
return err
}

if err := i.populateStudio(ctx); err != nil {
return err
}
@@ -238,31 +246,97 @@ func (i *Importer) createTags(ctx context.Context, names []string) ([]*models.Ta
return ret, nil
}

func (i *Importer) populateFilesFolder(ctx context.Context) error {
for _, ref := range i.Input.ZipFiles {
path := filepath.FromSlash(ref)
f, err := i.FileFinder.FindByPath(ctx, path)
if err != nil {
return fmt.Errorf("error finding file: %w", err)
}

if f == nil {
return fmt.Errorf("gallery zip file '%s' not found", path)
} else {
i.gallery.Files = append(i.gallery.Files, f)
}
}

if i.Input.FolderPath != "" {
path := filepath.FromSlash(i.Input.FolderPath)
f, err := i.FolderFinder.FindByPath(ctx, path)
if err != nil {
return fmt.Errorf("error finding folder: %w", err)
}

if f == nil {
return fmt.Errorf("gallery folder '%s' not found", path)
} else {
i.gallery.FolderID = &f.ID
}
}

return nil
}

func (i *Importer) PostImport(ctx context.Context, id int) error {
return nil
}

func (i *Importer) Name() string {
return i.Input.Path
if i.Input.Title != "" {
return i.Input.Title
}

if i.Input.FolderPath != "" {
return i.Input.FolderPath
}

if len(i.Input.ZipFiles) > 0 {
return i.Input.ZipFiles[0]
}

return ""
}

func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
// TODO
// existing, err := i.ReaderWriter.FindByChecksum(ctx, i.Input.Checksum)
// if err != nil {
// return nil, err
// }
var existing []*models.Gallery
var err error
switch {
case len(i.gallery.Files) > 0:
for _, f := range i.gallery.Files {
existing, err := i.ReaderWriter.FindByFileID(ctx, f.Base().ID)
if err != nil {
return nil, err
}

// if existing != nil {
// id := existing.ID
// return &id, nil
// }
if existing != nil {
break
}
}
case i.gallery.FolderID != nil:
existing, err = i.ReaderWriter.FindByFolderID(ctx, *i.gallery.FolderID)
default:
existing, err = i.ReaderWriter.FindUserGalleryByTitle(ctx, i.gallery.Title)
}

if err != nil {
return nil, err
}

if len(existing) > 0 {
id := existing[0].ID
return &id, nil
}

return nil, nil
}

func (i *Importer) Create(ctx context.Context) (*int, error) {
err := i.ReaderWriter.Create(ctx, &i.gallery, nil)
var fileIDs []file.ID
for _, f := range i.gallery.Files {
fileIDs = append(fileIDs, f.Base().ID)
}
err := i.ReaderWriter.Create(ctx, &i.gallery, fileIDs)
if err != nil {
return nil, fmt.Errorf("error creating gallery: %v", err)
}
@@ -1,441 +1,322 @@
package gallery

// import (
// "context"
// "errors"
// "testing"
// "time"

// "github.com/stashapp/stash/pkg/models"
// "github.com/stashapp/stash/pkg/models/json"
// "github.com/stashapp/stash/pkg/models/jsonschema"
// "github.com/stashapp/stash/pkg/models/mocks"
// "github.com/stretchr/testify/assert"
// "github.com/stretchr/testify/mock"
// )

// var (
// galleryNameErr = "galleryNameErr"
// // existingGalleryName = "existingGalleryName"

// existingGalleryID = 100
// existingStudioID = 101
// existingPerformerID = 103
// existingTagID = 105

// existingStudioName = "existingStudioName"
// existingStudioErr = "existingStudioErr"
// missingStudioName = "missingStudioName"

// existingPerformerName = "existingPerformerName"
// existingPerformerErr = "existingPerformerErr"
// missingPerformerName = "missingPerformerName"

// existingTagName = "existingTagName"
// existingTagErr = "existingTagErr"
// missingTagName = "missingTagName"

// missingChecksum = "missingChecksum"
// errChecksum = "errChecksum"
// )

// var testCtx = context.Background()

// var (
// createdAt = time.Date(2001, time.January, 2, 1, 2, 3, 4, time.Local)
// updatedAt = time.Date(2002, time.January, 2, 1, 2, 3, 4, time.Local)
// )

// func TestImporterName(t *testing.T) {
// i := Importer{
// Input: jsonschema.Gallery{
// Path: path,
// },
// }

// assert.Equal(t, path, i.Name())
// }

// func TestImporterPreImport(t *testing.T) {
// i := Importer{
// Input: jsonschema.Gallery{
// Path: path,
// Checksum: checksum,
// Title: title,
// Date: date,
// Details: details,
// Rating: rating,
// Organized: organized,
// URL: url,
// CreatedAt: json.JSONTime{
// Time: createdAt,
// },
// UpdatedAt: json.JSONTime{
// Time: updatedAt,
// },
// },
// }

// err := i.PreImport(testCtx)
// assert.Nil(t, err)

// expectedGallery := models.Gallery{
// Path: &path,
// Checksum: checksum,
// Title: title,
// Date: &dateObj,
// Details: details,
// Rating: &rating,
// Organized: organized,
// URL: url,
// CreatedAt: createdAt,
// UpdatedAt: updatedAt,
// }

// assert.Equal(t, expectedGallery, i.gallery)
// }

// func TestImporterPreImportWithStudio(t *testing.T) {
// studioReaderWriter := &mocks.StudioReaderWriter{}

// i := Importer{
// StudioWriter: studioReaderWriter,
// Input: jsonschema.Gallery{
// Studio: existingStudioName,
// Path: path,
// },
// }

// studioReaderWriter.On("FindByName", testCtx, existingStudioName, false).Return(&models.Studio{
// ID: existingStudioID,
// }, nil).Once()
// studioReaderWriter.On("FindByName", testCtx, existingStudioErr, false).Return(nil, errors.New("FindByName error")).Once()

// err := i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, existingStudioID, *i.gallery.StudioID)

// i.Input.Studio = existingStudioErr
// err = i.PreImport(testCtx)
// assert.NotNil(t, err)

// studioReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingStudio(t *testing.T) {
// studioReaderWriter := &mocks.StudioReaderWriter{}

// i := Importer{
// StudioWriter: studioReaderWriter,
// Input: jsonschema.Gallery{
// Path: path,
// Studio: missingStudioName,
// },
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// }

// studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3)
// studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(&models.Studio{
// ID: existingStudioID,
// }, nil)

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
// err = i.PreImport(testCtx)
// assert.Nil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
// err = i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, existingStudioID, *i.gallery.StudioID)

// studioReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) {
// studioReaderWriter := &mocks.StudioReaderWriter{}

// i := Importer{
// StudioWriter: studioReaderWriter,
// Input: jsonschema.Gallery{
// Path: path,
// Studio: missingStudioName,
// },
// MissingRefBehaviour: models.ImportMissingRefEnumCreate,
// }

// studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once()
// studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error"))

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)
// }

// func TestImporterPreImportWithPerformer(t *testing.T) {
// performerReaderWriter := &mocks.PerformerReaderWriter{}

// i := Importer{
// PerformerWriter: performerReaderWriter,
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// Input: jsonschema.Gallery{
// Path: path,
// Performers: []string{
// existingPerformerName,
// },
// },
// }

// performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerName}, false).Return([]*models.Performer{
// {
// ID: existingPerformerID,
// Name: models.NullString(existingPerformerName),
// },
// }, nil).Once()
// performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once()

// err := i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, []int{existingPerformerID}, i.gallery.PerformerIDs)

// i.Input.Performers = []string{existingPerformerErr}
// err = i.PreImport(testCtx)
// assert.NotNil(t, err)

// performerReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingPerformer(t *testing.T) {
// performerReaderWriter := &mocks.PerformerReaderWriter{}

// i := Importer{
// PerformerWriter: performerReaderWriter,
// Input: jsonschema.Gallery{
// Path: path,
// Performers: []string{
// missingPerformerName,
// },
// },
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// }

// performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3)
// performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(&models.Performer{
// ID: existingPerformerID,
// }, nil)

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
// err = i.PreImport(testCtx)
// assert.Nil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
// err = i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, []int{existingPerformerID}, i.gallery.PerformerIDs)

// performerReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) {
// performerReaderWriter := &mocks.PerformerReaderWriter{}

// i := Importer{
// PerformerWriter: performerReaderWriter,
// Input: jsonschema.Gallery{
// Path: path,
// Performers: []string{
// missingPerformerName,
// },
// },
// MissingRefBehaviour: models.ImportMissingRefEnumCreate,
// }

// performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once()
// performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(nil, errors.New("Create error"))

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)
// }

// func TestImporterPreImportWithTag(t *testing.T) {
// tagReaderWriter := &mocks.TagReaderWriter{}

// i := Importer{
// TagWriter: tagReaderWriter,
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// Input: jsonschema.Gallery{
// Path: path,
// Tags: []string{
// existingTagName,
// },
// },
// }

// tagReaderWriter.On("FindByNames", testCtx, []string{existingTagName}, false).Return([]*models.Tag{
// {
// ID: existingTagID,
// Name: existingTagName,
// },
// }, nil).Once()
// tagReaderWriter.On("FindByNames", testCtx, []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once()

// err := i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, []int{existingTagID}, i.gallery.TagIDs)

// i.Input.Tags = []string{existingTagErr}
// err = i.PreImport(testCtx)
// assert.NotNil(t, err)

// tagReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingTag(t *testing.T) {
// tagReaderWriter := &mocks.TagReaderWriter{}

// i := Importer{
// TagWriter: tagReaderWriter,
// Input: jsonschema.Gallery{
// Path: path,
// Tags: []string{
// missingTagName,
// },
// },
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// }

// tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3)
// tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(&models.Tag{
// ID: existingTagID,
// }, nil)

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
// err = i.PreImport(testCtx)
// assert.Nil(t, err)

// i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
// err = i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, []int{existingTagID}, i.gallery.TagIDs)

// tagReaderWriter.AssertExpectations(t)
// }

// func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
// tagReaderWriter := &mocks.TagReaderWriter{}

// i := Importer{
// TagWriter: tagReaderWriter,
// Input: jsonschema.Gallery{
// Path: path,
// Tags: []string{
// missingTagName,
// },
// },
// MissingRefBehaviour: models.ImportMissingRefEnumCreate,
// }

// tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once()
// tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error"))

// err := i.PreImport(testCtx)
// assert.NotNil(t, err)
// }

// func TestImporterFindExistingID(t *testing.T) {
// readerWriter := &mocks.GalleryReaderWriter{}

// i := Importer{
// ReaderWriter: readerWriter,
// Input: jsonschema.Gallery{
// Path: path,
// Checksum: missingChecksum,
// },
// }

// expectedErr := errors.New("FindBy* error")
// readerWriter.On("FindByChecksum", testCtx, missingChecksum).Return(nil, nil).Once()
// readerWriter.On("FindByChecksum", testCtx, checksum).Return(&models.Gallery{
// ID: existingGalleryID,
// }, nil).Once()
// readerWriter.On("FindByChecksum", testCtx, errChecksum).Return(nil, expectedErr).Once()

// id, err := i.FindExistingID(testCtx)
// assert.Nil(t, id)
// assert.Nil(t, err)

// i.Input.Checksum = checksum
// id, err = i.FindExistingID(testCtx)
// assert.Equal(t, existingGalleryID, *id)
// assert.Nil(t, err)

// i.Input.Checksum = errChecksum
// id, err = i.FindExistingID(testCtx)
// assert.Nil(t, id)
// assert.NotNil(t, err)

// readerWriter.AssertExpectations(t)
// }

// func TestCreate(t *testing.T) {
// readerWriter := &mocks.GalleryReaderWriter{}

// gallery := models.Gallery{
// Title: title,
// }

// galleryErr := models.Gallery{
// Title: galleryNameErr,
// }

// i := Importer{
// ReaderWriter: readerWriter,
// gallery: gallery,
// }

// errCreate := errors.New("Create error")
// readerWriter.On("Create", testCtx, &gallery).Run(func(args mock.Arguments) {
// args.Get(1).(*models.Gallery).ID = galleryID
// }).Return(nil).Once()
// readerWriter.On("Create", testCtx, &galleryErr).Return(errCreate).Once()

// id, err := i.Create(testCtx)
// assert.Equal(t, galleryID, *id)
// assert.Nil(t, err)

// i.gallery = galleryErr
// id, err = i.Create(testCtx)
// assert.Nil(t, id)
// assert.NotNil(t, err)

// readerWriter.AssertExpectations(t)
// }

// func TestUpdate(t *testing.T) {
// readerWriter := &mocks.GalleryReaderWriter{}

// gallery := models.Gallery{
// Title: title,
// }

// i := Importer{
// ReaderWriter: readerWriter,
// gallery: gallery,
// }

// // id needs to be set for the mock input
// gallery.ID = galleryID
// readerWriter.On("Update", testCtx, &gallery).Return(nil, nil).Once()

// err := i.Update(testCtx, galleryID)
// assert.Nil(t, err)

// readerWriter.AssertExpectations(t)
// }
import (
"context"
"errors"
"testing"
"time"

"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/json"
"github.com/stashapp/stash/pkg/models/jsonschema"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)

var (
existingStudioID = 101
existingPerformerID = 103
existingTagID = 105

existingStudioName = "existingStudioName"
existingStudioErr = "existingStudioErr"
missingStudioName = "missingStudioName"

existingPerformerName = "existingPerformerName"
existingPerformerErr = "existingPerformerErr"
missingPerformerName = "missingPerformerName"

existingTagName = "existingTagName"
existingTagErr = "existingTagErr"
missingTagName = "missingTagName"
)

var testCtx = context.Background()

var (
createdAt = time.Date(2001, time.January, 2, 1, 2, 3, 4, time.Local)
updatedAt = time.Date(2002, time.January, 2, 1, 2, 3, 4, time.Local)
)

func TestImporterPreImport(t *testing.T) {
i := Importer{
Input: jsonschema.Gallery{
Title: title,
Date: date,
Details: details,
Rating: rating,
Organized: organized,
URL: url,
CreatedAt: json.JSONTime{
Time: createdAt,
},
UpdatedAt: json.JSONTime{
Time: updatedAt,
},
},
}

err := i.PreImport(testCtx)
assert.Nil(t, err)

expectedGallery := models.Gallery{
Title: title,
Date: &dateObj,
Details: details,
Rating: &rating,
Organized: organized,
URL: url,
TagIDs: models.NewRelatedIDs([]int{}),
PerformerIDs: models.NewRelatedIDs([]int{}),
CreatedAt: createdAt,
UpdatedAt: updatedAt,
}

assert.Equal(t, expectedGallery, i.gallery)
}

func TestImporterPreImportWithStudio(t *testing.T) {
studioReaderWriter := &mocks.StudioReaderWriter{}

i := Importer{
StudioWriter: studioReaderWriter,
Input: jsonschema.Gallery{
Studio: existingStudioName,
},
}

studioReaderWriter.On("FindByName", testCtx, existingStudioName, false).Return(&models.Studio{
ID: existingStudioID,
}, nil).Once()
studioReaderWriter.On("FindByName", testCtx, existingStudioErr, false).Return(nil, errors.New("FindByName error")).Once()

err := i.PreImport(testCtx)
assert.Nil(t, err)
assert.Equal(t, existingStudioID, *i.gallery.StudioID)

i.Input.Studio = existingStudioErr
err = i.PreImport(testCtx)
assert.NotNil(t, err)

studioReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingStudio(t *testing.T) {
studioReaderWriter := &mocks.StudioReaderWriter{}

i := Importer{
StudioWriter: studioReaderWriter,
Input: jsonschema.Gallery{
Studio: missingStudioName,
},
MissingRefBehaviour: models.ImportMissingRefEnumFail,
}

studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3)
studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(&models.Studio{
ID: existingStudioID,
}, nil)

err := i.PreImport(testCtx)
assert.NotNil(t, err)

i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
err = i.PreImport(testCtx)
assert.Nil(t, err)

i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
err = i.PreImport(testCtx)
assert.Nil(t, err)
assert.Equal(t, existingStudioID, *i.gallery.StudioID)

studioReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) {
studioReaderWriter := &mocks.StudioReaderWriter{}

i := Importer{
StudioWriter: studioReaderWriter,
Input: jsonschema.Gallery{
Studio: missingStudioName,
},
MissingRefBehaviour: models.ImportMissingRefEnumCreate,
}

studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once()
studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error"))

err := i.PreImport(testCtx)
assert.NotNil(t, err)
}

func TestImporterPreImportWithPerformer(t *testing.T) {
performerReaderWriter := &mocks.PerformerReaderWriter{}

i := Importer{
PerformerWriter: performerReaderWriter,
MissingRefBehaviour: models.ImportMissingRefEnumFail,
Input: jsonschema.Gallery{
Performers: []string{
existingPerformerName,
},
},
}

performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerName}, false).Return([]*models.Performer{
{
ID: existingPerformerID,
Name: models.NullString(existingPerformerName),
},
}, nil).Once()
performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once()

err := i.PreImport(testCtx)
assert.Nil(t, err)
assert.Equal(t, []int{existingPerformerID}, i.gallery.PerformerIDs.List())

i.Input.Performers = []string{existingPerformerErr}
err = i.PreImport(testCtx)
assert.NotNil(t, err)

performerReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingPerformer(t *testing.T) {
performerReaderWriter := &mocks.PerformerReaderWriter{}

i := Importer{
PerformerWriter: performerReaderWriter,
Input: jsonschema.Gallery{
Performers: []string{
missingPerformerName,
},
},
MissingRefBehaviour: models.ImportMissingRefEnumFail,
}

performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3)
performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(&models.Performer{
ID: existingPerformerID,
}, nil)

err := i.PreImport(testCtx)
assert.NotNil(t, err)

i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
err = i.PreImport(testCtx)
assert.Nil(t, err)

i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
err = i.PreImport(testCtx)
assert.Nil(t, err)
assert.Equal(t, []int{existingPerformerID}, i.gallery.PerformerIDs.List())

performerReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) {
performerReaderWriter := &mocks.PerformerReaderWriter{}

i := Importer{
PerformerWriter: performerReaderWriter,
Input: jsonschema.Gallery{
Performers: []string{
missingPerformerName,
},
},
MissingRefBehaviour: models.ImportMissingRefEnumCreate,
}

performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once()
performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(nil, errors.New("Create error"))

err := i.PreImport(testCtx)
assert.NotNil(t, err)
}

func TestImporterPreImportWithTag(t *testing.T) {
tagReaderWriter := &mocks.TagReaderWriter{}

i := Importer{
TagWriter: tagReaderWriter,
MissingRefBehaviour: models.ImportMissingRefEnumFail,
Input: jsonschema.Gallery{
Tags: []string{
existingTagName,
},
},
}

tagReaderWriter.On("FindByNames", testCtx, []string{existingTagName}, false).Return([]*models.Tag{
{
ID: existingTagID,
Name: existingTagName,
},
}, nil).Once()
tagReaderWriter.On("FindByNames", testCtx, []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once()

err := i.PreImport(testCtx)
assert.Nil(t, err)
assert.Equal(t, []int{existingTagID}, i.gallery.TagIDs.List())

i.Input.Tags = []string{existingTagErr}
err = i.PreImport(testCtx)
assert.NotNil(t, err)

tagReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingTag(t *testing.T) {
tagReaderWriter := &mocks.TagReaderWriter{}

i := Importer{
TagWriter: tagReaderWriter,
Input: jsonschema.Gallery{
Tags: []string{
missingTagName,
},
},
MissingRefBehaviour: models.ImportMissingRefEnumFail,
}

tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3)
tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(&models.Tag{
ID: existingTagID,
}, nil)

err := i.PreImport(testCtx)
assert.NotNil(t, err)

i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
err = i.PreImport(testCtx)
assert.Nil(t, err)

i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
err = i.PreImport(testCtx)
assert.Nil(t, err)
assert.Equal(t, []int{existingTagID}, i.gallery.TagIDs.List())

tagReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
tagReaderWriter := &mocks.TagReaderWriter{}

i := Importer{
TagWriter: tagReaderWriter,
Input: jsonschema.Gallery{
Tags: []string{
missingTagName,
},
},
MissingRefBehaviour: models.ImportMissingRefEnumCreate,
}

tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once()
tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error"))

err := i.PreImport(testCtx)
assert.NotNil(t, err)
}
@@ -4,6 +4,7 @@ import (
"context"
"strconv"

"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models"
)

@@ -15,8 +16,12 @@ type CountQueryer interface {
QueryCount(ctx context.Context, galleryFilter *models.GalleryFilterType, findFilter *models.FindFilterType) (int, error)
}

type ChecksumsFinder interface {
FindByChecksums(ctx context.Context, checksums []string) ([]*models.Gallery, error)
type Finder interface {
FindByPath(ctx context.Context, p string) ([]*models.Gallery, error)
FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error)
FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Gallery, error)
FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error)
FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Gallery, error)
}

func CountByPerformerID(ctx context.Context, r CountQueryer, id int) (int, error) {
@@ -16,8 +16,7 @@ import (
// const mutexType = "gallery"

type FinderCreatorUpdater interface {
FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error)
FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Gallery, error)
Finder
Create(ctx context.Context, newGallery *models.Gallery, fileIDs []file.ID) error
AddFileID(ctx context.Context, id int, fileID file.ID) error
}
@@ -14,7 +14,6 @@ import (
// of cover image.
func ToBasicJSON(image *models.Image) *jsonschema.Image {
newImageJSON := jsonschema.Image{
Checksum: image.Checksum(),
Title: image.Title,
CreatedAt: json.JSONTime{Time: image.CreatedAt},
UpdatedAt: json.JSONTime{Time: image.UpdatedAt},
@@ -27,23 +26,25 @@ func ToBasicJSON(image *models.Image) *jsonschema.Image {
newImageJSON.Organized = image.Organized
newImageJSON.OCounter = image.OCounter

newImageJSON.File = getImageFileJSON(image)
for _, f := range image.Files {
newImageJSON.Files = append(newImageJSON.Files, f.Base().Path)
}

return &newImageJSON
}

func getImageFileJSON(image *models.Image) *jsonschema.ImageFile {
ret := &jsonschema.ImageFile{}
// func getImageFileJSON(image *models.Image) *jsonschema.ImageFile {
// ret := &jsonschema.ImageFile{}

f := image.PrimaryFile()
// f := image.PrimaryFile()

ret.ModTime = json.JSONTime{Time: f.ModTime}
ret.Size = f.Size
ret.Width = f.Width
ret.Height = f.Height
// ret.ModTime = json.JSONTime{Time: f.ModTime}
// ret.Size = f.Size
// ret.Width = f.Width
// ret.Height = f.Height

return ret
}
// return ret
// }

// GetStudioName returns the name of the provided image's studio. It returns an
// empty string if there is no studio assigned to the image.
@@ -1,165 +1,144 @@
package image

// import (
// "errors"
import (
"errors"

// "github.com/stashapp/stash/pkg/file"
// "github.com/stashapp/stash/pkg/models"
// "github.com/stashapp/stash/pkg/models/json"
// "github.com/stashapp/stash/pkg/models/jsonschema"
// "github.com/stashapp/stash/pkg/models/mocks"
// "github.com/stretchr/testify/assert"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/json"
"github.com/stashapp/stash/pkg/models/jsonschema"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert"

// "testing"
// "time"
// )
"testing"
"time"
)

// const (
// imageID = 1
// errImageID = 3
const (
imageID = 1

// studioID = 4
// missingStudioID = 5
// errStudioID = 6
// )
studioID = 4
missingStudioID = 5
errStudioID = 6
)

// var (
// checksum = "checksum"
// title = "title"
// rating = 5
// organized = true
// ocounter = 2
// size int64 = 123
// width = 100
// height = 100
// )
var (
title = "title"
rating = 5
organized = true
ocounter = 2
)

// const (
// studioName = "studioName"
// )
const (
studioName = "studioName"
)

// var (
// createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC)
// updateTime = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC)
// )
var (
createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC)
updateTime = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC)
)

// func createFullImage(id int) models.Image {
// return models.Image{
// ID: id,
// Title: title,
// Files: []*file.ImageFile{
// {
// BaseFile: &file.BaseFile{
// Size: size,
// },
// Height: height,
// Width: width,
// },
// },
// OCounter: ocounter,
// Rating: &rating,
// Organized: organized,
// CreatedAt: createTime,
// UpdatedAt: updateTime,
// }
// }
func createFullImage(id int) models.Image {
return models.Image{
ID: id,
Title: title,
OCounter: ocounter,
Rating: &rating,
Organized: organized,
CreatedAt: createTime,
UpdatedAt: updateTime,
}
}

// func createFullJSONImage() *jsonschema.Image {
// return &jsonschema.Image{
// Title: title,
// Checksum: checksum,
// OCounter: ocounter,
// Rating: rating,
// Organized: organized,
// File: &jsonschema.ImageFile{
// Height: height,
// Size: size,
// Width: width,
// },
// CreatedAt: json.JSONTime{
// Time: createTime,
// },
// UpdatedAt: json.JSONTime{
// Time: updateTime,
// },
// }
// }
func createFullJSONImage() *jsonschema.Image {
return &jsonschema.Image{
Title: title,
OCounter: ocounter,
Rating: rating,
Organized: organized,
CreatedAt: json.JSONTime{
Time: createTime,
},
UpdatedAt: json.JSONTime{
Time: updateTime,
},
}
}

// type basicTestScenario struct {
// input models.Image
// expected *jsonschema.Image
// }
type basicTestScenario struct {
input models.Image
expected *jsonschema.Image
}

// var scenarios = []basicTestScenario{
// {
// createFullImage(imageID),
// createFullJSONImage(),
// },
// }
var scenarios = []basicTestScenario{
{
createFullImage(imageID),
createFullJSONImage(),
},
}

// func TestToJSON(t *testing.T) {
// for i, s := range scenarios {
// image := s.input
// json := ToBasicJSON(&image)
func TestToJSON(t *testing.T) {
for i, s := range scenarios {
image := s.input
json := ToBasicJSON(&image)

// assert.Equal(t, s.expected, json, "[%d]", i)
// }
// }
assert.Equal(t, s.expected, json, "[%d]", i)
}
}

// func createStudioImage(studioID int) models.Image {
// return models.Image{
// StudioID: &studioID,
// }
// }
func createStudioImage(studioID int) models.Image {
return models.Image{
StudioID: &studioID,
}
}

// type stringTestScenario struct {
// input models.Image
// expected string
// err bool
// }
type stringTestScenario struct {
input models.Image
expected string
err bool
}

// var getStudioScenarios = []stringTestScenario{
// {
// createStudioImage(studioID),
// studioName,
// false,
// },
// {
// createStudioImage(missingStudioID),
// "",
// false,
// },
// {
// createStudioImage(errStudioID),
// "",
// true,
// },
// }
var getStudioScenarios = []stringTestScenario{
{
createStudioImage(studioID),
studioName,
false,
},
{
createStudioImage(missingStudioID),
"",
false,
},
{
createStudioImage(errStudioID),
"",
true,
},
}

// func TestGetStudioName(t *testing.T) {
// mockStudioReader := &mocks.StudioReaderWriter{}
func TestGetStudioName(t *testing.T) {
mockStudioReader := &mocks.StudioReaderWriter{}

// studioErr := errors.New("error getting image")
studioErr := errors.New("error getting image")

// mockStudioReader.On("Find", testCtx, studioID).Return(&models.Studio{
// Name: models.NullString(studioName),
// }, nil).Once()
// mockStudioReader.On("Find", testCtx, missingStudioID).Return(nil, nil).Once()
// mockStudioReader.On("Find", testCtx, errStudioID).Return(nil, studioErr).Once()
mockStudioReader.On("Find", testCtx, studioID).Return(&models.Studio{
Name: models.NullString(studioName),
}, nil).Once()
mockStudioReader.On("Find", testCtx, missingStudioID).Return(nil, nil).Once()
mockStudioReader.On("Find", testCtx, errStudioID).Return(nil, studioErr).Once()

// for i, s := range getStudioScenarios {
// image := s.input
// json, err := GetStudioName(testCtx, mockStudioReader, &image)
for i, s := range getStudioScenarios {
image := s.input
json, err := GetStudioName(testCtx, mockStudioReader, &image)

// switch {
// case !s.err && err != nil:
// t.Errorf("[%d] unexpected error: %s", i, err.Error())
// case s.err && err == nil:
// t.Errorf("[%d] expected error not returned", i)
// default:
// assert.Equal(t, s.expected, json, "[%d]", i)
// }
// }
switch {
case !s.err && err != nil:
t.Errorf("[%d] unexpected error: %s", i, err.Error())
case s.err && err == nil:
t.Errorf("[%d] expected error not returned", i)
default:
assert.Equal(t, s.expected, json, "[%d]", i)
}
}

// mockStudioReader.AssertExpectations(t)
// }
mockStudioReader.AssertExpectations(t)
}
@@ -3,8 +3,10 @@ package image
import (
"context"
"fmt"
"path/filepath"
"strings"

"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/jsonschema"
"github.com/stashapp/stash/pkg/performer"
@@ -13,8 +15,9 @@ import (
"github.com/stashapp/stash/pkg/tag"
)

type GalleryChecksumsFinder interface {
FindByChecksums(ctx context.Context, checksums []string) ([]*models.Gallery, error)
type GalleryFinder interface {
FindByPath(ctx context.Context, p string) ([]*models.Gallery, error)
FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error)
}

type FullCreatorUpdater interface {
@@ -24,12 +27,12 @@ type FullCreatorUpdater interface {

type Importer struct {
ReaderWriter FullCreatorUpdater
FileFinder file.Getter
StudioWriter studio.NameFinderCreator
GalleryWriter GalleryChecksumsFinder
GalleryFinder GalleryFinder
PerformerWriter performer.NameFinderCreator
TagWriter tag.NameFinderCreator
Input jsonschema.Image
Path string
MissingRefBehaviour models.ImportMissingRefEnum

ID int
@@ -39,6 +42,10 @@ type Importer struct {
func (i *Importer) PreImport(ctx context.Context) error {
i.image = i.imageJSONToImage(i.Input)

if err := i.populateFiles(ctx); err != nil {
return err
}

if err := i.populateStudio(ctx); err != nil {
return err
}
@@ -65,6 +72,12 @@ func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image {
PerformerIDs: models.NewRelatedIDs([]int{}),
TagIDs: models.NewRelatedIDs([]int{}),
GalleryIDs: models.NewRelatedIDs([]int{}),

Title: imageJSON.Title,
Organized: imageJSON.Organized,
OCounter: imageJSON.OCounter,
CreatedAt: imageJSON.CreatedAt.GetTime(),
UpdatedAt: imageJSON.UpdatedAt.GetTime(),
}

if imageJSON.Title != "" {
@@ -74,26 +87,27 @@ func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image {
newImage.Rating = &imageJSON.Rating
}

newImage.Organized = imageJSON.Organized
newImage.OCounter = imageJSON.OCounter
newImage.CreatedAt = imageJSON.CreatedAt.GetTime()
newImage.UpdatedAt = imageJSON.UpdatedAt.GetTime()

// if imageJSON.File != nil {
// if imageJSON.File.Size != 0 {
// newImage.Size = &imageJSON.File.Size
// }
// if imageJSON.File.Width != 0 {
// newImage.Width = &imageJSON.File.Width
// }
// if imageJSON.File.Height != 0 {
// newImage.Height = &imageJSON.File.Height
// }
// }

return newImage
}

func (i *Importer) populateFiles(ctx context.Context) error {
for _, ref := range i.Input.Files {
path := filepath.FromSlash(ref)
f, err := i.FileFinder.FindByPath(ctx, path)
if err != nil {
return fmt.Errorf("error finding file: %w", err)
}

if f == nil {
return fmt.Errorf("image file '%s' not found", path)
} else {
i.image.Files = append(i.image.Files, f.(*file.ImageFile))
}
}

return nil
}

func (i *Importer) populateStudio(ctx context.Context) error {
if i.Input.Studio != "" {
studio, err := i.StudioWriter.FindByName(ctx, i.Input.Studio, false)
@@ -136,16 +150,45 @@ func (i *Importer) createStudio(ctx context.Context, name string) (int, error) {
return created.ID, nil
}

func (i *Importer) locateGallery(ctx context.Context, ref jsonschema.GalleryRef) (*models.Gallery, error) {
var galleries []*models.Gallery
var err error
switch {
case ref.FolderPath != "":
galleries, err = i.GalleryFinder.FindByPath(ctx, ref.FolderPath)
case len(ref.ZipFiles) > 0:
for _, p := range ref.ZipFiles {
galleries, err = i.GalleryFinder.FindByPath(ctx, p)
if err != nil {
break
}

if len(galleries) > 0 {
break
}
}
case ref.Title != "":
galleries, err = i.GalleryFinder.FindUserGalleryByTitle(ctx, ref.Title)
}

var ret *models.Gallery
if len(galleries) > 0 {
ret = galleries[0]
}

return ret, err
}

func (i *Importer) populateGalleries(ctx context.Context) error {
for _, checksum := range i.Input.Galleries {
gallery, err := i.GalleryWriter.FindByChecksums(ctx, []string{checksum})
for _, ref := range i.Input.Galleries {
gallery, err := i.locateGallery(ctx, ref)
if err != nil {
return fmt.Errorf("error finding gallery: %v", err)
}

if len(gallery) == 0 {
if gallery == nil {
if i.MissingRefBehaviour == models.ImportMissingRefEnumFail {
return fmt.Errorf("image gallery '%s' not found", i.Input.Studio)
return fmt.Errorf("image gallery '%s' not found", ref.String())
}

// we don't create galleries - just ignore
@@ -153,7 +196,7 @@ func (i *Importer) populateGalleries(ctx context.Context) error {
continue
}
} else {
i.image.GalleryIDs.Add(gallery[0].ID)
i.image.GalleryIDs.Add(gallery.ID)
}
}

@@ -242,28 +285,46 @@ func (i *Importer) PostImport(ctx context.Context, id int) error {
}

func (i *Importer) Name() string {
return i.Path
if i.Input.Title != "" {
return i.Input.Title
}

if len(i.Input.Files) > 0 {
return i.Input.Files[0]
}

return ""
}

func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
// var existing []*models.Image
// var err error
// existing, err = i.ReaderWriter.FindByChecksum(ctx, i.Input.Checksum)
var existing []*models.Image
var err error

// if err != nil {
// return nil, err
// }
for _, f := range i.image.Files {
existing, err = i.ReaderWriter.FindByFileID(ctx, f.ID)
if err != nil {
return nil, err
}

// if len(existing) > 0 {
// id := existing[0].ID
// return &id, nil
// }
if len(existing) > 0 {
id := existing[0].ID
return &id, nil
}
}

return nil, nil
}

func (i *Importer) Create(ctx context.Context) (*int, error) {
err := i.ReaderWriter.Create(ctx, &models.ImageCreateInput{Image: &i.image})
var fileIDs []file.ID
for _, f := range i.image.Files {
fileIDs = append(fileIDs, f.Base().ID)
}

err := i.ReaderWriter.Create(ctx, &models.ImageCreateInput{
Image: &i.image,
FileIDs: fileIDs,
})
if err != nil {
return nil, fmt.Errorf("error creating image: %v", err)
}
@@ -1,492 +1,285 @@
|
||||
package image
|
||||
|
||||
// import (
|
||||
// "context"
|
||||
// "errors"
|
||||
// "testing"
|
||||
|
||||
// "github.com/stashapp/stash/pkg/models"
|
||||
// "github.com/stashapp/stash/pkg/models/jsonschema"
|
||||
// "github.com/stashapp/stash/pkg/models/mocks"
|
||||
// "github.com/stretchr/testify/assert"
|
||||
// "github.com/stretchr/testify/mock"
|
||||
// )
|
||||
|
||||
// var (
|
||||
// path = "path"
|
||||
|
||||
// imageNameErr = "imageNameErr"
|
||||
// // existingImageName = "existingImageName"
|
||||
|
||||
// existingImageID = 100
|
||||
// existingStudioID = 101
|
||||
// existingGalleryID = 102
|
||||
// existingPerformerID = 103
|
||||
// // existingMovieID = 104
|
||||
// existingTagID = 105
|
||||
|
||||
// existingStudioName = "existingStudioName"
|
||||
// existingStudioErr = "existingStudioErr"
|
||||
// missingStudioName = "missingStudioName"
|
||||
|
||||
// existingGalleryChecksum = "existingGalleryChecksum"
|
||||
// existingGalleryErr = "existingGalleryErr"
|
||||
// missingGalleryChecksum = "missingGalleryChecksum"
|
||||
|
||||
// existingPerformerName = "existingPerformerName"
|
||||
// existingPerformerErr = "existingPerformerErr"
|
||||
// missingPerformerName = "missingPerformerName"
|
||||
|
||||
// existingTagName = "existingTagName"
|
||||
// existingTagErr = "existingTagErr"
|
||||
// missingTagName = "missingTagName"
|
||||
|
||||
// missingChecksum = "missingChecksum"
|
||||
// errChecksum = "errChecksum"
|
||||
// )
|
||||
|
||||
// var testCtx = context.Background()
|
||||
|
||||
// func TestImporterName(t *testing.T) {
|
||||
// i := Importer{
|
||||
// Path: path,
|
||||
// Input: jsonschema.Image{},
|
||||
// }
|
||||
|
||||
// assert.Equal(t, path, i.Name())
|
||||
// }
|
||||
|
||||
// func TestImporterPreImport(t *testing.T) {
|
||||
// i := Importer{
|
||||
// Path: path,
|
||||
// }
|
||||
|
||||
// err := i.PreImport(testCtx)
|
||||
// assert.Nil(t, err)
|
||||
// }
|
||||
|
||||
// func TestImporterPreImportWithStudio(t *testing.T) {
|
||||
// studioReaderWriter := &mocks.StudioReaderWriter{}
|
||||
|
||||
// i := Importer{
|
||||
// StudioWriter: studioReaderWriter,
|
||||
// Path: path,
|
||||
// Input: jsonschema.Image{
|
||||
// Studio: existingStudioName,
|
||||
// },
|
||||
// }
|
||||
|
||||
// studioReaderWriter.On("FindByName", testCtx, existingStudioName, false).Return(&models.Studio{
|
||||
// ID: existingStudioID,
|
||||
// }, nil).Once()
|
||||
// studioReaderWriter.On("FindByName", testCtx, existingStudioErr, false).Return(nil, errors.New("FindByName error")).Once()
|
||||
|
||||
// err := i.PreImport(testCtx)
|
||||
// assert.Nil(t, err)
|
||||
// assert.Equal(t, existingStudioID, *i.image.StudioID)
|
||||
|
||||
// i.Input.Studio = existingStudioErr
|
||||
// err = i.PreImport(testCtx)
|
||||
// assert.NotNil(t, err)
|
||||
|
||||
// studioReaderWriter.AssertExpectations(t)
|
||||
// }
|
||||
|
||||
// func TestImporterPreImportWithMissingStudio(t *testing.T) {
|
||||
// studioReaderWriter := &mocks.StudioReaderWriter{}
|
||||
|
||||
// i := Importer{
|
||||
// Path: path,
|
||||
// StudioWriter: studioReaderWriter,
|
||||
// Input: jsonschema.Image{
|
||||
// Studio: missingStudioName,
|
||||
// },
|
||||
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
|
||||
// }
|
||||
|
||||
// studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3)
|
||||
// studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(&models.Studio{
|
||||
// ID: existingStudioID,
|
||||
// }, nil)
|
||||
|
||||
// err := i.PreImport(testCtx)
|
||||
// assert.NotNil(t, err)
|
||||
|
||||
// i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
|
||||
// err = i.PreImport(testCtx)
|
||||
// assert.Nil(t, err)
|
||||
|
||||
// i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
|
||||
// err = i.PreImport(testCtx)
|
||||
// assert.Nil(t, err)
|
||||
// assert.Equal(t, existingStudioID, *i.image.StudioID)
|
||||
|
||||
// studioReaderWriter.AssertExpectations(t)
|
||||
// }
|
||||
|
||||
// func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) {
|
||||
// studioReaderWriter := &mocks.StudioReaderWriter{}
|
||||
|
||||
// i := Importer{
|
||||
// StudioWriter: studioReaderWriter,
|
||||
// Path: path,
|
||||
// Input: jsonschema.Image{
|
||||
// Studio: missingStudioName,
|
||||
// },
|
||||
// MissingRefBehaviour: models.ImportMissingRefEnumCreate,
|
||||
// }
|
||||
|
||||
// studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once()
|
||||
// studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error"))
|
||||
|
||||
// err := i.PreImport(testCtx)
|
||||
// assert.NotNil(t, err)
|
||||
// }
|
||||
|
||||
// func TestImporterPreImportWithGallery(t *testing.T) {
|
||||
// galleryReaderWriter := &mocks.GalleryReaderWriter{}
|
||||
|
||||
// i := Importer{
|
||||
// GalleryWriter: galleryReaderWriter,
|
||||
// Path: path,
|
||||
// Input: jsonschema.Image{
|
||||
// Galleries: []string{
|
||||
// existingGalleryChecksum,
|
||||
// },
|
||||
// },
|
||||
// }
|
||||
|
||||
// galleryReaderWriter.On("FindByChecksums", testCtx, []string{existingGalleryChecksum}).Return([]*models.Gallery{{
|
||||
// ID: existingGalleryID,
|
||||
// }}, nil).Once()
|
||||
// galleryReaderWriter.On("FindByChecksums", testCtx, []string{existingGalleryErr}).Return(nil, errors.New("FindByChecksum error")).Once()
|
||||
|
||||
// err := i.PreImport(testCtx)
|
||||
// assert.Nil(t, err)
|
||||
// assert.Equal(t, existingGalleryID, i.image.GalleryIDs[0])
|
||||
|
||||
// i.Input.Galleries = []string{
|
||||
// existingGalleryErr,
|
||||
// }
|
||||
|
||||
// err = i.PreImport(testCtx)
|
||||
// assert.NotNil(t, err)
|
||||
|
||||
// galleryReaderWriter.AssertExpectations(t)
|
||||
// }
|
||||
|
||||
// func TestImporterPreImportWithMissingGallery(t *testing.T) {
|
||||
// galleryReaderWriter := &mocks.GalleryReaderWriter{}
|
||||
|
||||
// i := Importer{
|
||||
// Path: path,
|
||||
// GalleryWriter: galleryReaderWriter,
|
||||
// Input: jsonschema.Image{
|
||||
// Galleries: []string{
|
||||
// missingGalleryChecksum,
|
||||
// },
|
||||
// },
|
||||
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
|
||||
// }
|
||||
|
||||
// galleryReaderWriter.On("FindByChecksums", testCtx, []string{missingGalleryChecksum}).Return(nil, nil).Times(3)
|
||||
|
||||
// err := i.PreImport(testCtx)
|
||||
// assert.NotNil(t, err)
|
||||
|
||||
// i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
|
||||
// err = i.PreImport(testCtx)
|
||||
// assert.Nil(t, err)
|
||||
// assert.Nil(t, i.image.GalleryIDs)
|
||||
|
||||
// i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
|
||||
// err = i.PreImport(testCtx)
|
||||
// assert.Nil(t, err)
|
||||
// assert.Nil(t, i.image.GalleryIDs)
|
||||
|
||||
// galleryReaderWriter.AssertExpectations(t)
|
||||
// }
|
||||
|
||||
// func TestImporterPreImportWithPerformer(t *testing.T) {
|
||||
// performerReaderWriter := &mocks.PerformerReaderWriter{}
|
||||
|
||||
// i := Importer{
|
||||
// PerformerWriter: performerReaderWriter,
|
||||
// Path: path,
|
||||
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
|
||||
// Input: jsonschema.Image{
|
||||
// Performers: []string{
|
||||
// existingPerformerName,
|
||||
// },
|
||||
// },
|
||||
// }
|
||||
|
||||
// performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerName}, false).Return([]*models.Performer{
|
||||
// {
|
||||
// ID: existingPerformerID,
|
||||
// Name: models.NullString(existingPerformerName),
|
||||
// },
|
||||
// }, nil).Once()
|
||||
// performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once()
|
||||
|
||||
// err := i.PreImport(testCtx)
|
||||
// assert.Nil(t, err)
|
||||
// assert.Equal(t, []int{existingPerformerID}, i.image.PerformerIDs)
|
||||
|
||||
// i.Input.Performers = []string{existingPerformerErr}
|
||||
// err = i.PreImport(testCtx)
|
||||
// assert.NotNil(t, err)
|
||||
|
||||
// performerReaderWriter.AssertExpectations(t)
|
||||
// }
|
||||
|
||||
// func TestImporterPreImportWithMissingPerformer(t *testing.T) {
|
||||
// performerReaderWriter := &mocks.PerformerReaderWriter{}
|
||||
|
||||
// i := Importer{
|
||||
// Path: path,
|
||||
// PerformerWriter: performerReaderWriter,
|
||||
// Input: jsonschema.Image{
|
||||
// Performers: []string{
|
||||
// missingPerformerName,
|
||||
// },
|
||||
// },
|
||||
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
|
||||
// }
|
||||
|
||||
// performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3)
|
||||
// performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(&models.Performer{
|
||||
// ID: existingPerformerID,
|
||||
// }, nil)
|
||||
|
||||
// err := i.PreImport(testCtx)
|
||||
// assert.NotNil(t, err)
|
||||
|
||||
// i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
|
||||
// err = i.PreImport(testCtx)
|
||||
// assert.Nil(t, err)
|
||||
|
||||
// i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
|
||||
// err = i.PreImport(testCtx)
|
||||
// assert.Nil(t, err)
|
||||
// assert.Equal(t, []int{existingPerformerID}, i.image.PerformerIDs)
|
||||
|
||||
// performerReaderWriter.AssertExpectations(t)
|
||||
// }
|
||||
|
||||
// func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) {
|
||||
// performerReaderWriter := &mocks.PerformerReaderWriter{}
|
||||
|
||||
// i := Importer{
|
||||
// PerformerWriter: performerReaderWriter,
|
||||
// Path: path,
|
||||
// Input: jsonschema.Image{
|
||||
// Performers: []string{
|
||||
// missingPerformerName,
|
||||
// },
|
||||
// },
|
||||
// MissingRefBehaviour: models.ImportMissingRefEnumCreate,
|
||||
// }
|
||||
|
||||
// performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once()
|
||||
// performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(nil, errors.New("Create error"))
|
||||
|
||||
// err := i.PreImport(testCtx)
|
||||
// assert.NotNil(t, err)
|
||||
// }
|
||||
|
||||
// func TestImporterPreImportWithTag(t *testing.T) {
|
||||
// tagReaderWriter := &mocks.TagReaderWriter{}
|
||||
|
||||
// i := Importer{
|
||||
// TagWriter: tagReaderWriter,
|
||||
// Path: path,
|
||||
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
|
||||
// Input: jsonschema.Image{
|
||||
// Tags: []string{
|
||||
// existingTagName,
|
||||
// },
|
||||
// },
|
||||
// }
|
||||
|
||||
// tagReaderWriter.On("FindByNames", testCtx, []string{existingTagName}, false).Return([]*models.Tag{
|
||||
// {
|
||||
// ID: existingTagID,
|
||||
// Name: existingTagName,
|
||||
// },
|
||||
// }, nil).Once()
|
||||
// tagReaderWriter.On("FindByNames", testCtx, []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once()
|
||||
|
||||
// err := i.PreImport(testCtx)
|
||||
// assert.Nil(t, err)
|
||||
// assert.Equal(t, []int{existingTagID}, i.image.TagIDs)
|
||||
|
||||
// i.Input.Tags = []string{existingTagErr}
|
||||
// err = i.PreImport(testCtx)
|
||||
// assert.NotNil(t, err)
|
||||
|
||||
// tagReaderWriter.AssertExpectations(t)
|
||||
// }
|
||||
|
||||
// func TestImporterPreImportWithMissingTag(t *testing.T) {
|
||||
// tagReaderWriter := &mocks.TagReaderWriter{}
|
||||
|
||||
// i := Importer{
|
||||
// Path: path,
|
||||
// TagWriter: tagReaderWriter,
|
||||
// Input: jsonschema.Image{
|
||||
// Tags: []string{
|
||||
// missingTagName,
|
||||
// },
|
||||
// },
|
||||
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
|
||||
// }
|
||||
|
||||
// tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3)
|
||||
// tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(&models.Tag{
|
||||
// ID: existingTagID,
|
||||
// }, nil)
|
||||
|
||||
// err := i.PreImport(testCtx)
|
||||
// assert.NotNil(t, err)
|
||||
|
||||
// i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
|
||||
// err = i.PreImport(testCtx)
|
||||
// assert.Nil(t, err)
|
||||
|
||||
// i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
|
||||
// err = i.PreImport(testCtx)
|
||||
// assert.Nil(t, err)
|
||||
// assert.Equal(t, []int{existingTagID}, i.image.TagIDs)
|
||||
|
||||
// tagReaderWriter.AssertExpectations(t)
|
||||
// }
|
||||
|
||||
// func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
|
||||
// tagReaderWriter := &mocks.TagReaderWriter{}
|
||||
|
||||
// i := Importer{
|
||||
// TagWriter: tagReaderWriter,
|
||||
// Path: path,
|
||||
// Input: jsonschema.Image{
|
||||
// Tags: []string{
|
||||
// missingTagName,
|
||||
// },
|
||||
// },
|
||||
// MissingRefBehaviour: models.ImportMissingRefEnumCreate,
|
||||
// }
|
||||
|
||||
// tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once()
|
||||
// tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error"))
|
||||
|
||||
// err := i.PreImport(testCtx)
|
||||
// assert.NotNil(t, err)
|
||||
// }
|
||||
|
||||
// func TestImporterFindExistingID(t *testing.T) {
|
||||
// readerWriter := &mocks.ImageReaderWriter{}
|
||||
|
||||
// i := Importer{
|
||||
// ReaderWriter: readerWriter,
|
||||
// Path: path,
|
||||
// Input: jsonschema.Image{
|
||||
// Checksum: missingChecksum,
|
||||
// },
|
||||
// }
|
||||
|
||||
// expectedErr := errors.New("FindBy* error")
|
||||
// readerWriter.On("FindByChecksum", testCtx, missingChecksum).Return(nil, nil).Once()
|
||||
// readerWriter.On("FindByChecksum", testCtx, checksum).Return(&models.Image{
|
||||
// ID: existingImageID,
|
||||
// }, nil).Once()
|
||||
// readerWriter.On("FindByChecksum", testCtx, errChecksum).Return(nil, expectedErr).Once()
|
||||
|
||||
// id, err := i.FindExistingID(testCtx)
|
||||
// assert.Nil(t, id)
|
||||
// assert.Nil(t, err)
|
||||
|
||||
// i.Input.Checksum = checksum
|
||||
// id, err = i.FindExistingID(testCtx)
|
||||
// assert.Equal(t, existingImageID, *id)
|
||||
// assert.Nil(t, err)
|
||||
|
||||
// i.Input.Checksum = errChecksum
|
||||
// id, err = i.FindExistingID(testCtx)
|
||||
// assert.Nil(t, id)
|
||||
// assert.NotNil(t, err)
|
||||
|
||||
// readerWriter.AssertExpectations(t)
|
||||
// }
|
||||
|
||||
// func TestCreate(t *testing.T) {
|
||||
// readerWriter := &mocks.ImageReaderWriter{}
|
||||
|
||||
// image := models.Image{
|
||||
// Title: title,
|
||||
// }
|
||||
|
||||
// imageErr := models.Image{
|
||||
// Title: imageNameErr,
|
||||
// }
|
||||
|
||||
// i := Importer{
|
||||
// ReaderWriter: readerWriter,
|
||||
// image: image,
|
||||
// }
|
||||
|
||||
// errCreate := errors.New("Create error")
|
||||
// readerWriter.On("Create", testCtx, &image).Run(func(args mock.Arguments) {
|
||||
// args.Get(1).(*models.Image).ID = imageID
|
||||
// }).Return(nil).Once()
|
||||
// readerWriter.On("Create", testCtx, &imageErr).Return(errCreate).Once()
|
||||
|
||||
// id, err := i.Create(testCtx)
|
||||
// assert.Equal(t, imageID, *id)
|
||||
// assert.Nil(t, err)
|
||||
// assert.Equal(t, imageID, i.ID)
|
||||
|
||||
// i.image = imageErr
|
||||
// id, err = i.Create(testCtx)
|
||||
// assert.Nil(t, id)
|
||||
// assert.NotNil(t, err)
|
||||
|
||||
// readerWriter.AssertExpectations(t)
|
||||
// }
|
||||
|
||||
// func TestUpdate(t *testing.T) {
|
||||
// readerWriter := &mocks.ImageReaderWriter{}
|
||||
|
||||
// image := models.Image{
|
||||
// Title: title,
|
||||
// }
|
||||
|
||||
// imageErr := models.Image{
|
||||
// Title: imageNameErr,
|
||||
// }
|
||||
|
||||
// i := Importer{
|
||||
// ReaderWriter: readerWriter,
|
||||
// image: image,
|
||||
// }
|
||||
|
||||
// errUpdate := errors.New("Update error")
|
||||
|
||||
// // id needs to be set for the mock input
|
||||
// image.ID = imageID
|
||||
// readerWriter.On("Update", testCtx, &image).Return(nil).Once()
|
||||
|
||||
// err := i.Update(testCtx, imageID)
|
||||
// assert.Nil(t, err)
|
||||
// assert.Equal(t, imageID, i.ID)
|
||||
|
||||
// i.image = imageErr
|
||||
|
||||
// // need to set id separately
|
||||
// imageErr.ID = errImageID
|
||||
// readerWriter.On("Update", testCtx, &imageErr).Return(errUpdate).Once()
|
||||
|
||||
// err = i.Update(testCtx, errImageID)
|
||||
// assert.NotNil(t, err)
|
||||
|
||||
// readerWriter.AssertExpectations(t)
|
||||
// }
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/jsonschema"
|
||||
"github.com/stashapp/stash/pkg/models/mocks"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
var (
|
||||
existingStudioID = 101
|
||||
existingPerformerID = 103
|
||||
existingTagID = 105
|
||||
|
||||
existingStudioName = "existingStudioName"
|
||||
existingStudioErr = "existingStudioErr"
|
||||
missingStudioName = "missingStudioName"
|
||||
|
||||
existingPerformerName = "existingPerformerName"
|
||||
existingPerformerErr = "existingPerformerErr"
|
||||
missingPerformerName = "missingPerformerName"
|
||||
|
||||
existingTagName = "existingTagName"
|
||||
existingTagErr = "existingTagErr"
|
||||
missingTagName = "missingTagName"
|
||||
)
|
||||
|
||||
var testCtx = context.Background()
|
||||
|
||||
func TestImporterPreImport(t *testing.T) {
|
||||
i := Importer{}
|
||||
|
||||
err := i.PreImport(testCtx)
|
||||
assert.Nil(t, err)
|
||||
}
|
||||
|
||||
func TestImporterPreImportWithStudio(t *testing.T) {
|
||||
studioReaderWriter := &mocks.StudioReaderWriter{}
|
||||
|
||||
i := Importer{
|
||||
StudioWriter: studioReaderWriter,
|
||||
Input: jsonschema.Image{
|
||||
Studio: existingStudioName,
|
||||
},
|
||||
}
|
||||
|
||||
studioReaderWriter.On("FindByName", testCtx, existingStudioName, false).Return(&models.Studio{
|
||||
ID: existingStudioID,
|
||||
}, nil).Once()
|
||||
studioReaderWriter.On("FindByName", testCtx, existingStudioErr, false).Return(nil, errors.New("FindByName error")).Once()
|
||||
|
||||
err := i.PreImport(testCtx)
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, existingStudioID, *i.image.StudioID)
|
||||
|
||||
i.Input.Studio = existingStudioErr
|
||||
err = i.PreImport(testCtx)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
studioReaderWriter.AssertExpectations(t)
|
||||
}
|
||||
|
||||
func TestImporterPreImportWithMissingStudio(t *testing.T) {
|
||||
studioReaderWriter := &mocks.StudioReaderWriter{}
|
||||
|
||||
i := Importer{
|
||||
StudioWriter: studioReaderWriter,
|
||||
Input: jsonschema.Image{
|
||||
Studio: missingStudioName,
|
||||
},
|
||||
MissingRefBehaviour: models.ImportMissingRefEnumFail,
|
||||
}
|
||||
|
||||
studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3)
|
||||
studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(&models.Studio{
|
||||
ID: existingStudioID,
|
||||
}, nil)
|
||||
|
||||
err := i.PreImport(testCtx)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
|
||||
err = i.PreImport(testCtx)
|
||||
assert.Nil(t, err)
|
||||
|
||||
i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
|
||||
err = i.PreImport(testCtx)
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, existingStudioID, *i.image.StudioID)
|
||||
|
||||
studioReaderWriter.AssertExpectations(t)
|
||||
}
|
||||
|
||||
func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) {
|
||||
studioReaderWriter := &mocks.StudioReaderWriter{}
|
||||
|
||||
i := Importer{
|
||||
StudioWriter: studioReaderWriter,
|
||||
Input: jsonschema.Image{
|
||||
Studio: missingStudioName,
|
||||
},
|
||||
MissingRefBehaviour: models.ImportMissingRefEnumCreate,
|
||||
}
|
||||
|
||||
studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once()
|
||||
studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error"))
|
||||
|
||||
err := i.PreImport(testCtx)
|
||||
assert.NotNil(t, err)
|
||||
}
|
||||
|
||||
func TestImporterPreImportWithPerformer(t *testing.T) {
|
||||
performerReaderWriter := &mocks.PerformerReaderWriter{}
|
||||
|
||||
i := Importer{
|
||||
PerformerWriter: performerReaderWriter,
|
||||
MissingRefBehaviour: models.ImportMissingRefEnumFail,
|
||||
Input: jsonschema.Image{
|
||||
Performers: []string{
|
||||
existingPerformerName,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerName}, false).Return([]*models.Performer{
|
||||
{
|
||||
ID: existingPerformerID,
|
||||
Name: models.NullString(existingPerformerName),
|
||||
},
|
||||
}, nil).Once()
|
||||
performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once()
|
||||
|
||||
err := i.PreImport(testCtx)
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, []int{existingPerformerID}, i.image.PerformerIDs.List())
|
||||
|
||||
i.Input.Performers = []string{existingPerformerErr}
|
||||
err = i.PreImport(testCtx)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
performerReaderWriter.AssertExpectations(t)
|
||||
}
|
||||
|
||||
func TestImporterPreImportWithMissingPerformer(t *testing.T) {
|
||||
performerReaderWriter := &mocks.PerformerReaderWriter{}
|
||||
|
||||
i := Importer{
|
||||
PerformerWriter: performerReaderWriter,
|
||||
Input: jsonschema.Image{
|
||||
Performers: []string{
|
||||
missingPerformerName,
|
||||
},
|
||||
},
|
||||
MissingRefBehaviour: models.ImportMissingRefEnumFail,
|
||||
}
|
||||
|
||||
performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3)
|
||||
performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(&models.Performer{
|
||||
ID: existingPerformerID,
|
||||
}, nil)
|
||||
|
||||
err := i.PreImport(testCtx)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
|
||||
err = i.PreImport(testCtx)
|
||||
assert.Nil(t, err)
|
||||
|
||||
i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
|
||||
err = i.PreImport(testCtx)
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, []int{existingPerformerID}, i.image.PerformerIDs.List())
|
||||
|
||||
performerReaderWriter.AssertExpectations(t)
|
||||
}
|
||||
|
||||
func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) {
|
||||
performerReaderWriter := &mocks.PerformerReaderWriter{}
|
||||
|
||||
i := Importer{
|
||||
PerformerWriter: performerReaderWriter,
|
||||
Input: jsonschema.Image{
|
||||
Performers: []string{
|
||||
missingPerformerName,
|
||||
},
|
||||
},
|
||||
MissingRefBehaviour: models.ImportMissingRefEnumCreate,
|
||||
}
|
||||
|
||||
performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once()
|
||||
performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(nil, errors.New("Create error"))
|
||||
|
||||
err := i.PreImport(testCtx)
|
||||
assert.NotNil(t, err)
|
||||
}
|
||||
|
||||
func TestImporterPreImportWithTag(t *testing.T) {
|
||||
tagReaderWriter := &mocks.TagReaderWriter{}
|
||||
|
||||
i := Importer{
|
||||
TagWriter: tagReaderWriter,
|
||||
MissingRefBehaviour: models.ImportMissingRefEnumFail,
|
||||
Input: jsonschema.Image{
|
||||
Tags: []string{
|
||||
existingTagName,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
tagReaderWriter.On("FindByNames", testCtx, []string{existingTagName}, false).Return([]*models.Tag{
|
||||
{
|
||||
ID: existingTagID,
|
||||
Name: existingTagName,
|
||||
},
|
||||
}, nil).Once()
|
||||
tagReaderWriter.On("FindByNames", testCtx, []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once()
|
||||
|
||||
err := i.PreImport(testCtx)
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, []int{existingTagID}, i.image.TagIDs.List())
|
||||
|
||||
i.Input.Tags = []string{existingTagErr}
|
||||
err = i.PreImport(testCtx)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
tagReaderWriter.AssertExpectations(t)
|
||||
}
|
||||
|
||||
func TestImporterPreImportWithMissingTag(t *testing.T) {
|
||||
tagReaderWriter := &mocks.TagReaderWriter{}
|
||||
|
||||
i := Importer{
|
||||
TagWriter: tagReaderWriter,
|
||||
Input: jsonschema.Image{
|
||||
Tags: []string{
|
||||
missingTagName,
|
||||
},
|
||||
},
|
||||
MissingRefBehaviour: models.ImportMissingRefEnumFail,
|
||||
}
|
||||
|
||||
tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3)
|
||||
tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(&models.Tag{
|
||||
ID: existingTagID,
|
||||
}, nil)
|
||||
|
||||
err := i.PreImport(testCtx)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
|
||||
err = i.PreImport(testCtx)
|
||||
assert.Nil(t, err)
|
||||
|
||||
i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
|
||||
err = i.PreImport(testCtx)
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, []int{existingTagID}, i.image.TagIDs.List())
|
||||
|
||||
tagReaderWriter.AssertExpectations(t)
|
||||
}
|
||||
|
||||
func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
|
||||
tagReaderWriter := &mocks.TagReaderWriter{}
|
||||
|
||||
i := Importer{
|
||||
TagWriter: tagReaderWriter,
|
||||
Input: jsonschema.Image{
|
||||
Tags: []string{
|
||||
missingTagName,
|
||||
},
|
||||
},
|
||||
MissingRefBehaviour: models.ImportMissingRefEnumCreate,
|
||||
}
|
||||
|
||||
tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once()
|
||||
tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error"))
|
||||
|
||||
err := i.PreImport(testCtx)
|
||||
assert.NotNil(t, err)
|
||||
}
|
||||
|
||||
156
pkg/models/jsonschema/file_folder.go
Normal file
@@ -0,0 +1,156 @@
package jsonschema

import (
	"bytes"
	"errors"
	"fmt"
	"io/ioutil"
	"os"
	"path"
	"strings"

	jsoniter "github.com/json-iterator/go"
	"github.com/stashapp/stash/pkg/hash/md5"
	"github.com/stashapp/stash/pkg/models/json"
)

const (
	DirEntryTypeFolder = "folder"
	DirEntryTypeVideo  = "video"
	DirEntryTypeImage  = "image"
	DirEntryTypeFile   = "file"
)

type DirEntry interface {
	IsFile() bool
	Filename() string
	DirEntry() *BaseDirEntry
}

type BaseDirEntry struct {
	ZipFile string        `json:"zip_file,omitempty"`
	ModTime json.JSONTime `json:"mod_time"`

	Type string `json:"type,omitempty"`

	Path string `json:"path,omitempty"`

	CreatedAt json.JSONTime `json:"created_at,omitempty"`
	UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
}

func (f *BaseDirEntry) DirEntry() *BaseDirEntry {
	return f
}

func (f *BaseDirEntry) IsFile() bool {
	return false
}

func (f *BaseDirEntry) Filename() string {
	// prefix with the path depth so that we can import lower-level files/folders first
	depth := strings.Count(f.Path, string("/"))

	// hash the full path for a unique filename
	hash := md5.FromString(f.Path)

	basename := path.Base(f.Path)

	return fmt.Sprintf("%02x.%s.%s.json", depth, basename, hash)
}
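As an illustration of the naming scheme above (not from the commit), the depth prefix, base name, and path hash combine into the exported filename. The sketch below uses the standard library and assumes md5.FromString is the hex-encoded MD5 digest of the path; the sample path is invented.

package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"path"
	"strings"
)

func main() {
	p := "/data/photos/holiday" // hypothetical folder path, three separators deep
	depth := strings.Count(p, "/")
	sum := md5.Sum([]byte(p))
	hash := hex.EncodeToString(sum[:])
	fmt.Printf("%02x.%s.%s.json\n", depth, path.Base(p), hash)
	// prints something like: 03.holiday.<32-hex-digit-hash>.json
}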

type BaseFile struct {
	BaseDirEntry

	Fingerprints []Fingerprint `json:"fingerprints,omitempty"`
	Size         int64         `json:"size"`
}

func (f *BaseFile) IsFile() bool {
	return true
}

type Fingerprint struct {
	Type        string      `json:"type,omitempty"`
	Fingerprint interface{} `json:"fingerprint,omitempty"`
}

type VideoFile struct {
	*BaseFile
	Format     string  `json:"format,omitempty"`
	Width      int     `json:"width,omitempty"`
	Height     int     `json:"height,omitempty"`
	Duration   float64 `json:"duration,omitempty"`
	VideoCodec string  `json:"video_codec,omitempty"`
	AudioCodec string  `json:"audio_codec,omitempty"`
	FrameRate  float64 `json:"frame_rate,omitempty"`
	BitRate    int64   `json:"bitrate,omitempty"`

	Interactive      bool `json:"interactive,omitempty"`
	InteractiveSpeed *int `json:"interactive_speed,omitempty"`
}

type ImageFile struct {
	*BaseFile
	Format string `json:"format,omitempty"`
	Width  int    `json:"width,omitempty"`
	Height int    `json:"height,omitempty"`
}

func LoadFileFile(filePath string) (DirEntry, error) {
	r, err := os.Open(filePath)
	if err != nil {
		return nil, err
	}
	defer r.Close()

	data, err := ioutil.ReadAll(r)
	if err != nil {
		return nil, err
	}

	var json = jsoniter.ConfigCompatibleWithStandardLibrary
	jsonParser := json.NewDecoder(bytes.NewReader(data))

	var bf BaseDirEntry
	if err := jsonParser.Decode(&bf); err != nil {
		return nil, err
	}

	jsonParser = json.NewDecoder(bytes.NewReader(data))

	switch bf.Type {
	case DirEntryTypeFolder:
		return &bf, nil
	case DirEntryTypeVideo:
		var vf VideoFile
		if err := jsonParser.Decode(&vf); err != nil {
			return nil, err
		}

		return &vf, nil
	case DirEntryTypeImage:
		var imf ImageFile
		if err := jsonParser.Decode(&imf); err != nil {
			return nil, err
		}

		return &imf, nil
	case DirEntryTypeFile:
		var bff BaseFile
		if err := jsonParser.Decode(&bff); err != nil {
			return nil, err
		}

		return &bff, nil
	default:
		return nil, errors.New("unknown file type")
	}
}

func SaveFileFile(filePath string, file DirEntry) error {
	if file == nil {
		return fmt.Errorf("file must not be nil")
	}
	return marshalToFile(filePath, file)
}
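A brief usage sketch (not part of the commit) of how these helpers round-trip a file entry, relying on the Type field to drive the switch in LoadFileFile. The test name, temp-file handling, and field values are invented for the example.

package jsonschema_test

import (
	"path/filepath"
	"testing"

	"github.com/stashapp/stash/pkg/models/jsonschema"
)

func TestFileFileRoundTrip(t *testing.T) {
	entry := &jsonschema.BaseFile{
		BaseDirEntry: jsonschema.BaseDirEntry{
			Type: jsonschema.DirEntryTypeFile,
			Path: "/data/docs/readme.txt", // hypothetical path
		},
		Size: 42,
	}

	// write the entry under its generated filename, then read it back
	p := filepath.Join(t.TempDir(), entry.Filename())
	if err := jsonschema.SaveFileFile(p, entry); err != nil {
		t.Fatal(err)
	}

	loaded, err := jsonschema.LoadFileFile(p)
	if err != nil {
		t.Fatal(err)
	}

	// the "file" type dispatches back to *BaseFile
	if _, ok := loaded.(*jsonschema.BaseFile); !ok {
		t.Errorf("expected *BaseFile, got %T", loaded)
	}
}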
56
pkg/models/jsonschema/folder.go
Normal file
@@ -0,0 +1,56 @@
package jsonschema

import (
	"fmt"
	"os"
	"path"
	"strings"

	jsoniter "github.com/json-iterator/go"
	"github.com/stashapp/stash/pkg/hash/md5"
	"github.com/stashapp/stash/pkg/models/json"
)

type Folder struct {
	BaseDirEntry

	Path string `json:"path,omitempty"`

	CreatedAt json.JSONTime `json:"created_at,omitempty"`
	UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
}

func (f *Folder) Filename() string {
	// prefix with the path depth so that we can import lower-level folders first
	depth := strings.Count(f.Path, string("/"))

	// hash the full path for a unique filename
	hash := md5.FromString(f.Path)

	basename := path.Base(f.Path)

	return fmt.Sprintf("%2x.%s.%s.json", depth, basename, hash)
}

func LoadFolderFile(filePath string) (*Folder, error) {
	var folder Folder
	file, err := os.Open(filePath)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	var json = jsoniter.ConfigCompatibleWithStandardLibrary
	jsonParser := json.NewDecoder(file)
	err = jsonParser.Decode(&folder)
	if err != nil {
		return nil, err
	}
	return &folder, nil
}

func SaveFolderFile(filePath string, folder *Folder) error {
	if folder == nil {
		return fmt.Errorf("folder must not be nil")
	}
	return marshalToFile(filePath, folder)
}
|
||||
@@ -3,27 +3,37 @@ package jsonschema
import (
	"fmt"
	"os"
	"strings"

	jsoniter "github.com/json-iterator/go"
	"github.com/stashapp/stash/pkg/models/json"
)

type Gallery struct {
	Path        string        `json:"path,omitempty"`
	Checksum    string        `json:"checksum,omitempty"`
	Zip         bool          `json:"zip,omitempty"`
	Title       string        `json:"title,omitempty"`
	URL         string        `json:"url,omitempty"`
	Date        string        `json:"date,omitempty"`
	Details     string        `json:"details,omitempty"`
	Rating      int           `json:"rating,omitempty"`
	Organized   bool          `json:"organized,omitempty"`
	Studio      string        `json:"studio,omitempty"`
	Performers  []string      `json:"performers,omitempty"`
	Tags        []string      `json:"tags,omitempty"`
	FileModTime json.JSONTime `json:"file_mod_time,omitempty"`
	CreatedAt   json.JSONTime `json:"created_at,omitempty"`
	UpdatedAt   json.JSONTime `json:"updated_at,omitempty"`
	ZipFiles    []string      `json:"zip_files,omitempty"`
	FolderPath  string        `json:"folder_path,omitempty"`
	Title       string        `json:"title,omitempty"`
	URL         string        `json:"url,omitempty"`
	Date        string        `json:"date,omitempty"`
	Details     string        `json:"details,omitempty"`
	Rating      int           `json:"rating,omitempty"`
	Organized   bool          `json:"organized,omitempty"`
	Studio      string        `json:"studio,omitempty"`
	Performers  []string      `json:"performers,omitempty"`
	Tags        []string      `json:"tags,omitempty"`
	CreatedAt   json.JSONTime `json:"created_at,omitempty"`
	UpdatedAt   json.JSONTime `json:"updated_at,omitempty"`
}

func (s Gallery) Filename(basename string, hash string) string {
	ret := basename

	if ret != "" {
		ret += "."
	}
	ret += hash

	return ret + ".json"
}

func LoadGalleryFile(filePath string) (*Gallery, error) {
@@ -48,3 +58,23 @@ func SaveGalleryFile(filePath string, gallery *Gallery) error {
	}
	return marshalToFile(filePath, gallery)
}

// GalleryRef is used to identify a Gallery.
// Only one field should be populated.
type GalleryRef struct {
	ZipFiles   []string `json:"zip_files,omitempty"`
	FolderPath string   `json:"folder_path,omitempty"`
	// Title is used only if FolderPath and ZipFiles are empty
	Title string `json:"title,omitempty"`
}

func (r GalleryRef) String() string {
	switch {
	case r.FolderPath != "":
		return "{ folder: " + r.FolderPath + " }"
	case len(r.ZipFiles) > 0:
		return "{ zipFiles: [" + strings.Join(r.ZipFiles, ", ") + "] }"
	default:
		return "{ title: " + r.Title + " }"
	}
}
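For illustration (not from the commit), the three mutually exclusive shapes a gallery reference can take in exported image/scene JSON, matching the precedence noted above. The paths and titles are invented; the test name is hypothetical.

package jsonschema_test

import (
	"encoding/json"
	"fmt"
	"testing"

	"github.com/stashapp/stash/pkg/models/jsonschema"
)

func TestGalleryRefShapes(t *testing.T) {
	examples := []string{
		`{"folder_path": "/data/galleries/holiday"}`,
		`{"zip_files": ["/data/galleries/holiday.zip"]}`,
		`{"title": "loose images"}`,
	}

	for _, raw := range examples {
		var ref jsonschema.GalleryRef
		if err := json.Unmarshal([]byte(raw), &ref); err != nil {
			t.Fatal(err)
		}
		// String() renders whichever field identifies the gallery:
		// { folder: /data/galleries/holiday }
		// { zipFiles: [/data/galleries/holiday.zip] }
		// { title: loose images }
		fmt.Println(ref.String())
	}
}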
|
||||
|
||||
@@ -8,28 +8,33 @@ import (
|
||||
"github.com/stashapp/stash/pkg/models/json"
|
||||
)
|
||||
|
||||
type ImageFile struct {
|
||||
ModTime json.JSONTime `json:"mod_time,omitempty"`
|
||||
Size int64 `json:"size"`
|
||||
Width int `json:"width"`
|
||||
Height int `json:"height"`
|
||||
}
|
||||
|
||||
type Image struct {
|
||||
Title string `json:"title,omitempty"`
|
||||
Checksum string `json:"checksum,omitempty"`
|
||||
Studio string `json:"studio,omitempty"`
|
||||
Rating int `json:"rating,omitempty"`
|
||||
Organized bool `json:"organized,omitempty"`
|
||||
OCounter int `json:"o_counter,omitempty"`
|
||||
Galleries []string `json:"galleries,omitempty"`
|
||||
Galleries []GalleryRef `json:"galleries,omitempty"`
|
||||
Performers []string `json:"performers,omitempty"`
|
||||
Tags []string `json:"tags,omitempty"`
|
||||
File *ImageFile `json:"file,omitempty"`
|
||||
Files []string `json:"files,omitempty"`
|
||||
CreatedAt json.JSONTime `json:"created_at,omitempty"`
|
||||
UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
|
||||
}
|
||||
|
||||
func (s Image) Filename(basename string, hash string) string {
|
||||
ret := s.Title
|
||||
if ret == "" {
|
||||
ret = basename
|
||||
}
|
||||
|
||||
if hash != "" {
|
||||
ret += "." + hash
|
||||
}
|
||||
|
||||
return ret + ".json"
|
||||
}
|
||||
|
||||
func LoadImageFile(filePath string) (*Image, error) {
|
||||
var image Image
|
||||
file, err := os.Open(filePath)
|
||||
|
||||
@@ -1,47 +0,0 @@
|
||||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
)
|
||||
|
||||
type PathNameMapping struct {
|
||||
Path string `json:"path,omitempty"`
|
||||
Name string `json:"name,omitempty"`
|
||||
Checksum string `json:"checksum"`
|
||||
}
|
||||
|
||||
type Mappings struct {
|
||||
Tags []PathNameMapping `json:"tags"`
|
||||
Performers []PathNameMapping `json:"performers"`
|
||||
Studios []PathNameMapping `json:"studios"`
|
||||
Movies []PathNameMapping `json:"movies"`
|
||||
Galleries []PathNameMapping `json:"galleries"`
|
||||
Scenes []PathNameMapping `json:"scenes"`
|
||||
Images []PathNameMapping `json:"images"`
|
||||
}
|
||||
|
||||
func LoadMappingsFile(filePath string) (*Mappings, error) {
|
||||
var mappings Mappings
|
||||
file, err := os.Open(filePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
jsonParser := json.NewDecoder(file)
|
||||
err = jsonParser.Decode(&mappings)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &mappings, nil
|
||||
}
|
||||
|
||||
func SaveMappingsFile(filePath string, mappings *Mappings) error {
|
||||
if mappings == nil {
|
||||
return fmt.Errorf("mappings must not be nil")
|
||||
}
|
||||
return marshalToFile(filePath, mappings)
|
||||
}
|
||||
@@ -26,6 +26,10 @@ type Movie struct {
|
||||
UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
|
||||
}
|
||||
|
||||
func (s Movie) Filename() string {
|
||||
return s.Name + ".json"
|
||||
}
|
||||
|
||||
// Backwards Compatible synopsis for the movie
|
||||
type MovieSynopsisBC struct {
|
||||
Synopsis string `json:"sypnopsis,omitempty"`
|
||||
|
||||
@@ -40,6 +40,10 @@ type Performer struct {
|
||||
IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"`
|
||||
}
|
||||
|
||||
func (s Performer) Filename() string {
|
||||
return s.Name + ".json"
|
||||
}
|
||||
|
||||
func LoadPerformerFile(filePath string) (*Performer, error) {
|
||||
var performer Performer
|
||||
file, err := os.Open(filePath)
|
||||
|
||||
@@ -38,9 +38,6 @@ type SceneMovie struct {
|
||||
|
||||
type Scene struct {
|
||||
Title string `json:"title,omitempty"`
|
||||
Checksum string `json:"checksum,omitempty"`
|
||||
OSHash string `json:"oshash,omitempty"`
|
||||
Phash string `json:"phash,omitempty"`
|
||||
Studio string `json:"studio,omitempty"`
|
||||
URL string `json:"url,omitempty"`
|
||||
Date string `json:"date,omitempty"`
|
||||
@@ -48,18 +45,31 @@ type Scene struct {
|
||||
Organized bool `json:"organized,omitempty"`
|
||||
OCounter int `json:"o_counter,omitempty"`
|
||||
Details string `json:"details,omitempty"`
|
||||
Galleries []string `json:"galleries,omitempty"`
|
||||
Galleries []GalleryRef `json:"galleries,omitempty"`
|
||||
Performers []string `json:"performers,omitempty"`
|
||||
Movies []SceneMovie `json:"movies,omitempty"`
|
||||
Tags []string `json:"tags,omitempty"`
|
||||
Markers []SceneMarker `json:"markers,omitempty"`
|
||||
File *SceneFile `json:"file,omitempty"`
|
||||
Files []string `json:"files,omitempty"`
|
||||
Cover string `json:"cover,omitempty"`
|
||||
CreatedAt json.JSONTime `json:"created_at,omitempty"`
|
||||
UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
|
||||
StashIDs []models.StashID `json:"stash_ids,omitempty"`
|
||||
}
|
||||
|
||||
func (s Scene) Filename(basename string, hash string) string {
|
||||
ret := s.Title
|
||||
if ret == "" {
|
||||
ret = basename
|
||||
}
|
||||
|
||||
if hash != "" {
|
||||
ret += "." + hash
|
||||
}
|
||||
|
||||
return ret + ".json"
|
||||
}
|
||||
|
||||
func LoadSceneFile(filePath string) (*Scene, error) {
|
||||
var scene Scene
|
||||
file, err := os.Open(filePath)
|
||||
|
||||
@@ -23,6 +23,10 @@ type Studio struct {
|
||||
IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"`
|
||||
}
|
||||
|
||||
func (s Studio) Filename() string {
|
||||
return s.Name + ".json"
|
||||
}
|
||||
|
||||
func LoadStudioFile(filePath string) (*Studio, error) {
|
||||
var studio Studio
|
||||
file, err := os.Open(filePath)
|
||||
|
||||
@@ -18,6 +18,10 @@ type Tag struct {
|
||||
UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
|
||||
}
|
||||
|
||||
func (s Tag) Filename() string {
|
||||
return s.Name + ".json"
|
||||
}
|
||||
|
||||
func LoadTagFile(filePath string) (*Tag, error) {
|
||||
var tag Tag
|
||||
file, err := os.Open(filePath)
|
||||
|
||||
@@ -10,8 +10,7 @@ import (
|
||||
type JSONPaths struct {
|
||||
Metadata string
|
||||
|
||||
MappingsFile string
|
||||
ScrapedFile string
|
||||
ScrapedFile string
|
||||
|
||||
Performers string
|
||||
Scenes string
|
||||
@@ -20,12 +19,12 @@ type JSONPaths struct {
|
||||
Studios string
|
||||
Tags string
|
||||
Movies string
|
||||
Files string
|
||||
}
|
||||
|
||||
func newJSONPaths(baseDir string) *JSONPaths {
|
||||
jp := JSONPaths{}
|
||||
jp.Metadata = baseDir
|
||||
jp.MappingsFile = filepath.Join(baseDir, "mappings.json")
|
||||
jp.ScrapedFile = filepath.Join(baseDir, "scraped.json")
|
||||
jp.Performers = filepath.Join(baseDir, "performers")
|
||||
jp.Scenes = filepath.Join(baseDir, "scenes")
|
||||
@@ -34,6 +33,7 @@ func newJSONPaths(baseDir string) *JSONPaths {
|
||||
jp.Studios = filepath.Join(baseDir, "studios")
|
||||
jp.Movies = filepath.Join(baseDir, "movies")
|
||||
jp.Tags = filepath.Join(baseDir, "tags")
|
||||
jp.Files = filepath.Join(baseDir, "files")
|
||||
return &jp
|
||||
}
|
||||
|
||||
@@ -42,6 +42,18 @@ func GetJSONPaths(baseDir string) *JSONPaths {
|
||||
return jp
|
||||
}
|
||||
|
||||
func EmptyJSONDirs(baseDir string) {
|
||||
jsonPaths := GetJSONPaths(baseDir)
|
||||
_ = fsutil.EmptyDir(jsonPaths.Scenes)
|
||||
_ = fsutil.EmptyDir(jsonPaths.Images)
|
||||
_ = fsutil.EmptyDir(jsonPaths.Galleries)
|
||||
_ = fsutil.EmptyDir(jsonPaths.Performers)
|
||||
_ = fsutil.EmptyDir(jsonPaths.Studios)
|
||||
_ = fsutil.EmptyDir(jsonPaths.Movies)
|
||||
_ = fsutil.EmptyDir(jsonPaths.Tags)
|
||||
_ = fsutil.EmptyDir(jsonPaths.Files)
|
||||
}
|
||||
|
||||
func EnsureJSONDirs(baseDir string) {
|
||||
jsonPaths := GetJSONPaths(baseDir)
|
||||
if err := fsutil.EnsureDir(jsonPaths.Metadata); err != nil {
|
||||
@@ -68,32 +80,7 @@ func EnsureJSONDirs(baseDir string) {
|
||||
if err := fsutil.EnsureDir(jsonPaths.Tags); err != nil {
|
||||
logger.Warnf("couldn't create directories for Tags: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func (jp *JSONPaths) PerformerJSONPath(checksum string) string {
|
||||
return filepath.Join(jp.Performers, checksum+".json")
|
||||
}
|
||||
|
||||
func (jp *JSONPaths) SceneJSONPath(checksum string) string {
|
||||
return filepath.Join(jp.Scenes, checksum+".json")
|
||||
}
|
||||
|
||||
func (jp *JSONPaths) ImageJSONPath(checksum string) string {
|
||||
return filepath.Join(jp.Images, checksum+".json")
|
||||
}
|
||||
|
||||
func (jp *JSONPaths) GalleryJSONPath(checksum string) string {
|
||||
return filepath.Join(jp.Galleries, checksum+".json")
|
||||
}
|
||||
|
||||
func (jp *JSONPaths) StudioJSONPath(checksum string) string {
|
||||
return filepath.Join(jp.Studios, checksum+".json")
|
||||
}
|
||||
|
||||
func (jp *JSONPaths) TagJSONPath(checksum string) string {
|
||||
return filepath.Join(jp.Tags, checksum+".json")
|
||||
}
|
||||
|
||||
func (jp *JSONPaths) MovieJSONPath(checksum string) string {
|
||||
return filepath.Join(jp.Movies, checksum+".json")
|
||||
if err := fsutil.EnsureDir(jsonPaths.Files); err != nil {
|
||||
logger.Warnf("couldn't create directories for Files: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -45,18 +45,6 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (
|
||||
UpdatedAt: json.JSONTime{Time: scene.UpdatedAt},
|
||||
}
|
||||
|
||||
// if scene.Checksum != nil {
|
||||
// newSceneJSON.Checksum = *scene.Checksum
|
||||
// }
|
||||
|
||||
// if scene.OSHash != nil {
|
||||
// newSceneJSON.OSHash = *scene.OSHash
|
||||
// }
|
||||
|
||||
// if scene.Phash != nil {
|
||||
// newSceneJSON.Phash = utils.PhashToString(*scene.Phash)
|
||||
// }
|
||||
|
||||
if scene.Date != nil {
|
||||
newSceneJSON.Date = scene.Date.String()
|
||||
}
|
||||
@@ -68,7 +56,9 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (
|
||||
newSceneJSON.Organized = scene.Organized
|
||||
newSceneJSON.OCounter = scene.OCounter
|
||||
|
||||
newSceneJSON.File = getSceneFileJSON(scene)
|
||||
for _, f := range scene.Files {
|
||||
newSceneJSON.Files = append(newSceneJSON.Files, f.Base().Path)
|
||||
}
|
||||
|
||||
cover, err := reader.GetCover(ctx, scene.ID)
|
||||
if err != nil {
|
||||
@@ -93,52 +83,52 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (
|
||||
return &newSceneJSON, nil
|
||||
}
|
||||
|
||||
func getSceneFileJSON(scene *models.Scene) *jsonschema.SceneFile {
|
||||
ret := &jsonschema.SceneFile{}
|
||||
// func getSceneFileJSON(scene *models.Scene) *jsonschema.SceneFile {
|
||||
// ret := &jsonschema.SceneFile{}
|
||||
|
||||
// TODO
|
||||
// if scene.FileModTime != nil {
|
||||
// ret.ModTime = json.JSONTime{Time: *scene.FileModTime}
|
||||
// }
|
||||
// TODO
|
||||
// if scene.FileModTime != nil {
|
||||
// ret.ModTime = json.JSONTime{Time: *scene.FileModTime}
|
||||
// }
|
||||
|
||||
// if scene.Size != nil {
|
||||
// ret.Size = *scene.Size
|
||||
// }
|
||||
// if scene.Size != nil {
|
||||
// ret.Size = *scene.Size
|
||||
// }
|
||||
|
||||
// if scene.Duration != nil {
|
||||
// ret.Duration = getDecimalString(*scene.Duration)
|
||||
// }
|
||||
// if scene.Duration != nil {
|
||||
// ret.Duration = getDecimalString(*scene.Duration)
|
||||
// }
|
||||
|
||||
// if scene.VideoCodec != nil {
|
||||
// ret.VideoCodec = *scene.VideoCodec
|
||||
// }
|
||||
// if scene.VideoCodec != nil {
|
||||
// ret.VideoCodec = *scene.VideoCodec
|
||||
// }
|
||||
|
||||
// if scene.AudioCodec != nil {
|
||||
// ret.AudioCodec = *scene.AudioCodec
|
||||
// }
|
||||
// if scene.AudioCodec != nil {
|
||||
// ret.AudioCodec = *scene.AudioCodec
|
||||
// }
|
||||
|
||||
// if scene.Format != nil {
|
||||
// ret.Format = *scene.Format
|
||||
// }
|
||||
// if scene.Format != nil {
|
||||
// ret.Format = *scene.Format
|
||||
// }
|
||||
|
||||
// if scene.Width != nil {
|
||||
// ret.Width = *scene.Width
|
||||
// }
|
||||
// if scene.Width != nil {
|
||||
// ret.Width = *scene.Width
|
||||
// }
|
||||
|
||||
// if scene.Height != nil {
|
||||
// ret.Height = *scene.Height
|
||||
// }
|
||||
// if scene.Height != nil {
|
||||
// ret.Height = *scene.Height
|
||||
// }
|
||||
|
||||
// if scene.Framerate != nil {
|
||||
// ret.Framerate = getDecimalString(*scene.Framerate)
|
||||
// }
|
||||
// if scene.Framerate != nil {
|
||||
// ret.Framerate = getDecimalString(*scene.Framerate)
|
||||
// }
|
||||
|
||||
// if scene.Bitrate != nil {
|
||||
// ret.Bitrate = int(*scene.Bitrate)
|
||||
// }
|
||||
// if scene.Bitrate != nil {
|
||||
// ret.Bitrate = int(*scene.Bitrate)
|
||||
// }
|
||||
|
||||
return ret
|
||||
}
|
||||
// return ret
|
||||
// }
|
||||
|
||||
// GetStudioName returns the name of the provided scene's studio. It returns an
|
||||
// empty string if there is no studio assigned to the scene.
|
||||
|
||||
File diff suppressed because it is too large
@@ -3,8 +3,10 @@ package scene
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/stashapp/stash/pkg/file"
|
||||
"github.com/stashapp/stash/pkg/gallery"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/jsonschema"
|
||||
@@ -24,13 +26,13 @@ type FullCreatorUpdater interface {
|
||||
|
||||
type Importer struct {
|
||||
ReaderWriter FullCreatorUpdater
|
||||
FileFinder file.Getter
|
||||
StudioWriter studio.NameFinderCreator
|
||||
GalleryWriter gallery.ChecksumsFinder
|
||||
GalleryFinder gallery.Finder
|
||||
PerformerWriter performer.NameFinderCreator
|
||||
MovieWriter movie.NameFinderCreator
|
||||
TagWriter tag.NameFinderCreator
|
||||
Input jsonschema.Scene
|
||||
Path string
|
||||
MissingRefBehaviour models.ImportMissingRefEnum
|
||||
FileNamingAlgorithm models.HashAlgorithm
|
||||
|
||||
@@ -42,6 +44,10 @@ type Importer struct {
|
||||
func (i *Importer) PreImport(ctx context.Context) error {
|
||||
i.scene = i.sceneJSONToScene(i.Input)
|
||||
|
||||
if err := i.populateFiles(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := i.populateStudio(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -86,21 +92,6 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene {
|
||||
StashIDs: models.NewRelatedStashIDs(sceneJSON.StashIDs),
|
||||
}
|
||||
|
||||
// if sceneJSON.Checksum != "" {
|
||||
// newScene.Checksum = &sceneJSON.Checksum
|
||||
// }
|
||||
// if sceneJSON.OSHash != "" {
|
||||
// newScene.OSHash = &sceneJSON.OSHash
|
||||
// }
|
||||
|
||||
// if sceneJSON.Phash != "" {
|
||||
// hash, err := strconv.ParseUint(sceneJSON.Phash, 16, 64)
|
||||
// if err == nil {
|
||||
// v := int64(hash)
|
||||
// newScene.Phash = &v
|
||||
// }
|
||||
// }
|
||||
|
||||
if sceneJSON.Date != "" {
|
||||
d := models.NewDate(sceneJSON.Date)
|
||||
newScene.Date = &d
|
||||
@@ -114,42 +105,27 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene {
|
||||
newScene.CreatedAt = sceneJSON.CreatedAt.GetTime()
|
||||
newScene.UpdatedAt = sceneJSON.UpdatedAt.GetTime()
|
||||
|
||||
// if sceneJSON.File != nil {
|
||||
// if sceneJSON.File.Size != "" {
|
||||
// newScene.Size = &sceneJSON.File.Size
|
||||
// }
|
||||
// if sceneJSON.File.Duration != "" {
|
||||
// duration, _ := strconv.ParseFloat(sceneJSON.File.Duration, 64)
|
||||
// newScene.Duration = &duration
|
||||
// }
|
||||
// if sceneJSON.File.VideoCodec != "" {
|
||||
// newScene.VideoCodec = &sceneJSON.File.VideoCodec
|
||||
// }
|
||||
// if sceneJSON.File.AudioCodec != "" {
|
||||
// newScene.AudioCodec = &sceneJSON.File.AudioCodec
|
||||
// }
|
||||
// if sceneJSON.File.Format != "" {
|
||||
// newScene.Format = &sceneJSON.File.Format
|
||||
// }
|
||||
// if sceneJSON.File.Width != 0 {
|
||||
// newScene.Width = &sceneJSON.File.Width
|
||||
// }
|
||||
// if sceneJSON.File.Height != 0 {
|
||||
// newScene.Height = &sceneJSON.File.Height
|
||||
// }
|
||||
// if sceneJSON.File.Framerate != "" {
|
||||
// framerate, _ := strconv.ParseFloat(sceneJSON.File.Framerate, 64)
|
||||
// newScene.Framerate = &framerate
|
||||
// }
|
||||
// if sceneJSON.File.Bitrate != 0 {
|
||||
// v := int64(sceneJSON.File.Bitrate)
|
||||
// newScene.Bitrate = &v
|
||||
// }
|
||||
// }
|
||||
|
||||
return newScene
|
||||
}
|
||||
|
||||
func (i *Importer) populateFiles(ctx context.Context) error {
|
||||
for _, ref := range i.Input.Files {
|
||||
path := filepath.FromSlash(ref)
|
||||
f, err := i.FileFinder.FindByPath(ctx, path)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error finding file: %w", err)
|
||||
}
|
||||
|
||||
if f == nil {
|
||||
return fmt.Errorf("scene file '%s' not found", path)
|
||||
} else {
|
||||
i.scene.Files = append(i.scene.Files, f.(*file.VideoFile))
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
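A minimal illustration (not part of the commit) of the path handling in populateFiles above: each stored file reference is run through filepath.FromSlash before the lookup, so slash-separated paths in the JSON resolve to native paths on the importing host. The path below is invented.

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	ref := "videos/2019/clip.mp4"        // as it might appear in the exported JSON
	fmt.Println(filepath.FromSlash(ref)) // videos\2019\clip.mp4 on Windows, unchanged elsewhere
}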
|
||||
|
||||
func (i *Importer) populateStudio(ctx context.Context) error {
|
||||
if i.Input.Studio != "" {
|
||||
studio, err := i.StudioWriter.FindByName(ctx, i.Input.Studio, false)
|
||||
@@ -192,33 +168,50 @@ func (i *Importer) createStudio(ctx context.Context, name string) (int, error) {
|
||||
return created.ID, nil
|
||||
}
|
||||
|
||||
func (i *Importer) locateGallery(ctx context.Context, ref jsonschema.GalleryRef) (*models.Gallery, error) {
|
||||
var galleries []*models.Gallery
|
||||
var err error
|
||||
switch {
|
||||
case ref.FolderPath != "":
|
||||
galleries, err = i.GalleryFinder.FindByPath(ctx, ref.FolderPath)
|
||||
case len(ref.ZipFiles) > 0:
|
||||
for _, p := range ref.ZipFiles {
|
||||
galleries, err = i.GalleryFinder.FindByPath(ctx, p)
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
|
||||
if len(galleries) > 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
case ref.Title != "":
|
||||
galleries, err = i.GalleryFinder.FindUserGalleryByTitle(ctx, ref.Title)
|
||||
}
|
||||
|
||||
var ret *models.Gallery
|
||||
if len(galleries) > 0 {
|
||||
ret = galleries[0]
|
||||
}
|
||||
|
||||
return ret, err
|
||||
}
|
||||
|
||||
func (i *Importer) populateGalleries(ctx context.Context) error {
|
||||
if len(i.Input.Galleries) > 0 {
|
||||
checksums := i.Input.Galleries
|
||||
galleries, err := i.GalleryWriter.FindByChecksums(ctx, checksums)
|
||||
for _, ref := range i.Input.Galleries {
|
||||
gallery, err := i.locateGallery(ctx, ref)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var pluckedChecksums []string
|
||||
for _, gallery := range galleries {
|
||||
pluckedChecksums = append(pluckedChecksums, gallery.Checksum())
|
||||
}
|
||||
|
||||
missingGalleries := stringslice.StrFilter(checksums, func(checksum string) bool {
|
||||
return !stringslice.StrInclude(pluckedChecksums, checksum)
|
||||
})
|
||||
|
||||
if len(missingGalleries) > 0 {
|
||||
if gallery == nil {
|
||||
if i.MissingRefBehaviour == models.ImportMissingRefEnumFail {
|
||||
return fmt.Errorf("scene galleries [%s] not found", strings.Join(missingGalleries, ", "))
|
||||
return fmt.Errorf("scene gallery '%s' not found", ref.String())
|
||||
}
|
||||
|
||||
// we don't create galleries - just ignore
|
||||
}
|
||||
|
||||
for _, o := range galleries {
|
||||
i.scene.GalleryIDs.Add(o.ID)
|
||||
} else {
|
||||
i.scene.GalleryIDs.Add(gallery.ID)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -366,37 +359,42 @@ func (i *Importer) PostImport(ctx context.Context, id int) error {
|
||||
}
|
||||
|
||||
func (i *Importer) Name() string {
|
||||
return i.Path
|
||||
if i.Input.Title != "" {
|
||||
return i.Input.Title
|
||||
}
|
||||
|
||||
if len(i.Input.Files) > 0 {
|
||||
return i.Input.Files[0]
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
|
||||
func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
|
||||
// TODO
|
||||
// var existing []*models.Scene
|
||||
// var err error
|
||||
var existing []*models.Scene
|
||||
var err error
|
||||
|
||||
// switch i.FileNamingAlgorithm {
|
||||
// case models.HashAlgorithmMd5:
|
||||
// existing, err = i.ReaderWriter.FindByChecksum(ctx, i.Input.Checksum)
|
||||
// case models.HashAlgorithmOshash:
|
||||
// existing, err = i.ReaderWriter.FindByOSHash(ctx, i.Input.OSHash)
|
||||
// default:
|
||||
// panic("unknown file naming algorithm")
|
||||
// }
|
||||
for _, f := range i.scene.Files {
|
||||
existing, err = i.ReaderWriter.FindByFileID(ctx, f.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// if err != nil {
|
||||
// return nil, err
|
||||
// }
|
||||
|
||||
// if len(existing) > 0 {
|
||||
// id := existing[0].ID
|
||||
// return &id, nil
|
||||
// }
|
||||
if len(existing) > 0 {
|
||||
id := existing[0].ID
|
||||
return &id, nil
|
||||
}
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (i *Importer) Create(ctx context.Context) (*int, error) {
|
||||
if err := i.ReaderWriter.Create(ctx, &i.scene, nil); err != nil {
|
||||
var fileIDs []file.ID
|
||||
for _, f := range i.scene.Files {
|
||||
fileIDs = append(fileIDs, f.Base().ID)
|
||||
}
|
||||
if err := i.ReaderWriter.Create(ctx, &i.scene, fileIDs); err != nil {
|
||||
return nil, fmt.Errorf("error creating scene: %v", err)
|
||||
}
|
||||
|
||||
|
||||
File diff suppressed because it is too large
@@ -173,8 +173,8 @@ type fileQueryRow struct {
|
||||
ParentFolderID null.Int `db:"parent_folder_id"`
|
||||
Size null.Int `db:"size"`
|
||||
ModTime null.Time `db:"mod_time"`
|
||||
CreatedAt null.Time `db:"created_at"`
|
||||
UpdatedAt null.Time `db:"updated_at"`
|
||||
CreatedAt null.Time `db:"file_created_at"`
|
||||
UpdatedAt null.Time `db:"file_updated_at"`
|
||||
|
||||
ZipBasename null.String `db:"zip_basename"`
|
||||
ZipFolderPath null.String `db:"zip_folder_path"`
|
||||
@@ -445,8 +445,8 @@ func (qb *FileStore) selectDataset() *goqu.SelectDataset {
|
||||
table.Col("parent_folder_id"),
|
||||
table.Col("size"),
|
||||
table.Col("mod_time"),
|
||||
table.Col("created_at"),
|
||||
table.Col("updated_at"),
|
||||
table.Col("created_at").As("file_created_at"),
|
||||
table.Col("updated_at").As("file_updated_at"),
|
||||
folderTable.Col("path").As("parent_folder_path"),
|
||||
fingerprintTable.Col("type").As("fingerprint_type"),
|
||||
fingerprintTable.Col("fingerprint"),
|
||||
|
||||
@@ -443,7 +443,7 @@ func (qb *GalleryStore) FindByPath(ctx context.Context, p string) ([]*models.Gal
|
||||
sq := dialect.From(table).LeftJoin(
|
||||
galleriesFilesJoinTable,
|
||||
goqu.On(galleriesFilesJoinTable.Col(galleryIDColumn).Eq(table.Col(idColumn))),
|
||||
).InnerJoin(
|
||||
).LeftJoin(
|
||||
filesTable,
|
||||
goqu.On(filesTable.Col(idColumn).Eq(galleriesFilesJoinTable.Col(fileIDColumn))),
|
||||
).LeftJoin(
|
||||
@@ -518,6 +518,26 @@ func (qb *GalleryStore) CountByImageID(ctx context.Context, imageID int) (int, e
|
||||
return count(ctx, q)
|
||||
}
|
||||
|
||||
func (qb *GalleryStore) FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error) {
|
||||
table := qb.table()
|
||||
|
||||
sq := dialect.From(table).LeftJoin(
|
||||
galleriesFilesJoinTable,
|
||||
goqu.On(galleriesFilesJoinTable.Col(galleryIDColumn).Eq(table.Col(idColumn))),
|
||||
).Select(table.Col(idColumn)).Where(
|
||||
table.Col("folder_id").IsNull(),
|
||||
galleriesFilesJoinTable.Col("file_id").IsNull(),
|
||||
table.Col("title").Eq(title),
|
||||
)
|
||||
|
||||
ret, err := qb.findBySubquery(ctx, sq)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("getting user galleries for title %s: %w", title, err)
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
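My reading of the query built above (a sketch, not text from the commit): a "user gallery" is one with no backing folder and no attached files, so its title is the only usable identifier. The approximate SQL is shown below as a Go constant; the table name galleries_files and the column names are assumptions based on the identifiers used in the builder.

package main

import "fmt"

// userGalleryByTitleSQL approximates the subquery produced by FindUserGalleryByTitle,
// under the assumption that the join table is galleries_files with gallery_id/file_id columns.
const userGalleryByTitleSQL = `
SELECT galleries.id
FROM galleries
LEFT JOIN galleries_files ON galleries_files.gallery_id = galleries.id
WHERE galleries.folder_id IS NULL
  AND galleries_files.file_id IS NULL
  AND galleries.title = ?`

func main() {
	fmt.Println(userGalleryByTitleSQL) // the placeholder is bound to the requested title
}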
|
||||
|
||||
func (qb *GalleryStore) Count(ctx context.Context) (int, error) {
|
||||
q := dialect.Select(goqu.COUNT("*")).From(qb.table())
|
||||
return count(ctx, q)
|
||||