Various bug fixes (#2935)

* Sort scene/image/gallery tags by name
* Calculate md5 if missing
* Prevent multiple folder create logs
WithoutPants authored 2022-09-20 17:02:14 +10:00; committed by GitHub
parent 3fa7b470e7
commit cffcd9f4b8
11 changed files with 263 additions and 52 deletions

View File

@@ -9,6 +9,7 @@ import (
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/hash/md5" "github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/hash/oshash" "github.com/stashapp/stash/pkg/hash/oshash"
"github.com/stashapp/stash/pkg/logger"
) )
type fingerprintCalculator struct { type fingerprintCalculator struct {
@@ -58,28 +59,56 @@ func (c *fingerprintCalculator) calculateMD5(o file.Opener) (*file.Fingerprint,
 	}, nil
 }
 
-func (c *fingerprintCalculator) CalculateFingerprints(f *file.BaseFile, o file.Opener) ([]file.Fingerprint, error) {
+func (c *fingerprintCalculator) CalculateFingerprints(f *file.BaseFile, o file.Opener, useExisting bool) ([]file.Fingerprint, error) {
 	var ret []file.Fingerprint
 	calculateMD5 := true
 
 	if isVideo(f.Basename) {
-		// calculate oshash first
-		fp, err := c.calculateOshash(f, o)
-		if err != nil {
-			return nil, err
-		}
+		var (
+			fp  *file.Fingerprint
+			err error
+		)
+
+		if useExisting {
+			fp = f.Fingerprints.For(file.FingerprintTypeOshash)
+		}
+
+		if fp == nil {
+			// calculate oshash first
+			fp, err = c.calculateOshash(f, o)
+			if err != nil {
+				return nil, err
+			}
+		}
 
 		ret = append(ret, *fp)
 
 		// only calculate MD5 if enabled in config
-		calculateMD5 = c.Config.IsCalculateMD5()
+		// always re-calculate MD5 if the file already has it
+		calculateMD5 = c.Config.IsCalculateMD5() || f.Fingerprints.For(file.FingerprintTypeMD5) != nil
 	}
 
 	if calculateMD5 {
-		fp, err := c.calculateMD5(o)
-		if err != nil {
-			return nil, err
-		}
+		var (
+			fp  *file.Fingerprint
+			err error
+		)
+
+		if useExisting {
+			fp = f.Fingerprints.For(file.FingerprintTypeMD5)
+		}
+
+		if fp == nil {
+			if useExisting {
+				// log to indicate missing fingerprint is being calculated
+				logger.Infof("Calculating checksum for %s ...", f.Path)
+			}
+
+			fp, err = c.calculateMD5(o)
+			if err != nil {
+				return nil, err
+			}
+		}
 
 		ret = append(ret, *fp)
 	}

View File

@@ -14,6 +14,39 @@ type Fingerprint struct {
 type Fingerprints []Fingerprint
 
+func (f Fingerprints) Equals(other Fingerprints) bool {
+	if len(f) != len(other) {
+		return false
+	}
+
+	for _, ff := range f {
+		found := false
+		for _, oo := range other {
+			if ff == oo {
+				found = true
+				break
+			}
+		}
+
+		if !found {
+			return false
+		}
+	}
+
+	return true
+}
+
+// For returns a pointer to the first Fingerprint element matching the provided type.
+func (f Fingerprints) For(type_ string) *Fingerprint {
+	for _, fp := range f {
+		if fp.Type == type_ {
+			return &fp
+		}
+	}
+
+	return nil
+}
+
 func (f Fingerprints) Get(type_ string) interface{} {
 	for _, fp := range f {
 		if fp.Type == type_ {
@@ -59,5 +92,5 @@ func (f Fingerprints) AppendUnique(o Fingerprint) Fingerprints {
 // FingerprintCalculator calculates a fingerprint for the provided file.
 type FingerprintCalculator interface {
-	CalculateFingerprints(f *BaseFile, o Opener) ([]Fingerprint, error)
+	CalculateFingerprints(f *BaseFile, o Opener, useExisting bool) ([]Fingerprint, error)
 }
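
For and Equals (added above) behave as follows: For returns nil when no fingerprint of the requested type is stored, and Equals treats the collection as an unordered set. A minimal, standalone usage sketch, assuming the exported names shown in this diff (the fingerprint value and the main wrapper are illustrative only, not part of this commit):

package main

import (
	"fmt"

	"github.com/stashapp/stash/pkg/file"
)

func main() {
	// one stored fingerprint; the value "deadbeef" is made up for illustration
	fps := file.Fingerprints{
		{Type: file.FingerprintTypeOshash, Fingerprint: "deadbeef"},
	}

	// no MD5 fingerprint stored yet, so For returns nil
	fmt.Println(fps.For(file.FingerprintTypeMD5) == nil) // true

	// Equals ignores element order and compares contents
	other := file.Fingerprints{
		{Type: file.FingerprintTypeOshash, Fingerprint: "deadbeef"},
	}
	fmt.Println(fps.Equals(other)) // true
}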

View File

@@ -0,0 +1,86 @@
package file

import "testing"

func TestFingerprints_Equals(t *testing.T) {
	var (
		value1 = 1
		value2 = "2"
		value3 = 1.23

		fingerprint1 = Fingerprint{
			Type:        FingerprintTypeMD5,
			Fingerprint: value1,
		}
		fingerprint2 = Fingerprint{
			Type:        FingerprintTypeOshash,
			Fingerprint: value2,
		}
		fingerprint3 = Fingerprint{
			Type:        FingerprintTypePhash,
			Fingerprint: value3,
		}
	)

	tests := []struct {
		name  string
		f     Fingerprints
		other Fingerprints
		want  bool
	}{
		{
			"identical",
			Fingerprints{
				fingerprint1,
				fingerprint2,
			},
			Fingerprints{
				fingerprint1,
				fingerprint2,
			},
			true,
		},
		{
			"different order",
			Fingerprints{
				fingerprint1,
				fingerprint2,
			},
			Fingerprints{
				fingerprint2,
				fingerprint1,
			},
			true,
		},
		{
			"different length",
			Fingerprints{
				fingerprint1,
				fingerprint2,
			},
			Fingerprints{
				fingerprint1,
			},
			false,
		},
		{
			"different",
			Fingerprints{
				fingerprint1,
				fingerprint2,
			},
			Fingerprints{
				fingerprint1,
				fingerprint3,
			},
			false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := tt.f.Equals(tt.other); got != tt.want {
				t.Errorf("Fingerprints.Equals() = %v, want %v", got, tt.want)
			}
		})
	}
}

View File

@@ -482,7 +482,14 @@ func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*Folder, erro
 		}
 	}
 
-	logger.Infof("%s doesn't exist. Creating new folder entry...", file.Path)
+	txn.AddPostCommitHook(ctx, func(ctx context.Context) error {
+		// log at the end so that if anything fails above due to a locked database
+		// error and the transaction must be retried, then we shouldn't get multiple
+		// logs of the same thing.
+		logger.Infof("%s doesn't exist. Creating new folder entry...", file.Path)
+		return nil
+	})
 
 	if err := s.Repository.FolderStore.Create(ctx, toCreate); err != nil {
 		return nil, fmt.Errorf("creating folder %q: %w", file.Path, err)
 	}
@@ -513,6 +520,8 @@ func modTime(info fs.FileInfo) time.Time {
 }
 
 func (s *scanJob) handleFile(ctx context.Context, f scanFile) error {
+	defer s.incrementProgress(f)
+
 	var ff File
 	// don't use a transaction to check if new or existing
 	if err := s.withDB(ctx, func(ctx context.Context) error {
@@ -581,7 +590,6 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) {
 	// add this file to the queue to be created later
 	if s.retrying {
 		// if we're retrying and the folder still doesn't exist, then it's a problem
-		s.incrementProgress(f)
 		return nil, fmt.Errorf("parent folder for %q doesn't exist", path)
 	}
@@ -593,7 +601,6 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) {
 	zipFileID, err := s.getZipFileID(ctx, f.zipFile)
 	if err != nil {
-		s.incrementProgress(f)
 		return nil, err
 	}
@@ -601,9 +608,9 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) {
 		baseFile.ZipFileID = zipFileID
 	}
 
-	fp, err := s.calculateFingerprints(f.fs, baseFile, path)
+	const useExisting = false
+	fp, err := s.calculateFingerprints(f.fs, baseFile, path, useExisting)
 	if err != nil {
-		s.incrementProgress(f)
 		return nil, err
 	}
@@ -611,7 +618,6 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) {
 	file, err := s.fireDecorators(ctx, f.fs, baseFile)
 	if err != nil {
-		s.incrementProgress(f)
 		return nil, err
 	}
@@ -619,7 +625,6 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) {
 	// do this after decoration so that missing fields can be populated
 	renamed, err := s.handleRename(ctx, file, fp)
 	if err != nil {
-		s.incrementProgress(f)
 		return nil, err
 	}
@@ -667,14 +672,17 @@ func (s *scanJob) fireHandlers(ctx context.Context, f File) error {
 	return nil
 }
 
-func (s *scanJob) calculateFingerprints(fs FS, f *BaseFile, path string) ([]Fingerprint, error) {
-	logger.Infof("Calculating fingerprints for %s ...", path)
+func (s *scanJob) calculateFingerprints(fs FS, f *BaseFile, path string, useExisting bool) (Fingerprints, error) {
+	// only log if we're (re)calculating fingerprints
+	if !useExisting {
+		logger.Infof("Calculating fingerprints for %s ...", path)
+	}
 
 	// calculate primary fingerprint for the file
 	fp, err := s.FingerprintCalculator.CalculateFingerprints(f, &fsOpener{
 		fs:   fs,
 		name: path,
-	})
+	}, useExisting)
 	if err != nil {
 		return nil, fmt.Errorf("calculating fingerprint for file %q: %w", path, err)
 	}
@@ -844,6 +852,30 @@ func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing F
 	return existing, nil
 }
 
+func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existing File) (File, error) {
+	const useExisting = true
+	fp, err := s.calculateFingerprints(f.fs, existing.Base(), f.Path, useExisting)
+	if err != nil {
+		return nil, err
+	}
+
+	if !fp.Equals(existing.Base().Fingerprints) {
+		existing.SetFingerprints(fp)
+
+		if err := s.withTxn(ctx, func(ctx context.Context) error {
+			if err := s.Repository.Update(ctx, existing); err != nil {
+				return fmt.Errorf("updating file %q: %w", f.Path, err)
+			}
+
+			return nil
+		}); err != nil {
+			return nil, err
+		}
+	}
+
+	return existing, nil
+}
+
 // returns a file only if it was updated
 func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File) (File, error) {
 	base := existing.Base()
@@ -853,16 +885,23 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File)
 	updated := !fileModTime.Equal(base.ModTime)
 
 	if !updated {
+		var err error
+
 		isMissingMetdata := s.isMissingMetadata(existing)
 		// set missing information
 		if isMissingMetdata {
-			var err error
 			existing, err = s.setMissingMetadata(ctx, f, existing)
 			if err != nil {
 				return nil, err
 			}
 		}
 
+		// calculate missing fingerprints
+		existing, err = s.setMissingFingerprints(ctx, f, existing)
+		if err != nil {
+			return nil, err
+		}
+
 		handlerRequired := false
 		if err := s.withDB(ctx, func(ctx context.Context) error {
 			// check if the handler needs to be run
@@ -873,8 +912,6 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File)
 		}
 
 		if !handlerRequired {
-			s.incrementProgress(f)
-
 			// if this file is a zip file, then we need to rescan the contents
 			// as well. We do this by returning the file, instead of nil.
 			if isMissingMetdata {
@@ -889,7 +926,6 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File)
 			return err
 		}
 
-		s.incrementProgress(f)
 		return nil
 	}); err != nil {
 		return nil, err
 	}
@@ -910,9 +946,9 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File)
 	base.UpdatedAt = time.Now()
 
 	// calculate and update fingerprints for the file
-	fp, err := s.calculateFingerprints(f.fs, base, path)
+	const useExisting = false
+	fp, err := s.calculateFingerprints(f.fs, base, path, useExisting)
 	if err != nil {
-		s.incrementProgress(f)
 		return nil, err
 	}
@@ -920,7 +956,6 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File)
 	existing, err = s.fireDecorators(ctx, f.fs, existing)
 	if err != nil {
-		s.incrementProgress(f)
 		return nil, err
 	}

View File

@@ -1136,6 +1136,8 @@ func (qb *GalleryStore) tagsRepository() *joinRepository {
 			idColumn: galleryIDColumn,
 		},
 		fkColumn: "tag_id",
+		foreignTable: tagTable,
+		orderBy: "tags.name ASC",
 	}
 }

View File

@@ -82,7 +82,7 @@ func Test_galleryQueryBuilder_Create(t *testing.T) {
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
 				SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}),
-				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}),
 				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
 			},
 			false,
@@ -103,7 +103,7 @@ func Test_galleryQueryBuilder_Create(t *testing.T) {
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
 				SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}),
-				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}),
 				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
 			},
 			false,
@@ -235,7 +235,7 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
 				SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}),
-				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}),
 				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
 			},
 			false,

View File

@@ -1077,6 +1077,8 @@ func (qb *ImageStore) tagsRepository() *joinRepository {
 			idColumn: imageIDColumn,
 		},
 		fkColumn: tagIDColumn,
+		foreignTable: tagTable,
+		orderBy: "tags.name ASC",
 	}
 }

View File

@@ -78,7 +78,7 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
 				GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
-				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithImage]}),
 				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}),
 			},
 			false,
@@ -99,7 +99,7 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
 				GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
-				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithImage]}),
 				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}),
 			},
 			false,
@@ -231,7 +231,7 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
 				GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
-				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithImage]}),
 				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}),
 			},
 			false,
@@ -432,7 +432,7 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) {
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
 				GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
-				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithImage]}),
 				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}),
 			},
 			false,
@@ -537,9 +537,12 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Image{
-				TagIDs: models.NewRelatedIDs(append(indexesToIDs(tagIDs, imageTags[imageIdxWithTwoTags]),
-					tagIDs[tagIdx1WithDupName],
-					tagIDs[tagIdx1WithGallery],
+				TagIDs: models.NewRelatedIDs(append(
+					[]int{
+						tagIDs[tagIdx1WithGallery],
+						tagIDs[tagIdx1WithDupName],
+					},
+					indexesToIDs(tagIDs, imageTags[imageIdxWithTwoTags])...,
 				)),
 			},
 			false,
@@ -587,8 +590,9 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Image{
-				TagIDs: models.NewRelatedIDs(append(indexesToIDs(tagIDs, imageTags[imageIdxWithTwoTags]),
-					tagIDs[tagIdx1WithGallery],
+				TagIDs: models.NewRelatedIDs(append(
+					[]int{tagIDs[tagIdx1WithGallery]},
+					indexesToIDs(tagIDs, imageTags[imageIdxWithTwoTags])...,
 				)),
 			},
 			false,

View File

@@ -303,10 +303,24 @@ type joiner interface {
 type joinRepository struct {
 	repository
 	fkColumn string
+
+	// fields for ordering
+	foreignTable string
+	orderBy      string
 }
 
 func (r *joinRepository) getIDs(ctx context.Context, id int) ([]int, error) {
-	query := fmt.Sprintf(`SELECT %s as id from %s WHERE %s = ?`, r.fkColumn, r.tableName, r.idColumn)
+	var joinStr string
+	if r.foreignTable != "" {
+		joinStr = fmt.Sprintf(" INNER JOIN %s ON %[1]s.id = %s.%s", r.foreignTable, r.tableName, r.fkColumn)
+	}
+
+	query := fmt.Sprintf(`SELECT %[2]s.%[1]s as id from %s%s WHERE %s = ?`, r.fkColumn, r.tableName, joinStr, r.idColumn)
+	if r.orderBy != "" {
+		query += " ORDER BY " + r.orderBy
+	}
+
 	return r.runIdsQuery(ctx, query, []interface{}{id})
 }
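
With foreignTable and orderBy set (as the tags repositories in this commit do), getIDs now joins the foreign table and orders the result. A standalone sketch of the query string this builds, with assumed table and column names standing in for the constants defined in pkg/sqlite:

package main

import "fmt"

func main() {
	// assumed example values; the real constants live in the sqlite package
	fkColumn := "tag_id"
	tableName := "scenes_tags"
	idColumn := "scene_id"
	foreignTable := "tags"
	orderBy := "tags.name ASC"

	// same string assembly as getIDs above
	var joinStr string
	if foreignTable != "" {
		joinStr = fmt.Sprintf(" INNER JOIN %s ON %[1]s.id = %s.%s", foreignTable, tableName, fkColumn)
	}

	query := fmt.Sprintf(`SELECT %[2]s.%[1]s as id from %s%s WHERE %s = ?`, fkColumn, tableName, joinStr, idColumn)
	if orderBy != "" {
		query += " ORDER BY " + orderBy
	}

	fmt.Println(query)
	// SELECT scenes_tags.tag_id as id from scenes_tags INNER JOIN tags ON tags.id = scenes_tags.tag_id WHERE scene_id = ? ORDER BY tags.name ASC
}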

View File

@@ -1470,6 +1470,8 @@ func (qb *SceneStore) tagsRepository() *joinRepository {
 			idColumn: sceneIDColumn,
 		},
 		fkColumn: tagIDColumn,
+		foreignTable: tagTable,
+		orderBy: "tags.name ASC",
 	}
 }

View File

@@ -110,7 +110,7 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
 				GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
-				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}),
 				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
 				Movies: models.NewRelatedMovies([]models.MoviesScenes{
 					{
@@ -152,7 +152,7 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
 				GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
-				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}),
 				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
 				Movies: models.NewRelatedMovies([]models.MoviesScenes{
 					{
@@ -330,7 +330,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
 				GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
-				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}),
 				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
 				Movies: models.NewRelatedMovies([]models.MoviesScenes{
 					{
@@ -588,7 +588,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
 				CreatedAt: createdAt,
 				UpdatedAt: updatedAt,
 				GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
-				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+				TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}),
 				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
 				Movies: models.NewRelatedMovies([]models.MoviesScenes{
 					{
@@ -749,9 +749,12 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Scene{
-				TagIDs: models.NewRelatedIDs(append(indexesToIDs(tagIDs, sceneTags[sceneIdxWithTwoTags]),
-					tagIDs[tagIdx1WithDupName],
-					tagIDs[tagIdx1WithGallery],
+				TagIDs: models.NewRelatedIDs(append(
+					[]int{
+						tagIDs[tagIdx1WithGallery],
+						tagIDs[tagIdx1WithDupName],
+					},
+					indexesToIDs(tagIDs, sceneTags[sceneIdxWithTwoTags])...,
 				)),
 			},
 			false,
@@ -854,8 +857,9 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
 				},
 			},
 			models.Scene{
-				TagIDs: models.NewRelatedIDs(append(indexesToIDs(tagIDs, sceneTags[sceneIdxWithTwoTags]),
-					tagIDs[tagIdx1WithGallery],
+				TagIDs: models.NewRelatedIDs(append(
+					[]int{tagIDs[tagIdx1WithGallery]},
+					indexesToIDs(tagIDs, sceneTags[sceneIdxWithTwoTags])...,
 				)),
 			},
 			false,