SQLite model refactoring, part 2 (#3839)

* Treat empty image input as null
* Add validation to models.Date
* Allow zero dates in database
* Make scene_markers.scene_id non-nullable
* Drop scraped_items table
* Remove movie/studio checksum
* Add migration notes
---------
Co-authored-by: WithoutPants <53250216+WithoutPants@users.noreply.github.com>
DingDongSoLong4 authored on 2023-07-13 04:15:02 +02:00, committed by GitHub
parent 67d4f9729a
commit 5580525c2d
74 changed files with 520 additions and 807 deletions

View File

@@ -1,6 +1,5 @@
fragment MovieData on Movie {
id
checksum
name
aliases
duration

View File

@@ -1,6 +1,5 @@
fragment StudioData on Studio {
id
checksum
name
url
parent_studio {

View File

@@ -1,7 +1,7 @@
type Movie {
id: ID!
checksum: String!
name: String!
checksum: String! @deprecated(reason: "MD5 hash of name, use name directly")
aliases: String
"""Duration in seconds"""
duration: Int

View File

@@ -1,7 +1,7 @@
type Studio {
id: ID!
checksum: String!
name: String!
checksum: String! @deprecated(reason: "MD5 hash of name, use name directly")
url: String
parent_studio: Studio
child_studios: [Studio!]!

View File

@@ -107,28 +107,36 @@ func (t changesetTranslator) optionalString(value *string, field string) models.
return models.NewOptionalStringPtr(value)
}
func (t changesetTranslator) optionalDate(value *string, field string) models.OptionalDate {
func (t changesetTranslator) optionalDate(value *string, field string) (models.OptionalDate, error) {
if !t.hasField(field) {
return models.OptionalDate{}
return models.OptionalDate{}, nil
}
if value == nil || *value == "" {
return models.OptionalDate{
Set: true,
Null: true,
}
}, nil
}
return models.NewOptionalDate(models.NewDate(*value))
date, err := models.ParseDate(*value)
if err != nil {
return models.OptionalDate{}, err
}
return models.NewOptionalDate(date), nil
}
func (t changesetTranslator) datePtr(value *string, field string) *models.Date {
if value == nil {
return nil
func (t changesetTranslator) datePtr(value *string, field string) (*models.Date, error) {
if value == nil || *value == "" {
return nil, nil
}
d := models.NewDate(*value)
return &d
date, err := models.ParseDate(*value)
if err != nil {
return nil, err
}
return &date, nil
}
func (t changesetTranslator) intPtrFromString(value *string, field string) (*int, error) {

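The new translator signatures above surface parse failures instead of silently storing zero dates. A minimal self-contained sketch of the pattern, using simplified stand-ins for models.Date, models.OptionalDate and models.ParseDate rather than the real pkg/models types:

package main

import (
	"fmt"
	"time"
)

// Simplified stand-ins for models.Date and models.OptionalDate.
type Date struct{ time.Time }

type OptionalDate struct {
	Value Date
	Null  bool
	Set   bool
}

func parseDate(s string) (Date, error) {
	t, err := time.Parse("2006-01-02", s)
	if err != nil {
		return Date{}, err
	}
	return Date{t}, nil
}

// optionalDate mirrors the changed translator method: an absent field is
// "not set", an empty string becomes an explicit null, and anything else
// must parse or the whole mutation fails with an error.
func optionalDate(value *string, hasField bool) (OptionalDate, error) {
	if !hasField {
		return OptionalDate{}, nil
	}
	if value == nil || *value == "" {
		return OptionalDate{Set: true, Null: true}, nil
	}
	d, err := parseDate(*value)
	if err != nil {
		return OptionalDate{}, err
	}
	return OptionalDate{Value: d, Set: true}, nil
}

func main() {
	bad := "not-a-date"
	if _, err := optionalDate(&bad, true); err != nil {
		fmt.Println("rejected:", err) // invalid input now surfaces as an error
	}
}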
View File

@@ -5,9 +5,15 @@ import (
"github.com/stashapp/stash/internal/api/loaders"
"github.com/stashapp/stash/internal/api/urlbuilders"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/models"
)
func (r *movieResolver) Checksum(ctx context.Context, obj *models.Movie) (string, error) {
// generate checksum from movie name
return md5.FromString(obj.Name), nil
}
func (r *movieResolver) Date(ctx context.Context, obj *models.Movie) (*string, error) {
if obj.Date != nil {
result := obj.Date.String()

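With the checksum column dropped, the deprecated GraphQL field is computed on demand from the name, as in the resolver above. A small sketch of an equivalent helper using only the standard library (the project itself uses its pkg/hash/md5 wrapper):

package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
)

// checksumFromName derives the legacy checksum the same way the new
// Movie/Studio resolvers do: an MD5 hex digest of the name.
func checksumFromName(name string) string {
	sum := md5.Sum([]byte(name))
	return hex.EncodeToString(sum[:])
}

func main() {
	// The studio test in this diff pairs the name "name" with
	// b068931cc450442b63f5b3d276ea4297 — the same digest this produces.
	fmt.Println(checksumFromName("name"))
}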
View File

@@ -6,6 +6,7 @@ import (
"github.com/stashapp/stash/internal/api/loaders"
"github.com/stashapp/stash/internal/api/urlbuilders"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/movie"
@@ -13,6 +14,11 @@ import (
"github.com/stashapp/stash/pkg/scene"
)
func (r *studioResolver) Checksum(ctx context.Context, obj *models.Studio) (string, error) {
// generate checksum from studio name
return md5.FromString(obj.Name), nil
}
func (r *studioResolver) ImagePath(ctx context.Context, obj *models.Studio) (*string, error) {
var hasImage bool
if err := r.withReadTxn(ctx, func(ctx context.Context) error {

View File

@@ -66,9 +66,9 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat
UpdatedAt: currentTime,
}
if input.Date != nil {
d := models.NewDate(*input.Date)
newGallery.Date = &d
newGallery.Date, err = translator.datePtr(input.Date, "date")
if err != nil {
return nil, fmt.Errorf("converting date: %w", err)
}
newGallery.StudioID, err = translator.intPtrFromString(input.StudioID, "studio_id")
if err != nil {
@@ -182,7 +182,10 @@ func (r *mutationResolver) galleryUpdate(ctx context.Context, input models.Galle
updatedGallery.Details = translator.optionalString(input.Details, "details")
updatedGallery.URL = translator.optionalString(input.URL, "url")
updatedGallery.Date = translator.optionalDate(input.Date, "date")
updatedGallery.Date, err = translator.optionalDate(input.Date, "date")
if err != nil {
return nil, fmt.Errorf("converting date: %w", err)
}
updatedGallery.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100)
updatedGallery.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
@@ -262,7 +265,10 @@ func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input BulkGall
updatedGallery.Details = translator.optionalString(input.Details, "details")
updatedGallery.URL = translator.optionalString(input.URL, "url")
updatedGallery.Date = translator.optionalDate(input.Date, "date")
updatedGallery.Date, err = translator.optionalDate(input.Date, "date")
if err != nil {
return nil, fmt.Errorf("converting date: %w", err)
}
updatedGallery.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100)
updatedGallery.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {

View File

@@ -107,7 +107,10 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp
updatedImage.Title = translator.optionalString(input.Title, "title")
updatedImage.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100)
updatedImage.URL = translator.optionalString(input.URL, "url")
updatedImage.Date = translator.optionalDate(input.Date, "date")
updatedImage.Date, err = translator.optionalDate(input.Date, "date")
if err != nil {
return nil, fmt.Errorf("converting date: %w", err)
}
updatedImage.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
@@ -206,7 +209,10 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU
updatedImage.Title = translator.optionalString(input.Title, "title")
updatedImage.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100)
updatedImage.URL = translator.optionalString(input.URL, "url")
updatedImage.Date = translator.optionalDate(input.Date, "date")
updatedImage.Date, err = translator.optionalDate(input.Date, "date")
if err != nil {
return nil, fmt.Errorf("converting date: %w", err)
}
updatedImage.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)

View File

@@ -6,7 +6,6 @@ import (
"strconv"
"time"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
@@ -30,18 +29,15 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input MovieCreateInp
}
// generate checksum from movie name rather than image
checksum := md5.FromString(input.Name)
// Populate a new movie from the input
currentTime := time.Now()
newMovie := models.Movie{
Checksum: checksum,
Name: input.Name,
CreatedAt: currentTime,
UpdatedAt: currentTime,
Aliases: translator.string(input.Aliases, "aliases"),
Duration: input.Duration,
Date: translator.datePtr(input.Date, "date"),
Rating: translator.ratingConversionInt(input.Rating, input.Rating100),
Director: translator.string(input.Director, "director"),
Synopsis: translator.string(input.Synopsis, "synopsis"),
@@ -50,6 +46,10 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input MovieCreateInp
var err error
newMovie.Date, err = translator.datePtr(input.Date, "date")
if err != nil {
return nil, fmt.Errorf("converting date: %w", err)
}
newMovie.StudioID, err = translator.intPtrFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
@@ -123,16 +123,13 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input MovieUpdateInp
// Populate movie from the input
updatedMovie := models.NewMoviePartial()
if input.Name != nil {
// generate checksum from movie name rather than image
checksum := md5.FromString(*input.Name)
updatedMovie.Name = models.NewOptionalString(*input.Name)
updatedMovie.Checksum = models.NewOptionalString(checksum)
}
updatedMovie.Name = translator.optionalString(input.Name, "name")
updatedMovie.Aliases = translator.optionalString(input.Aliases, "aliases")
updatedMovie.Duration = translator.optionalInt(input.Duration, "duration")
updatedMovie.Date = translator.optionalDate(input.Date, "date")
updatedMovie.Date, err = translator.optionalDate(input.Date, "date")
if err != nil {
return nil, fmt.Errorf("converting date: %w", err)
}
updatedMovie.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100)
updatedMovie.Director = translator.optionalString(input.Director, "director")
updatedMovie.Synopsis = translator.optionalString(input.Synopsis, "synopsis")

View File

@@ -75,13 +75,13 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input PerformerC
StashIDs: models.NewRelatedStashIDs(stashIDPtrSliceToSlice(input.StashIds)),
}
if input.Birthdate != nil {
d := models.NewDate(*input.Birthdate)
newPerformer.Birthdate = &d
newPerformer.Birthdate, err = translator.datePtr(input.Birthdate, "birthdate")
if err != nil {
return nil, fmt.Errorf("converting birthdate: %w", err)
}
if input.DeathDate != nil {
d := models.NewDate(*input.DeathDate)
newPerformer.DeathDate = &d
newPerformer.DeathDate, err = translator.datePtr(input.DeathDate, "death_date")
if err != nil {
return nil, fmt.Errorf("converting death date: %w", err)
}
// prefer height_cm over height
@@ -157,7 +157,10 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input PerformerU
updatedPerformer.Disambiguation = translator.optionalString(input.Disambiguation, "disambiguation")
updatedPerformer.URL = translator.optionalString(input.URL, "url")
updatedPerformer.Gender = translator.optionalString((*string)(input.Gender), "gender")
updatedPerformer.Birthdate = translator.optionalDate(input.Birthdate, "birthdate")
updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate")
if err != nil {
return nil, fmt.Errorf("converting birthdate: %w", err)
}
updatedPerformer.Ethnicity = translator.optionalString(input.Ethnicity, "ethnicity")
updatedPerformer.Country = translator.optionalString(input.Country, "country")
updatedPerformer.EyeColor = translator.optionalString(input.EyeColor, "eye_color")
@@ -184,7 +187,10 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input PerformerU
updatedPerformer.Favorite = translator.optionalBool(input.Favorite, "favorite")
updatedPerformer.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100)
updatedPerformer.Details = translator.optionalString(input.Details, "details")
updatedPerformer.DeathDate = translator.optionalDate(input.DeathDate, "death_date")
updatedPerformer.DeathDate, err = translator.optionalDate(input.DeathDate, "death_date")
if err != nil {
return nil, fmt.Errorf("converting death date: %w", err)
}
updatedPerformer.HairColor = translator.optionalString(input.HairColor, "hair_color")
updatedPerformer.Weight = translator.optionalInt(input.Weight, "weight")
updatedPerformer.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag")
@@ -282,7 +288,10 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
updatedPerformer.Disambiguation = translator.optionalString(input.Disambiguation, "disambiguation")
updatedPerformer.URL = translator.optionalString(input.URL, "url")
updatedPerformer.Gender = translator.optionalString((*string)(input.Gender), "gender")
updatedPerformer.Birthdate = translator.optionalDate(input.Birthdate, "birthdate")
updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate")
if err != nil {
return nil, fmt.Errorf("converting birthdate: %w", err)
}
updatedPerformer.Ethnicity = translator.optionalString(input.Ethnicity, "ethnicity")
updatedPerformer.Country = translator.optionalString(input.Country, "country")
updatedPerformer.EyeColor = translator.optionalString(input.EyeColor, "eye_color")
@@ -309,7 +318,10 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
updatedPerformer.Favorite = translator.optionalBool(input.Favorite, "favorite")
updatedPerformer.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100)
updatedPerformer.Details = translator.optionalString(input.Details, "details")
updatedPerformer.DeathDate = translator.optionalDate(input.DeathDate, "death_date")
updatedPerformer.DeathDate, err = translator.optionalDate(input.DeathDate, "death_date")
if err != nil {
return nil, fmt.Errorf("converting death date: %w", err)
}
updatedPerformer.HairColor = translator.optionalString(input.HairColor, "hair_color")
updatedPerformer.Weight = translator.optionalInt(input.Weight, "weight")
updatedPerformer.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag")

View File

@@ -67,7 +67,6 @@ func (r *mutationResolver) SceneCreate(ctx context.Context, input SceneCreateInp
Code: translator.string(input.Code, "code"),
Details: translator.string(input.Details, "details"),
Director: translator.string(input.Director, "director"),
Date: translator.datePtr(input.Date, "date"),
Rating: translator.ratingConversionInt(input.Rating, input.Rating100),
Organized: translator.bool(input.Organized, "organized"),
PerformerIDs: models.NewRelatedIDs(performerIDs),
@@ -77,6 +76,10 @@ func (r *mutationResolver) SceneCreate(ctx context.Context, input SceneCreateInp
StashIDs: models.NewRelatedStashIDs(stashIDPtrSliceToSlice(input.StashIds)),
}
newScene.Date, err = translator.datePtr(input.Date, "date")
if err != nil {
return nil, fmt.Errorf("converting date: %w", err)
}
newScene.StudioID, err = translator.intPtrFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
@@ -89,7 +92,7 @@ func (r *mutationResolver) SceneCreate(ctx context.Context, input SceneCreateInp
}
var coverImageData []byte
if input.CoverImage != nil && *input.CoverImage != "" {
if input.CoverImage != nil {
var err error
coverImageData, err = utils.ProcessImageInput(ctx, *input.CoverImage)
if err != nil {
@@ -169,16 +172,21 @@ func (r *mutationResolver) ScenesUpdate(ctx context.Context, input []*models.Sce
func scenePartialFromInput(input models.SceneUpdateInput, translator changesetTranslator) (*models.ScenePartial, error) {
updatedScene := models.NewScenePartial()
var err error
updatedScene.Title = translator.optionalString(input.Title, "title")
updatedScene.Code = translator.optionalString(input.Code, "code")
updatedScene.Details = translator.optionalString(input.Details, "details")
updatedScene.Director = translator.optionalString(input.Director, "director")
updatedScene.Date = translator.optionalDate(input.Date, "date")
updatedScene.Date, err = translator.optionalDate(input.Date, "date")
if err != nil {
return nil, fmt.Errorf("converting date: %w", err)
}
updatedScene.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100)
updatedScene.OCounter = translator.optionalInt(input.OCounter, "o_counter")
updatedScene.PlayCount = translator.optionalInt(input.PlayCount, "play_count")
updatedScene.PlayDuration = translator.optionalFloat64(input.PlayDuration, "play_duration")
var err error
updatedScene.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
@@ -355,7 +363,10 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU
updatedScene.Code = translator.optionalString(input.Code, "code")
updatedScene.Details = translator.optionalString(input.Details, "details")
updatedScene.Director = translator.optionalString(input.Director, "director")
updatedScene.Date = translator.optionalDate(input.Date, "date")
updatedScene.Date, err = translator.optionalDate(input.Date, "date")
if err != nil {
return nil, fmt.Errorf("converting date: %w", err)
}
updatedScene.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100)
updatedScene.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
@@ -602,7 +613,7 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput
}
var coverImageData []byte
if input.Values.CoverImage != nil && *input.Values.CoverImage != "" {
if input.Values.CoverImage != nil {
var err error
coverImageData, err = utils.ProcessImageInput(ctx, *input.Values.CoverImage)
if err != nil {

View File

@@ -6,13 +6,11 @@ import (
"strconv"
"time"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
"github.com/stashapp/stash/pkg/studio"
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
"github.com/stashapp/stash/pkg/studio"
"github.com/stashapp/stash/pkg/utils"
)
@@ -32,13 +30,9 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input StudioCreateI
inputMap: getUpdateInputMap(ctx),
}
// generate checksum from studio name rather than image
checksum := md5.FromString(input.Name)
// Populate a new studio from the input
currentTime := time.Now()
newStudio := models.Studio{
Checksum: checksum,
Name: input.Name,
CreatedAt: currentTime,
UpdatedAt: currentTime,
@@ -57,7 +51,7 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input StudioCreateI
// Process the base 64 encoded image string
var imageData []byte
if input.Image != nil && *input.Image != "" {
if input.Image != nil {
imageData, err = utils.ProcessImageInput(ctx, *input.Image)
if err != nil {
return nil, err
@@ -120,13 +114,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input StudioUpdateI
// Populate studio from the input
updatedStudio := models.NewStudioPartial()
if input.Name != nil {
// generate checksum from studio name rather than image
checksum := md5.FromString(*input.Name)
updatedStudio.Name = models.NewOptionalString(*input.Name)
updatedStudio.Checksum = models.NewOptionalString(checksum)
}
updatedStudio.Name = translator.optionalString(input.Name, "name")
updatedStudio.URL = translator.optionalString(input.URL, "url")
updatedStudio.Details = translator.optionalString(input.Details, "details")
updatedStudio.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100)

View File

@@ -99,7 +99,6 @@ func createPerformer(ctx context.Context, pqb models.PerformerWriter) error {
func createStudio(ctx context.Context, qb models.StudioWriter, name string) (*models.Studio, error) {
// create the studio
studio := models.Studio{
Checksum: name,
Name: name,
}

View File

@@ -370,8 +370,10 @@ func getScenePartial(scene *models.Scene, scraped *scraper.ScrapedScene, fieldOp
}
if scraped.Date != nil && (scene.Date == nil || scene.Date.String() != *scraped.Date) {
if shouldSetSingleValueField(fieldOptions["date"], scene.Date != nil) {
d := models.NewDate(*scraped.Date)
partial.Date = models.NewOptionalDate(d)
d, err := models.ParseDate(*scraped.Date)
if err == nil {
partial.Date = models.NewOptionalDate(d)
}
}
}
if scraped.Details != nil && (scene.Details != *scraped.Details) {

View File

@@ -346,8 +346,8 @@ func Test_getScenePartial(t *testing.T) {
scrapedURL = "scrapedURL"
)
originalDateObj := models.NewDate(originalDate)
scrapedDateObj := models.NewDate(scrapedDate)
originalDateObj, _ := models.ParseDate(originalDate)
scrapedDateObj, _ := models.ParseDate(scrapedDate)
originalScene := &models.Scene{
Title: originalTitle,

View File

@@ -80,12 +80,16 @@ func scrapedToPerformerInput(performer *models.ScrapedPerformer) models.Performe
ret.Disambiguation = *performer.Disambiguation
}
if performer.Birthdate != nil {
d := models.NewDate(*performer.Birthdate)
ret.Birthdate = &d
d, err := models.ParseDate(*performer.Birthdate)
if err == nil {
ret.Birthdate = &d
}
}
if performer.DeathDate != nil {
d := models.NewDate(*performer.DeathDate)
ret.DeathDate = &d
d, err := models.ParseDate(*performer.DeathDate)
if err == nil {
ret.DeathDate = &d
}
}
if performer.Gender != nil {
v := models.GenderEnum(*performer.Gender)

View File

@@ -244,13 +244,24 @@ func Test_scrapedToPerformerInput(t *testing.T) {
return &ret
}
dateToDatePtr := func(d models.Date) *models.Date {
dateFromInt := func(i int) *models.Date {
t := time.Date(2001, 1, i, 0, 0, 0, 0, time.UTC)
d := models.Date{Time: t}
return &d
}
dateStrFromInt := func(i int) *string {
s := dateFromInt(i).String()
return &s
}
genderPtr := func(g models.GenderEnum) *models.GenderEnum {
genderFromInt := func(i int) *models.GenderEnum {
g := models.AllGenderEnum[i%len(models.AllGenderEnum)]
return &g
}
genderStrFromInt := func(i int) *string {
s := genderFromInt(i).String()
return &s
}
tests := []struct {
name string
@@ -262,9 +273,9 @@ func Test_scrapedToPerformerInput(t *testing.T) {
&models.ScrapedPerformer{
Name: &name,
Disambiguation: nextVal(),
Birthdate: nextVal(),
DeathDate: nextVal(),
Gender: nextVal(),
Birthdate: dateStrFromInt(*nextIntVal()),
DeathDate: dateStrFromInt(*nextIntVal()),
Gender: genderStrFromInt(*nextIntVal()),
Ethnicity: nextVal(),
Country: nextVal(),
EyeColor: nextVal(),
@@ -285,9 +296,9 @@ func Test_scrapedToPerformerInput(t *testing.T) {
models.Performer{
Name: name,
Disambiguation: *nextVal(),
Birthdate: dateToDatePtr(models.NewDate(*nextVal())),
DeathDate: dateToDatePtr(models.NewDate(*nextVal())),
Gender: genderPtr(models.GenderEnum(*nextVal())),
Birthdate: dateFromInt(*nextIntVal()),
DeathDate: dateFromInt(*nextIntVal()),
Gender: genderFromInt(*nextIntVal()),
Ethnicity: *nextVal(),
Country: *nextVal(),
EyeColor: *nextVal(),

View File

@@ -246,7 +246,7 @@ func (g sceneRelationships) stashIDs(ctx context.Context) ([]models.StashID, err
func (g sceneRelationships) cover(ctx context.Context) ([]byte, error) {
scraped := g.result.result.Image
if scraped == nil {
if scraped == nil || *scraped == "" {
return nil, nil
}
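This hunk treats an empty scraped image as no image, and the scene/studio mutations above drop their own empty-string guards, consistent with the commit's "treat empty image input as null" change — presumably the shared image-input helper now maps an empty string to nil data. A rough sketch of that behaviour with a hypothetical processImageInput stand-in (the real utils.ProcessImageInput is not shown in this diff):

package main

import (
	"encoding/base64"
	"fmt"
	"strings"
)

// processImageInput is a hypothetical stand-in for the project's image
// input helper: an empty string yields nil data rather than an error.
func processImageInput(input string) ([]byte, error) {
	if input == "" {
		return nil, nil
	}
	// Assume a data URL of the form "data:image/png;base64,....".
	idx := strings.Index(input, "base64,")
	if idx == -1 {
		return nil, fmt.Errorf("invalid image input")
	}
	return base64.StdEncoding.DecodeString(input[idx+len("base64,"):])
}

func main() {
	data, err := processImageInput("")
	fmt.Println(data == nil, err) // true <nil> — empty input means "no image"
}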

View File

@@ -5,7 +5,6 @@ import (
"fmt"
"time"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
@@ -54,7 +53,6 @@ func scrapedToStudioInput(studio *models.ScrapedStudio) models.Studio {
currentTime := time.Now()
ret := models.Studio{
Name: studio.Name,
Checksum: md5.FromString(studio.Name),
CreatedAt: currentTime,
UpdatedAt: currentTime,
}

View File

@@ -117,7 +117,6 @@ func Test_createMissingStudio(t *testing.T) {
func Test_scrapedToStudioInput(t *testing.T) {
const name = "name"
const md5 = "b068931cc450442b63f5b3d276ea4297"
url := "url"
tests := []struct {
@@ -132,9 +131,8 @@ func Test_scrapedToStudioInput(t *testing.T) {
URL: &url,
},
models.Studio{
Name: name,
Checksum: md5,
URL: url,
Name: name,
URL: url,
},
},
{
@@ -143,8 +141,7 @@ func Test_scrapedToStudioInput(t *testing.T) {
Name: name,
},
models.Studio{
Name: name,
Checksum: md5,
Name: name,
},
},
}

View File

@@ -262,10 +262,11 @@ func validateRating100(rating100 int) bool {
return rating100 >= 1 && rating100 <= 100
}
func validateDate(dateStr string) bool {
// returns nil if invalid
func parseDate(dateStr string) *models.Date {
splits := strings.Split(dateStr, "-")
if len(splits) != 3 {
return false
return nil
}
year, _ := strconv.Atoi(splits[0])
@@ -274,19 +275,23 @@ func validateDate(dateStr string) bool {
// assume year must be between 1900 and 2100
if year < 1900 || year > 2100 {
return false
return nil
}
if month < 1 || month > 12 {
return false
return nil
}
// not checking individual months to ensure date is in the correct range
if d < 1 || d > 31 {
return false
return nil
}
return true
ret, err := models.ParseDate(dateStr)
if err != nil {
return nil
}
return &ret
}
func (h *sceneHolder) setDate(field *parserField, value string) {
@@ -315,9 +320,9 @@ func (h *sceneHolder) setDate(field *parserField, value string) {
// ensure the date is valid
// only set if new value is different from the old
if validateDate(fullDate) && h.scene.Date != nil && h.scene.Date.String() != fullDate {
d := models.NewDate(fullDate)
h.result.Date = &d
newDate := parseDate(fullDate)
if newDate != nil && h.scene.Date != nil && *h.scene.Date != *newDate {
h.result.Date = newDate
}
}
@@ -346,10 +351,7 @@ func (h *sceneHolder) setField(field parserField, value interface{}) {
v := value.(string)
h.result.Title = v
case "date":
if validateDate(value.(string)) {
d := models.NewDate(value.(string))
h.result.Date = &d
}
h.result.Date = parseDate(value.(string))
case "rating":
rating, _ := strconv.Atoi(value.(string))
if validateRating(rating) {

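The filename parser now routes parsing through models.ParseDate while keeping its own coarse range checks. A rough standalone sketch of that two-stage validation, assuming the same 1900-2100 year window and a plain YYYY-MM-DD layout:

package main

import (
	"fmt"
	"strconv"
	"strings"
	"time"
)

// parseDate returns nil when the candidate string is not a plausible
// YYYY-MM-DD date, mirroring the parser helper in the hunk above.
func parseDate(dateStr string) *time.Time {
	splits := strings.Split(dateStr, "-")
	if len(splits) != 3 {
		return nil
	}
	year, _ := strconv.Atoi(splits[0])
	month, _ := strconv.Atoi(splits[1])
	day, _ := strconv.Atoi(splits[2])

	// coarse range checks before the real parse
	if year < 1900 || year > 2100 || month < 1 || month > 12 || day < 1 || day > 31 {
		return nil
	}
	t, err := time.Parse("2006-01-02", dateStr)
	if err != nil {
		return nil
	}
	return &t
}

func main() {
	fmt.Println(parseDate("2003-02-01") != nil) // true
	fmt.Println(parseDate("2003-02-30") != nil) // false: rejected by time.Parse
	fmt.Println(parseDate("1800-01-01") != nil) // false: outside the assumed year window
}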
View File

@@ -11,14 +11,6 @@ type jsonUtils struct {
json paths.JSONPaths
}
func (jp *jsonUtils) getScraped() ([]jsonschema.ScrapedItem, error) {
return jsonschema.LoadScrapedFile(jp.json.ScrapedFile)
}
func (jp *jsonUtils) saveScaped(scraped []jsonschema.ScrapedItem) error {
return jsonschema.SaveScrapedFile(jp.json.ScrapedFile, scraped)
}
func (jp *jsonUtils) savePerformer(fn string, performer *jsonschema.Performer) error {
return jsonschema.SavePerformerFile(filepath.Join(jp.json.Performers, fn), performer)
}

View File

@@ -56,7 +56,6 @@ type Repository struct {
Performer models.PerformerReaderWriter
Scene SceneReaderWriter
SceneMarker models.SceneMarkerReaderWriter
ScrapedItem models.ScrapedItemReaderWriter
Studio models.StudioReaderWriter
Tag models.TagReaderWriter
SavedFilter models.SavedFilterReaderWriter
@@ -88,7 +87,6 @@ func sqliteRepository(d *sqlite.Database) Repository {
Performer: txnRepo.Performer,
Scene: d.Scene,
SceneMarker: txnRepo.SceneMarker,
ScrapedItem: txnRepo.ScrapedItem,
Studio: txnRepo.Studio,
Tag: txnRepo.Tag,
SavedFilter: txnRepo.SavedFilter,

View File

@@ -173,10 +173,6 @@ func (t *ExportTask) Start(ctx context.Context, wg *sync.WaitGroup) {
t.ExportStudios(ctx, workerCount, r)
t.ExportTags(ctx, workerCount, r)
if t.full {
t.ExportScrapedItems(ctx, r)
}
return nil
})
if txnErr != nil {
@@ -296,13 +292,13 @@ func (t *ExportTask) populateMovieScenes(ctx context.Context, repo Repository) {
}
if err != nil {
logger.Errorf("[movies] failed to fetch movies: %s", err.Error())
logger.Errorf("[movies] failed to fetch movies: %v", err)
}
for _, m := range movies {
scenes, err := sceneReader.FindByMovieID(ctx, m.ID)
if err != nil {
logger.Errorf("[movies] <%s> failed to fetch scenes for movie: %s", m.Checksum, err.Error())
logger.Errorf("[movies] <%s> failed to fetch scenes for movie: %v", m.Name, err)
continue
}
@@ -978,14 +974,14 @@ func (t *ExportTask) exportStudio(ctx context.Context, wg *sync.WaitGroup, jobCh
newStudioJSON, err := studio.ToJSON(ctx, studioReader, s)
if err != nil {
logger.Errorf("[studios] <%s> error getting studio JSON: %s", s.Checksum, err.Error())
logger.Errorf("[studios] <%s> error getting studio JSON: %v", s.Name, err)
continue
}
fn := newStudioJSON.Filename()
if err := t.json.saveStudio(fn, newStudioJSON); err != nil {
logger.Errorf("[studios] <%s> failed to save json: %s", s.Checksum, err.Error())
logger.Errorf("[studios] <%s> failed to save json: %v", s.Name, err)
}
}
}
@@ -1101,7 +1097,7 @@ func (t *ExportTask) exportMovie(ctx context.Context, wg *sync.WaitGroup, jobCha
newMovieJSON, err := movie.ToJSON(ctx, movieReader, studioReader, m)
if err != nil {
logger.Errorf("[movies] <%s> error getting tag JSON: %s", m.Checksum, err.Error())
logger.Errorf("[movies] <%s> error getting tag JSON: %v", m.Name, err)
continue
}
@@ -1114,90 +1110,7 @@ func (t *ExportTask) exportMovie(ctx context.Context, wg *sync.WaitGroup, jobCha
fn := newMovieJSON.Filename()
if err := t.json.saveMovie(fn, newMovieJSON); err != nil {
logger.Errorf("[movies] <%s> failed to save json: %s", fn, err.Error())
logger.Errorf("[movies] <%s> failed to save json: %v", m.Name, err)
}
}
}
func (t *ExportTask) ExportScrapedItems(ctx context.Context, repo Repository) {
qb := repo.ScrapedItem
sqb := repo.Studio
scrapedItems, err := qb.All(ctx)
if err != nil {
logger.Errorf("[scraped sites] failed to fetch all items: %s", err.Error())
}
logger.Info("[scraped sites] exporting")
scraped := []jsonschema.ScrapedItem{}
for i, scrapedItem := range scrapedItems {
index := i + 1
logger.Progressf("[scraped sites] %d of %d", index, len(scrapedItems))
var studioName string
if scrapedItem.StudioID.Valid {
studio, _ := sqb.Find(ctx, int(scrapedItem.StudioID.Int64))
if studio != nil {
studioName = studio.Name
}
}
newScrapedItemJSON := jsonschema.ScrapedItem{}
if scrapedItem.Title.Valid {
newScrapedItemJSON.Title = scrapedItem.Title.String
}
if scrapedItem.Description.Valid {
newScrapedItemJSON.Description = scrapedItem.Description.String
}
if scrapedItem.URL.Valid {
newScrapedItemJSON.URL = scrapedItem.URL.String
}
if scrapedItem.Date != nil {
newScrapedItemJSON.Date = scrapedItem.Date.String()
}
if scrapedItem.Rating.Valid {
newScrapedItemJSON.Rating = scrapedItem.Rating.String
}
if scrapedItem.Tags.Valid {
newScrapedItemJSON.Tags = scrapedItem.Tags.String
}
if scrapedItem.Models.Valid {
newScrapedItemJSON.Models = scrapedItem.Models.String
}
if scrapedItem.Episode.Valid {
newScrapedItemJSON.Episode = int(scrapedItem.Episode.Int64)
}
if scrapedItem.GalleryFilename.Valid {
newScrapedItemJSON.GalleryFilename = scrapedItem.GalleryFilename.String
}
if scrapedItem.GalleryURL.Valid {
newScrapedItemJSON.GalleryURL = scrapedItem.GalleryURL.String
}
if scrapedItem.VideoFilename.Valid {
newScrapedItemJSON.VideoFilename = scrapedItem.VideoFilename.String
}
if scrapedItem.VideoURL.Valid {
newScrapedItemJSON.VideoURL = scrapedItem.VideoURL.String
}
newScrapedItemJSON.Studio = studioName
updatedAt := json.JSONTime{Time: scrapedItem.UpdatedAt} // TODO keeping ruby format
newScrapedItemJSON.UpdatedAt = updatedAt
scraped = append(scraped, newScrapedItemJSON)
}
scrapedJSON, err := t.json.getScraped()
if err != nil {
logger.Debugf("[scraped sites] error reading json: %s", err.Error())
}
if !jsonschema.CompareJSON(scrapedJSON, scraped) {
if err := t.json.saveScaped(scraped); err != nil {
logger.Errorf("[scraped sites] failed to save json: %s", err.Error())
}
}
logger.Infof("[scraped sites] export complete")
}

View File

@@ -3,13 +3,11 @@ package manager
import (
"archive/zip"
"context"
"database/sql"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"time"
"github.com/99designs/gqlgen/graphql"
"github.com/stashapp/stash/pkg/fsutil"
@@ -17,7 +15,6 @@ import (
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/json"
"github.com/stashapp/stash/pkg/models/jsonschema"
"github.com/stashapp/stash/pkg/models/paths"
"github.com/stashapp/stash/pkg/movie"
@@ -25,7 +22,6 @@ import (
"github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/studio"
"github.com/stashapp/stash/pkg/tag"
"github.com/stashapp/stash/pkg/utils"
)
type ImportTask struct {
@@ -38,7 +34,6 @@ type ImportTask struct {
DuplicateBehaviour ImportDuplicateEnum
MissingRefBehaviour models.ImportMissingRefEnum
scraped []jsonschema.ScrapedItem
fileNamingAlgorithm models.HashAlgorithm
}
@@ -112,12 +107,6 @@ func (t *ImportTask) Start(ctx context.Context) {
t.MissingRefBehaviour = models.ImportMissingRefEnumFail
}
scraped, _ := t.json.getScraped()
if scraped == nil {
logger.Warn("missing scraped json")
}
t.scraped = scraped
if t.Reset {
err := t.txnManager.Reset()
@@ -134,7 +123,6 @@ func (t *ImportTask) Start(ctx context.Context) {
t.ImportFiles(ctx)
t.ImportGalleries(ctx)
t.ImportScrapedItems(ctx)
t.ImportScenes(ctx)
t.ImportImages(ctx)
}
@@ -614,61 +602,6 @@ func (t *ImportTask) ImportTag(ctx context.Context, tagJSON *jsonschema.Tag, pen
return nil
}
func (t *ImportTask) ImportScrapedItems(ctx context.Context) {
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
logger.Info("[scraped sites] importing")
r := t.txnManager
qb := r.ScrapedItem
sqb := r.Studio
currentTime := time.Now()
for i, mappingJSON := range t.scraped {
index := i + 1
logger.Progressf("[scraped sites] %d of %d", index, len(t.scraped))
newScrapedItem := models.ScrapedItem{
Title: sql.NullString{String: mappingJSON.Title, Valid: true},
Description: sql.NullString{String: mappingJSON.Description, Valid: true},
URL: sql.NullString{String: mappingJSON.URL, Valid: true},
Rating: sql.NullString{String: mappingJSON.Rating, Valid: true},
Tags: sql.NullString{String: mappingJSON.Tags, Valid: true},
Models: sql.NullString{String: mappingJSON.Models, Valid: true},
Episode: sql.NullInt64{Int64: int64(mappingJSON.Episode), Valid: true},
GalleryFilename: sql.NullString{String: mappingJSON.GalleryFilename, Valid: true},
GalleryURL: sql.NullString{String: mappingJSON.GalleryURL, Valid: true},
VideoFilename: sql.NullString{String: mappingJSON.VideoFilename, Valid: true},
VideoURL: sql.NullString{String: mappingJSON.VideoURL, Valid: true},
CreatedAt: currentTime,
UpdatedAt: t.getTimeFromJSONTime(mappingJSON.UpdatedAt),
}
time, err := utils.ParseDateStringAsTime(mappingJSON.Date)
if err == nil {
newScrapedItem.Date = &models.Date{Time: time}
}
studio, err := sqb.FindByName(ctx, mappingJSON.Studio, false)
if err != nil {
logger.Errorf("[scraped sites] failed to fetch studio: %s", err.Error())
}
if studio != nil {
newScrapedItem.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true}
}
_, err = qb.Create(ctx, newScrapedItem)
if err != nil {
logger.Errorf("[scraped sites] <%s> failed to create: %s", newScrapedItem.Title.String, err.Error())
}
}
return nil
}); err != nil {
logger.Errorf("[scraped sites] import failed to commit: %s", err.Error())
}
logger.Info("[scraped sites] import complete")
}
func (t *ImportTask) ImportScenes(ctx context.Context) {
logger.Info("[scenes] importing")
@@ -799,21 +732,3 @@ func (t *ImportTask) ImportImages(ctx context.Context) {
logger.Info("[images] import complete")
}
var currentLocation = time.Now().Location()
func (t *ImportTask) getTimeFromJSONTime(jsonTime json.JSONTime) time.Time {
if currentLocation != nil {
if jsonTime.IsZero() {
return time.Now().In(currentLocation)
} else {
return jsonTime.Time.In(currentLocation)
}
} else {
if jsonTime.IsZero() {
return time.Now()
} else {
return jsonTime.Time
}
}
}

View File

@@ -283,7 +283,10 @@ func getDate(val *string) *models.Date {
return nil
}
ret := models.NewDate(*val)
ret, err := models.ParseDate(*val)
if err != nil {
return nil
}
return &ret
}

View File

@@ -28,13 +28,13 @@ const (
)
var (
url = "url"
title = "title"
date = "2001-01-01"
dateObj = models.NewDate(date)
rating = 5
organized = true
details = "details"
url = "url"
title = "title"
date = "2001-01-01"
dateObj, _ = models.ParseDate(date)
rating = 5
organized = true
details = "details"
)
const (

View File

@@ -71,8 +71,10 @@ func (i *Importer) galleryJSONToGallery(galleryJSON jsonschema.Gallery) models.G
newGallery.URL = galleryJSON.URL
}
if galleryJSON.Date != "" {
d := models.NewDate(galleryJSON.Date)
newGallery.Date = &d
d, err := models.ParseDate(galleryJSON.Date)
if err == nil {
newGallery.Date = &d
}
}
if galleryJSON.Rating != 0 {
newGallery.Rating = &galleryJSON.Rating

View File

@@ -23,13 +23,13 @@ const (
)
var (
title = "title"
rating = 5
url = "http://a.com"
date = "2001-01-01"
dateObj = models.NewDate(date)
organized = true
ocounter = 2
title = "title"
rating = 5
url = "http://a.com"
date = "2001-01-01"
dateObj, _ = models.ParseDate(date)
organized = true
ocounter = 2
)
const (

View File

@@ -89,8 +89,10 @@ func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image {
newImage.URL = imageJSON.URL
}
if imageJSON.Date != "" {
d := models.NewDate(imageJSON.Date)
newImage.Date = &d
d, err := models.ParseDate(imageJSON.Date)
if err == nil {
newImage.Date = &d
}
}
return newImage

View File

@@ -1,6 +1,10 @@
package models
import "time"
import (
"time"
"github.com/stashapp/stash/pkg/utils"
)
// Date wraps a time.Time with a format of "YYYY-MM-DD"
type Date struct {
@@ -13,7 +17,11 @@ func (d Date) String() string {
return d.Format(dateFormat)
}
func NewDate(s string) Date {
t, _ := time.Parse(dateFormat, s)
return Date{t}
// ParseDate uses utils.ParseDateStringAsTime to parse a string into a date.
func ParseDate(s string) (Date, error) {
ret, err := utils.ParseDateStringAsTime(s)
if err != nil {
return Date{}, err
}
return Date{Time: ret}, nil
}
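For context on why the schema 48 migration scrubs '0001-01-01' values: the old NewDate ignored parse errors, so invalid input collapsed to the zero time. A tiny illustration of the difference (models.ParseDate delegates to utils.ParseDateStringAsTime, which may accept more layouts than the single one used here):

package main

import (
	"fmt"
	"time"
)

func main() {
	// What the old models.NewDate did on bad input: discard the error and
	// keep the zero time, which serializes as 0001-01-01 — exactly the
	// value the new migration removes from the database.
	t, _ := time.Parse("2006-01-02", "not-a-date")
	fmt.Println(t.Format("2006-01-02")) // 0001-01-01

	// The new ParseDate-style flow surfaces the error instead.
	if _, err := time.Parse("2006-01-02", "not-a-date"); err != nil {
		fmt.Println("parse error:", err)
	}
}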

View File

@@ -8,4 +8,6 @@ var (
// ErrConversion signifies conversion errors
ErrConversion = errors.New("conversion error")
ErrScraperSource = errors.New("invalid ScraperSource")
)

View File

@@ -1,49 +0,0 @@
package jsonschema
import (
"fmt"
"os"
jsoniter "github.com/json-iterator/go"
"github.com/stashapp/stash/pkg/models/json"
)
type ScrapedItem struct {
Title string `json:"title,omitempty"`
Description string `json:"description,omitempty"`
URL string `json:"url,omitempty"`
Date string `json:"date,omitempty"`
Rating string `json:"rating,omitempty"`
Tags string `json:"tags,omitempty"`
Models string `json:"models,omitempty"`
Episode int `json:"episode,omitempty"`
GalleryFilename string `json:"gallery_filename,omitempty"`
GalleryURL string `json:"gallery_url,omitempty"`
VideoFilename string `json:"video_filename,omitempty"`
VideoURL string `json:"video_url,omitempty"`
Studio string `json:"studio,omitempty"`
UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
}
func LoadScrapedFile(filePath string) ([]ScrapedItem, error) {
var scraped []ScrapedItem
file, err := os.Open(filePath)
if err != nil {
return nil, err
}
defer file.Close()
var json = jsoniter.ConfigCompatibleWithStandardLibrary
jsonParser := json.NewDecoder(file)
err = jsonParser.Decode(&scraped)
if err != nil {
return nil, err
}
return scraped, nil
}
func SaveScrapedFile(filePath string, scrapedItems []ScrapedItem) error {
if scrapedItems == nil {
return fmt.Errorf("scraped items must not be nil")
}
return marshalToFile(filePath, scrapedItems)
}

View File

@@ -1,61 +0,0 @@
// Code generated by mockery v2.10.0. DO NOT EDIT.
package mocks
import (
context "context"
models "github.com/stashapp/stash/pkg/models"
mock "github.com/stretchr/testify/mock"
)
// ScrapedItemReaderWriter is an autogenerated mock type for the ScrapedItemReaderWriter type
type ScrapedItemReaderWriter struct {
mock.Mock
}
// All provides a mock function with given fields: ctx
func (_m *ScrapedItemReaderWriter) All(ctx context.Context) ([]*models.ScrapedItem, error) {
ret := _m.Called(ctx)
var r0 []*models.ScrapedItem
if rf, ok := ret.Get(0).(func(context.Context) []*models.ScrapedItem); ok {
r0 = rf(ctx)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.ScrapedItem)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context) error); ok {
r1 = rf(ctx)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// Create provides a mock function with given fields: ctx, newObject
func (_m *ScrapedItemReaderWriter) Create(ctx context.Context, newObject models.ScrapedItem) (*models.ScrapedItem, error) {
ret := _m.Called(ctx, newObject)
var r0 *models.ScrapedItem
if rf, ok := ret.Get(0).(func(context.Context, models.ScrapedItem) *models.ScrapedItem); ok {
r0 = rf(ctx, newObject)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*models.ScrapedItem)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, models.ScrapedItem) error); ok {
r1 = rf(ctx, newObject)
} else {
r1 = ret.Error(1)
}
return r0, r1
}

View File

@@ -52,7 +52,6 @@ func NewTxnRepository() models.Repository {
Performer: &PerformerReaderWriter{},
Scene: &SceneReaderWriter{},
SceneMarker: &SceneMarkerReaderWriter{},
ScrapedItem: &ScrapedItemReaderWriter{},
Studio: &StudioReaderWriter{},
Tag: &TagReaderWriter{},
SavedFilter: &SavedFilterReaderWriter{},

View File

@@ -2,13 +2,10 @@ package models
import (
"time"
"github.com/stashapp/stash/pkg/hash/md5"
)
type Movie struct {
ID int `json:"id"`
Checksum string `json:"checksum"`
Name string `json:"name"`
Aliases string `json:"aliases"`
Duration *int `json:"duration"`
@@ -24,7 +21,6 @@ type Movie struct {
}
type MoviePartial struct {
Checksum OptionalString
Name OptionalString
Aliases OptionalString
Duration OptionalInt
@@ -44,7 +40,6 @@ var DefaultMovieImage = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkC
func NewMovie(name string) *Movie {
currentTime := time.Now()
return &Movie{
Checksum: md5.FromString(name),
Name: name,
CreatedAt: currentTime,
UpdatedAt: currentTime,

View File

@@ -25,7 +25,7 @@ func TestScenePartial_UpdateInput(t *testing.T) {
studioIDStr = "2"
)
dateObj := NewDate(date)
dateObj, _ := ParseDate(date)
tests := []struct {
name string

View File

@@ -1,10 +1,5 @@
package models
import (
"database/sql"
"time"
)
type ScrapedStudio struct {
// Set if studio matched
StoredID *string `json:"stored_id"`
@@ -79,34 +74,3 @@ type ScrapedMovie struct {
}
func (ScrapedMovie) IsScrapedContent() {}
type ScrapedItem struct {
ID int `db:"id" json:"id"`
Title sql.NullString `db:"title" json:"title"`
Code sql.NullString `db:"code" json:"code"`
Description sql.NullString `db:"description" json:"description"`
Director sql.NullString `db:"director" json:"director"`
URL sql.NullString `db:"url" json:"url"`
Date *Date `db:"date" json:"date"`
Rating sql.NullString `db:"rating" json:"rating"`
Tags sql.NullString `db:"tags" json:"tags"`
Models sql.NullString `db:"models" json:"models"`
Episode sql.NullInt64 `db:"episode" json:"episode"`
GalleryFilename sql.NullString `db:"gallery_filename" json:"gallery_filename"`
GalleryURL sql.NullString `db:"gallery_url" json:"gallery_url"`
VideoFilename sql.NullString `db:"video_filename" json:"video_filename"`
VideoURL sql.NullString `db:"video_url" json:"video_url"`
StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
CreatedAt time.Time `db:"created_at" json:"created_at"`
UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
}
type ScrapedItems []*ScrapedItem
func (s *ScrapedItems) Append(o interface{}) {
*s = append(*s, o.(*ScrapedItem))
}
func (s *ScrapedItems) New() interface{} {
return &ScrapedItem{}
}

View File

@@ -2,13 +2,10 @@ package models
import (
"time"
"github.com/stashapp/stash/pkg/hash/md5"
)
type Studio struct {
ID int `json:"id"`
Checksum string `json:"checksum"`
Name string `json:"name"`
URL string `json:"url"`
ParentID *int `json:"parent_id"`
@@ -21,7 +18,6 @@ type Studio struct {
}
type StudioPartial struct {
Checksum OptionalString
Name OptionalString
URL OptionalString
ParentID OptionalInt
@@ -36,7 +32,6 @@ type StudioPartial struct {
func NewStudio(name string) *Studio {
currentTime := time.Now()
return &Studio{
Checksum: md5.FromString(name),
Name: name,
CreatedAt: currentTime,
UpdatedAt: currentTime,

View File

@@ -23,7 +23,6 @@ type Repository struct {
Performer PerformerReaderWriter
Scene SceneReaderWriter
SceneMarker SceneMarkerReaderWriter
ScrapedItem ScrapedItemReaderWriter
Studio StudioReaderWriter
Tag TagReaderWriter
SavedFilter SavedFilterReaderWriter

View File

@@ -1,21 +0,0 @@
package models
import (
"context"
"errors"
)
var ErrScraperSource = errors.New("invalid ScraperSource")
type ScrapedItemReader interface {
All(ctx context.Context) ([]*ScrapedItem, error)
}
type ScrapedItemWriter interface {
Create(ctx context.Context, newObject ScrapedItem) (*ScrapedItem, error)
}
type ScrapedItemReaderWriter interface {
ScrapedItemReader
ScrapedItemWriter
}

View File

@@ -32,13 +32,13 @@ const movieName = "testMovie"
const movieAliases = "aliases"
var (
date = "2001-01-01"
dateObj = models.NewDate(date)
rating = 5
duration = 100
director = "director"
synopsis = "synopsis"
url = "url"
date = "2001-01-01"
dateObj, _ = models.ParseDate(date)
rating = 5
duration = 100
director = "director"
synopsis = "synopsis"
url = "url"
)
const studioName = "studio"

View File

@@ -4,7 +4,6 @@ import (
"context"
"fmt"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/jsonschema"
"github.com/stashapp/stash/pkg/studio"
@@ -58,10 +57,7 @@ func (i *Importer) PreImport(ctx context.Context) error {
}
func (i *Importer) movieJSONToMovie(movieJSON jsonschema.Movie) models.Movie {
checksum := md5.FromString(movieJSON.Name)
newMovie := models.Movie{
Checksum: checksum,
Name: movieJSON.Name,
Aliases: movieJSON.Aliases,
Director: movieJSON.Director,
@@ -72,8 +68,10 @@ func (i *Importer) movieJSONToMovie(movieJSON jsonschema.Movie) models.Movie {
}
if movieJSON.Date != "" {
d := models.NewDate(movieJSON.Date)
newMovie.Date = &d
d, err := models.ParseDate(movieJSON.Date)
if err == nil {
newMovie.Date = &d
}
}
if movieJSON.Rating != 0 {
newMovie.Rating = &movieJSON.Rating

View File

@@ -64,8 +64,8 @@ var stashIDs = []models.StashID{
const image = "aW1hZ2VCeXRlcw=="
var birthDate = models.NewDate("2001-01-01")
var deathDate = models.NewDate("2021-02-02")
var birthDate, _ = models.ParseDate("2001-01-01")
var deathDate, _ = models.ParseDate("2021-02-02")
var (
createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.Local)

View File

@@ -223,22 +223,18 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform
}
if performerJSON.Birthdate != "" {
d, err := utils.ParseDateStringAsTime(performerJSON.Birthdate)
date, err := models.ParseDate(performerJSON.Birthdate)
if err == nil {
newPerformer.Birthdate = &models.Date{
Time: d,
}
newPerformer.Birthdate = &date
}
}
if performerJSON.Rating != 0 {
newPerformer.Rating = &performerJSON.Rating
}
if performerJSON.DeathDate != "" {
d, err := utils.ParseDateStringAsTime(performerJSON.DeathDate)
date, err := models.ParseDate(performerJSON.DeathDate)
if err == nil {
newPerformer.DeathDate = &models.Date{
Time: d,
}
newPerformer.DeathDate = &date
}
}

View File

@@ -16,8 +16,8 @@ func TestValidateDeathDate(t *testing.T) {
date4 := "2004-01-01"
empty := ""
md2 := models.NewDate(date2)
md3 := models.NewDate(date3)
md2, _ := models.ParseDate(date2)
md3, _ := models.ParseDate(date3)
emptyPerformer := models.Performer{}
invalidPerformer := models.Performer{

View File

@@ -36,14 +36,14 @@ const (
)
var (
url = "url"
title = "title"
date = "2001-01-01"
dateObj = models.NewDate(date)
rating = 5
ocounter = 2
organized = true
details = "details"
url = "url"
title = "title"
date = "2001-01-01"
dateObj, _ = models.ParseDate(date)
rating = 5
ocounter = 2
organized = true
details = "details"
)
var (

View File

@@ -98,8 +98,10 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene {
}
if sceneJSON.Date != "" {
d := models.NewDate(sceneJSON.Date)
newScene.Date = &d
d, err := models.ParseDate(sceneJSON.Date)
if err == nil {
newScene.Date = &d
}
}
if sceneJSON.Rating != 0 {
newScene.Rating = &sceneJSON.Rating

View File

@@ -32,7 +32,7 @@ const (
dbConnTimeout = 30
)
var appSchemaVersion uint = 47
var appSchemaVersion uint = 48
//go:embed migrations/*.sql
var migrationsBox embed.FS
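The schema version moves to 48, and a pre-migration hook (registered later in this diff via sqlite.RegisterPreMigration) runs before the SQL migration itself. A minimal sketch of what such a hook registry might look like; the actual registration signature in the sqlite package is an assumption here:

package main

import (
	"context"
	"fmt"
)

// A minimal registry in the spirit of sqlite.RegisterPreMigration: hooks are
// keyed by the schema version they must run before.
type preMigration func(ctx context.Context) error

var preMigrations = map[uint]preMigration{}

func registerPreMigration(schemaVersion uint, fn preMigration) {
	preMigrations[schemaVersion] = fn
}

func runPreMigration(ctx context.Context, targetVersion uint) error {
	if fn, ok := preMigrations[targetVersion]; ok {
		return fn(ctx)
	}
	return nil
}

func main() {
	registerPreMigration(48, func(ctx context.Context) error {
		fmt.Println("running pre-migration for schema 48")
		return nil
	})
	_ = runPreMigration(context.Background(), 48)
}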

View File

@@ -41,23 +41,13 @@ func (d *NullDate) Scan(value interface{}) error {
return nil
}
// Zero dates, which primarily come from empty strings in the DB, are treated as being invalid.
// TODO: add migration to remove invalid dates from the database and remove this.
// Ensure elsewhere that empty date inputs resolve to a null date and not a zero date.
// Zero dates shouldn't be invalid.
if d.Date.IsZero() {
d.Valid = false
} else {
d.Valid = true
}
d.Valid = true
return nil
}
// Value implements the driver Valuer interface.
func (d NullDate) Value() (driver.Value, error) {
// TODO: don't ignore zero value, as above
if !d.Valid || d.Date.IsZero() {
if !d.Valid {
return nil, nil
}
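With zero dates no longer coerced to NULL, the Scanner/Valuer pair keeps zero values on both read and write. A cut-down sketch of a NullDate with the new behaviour; the conversion logic is simplified compared to the real sqlite type:

package main

import (
	"database/sql/driver"
	"fmt"
	"time"
)

// NullDate is a nullable date column value; zero dates are now considered
// valid rather than being coerced to NULL.
type NullDate struct {
	Date  time.Time
	Valid bool
}

// Scan implements sql.Scanner.
func (d *NullDate) Scan(value interface{}) error {
	if value == nil {
		d.Valid = false
		return nil
	}
	t, ok := value.(time.Time)
	if !ok {
		return fmt.Errorf("unsupported type %T for NullDate", value)
	}
	d.Date, d.Valid = t, true
	return nil
}

// Value implements driver.Valuer.
func (d NullDate) Value() (driver.Value, error) {
	if !d.Valid {
		return nil, nil
	}
	return d.Date.Format("2006-01-02"), nil
}

func main() {
	var d NullDate
	_ = d.Scan(time.Time{}) // a zero date
	v, _ := d.Value()
	fmt.Println(d.Valid, v) // true 0001-01-01 — no longer dropped to NULL
}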

View File

@@ -884,7 +884,7 @@ func galleryIsMissingCriterionHandler(qb *GalleryStore, isMissing *string) crite
qb.performersRepository().join(f, "performers_join", "galleries.id")
f.addWhere("performers_join.gallery_id IS NULL")
case "date":
f.addWhere("galleries.date IS NULL OR galleries.date IS \"\" OR galleries.date IS \"0001-01-01\"")
f.addWhere("galleries.date IS NULL OR galleries.date IS \"\"")
case "tags":
qb.tagsRepository().join(f, "tags_join", "galleries.id")
f.addWhere("tags_join.gallery_id IS NULL")
@@ -1008,7 +1008,6 @@ func galleryPerformerAgeCriterionHandler(performerAge *models.IntCriterionInput)
f.addWhere("galleries.date != '' AND performers.birthdate != ''")
f.addWhere("galleries.date IS NOT NULL AND performers.birthdate IS NOT NULL")
f.addWhere("galleries.date != '0001-01-01' AND performers.birthdate != '0001-01-01'")
ageCalc := "cast(strftime('%Y.%m%d', galleries.date) - strftime('%Y.%m%d', performers.birthdate) as int)"
whereClause, args := getIntWhereClause(ageCalc, performerAge.Modifier, performerAge.Value, performerAge.Value2)

View File

@@ -62,7 +62,7 @@ func Test_galleryQueryBuilder_Create(t *testing.T) {
galleryFile = makeFileWithID(fileIdxStartGalleryFiles)
)
date := models.NewDate("2003-02-01")
date, _ := models.ParseDate("2003-02-01")
tests := []struct {
name string
@@ -211,7 +211,7 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
)
date := models.NewDate("2003-02-01")
date, _ := models.ParseDate("2003-02-01")
tests := []struct {
name string
@@ -403,7 +403,7 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) {
createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
date = models.NewDate("2003-02-01")
date, _ = models.ParseDate("2003-02-01")
)
tests := []struct {
@@ -844,10 +844,6 @@ func makeGalleryWithID(index int) *models.Gallery {
ret := makeGallery(index, includeScenes)
ret.ID = galleryIDs[index]
if ret.Date != nil && ret.Date.IsZero() {
ret.Date = nil
}
ret.Files = models.NewRelatedFiles([]file.File{makeGalleryFile(index)})
return ret
@@ -1932,12 +1928,12 @@ func TestGalleryQueryIsMissingDate(t *testing.T) {
galleries := queryGallery(ctx, t, sqb, &galleryFilter, nil)
// three in four scenes have no date
assert.Len(t, galleries, int(math.Ceil(float64(totalGalleries)/4*3)))
// one in four galleries have no date
assert.Len(t, galleries, int(math.Ceil(float64(totalGalleries)/4)))
// ensure date is null, empty or "0001-01-01"
// ensure date is null
for _, g := range galleries {
assert.True(t, g.Date == nil || g.Date.Time == time.Time{})
assert.Nil(t, g.Date)
}
return nil

View File

@@ -57,7 +57,7 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
rating = 60
ocounter = 5
url = "url"
date = models.NewDate("2003-02-01")
date, _ = models.ParseDate("2003-02-01")
createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
@@ -216,7 +216,7 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
title = "title"
rating = 60
url = "url"
date = models.NewDate("2003-02-01")
date, _ = models.ParseDate("2003-02-01")
ocounter = 5
createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
@@ -396,7 +396,7 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) {
title = "title"
rating = 60
url = "url"
date = models.NewDate("2003-02-01")
date, _ = models.ParseDate("2003-02-01")
ocounter = 5
createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
@@ -965,10 +965,6 @@ func makeImageWithID(index int) *models.Image {
ret := makeImage(index)
ret.ID = imageIDs[index]
if ret.Date != nil && ret.Date.IsZero() {
ret.Date = nil
}
ret.Files = models.NewRelatedFiles([]file.File{makeImageFile(index)})
return ret
@@ -2877,7 +2873,7 @@ func TestImageQuerySorting(t *testing.T) {
"date",
models.SortDirectionEnumDesc,
imageIdxWithTwoGalleries,
imageIdxWithPerformerParentTag,
imageIdxWithGrandChildStudio,
},
}

View File

@@ -0,0 +1,91 @@
PRAGMA foreign_keys=OFF;
-- Cleanup old invalid dates
UPDATE `scenes` SET `date` = NULL WHERE `date` = '0001-01-01' OR `date` = '';
UPDATE `galleries` SET `date` = NULL WHERE `date` = '0001-01-01' OR `date` = '';
UPDATE `performers` SET `birthdate` = NULL WHERE `birthdate` = '0001-01-01' OR `birthdate` = '';
UPDATE `performers` SET `death_date` = NULL WHERE `death_date` = '0001-01-01' OR `death_date` = '';
-- Delete scene markers with missing scenes
DELETE FROM `scene_markers` WHERE `scene_id` IS NULL;
-- make scene_id not null
DROP INDEX `index_scene_markers_on_scene_id`;
DROP INDEX `index_scene_markers_on_primary_tag_id`;
CREATE TABLE `scene_markers_new` (
`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
`title` VARCHAR(255) NOT NULL,
`seconds` FLOAT NOT NULL,
`primary_tag_id` INTEGER NOT NULL,
`scene_id` INTEGER NOT NULL,
`created_at` DATETIME NOT NULL,
`updated_at` DATETIME NOT NULL,
FOREIGN KEY(`primary_tag_id`) REFERENCES `tags`(`id`),
FOREIGN KEY(`scene_id`) REFERENCES `scenes`(`id`)
);
INSERT INTO `scene_markers_new` SELECT * FROM `scene_markers`;
DROP TABLE `scene_markers`;
ALTER TABLE `scene_markers_new` RENAME TO `scene_markers`;
CREATE INDEX `index_scene_markers_on_primary_tag_id` ON `scene_markers`(`primary_tag_id`);
CREATE INDEX `index_scene_markers_on_scene_id` ON `scene_markers`(`scene_id`);
-- drop unused scraped items table
DROP TABLE IF EXISTS `scraped_items`;
-- remove checksum from movies
DROP INDEX `movies_checksum_unique`;
DROP INDEX `movies_name_unique`;
CREATE TABLE `movies_new` (
`id` integer not null primary key autoincrement,
`name` varchar(255) not null,
`aliases` varchar(255),
`duration` integer,
`date` date,
`rating` tinyint,
`studio_id` integer REFERENCES `studios`(`id`) ON DELETE SET NULL,
`director` varchar(255),
`synopsis` text,
`url` varchar(255),
`created_at` datetime not null,
`updated_at` datetime not null,
`front_image_blob` varchar(255) REFERENCES `blobs`(`checksum`),
`back_image_blob` varchar(255) REFERENCES `blobs`(`checksum`)
);
INSERT INTO `movies_new` SELECT `id`, `name`, `aliases`, `duration`, `date`, `rating`, `studio_id`, `director`, `synopsis`, `url`, `created_at`, `updated_at`, `front_image_blob`, `back_image_blob` FROM `movies`;
DROP TABLE `movies`;
ALTER TABLE `movies_new` RENAME TO `movies`;
CREATE UNIQUE INDEX `index_movies_on_name_unique` ON `movies`(`name`);
CREATE INDEX `index_movies_on_studio_id` on `movies` (`studio_id`);
-- remove checksum from studios
DROP INDEX `index_studios_on_checksum`;
DROP INDEX `index_studios_on_name`;
DROP INDEX `studios_checksum_unique`;
CREATE TABLE `studios_new` (
`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
`name` VARCHAR(255) NOT NULL,
`url` VARCHAR(255),
`parent_id` INTEGER DEFAULT NULL CHECK (`id` IS NOT `parent_id`) REFERENCES `studios`(`id`) ON DELETE SET NULL,
`created_at` DATETIME NOT NULL,
`updated_at` DATETIME NOT NULL,
`details` TEXT,
`rating` TINYINT,
`ignore_auto_tag` BOOLEAN NOT NULL DEFAULT FALSE,
`image_blob` VARCHAR(255) REFERENCES `blobs`(`checksum`)
);
INSERT INTO `studios_new` SELECT `id`, `name`, `url`, `parent_id`, `created_at`, `updated_at`, `details`, `rating`, `ignore_auto_tag`, `image_blob` FROM `studios`;
DROP TABLE `studios`;
ALTER TABLE `studios_new` RENAME TO `studios`;
CREATE UNIQUE INDEX `index_studios_on_name_unique` ON `studios`(`name`);
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,150 @@
package migrations
import (
"context"
"fmt"
"github.com/jmoiron/sqlx"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/sqlite"
)
func pre48(ctx context.Context, db *sqlx.DB) error {
logger.Info("Running pre-migration for schema version 48")
m := schema48PreMigrator{
migrator: migrator{
db: db,
},
}
if err := m.validateScrapedItems(ctx); err != nil {
return err
}
if err := m.fixStudioNames(ctx); err != nil {
return err
}
return nil
}
type schema48PreMigrator struct {
migrator
}
func (m *schema48PreMigrator) validateScrapedItems(ctx context.Context) error {
var count int
row := m.db.QueryRowx("SELECT COUNT(*) FROM scraped_items")
err := row.Scan(&count)
if err != nil {
return err
}
if count == 0 {
return nil
}
return fmt.Errorf("found %d row(s) in scraped_items table, cannot migrate", count)
}
func (m *schema48PreMigrator) fixStudioNames(ctx context.Context) error {
// First remove NULL names
if err := m.withTxn(ctx, func(tx *sqlx.Tx) error {
_, err := m.db.Exec("UPDATE studios SET name = 'NULL' WHERE name IS NULL")
return err
}); err != nil {
return err
}
// Then remove duplicate names
dupes := make(map[string][]int)
// collect names
if err := m.withTxn(ctx, func(tx *sqlx.Tx) error {
rows, err := m.db.Query("SELECT id, name FROM studios ORDER BY name, id")
if err != nil {
return err
}
defer rows.Close()
first := true
var lastName string
for rows.Next() {
var (
id int
name string
)
err := rows.Scan(&id, &name)
if err != nil {
return err
}
if first {
first = false
lastName = name
continue
}
if lastName == name {
dupes[name] = append(dupes[name], id)
} else {
lastName = name
}
}
return rows.Err()
}); err != nil {
return err
}
// rename them
if err := m.withTxn(ctx, func(tx *sqlx.Tx) error {
for name, ids := range dupes {
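// i numbers this name's renamed duplicates; it keeps increasing across
// collisions so generated names stay unique.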
i := 0
for _, id := range ids {
var newName string
for j := 0; ; j++ {
i++
newName = fmt.Sprintf("%s (%d)", name, i)
var count int
row := m.db.QueryRowx("SELECT COUNT(*) FROM studios WHERE name = ?", newName)
err := row.Scan(&count)
if err != nil {
return err
}
if count == 0 {
break
}
// try up to 100 times to find a unique name
if j == 100 {
return fmt.Errorf("cannot make unique studio name for %s", name)
}
}
logger.Infof("Renaming duplicate studio id %d to %s", id, newName)
_, err := m.db.Exec("UPDATE studios SET name = ? WHERE id = ?", newName, id)
if err != nil {
return err
}
}
}
return nil
}); err != nil {
return err
}
return nil
}
func init() {
sqlite.RegisterPreMigration(48, pre48)
}

View File

@@ -26,7 +26,6 @@ const (
type movieRow struct {
ID int `db:"id" goqu:"skipinsert"`
Checksum string `db:"checksum"`
Name zero.String `db:"name"`
Aliases zero.String `db:"aliases"`
Duration null.Int `db:"duration"`
@@ -47,7 +46,6 @@ type movieRow struct {
func (r *movieRow) fromMovie(o models.Movie) {
r.ID = o.ID
r.Checksum = o.Checksum
r.Name = zero.StringFrom(o.Name)
r.Aliases = zero.StringFrom(o.Aliases)
r.Duration = intFromPtr(o.Duration)
@@ -64,7 +62,6 @@ func (r *movieRow) fromMovie(o models.Movie) {
func (r *movieRow) resolve() *models.Movie {
ret := &models.Movie{
ID: r.ID,
Checksum: r.Checksum,
Name: r.Name.String,
Aliases: r.Aliases.String,
Duration: nullIntPtr(r.Duration),
@@ -86,7 +83,6 @@ type movieRowRecord struct {
}
func (r *movieRowRecord) fromPartial(o models.MoviePartial) {
r.setString("checksum", o.Checksum)
r.setNullString("name", o.Name)
r.setNullString("aliases", o.Aliases)
r.setNullInt("duration", o.Duration)

View File

@@ -12,7 +12,6 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/models"
)
@@ -293,7 +292,6 @@ func TestMovieUpdateFrontImage(t *testing.T) {
const name = "TestMovieUpdateMovieImages"
movie := models.Movie{
Name: name,
Checksum: md5.FromString(name),
}
err := qb.Create(ctx, &movie)
if err != nil {
@@ -314,7 +312,6 @@ func TestMovieUpdateBackImage(t *testing.T) {
const name = "TestMovieUpdateMovieImages"
movie := models.Movie{
Name: name,
Checksum: md5.FromString(name),
}
err := qb.Create(ctx, &movie)
if err != nil {

View File

@@ -766,7 +766,7 @@ func performerAgeFilterCriterionHandler(age *models.IntCriterionInput) criterion
return func(ctx context.Context, f *filterBuilder) {
if age != nil && age.Modifier.IsValid() {
clause, args := getIntCriterionWhereClause(
"cast(strftime('%Y.%m%d',CASE WHEN performers.death_date IS NULL OR performers.death_date = '0001-01-01' OR performers.death_date = '' THEN 'now' ELSE performers.death_date END) - strftime('%Y.%m%d', performers.birthdate) as int)",
"cast(IFNULL(strftime('%Y.%m%d', performers.death_date), strftime('%Y.%m%d', 'now')) - strftime('%Y.%m%d', performers.birthdate) as int)",
*age,
)
f.addWhere(clause, args...)

View File

@@ -69,8 +69,8 @@ func Test_PerformerStore_Create(t *testing.T) {
createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
birthdate = models.NewDate("2003-02-01")
deathdate = models.NewDate("2023-02-01")
birthdate, _ = models.ParseDate("2003-02-01")
deathdate, _ = models.ParseDate("2023-02-01")
)
tests := []struct {
@@ -217,8 +217,8 @@ func Test_PerformerStore_Update(t *testing.T) {
createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
birthdate = models.NewDate("2003-02-01")
deathdate = models.NewDate("2023-02-01")
birthdate, _ = models.ParseDate("2003-02-01")
deathdate, _ = models.ParseDate("2023-02-01")
)
tests := []struct {
@@ -400,8 +400,8 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) {
createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
birthdate = models.NewDate("2003-02-01")
deathdate = models.NewDate("2023-02-01")
birthdate, _ = models.ParseDate("2003-02-01")
deathdate, _ = models.ParseDate("2023-02-01")
)
tests := []struct {

View File

@@ -5,7 +5,6 @@ import (
"database/sql"
"errors"
"fmt"
"reflect"
"strings"
"github.com/jmoiron/sqlx"
@@ -27,65 +26,11 @@ type repository struct {
idColumn string
}
func (r *repository) getByID(ctx context.Context, id int, dest interface{}) error {
stmt := fmt.Sprintf("SELECT * FROM %s WHERE %s = ? LIMIT 1", r.tableName, r.idColumn)
return r.tx.Get(ctx, dest, stmt, id)
}
func (r *repository) getAll(ctx context.Context, id int, f func(rows *sqlx.Rows) error) error {
stmt := fmt.Sprintf("SELECT * FROM %s WHERE %s = ?", r.tableName, r.idColumn)
return r.queryFunc(ctx, stmt, []interface{}{id}, false, f)
}
func (r *repository) insert(ctx context.Context, obj interface{}) (sql.Result, error) {
stmt := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)", r.tableName, listKeys(obj, false), listKeys(obj, true))
return r.tx.NamedExec(ctx, stmt, obj)
}
func (r *repository) insertObject(ctx context.Context, obj interface{}, out interface{}) error {
result, err := r.insert(ctx, obj)
if err != nil {
return err
}
id, err := result.LastInsertId()
if err != nil {
return err
}
return r.getByID(ctx, int(id), out)
}
func (r *repository) update(ctx context.Context, id int, obj interface{}, partial bool) error {
exists, err := r.exists(ctx, id)
if err != nil {
return err
}
if !exists {
return fmt.Errorf("%s %d does not exist in %s", r.idColumn, id, r.tableName)
}
stmt := fmt.Sprintf("UPDATE %s SET %s WHERE %s.%s = :id", r.tableName, updateSet(obj, partial), r.tableName, r.idColumn)
_, err = r.tx.NamedExec(ctx, stmt, obj)
return err
}
// func (r *repository) updateMap(ctx context.Context, id int, m map[string]interface{}) error {
// exists, err := r.exists(ctx, id)
// if err != nil {
// return err
// }
// if !exists {
// return fmt.Errorf("%s %d does not exist in %s", r.idColumn, id, r.tableName)
// }
// stmt := fmt.Sprintf("UPDATE %s SET %s WHERE %s.%s = :id", r.tableName, updateSetMap(m), r.tableName, r.idColumn)
// _, err = r.tx.NamedExec(ctx, stmt, m)
// return err
// }
func (r *repository) destroyExisting(ctx context.Context, ids []int) error {
for _, id := range ids {
exists, err := r.exists(ctx, id)
@@ -597,53 +542,3 @@ func (r *filesRepository) get(ctx context.Context, id int) ([]file.ID, error) {
return ret, nil
}
func listKeys(i interface{}, addPrefix bool) string {
var query []string
v := reflect.ValueOf(i)
for i := 0; i < v.NumField(); i++ {
// Get key for struct tag
rawKey := v.Type().Field(i).Tag.Get("db")
key := strings.Split(rawKey, ",")[0]
if key == "id" {
continue
}
if addPrefix {
key = ":" + key
}
query = append(query, key)
}
return strings.Join(query, ", ")
}
func updateSet(i interface{}, partial bool) string {
var query []string
v := reflect.ValueOf(i)
for i := 0; i < v.NumField(); i++ {
// Get key for struct tag
rawKey := v.Type().Field(i).Tag.Get("db")
key := strings.Split(rawKey, ",")[0]
if key == "id" {
continue
}
add := true
if partial {
reflectValue := reflect.ValueOf(v.Field(i).Interface())
add = !reflectValue.IsNil()
}
if add {
query = append(query, fmt.Sprintf("%s=:%s", key, key))
}
}
return strings.Join(query, ", ")
}
// func updateSetMap(m map[string]interface{}) string {
// var query []string
// for k := range m {
// query = append(query, fmt.Sprintf("%s=:%s", k, k))
// }
// return strings.Join(query, ", ")
// }

View File

@@ -1304,7 +1304,7 @@ func sceneIsMissingCriterionHandler(qb *SceneStore, isMissing *string) criterion
qb.performersRepository().join(f, "performers_join", "scenes.id")
f.addWhere("performers_join.scene_id IS NULL")
case "date":
f.addWhere(`scenes.date IS NULL OR scenes.date IS "" OR scenes.date IS "0001-01-01"`)
f.addWhere(`scenes.date IS NULL OR scenes.date IS ""`)
case "tags":
qb.tagsRepository().join(f, "tags_join", "scenes.id")
f.addWhere("tags_join.scene_id IS NULL")
@@ -1441,7 +1441,6 @@ func scenePerformerAgeCriterionHandler(performerAge *models.IntCriterionInput) c
f.addWhere("scenes.date != '' AND performers.birthdate != ''")
f.addWhere("scenes.date IS NOT NULL AND performers.birthdate IS NOT NULL")
f.addWhere("scenes.date != '0001-01-01' AND performers.birthdate != '0001-01-01'")
ageCalc := "cast(strftime('%Y.%m%d', scenes.date) - strftime('%Y.%m%d', performers.birthdate) as int)"
whereClause, args := getIntWhereClause(ageCalc, performerAge.Modifier, performerAge.Value, performerAge.Value2)

View File

@@ -9,7 +9,6 @@ import (
"github.com/doug-martin/goqu/v9"
"github.com/doug-martin/goqu/v9/exp"
"github.com/jmoiron/sqlx"
"gopkg.in/guregu/null.v4/zero"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/intslice"
@@ -29,7 +28,7 @@ type sceneMarkerRow struct {
Title string `db:"title"`
Seconds float64 `db:"seconds"`
PrimaryTagID int `db:"primary_tag_id"`
SceneID zero.Int `db:"scene_id,omitempty"` // TODO: make schema non-nullable
SceneID int `db:"scene_id"`
CreatedAt Timestamp `db:"created_at"`
UpdatedAt Timestamp `db:"updated_at"`
}
@@ -39,7 +38,7 @@ func (r *sceneMarkerRow) fromSceneMarker(o models.SceneMarker) {
r.Title = o.Title
r.Seconds = o.Seconds
r.PrimaryTagID = o.PrimaryTagID
r.SceneID = zero.IntFrom(int64(o.SceneID))
r.SceneID = o.SceneID
r.CreatedAt = Timestamp{Timestamp: o.CreatedAt}
r.UpdatedAt = Timestamp{Timestamp: o.UpdatedAt}
}
@@ -50,7 +49,7 @@ func (r *sceneMarkerRow) resolve() *models.SceneMarker {
Title: r.Title,
Seconds: r.Seconds,
PrimaryTagID: r.PrimaryTagID,
SceneID: int(r.SceneID.Int64),
SceneID: r.SceneID,
CreatedAt: r.CreatedAt.Timestamp,
UpdatedAt: r.UpdatedAt.Timestamp,
}

View File

@@ -97,7 +97,7 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
stashID1 = "stashid1"
stashID2 = "stashid2"
date = models.NewDate("2003-02-01")
date, _ = models.ParseDate("2003-02-01")
videoFile = makeFileWithID(fileIdxStartVideoFiles)
)
@@ -336,7 +336,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
stashID1 = "stashid1"
stashID2 = "stashid2"
date = models.NewDate("2003-02-01")
date, _ = models.ParseDate("2003-02-01")
)
tests := []struct {
@@ -552,7 +552,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
stashID1 = "stashid1"
stashID2 = "stashid2"
date = models.NewDate("2003-02-01")
date, _ = models.ParseDate("2003-02-01")
)
tests := []struct {
@@ -1460,10 +1460,6 @@ func makeSceneWithID(index int) *models.Scene {
ret := makeScene(index)
ret.ID = sceneIDs[index]
if ret.Date != nil && ret.Date.IsZero() {
ret.Date = nil
}
ret.Files = models.NewRelatedVideoFiles([]*file.VideoFile{makeSceneFile(index)})
return ret
@@ -3243,12 +3239,12 @@ func TestSceneQueryIsMissingDate(t *testing.T) {
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
// three in four scenes have no date
assert.Len(t, scenes, int(math.Ceil(float64(totalScenes)/4*3)))
// one in four scenes have no date
assert.Len(t, scenes, int(math.Ceil(float64(totalScenes)/4)))
// ensure date is null, empty or "0001-01-01"
// ensure date is null
for _, scene := range scenes {
assert.True(t, scene.Date == nil || scene.Date.Time == time.Time{})
assert.Nil(t, scene.Date)
}
return nil
@@ -3293,7 +3289,7 @@ func TestSceneQueryIsMissingRating(t *testing.T) {
assert.True(t, len(scenes) > 0)
// ensure date is null, empty or "0001-01-01"
// ensure rating is null
for _, scene := range scenes {
assert.Nil(t, scene.Rating)
}

View File

@@ -1,81 +0,0 @@
package sqlite
import (
"context"
"database/sql"
"errors"
"github.com/stashapp/stash/pkg/models"
)
const scrapedItemTable = "scraped_items"
type scrapedItemQueryBuilder struct {
repository
}
var ScrapedItemReaderWriter = &scrapedItemQueryBuilder{
repository{
tableName: scrapedItemTable,
idColumn: idColumn,
},
}
func (qb *scrapedItemQueryBuilder) Create(ctx context.Context, newObject models.ScrapedItem) (*models.ScrapedItem, error) {
var ret models.ScrapedItem
if err := qb.insertObject(ctx, newObject, &ret); err != nil {
return nil, err
}
return &ret, nil
}
func (qb *scrapedItemQueryBuilder) Update(ctx context.Context, updatedObject models.ScrapedItem) (*models.ScrapedItem, error) {
const partial = false
if err := qb.update(ctx, updatedObject.ID, updatedObject, partial); err != nil {
return nil, err
}
return qb.find(ctx, updatedObject.ID)
}
func (qb *scrapedItemQueryBuilder) Find(ctx context.Context, id int) (*models.ScrapedItem, error) {
return qb.find(ctx, id)
}
func (qb *scrapedItemQueryBuilder) find(ctx context.Context, id int) (*models.ScrapedItem, error) {
var ret models.ScrapedItem
if err := qb.getByID(ctx, id, &ret); err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, nil
}
return nil, err
}
return &ret, nil
}
func (qb *scrapedItemQueryBuilder) All(ctx context.Context) ([]*models.ScrapedItem, error) {
return qb.queryScrapedItems(ctx, selectAll("scraped_items")+qb.getScrapedItemsSort(nil), nil)
}
func (qb *scrapedItemQueryBuilder) getScrapedItemsSort(findFilter *models.FindFilterType) string {
var sort string
var direction string
if findFilter == nil {
sort = "id" // TODO studio_id and title
direction = "ASC"
} else {
sort = findFilter.GetSort("id")
direction = findFilter.GetDirection()
}
return getSort(sort, direction, "scraped_items")
}
func (qb *scrapedItemQueryBuilder) queryScrapedItems(ctx context.Context, query string, args []interface{}) ([]*models.ScrapedItem, error) {
var ret models.ScrapedItems
if err := qb.query(ctx, query, args, &ret); err != nil {
return nil, err
}
return []*models.ScrapedItem(ret), nil
}

View File

@@ -15,7 +15,6 @@ import (
"time"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/intslice"
"github.com/stashapp/stash/pkg/sqlite"
@@ -956,14 +955,14 @@ func getWidth(index int) int {
}
func getObjectDate(index int) *models.Date {
dates := []string{"null", "", "0001-01-01", "2001-02-03"}
dates := []string{"null", "2000-01-01", "0001-01-01", "2001-02-03"}
date := dates[index%len(dates)]
if date == "null" {
return nil
}
ret := models.NewDate(date)
ret, _ := models.ParseDate(date)
return &ret
}
@@ -1313,7 +1312,6 @@ func createMovies(ctx context.Context, mqb models.MovieReaderWriter, n int, o in
movie := models.Movie{
Name: name,
URL: getMovieNullStringValue(index, urlField),
Checksum: md5.FromString(name),
}
err := mqb.Create(ctx, &movie)
@@ -1578,7 +1576,6 @@ func getStudioNullStringValue(index int, field string) string {
func createStudio(ctx context.Context, sqb models.StudioReaderWriter, name string, parentID *int) (*models.Studio, error) {
studio := models.Studio{
Name: name,
Checksum: md5.FromString(name),
}
if parentID != nil {
@@ -1621,7 +1618,6 @@ func createStudios(ctx context.Context, sqb models.StudioReaderWriter, n int, o
name = getStudioStringValue(index, name)
studio := models.Studio{
Name: name,
Checksum: md5.FromString(name),
URL: getStudioNullStringValue(index, urlField),
IgnoreAutoTag: getIgnoreAutoTag(i),
}

View File

@@ -249,9 +249,9 @@ func getDateWhereClause(column string, modifier models.CriterionModifier, value
switch modifier {
case models.CriterionModifierIsNull:
return fmt.Sprintf("(%s IS NULL OR %s = '' OR %s = '0001-01-01')", column, column, column), nil
return fmt.Sprintf("(%s IS NULL OR %s = '')", column, column), nil
case models.CriterionModifierNotNull:
return fmt.Sprintf("(%s IS NOT NULL AND %s != '' AND %s != '0001-01-01')", column, column, column), nil
return fmt.Sprintf("(%s IS NOT NULL AND %s != '')", column, column), nil
case models.CriterionModifierEquals:
return fmt.Sprintf("%s = ?", column), args
case models.CriterionModifierNotEquals:

View File

@@ -28,7 +28,6 @@ const (
type studioRow struct {
ID int `db:"id" goqu:"skipinsert"`
Checksum string `db:"checksum"`
Name zero.String `db:"name"`
URL zero.String `db:"url"`
ParentID null.Int `db:"parent_id,omitempty"`
@@ -45,7 +44,6 @@ type studioRow struct {
func (r *studioRow) fromStudio(o models.Studio) {
r.ID = o.ID
r.Checksum = o.Checksum
r.Name = zero.StringFrom(o.Name)
r.URL = zero.StringFrom(o.URL)
r.ParentID = intFromPtr(o.ParentID)
@@ -59,7 +57,6 @@ func (r *studioRow) fromStudio(o models.Studio) {
func (r *studioRow) resolve() *models.Studio {
ret := &models.Studio{
ID: r.ID,
Checksum: r.Checksum,
Name: r.Name.String,
URL: r.URL.String,
ParentID: nullIntPtr(r.ParentID),
@@ -78,7 +75,6 @@ type studioRowRecord struct {
}
func (r *studioRowRecord) fromPartial(o models.StudioPartial) {
r.setString("checksum", o.Checksum)
r.setNullString("name", o.Name)
r.setNullString("url", o.URL)
r.setNullInt("parent_id", o.ParentID)
@@ -173,13 +169,6 @@ func (qb *StudioStore) Destroy(ctx context.Context, id int) error {
return err
}
// TODO - set null on foreign key in scraped items
// remove studio from scraped items
_, err := qb.tx.Exec(ctx, "UPDATE scraped_items SET studio_id = null WHERE studio_id = ?", id)
if err != nil {
return err
}
return qb.destroyExisting(ctx, []int{id})
}

View File

@@ -135,7 +135,6 @@ func (db *Database) TxnRepository() models.Repository {
Performer: db.Performer,
Scene: db.Scene,
SceneMarker: db.SceneMarker,
ScrapedItem: ScrapedItemReaderWriter,
Studio: db.Studio,
Tag: db.Tag,
SavedFilter: db.SavedFilter,

View File

@@ -5,7 +5,6 @@ import (
"errors"
"fmt"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/jsonschema"
"github.com/stashapp/stash/pkg/utils"
@@ -31,10 +30,7 @@ type Importer struct {
}
func (i *Importer) PreImport(ctx context.Context) error {
checksum := md5.FromString(i.Input.Name)
i.studio = models.Studio{
Checksum: checksum,
Name: i.Input.Name,
URL: i.Input.URL,
Details: i.Input.Details,

View File

@@ -5,7 +5,6 @@ import (
"errors"
"testing"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/jsonschema"
"github.com/stashapp/stash/pkg/models/mocks"
@@ -64,7 +63,6 @@ func TestImporterPreImport(t *testing.T) {
assert.Nil(t, err)
expectedStudio := createFullStudio(0, 0)
expectedStudio.ParentID = nil
expectedStudio.Checksum = md5.FromString(studioName)
assert.Equal(t, expectedStudio, i.studio)
}

View File

@@ -20,6 +20,10 @@ const base64RE = `^data:.+\/(.+);base64,(.*)$`
// ProcessImageInput transforms an image string either from a base64 encoded
// string, or from a URL, and returns the image as a byte slice
func ProcessImageInput(ctx context.Context, imageInput string) ([]byte, error) {
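// an empty input means the existing image should be cleared (treated as null)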
if imageInput == "" {
return []byte{}, nil
}
regex := regexp.MustCompile(base64RE)
if regex.MatchString(imageInput) {
d, err := ProcessBase64Image(imageInput)

View File

@@ -0,0 +1,3 @@
This migration removes the unused `scraped_items` table from the database, which was only used in very old versions of Stash. For the vast majority of users the table should be empty; if it is not, the migration will fail and the previous database will be restored. If this happens, please manually edit the database and empty the table yourself, after making a copy of any contained data you'd like to keep. If you are not sure how to do this, feel free to ask for assistance on the Discord server.
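For reference only, a rough sketch of what that looks like with any SQLite client, run against a backup copy of the database (only the table name comes from the migration; the rest is plain SQL):
-- count how many rows the migration check will see
SELECT COUNT(*) FROM scraped_items;
-- once any data you want to keep has been saved elsewhere, empty the table and retry the migration
DELETE FROM scraped_items;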
This migration also enforces studio name uniqueness at the database level. Although no longer possible in recent versions, older versions of Stash allowed different studios to have identical names. If your database contains such duplicates, this migration renames them by appending `" (1)"`, `" (2)"`, and so on to their names.

View File

@@ -1,7 +1,9 @@
import migration32 from "./32.md";
import migration39 from "./39.md";
import migration48 from "./48.md";
export const migrationNotes: Record<number, string> = {
32: migration32,
39: migration39,
48: migration48,
};