Mirror of https://github.com/stashapp/stash.git (synced 2025-12-17 12:24:38 +03:00)
Data layer restructuring (#997)
* Move query builders to sqlite package
* Add transaction system
* Wrap model resolvers in transaction
* Add error return value for StringSliceToIntSlice
* Update/refactor mutation resolvers
* Convert query builders
* Remove unused join types
* Add stash id unit tests
* Use WAL journal mode
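The diff below shows the pattern this commit applies across the resolvers: a repository is only touched inside a transaction obtained from the resolver's txnManager. The following is a minimal, self-contained sketch of that pattern, not code from the commit; the interfaces here are simplified stand-ins for models.TransactionManager, models.ReaderRepository and the reader types that appear in the diff.

package example

import "context"

// Simplified stand-ins for the interfaces used in the diff.
type Scene struct{ ID int }

type SceneReader interface {
	Find(id int) (*Scene, error)
}

type ReaderRepository interface {
	Scene() SceneReader
}

type TransactionManager interface {
	WithReadTxn(ctx context.Context, fn func(r ReaderRepository) error) error
}

type Resolver struct {
	txnManager TransactionManager
}

func (r *Resolver) withReadTxn(ctx context.Context, fn func(r ReaderRepository) error) error {
	return r.txnManager.WithReadTxn(ctx, fn)
}

// FindScene shows the shape used throughout the commit: open a read
// transaction, run every repository call inside it, and surface any error.
func (r *Resolver) FindScene(ctx context.Context, id int) (ret *Scene, err error) {
	if err := r.withReadTxn(ctx, func(repo ReaderRepository) error {
		ret, err = repo.Scene().Find(id)
		return err
	}); err != nil {
		return nil, err
	}

	return ret, nil
}

Mutations follow the same shape with a read/write repository via withTxn, and helpers such as StringSliceToIntSlice now return an error instead of silently ignoring bad input.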
@@ -5,12 +5,13 @@ import (
"sort"
"strconv"

"github.com/99designs/gqlgen/graphql"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
)

type Resolver struct{}
type Resolver struct {
txnManager models.TransactionManager
}

func (r *Resolver) Gallery() models.GalleryResolver {
return &galleryResolver{r}
@@ -79,35 +80,75 @@ type scrapedSceneMovieResolver struct{ *Resolver }
type scrapedScenePerformerResolver struct{ *Resolver }
type scrapedSceneStudioResolver struct{ *Resolver }

func (r *queryResolver) MarkerWall(ctx context.Context, q *string) ([]*models.SceneMarker, error) {
qb := models.NewSceneMarkerQueryBuilder()
return qb.Wall(q)
func (r *Resolver) withTxn(ctx context.Context, fn func(r models.Repository) error) error {
return r.txnManager.WithTxn(ctx, fn)
}

func (r *queryResolver) SceneWall(ctx context.Context, q *string) ([]*models.Scene, error) {
qb := models.NewSceneQueryBuilder()
return qb.Wall(q)
func (r *Resolver) withReadTxn(ctx context.Context, fn func(r models.ReaderRepository) error) error {
return r.txnManager.WithReadTxn(ctx, fn)
}

func (r *queryResolver) MarkerStrings(ctx context.Context, q *string, sort *string) ([]*models.MarkerStringsResultType, error) {
qb := models.NewSceneMarkerQueryBuilder()
return qb.GetMarkerStrings(q, sort)
func (r *queryResolver) MarkerWall(ctx context.Context, q *string) (ret []*models.SceneMarker, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.SceneMarker().Wall(q)
return err
}); err != nil {
return nil, err
}
return ret, nil
}

func (r *queryResolver) SceneWall(ctx context.Context, q *string) (ret []*models.Scene, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Scene().Wall(q)
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *queryResolver) MarkerStrings(ctx context.Context, q *string, sort *string) (ret []*models.MarkerStringsResultType, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.SceneMarker().GetMarkerStrings(q, sort)
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *queryResolver) ValidGalleriesForScene(ctx context.Context, scene_id *string) ([]*models.Gallery, error) {
if scene_id == nil {
panic("nil scene id") // TODO make scene_id mandatory
}
sceneID, _ := strconv.Atoi(*scene_id)
sqb := models.NewSceneQueryBuilder()
scene, err := sqb.Find(sceneID)
sceneID, err := strconv.Atoi(*scene_id)
if err != nil {
return nil, err
}

qb := models.NewGalleryQueryBuilder()
validGalleries, err := qb.ValidGalleriesForScenePath(scene.Path)
sceneGallery, _ := qb.FindBySceneID(sceneID, nil)
var validGalleries []*models.Gallery
var sceneGallery *models.Gallery
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
sqb := repo.Scene()
scene, err := sqb.Find(sceneID)
if err != nil {
return err
}

qb := repo.Gallery()
validGalleries, err = qb.ValidGalleriesForScenePath(scene.Path)
if err != nil {
return err
}
sceneGallery, err = qb.FindBySceneID(sceneID)
return err
}); err != nil {
return nil, err
}

if sceneGallery != nil {
validGalleries = append(validGalleries, sceneGallery)
}
@@ -115,23 +156,26 @@ func (r *queryResolver) ValidGalleriesForScene(ctx context.Context, scene_id *st
}

func (r *queryResolver) Stats(ctx context.Context) (*models.StatsResultType, error) {
scenesQB := models.NewSceneQueryBuilder()
var ret models.StatsResultType
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
scenesQB := repo.Scene()
imageQB := repo.Image()
galleryQB := repo.Gallery()
studiosQB := repo.Studio()
performersQB := repo.Performer()
moviesQB := repo.Movie()
tagsQB := repo.Tag()
scenesCount, _ := scenesQB.Count()
scenesSize, _ := scenesQB.Size()
imageQB := models.NewImageQueryBuilder()
imageCount, _ := imageQB.Count()
imageSize, _ := imageQB.Size()
galleryQB := models.NewGalleryQueryBuilder()
galleryCount, _ := galleryQB.Count()
performersQB := models.NewPerformerQueryBuilder()
performersCount, _ := performersQB.Count()
studiosQB := models.NewStudioQueryBuilder()
studiosCount, _ := studiosQB.Count()
moviesQB := models.NewMovieQueryBuilder()
moviesCount, _ := moviesQB.Count()
tagsQB := models.NewTagQueryBuilder()
tagsCount, _ := tagsQB.Count()
return &models.StatsResultType{

ret = models.StatsResultType{
SceneCount: scenesCount,
ScenesSize: scenesSize,
ImageCount: imageCount,
@@ -141,7 +185,14 @@ func (r *queryResolver) Stats(ctx context.Context) (*models.StatsResultType, err
StudioCount: studiosCount,
MovieCount: moviesCount,
TagCount: tagsCount,
}, nil
}

return nil
}); err != nil {
return nil, err
}

return &ret, nil
}

func (r *queryResolver) Version(ctx context.Context) (*models.Version, error) {
@@ -171,20 +222,25 @@ func (r *queryResolver) Latestversion(ctx context.Context) (*models.ShortVersion

// Get scene marker tags which show up under the video.
func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([]*models.SceneMarkerTag, error) {
sceneID, _ := strconv.Atoi(scene_id)
sqb := models.NewSceneMarkerQueryBuilder()
sceneMarkers, err := sqb.FindBySceneID(sceneID, nil)
sceneID, err := strconv.Atoi(scene_id)
if err != nil {
return nil, err
}

tags := make(map[int]*models.SceneMarkerTag)
var keys []int
tqb := models.NewTagQueryBuilder()
for _, sceneMarker := range sceneMarkers {
markerPrimaryTag, err := tqb.Find(sceneMarker.PrimaryTagID, nil)
tags := make(map[int]*models.SceneMarkerTag)

if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
sceneMarkers, err := repo.SceneMarker().FindBySceneID(sceneID)
if err != nil {
return nil, err
return err
}

tqb := repo.Tag()
for _, sceneMarker := range sceneMarkers {
markerPrimaryTag, err := tqb.Find(sceneMarker.PrimaryTagID)
if err != nil {
return err
}
_, hasKey := tags[markerPrimaryTag.ID]
var sceneMarkerTag *models.SceneMarkerTag
@@ -198,6 +254,11 @@ func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([
tags[markerPrimaryTag.ID].SceneMarkers = append(tags[markerPrimaryTag.ID].SceneMarkers, sceneMarker)
}

return nil
}); err != nil {
return nil, err
}

// Sort so that primary tags that show up earlier in the video are first.
sort.Slice(keys, func(i, j int) bool {
a := tags[keys[i]]
@@ -212,13 +273,3 @@ func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([

return result, nil
}

// wasFieldIncluded returns true if the given field was included in the request.
// Slices are unmarshalled to empty slices even if the field was omitted. This
// method determines if it was omitted altogether.
func wasFieldIncluded(ctx context.Context, field string) bool {
rctx := graphql.GetRequestContext(ctx)

_, ret := rctx.Variables[field]
return ret
}
@@ -22,21 +22,25 @@ func (r *galleryResolver) Title(ctx context.Context, obj *models.Gallery) (*stri
return nil, nil
}

func (r *galleryResolver) Images(ctx context.Context, obj *models.Gallery) ([]*models.Image, error) {
qb := models.NewImageQueryBuilder()

return qb.FindByGalleryID(obj.ID)
}

func (r *galleryResolver) Cover(ctx context.Context, obj *models.Gallery) (*models.Image, error) {
qb := models.NewImageQueryBuilder()

imgs, err := qb.FindByGalleryID(obj.ID)
if err != nil {
func (r *galleryResolver) Images(ctx context.Context, obj *models.Gallery) (ret []*models.Image, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
ret, err = repo.Image().FindByGalleryID(obj.ID)
return err
}); err != nil {
return nil, err
}

var ret *models.Image
return ret, nil
}

func (r *galleryResolver) Cover(ctx context.Context, obj *models.Gallery) (ret *models.Image, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
imgs, err := repo.Image().FindByGalleryID(obj.ID)
if err != nil {
return err
}

if len(imgs) > 0 {
ret = imgs[0]
}
@@ -48,6 +52,11 @@ func (r *galleryResolver) Cover(ctx context.Context, obj *models.Gallery) (*mode
}
}

return nil
}); err != nil {
return nil, err
}

return ret, nil
}

@@ -81,35 +90,71 @@ func (r *galleryResolver) Rating(ctx context.Context, obj *models.Gallery) (*int
return nil, nil
}

func (r *galleryResolver) Scene(ctx context.Context, obj *models.Gallery) (*models.Scene, error) {
func (r *galleryResolver) Scene(ctx context.Context, obj *models.Gallery) (ret *models.Scene, err error) {
if !obj.SceneID.Valid {
return nil, nil
}

qb := models.NewSceneQueryBuilder()
return qb.Find(int(obj.SceneID.Int64))
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
ret, err = repo.Scene().Find(int(obj.SceneID.Int64))

return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *galleryResolver) Studio(ctx context.Context, obj *models.Gallery) (*models.Studio, error) {
func (r *galleryResolver) Studio(ctx context.Context, obj *models.Gallery) (ret *models.Studio, err error) {
if !obj.StudioID.Valid {
return nil, nil
}

qb := models.NewStudioQueryBuilder()
return qb.Find(int(obj.StudioID.Int64), nil)
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
ret, err = repo.Studio().Find(int(obj.StudioID.Int64))
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *galleryResolver) Tags(ctx context.Context, obj *models.Gallery) ([]*models.Tag, error) {
qb := models.NewTagQueryBuilder()
return qb.FindByGalleryID(obj.ID, nil)
func (r *galleryResolver) Tags(ctx context.Context, obj *models.Gallery) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
ret, err = repo.Tag().FindByGalleryID(obj.ID)
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *galleryResolver) Performers(ctx context.Context, obj *models.Gallery) ([]*models.Performer, error) {
qb := models.NewPerformerQueryBuilder()
return qb.FindByGalleryID(obj.ID, nil)
func (r *galleryResolver) Performers(ctx context.Context, obj *models.Gallery) (ret []*models.Performer, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
ret, err = repo.Performer().FindByGalleryID(obj.ID)
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *galleryResolver) ImageCount(ctx context.Context, obj *models.Gallery) (int, error) {
qb := models.NewImageQueryBuilder()
return qb.CountByGalleryID(obj.ID)
func (r *galleryResolver) ImageCount(ctx context.Context, obj *models.Gallery) (ret int, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
ret, err = repo.Image().CountByGalleryID(obj.ID)
return err
}); err != nil {
return 0, err
}

return ret, nil
}
@@ -43,26 +43,51 @@ func (r *imageResolver) Paths(ctx context.Context, obj *models.Image) (*models.I
}, nil
}

func (r *imageResolver) Galleries(ctx context.Context, obj *models.Image) ([]*models.Gallery, error) {
qb := models.NewGalleryQueryBuilder()
return qb.FindByImageID(obj.ID, nil)
func (r *imageResolver) Galleries(ctx context.Context, obj *models.Image) (ret []*models.Gallery, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
ret, err = repo.Gallery().FindByImageID(obj.ID)
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *imageResolver) Studio(ctx context.Context, obj *models.Image) (*models.Studio, error) {
func (r *imageResolver) Studio(ctx context.Context, obj *models.Image) (ret *models.Studio, err error) {
if !obj.StudioID.Valid {
return nil, nil
}

qb := models.NewStudioQueryBuilder()
return qb.Find(int(obj.StudioID.Int64), nil)
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Studio().Find(int(obj.StudioID.Int64))
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *imageResolver) Tags(ctx context.Context, obj *models.Image) ([]*models.Tag, error) {
qb := models.NewTagQueryBuilder()
return qb.FindByImageID(obj.ID, nil)
func (r *imageResolver) Tags(ctx context.Context, obj *models.Image) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Tag().FindByImageID(obj.ID)
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *imageResolver) Performers(ctx context.Context, obj *models.Image) ([]*models.Performer, error) {
qb := models.NewPerformerQueryBuilder()
return qb.FindByImageID(obj.ID, nil)
func (r *imageResolver) Performers(ctx context.Context, obj *models.Image) (ret []*models.Performer, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Performer().FindByImageID(obj.ID)
return err
}); err != nil {
return nil, err
}

return ret, nil
}
@@ -53,10 +53,16 @@ func (r *movieResolver) Rating(ctx context.Context, obj *models.Movie) (*int, er
return nil, nil
}

func (r *movieResolver) Studio(ctx context.Context, obj *models.Movie) (*models.Studio, error) {
qb := models.NewStudioQueryBuilder()
func (r *movieResolver) Studio(ctx context.Context, obj *models.Movie) (ret *models.Studio, err error) {
if obj.StudioID.Valid {
return qb.Find(int(obj.StudioID.Int64), nil)
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Studio().Find(int(obj.StudioID.Int64))
return err
}); err != nil {
return nil, err
}

return ret, nil
}

return nil, nil
@@ -88,8 +94,14 @@ func (r *movieResolver) BackImagePath(ctx context.Context, obj *models.Movie) (*
return &backimagePath, nil
}

func (r *movieResolver) SceneCount(ctx context.Context, obj *models.Movie) (*int, error) {
qb := models.NewSceneQueryBuilder()
res, err := qb.CountByMovieID(obj.ID)
func (r *movieResolver) SceneCount(ctx context.Context, obj *models.Movie) (ret *int, err error) {
var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
res, err = repo.Scene().CountByMovieID(obj.ID)
return err
}); err != nil {
return nil, err
}

return &res, err
}
@@ -138,18 +138,36 @@ func (r *performerResolver) ImagePath(ctx context.Context, obj *models.Performer
return &imagePath, nil
}

func (r *performerResolver) SceneCount(ctx context.Context, obj *models.Performer) (*int, error) {
qb := models.NewSceneQueryBuilder()
res, err := qb.CountByPerformerID(obj.ID)
return &res, err
func (r *performerResolver) SceneCount(ctx context.Context, obj *models.Performer) (ret *int, err error) {
var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
res, err = repo.Scene().CountByPerformerID(obj.ID)
return err
}); err != nil {
return nil, err
}

return &res, nil
}

func (r *performerResolver) Scenes(ctx context.Context, obj *models.Performer) ([]*models.Scene, error) {
qb := models.NewSceneQueryBuilder()
return qb.FindByPerformerID(obj.ID)
func (r *performerResolver) Scenes(ctx context.Context, obj *models.Performer) (ret []*models.Scene, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Scene().FindByPerformerID(obj.ID)
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *performerResolver) StashIds(ctx context.Context, obj *models.Performer) ([]*models.StashID, error) {
qb := models.NewJoinsQueryBuilder()
return qb.GetPerformerStashIDs(obj.ID)
func (r *performerResolver) StashIds(ctx context.Context, obj *models.Performer) (ret []*models.StashID, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Performer().GetStashIDs(obj.ID)
return err
}); err != nil {
return nil, err
}

return ret, nil
}
@@ -94,37 +94,59 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S
}, nil
}

func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) ([]*models.SceneMarker, error) {
qb := models.NewSceneMarkerQueryBuilder()
return qb.FindBySceneID(obj.ID, nil)
}

func (r *sceneResolver) Gallery(ctx context.Context, obj *models.Scene) (*models.Gallery, error) {
qb := models.NewGalleryQueryBuilder()
return qb.FindBySceneID(obj.ID, nil)
}

func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (*models.Studio, error) {
qb := models.NewStudioQueryBuilder()
return qb.FindBySceneID(obj.ID)
}

func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) ([]*models.SceneMovie, error) {
joinQB := models.NewJoinsQueryBuilder()
qb := models.NewMovieQueryBuilder()

sceneMovies, err := joinQB.GetSceneMovies(obj.ID, nil)
if err != nil {
func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) (ret []*models.SceneMarker, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.SceneMarker().FindBySceneID(obj.ID)
return err
}); err != nil {
return nil, err
}

var ret []*models.SceneMovie
for _, sm := range sceneMovies {
movie, err := qb.Find(sm.MovieID, nil)
if err != nil {
return ret, nil
}

func (r *sceneResolver) Gallery(ctx context.Context, obj *models.Scene) (ret *models.Gallery, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Gallery().FindBySceneID(obj.ID)
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (ret *models.Studio, err error) {
if !obj.StudioID.Valid {
return nil, nil
}

if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Studio().Find(int(obj.StudioID.Int64))
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*models.SceneMovie, err error) {
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Scene()
mqb := repo.Movie()

sceneMovies, err := qb.GetMovies(obj.ID)
if err != nil {
return err
}

for _, sm := range sceneMovies {
movie, err := mqb.Find(sm.MovieID)
if err != nil {
return err
}

sceneIdx := sm.SceneIndex
sceneMovie := &models.SceneMovie{
Movie: movie,
@@ -138,20 +160,43 @@ func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) ([]*model

ret = append(ret, sceneMovie)
}

return nil
}); err != nil {
return nil, err
}
return ret, nil
}

func (r *sceneResolver) Tags(ctx context.Context, obj *models.Scene) ([]*models.Tag, error) {
qb := models.NewTagQueryBuilder()
return qb.FindBySceneID(obj.ID, nil)
func (r *sceneResolver) Tags(ctx context.Context, obj *models.Scene) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Tag().FindBySceneID(obj.ID)
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *sceneResolver) Performers(ctx context.Context, obj *models.Scene) ([]*models.Performer, error) {
qb := models.NewPerformerQueryBuilder()
return qb.FindBySceneID(obj.ID, nil)
func (r *sceneResolver) Performers(ctx context.Context, obj *models.Scene) (ret []*models.Performer, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Performer().FindBySceneID(obj.ID)
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *sceneResolver) StashIds(ctx context.Context, obj *models.Scene) ([]*models.StashID, error) {
qb := models.NewJoinsQueryBuilder()
return qb.GetSceneStashIDs(obj.ID)
func (r *sceneResolver) StashIds(ctx context.Context, obj *models.Scene) (ret []*models.StashID, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Scene().GetStashIDs(obj.ID)
return err
}); err != nil {
return nil, err
}

return ret, nil
}
@@ -2,29 +2,47 @@ package api

import (
"context"

"github.com/stashapp/stash/pkg/api/urlbuilders"
"github.com/stashapp/stash/pkg/models"
)

func (r *sceneMarkerResolver) Scene(ctx context.Context, obj *models.SceneMarker) (*models.Scene, error) {
func (r *sceneMarkerResolver) Scene(ctx context.Context, obj *models.SceneMarker) (ret *models.Scene, err error) {
if !obj.SceneID.Valid {
panic("Invalid scene id")
}
qb := models.NewSceneQueryBuilder()

if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
sceneID := int(obj.SceneID.Int64)
scene, err := qb.Find(sceneID)
return scene, err
ret, err = repo.Scene().Find(sceneID)
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *sceneMarkerResolver) PrimaryTag(ctx context.Context, obj *models.SceneMarker) (*models.Tag, error) {
qb := models.NewTagQueryBuilder()
tag, err := qb.Find(obj.PrimaryTagID, nil)
return tag, err
func (r *sceneMarkerResolver) PrimaryTag(ctx context.Context, obj *models.SceneMarker) (ret *models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Tag().Find(obj.PrimaryTagID)
return err
}); err != nil {
return nil, err
}

return ret, err
}

func (r *sceneMarkerResolver) Tags(ctx context.Context, obj *models.SceneMarker) ([]*models.Tag, error) {
qb := models.NewTagQueryBuilder()
return qb.FindBySceneMarkerID(obj.ID, nil)
func (r *sceneMarkerResolver) Tags(ctx context.Context, obj *models.SceneMarker) (ret []*models.Tag, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Tag().FindBySceneMarkerID(obj.ID)
return err
}); err != nil {
return nil, err
}

return ret, err
}

func (r *sceneMarkerResolver) Stream(ctx context.Context, obj *models.SceneMarker) (string, error) {
@@ -25,10 +25,12 @@ func (r *studioResolver) ImagePath(ctx context.Context, obj *models.Studio) (*st
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
imagePath := urlbuilders.NewStudioURLBuilder(baseURL, obj.ID).GetStudioImageURL()

qb := models.NewStudioQueryBuilder()
hasImage, err := qb.HasStudioImage(obj.ID)

if err != nil {
var hasImage bool
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
var err error
hasImage, err = repo.Studio().HasImage(obj.ID)
return err
}); err != nil {
return nil, err
}

@@ -40,27 +42,51 @@ func (r *studioResolver) ImagePath(ctx context.Context, obj *models.Studio) (*st
return &imagePath, nil
}

func (r *studioResolver) SceneCount(ctx context.Context, obj *models.Studio) (*int, error) {
qb := models.NewSceneQueryBuilder()
res, err := qb.CountByStudioID(obj.ID)
func (r *studioResolver) SceneCount(ctx context.Context, obj *models.Studio) (ret *int, err error) {
var res int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
res, err = repo.Scene().CountByStudioID(obj.ID)
return err
}); err != nil {
return nil, err
}

return &res, err
}

func (r *studioResolver) ParentStudio(ctx context.Context, obj *models.Studio) (*models.Studio, error) {
func (r *studioResolver) ParentStudio(ctx context.Context, obj *models.Studio) (ret *models.Studio, err error) {
if !obj.ParentID.Valid {
return nil, nil
}

qb := models.NewStudioQueryBuilder()
return qb.Find(int(obj.ParentID.Int64), nil)
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Studio().Find(int(obj.ParentID.Int64))
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *studioResolver) ChildStudios(ctx context.Context, obj *models.Studio) ([]*models.Studio, error) {
qb := models.NewStudioQueryBuilder()
return qb.FindChildren(obj.ID, nil)
func (r *studioResolver) ChildStudios(ctx context.Context, obj *models.Studio) (ret []*models.Studio, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Studio().FindChildren(obj.ID)
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *studioResolver) StashIds(ctx context.Context, obj *models.Studio) ([]*models.StashID, error) {
qb := models.NewJoinsQueryBuilder()
return qb.GetStudioStashIDs(obj.ID)
func (r *studioResolver) StashIds(ctx context.Context, obj *models.Studio) (ret []*models.StashID, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Studio().GetStashIDs(obj.ID)
return err
}); err != nil {
return nil, err
}

return ret, nil
}
@@ -7,21 +7,27 @@ import (
"github.com/stashapp/stash/pkg/models"
)

func (r *tagResolver) SceneCount(ctx context.Context, obj *models.Tag) (*int, error) {
qb := models.NewSceneQueryBuilder()
if obj == nil {
return nil, nil
func (r *tagResolver) SceneCount(ctx context.Context, obj *models.Tag) (ret *int, err error) {
var count int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
count, err = repo.Scene().CountByTagID(obj.ID)
return err
}); err != nil {
return nil, err
}
count, err := qb.CountByTagID(obj.ID)

return &count, err
}

func (r *tagResolver) SceneMarkerCount(ctx context.Context, obj *models.Tag) (*int, error) {
qb := models.NewSceneMarkerQueryBuilder()
if obj == nil {
return nil, nil
func (r *tagResolver) SceneMarkerCount(ctx context.Context, obj *models.Tag) (ret *int, err error) {
var count int
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
count, err = repo.SceneMarker().CountByTagID(obj.ID)
return err
}); err != nil {
return nil, err
}
count, err := qb.CountByTagID(obj.ID)

return &count, err
}

@@ -52,7 +52,7 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co

if input.VideoFileNamingAlgorithm != config.GetVideoFileNamingAlgorithm() {
// validate changing VideoFileNamingAlgorithm
if err := manager.ValidateVideoFileNamingAlgorithm(input.VideoFileNamingAlgorithm); err != nil {
if err := manager.ValidateVideoFileNamingAlgorithm(r.txnManager, input.VideoFileNamingAlgorithm); err != nil {
return makeConfigGeneralResult(), err
}

@@ -4,11 +4,10 @@ import (
"context"
"database/sql"
"errors"
"fmt"
"strconv"
"time"

"github.com/jmoiron/sqlx"
"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
@@ -69,108 +68,102 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input models.Galle
newGallery.SceneID = sql.NullInt64{Valid: false}
}

// Start the transaction and save the performer
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewGalleryQueryBuilder()
jqb := models.NewJoinsQueryBuilder()
gallery, err := qb.Create(newGallery, tx)
// Start the transaction and save the gallery
var gallery *models.Gallery
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Gallery()
var err error
gallery, err = qb.Create(newGallery)
if err != nil {
_ = tx.Rollback()
return nil, err
return err
}

// Save the performers
var performerJoins []models.PerformersGalleries
for _, pid := range input.PerformerIds {
performerID, _ := strconv.Atoi(pid)
performerJoin := models.PerformersGalleries{
PerformerID: performerID,
GalleryID: gallery.ID,
}
performerJoins = append(performerJoins, performerJoin)
}
if err := jqb.UpdatePerformersGalleries(gallery.ID, performerJoins, tx); err != nil {
return nil, err
if err := r.updateGalleryPerformers(qb, gallery.ID, input.PerformerIds); err != nil {
return err
}

// Save the tags
var tagJoins []models.GalleriesTags
for _, tid := range input.TagIds {
tagID, _ := strconv.Atoi(tid)
tagJoin := models.GalleriesTags{
GalleryID: gallery.ID,
TagID: tagID,
}
tagJoins = append(tagJoins, tagJoin)
}
if err := jqb.UpdateGalleriesTags(gallery.ID, tagJoins, tx); err != nil {
return nil, err
if err := r.updateGalleryTags(qb, gallery.ID, input.TagIds); err != nil {
return err
}

// Commit
if err := tx.Commit(); err != nil {
return nil
}); err != nil {
return nil, err
}

return gallery, nil
}

func (r *mutationResolver) GalleryUpdate(ctx context.Context, input models.GalleryUpdateInput) (*models.Gallery, error) {
// Start the transaction and save the gallery
tx := database.DB.MustBeginTx(ctx, nil)
func (r *mutationResolver) updateGalleryPerformers(qb models.GalleryReaderWriter, galleryID int, performerIDs []string) error {
ids, err := utils.StringSliceToIntSlice(performerIDs)
if err != nil {
return err
}
return qb.UpdatePerformers(galleryID, ids)
}

func (r *mutationResolver) updateGalleryTags(qb models.GalleryReaderWriter, galleryID int, tagIDs []string) error {
ids, err := utils.StringSliceToIntSlice(tagIDs)
if err != nil {
return err
}
return qb.UpdateTags(galleryID, ids)
}

func (r *mutationResolver) GalleryUpdate(ctx context.Context, input models.GalleryUpdateInput) (ret *models.Gallery, err error) {
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
ret, err := r.galleryUpdate(input, translator, tx)

if err != nil {
_ = tx.Rollback()
return nil, err
}

// Commit
if err := tx.Commit(); err != nil {
// Start the transaction and save the gallery
if err := r.withTxn(ctx, func(repo models.Repository) error {
ret, err = r.galleryUpdate(input, translator, repo)
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *mutationResolver) GalleriesUpdate(ctx context.Context, input []*models.GalleryUpdateInput) ([]*models.Gallery, error) {
// Start the transaction and save the gallery
tx := database.DB.MustBeginTx(ctx, nil)
func (r *mutationResolver) GalleriesUpdate(ctx context.Context, input []*models.GalleryUpdateInput) (ret []*models.Gallery, err error) {
inputMaps := getUpdateInputMaps(ctx)

var ret []*models.Gallery

// Start the transaction and save the gallery
if err := r.withTxn(ctx, func(repo models.Repository) error {
for i, gallery := range input {
translator := changesetTranslator{
inputMap: inputMaps[i],
}

thisGallery, err := r.galleryUpdate(*gallery, translator, tx)
ret = append(ret, thisGallery)

thisGallery, err := r.galleryUpdate(*gallery, translator, repo)
if err != nil {
_ = tx.Rollback()
return nil, err
}
return err
}

// Commit
if err := tx.Commit(); err != nil {
ret = append(ret, thisGallery)
}

return nil
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *mutationResolver) galleryUpdate(input models.GalleryUpdateInput, translator changesetTranslator, tx *sqlx.Tx) (*models.Gallery, error) {
qb := models.NewGalleryQueryBuilder()
func (r *mutationResolver) galleryUpdate(input models.GalleryUpdateInput, translator changesetTranslator, repo models.Repository) (*models.Gallery, error) {
qb := repo.Gallery()

// Populate gallery from the input
galleryID, _ := strconv.Atoi(input.ID)
originalGallery, err := qb.Find(galleryID, nil)
galleryID, err := strconv.Atoi(input.ID)
if err != nil {
return nil, err
}

originalGallery, err := qb.Find(galleryID)
if err != nil {
return nil, err
}
@@ -209,40 +202,21 @@ func (r *mutationResolver) galleryUpdate(input models.GalleryUpdateInput, transl

// gallery scene is set from the scene only

jqb := models.NewJoinsQueryBuilder()
gallery, err := qb.UpdatePartial(updatedGallery, tx)
gallery, err := qb.UpdatePartial(updatedGallery)
if err != nil {
return nil, err
}

// Save the performers
if translator.hasField("performer_ids") {
var performerJoins []models.PerformersGalleries
for _, pid := range input.PerformerIds {
performerID, _ := strconv.Atoi(pid)
performerJoin := models.PerformersGalleries{
PerformerID: performerID,
GalleryID: galleryID,
}
performerJoins = append(performerJoins, performerJoin)
}
if err := jqb.UpdatePerformersGalleries(galleryID, performerJoins, tx); err != nil {
if err := r.updateGalleryPerformers(qb, galleryID, input.PerformerIds); err != nil {
return nil, err
}
}

// Save the tags
if translator.hasField("tag_ids") {
var tagJoins []models.GalleriesTags
for _, tid := range input.TagIds {
tagID, _ := strconv.Atoi(tid)
tagJoin := models.GalleriesTags{
GalleryID: galleryID,
TagID: tagID,
}
tagJoins = append(tagJoins, tagJoin)
}
if err := jqb.UpdateGalleriesTags(galleryID, tagJoins, tx); err != nil {
if err := r.updateGalleryTags(qb, galleryID, input.TagIds); err != nil {
return nil, err
}
}
@@ -254,11 +228,6 @@ func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input models.B
// Populate gallery from the input
updatedTime := time.Now()

// Start the transaction and save the gallery marker
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewGalleryQueryBuilder()
jqb := models.NewJoinsQueryBuilder()

translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
@@ -277,171 +246,128 @@ func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input models.B

ret := []*models.Gallery{}

// Start the transaction and save the galleries
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Gallery()

for _, galleryIDStr := range input.Ids {
galleryID, _ := strconv.Atoi(galleryIDStr)
updatedGallery.ID = galleryID

gallery, err := qb.UpdatePartial(updatedGallery, tx)
gallery, err := qb.UpdatePartial(updatedGallery)
if err != nil {
_ = tx.Rollback()
return nil, err
return err
}

ret = append(ret, gallery)

// Save the performers
if translator.hasField("performer_ids") {
performerIDs, err := adjustGalleryPerformerIDs(tx, galleryID, *input.PerformerIds)
performerIDs, err := adjustGalleryPerformerIDs(qb, galleryID, *input.PerformerIds)
if err != nil {
_ = tx.Rollback()
return nil, err
return err
}

var performerJoins []models.PerformersGalleries
for _, performerID := range performerIDs {
performerJoin := models.PerformersGalleries{
PerformerID: performerID,
GalleryID: galleryID,
}
performerJoins = append(performerJoins, performerJoin)
}
if err := jqb.UpdatePerformersGalleries(galleryID, performerJoins, tx); err != nil {
_ = tx.Rollback()
return nil, err
if err := qb.UpdatePerformers(galleryID, performerIDs); err != nil {
return err
}
}

// Save the tags
if translator.hasField("tag_ids") {
tagIDs, err := adjustGalleryTagIDs(tx, galleryID, *input.TagIds)
tagIDs, err := adjustGalleryTagIDs(qb, galleryID, *input.TagIds)
if err != nil {
_ = tx.Rollback()
return nil, err
return err
}

var tagJoins []models.GalleriesTags
for _, tagID := range tagIDs {
tagJoin := models.GalleriesTags{
GalleryID: galleryID,
TagID: tagID,
}
tagJoins = append(tagJoins, tagJoin)
}
if err := jqb.UpdateGalleriesTags(galleryID, tagJoins, tx); err != nil {
_ = tx.Rollback()
return nil, err
if err := qb.UpdateTags(galleryID, tagIDs); err != nil {
return err
}
}
}

// Commit
if err := tx.Commit(); err != nil {
return nil
}); err != nil {
return nil, err
}

return ret, nil
}

func adjustGalleryPerformerIDs(tx *sqlx.Tx, galleryID int, ids models.BulkUpdateIds) ([]int, error) {
var ret []int

jqb := models.NewJoinsQueryBuilder()
if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
// adding to the joins
performerJoins, err := jqb.GetGalleryPerformers(galleryID, tx)

func adjustGalleryPerformerIDs(qb models.GalleryReader, galleryID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetPerformerIDs(galleryID)
if err != nil {
return nil, err
}

for _, join := range performerJoins {
ret = append(ret, join.PerformerID)
}
}

return adjustIDs(ret, ids), nil
}

func adjustGalleryTagIDs(tx *sqlx.Tx, galleryID int, ids models.BulkUpdateIds) ([]int, error) {
var ret []int

jqb := models.NewJoinsQueryBuilder()
if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
// adding to the joins
tagJoins, err := jqb.GetGalleryTags(galleryID, tx)

func adjustGalleryTagIDs(qb models.GalleryReader, galleryID int, ids models.BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetTagIDs(galleryID)
if err != nil {
return nil, err
}

for _, join := range tagJoins {
ret = append(ret, join.TagID)
}
}

return adjustIDs(ret, ids), nil
}

func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.GalleryDestroyInput) (bool, error) {
qb := models.NewGalleryQueryBuilder()
iqb := models.NewImageQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)
galleryIDs, err := utils.StringSliceToIntSlice(input.Ids)
if err != nil {
return false, err
}

var galleries []*models.Gallery
var imgsToPostProcess []*models.Image
var imgsToDelete []*models.Image

for _, id := range input.Ids {
galleryID, _ := strconv.Atoi(id)
if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Gallery()
iqb := repo.Image()

for _, id := range galleryIDs {
gallery, err := qb.Find(id)
if err != nil {
return err
}

if gallery == nil {
return fmt.Errorf("gallery with id %d not found", id)
}

gallery, err := qb.Find(galleryID, tx)
if gallery != nil {
galleries = append(galleries, gallery)
}
err = qb.Destroy(galleryID, tx)

if err != nil {
tx.Rollback()
return false, err
}

// if this is a zip-based gallery, delete the images as well
// if this is a zip-based gallery, delete the images as well first
if gallery.Zip {
imgs, err := iqb.FindByGalleryID(galleryID)
imgs, err := iqb.FindByGalleryID(id)
if err != nil {
tx.Rollback()
return false, err
return err
}

for _, img := range imgs {
err = iqb.Destroy(img.ID, tx)
if err != nil {
tx.Rollback()
return false, err
if err := iqb.Destroy(img.ID); err != nil {
return err
}

imgsToPostProcess = append(imgsToPostProcess, img)
}
} else if input.DeleteFile != nil && *input.DeleteFile {
// Delete image if it is only attached to this gallery
imgs, err := iqb.FindByGalleryID(galleryID)
imgs, err := iqb.FindByGalleryID(id)
if err != nil {
tx.Rollback()
return false, err
return err
}

for _, img := range imgs {
imgGalleries, err := qb.FindByImageID(img.ID, tx)
imgGalleries, err := qb.FindByImageID(img.ID)
if err != nil {
tx.Rollback()
return false, err
return err
}

if len(imgGalleries) == 0 {
err = iqb.Destroy(img.ID, tx)
if err != nil {
tx.Rollback()
return false, err
if err := iqb.Destroy(img.ID); err != nil {
return err
}

imgsToDelete = append(imgsToDelete, img)
@@ -449,9 +375,14 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
}
}
}

if err := qb.Destroy(id); err != nil {
return err
}
}

if err := tx.Commit(); err != nil {
return nil
}); err != nil {
return false, err
}

@@ -479,34 +410,39 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
}

func (r *mutationResolver) AddGalleryImages(ctx context.Context, input models.GalleryAddInput) (bool, error) {
galleryID, _ := strconv.Atoi(input.GalleryID)
qb := models.NewGalleryQueryBuilder()
gallery, err := qb.Find(galleryID, nil)
galleryID, err := strconv.Atoi(input.GalleryID)
if err != nil {
return false, err
}

imageIDs, err := utils.StringSliceToIntSlice(input.ImageIds)
if err != nil {
return false, err
}

if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Gallery()
gallery, err := qb.Find(galleryID)
if err != nil {
return err
}

if gallery == nil {
return false, errors.New("gallery not found")
return errors.New("gallery not found")
}

if gallery.Zip {
return false, errors.New("cannot modify zip gallery images")
return errors.New("cannot modify zip gallery images")
}

jqb := models.NewJoinsQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)

for _, id := range input.ImageIds {
imageID, _ := strconv.Atoi(id)
_, err := jqb.AddImageGallery(imageID, galleryID, tx)
newIDs, err := qb.GetImageIDs(galleryID)
if err != nil {
tx.Rollback()
return false, err
}
return err
}

if err := tx.Commit(); err != nil {
newIDs = utils.IntAppendUniques(newIDs, imageIDs)
return qb.UpdateImages(galleryID, newIDs)
}); err != nil {
return false, err
}

@@ -514,34 +450,39 @@ func (r *mutationResolver) AddGalleryImages(ctx context.Context, input models.Ga
}

func (r *mutationResolver) RemoveGalleryImages(ctx context.Context, input models.GalleryRemoveInput) (bool, error) {
galleryID, _ := strconv.Atoi(input.GalleryID)
qb := models.NewGalleryQueryBuilder()
gallery, err := qb.Find(galleryID, nil)
galleryID, err := strconv.Atoi(input.GalleryID)
if err != nil {
return false, err
}

imageIDs, err := utils.StringSliceToIntSlice(input.ImageIds)
if err != nil {
return false, err
}

if err := r.withTxn(ctx, func(repo models.Repository) error {
qb := repo.Gallery()
gallery, err := qb.Find(galleryID)
if err != nil {
return err
}

if gallery == nil {
return false, errors.New("gallery not found")
return errors.New("gallery not found")
}

if gallery.Zip {
return false, errors.New("cannot modify zip gallery images")
return errors.New("cannot modify zip gallery images")
}

jqb := models.NewJoinsQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)

for _, id := range input.ImageIds {
imageID, _ := strconv.Atoi(id)
_, err := jqb.RemoveImageGallery(imageID, galleryID, tx)
newIDs, err := qb.GetImageIDs(galleryID)
if err != nil {
tx.Rollback()
return false, err
}
return err
}

if err := tx.Commit(); err != nil {
newIDs = utils.IntExclude(newIDs, imageIDs)
return qb.UpdateImages(galleryID, newIDs)
}); err != nil {
return false, err
}

@@ -2,71 +2,63 @@ package api

import (
"context"
"fmt"
"strconv"
"time"

"github.com/jmoiron/sqlx"

"github.com/stashapp/stash/pkg/database"
"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)

func (r *mutationResolver) ImageUpdate(ctx context.Context, input models.ImageUpdateInput) (*models.Image, error) {
// Start the transaction and save the image
tx := database.DB.MustBeginTx(ctx, nil)

func (r *mutationResolver) ImageUpdate(ctx context.Context, input models.ImageUpdateInput) (ret *models.Image, err error) {
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}

ret, err := r.imageUpdate(input, translator, tx)

if err != nil {
_ = tx.Rollback()
return nil, err
}

// Commit
if err := tx.Commit(); err != nil {
// Start the transaction and save the image
if err := r.withTxn(ctx, func(repo models.Repository) error {
ret, err = r.imageUpdate(input, translator, repo)
return err
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*models.ImageUpdateInput) ([]*models.Image, error) {
// Start the transaction and save the image
tx := database.DB.MustBeginTx(ctx, nil)
func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*models.ImageUpdateInput) (ret []*models.Image, err error) {
inputMaps := getUpdateInputMaps(ctx)

var ret []*models.Image

// Start the transaction and save the image
if err := r.withTxn(ctx, func(repo models.Repository) error {
for i, image := range input {
translator := changesetTranslator{
inputMap: inputMaps[i],
}

thisImage, err := r.imageUpdate(*image, translator, tx)
ret = append(ret, thisImage)

thisImage, err := r.imageUpdate(*image, translator, repo)
if err != nil {
_ = tx.Rollback()
return nil, err
}
return err
}

// Commit
if err := tx.Commit(); err != nil {
ret = append(ret, thisImage)
}

return nil
}); err != nil {
return nil, err
}

return ret, nil
}

func (r *mutationResolver) imageUpdate(input models.ImageUpdateInput, translator changesetTranslator, tx *sqlx.Tx) (*models.Image, error) {
func (r *mutationResolver) imageUpdate(input models.ImageUpdateInput, translator changesetTranslator, repo models.Repository) (*models.Image, error) {
// Populate image from the input
imageID, _ := strconv.Atoi(input.ID)
imageID, err := strconv.Atoi(input.ID)
if err != nil {
return nil, err
}

updatedTime := time.Now()
updatedImage := models.ImagePartial{
@@ -79,43 +71,28 @@ func (r *mutationResolver) imageUpdate(input models.ImageUpdateInput, translator
updatedImage.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id")
updatedImage.Organized = input.Organized

qb := models.NewImageQueryBuilder()
jqb := models.NewJoinsQueryBuilder()
image, err := qb.Update(updatedImage, tx)
qb := repo.Image()
image, err := qb.Update(updatedImage)
if err != nil {
return nil, err
}

// don't set the galleries directly. Use add/remove gallery images interface instead
if translator.hasField("gallery_ids") {
if err := r.updateImageGalleries(qb, imageID, input.GalleryIds); err != nil {
return nil, err
}
}

// Save the performers
if translator.hasField("performer_ids") {
var performerJoins []models.PerformersImages
for _, pid := range input.PerformerIds {
performerID, _ := strconv.Atoi(pid)
performerJoin := models.PerformersImages{
PerformerID: performerID,
ImageID: imageID,
}
performerJoins = append(performerJoins, performerJoin)
}
if err := jqb.UpdatePerformersImages(imageID, performerJoins, tx); err != nil {
if err := r.updateImagePerformers(qb, imageID, input.PerformerIds); err != nil {
return nil, err
}
}

// Save the tags
if translator.hasField("tag_ids") {
var tagJoins []models.ImagesTags
for _, tid := range input.TagIds {
tagID, _ := strconv.Atoi(tid)
tagJoin := models.ImagesTags{
ImageID: imageID,
TagID: tagID,
}
tagJoins = append(tagJoins, tagJoin)
}
if err := jqb.UpdateImagesTags(imageID, tagJoins, tx); err != nil {
if err := r.updateImageTags(qb, imageID, input.TagIds); err != nil {
return nil, err
}
}
@@ -123,15 +100,39 @@ func (r *mutationResolver) imageUpdate(input models.ImageUpdateInput, translator
return image, nil
}

func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input models.BulkImageUpdateInput) ([]*models.Image, error) {
func (r *mutationResolver) updateImageGalleries(qb models.ImageReaderWriter, imageID int, galleryIDs []string) error {
ids, err := utils.StringSliceToIntSlice(galleryIDs)
if err != nil {
return err
}
return qb.UpdateGalleries(imageID, ids)
}

func (r *mutationResolver) updateImagePerformers(qb models.ImageReaderWriter, imageID int, performerIDs []string) error {
ids, err := utils.StringSliceToIntSlice(performerIDs)
if err != nil {
return err
}
return qb.UpdatePerformers(imageID, ids)
}

func (r *mutationResolver) updateImageTags(qb models.ImageReaderWriter, imageID int, tagsIDs []string) error {
ids, err := utils.StringSliceToIntSlice(tagsIDs)
if err != nil {
return err
}
return qb.UpdateTags(imageID, ids)
}

func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input models.BulkImageUpdateInput) (ret []*models.Image, err error) {
imageIDs, err := utils.StringSliceToIntSlice(input.Ids)
if err != nil {
return nil, err
}

// Populate image from the input
updatedTime := time.Now()

// Start the transaction and save the image marker
tx := database.DB.MustBeginTx(ctx, nil)
qb := models.NewImageQueryBuilder()
jqb := models.NewJoinsQueryBuilder()

updatedImage := models.ImagePartial{
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
}
@@ -145,168 +146,113 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input models.Bul
|
||||
updatedImage.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id")
|
||||
updatedImage.Organized = input.Organized
|
||||
|
||||
ret := []*models.Image{}
|
||||
// Start the transaction and save the image marker
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Image()
|
||||
|
||||
for _, imageIDStr := range input.Ids {
|
||||
imageID, _ := strconv.Atoi(imageIDStr)
|
||||
for _, imageID := range imageIDs {
|
||||
updatedImage.ID = imageID
|
||||
|
||||
image, err := qb.Update(updatedImage, tx)
|
||||
image, err := qb.Update(updatedImage)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
ret = append(ret, image)
|
||||
|
||||
// Save the galleries
|
||||
if translator.hasField("gallery_ids") {
|
||||
galleryIDs, err := adjustImageGalleryIDs(tx, imageID, *input.GalleryIds)
|
||||
galleryIDs, err := adjustImageGalleryIDs(qb, imageID, *input.GalleryIds)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
var galleryJoins []models.GalleriesImages
|
||||
for _, gid := range galleryIDs {
|
||||
galleryJoin := models.GalleriesImages{
|
||||
GalleryID: gid,
|
||||
ImageID: imageID,
|
||||
}
|
||||
galleryJoins = append(galleryJoins, galleryJoin)
|
||||
}
|
||||
if err := jqb.UpdateGalleriesImages(imageID, galleryJoins, tx); err != nil {
|
||||
return nil, err
|
||||
if err := qb.UpdateGalleries(imageID, galleryIDs); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Save the performers
|
||||
if translator.hasField("performer_ids") {
|
||||
performerIDs, err := adjustImagePerformerIDs(tx, imageID, *input.PerformerIds)
|
||||
performerIDs, err := adjustImagePerformerIDs(qb, imageID, *input.PerformerIds)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
var performerJoins []models.PerformersImages
|
||||
for _, performerID := range performerIDs {
|
||||
performerJoin := models.PerformersImages{
|
||||
PerformerID: performerID,
|
||||
ImageID: imageID,
|
||||
}
|
||||
performerJoins = append(performerJoins, performerJoin)
|
||||
}
|
||||
if err := jqb.UpdatePerformersImages(imageID, performerJoins, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
if err := qb.UpdatePerformers(imageID, performerIDs); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Save the tags
|
||||
if translator.hasField("tag_ids") {
|
||||
tagIDs, err := adjustImageTagIDs(tx, imageID, *input.TagIds)
|
||||
tagIDs, err := adjustImageTagIDs(qb, imageID, *input.TagIds)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
var tagJoins []models.ImagesTags
|
||||
for _, tagID := range tagIDs {
|
||||
tagJoin := models.ImagesTags{
|
||||
ImageID: imageID,
|
||||
TagID: tagID,
|
||||
}
|
||||
tagJoins = append(tagJoins, tagJoin)
|
||||
}
|
||||
if err := jqb.UpdateImagesTags(imageID, tagJoins, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
if err := qb.UpdateTags(imageID, tagIDs); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
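Taken together, the image mutations above all reduce to the same transaction shape: convert the GraphQL string IDs up front, then perform every read and write through the repository handed to the withTxn callback, returning an error to abandon the whole unit of work. A minimal sketch of that shape, using only names that appear in this patch apart from the hypothetical exampleImageDestroy resolver itself:

func (r *mutationResolver) exampleImageDestroy(ctx context.Context, id string) (bool, error) {
	// parse the string ID before the transaction is opened
	imageID, err := strconv.Atoi(id)
	if err != nil {
		return false, err
	}

	// the callback is expected to commit when it returns nil and roll back otherwise,
	// which is why the explicit tx.Rollback() calls disappear throughout this change
	if err := r.withTxn(ctx, func(repo models.Repository) error {
		return repo.Image().Destroy(imageID)
	}); err != nil {
		return false, err
	}

	return true, nil
}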
func adjustImageGalleryIDs(tx *sqlx.Tx, imageID int, ids models.BulkUpdateIds) ([]int, error) {
|
||||
var ret []int
|
||||
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
|
||||
// adding to the joins
|
||||
galleryJoins, err := jqb.GetImageGalleries(imageID, tx)
|
||||
|
||||
func adjustImageGalleryIDs(qb models.ImageReader, imageID int, ids models.BulkUpdateIds) (ret []int, err error) {
|
||||
ret, err = qb.GetGalleryIDs(imageID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, join := range galleryJoins {
|
||||
ret = append(ret, join.GalleryID)
|
||||
}
|
||||
}
|
||||
|
||||
return adjustIDs(ret, ids), nil
|
||||
}
|
||||
|
||||
func adjustImagePerformerIDs(tx *sqlx.Tx, imageID int, ids models.BulkUpdateIds) ([]int, error) {
|
||||
var ret []int
|
||||
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
|
||||
// adding to the joins
|
||||
performerJoins, err := jqb.GetImagePerformers(imageID, tx)
|
||||
|
||||
func adjustImagePerformerIDs(qb models.ImageReader, imageID int, ids models.BulkUpdateIds) (ret []int, err error) {
|
||||
ret, err = qb.GetPerformerIDs(imageID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, join := range performerJoins {
|
||||
ret = append(ret, join.PerformerID)
|
||||
}
|
||||
}
|
||||
|
||||
return adjustIDs(ret, ids), nil
|
||||
}
|
||||
|
||||
func adjustImageTagIDs(tx *sqlx.Tx, imageID int, ids models.BulkUpdateIds) ([]int, error) {
|
||||
var ret []int
|
||||
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
|
||||
// adding to the joins
|
||||
tagJoins, err := jqb.GetImageTags(imageID, tx)
|
||||
|
||||
func adjustImageTagIDs(qb models.ImageReader, imageID int, ids models.BulkUpdateIds) (ret []int, err error) {
|
||||
ret, err = qb.GetTagIDs(imageID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, join := range tagJoins {
|
||||
ret = append(ret, join.TagID)
|
||||
}
|
||||
}
|
||||
|
||||
return adjustIDs(ret, ids), nil
|
||||
}
func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageDestroyInput) (bool, error) {
|
||||
qb := models.NewImageQueryBuilder()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
|
||||
imageID, _ := strconv.Atoi(input.ID)
|
||||
image, err := qb.Find(imageID)
|
||||
err = qb.Destroy(imageID, tx)
|
||||
|
||||
func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageDestroyInput) (ret bool, err error) {
|
||||
imageID, err := strconv.Atoi(input.ID)
|
||||
if err != nil {
|
||||
tx.Rollback()
|
||||
return false, err
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
var image *models.Image
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Image()
|
||||
|
||||
image, err = qb.Find(imageID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if image == nil {
|
||||
return fmt.Errorf("image with id %d not found", imageID)
|
||||
}
|
||||
|
||||
return qb.Destroy(imageID)
|
||||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
@@ -325,27 +271,35 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.ImagesDestroyInput) (bool, error) {
|
||||
qb := models.NewImageQueryBuilder()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
|
||||
var images []*models.Image
|
||||
for _, id := range input.Ids {
|
||||
imageID, _ := strconv.Atoi(id)
|
||||
|
||||
image, err := qb.Find(imageID)
|
||||
if image != nil {
|
||||
images = append(images, image)
|
||||
}
|
||||
err = qb.Destroy(imageID, tx)
|
||||
|
||||
func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.ImagesDestroyInput) (ret bool, err error) {
|
||||
imageIDs, err := utils.StringSliceToIntSlice(input.Ids)
|
||||
if err != nil {
|
||||
tx.Rollback()
|
||||
return false, err
|
||||
}
|
||||
|
||||
var images []*models.Image
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Image()
|
||||
|
||||
for _, imageID := range imageIDs {
|
||||
|
||||
image, err := qb.Find(imageID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
if image == nil {
|
||||
return fmt.Errorf("image with id %d not found", imageID)
|
||||
}
|
||||
|
||||
images = append(images, image)
|
||||
if err := qb.Destroy(imageID); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
@@ -366,62 +320,56 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) ImageIncrementO(ctx context.Context, id string) (int, error) {
|
||||
imageID, _ := strconv.Atoi(id)
|
||||
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewImageQueryBuilder()
|
||||
|
||||
newVal, err := qb.IncrementOCounter(imageID, tx)
|
||||
func (r *mutationResolver) ImageIncrementO(ctx context.Context, id string) (ret int, err error) {
|
||||
imageID, err := strconv.Atoi(id)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return 0, err
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Image()
|
||||
|
||||
ret, err = qb.IncrementOCounter(imageID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return newVal, nil
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) ImageDecrementO(ctx context.Context, id string) (int, error) {
|
||||
imageID, _ := strconv.Atoi(id)
|
||||
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewImageQueryBuilder()
|
||||
|
||||
newVal, err := qb.DecrementOCounter(imageID, tx)
|
||||
func (r *mutationResolver) ImageDecrementO(ctx context.Context, id string) (ret int, err error) {
|
||||
imageID, err := strconv.Atoi(id)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return 0, err
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Image()
|
||||
|
||||
ret, err = qb.DecrementOCounter(imageID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return newVal, nil
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) ImageResetO(ctx context.Context, id string) (int, error) {
|
||||
imageID, _ := strconv.Atoi(id)
|
||||
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewImageQueryBuilder()
|
||||
|
||||
newVal, err := qb.ResetOCounter(imageID, tx)
|
||||
func (r *mutationResolver) ImageResetO(ctx context.Context, id string) (ret int, err error) {
|
||||
imageID, err := strconv.Atoi(id)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return 0, err
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Image()
|
||||
|
||||
ret, err = qb.ResetOCounter(imageID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return newVal, nil
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
@@ -6,7 +6,6 @@ import (
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
@@ -85,24 +84,23 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr
|
||||
}
|
||||
|
||||
// Start the transaction and save the movie
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewMovieQueryBuilder()
|
||||
movie, err := qb.Create(newMovie, tx)
|
||||
var movie *models.Movie
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Movie()
|
||||
movie, err = qb.Create(newMovie)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if len(frontimageData) > 0 {
|
||||
if err := qb.UpdateMovieImages(movie.ID, frontimageData, backimageData, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
if err := qb.UpdateImages(movie.ID, frontimageData, backimageData); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -111,7 +109,10 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr
|
||||
|
||||
func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUpdateInput) (*models.Movie, error) {
|
||||
// Populate movie from the input
|
||||
movieID, _ := strconv.Atoi(input.ID)
|
||||
movieID, err := strconv.Atoi(input.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
updatedMovie := models.MoviePartial{
|
||||
ID: movieID,
|
||||
@@ -123,7 +124,6 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
|
||||
}
|
||||
|
||||
var frontimageData []byte
|
||||
var err error
|
||||
frontImageIncluded := translator.hasField("front_image")
|
||||
if input.FrontImage != nil {
|
||||
_, frontimageData, err = utils.ProcessBase64Image(*input.FrontImage)
|
||||
@@ -157,36 +157,33 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
|
||||
updatedMovie.URL = translator.nullString(input.URL, "url")
|
||||
|
||||
// Start the transaction and save the movie
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewMovieQueryBuilder()
|
||||
movie, err := qb.Update(updatedMovie, tx)
|
||||
var movie *models.Movie
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Movie()
|
||||
movie, err = qb.Update(updatedMovie)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if frontImageIncluded || backImageIncluded {
|
||||
if !frontImageIncluded {
|
||||
frontimageData, err = qb.GetFrontImage(updatedMovie.ID, tx)
|
||||
frontimageData, err = qb.GetFrontImage(updatedMovie.ID)
|
||||
if err != nil {
|
||||
tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
}
|
||||
if !backImageIncluded {
|
||||
backimageData, err = qb.GetBackImage(updatedMovie.ID, tx)
|
||||
backimageData, err = qb.GetBackImage(updatedMovie.ID)
|
||||
if err != nil {
|
||||
tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if len(frontimageData) == 0 && len(backimageData) == 0 {
|
||||
// both images are being nulled. Destroy them.
|
||||
if err := qb.DestroyMovieImages(movie.ID, tx); err != nil {
|
||||
tx.Rollback()
|
||||
return nil, err
|
||||
if err := qb.DestroyImages(movie.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
// HACK - if front image is null and back image is not null, then set the front image
|
||||
@@ -195,15 +192,14 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
|
||||
_, frontimageData, _ = utils.ProcessBase64Image(models.DefaultMovieImage)
|
||||
}
|
||||
|
||||
if err := qb.UpdateMovieImages(movie.ID, frontimageData, backimageData, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
if err := qb.UpdateImages(movie.ID, frontimageData, backimageData); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -211,28 +207,35 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
|
||||
}
|
||||
|
||||
func (r *mutationResolver) MovieDestroy(ctx context.Context, input models.MovieDestroyInput) (bool, error) {
|
||||
qb := models.NewMovieQueryBuilder()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
if err := qb.Destroy(input.ID, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
id, err := strconv.Atoi(input.ID)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
if err := tx.Commit(); err != nil {
|
||||
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
return repo.Movie().Destroy(id)
|
||||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) MoviesDestroy(ctx context.Context, ids []string) (bool, error) {
|
||||
qb := models.NewMovieQueryBuilder()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
for _, id := range ids {
|
||||
if err := qb.Destroy(id, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
func (r *mutationResolver) MoviesDestroy(ctx context.Context, movieIDs []string) (bool, error) {
|
||||
ids, err := utils.StringSliceToIntSlice(movieIDs)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Movie()
|
||||
for _, id := range ids {
|
||||
if err := qb.Destroy(id); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := tx.Commit(); err != nil {
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
return true, nil
|
||||
|
||||
@@ -6,7 +6,6 @@ import (
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
@@ -86,41 +85,32 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
|
||||
}
|
||||
|
||||
// Start the transaction and save the performer
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewPerformerQueryBuilder()
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
var performer *models.Performer
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Performer()
|
||||
|
||||
performer, err := qb.Create(newPerformer, tx)
|
||||
performer, err = qb.Create(newPerformer)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if len(imageData) > 0 {
|
||||
if err := qb.UpdatePerformerImage(performer.ID, imageData, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
if err := qb.UpdateImage(performer.ID, imageData); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Save the stash_ids
|
||||
if input.StashIds != nil {
|
||||
var stashIDJoins []models.StashID
|
||||
for _, stashID := range input.StashIds {
|
||||
newJoin := models.StashID{
|
||||
StashID: stashID.StashID,
|
||||
Endpoint: stashID.Endpoint,
|
||||
}
|
||||
stashIDJoins = append(stashIDJoins, newJoin)
|
||||
}
|
||||
if err := jqb.UpdatePerformerStashIDs(performer.ID, stashIDJoins, tx); err != nil {
|
||||
return nil, err
|
||||
stashIDJoins := models.StashIDsFromInput(input.StashIds)
|
||||
if err := qb.UpdateStashIDs(performer.ID, stashIDJoins); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
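One consolidation in the performer, studio and scene resolvers is easy to miss: the hand-rolled loops that copied StashID/Endpoint pairs out of the input are replaced by a single models.StashIDsFromInput call. A plausible shape for that helper, sketched purely for illustration inside the models package (the input element type name is an assumption, not taken from this patch):

func StashIDsFromInput(input []*StashIDInput) []StashID {
	var ret []StashID
	for _, id := range input {
		// copy the two fields the resolvers previously mapped by hand
		ret = append(ret, StashID{
			StashID:  id.StashID,
			Endpoint: id.Endpoint,
		})
	}
	return ret
}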
@@ -183,47 +173,38 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
|
||||
updatedPerformer.Favorite = translator.nullBool(input.Favorite, "favorite")
|
||||
|
||||
// Start the transaction and save the performer
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewPerformerQueryBuilder()
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
var performer *models.Performer
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Performer()
|
||||
|
||||
performer, err := qb.Update(updatedPerformer, tx)
|
||||
var err error
|
||||
performer, err = qb.Update(updatedPerformer)
|
||||
if err != nil {
|
||||
tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if len(imageData) > 0 {
|
||||
if err := qb.UpdatePerformerImage(performer.ID, imageData, tx); err != nil {
|
||||
tx.Rollback()
|
||||
return nil, err
|
||||
if err := qb.UpdateImage(performer.ID, imageData); err != nil {
|
||||
return err
|
||||
}
|
||||
} else if imageIncluded {
|
||||
// must be unsetting
|
||||
if err := qb.DestroyPerformerImage(performer.ID, tx); err != nil {
|
||||
tx.Rollback()
|
||||
return nil, err
|
||||
if err := qb.DestroyImage(performer.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Save the stash_ids
|
||||
if translator.hasField("stash_ids") {
|
||||
var stashIDJoins []models.StashID
|
||||
for _, stashID := range input.StashIds {
|
||||
newJoin := models.StashID{
|
||||
StashID: stashID.StashID,
|
||||
Endpoint: stashID.Endpoint,
|
||||
}
|
||||
stashIDJoins = append(stashIDJoins, newJoin)
|
||||
}
|
||||
if err := jqb.UpdatePerformerStashIDs(performerID, stashIDJoins, tx); err != nil {
|
||||
return nil, err
|
||||
stashIDJoins := models.StashIDsFromInput(input.StashIds)
|
||||
if err := qb.UpdateStashIDs(performerID, stashIDJoins); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -231,28 +212,35 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
|
||||
}
|
||||
|
||||
func (r *mutationResolver) PerformerDestroy(ctx context.Context, input models.PerformerDestroyInput) (bool, error) {
|
||||
qb := models.NewPerformerQueryBuilder()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
if err := qb.Destroy(input.ID, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
id, err := strconv.Atoi(input.ID)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
if err := tx.Commit(); err != nil {
|
||||
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
return repo.Performer().Destroy(id)
|
||||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) PerformersDestroy(ctx context.Context, ids []string) (bool, error) {
|
||||
qb := models.NewPerformerQueryBuilder()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
for _, id := range ids {
|
||||
if err := qb.Destroy(id, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
func (r *mutationResolver) PerformersDestroy(ctx context.Context, performerIDs []string) (bool, error) {
|
||||
ids, err := utils.StringSliceToIntSlice(performerIDs)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Performer()
|
||||
for _, id := range ids {
|
||||
if err := qb.Destroy(id); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := tx.Commit(); err != nil {
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
return true, nil
|
||||
|
||||
@@ -3,73 +3,64 @@ package api
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/manager/config"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
func (r *mutationResolver) SceneUpdate(ctx context.Context, input models.SceneUpdateInput) (*models.Scene, error) {
|
||||
// Start the transaction and save the scene
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
|
||||
func (r *mutationResolver) SceneUpdate(ctx context.Context, input models.SceneUpdateInput) (ret *models.Scene, err error) {
|
||||
translator := changesetTranslator{
|
||||
inputMap: getUpdateInputMap(ctx),
|
||||
}
|
||||
ret, err := r.sceneUpdate(input, translator, tx)
|
||||
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
// Start the transaction and save the scene
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
ret, err = r.sceneUpdate(input, translator, repo)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) ScenesUpdate(ctx context.Context, input []*models.SceneUpdateInput) ([]*models.Scene, error) {
|
||||
// Start the transaction and save the scene
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
|
||||
var ret []*models.Scene
|
||||
|
||||
func (r *mutationResolver) ScenesUpdate(ctx context.Context, input []*models.SceneUpdateInput) (ret []*models.Scene, err error) {
|
||||
inputMaps := getUpdateInputMaps(ctx)
|
||||
|
||||
// Start the transaction and save the scene
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
for i, scene := range input {
|
||||
translator := changesetTranslator{
|
||||
inputMap: inputMaps[i],
|
||||
}
|
||||
|
||||
thisScene, err := r.sceneUpdate(*scene, translator, tx)
|
||||
thisScene, err := r.sceneUpdate(*scene, translator, repo)
|
||||
ret = append(ret, thisScene)
|
||||
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, translator changesetTranslator, tx *sqlx.Tx) (*models.Scene, error) {
|
||||
func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, translator changesetTranslator, repo models.Repository) (*models.Scene, error) {
|
||||
// Populate scene from the input
|
||||
sceneID, _ := strconv.Atoi(input.ID)
|
||||
sceneID, err := strconv.Atoi(input.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var coverImageData []byte
|
||||
|
||||
@@ -97,24 +88,23 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, translator
|
||||
// update the cover after updating the scene
|
||||
}
|
||||
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
scene, err := qb.Update(updatedScene, tx)
|
||||
qb := repo.Scene()
|
||||
scene, err := qb.Update(updatedScene)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// update cover table
|
||||
if len(coverImageData) > 0 {
|
||||
if err := qb.UpdateSceneCover(sceneID, coverImageData, tx); err != nil {
|
||||
if err := qb.UpdateCover(sceneID, coverImageData); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Clear the existing gallery value
|
||||
if translator.hasField("gallery_id") {
|
||||
gqb := models.NewGalleryQueryBuilder()
|
||||
err = gqb.ClearGalleryId(sceneID, tx)
|
||||
gqb := repo.Gallery()
|
||||
err = gqb.ClearGalleryId(sceneID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -122,13 +112,12 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, translator
|
||||
if input.GalleryID != nil {
|
||||
// Save the gallery
|
||||
galleryID, _ := strconv.Atoi(*input.GalleryID)
|
||||
updatedGallery := models.Gallery{
|
||||
updatedGallery := models.GalleryPartial{
|
||||
ID: galleryID,
|
||||
SceneID: sql.NullInt64{Int64: int64(sceneID), Valid: true},
|
||||
UpdatedAt: models.SQLiteTimestamp{Timestamp: updatedTime},
|
||||
SceneID: &sql.NullInt64{Int64: int64(sceneID), Valid: true},
|
||||
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
|
||||
}
|
||||
gqb := models.NewGalleryQueryBuilder()
|
||||
_, err := gqb.Update(updatedGallery, tx)
|
||||
_, err := gqb.UpdatePartial(updatedGallery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -137,59 +126,29 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, translator
|
||||
|
||||
// Save the performers
|
||||
if translator.hasField("performer_ids") {
|
||||
var performerJoins []models.PerformersScenes
|
||||
for _, pid := range input.PerformerIds {
|
||||
performerID, _ := strconv.Atoi(pid)
|
||||
performerJoin := models.PerformersScenes{
|
||||
PerformerID: performerID,
|
||||
SceneID: sceneID,
|
||||
}
|
||||
performerJoins = append(performerJoins, performerJoin)
|
||||
}
|
||||
if err := jqb.UpdatePerformersScenes(sceneID, performerJoins, tx); err != nil {
|
||||
if err := r.updateScenePerformers(qb, sceneID, input.PerformerIds); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Save the movies
|
||||
if translator.hasField("movies") {
|
||||
var movieJoins []models.MoviesScenes
|
||||
|
||||
for _, movie := range input.Movies {
|
||||
|
||||
movieID, _ := strconv.Atoi(movie.MovieID)
|
||||
|
||||
movieJoin := models.MoviesScenes{
|
||||
MovieID: movieID,
|
||||
SceneID: sceneID,
|
||||
}
|
||||
|
||||
if movie.SceneIndex != nil {
|
||||
movieJoin.SceneIndex = sql.NullInt64{
|
||||
Int64: int64(*movie.SceneIndex),
|
||||
Valid: true,
|
||||
}
|
||||
}
|
||||
|
||||
movieJoins = append(movieJoins, movieJoin)
|
||||
}
|
||||
if err := jqb.UpdateMoviesScenes(sceneID, movieJoins, tx); err != nil {
|
||||
if err := r.updateSceneMovies(qb, sceneID, input.Movies); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Save the tags
|
||||
if translator.hasField("tag_ids") {
|
||||
var tagJoins []models.ScenesTags
|
||||
for _, tid := range input.TagIds {
|
||||
tagID, _ := strconv.Atoi(tid)
|
||||
tagJoin := models.ScenesTags{
|
||||
SceneID: sceneID,
|
||||
TagID: tagID,
|
||||
if err := r.updateSceneTags(qb, sceneID, input.TagIds); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tagJoins = append(tagJoins, tagJoin)
|
||||
}
|
||||
if err := jqb.UpdateScenesTags(sceneID, tagJoins, tx); err != nil {
|
||||
|
||||
// Save the stash_ids
|
||||
if translator.hasField("stash_ids") {
|
||||
stashIDJoins := models.StashIDsFromInput(input.StashIds)
|
||||
if err := qb.UpdateStashIDs(sceneID, stashIDJoins); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
@@ -202,25 +161,57 @@ func (r *mutationResolver) sceneUpdate(input models.SceneUpdateInput, translator
|
||||
}
|
||||
}
|
||||
|
||||
// Save the stash_ids
|
||||
if translator.hasField("stash_ids") {
|
||||
var stashIDJoins []models.StashID
|
||||
for _, stashID := range input.StashIds {
|
||||
newJoin := models.StashID{
|
||||
StashID: stashID.StashID,
|
||||
Endpoint: stashID.Endpoint,
|
||||
}
|
||||
stashIDJoins = append(stashIDJoins, newJoin)
|
||||
}
|
||||
if err := jqb.UpdateSceneStashIDs(sceneID, stashIDJoins, tx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return scene, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) updateScenePerformers(qb models.SceneReaderWriter, sceneID int, performerIDs []string) error {
|
||||
ids, err := utils.StringSliceToIntSlice(performerIDs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return qb.UpdatePerformers(sceneID, ids)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) updateSceneMovies(qb models.SceneReaderWriter, sceneID int, movies []*models.SceneMovieInput) error {
|
||||
var movieJoins []models.MoviesScenes
|
||||
|
||||
for _, movie := range movies {
|
||||
movieID, err := strconv.Atoi(movie.MovieID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
movieJoin := models.MoviesScenes{
|
||||
MovieID: movieID,
|
||||
}
|
||||
|
||||
if movie.SceneIndex != nil {
|
||||
movieJoin.SceneIndex = sql.NullInt64{
|
||||
Int64: int64(*movie.SceneIndex),
|
||||
Valid: true,
|
||||
}
|
||||
}
|
||||
|
||||
movieJoins = append(movieJoins, movieJoin)
|
||||
}
|
||||
|
||||
return qb.UpdateMovies(sceneID, movieJoins)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) updateSceneTags(qb models.SceneReaderWriter, sceneID int, tagsIDs []string) error {
|
||||
ids, err := utils.StringSliceToIntSlice(tagsIDs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return qb.UpdateTags(sceneID, ids)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.BulkSceneUpdateInput) ([]*models.Scene, error) {
|
||||
sceneIDs, err := utils.StringSliceToIntSlice(input.Ids)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Populate scene from the input
|
||||
updatedTime := time.Now()
|
||||
|
||||
@@ -228,11 +219,6 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.Bul
|
||||
inputMap: getUpdateInputMap(ctx),
|
||||
}
|
||||
|
||||
// Start the transaction and save the scene marker
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
|
||||
updatedScene := models.ScenePartial{
|
||||
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
|
||||
}
|
||||
@@ -247,84 +233,62 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.Bul
|
||||
|
||||
ret := []*models.Scene{}
|
||||
|
||||
for _, sceneIDStr := range input.Ids {
|
||||
sceneID, _ := strconv.Atoi(sceneIDStr)
|
||||
// Start the transaction and save the scene marker
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Scene()
|
||||
gqb := repo.Gallery()
|
||||
|
||||
for _, sceneID := range sceneIDs {
|
||||
updatedScene.ID = sceneID
|
||||
|
||||
scene, err := qb.Update(updatedScene, tx)
|
||||
scene, err := qb.Update(updatedScene)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
ret = append(ret, scene)
|
||||
|
||||
if translator.hasField("gallery_id") {
|
||||
// Save the gallery
|
||||
var galleryID int
|
||||
if input.GalleryID != nil {
|
||||
galleryID, _ = strconv.Atoi(*input.GalleryID)
|
||||
}
|
||||
updatedGallery := models.Gallery{
|
||||
galleryID, _ := strconv.Atoi(*input.GalleryID)
|
||||
updatedGallery := models.GalleryPartial{
|
||||
ID: galleryID,
|
||||
SceneID: sql.NullInt64{Int64: int64(sceneID), Valid: true},
|
||||
UpdatedAt: models.SQLiteTimestamp{Timestamp: updatedTime},
|
||||
SceneID: &sql.NullInt64{Int64: int64(sceneID), Valid: true},
|
||||
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
|
||||
}
|
||||
gqb := models.NewGalleryQueryBuilder()
|
||||
_, err := gqb.Update(updatedGallery, tx)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
|
||||
if _, err := gqb.UpdatePartial(updatedGallery); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Save the performers
|
||||
if translator.hasField("performer_ids") {
|
||||
performerIDs, err := adjustScenePerformerIDs(tx, sceneID, *input.PerformerIds)
|
||||
performerIDs, err := adjustScenePerformerIDs(qb, sceneID, *input.PerformerIds)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
var performerJoins []models.PerformersScenes
|
||||
for _, performerID := range performerIDs {
|
||||
performerJoin := models.PerformersScenes{
|
||||
PerformerID: performerID,
|
||||
SceneID: sceneID,
|
||||
}
|
||||
performerJoins = append(performerJoins, performerJoin)
|
||||
}
|
||||
if err := jqb.UpdatePerformersScenes(sceneID, performerJoins, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
if err := qb.UpdatePerformers(sceneID, performerIDs); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Save the tags
|
||||
if translator.hasField("tag_ids") {
|
||||
tagIDs, err := adjustSceneTagIDs(tx, sceneID, *input.TagIds)
|
||||
tagIDs, err := adjustSceneTagIDs(qb, sceneID, *input.TagIds)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
var tagJoins []models.ScenesTags
|
||||
for _, tagID := range tagIDs {
|
||||
tagJoin := models.ScenesTags{
|
||||
SceneID: sceneID,
|
||||
TagID: tagID,
|
||||
}
|
||||
tagJoins = append(tagJoins, tagJoin)
|
||||
}
|
||||
if err := jqb.UpdateScenesTags(sceneID, tagJoins, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
if err := qb.UpdateTags(sceneID, tagIDs); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -332,6 +296,17 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.Bul
|
||||
}
|
||||
|
||||
func adjustIDs(existingIDs []int, updateIDs models.BulkUpdateIds) []int {
// if we are setting the ids, just return the ids
if updateIDs.Mode == models.BulkUpdateIDModeSet {
existingIDs = []int{}
for _, idStr := range updateIDs.Ids {
id, _ := strconv.Atoi(idStr)
existingIDs = append(existingIDs, id)
}

return existingIDs
}

for _, idStr := range updateIDs.Ids {
id, _ := strconv.Atoi(idStr)

@@ -357,63 +332,53 @@ func adjustIDs(existingIDs []int, updateIDs models.BulkUpdateIds) []int {
return existingIDs
}
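adjustIDs is what gives the bulk updates their three modes: in SET mode the list fetched from the database is discarded and the request IDs are parsed and returned as-is, while ADD and REMOVE merge the request against the IDs the adjust* helpers read beforehand. The middle of the function is elided by the hunk above, so the merge behaviour is assumed here; under that assumption the outcomes would be (values purely illustrative):

existing := []int{1, 2, 3}
requested := models.BulkUpdateIds{Ids: []string{"3", "4"}, Mode: models.BulkUpdateIDModeSet}

// SET ignores the existing list entirely: prints [3 4]
fmt.Println(adjustIDs(existing, requested))

// assuming ADD appends IDs not already present: prints [1 2 3 4]
requested.Mode = models.BulkUpdateIDModeAdd
fmt.Println(adjustIDs(existing, requested))

// assuming REMOVE filters matching IDs out: prints [1 2]
requested.Mode = models.BulkUpdateIDModeRemove
fmt.Println(adjustIDs(existing, requested))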
func adjustScenePerformerIDs(tx *sqlx.Tx, sceneID int, ids models.BulkUpdateIds) ([]int, error) {
|
||||
var ret []int
|
||||
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
|
||||
// adding to the joins
|
||||
performerJoins, err := jqb.GetScenePerformers(sceneID, tx)
|
||||
|
||||
func adjustScenePerformerIDs(qb models.SceneReader, sceneID int, ids models.BulkUpdateIds) (ret []int, err error) {
|
||||
ret, err = qb.GetPerformerIDs(sceneID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, join := range performerJoins {
|
||||
ret = append(ret, join.PerformerID)
|
||||
}
|
||||
}
|
||||
|
||||
return adjustIDs(ret, ids), nil
|
||||
}
|
||||
|
||||
func adjustSceneTagIDs(tx *sqlx.Tx, sceneID int, ids models.BulkUpdateIds) ([]int, error) {
|
||||
var ret []int
|
||||
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove {
|
||||
// adding to the joins
|
||||
tagJoins, err := jqb.GetSceneTags(sceneID, tx)
|
||||
|
||||
func adjustSceneTagIDs(qb models.SceneReader, sceneID int, ids models.BulkUpdateIds) (ret []int, err error) {
|
||||
ret, err = qb.GetTagIDs(sceneID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, join := range tagJoins {
|
||||
ret = append(ret, join.TagID)
|
||||
}
|
||||
}
|
||||
|
||||
return adjustIDs(ret, ids), nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneDestroyInput) (bool, error) {
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
|
||||
sceneID, _ := strconv.Atoi(input.ID)
|
||||
scene, err := qb.Find(sceneID)
|
||||
err = manager.DestroyScene(sceneID, tx)
|
||||
|
||||
sceneID, err := strconv.Atoi(input.ID)
|
||||
if err != nil {
|
||||
tx.Rollback()
|
||||
return false, err
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
var scene *models.Scene
|
||||
var postCommitFunc func()
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Scene()
|
||||
var err error
|
||||
scene, err = qb.Find(sceneID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if scene == nil {
|
||||
return fmt.Errorf("scene with id %d not found", sceneID)
|
||||
}
|
||||
|
||||
postCommitFunc, err = manager.DestroyScene(scene, repo)
|
||||
return err
|
||||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
// perform the post-commit actions
|
||||
postCommitFunc()
|
||||
|
||||
// if delete generated is true, then delete the generated files
|
||||
// for the scene
|
||||
if input.DeleteGenerated != nil && *input.DeleteGenerated {
|
||||
@@ -430,10 +395,11 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
|
||||
}
|
||||
|
||||
func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.ScenesDestroyInput) (bool, error) {
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
|
||||
var scenes []*models.Scene
|
||||
var postCommitFuncs []func()
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Scene()
|
||||
|
||||
for _, id := range input.Ids {
|
||||
sceneID, _ := strconv.Atoi(id)
|
||||
|
||||
@@ -441,18 +407,23 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
|
||||
if scene != nil {
|
||||
scenes = append(scenes, scene)
|
||||
}
|
||||
err = manager.DestroyScene(sceneID, tx)
|
||||
|
||||
f, err := manager.DestroyScene(scene, repo)
|
||||
if err != nil {
|
||||
tx.Rollback()
|
||||
return false, err
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
postCommitFuncs = append(postCommitFuncs, f)
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
for _, f := range postCommitFuncs {
|
||||
f()
|
||||
}
|
||||
|
||||
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
|
||||
for _, scene := range scenes {
|
||||
// if delete generated is true, then delete the generated files
|
||||
@@ -472,8 +443,16 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
|
||||
}
|
||||
|
||||
func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input models.SceneMarkerCreateInput) (*models.SceneMarker, error) {
|
||||
primaryTagID, _ := strconv.Atoi(input.PrimaryTagID)
|
||||
sceneID, _ := strconv.Atoi(input.SceneID)
|
||||
primaryTagID, err := strconv.Atoi(input.PrimaryTagID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
sceneID, err := strconv.Atoi(input.SceneID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
currentTime := time.Now()
|
||||
newSceneMarker := models.SceneMarker{
|
||||
Title: input.Title,
|
||||
@@ -484,14 +463,31 @@ func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input models.S
|
||||
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
||||
}
|
||||
|
||||
return changeMarker(ctx, create, newSceneMarker, input.TagIds)
|
||||
tagIDs, err := utils.StringSliceToIntSlice(input.TagIds)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return r.changeMarker(ctx, create, newSceneMarker, tagIDs)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input models.SceneMarkerUpdateInput) (*models.SceneMarker, error) {
|
||||
// Populate scene marker from the input
|
||||
sceneMarkerID, _ := strconv.Atoi(input.ID)
|
||||
sceneID, _ := strconv.Atoi(input.SceneID)
|
||||
primaryTagID, _ := strconv.Atoi(input.PrimaryTagID)
|
||||
sceneMarkerID, err := strconv.Atoi(input.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
primaryTagID, err := strconv.Atoi(input.PrimaryTagID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
sceneID, err := strconv.Atoi(input.SceneID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
updatedSceneMarker := models.SceneMarker{
|
||||
ID: sceneMarkerID,
|
||||
Title: input.Title,
|
||||
@@ -501,168 +497,151 @@ func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input models.S
|
||||
UpdatedAt: models.SQLiteTimestamp{Timestamp: time.Now()},
|
||||
}
|
||||
|
||||
return changeMarker(ctx, update, updatedSceneMarker, input.TagIds)
|
||||
tagIDs, err := utils.StringSliceToIntSlice(input.TagIds)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return r.changeMarker(ctx, update, updatedSceneMarker, tagIDs)
|
||||
}
|
||||
|
||||
func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (bool, error) {
|
||||
qb := models.NewSceneMarkerQueryBuilder()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
|
||||
markerID, _ := strconv.Atoi(id)
|
||||
marker, err := qb.Find(markerID)
|
||||
|
||||
markerID, err := strconv.Atoi(id)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if err := qb.Destroy(id, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return false, err
|
||||
var postCommitFunc func()
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.SceneMarker()
|
||||
sqb := repo.Scene()
|
||||
|
||||
marker, err := qb.Find(markerID)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := tx.Commit(); err != nil {
|
||||
|
||||
if marker == nil {
|
||||
return fmt.Errorf("scene marker with id %d not found", markerID)
|
||||
}
|
||||
|
||||
scene, err := sqb.Find(int(marker.SceneID.Int64))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
postCommitFunc, err = manager.DestroySceneMarker(scene, marker, qb)
|
||||
return err
|
||||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
// delete the preview for the marker
|
||||
sqb := models.NewSceneQueryBuilder()
|
||||
scene, _ := sqb.Find(int(marker.SceneID.Int64))
|
||||
|
||||
if scene != nil {
|
||||
seconds := int(marker.Seconds)
|
||||
manager.DeleteSceneMarkerFiles(scene, seconds, config.GetVideoFileNamingAlgorithm())
|
||||
}
|
||||
postCommitFunc()
|
||||
|
||||
return true, nil
|
||||
}
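The destroy resolvers above also split database work from filesystem work: manager.DestroyScene and manager.DestroySceneMarker now hand back a function, and the resolver only calls it after withTxn has returned without error, so any on-disk cleanup tied to the deletion is skipped if the transaction rolls back. The generic shape is roughly the following, where destroyEntity stands in for whichever manager call is being made:

var postCommitFunc func()
if err := r.withTxn(ctx, func(repo models.Repository) error {
	// all database work happens inside the transaction callback
	var err error
	postCommitFunc, err = destroyEntity(repo)
	return err
}); err != nil {
	return false, err
}

// filesystem cleanup runs only once the transaction has committed
postCommitFunc()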
func changeMarker(ctx context.Context, changeType int, changedMarker models.SceneMarker, tagIds []string) (*models.SceneMarker, error) {
|
||||
// Start the transaction and save the scene marker
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewSceneMarkerQueryBuilder()
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
|
||||
func (r *mutationResolver) changeMarker(ctx context.Context, changeType int, changedMarker models.SceneMarker, tagIDs []int) (*models.SceneMarker, error) {
|
||||
var existingMarker *models.SceneMarker
|
||||
var sceneMarker *models.SceneMarker
|
||||
var scene *models.Scene
|
||||
|
||||
// Start the transaction and save the scene marker
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.SceneMarker()
|
||||
sqb := repo.Scene()
|
||||
|
||||
var err error
|
||||
switch changeType {
|
||||
case create:
|
||||
sceneMarker, err = qb.Create(changedMarker, tx)
|
||||
sceneMarker, err = qb.Create(changedMarker)
|
||||
case update:
|
||||
// check to see if timestamp was changed
|
||||
existingMarker, err = qb.Find(changedMarker.ID)
|
||||
if err == nil {
|
||||
sceneMarker, err = qb.Update(changedMarker, tx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
sceneMarker, err = qb.Update(changedMarker)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
scene, err = sqb.Find(int(existingMarker.SceneID.Int64))
|
||||
}
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
// Save the marker tags
|
||||
var markerTagJoins []models.SceneMarkersTags
|
||||
for _, tid := range tagIds {
|
||||
tagID, _ := strconv.Atoi(tid)
|
||||
if tagID == changedMarker.PrimaryTagID {
|
||||
continue // If this tag is the primary tag, then let's not add it.
|
||||
}
|
||||
markerTag := models.SceneMarkersTags{
|
||||
SceneMarkerID: sceneMarker.ID,
|
||||
TagID: tagID,
|
||||
}
|
||||
markerTagJoins = append(markerTagJoins, markerTag)
|
||||
}
|
||||
switch changeType {
|
||||
case create:
|
||||
if err := jqb.CreateSceneMarkersTags(markerTagJoins, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
}
|
||||
case update:
|
||||
if err := jqb.UpdateSceneMarkersTags(changedMarker.ID, markerTagJoins, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
// If this tag is the primary tag, then let's not add it.
|
||||
tagIDs = utils.IntExclude(tagIDs, []int{changedMarker.PrimaryTagID})
|
||||
return qb.UpdateTags(sceneMarker.ID, tagIDs)
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// remove the marker preview if the timestamp was changed
|
||||
if existingMarker != nil && existingMarker.Seconds != changedMarker.Seconds {
|
||||
sqb := models.NewSceneQueryBuilder()
|
||||
|
||||
scene, _ := sqb.Find(int(existingMarker.SceneID.Int64))
|
||||
|
||||
if scene != nil {
|
||||
if scene != nil && existingMarker != nil && existingMarker.Seconds != changedMarker.Seconds {
|
||||
seconds := int(existingMarker.Seconds)
|
||||
manager.DeleteSceneMarkerFiles(scene, seconds, config.GetVideoFileNamingAlgorithm())
|
||||
}
|
||||
}
|
||||
|
||||
return sceneMarker, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) SceneIncrementO(ctx context.Context, id string) (int, error) {
|
||||
sceneID, _ := strconv.Atoi(id)
|
||||
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
|
||||
newVal, err := qb.IncrementOCounter(sceneID, tx)
|
||||
func (r *mutationResolver) SceneIncrementO(ctx context.Context, id string) (ret int, err error) {
|
||||
sceneID, err := strconv.Atoi(id)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return 0, err
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Scene()
|
||||
|
||||
ret, err = qb.IncrementOCounter(sceneID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return newVal, nil
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) SceneDecrementO(ctx context.Context, id string) (int, error) {
|
||||
sceneID, _ := strconv.Atoi(id)
|
||||
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
|
||||
newVal, err := qb.DecrementOCounter(sceneID, tx)
|
||||
func (r *mutationResolver) SceneDecrementO(ctx context.Context, id string) (ret int, err error) {
|
||||
sceneID, err := strconv.Atoi(id)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return 0, err
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Scene()
|
||||
|
||||
ret, err = qb.DecrementOCounter(sceneID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return newVal, nil
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) SceneResetO(ctx context.Context, id string) (int, error) {
|
||||
sceneID, _ := strconv.Atoi(id)
|
||||
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
|
||||
newVal, err := qb.ResetOCounter(sceneID, tx)
|
||||
func (r *mutationResolver) SceneResetO(ctx context.Context, id string) (ret int, err error) {
|
||||
sceneID, err := strconv.Atoi(id)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return 0, err
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Scene()
|
||||
|
||||
ret, err = qb.ResetOCounter(sceneID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return newVal, nil
|
||||
return ret, nil
|
||||
}
func (r *mutationResolver) SceneGenerateScreenshot(ctx context.Context, id string, at *float64) (string, error) {
|
||||
|
||||
@@ -16,7 +16,7 @@ func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input
|
||||
return false, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex)
|
||||
}
|
||||
|
||||
client := stashbox.NewClient(*boxes[input.StashBoxIndex])
|
||||
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager)
|
||||
|
||||
return client.SubmitStashBoxFingerprints(input.SceneIds, boxes[input.StashBoxIndex].Endpoint)
|
||||
}
|
||||
|
||||
@@ -6,7 +6,6 @@ import (
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
@@ -44,41 +43,33 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
|
||||
}
|
||||
|
||||
// Start the transaction and save the studio
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewStudioQueryBuilder()
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
var studio *models.Studio
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Studio()
|
||||
|
||||
studio, err := qb.Create(newStudio, tx)
|
||||
var err error
|
||||
studio, err = qb.Create(newStudio)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if len(imageData) > 0 {
|
||||
if err := qb.UpdateStudioImage(studio.ID, imageData, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
if err := qb.UpdateImage(studio.ID, imageData); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Save the stash_ids
|
||||
if input.StashIds != nil {
|
||||
var stashIDJoins []models.StashID
|
||||
for _, stashID := range input.StashIds {
|
||||
newJoin := models.StashID{
|
||||
StashID: stashID.StashID,
|
||||
Endpoint: stashID.Endpoint,
|
||||
}
|
||||
stashIDJoins = append(stashIDJoins, newJoin)
|
||||
}
|
||||
if err := jqb.UpdateStudioStashIDs(studio.ID, stashIDJoins, tx); err != nil {
|
||||
return nil, err
|
||||
stashIDJoins := models.StashIDsFromInput(input.StashIds)
|
||||
if err := qb.UpdateStashIDs(studio.ID, stashIDJoins); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -87,7 +78,10 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
|
||||
|
||||
func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.StudioUpdateInput) (*models.Studio, error) {
|
||||
// Populate studio from the input
|
||||
studioID, _ := strconv.Atoi(input.ID)
|
||||
studioID, err := strconv.Atoi(input.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
translator := changesetTranslator{
|
||||
inputMap: getUpdateInputMap(ctx),
|
||||
@@ -118,52 +112,42 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
|
||||
updatedStudio.ParentID = translator.nullInt64FromString(input.ParentID, "parent_id")
|
||||
|
||||
// Start the transaction and save the studio
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewStudioQueryBuilder()
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
var studio *models.Studio
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Studio()
|
||||
|
||||
if err := manager.ValidateModifyStudio(updatedStudio, tx); err != nil {
|
||||
tx.Rollback()
|
||||
return nil, err
|
||||
if err := manager.ValidateModifyStudio(updatedStudio, qb); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
studio, err := qb.Update(updatedStudio, tx)
|
||||
var err error
|
||||
studio, err = qb.Update(updatedStudio)
|
||||
if err != nil {
|
||||
tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if len(imageData) > 0 {
|
||||
if err := qb.UpdateStudioImage(studio.ID, imageData, tx); err != nil {
|
||||
tx.Rollback()
|
||||
return nil, err
|
||||
if err := qb.UpdateImage(studio.ID, imageData); err != nil {
|
||||
return err
|
||||
}
|
||||
} else if imageIncluded {
|
||||
// must be unsetting
|
||||
if err := qb.DestroyStudioImage(studio.ID, tx); err != nil {
|
||||
tx.Rollback()
|
||||
return nil, err
|
||||
if err := qb.DestroyImage(studio.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Save the stash_ids
|
||||
if translator.hasField("stash_ids") {
|
||||
var stashIDJoins []models.StashID
|
||||
for _, stashID := range input.StashIds {
|
||||
newJoin := models.StashID{
|
||||
StashID: stashID.StashID,
|
||||
Endpoint: stashID.Endpoint,
|
||||
}
|
||||
stashIDJoins = append(stashIDJoins, newJoin)
|
||||
}
|
||||
if err := jqb.UpdateStudioStashIDs(studioID, stashIDJoins, tx); err != nil {
|
||||
return nil, err
|
||||
stashIDJoins := models.StashIDsFromInput(input.StashIds)
|
||||
if err := qb.UpdateStashIDs(studioID, stashIDJoins); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -171,28 +155,35 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
|
||||
}
|
||||
|
||||
func (r *mutationResolver) StudioDestroy(ctx context.Context, input models.StudioDestroyInput) (bool, error) {
|
||||
qb := models.NewStudioQueryBuilder()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
if err := qb.Destroy(input.ID, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
id, err := strconv.Atoi(input.ID)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
if err := tx.Commit(); err != nil {
|
||||
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
return repo.Studio().Destroy(id)
|
||||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) StudiosDestroy(ctx context.Context, ids []string) (bool, error) {
|
||||
qb := models.NewStudioQueryBuilder()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
for _, id := range ids {
|
||||
if err := qb.Destroy(id, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
func (r *mutationResolver) StudiosDestroy(ctx context.Context, studioIDs []string) (bool, error) {
|
||||
ids, err := utils.StringSliceToIntSlice(studioIDs)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Studio()
|
||||
for _, id := range ids {
|
||||
if err := qb.Destroy(id); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := tx.Commit(); err != nil {
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
return true, nil
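StudiosDestroy (like TagsDestroy further down) now converts the incoming string IDs up front with utils.StringSliceToIntSlice, which reports conversion failures instead of silently producing zero IDs. The helper itself is not shown in this diff; a minimal sketch of the expected behaviour, living in package utils and importing strconv:

func StringSliceToIntSlice(ss []string) ([]int, error) {
	// convert every element, failing fast on the first non-numeric value
	ret := make([]int, len(ss))
	for i, v := range ss {
		var err error
		ret[i], err = strconv.Atoi(v)
		if err != nil {
			return nil, err
		}
	}
	return ret, nil
}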
|
||||
|
||||
@@ -6,7 +6,6 @@ import (
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
@@ -33,31 +32,29 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreate
|
||||
}
|
||||
|
||||
// Start the transaction and save the tag
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewTagQueryBuilder()
|
||||
var tag *models.Tag
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Tag()
|
||||
|
||||
// ensure name is unique
|
||||
if err := manager.EnsureTagNameUnique(newTag, tx); err != nil {
|
||||
tx.Rollback()
|
||||
return nil, err
|
||||
if err := manager.EnsureTagNameUnique(newTag, qb); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tag, err := qb.Create(newTag, tx)
|
||||
tag, err = qb.Create(newTag)
|
||||
if err != nil {
|
||||
tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if len(imageData) > 0 {
|
||||
if err := qb.UpdateTagImage(tag.ID, imageData, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
if err := qb.UpdateImage(tag.ID, imageData); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -66,7 +63,11 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreate
|
||||
|
||||
func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdateInput) (*models.Tag, error) {
|
||||
// Populate tag from the input
|
||||
tagID, _ := strconv.Atoi(input.ID)
|
||||
tagID, err := strconv.Atoi(input.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
updatedTag := models.Tag{
|
||||
ID: tagID,
|
||||
Name: input.Name,
|
||||
@@ -74,7 +75,6 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
|
||||
}
|
||||
|
||||
var imageData []byte
|
||||
var err error
|
||||
|
||||
translator := changesetTranslator{
|
||||
inputMap: getUpdateInputMap(ctx),
|
||||
@@ -90,50 +90,45 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
|
||||
}
|
||||
|
||||
// Start the transaction and save the tag
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewTagQueryBuilder()
|
||||
var tag *models.Tag
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Tag()
|
||||
|
||||
// ensure name is unique
|
||||
existing, err := qb.Find(tagID, tx)
|
||||
existing, err := qb.Find(tagID)
|
||||
if err != nil {
|
||||
tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
if existing == nil {
|
||||
tx.Rollback()
|
||||
return nil, fmt.Errorf("Tag with ID %d not found", tagID)
|
||||
return fmt.Errorf("Tag with ID %d not found", tagID)
|
||||
}
|
||||
|
||||
if existing.Name != updatedTag.Name {
|
||||
if err := manager.EnsureTagNameUnique(updatedTag, tx); err != nil {
|
||||
tx.Rollback()
|
||||
return nil, err
|
||||
if err := manager.EnsureTagNameUnique(updatedTag, qb); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
tag, err := qb.Update(updatedTag, tx)
|
||||
tag, err = qb.Update(updatedTag)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
// update image table
|
||||
if len(imageData) > 0 {
|
||||
if err := qb.UpdateTagImage(tag.ID, imageData, tx); err != nil {
|
||||
tx.Rollback()
|
||||
return nil, err
|
||||
if err := qb.UpdateImage(tag.ID, imageData); err != nil {
|
||||
return err
|
||||
}
|
||||
} else if imageIncluded {
|
||||
// must be unsetting
|
||||
if err := qb.DestroyTagImage(tag.ID, tx); err != nil {
|
||||
tx.Rollback()
|
||||
return nil, err
|
||||
if err := qb.DestroyImage(tag.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -141,29 +136,35 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
|
||||
}
|
||||
|
||||
func (r *mutationResolver) TagDestroy(ctx context.Context, input models.TagDestroyInput) (bool, error) {
|
||||
qb := models.NewTagQueryBuilder()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
if err := qb.Destroy(input.ID, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
tagID, err := strconv.Atoi(input.ID)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
if err := tx.Commit(); err != nil {
|
||||
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
return repo.Tag().Destroy(tagID)
|
||||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (r *mutationResolver) TagsDestroy(ctx context.Context, ids []string) (bool, error) {
|
||||
qb := models.NewTagQueryBuilder()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
func (r *mutationResolver) TagsDestroy(ctx context.Context, tagIDs []string) (bool, error) {
|
||||
ids, err := utils.StringSliceToIntSlice(tagIDs)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if err := r.withTxn(ctx, func(repo models.Repository) error {
|
||||
qb := repo.Tag()
|
||||
for _, id := range ids {
|
||||
if err := qb.Destroy(id, tx); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return false, err
|
||||
if err := qb.Destroy(id); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if err := tx.Commit(); err != nil {
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return false, err
|
||||
}
|
||||
return true, nil
|
||||
|
||||
pkg/api/resolver_mutation_tag_test.go (new file, 70 lines)
@@ -0,0 +1,70 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/mocks"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
// TODO - move this into a common area
|
||||
func newResolver() *Resolver {
|
||||
return &Resolver{
|
||||
txnManager: mocks.NewTransactionManager(),
|
||||
}
|
||||
}
|
||||
|
||||
const tagName = "tagName"
|
||||
const errTagName = "errTagName"
|
||||
|
||||
const existingTagID = 1
|
||||
const existingTagName = "existingTagName"
|
||||
const newTagID = 2
|
||||
|
||||
func TestTagCreate(t *testing.T) {
|
||||
r := newResolver()
|
||||
|
||||
tagRW := r.txnManager.(*mocks.TransactionManager).Tag().(*mocks.TagReaderWriter)
|
||||
tagRW.On("FindByName", existingTagName, true).Return(&models.Tag{
|
||||
ID: existingTagID,
|
||||
Name: existingTagName,
|
||||
}, nil).Once()
|
||||
tagRW.On("FindByName", errTagName, true).Return(nil, nil).Once()
|
||||
|
||||
expectedErr := errors.New("TagCreate error")
|
||||
tagRW.On("Create", mock.AnythingOfType("models.Tag")).Return(nil, expectedErr)
|
||||
|
||||
_, err := r.Mutation().TagCreate(context.TODO(), models.TagCreateInput{
|
||||
Name: existingTagName,
|
||||
})
|
||||
|
||||
assert.NotNil(t, err)
|
||||
|
||||
_, err = r.Mutation().TagCreate(context.TODO(), models.TagCreateInput{
|
||||
Name: errTagName,
|
||||
})
|
||||
|
||||
assert.Equal(t, expectedErr, err)
|
||||
tagRW.AssertExpectations(t)
|
||||
|
||||
r = newResolver()
|
||||
tagRW = r.txnManager.(*mocks.TransactionManager).Tag().(*mocks.TagReaderWriter)
|
||||
|
||||
tagRW.On("FindByName", tagName, true).Return(nil, nil).Once()
|
||||
tagRW.On("Create", mock.AnythingOfType("models.Tag")).Return(&models.Tag{
|
||||
ID: newTagID,
|
||||
Name: tagName,
|
||||
}, nil)
|
||||
|
||||
tag, err := r.Mutation().TagCreate(context.TODO(), models.TagCreateInput{
|
||||
Name: tagName,
|
||||
})
|
||||
|
||||
assert.Nil(t, err)
|
||||
assert.NotNil(t, tag)
|
||||
}
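The same resolver and mock wiring extends naturally to the other tag mutations. A destroy test might look roughly like the following; this is a sketch, and it assumes the generated TagReaderWriter mock exposes a Destroy expectation and that strconv is imported:

func TestTagDestroy(t *testing.T) {
	r := newResolver()
	tagRW := r.txnManager.(*mocks.TransactionManager).Tag().(*mocks.TagReaderWriter)

	// expect the resolver to destroy the tag inside the transaction
	tagRW.On("Destroy", existingTagID).Return(nil).Once()

	ok, err := r.Mutation().TagDestroy(context.TODO(), models.TagDestroyInput{
		ID: strconv.Itoa(existingTagID),
	})

	assert.Nil(t, err)
	assert.True(t, ok)
	tagRW.AssertExpectations(t)
}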
|
||||
@@ -7,17 +7,37 @@ import (
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *queryResolver) FindGallery(ctx context.Context, id string) (*models.Gallery, error) {
|
||||
qb := models.NewGalleryQueryBuilder()
|
||||
idInt, _ := strconv.Atoi(id)
|
||||
return qb.Find(idInt, nil)
|
||||
func (r *queryResolver) FindGallery(ctx context.Context, id string) (ret *models.Gallery, err error) {
|
||||
idInt, err := strconv.Atoi(id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Gallery().Find(idInt)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindGalleries(ctx context.Context, galleryFilter *models.GalleryFilterType, filter *models.FindFilterType) (*models.FindGalleriesResultType, error) {
|
||||
qb := models.NewGalleryQueryBuilder()
|
||||
galleries, total := qb.Query(galleryFilter, filter)
|
||||
return &models.FindGalleriesResultType{
|
||||
func (r *queryResolver) FindGalleries(ctx context.Context, galleryFilter *models.GalleryFilterType, filter *models.FindFilterType) (ret *models.FindGalleriesResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
galleries, total, err := repo.Gallery().Query(galleryFilter, filter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ret = &models.FindGalleriesResultType{
|
||||
Count: total,
|
||||
Galleries: galleries,
|
||||
}, nil
|
||||
}
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
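Each of these query resolvers follows the same shape: the result is a named return value assigned inside the read-transaction closure with plain assignment rather than a short variable declaration, so the value survives the closure instead of being shadowed by a local. Schematically, with Widget standing in for any model type (there is no Widget repository in stash):

func (r *queryResolver) FindWidget(ctx context.Context, id int) (ret *models.Widget, err error) {
	if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
		// plain assignment writes to the named returns; ":=" here would shadow them
		ret, err = repo.Widget().Find(id)
		return err
	}); err != nil {
		return nil, err
	}

	return ret, nil
}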
|
||||
|
||||
@@ -8,23 +8,48 @@ import (
|
||||
)
|
||||
|
||||
func (r *queryResolver) FindImage(ctx context.Context, id *string, checksum *string) (*models.Image, error) {
|
||||
qb := models.NewImageQueryBuilder()
|
||||
var image *models.Image
|
||||
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
qb := repo.Image()
|
||||
var err error
|
||||
|
||||
if id != nil {
|
||||
idInt, _ := strconv.Atoi(*id)
|
||||
idInt, err := strconv.Atoi(*id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
image, err = qb.Find(idInt)
|
||||
} else if checksum != nil {
|
||||
image, err = qb.FindByChecksum(*checksum)
|
||||
}
|
||||
return image, err
|
||||
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return image, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindImages(ctx context.Context, imageFilter *models.ImageFilterType, imageIds []int, filter *models.FindFilterType) (*models.FindImagesResultType, error) {
|
||||
qb := models.NewImageQueryBuilder()
|
||||
images, total := qb.Query(imageFilter, filter)
|
||||
return &models.FindImagesResultType{
|
||||
func (r *queryResolver) FindImages(ctx context.Context, imageFilter *models.ImageFilterType, imageIds []int, filter *models.FindFilterType) (ret *models.FindImagesResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
qb := repo.Image()
|
||||
images, total, err := qb.Query(imageFilter, filter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ret = &models.FindImagesResultType{
|
||||
Count: total,
|
||||
Images: images,
|
||||
}, nil
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
@@ -7,27 +7,60 @@ import (
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *queryResolver) FindMovie(ctx context.Context, id string) (*models.Movie, error) {
|
||||
qb := models.NewMovieQueryBuilder()
|
||||
idInt, _ := strconv.Atoi(id)
|
||||
return qb.Find(idInt, nil)
|
||||
func (r *queryResolver) FindMovie(ctx context.Context, id string) (ret *models.Movie, err error) {
|
||||
idInt, err := strconv.Atoi(id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Movie().Find(idInt)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.MovieFilterType, filter *models.FindFilterType) (*models.FindMoviesResultType, error) {
|
||||
qb := models.NewMovieQueryBuilder()
|
||||
movies, total := qb.Query(movieFilter, filter)
|
||||
return &models.FindMoviesResultType{
|
||||
func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.MovieFilterType, filter *models.FindFilterType) (ret *models.FindMoviesResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
movies, total, err := repo.Movie().Query(movieFilter, filter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ret = &models.FindMoviesResultType{
|
||||
Count: total,
|
||||
Movies: movies,
|
||||
}, nil
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) AllMovies(ctx context.Context) ([]*models.Movie, error) {
|
||||
qb := models.NewMovieQueryBuilder()
|
||||
return qb.All()
|
||||
func (r *queryResolver) AllMovies(ctx context.Context) (ret []*models.Movie, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Movie().All()
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) AllMoviesSlim(ctx context.Context) ([]*models.Movie, error) {
|
||||
qb := models.NewMovieQueryBuilder()
|
||||
return qb.AllSlim()
|
||||
func (r *queryResolver) AllMoviesSlim(ctx context.Context) (ret []*models.Movie, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Movie().AllSlim()
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
@@ -2,31 +2,64 @@ package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"strconv"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *queryResolver) FindPerformer(ctx context.Context, id string) (*models.Performer, error) {
|
||||
qb := models.NewPerformerQueryBuilder()
|
||||
idInt, _ := strconv.Atoi(id)
|
||||
return qb.Find(idInt)
|
||||
func (r *queryResolver) FindPerformer(ctx context.Context, id string) (ret *models.Performer, err error) {
|
||||
idInt, err := strconv.Atoi(id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Performer().Find(idInt)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *models.PerformerFilterType, filter *models.FindFilterType) (*models.FindPerformersResultType, error) {
|
||||
qb := models.NewPerformerQueryBuilder()
|
||||
performers, total := qb.Query(performerFilter, filter)
|
||||
return &models.FindPerformersResultType{
|
||||
func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *models.PerformerFilterType, filter *models.FindFilterType) (ret *models.FindPerformersResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
performers, total, err := repo.Performer().Query(performerFilter, filter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ret = &models.FindPerformersResultType{
|
||||
Count: total,
|
||||
Performers: performers,
|
||||
}, nil
|
||||
}
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) AllPerformers(ctx context.Context) ([]*models.Performer, error) {
|
||||
qb := models.NewPerformerQueryBuilder()
|
||||
return qb.All()
|
||||
func (r *queryResolver) AllPerformers(ctx context.Context) (ret []*models.Performer, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Performer().All()
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) AllPerformersSlim(ctx context.Context) ([]*models.Performer, error) {
|
||||
qb := models.NewPerformerQueryBuilder()
|
||||
return qb.AllSlim()
|
||||
func (r *queryResolver) AllPerformersSlim(ctx context.Context) (ret []*models.Performer, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Performer().AllSlim()
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
@@ -9,70 +9,114 @@ import (
|
||||
)
|
||||
|
||||
func (r *queryResolver) FindScene(ctx context.Context, id *string, checksum *string) (*models.Scene, error) {
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
var scene *models.Scene
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
qb := repo.Scene()
|
||||
var err error
|
||||
if id != nil {
|
||||
idInt, _ := strconv.Atoi(*id)
|
||||
idInt, err := strconv.Atoi(*id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
scene, err = qb.Find(idInt)
|
||||
} else if checksum != nil {
|
||||
scene, err = qb.FindByChecksum(*checksum)
|
||||
}
|
||||
return scene, err
|
||||
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return scene, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindSceneByHash(ctx context.Context, input models.SceneHashInput) (*models.Scene, error) {
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
var scene *models.Scene
|
||||
var err error
|
||||
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
qb := repo.Scene()
|
||||
var err error
|
||||
if input.Checksum != nil {
|
||||
scene, err = qb.FindByChecksum(*input.Checksum)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if scene == nil && input.Oshash != nil {
|
||||
scene, err = qb.FindByOSHash(*input.Oshash)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return scene, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.SceneFilterType, sceneIds []int, filter *models.FindFilterType) (ret *models.FindScenesResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
scenes, total, err := repo.Scene().Query(sceneFilter, filter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
ret = &models.FindScenesResultType{
|
||||
Count: total,
|
||||
Scenes: scenes,
|
||||
}
|
||||
|
||||
return scene, err
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.SceneFilterType, sceneIds []int, filter *models.FindFilterType) (*models.FindScenesResultType, error) {
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
scenes, total := qb.Query(sceneFilter, filter)
|
||||
return &models.FindScenesResultType{
|
||||
func (r *queryResolver) FindScenesByPathRegex(ctx context.Context, filter *models.FindFilterType) (ret *models.FindScenesResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
scenes, total, err := repo.Scene().QueryByPathRegex(filter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ret = &models.FindScenesResultType{
|
||||
Count: total,
|
||||
Scenes: scenes,
|
||||
}, nil
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindScenesByPathRegex(ctx context.Context, filter *models.FindFilterType) (*models.FindScenesResultType, error) {
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
|
||||
scenes, total := qb.QueryByPathRegex(filter)
|
||||
return &models.FindScenesResultType{
|
||||
Count: total,
|
||||
Scenes: scenes,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) ParseSceneFilenames(ctx context.Context, filter *models.FindFilterType, config models.SceneParserInput) (*models.SceneParserResultType, error) {
|
||||
func (r *queryResolver) ParseSceneFilenames(ctx context.Context, filter *models.FindFilterType, config models.SceneParserInput) (ret *models.SceneParserResultType, err error) {
|
||||
parser := manager.NewSceneFilenameParser(filter, config)
|
||||
|
||||
result, count, err := parser.Parse()
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
result, count, err := parser.Parse(repo)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ret = &models.SceneParserResultType{
|
||||
Count: count,
|
||||
Results: result,
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &models.SceneParserResultType{
|
||||
Count: count,
|
||||
Results: result,
|
||||
}, nil
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
@@ -2,14 +2,25 @@ package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *queryResolver) FindSceneMarkers(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, filter *models.FindFilterType) (*models.FindSceneMarkersResultType, error) {
|
||||
qb := models.NewSceneMarkerQueryBuilder()
|
||||
sceneMarkers, total := qb.Query(sceneMarkerFilter, filter)
|
||||
return &models.FindSceneMarkersResultType{
|
||||
func (r *queryResolver) FindSceneMarkers(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, filter *models.FindFilterType) (ret *models.FindSceneMarkersResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
sceneMarkers, total, err := repo.SceneMarker().Query(sceneMarkerFilter, filter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
ret = &models.FindSceneMarkersResultType{
|
||||
Count: total,
|
||||
SceneMarkers: sceneMarkers,
|
||||
}, nil
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
@@ -2,31 +2,66 @@ package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"strconv"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *queryResolver) FindStudio(ctx context.Context, id string) (*models.Studio, error) {
|
||||
qb := models.NewStudioQueryBuilder()
|
||||
idInt, _ := strconv.Atoi(id)
|
||||
return qb.Find(idInt, nil)
|
||||
func (r *queryResolver) FindStudio(ctx context.Context, id string) (ret *models.Studio, err error) {
|
||||
idInt, err := strconv.Atoi(id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
var err error
|
||||
ret, err = repo.Studio().Find(idInt)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindStudios(ctx context.Context, studioFilter *models.StudioFilterType, filter *models.FindFilterType) (*models.FindStudiosResultType, error) {
|
||||
qb := models.NewStudioQueryBuilder()
|
||||
studios, total := qb.Query(studioFilter, filter)
|
||||
return &models.FindStudiosResultType{
|
||||
func (r *queryResolver) FindStudios(ctx context.Context, studioFilter *models.StudioFilterType, filter *models.FindFilterType) (ret *models.FindStudiosResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
studios, total, err := repo.Studio().Query(studioFilter, filter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ret = &models.FindStudiosResultType{
|
||||
Count: total,
|
||||
Studios: studios,
|
||||
}, nil
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) AllStudios(ctx context.Context) ([]*models.Studio, error) {
|
||||
qb := models.NewStudioQueryBuilder()
|
||||
return qb.All()
|
||||
func (r *queryResolver) AllStudios(ctx context.Context) (ret []*models.Studio, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Studio().All()
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) AllStudiosSlim(ctx context.Context) ([]*models.Studio, error) {
|
||||
qb := models.NewStudioQueryBuilder()
|
||||
return qb.AllSlim()
|
||||
func (r *queryResolver) AllStudiosSlim(ctx context.Context) (ret []*models.Studio, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Studio().AllSlim()
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
@@ -7,27 +7,60 @@ import (
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *queryResolver) FindTag(ctx context.Context, id string) (*models.Tag, error) {
|
||||
qb := models.NewTagQueryBuilder()
|
||||
idInt, _ := strconv.Atoi(id)
|
||||
return qb.Find(idInt, nil)
|
||||
func (r *queryResolver) FindTag(ctx context.Context, id string) (ret *models.Tag, err error) {
|
||||
idInt, err := strconv.Atoi(id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Tag().Find(idInt)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindTags(ctx context.Context, tagFilter *models.TagFilterType, filter *models.FindFilterType) (*models.FindTagsResultType, error) {
|
||||
qb := models.NewTagQueryBuilder()
|
||||
tags, total := qb.Query(tagFilter, filter)
|
||||
return &models.FindTagsResultType{
|
||||
func (r *queryResolver) FindTags(ctx context.Context, tagFilter *models.TagFilterType, filter *models.FindFilterType) (ret *models.FindTagsResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
tags, total, err := repo.Tag().Query(tagFilter, filter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ret = &models.FindTagsResultType{
|
||||
Count: total,
|
||||
Tags: tags,
|
||||
}, nil
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) AllTags(ctx context.Context) ([]*models.Tag, error) {
|
||||
qb := models.NewTagQueryBuilder()
|
||||
return qb.All()
|
||||
func (r *queryResolver) AllTags(ctx context.Context) (ret []*models.Tag, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Tag().All()
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) AllTagsSlim(ctx context.Context) ([]*models.Tag, error) {
|
||||
qb := models.NewTagQueryBuilder()
|
||||
return qb.AllSlim()
|
||||
func (r *queryResolver) AllTagsSlim(ctx context.Context) (ret []*models.Tag, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
ret, err = repo.Tag().AllSlim()
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
@@ -12,11 +12,13 @@ import (
|
||||
|
||||
func (r *queryResolver) SceneStreams(ctx context.Context, id *string) ([]*models.SceneStreamEndpoint, error) {
|
||||
// find the scene
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
var scene *models.Scene
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
idInt, _ := strconv.Atoi(*id)
|
||||
scene, err := qb.Find(idInt)
|
||||
|
||||
if err != nil {
|
||||
var err error
|
||||
scene, err = repo.Scene().Find(idInt)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
|
||||
@@ -95,7 +95,7 @@ func (r *queryResolver) QueryStashBoxScene(ctx context.Context, input models.Sta
|
||||
return nil, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex)
|
||||
}
|
||||
|
||||
client := stashbox.NewClient(*boxes[input.StashBoxIndex])
|
||||
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager)
|
||||
|
||||
if len(input.SceneIds) > 0 {
|
||||
return client.FindStashBoxScenesByFingerprints(input.SceneIds)
|
||||
|
||||
@@ -12,7 +12,9 @@ import (
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
type imageRoutes struct{}
|
||||
type imageRoutes struct {
|
||||
txnManager models.TransactionManager
|
||||
}
|
||||
|
||||
func (rs imageRoutes) Routes() chi.Router {
|
||||
r := chi.NewRouter()
|
||||
@@ -57,13 +59,17 @@ func ImageCtx(next http.Handler) http.Handler {
|
||||
imageID, _ := strconv.Atoi(imageIdentifierQueryParam)
|
||||
|
||||
var image *models.Image
|
||||
qb := models.NewImageQueryBuilder()
|
||||
manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
|
||||
qb := repo.Image()
|
||||
if imageID == 0 {
|
||||
image, _ = qb.FindByChecksum(imageIdentifierQueryParam)
|
||||
} else {
|
||||
image, _ = qb.Find(imageID)
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
if image == nil {
|
||||
http.Error(w, http.StatusText(404), 404)
|
||||
return
|
||||
|
||||
@@ -6,11 +6,14 @@ import (
|
||||
"strconv"
|
||||
|
||||
"github.com/go-chi/chi"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
type movieRoutes struct{}
|
||||
type movieRoutes struct {
|
||||
txnManager models.TransactionManager
|
||||
}
|
||||
|
||||
func (rs movieRoutes) Routes() chi.Router {
|
||||
r := chi.NewRouter()
|
||||
@@ -26,11 +29,16 @@ func (rs movieRoutes) Routes() chi.Router {
|
||||
|
||||
func (rs movieRoutes) FrontImage(w http.ResponseWriter, r *http.Request) {
|
||||
movie := r.Context().Value(movieKey).(*models.Movie)
|
||||
qb := models.NewMovieQueryBuilder()
|
||||
image, _ := qb.GetFrontImage(movie.ID, nil)
|
||||
|
||||
defaultParam := r.URL.Query().Get("default")
|
||||
if len(image) == 0 || defaultParam == "true" {
|
||||
var image []byte
|
||||
if defaultParam != "true" {
|
||||
rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
|
||||
image, _ = repo.Movie().GetFrontImage(movie.ID)
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
if len(image) == 0 {
|
||||
_, image, _ = utils.ProcessBase64Image(models.DefaultMovieImage)
|
||||
}
|
||||
|
||||
@@ -39,11 +47,16 @@ func (rs movieRoutes) FrontImage(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
func (rs movieRoutes) BackImage(w http.ResponseWriter, r *http.Request) {
|
||||
movie := r.Context().Value(movieKey).(*models.Movie)
|
||||
qb := models.NewMovieQueryBuilder()
|
||||
image, _ := qb.GetBackImage(movie.ID, nil)
|
||||
|
||||
defaultParam := r.URL.Query().Get("default")
|
||||
if len(image) == 0 || defaultParam == "true" {
|
||||
var image []byte
|
||||
if defaultParam != "true" {
|
||||
rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
|
||||
image, _ = repo.Movie().GetBackImage(movie.ID)
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
if len(image) == 0 {
|
||||
_, image, _ = utils.ProcessBase64Image(models.DefaultMovieImage)
|
||||
}
|
||||
|
||||
@@ -58,9 +71,12 @@ func MovieCtx(next http.Handler) http.Handler {
|
||||
return
|
||||
}
|
||||
|
||||
qb := models.NewMovieQueryBuilder()
|
||||
movie, err := qb.Find(movieID, nil)
|
||||
if err != nil {
|
||||
var movie *models.Movie
|
||||
if err := manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
|
||||
var err error
|
||||
movie, err = repo.Movie().Find(movieID)
|
||||
return err
|
||||
}); err != nil {
|
||||
http.Error(w, http.StatusText(404), 404)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -6,11 +6,14 @@ import (
|
||||
"strconv"
|
||||
|
||||
"github.com/go-chi/chi"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
type performerRoutes struct{}
|
||||
type performerRoutes struct {
|
||||
txnManager models.TransactionManager
|
||||
}
|
||||
|
||||
func (rs performerRoutes) Routes() chi.Router {
|
||||
r := chi.NewRouter()
|
||||
@@ -25,10 +28,16 @@ func (rs performerRoutes) Routes() chi.Router {
|
||||
|
||||
func (rs performerRoutes) Image(w http.ResponseWriter, r *http.Request) {
|
||||
performer := r.Context().Value(performerKey).(*models.Performer)
|
||||
qb := models.NewPerformerQueryBuilder()
|
||||
image, _ := qb.GetPerformerImage(performer.ID, nil)
|
||||
|
||||
defaultParam := r.URL.Query().Get("default")
|
||||
|
||||
var image []byte
|
||||
if defaultParam != "true" {
|
||||
rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
|
||||
image, _ = repo.Performer().GetImage(performer.ID)
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
if len(image) == 0 || defaultParam == "true" {
|
||||
image, _ = getRandomPerformerImageUsingName(performer.Name.String, performer.Gender.String)
|
||||
}
|
||||
@@ -44,9 +53,12 @@ func PerformerCtx(next http.Handler) http.Handler {
|
||||
return
|
||||
}
|
||||
|
||||
qb := models.NewPerformerQueryBuilder()
|
||||
performer, err := qb.Find(performerID)
|
||||
if err != nil {
|
||||
var performer *models.Performer
|
||||
if err := manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
|
||||
var err error
|
||||
performer, err = repo.Performer().Find(performerID)
|
||||
return err
|
||||
}); err != nil {
|
||||
http.Error(w, http.StatusText(404), 404)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -15,7 +15,9 @@ import (
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
type sceneRoutes struct{}
|
||||
type sceneRoutes struct {
|
||||
txnManager models.TransactionManager
|
||||
}
|
||||
|
||||
func (rs sceneRoutes) Routes() chi.Router {
|
||||
r := chi.NewRouter()
|
||||
@@ -183,8 +185,11 @@ func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) {
|
||||
if screenshotExists {
|
||||
http.ServeFile(w, r, filepath)
|
||||
} else {
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
cover, _ := qb.GetSceneCover(scene.ID, nil)
|
||||
var cover []byte
|
||||
rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
|
||||
cover, _ = repo.Scene().GetCover(scene.ID)
|
||||
return nil
|
||||
})
|
||||
utils.ServeImage(cover, w, r)
|
||||
}
|
||||
}
|
||||
@@ -201,39 +206,48 @@ func (rs sceneRoutes) Webp(w http.ResponseWriter, r *http.Request) {
|
||||
http.ServeFile(w, r, filepath)
|
||||
}
|
||||
|
||||
func getChapterVttTitle(marker *models.SceneMarker) string {
|
||||
func (rs sceneRoutes) getChapterVttTitle(ctx context.Context, marker *models.SceneMarker) string {
|
||||
if marker.Title != "" {
|
||||
return marker.Title
|
||||
}
|
||||
|
||||
qb := models.NewTagQueryBuilder()
|
||||
primaryTag, err := qb.Find(marker.PrimaryTagID, nil)
|
||||
var ret string
|
||||
if err := rs.txnManager.WithReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
qb := repo.Tag()
|
||||
primaryTag, err := qb.Find(marker.PrimaryTagID)
|
||||
if err != nil {
|
||||
// should not happen
|
||||
panic(err)
|
||||
return err
|
||||
}
|
||||
|
||||
ret := primaryTag.Name
|
||||
ret = primaryTag.Name
|
||||
|
||||
tags, err := qb.FindBySceneMarkerID(marker.ID, nil)
|
||||
tags, err := qb.FindBySceneMarkerID(marker.ID)
|
||||
if err != nil {
|
||||
// should not happen
|
||||
panic(err)
|
||||
return err
|
||||
}
|
||||
|
||||
for _, t := range tags {
|
||||
ret += ", " + t.Name
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
|
||||
scene := r.Context().Value(sceneKey).(*models.Scene)
|
||||
qb := models.NewSceneMarkerQueryBuilder()
|
||||
sceneMarkers, err := qb.FindBySceneID(scene.ID, nil)
|
||||
if err != nil {
|
||||
panic("invalid scene markers for chapter vtt")
|
||||
var sceneMarkers []*models.SceneMarker
|
||||
if err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
|
||||
var err error
|
||||
sceneMarkers, err = repo.SceneMarker().FindBySceneID(scene.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
vttLines := []string{"WEBVTT", ""}
|
||||
@@ -241,7 +255,7 @@ func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
|
||||
vttLines = append(vttLines, strconv.Itoa(i+1))
|
||||
time := utils.GetVTTTime(marker.Seconds)
|
||||
vttLines = append(vttLines, time+" --> "+time)
|
||||
vttLines = append(vttLines, getChapterVttTitle(marker))
|
||||
vttLines = append(vttLines, rs.getChapterVttTitle(r.Context(), marker))
|
||||
vttLines = append(vttLines, "")
|
||||
}
|
||||
vtt := strings.Join(vttLines, "\n")
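For a scene with a single marker at 83 seconds titled "Intro", the generated chapter file would therefore look roughly like this (the exact timestamp formatting comes from utils.GetVTTTime and is shown here only for illustration):

WEBVTT

1
00:01:23.000 --> 00:01:23.000
Intro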
|
||||
@@ -267,11 +281,14 @@ func (rs sceneRoutes) VttSprite(w http.ResponseWriter, r *http.Request) {
|
||||
func (rs sceneRoutes) SceneMarkerStream(w http.ResponseWriter, r *http.Request) {
|
||||
scene := r.Context().Value(sceneKey).(*models.Scene)
|
||||
sceneMarkerID, _ := strconv.Atoi(chi.URLParam(r, "sceneMarkerId"))
|
||||
qb := models.NewSceneMarkerQueryBuilder()
|
||||
sceneMarker, err := qb.Find(sceneMarkerID)
|
||||
if err != nil {
|
||||
logger.Warn("Error when getting scene marker for stream")
|
||||
http.Error(w, http.StatusText(404), 404)
|
||||
var sceneMarker *models.SceneMarker
|
||||
if err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
|
||||
var err error
|
||||
sceneMarker, err = repo.SceneMarker().Find(sceneMarkerID)
|
||||
return err
|
||||
}); err != nil {
|
||||
logger.Warnf("Error when getting scene marker for stream: %s", err.Error())
|
||||
http.Error(w, http.StatusText(500), 500)
|
||||
return
|
||||
}
|
||||
filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPath(scene.GetHash(config.GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds))
|
||||
@@ -281,11 +298,14 @@ func (rs sceneRoutes) SceneMarkerStream(w http.ResponseWriter, r *http.Request)
|
||||
func (rs sceneRoutes) SceneMarkerPreview(w http.ResponseWriter, r *http.Request) {
|
||||
scene := r.Context().Value(sceneKey).(*models.Scene)
|
||||
sceneMarkerID, _ := strconv.Atoi(chi.URLParam(r, "sceneMarkerId"))
|
||||
qb := models.NewSceneMarkerQueryBuilder()
|
||||
sceneMarker, err := qb.Find(sceneMarkerID)
|
||||
if err != nil {
|
||||
logger.Warn("Error when getting scene marker for stream")
|
||||
http.Error(w, http.StatusText(404), 404)
|
||||
var sceneMarker *models.SceneMarker
|
||||
if err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
|
||||
var err error
|
||||
sceneMarker, err = repo.SceneMarker().Find(sceneMarkerID)
|
||||
return err
|
||||
}); err != nil {
|
||||
logger.Warnf("Error when getting scene marker for stream: %s", err.Error())
|
||||
http.Error(w, http.StatusText(500), 500)
|
||||
return
|
||||
}
|
||||
filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.GetHash(config.GetVideoFileNamingAlgorithm()), int(sceneMarker.Seconds))
|
||||
@@ -310,7 +330,8 @@ func SceneCtx(next http.Handler) http.Handler {
|
||||
sceneID, _ := strconv.Atoi(sceneIdentifierQueryParam)
|
||||
|
||||
var scene *models.Scene
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
|
||||
qb := repo.Scene()
|
||||
if sceneID == 0 {
|
||||
// determine checksum/os by the length of the query param
|
||||
if len(sceneIdentifierQueryParam) == 32 {
|
||||
@@ -322,6 +343,9 @@ func SceneCtx(next http.Handler) http.Handler {
|
||||
scene, _ = qb.Find(sceneID)
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
if scene == nil {
|
||||
http.Error(w, http.StatusText(404), 404)
|
||||
return
|
||||
|
||||
@@ -6,11 +6,14 @@ import (
|
||||
"strconv"
|
||||
|
||||
"github.com/go-chi/chi"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
type studioRoutes struct{}
|
||||
type studioRoutes struct {
|
||||
txnManager models.TransactionManager
|
||||
}
|
||||
|
||||
func (rs studioRoutes) Routes() chi.Router {
|
||||
r := chi.NewRouter()
|
||||
@@ -25,12 +28,14 @@ func (rs studioRoutes) Routes() chi.Router {
|
||||
|
||||
func (rs studioRoutes) Image(w http.ResponseWriter, r *http.Request) {
|
||||
studio := r.Context().Value(studioKey).(*models.Studio)
|
||||
qb := models.NewStudioQueryBuilder()
|
||||
var image []byte
|
||||
defaultParam := r.URL.Query().Get("default")
|
||||
|
||||
var image []byte
|
||||
if defaultParam != "true" {
|
||||
image, _ = qb.GetStudioImage(studio.ID, nil)
|
||||
rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
|
||||
image, _ = repo.Studio().GetImage(studio.ID)
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
if len(image) == 0 {
|
||||
@@ -48,9 +53,12 @@ func StudioCtx(next http.Handler) http.Handler {
|
||||
return
|
||||
}
|
||||
|
||||
qb := models.NewStudioQueryBuilder()
|
||||
studio, err := qb.Find(studioID, nil)
|
||||
if err != nil {
|
||||
var studio *models.Studio
|
||||
if err := manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
|
||||
var err error
|
||||
studio, err = repo.Studio().Find(studioID)
|
||||
return err
|
||||
}); err != nil {
|
||||
http.Error(w, http.StatusText(404), 404)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -6,11 +6,14 @@ import (
|
||||
"strconv"
|
||||
|
||||
"github.com/go-chi/chi"
|
||||
"github.com/stashapp/stash/pkg/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
type tagRoutes struct{}
|
||||
type tagRoutes struct {
|
||||
txnManager models.TransactionManager
|
||||
}
|
||||
|
||||
func (rs tagRoutes) Routes() chi.Router {
|
||||
r := chi.NewRouter()
|
||||
@@ -25,12 +28,17 @@ func (rs tagRoutes) Routes() chi.Router {
|
||||
|
||||
func (rs tagRoutes) Image(w http.ResponseWriter, r *http.Request) {
|
||||
tag := r.Context().Value(tagKey).(*models.Tag)
|
||||
qb := models.NewTagQueryBuilder()
|
||||
image, _ := qb.GetTagImage(tag.ID, nil)
|
||||
|
||||
// use default image if not present
|
||||
defaultParam := r.URL.Query().Get("default")
|
||||
if len(image) == 0 || defaultParam == "true" {
|
||||
|
||||
var image []byte
|
||||
if defaultParam != "true" {
|
||||
rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
|
||||
image, _ = repo.Tag().GetImage(tag.ID)
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
if len(image) == 0 {
|
||||
image = models.DefaultTagImage
|
||||
}
|
||||
|
||||
@@ -45,9 +53,12 @@ func TagCtx(next http.Handler) http.Handler {
|
||||
return
|
||||
}
|
||||
|
||||
qb := models.NewTagQueryBuilder()
|
||||
tag, err := qb.Find(tagID, nil)
|
||||
if err != nil {
|
||||
var tag *models.Tag
|
||||
if err := manager.GetInstance().TxnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
|
||||
var err error
|
||||
tag, err = repo.Tag().Find(tagID)
|
||||
return err
|
||||
}); err != nil {
|
||||
http.Error(w, http.StatusText(404), 404)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -134,7 +134,12 @@ func Start() {
|
||||
return true
|
||||
},
|
||||
})
|
||||
gqlHandler := handler.GraphQL(models.NewExecutableSchema(models.Config{Resolvers: &Resolver{}}), recoverFunc, websocketUpgrader)
|
||||
|
||||
txnManager := manager.GetInstance().TxnManager
|
||||
resolver := &Resolver{
|
||||
txnManager: txnManager,
|
||||
}
|
||||
gqlHandler := handler.GraphQL(models.NewExecutableSchema(models.Config{Resolvers: resolver}), recoverFunc, websocketUpgrader)
|
||||
|
||||
r.Handle("/graphql", gqlHandler)
|
||||
r.Handle("/playground", handler.Playground("GraphQL playground", "/graphql"))
|
||||
@@ -145,12 +150,24 @@ func Start() {
|
||||
|
||||
r.Get(loginEndPoint, getLoginHandler)
|
||||
|
||||
r.Mount("/performer", performerRoutes{}.Routes())
|
||||
r.Mount("/scene", sceneRoutes{}.Routes())
|
||||
r.Mount("/image", imageRoutes{}.Routes())
|
||||
r.Mount("/studio", studioRoutes{}.Routes())
|
||||
r.Mount("/movie", movieRoutes{}.Routes())
|
||||
r.Mount("/tag", tagRoutes{}.Routes())
|
||||
r.Mount("/performer", performerRoutes{
|
||||
txnManager: txnManager,
|
||||
}.Routes())
|
||||
r.Mount("/scene", sceneRoutes{
|
||||
txnManager: txnManager,
|
||||
}.Routes())
|
||||
r.Mount("/image", imageRoutes{
|
||||
txnManager: txnManager,
|
||||
}.Routes())
|
||||
r.Mount("/studio", studioRoutes{
|
||||
txnManager: txnManager,
|
||||
}.Routes())
|
||||
r.Mount("/movie", movieRoutes{
|
||||
txnManager: txnManager,
|
||||
}.Routes())
|
||||
r.Mount("/tag", tagRoutes{
|
||||
txnManager: txnManager,
|
||||
}.Routes())
|
||||
r.Mount("/downloads", downloadsRoutes{}.Routes())
|
||||
|
||||
r.HandleFunc("/css", func(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
@@ -68,9 +68,9 @@ func Initialize(databasePath string) bool {
|
||||
|
||||
func open(databasePath string, disableForeignKeys bool) *sqlx.DB {
|
||||
// https://github.com/mattn/go-sqlite3
|
||||
url := "file:" + databasePath
|
||||
url := "file:" + databasePath + "?_journal=WAL"
|
||||
if !disableForeignKeys {
|
||||
url += "?_fk=true"
|
||||
url += "&_fk=true"
|
||||
}
|
||||
|
||||
conn, err := sqlx.Open(sqlite3Driver, url)
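With WAL enabled and foreign keys left on, the DSN handed to sqlx.Open therefore ends up as, for example:

file:/path/to/stash.sqlite?_journal=WAL&_fk=true

(the path is illustrative). Because the journal parameter is now always appended with "?", the foreign-key flag switches from "?_fk=true" to "&_fk=true" to keep the query string valid.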
|
||||
|
||||
@@ -6,7 +6,6 @@ import (
|
||||
"github.com/stashapp/stash/pkg/manager/jsonschema"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/mocks"
|
||||
"github.com/stashapp/stash/pkg/models/modelstest"
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"testing"
|
||||
@@ -51,18 +50,18 @@ var updateTime time.Time = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC)
|
||||
func createFullGallery(id int) models.Gallery {
|
||||
return models.Gallery{
|
||||
ID: id,
|
||||
Path: modelstest.NullString(path),
|
||||
Path: models.NullString(path),
|
||||
Zip: zip,
|
||||
Title: modelstest.NullString(title),
|
||||
Title: models.NullString(title),
|
||||
Checksum: checksum,
|
||||
Date: models.SQLiteDate{
|
||||
String: date,
|
||||
Valid: true,
|
||||
},
|
||||
Details: modelstest.NullString(details),
|
||||
Rating: modelstest.NullInt64(rating),
|
||||
Details: models.NullString(details),
|
||||
Rating: models.NullInt64(rating),
|
||||
Organized: organized,
|
||||
URL: modelstest.NullString(url),
|
||||
URL: models.NullString(url),
|
||||
CreatedAt: models.SQLiteTimestamp{
|
||||
Timestamp: createTime,
|
||||
},
|
||||
@@ -146,7 +145,7 @@ func TestToJSON(t *testing.T) {
|
||||
|
||||
func createStudioGallery(studioID int) models.Gallery {
|
||||
return models.Gallery{
|
||||
StudioID: modelstest.NullInt64(int64(studioID)),
|
||||
StudioID: models.NullInt64(int64(studioID)),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -180,7 +179,7 @@ func TestGetStudioName(t *testing.T) {
|
||||
studioErr := errors.New("error getting image")
|
||||
|
||||
mockStudioReader.On("Find", studioID).Return(&models.Studio{
|
||||
Name: modelstest.NullString(studioName),
|
||||
Name: models.NullString(studioName),
|
||||
}, nil).Once()
|
||||
mockStudioReader.On("Find", missingStudioID).Return(nil, nil).Once()
|
||||
mockStudioReader.On("Find", errStudioID).Return(nil, studioErr).Once()
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
package gallery
|
||||
|
||||
import (
|
||||
"github.com/stashapp/stash/pkg/api/urlbuilders"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func GetFiles(g *models.Gallery, baseURL string) []*models.GalleryFilesType {
|
||||
var galleryFiles []*models.GalleryFilesType
|
||||
|
||||
qb := models.NewImageQueryBuilder()
|
||||
images, err := qb.FindByGalleryID(g.ID)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
for i, img := range images {
|
||||
builder := urlbuilders.NewImageURLBuilder(baseURL, img.ID)
|
||||
imageURL := builder.GetImageURL()
|
||||
|
||||
galleryFile := models.GalleryFilesType{
|
||||
Index: i,
|
||||
Name: &img.Title.String,
|
||||
Path: &imageURL,
|
||||
}
|
||||
galleryFiles = append(galleryFiles, &galleryFile)
|
||||
}
|
||||
|
||||
return galleryFiles
|
||||
}
|
||||
@@ -15,7 +15,6 @@ type Importer struct {
|
||||
StudioWriter models.StudioReaderWriter
|
||||
PerformerWriter models.PerformerReaderWriter
|
||||
TagWriter models.TagReaderWriter
|
||||
JoinWriter models.JoinReaderWriter
|
||||
Input jsonschema.Gallery
|
||||
MissingRefBehaviour models.ImportMissingRefEnum
|
||||
|
||||
@@ -237,29 +236,22 @@ func (i *Importer) createTags(names []string) ([]*models.Tag, error) {
|
||||
|
||||
func (i *Importer) PostImport(id int) error {
|
||||
if len(i.performers) > 0 {
|
||||
var performerJoins []models.PerformersGalleries
|
||||
var performerIDs []int
|
||||
for _, performer := range i.performers {
|
||||
join := models.PerformersGalleries{
|
||||
PerformerID: performer.ID,
|
||||
GalleryID: id,
|
||||
performerIDs = append(performerIDs, performer.ID)
|
||||
}
|
||||
performerJoins = append(performerJoins, join)
|
||||
}
|
||||
if err := i.JoinWriter.UpdatePerformersGalleries(id, performerJoins); err != nil {
|
||||
|
||||
if err := i.ReaderWriter.UpdatePerformers(id, performerIDs); err != nil {
|
||||
return fmt.Errorf("failed to associate performers: %s", err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
if len(i.tags) > 0 {
|
||||
var tagJoins []models.GalleriesTags
|
||||
for _, tag := range i.tags {
|
||||
join := models.GalleriesTags{
|
||||
GalleryID: id,
|
||||
TagID: tag.ID,
|
||||
var tagIDs []int
|
||||
for _, t := range i.tags {
|
||||
tagIDs = append(tagIDs, t.ID)
|
||||
}
|
||||
tagJoins = append(tagJoins, join)
|
||||
}
|
||||
if err := i.JoinWriter.UpdateGalleriesTags(id, tagJoins); err != nil {
|
||||
if err := i.ReaderWriter.UpdateTags(id, tagIDs); err != nil {
|
||||
return fmt.Errorf("failed to associate tags: %s", err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,7 +8,6 @@ import (
|
||||
"github.com/stashapp/stash/pkg/manager/jsonschema"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/mocks"
|
||||
"github.com/stashapp/stash/pkg/models/modelstest"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
@@ -77,17 +76,17 @@ func TestImporterPreImport(t *testing.T) {
|
||||
assert.Nil(t, err)
|
||||
|
||||
expectedGallery := models.Gallery{
|
||||
Path: modelstest.NullString(path),
|
||||
Path: models.NullString(path),
|
||||
Checksum: checksum,
|
||||
Title: modelstest.NullString(title),
|
||||
Title: models.NullString(title),
|
||||
Date: models.SQLiteDate{
|
||||
String: date,
|
||||
Valid: true,
|
||||
},
|
||||
Details: modelstest.NullString(details),
|
||||
Rating: modelstest.NullInt64(rating),
|
||||
Details: models.NullString(details),
|
||||
Rating: models.NullInt64(rating),
|
||||
Organized: organized,
|
||||
URL: modelstest.NullString(url),
|
||||
URL: models.NullString(url),
|
||||
CreatedAt: models.SQLiteTimestamp{
|
||||
Timestamp: createdAt,
|
||||
},
|
||||
@@ -194,7 +193,7 @@ func TestImporterPreImportWithPerformer(t *testing.T) {
|
||||
performerReaderWriter.On("FindByNames", []string{existingPerformerName}, false).Return([]*models.Performer{
|
||||
{
|
||||
ID: existingPerformerID,
|
||||
Name: modelstest.NullString(existingPerformerName),
|
||||
Name: models.NullString(existingPerformerName),
|
||||
},
|
||||
}, nil).Once()
|
||||
performerReaderWriter.On("FindByNames", []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once()
|
||||
@@ -354,10 +353,10 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestImporterPostImportUpdatePerformers(t *testing.T) {
|
||||
joinReaderWriter := &mocks.JoinReaderWriter{}
|
||||
galleryReaderWriter := &mocks.GalleryReaderWriter{}
|
||||
|
||||
i := Importer{
|
||||
JoinWriter: joinReaderWriter,
|
||||
ReaderWriter: galleryReaderWriter,
|
||||
performers: []*models.Performer{
|
||||
{
|
||||
ID: existingPerformerID,
|
||||
@@ -365,15 +364,10 @@ func TestImporterPostImportUpdatePerformers(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
updateErr := errors.New("UpdatePerformersGalleries error")
|
||||
updateErr := errors.New("UpdatePerformers error")
|
||||
|
||||
joinReaderWriter.On("UpdatePerformersGalleries", galleryID, []models.PerformersGalleries{
|
||||
{
|
||||
PerformerID: existingPerformerID,
|
||||
GalleryID: galleryID,
|
||||
},
|
||||
}).Return(nil).Once()
|
||||
joinReaderWriter.On("UpdatePerformersGalleries", errPerformersID, mock.AnythingOfType("[]models.PerformersGalleries")).Return(updateErr).Once()
|
||||
galleryReaderWriter.On("UpdatePerformers", galleryID, []int{existingPerformerID}).Return(nil).Once()
|
||||
galleryReaderWriter.On("UpdatePerformers", errPerformersID, mock.AnythingOfType("[]int")).Return(updateErr).Once()
|
||||
|
||||
err := i.PostImport(galleryID)
|
||||
assert.Nil(t, err)
|
||||
@@ -381,14 +375,14 @@ func TestImporterPostImportUpdatePerformers(t *testing.T) {
|
||||
err = i.PostImport(errPerformersID)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
joinReaderWriter.AssertExpectations(t)
|
||||
galleryReaderWriter.AssertExpectations(t)
|
||||
}
|
||||
|
||||
func TestImporterPostImportUpdateTags(t *testing.T) {
|
||||
joinReaderWriter := &mocks.JoinReaderWriter{}
|
||||
galleryReaderWriter := &mocks.GalleryReaderWriter{}
|
||||
|
||||
i := Importer{
|
||||
JoinWriter: joinReaderWriter,
|
||||
ReaderWriter: galleryReaderWriter,
|
||||
tags: []*models.Tag{
|
||||
{
|
||||
ID: existingTagID,
|
||||
@@ -396,15 +390,10 @@ func TestImporterPostImportUpdateTags(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
updateErr := errors.New("UpdateGalleriesTags error")
|
||||
updateErr := errors.New("UpdateTags error")
|
||||
|
||||
joinReaderWriter.On("UpdateGalleriesTags", galleryID, []models.GalleriesTags{
|
||||
{
|
||||
TagID: existingTagID,
|
||||
GalleryID: galleryID,
|
||||
},
|
||||
}).Return(nil).Once()
|
||||
joinReaderWriter.On("UpdateGalleriesTags", errTagsID, mock.AnythingOfType("[]models.GalleriesTags")).Return(updateErr).Once()
|
||||
galleryReaderWriter.On("UpdateTags", galleryID, []int{existingTagID}).Return(nil).Once()
|
||||
galleryReaderWriter.On("UpdateTags", errTagsID, mock.AnythingOfType("[]int")).Return(updateErr).Once()
|
||||
|
||||
err := i.PostImport(galleryID)
|
||||
assert.Nil(t, err)
|
||||
@@ -412,7 +401,7 @@ func TestImporterPostImportUpdateTags(t *testing.T) {
|
||||
err = i.PostImport(errTagsID)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
joinReaderWriter.AssertExpectations(t)
|
||||
galleryReaderWriter.AssertExpectations(t)
|
||||
}
|
||||
|
||||
func TestImporterFindExistingID(t *testing.T) {
|
||||
@@ -454,11 +443,11 @@ func TestCreate(t *testing.T) {
|
||||
readerWriter := &mocks.GalleryReaderWriter{}
|
||||
|
||||
gallery := models.Gallery{
|
||||
Title: modelstest.NullString(title),
|
||||
Title: models.NullString(title),
|
||||
}
|
||||
|
||||
galleryErr := models.Gallery{
|
||||
Title: modelstest.NullString(galleryNameErr),
|
||||
Title: models.NullString(galleryNameErr),
|
||||
}
|
||||
|
||||
i := Importer{
|
||||
@@ -488,7 +477,7 @@ func TestUpdate(t *testing.T) {
|
||||
readerWriter := &mocks.GalleryReaderWriter{}
|
||||
|
||||
gallery := models.Gallery{
|
||||
Title: modelstest.NullString(title),
|
||||
Title: models.NullString(title),
|
||||
}
|
||||
|
||||
i := Importer{
|
||||
|
||||
23
pkg/gallery/update.go
Normal file
@@ -0,0 +1,23 @@
package gallery

import (
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)

func UpdateFileModTime(qb models.GalleryWriter, id int, modTime models.NullSQLiteTimestamp) (*models.Gallery, error) {
return qb.UpdatePartial(models.GalleryPartial{
ID: id,
FileModTime: &modTime,
})
}

func AddImage(qb models.GalleryReaderWriter, galleryID int, imageID int) error {
imageIDs, err := qb.GetImageIDs(galleryID)
if err != nil {
return err
}

imageIDs = utils.IntAppendUnique(imageIDs, imageID)
return qb.UpdateImages(galleryID, imageIDs)
}

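Note: the new gallery.AddImage helper replaces the old join-table writes with an append-unique update of the gallery's image IDs. A hypothetical call site, assuming the transaction-manager wiring introduced elsewhere in this change, might look roughly like this sketch:

	// hypothetical call site; txnManager, ctx, galleryID and imageID are assumed
	if err := txnManager.WithTxn(ctx, func(r models.Repository) error {
		return gallery.AddImage(r.Gallery(), galleryID, imageID)
	}); err != nil {
		logger.Errorf("failed to add image to gallery: %s", err.Error())
	}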
@@ -6,7 +6,6 @@ import (
|
||||
"github.com/stashapp/stash/pkg/manager/jsonschema"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/mocks"
|
||||
"github.com/stashapp/stash/pkg/models/modelstest"
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"testing"
|
||||
@@ -65,14 +64,14 @@ var updateTime time.Time = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC)
|
||||
func createFullImage(id int) models.Image {
|
||||
return models.Image{
|
||||
ID: id,
|
||||
Title: modelstest.NullString(title),
|
||||
Title: models.NullString(title),
|
||||
Checksum: checksum,
|
||||
Height: modelstest.NullInt64(height),
|
||||
Height: models.NullInt64(height),
|
||||
OCounter: ocounter,
|
||||
Rating: modelstest.NullInt64(rating),
|
||||
Rating: models.NullInt64(rating),
|
||||
Size: models.NullInt64(int64(size)),
|
||||
Organized: organized,
|
||||
Size: modelstest.NullInt64(int64(size)),
|
||||
Width: modelstest.NullInt64(width),
|
||||
Width: models.NullInt64(width),
|
||||
CreatedAt: models.SQLiteTimestamp{
|
||||
Timestamp: createTime,
|
||||
},
|
||||
@@ -150,7 +149,7 @@ func TestToJSON(t *testing.T) {
|
||||
|
||||
func createStudioImage(studioID int) models.Image {
|
||||
return models.Image{
|
||||
StudioID: modelstest.NullInt64(int64(studioID)),
|
||||
StudioID: models.NullInt64(int64(studioID)),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -184,7 +183,7 @@ func TestGetStudioName(t *testing.T) {
|
||||
studioErr := errors.New("error getting image")
|
||||
|
||||
mockStudioReader.On("Find", studioID).Return(&models.Studio{
|
||||
Name: modelstest.NullString(studioName),
|
||||
Name: models.NullString(studioName),
|
||||
}, nil).Once()
|
||||
mockStudioReader.On("Find", missingStudioID).Return(nil, nil).Once()
|
||||
mockStudioReader.On("Find", errStudioID).Return(nil, studioErr).Once()
|
||||
|
||||
@@ -16,7 +16,6 @@ type Importer struct {
|
||||
GalleryWriter models.GalleryReaderWriter
|
||||
PerformerWriter models.PerformerReaderWriter
|
||||
TagWriter models.TagReaderWriter
|
||||
JoinWriter models.JoinReaderWriter
|
||||
Input jsonschema.Image
|
||||
Path string
|
||||
MissingRefBehaviour models.ImportMissingRefEnum
|
||||
@@ -227,43 +226,33 @@ func (i *Importer) populateTags() error {
|
||||
|
||||
func (i *Importer) PostImport(id int) error {
|
||||
if len(i.galleries) > 0 {
|
||||
var galleryJoins []models.GalleriesImages
|
||||
for _, gallery := range i.galleries {
|
||||
join := models.GalleriesImages{
|
||||
GalleryID: gallery.ID,
|
||||
ImageID: id,
|
||||
var galleryIDs []int
|
||||
for _, g := range i.galleries {
|
||||
galleryIDs = append(galleryIDs, g.ID)
|
||||
}
|
||||
galleryJoins = append(galleryJoins, join)
|
||||
}
|
||||
if err := i.JoinWriter.UpdateGalleriesImages(id, galleryJoins); err != nil {
|
||||
|
||||
if err := i.ReaderWriter.UpdateGalleries(id, galleryIDs); err != nil {
|
||||
return fmt.Errorf("failed to associate galleries: %s", err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
if len(i.performers) > 0 {
|
||||
var performerJoins []models.PerformersImages
|
||||
var performerIDs []int
|
||||
for _, performer := range i.performers {
|
||||
join := models.PerformersImages{
|
||||
PerformerID: performer.ID,
|
||||
ImageID: id,
|
||||
performerIDs = append(performerIDs, performer.ID)
|
||||
}
|
||||
performerJoins = append(performerJoins, join)
|
||||
}
|
||||
if err := i.JoinWriter.UpdatePerformersImages(id, performerJoins); err != nil {
|
||||
|
||||
if err := i.ReaderWriter.UpdatePerformers(id, performerIDs); err != nil {
|
||||
return fmt.Errorf("failed to associate performers: %s", err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
if len(i.tags) > 0 {
|
||||
var tagJoins []models.ImagesTags
|
||||
for _, tag := range i.tags {
|
||||
join := models.ImagesTags{
|
||||
ImageID: id,
|
||||
TagID: tag.ID,
|
||||
var tagIDs []int
|
||||
for _, t := range i.tags {
|
||||
tagIDs = append(tagIDs, t.ID)
|
||||
}
|
||||
tagJoins = append(tagJoins, join)
|
||||
}
|
||||
if err := i.JoinWriter.UpdateImagesTags(id, tagJoins); err != nil {
|
||||
if err := i.ReaderWriter.UpdateTags(id, tagIDs); err != nil {
|
||||
return fmt.Errorf("failed to associate tags: %s", err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,7 +7,6 @@ import (
|
||||
"github.com/stashapp/stash/pkg/manager/jsonschema"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/mocks"
|
||||
"github.com/stashapp/stash/pkg/models/modelstest"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
@@ -227,7 +226,7 @@ func TestImporterPreImportWithPerformer(t *testing.T) {
|
||||
performerReaderWriter.On("FindByNames", []string{existingPerformerName}, false).Return([]*models.Performer{
|
||||
{
|
||||
ID: existingPerformerID,
|
||||
Name: modelstest.NullString(existingPerformerName),
|
||||
Name: models.NullString(existingPerformerName),
|
||||
},
|
||||
}, nil).Once()
|
||||
performerReaderWriter.On("FindByNames", []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once()
|
||||
@@ -387,10 +386,10 @@ func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestImporterPostImportUpdateGallery(t *testing.T) {
|
||||
joinReaderWriter := &mocks.JoinReaderWriter{}
|
||||
readerWriter := &mocks.ImageReaderWriter{}
|
||||
|
||||
i := Importer{
|
||||
JoinWriter: joinReaderWriter,
|
||||
ReaderWriter: readerWriter,
|
||||
galleries: []*models.Gallery{
|
||||
{
|
||||
ID: existingGalleryID,
|
||||
@@ -398,15 +397,10 @@ func TestImporterPostImportUpdateGallery(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
updateErr := errors.New("UpdateGalleriesImages error")
|
||||
updateErr := errors.New("UpdateGalleries error")
|
||||
|
||||
joinReaderWriter.On("UpdateGalleriesImages", imageID, []models.GalleriesImages{
|
||||
{
|
||||
GalleryID: existingGalleryID,
|
||||
ImageID: imageID,
|
||||
},
|
||||
}).Return(nil).Once()
|
||||
joinReaderWriter.On("UpdateGalleriesImages", errGalleriesID, mock.AnythingOfType("[]models.GalleriesImages")).Return(updateErr).Once()
|
||||
readerWriter.On("UpdateGalleries", imageID, []int{existingGalleryID}).Return(nil).Once()
|
||||
readerWriter.On("UpdateGalleries", errGalleriesID, mock.AnythingOfType("[]int")).Return(updateErr).Once()
|
||||
|
||||
err := i.PostImport(imageID)
|
||||
assert.Nil(t, err)
|
||||
@@ -414,14 +408,14 @@ func TestImporterPostImportUpdateGallery(t *testing.T) {
|
||||
err = i.PostImport(errGalleriesID)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
joinReaderWriter.AssertExpectations(t)
|
||||
readerWriter.AssertExpectations(t)
|
||||
}
|
||||
|
||||
func TestImporterPostImportUpdatePerformers(t *testing.T) {
|
||||
joinReaderWriter := &mocks.JoinReaderWriter{}
|
||||
readerWriter := &mocks.ImageReaderWriter{}
|
||||
|
||||
i := Importer{
|
||||
JoinWriter: joinReaderWriter,
|
||||
ReaderWriter: readerWriter,
|
||||
performers: []*models.Performer{
|
||||
{
|
||||
ID: existingPerformerID,
|
||||
@@ -429,15 +423,10 @@ func TestImporterPostImportUpdatePerformers(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
updateErr := errors.New("UpdatePerformersImages error")
|
||||
updateErr := errors.New("UpdatePerformers error")
|
||||
|
||||
joinReaderWriter.On("UpdatePerformersImages", imageID, []models.PerformersImages{
|
||||
{
|
||||
PerformerID: existingPerformerID,
|
||||
ImageID: imageID,
|
||||
},
|
||||
}).Return(nil).Once()
|
||||
joinReaderWriter.On("UpdatePerformersImages", errPerformersID, mock.AnythingOfType("[]models.PerformersImages")).Return(updateErr).Once()
|
||||
readerWriter.On("UpdatePerformers", imageID, []int{existingPerformerID}).Return(nil).Once()
|
||||
readerWriter.On("UpdatePerformers", errPerformersID, mock.AnythingOfType("[]int")).Return(updateErr).Once()
|
||||
|
||||
err := i.PostImport(imageID)
|
||||
assert.Nil(t, err)
|
||||
@@ -445,14 +434,14 @@ func TestImporterPostImportUpdatePerformers(t *testing.T) {
|
||||
err = i.PostImport(errPerformersID)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
joinReaderWriter.AssertExpectations(t)
|
||||
readerWriter.AssertExpectations(t)
|
||||
}
|
||||
|
||||
func TestImporterPostImportUpdateTags(t *testing.T) {
|
||||
joinReaderWriter := &mocks.JoinReaderWriter{}
|
||||
readerWriter := &mocks.ImageReaderWriter{}
|
||||
|
||||
i := Importer{
|
||||
JoinWriter: joinReaderWriter,
|
||||
ReaderWriter: readerWriter,
|
||||
tags: []*models.Tag{
|
||||
{
|
||||
ID: existingTagID,
|
||||
@@ -460,15 +449,10 @@ func TestImporterPostImportUpdateTags(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
updateErr := errors.New("UpdateImagesTags error")
|
||||
updateErr := errors.New("UpdateTags error")
|
||||
|
||||
joinReaderWriter.On("UpdateImagesTags", imageID, []models.ImagesTags{
|
||||
{
|
||||
TagID: existingTagID,
|
||||
ImageID: imageID,
|
||||
},
|
||||
}).Return(nil).Once()
|
||||
joinReaderWriter.On("UpdateImagesTags", errTagsID, mock.AnythingOfType("[]models.ImagesTags")).Return(updateErr).Once()
|
||||
readerWriter.On("UpdateTags", imageID, []int{existingTagID}).Return(nil).Once()
|
||||
readerWriter.On("UpdateTags", errTagsID, mock.AnythingOfType("[]int")).Return(updateErr).Once()
|
||||
|
||||
err := i.PostImport(imageID)
|
||||
assert.Nil(t, err)
|
||||
@@ -476,7 +460,7 @@ func TestImporterPostImportUpdateTags(t *testing.T) {
|
||||
err = i.PostImport(errTagsID)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
joinReaderWriter.AssertExpectations(t)
|
||||
readerWriter.AssertExpectations(t)
|
||||
}
|
||||
|
||||
func TestImporterFindExistingID(t *testing.T) {
|
||||
@@ -518,11 +502,11 @@ func TestCreate(t *testing.T) {
|
||||
readerWriter := &mocks.ImageReaderWriter{}
|
||||
|
||||
image := models.Image{
|
||||
Title: modelstest.NullString(title),
|
||||
Title: models.NullString(title),
|
||||
}
|
||||
|
||||
imageErr := models.Image{
|
||||
Title: modelstest.NullString(imageNameErr),
|
||||
Title: models.NullString(imageNameErr),
|
||||
}
|
||||
|
||||
i := Importer{
|
||||
@@ -553,11 +537,11 @@ func TestUpdate(t *testing.T) {
|
||||
readerWriter := &mocks.ImageReaderWriter{}
|
||||
|
||||
image := models.Image{
|
||||
Title: modelstest.NullString(title),
|
||||
Title: models.NullString(title),
|
||||
}
|
||||
|
||||
imageErr := models.Image{
|
||||
Title: modelstest.NullString(imageNameErr),
|
||||
Title: models.NullString(imageNameErr),
|
||||
}
|
||||
|
||||
i := Importer{
|
||||
|
||||
10
pkg/image/update.go
Normal file
@@ -0,0 +1,10 @@
package image

import "github.com/stashapp/stash/pkg/models"

func UpdateFileModTime(qb models.ImageWriter, id int, modTime models.NullSQLiteTimestamp) (*models.Image, error) {
return qb.Update(models.ImagePartial{
ID: id,
FileModTime: &modTime,
})
}
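A brief, hedged sketch of how this helper would presumably be called from a scan task; the field names of models.NullSQLiteTimestamp and the scan-task usage are assumptions, not shown in this diff:

	// hypothetical usage; t.TxnManager, ctx, imageID and modTime are assumed,
	// as are the Timestamp/Valid fields of models.NullSQLiteTimestamp
	err := t.TxnManager.WithTxn(ctx, func(r models.Repository) error {
		_, err := image.UpdateFileModTime(r.Image(), imageID, models.NullSQLiteTimestamp{
			Timestamp: modTime,
			Valid:     true,
		})
		return err
	})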
@@ -1,6 +1,7 @@
package manager

import (
"context"
"errors"

"github.com/spf13/viper"
@@ -9,13 +10,16 @@ import (
"github.com/stashapp/stash/pkg/models"
)

func setInitialMD5Config() {
func setInitialMD5Config(txnManager models.TransactionManager) {
// if there are no scene files in the database, then default the
// VideoFileNamingAlgorithm config setting to oshash and calculateMD5 to
// false, otherwise set them to true for backwards compatibility purposes
sqb := models.NewSceneQueryBuilder()
count, err := sqb.Count()
if err != nil {
var count int
if err := txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
var err error
count, err = r.Scene().Count()
return err
}); err != nil {
logger.Errorf("Error while counting scenes: %s", err.Error())
return
}
@@ -43,9 +47,10 @@ func setInitialMD5Config() {
//
// Likewise, if VideoFileNamingAlgorithm is set to oshash, then this function
// will ensure that all oshash values are set on all scenes.
func ValidateVideoFileNamingAlgorithm(newValue models.HashAlgorithm) error {
func ValidateVideoFileNamingAlgorithm(txnManager models.TransactionManager, newValue models.HashAlgorithm) error {
// if algorithm is being set to MD5, then all checksums must be present
qb := models.NewSceneQueryBuilder()
return txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
qb := r.Scene()
if newValue == models.HashAlgorithmMd5 {
missingMD5, err := qb.CountMissingChecksum()
if err != nil {
@@ -67,4 +72,5 @@ func ValidateVideoFileNamingAlgorithm(newValue models.HashAlgorithm) error {
}

return nil
})
}

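The pattern above, declaring the result outside the closure, assigning inside it, and letting the closure's error abort the read transaction, recurs throughout this change. A minimal sketch of the same idiom, using only the interfaces shown in this diff (the function name is invented for illustration):

	// minimal sketch of the WithReadTxn idiom used above
	func countScenes(txnManager models.TransactionManager) (int, error) {
		var count int
		err := txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
			var err error
			count, err = r.Scene().Count()
			return err
		})
		return count, err
	}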
@@ -433,12 +433,6 @@ type SceneFilenameParser struct {
|
||||
studioCache map[string]*models.Studio
|
||||
movieCache map[string]*models.Movie
|
||||
tagCache map[string]*models.Tag
|
||||
|
||||
performerQuery performerQueryer
|
||||
sceneQuery sceneQueryer
|
||||
tagQuery tagQueryer
|
||||
studioQuery studioQueryer
|
||||
movieQuery movieQueryer
|
||||
}
|
||||
|
||||
func NewSceneFilenameParser(filter *models.FindFilterType, config models.SceneParserInput) *SceneFilenameParser {
|
||||
@@ -455,21 +449,6 @@ func NewSceneFilenameParser(filter *models.FindFilterType, config models.ScenePa
|
||||
|
||||
p.initWhiteSpaceRegex()
|
||||
|
||||
performerQuery := models.NewPerformerQueryBuilder()
|
||||
p.performerQuery = &performerQuery
|
||||
|
||||
sceneQuery := models.NewSceneQueryBuilder()
|
||||
p.sceneQuery = &sceneQuery
|
||||
|
||||
tagQuery := models.NewTagQueryBuilder()
|
||||
p.tagQuery = &tagQuery
|
||||
|
||||
studioQuery := models.NewStudioQueryBuilder()
|
||||
p.studioQuery = &studioQuery
|
||||
|
||||
movieQuery := models.NewMovieQueryBuilder()
|
||||
p.movieQuery = &movieQuery
|
||||
|
||||
return p
|
||||
}
|
||||
|
||||
@@ -489,7 +468,7 @@ func (p *SceneFilenameParser) initWhiteSpaceRegex() {
}
}

func (p *SceneFilenameParser) Parse() ([]*models.SceneParserResult, int, error) {
func (p *SceneFilenameParser) Parse(repo models.ReaderRepository) ([]*models.SceneParserResult, int, error) {
// perform the query to find the scenes
mapper, err := newParseMapper(p.Pattern, p.ParserInput.IgnoreWords)

@@ -499,14 +478,17 @@ func (p *SceneFilenameParser) Parse() ([]*models.SceneParserResult, int, error)

p.Filter.Q = &mapper.regexString

scenes, total := p.sceneQuery.QueryByPathRegex(p.Filter)
scenes, total, err := repo.Scene().QueryByPathRegex(p.Filter)
if err != nil {
return nil, 0, err
}

ret := p.parseScenes(scenes, mapper)
ret := p.parseScenes(repo, scenes, mapper)

return ret, total, nil
}

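Since Parse now receives the reader repository instead of holding query builders, a caller presumably wraps it in a read transaction. A hedged sketch of that call (the filter and config variables are assumptions):

	// hypothetical caller, following the WithReadTxn pattern used in this change
	var ret []*models.SceneParserResult
	var count int
	if err := txnManager.WithReadTxn(ctx, func(repo models.ReaderRepository) error {
		parser := NewSceneFilenameParser(filter, config)
		var err error
		ret, count, err = parser.Parse(repo)
		return err
	}); err != nil {
		return nil, 0, err
	}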
func (p *SceneFilenameParser) parseScenes(scenes []*models.Scene, mapper *parseMapper) []*models.SceneParserResult {
|
||||
func (p *SceneFilenameParser) parseScenes(repo models.ReaderRepository, scenes []*models.Scene, mapper *parseMapper) []*models.SceneParserResult {
|
||||
var ret []*models.SceneParserResult
|
||||
for _, scene := range scenes {
|
||||
sceneHolder := mapper.parse(scene)
|
||||
@@ -515,7 +497,7 @@ func (p *SceneFilenameParser) parseScenes(scenes []*models.Scene, mapper *parseM
|
||||
r := &models.SceneParserResult{
|
||||
Scene: scene,
|
||||
}
|
||||
p.setParserResult(*sceneHolder, r)
|
||||
p.setParserResult(repo, *sceneHolder, r)
|
||||
|
||||
if r != nil {
|
||||
ret = append(ret, r)
|
||||
@@ -536,7 +518,7 @@ func (p SceneFilenameParser) replaceWhitespaceCharacters(value string) string {
|
||||
return value
|
||||
}
|
||||
|
||||
func (p *SceneFilenameParser) queryPerformer(performerName string) *models.Performer {
|
||||
func (p *SceneFilenameParser) queryPerformer(qb models.PerformerReader, performerName string) *models.Performer {
|
||||
// massage the performer name
|
||||
performerName = delimiterRE.ReplaceAllString(performerName, " ")
|
||||
|
||||
@@ -546,7 +528,7 @@ func (p *SceneFilenameParser) queryPerformer(performerName string) *models.Perfo
|
||||
}
|
||||
|
||||
// perform an exact match and grab the first
|
||||
performers, _ := p.performerQuery.FindByNames([]string{performerName}, nil, true)
|
||||
performers, _ := qb.FindByNames([]string{performerName}, true)
|
||||
|
||||
var ret *models.Performer
|
||||
if len(performers) > 0 {
|
||||
@@ -559,7 +541,7 @@ func (p *SceneFilenameParser) queryPerformer(performerName string) *models.Perfo
|
||||
return ret
|
||||
}
|
||||
|
||||
func (p *SceneFilenameParser) queryStudio(studioName string) *models.Studio {
|
||||
func (p *SceneFilenameParser) queryStudio(qb models.StudioReader, studioName string) *models.Studio {
|
||||
// massage the performer name
|
||||
studioName = delimiterRE.ReplaceAllString(studioName, " ")
|
||||
|
||||
@@ -568,7 +550,7 @@ func (p *SceneFilenameParser) queryStudio(studioName string) *models.Studio {
|
||||
return ret
|
||||
}
|
||||
|
||||
ret, _ := p.studioQuery.FindByName(studioName, nil, true)
|
||||
ret, _ := qb.FindByName(studioName, true)
|
||||
|
||||
// add result to cache
|
||||
p.studioCache[studioName] = ret
|
||||
@@ -576,7 +558,7 @@ func (p *SceneFilenameParser) queryStudio(studioName string) *models.Studio {
|
||||
return ret
|
||||
}
|
||||
|
||||
func (p *SceneFilenameParser) queryMovie(movieName string) *models.Movie {
|
||||
func (p *SceneFilenameParser) queryMovie(qb models.MovieReader, movieName string) *models.Movie {
|
||||
// massage the movie name
|
||||
movieName = delimiterRE.ReplaceAllString(movieName, " ")
|
||||
|
||||
@@ -585,7 +567,7 @@ func (p *SceneFilenameParser) queryMovie(movieName string) *models.Movie {
|
||||
return ret
|
||||
}
|
||||
|
||||
ret, _ := p.movieQuery.FindByName(movieName, nil, true)
|
||||
ret, _ := qb.FindByName(movieName, true)
|
||||
|
||||
// add result to cache
|
||||
p.movieCache[movieName] = ret
|
||||
@@ -593,7 +575,7 @@ func (p *SceneFilenameParser) queryMovie(movieName string) *models.Movie {
|
||||
return ret
|
||||
}
|
||||
|
||||
func (p *SceneFilenameParser) queryTag(tagName string) *models.Tag {
func (p *SceneFilenameParser) queryTag(qb models.TagReader, tagName string) *models.Tag {
// massage the performer name
tagName = delimiterRE.ReplaceAllString(tagName, " ")

@@ -603,7 +585,7 @@ func (p *SceneFilenameParser) queryTag(tagName string) *models.Tag {
}

// match tag name exactly
ret, _ := p.tagQuery.FindByName(tagName, nil, true)
ret, _ := qb.FindByName(tagName, true)

// add result to cache
p.tagCache[tagName] = ret
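The queryTag, queryStudio and queryMovie helpers above all share the same shape: consult a per-run name cache, fall back to an exact FindByName lookup on the reader, and store the result (even a nil one) so each name is only queried once. A condensed sketch of that shape, with an invented helper name:

	// condensed sketch of the parser's name -> entity caching, assuming the
	// reader interfaces used above; nil results are cached intentionally
	func (p *SceneFilenameParser) cachedTag(qb models.TagReader, name string) *models.Tag {
		name = delimiterRE.ReplaceAllString(name, " ")
		if ret, found := p.tagCache[name]; found {
			return ret
		}
		ret, _ := qb.FindByName(name, true)
		p.tagCache[name] = ret
		return ret
	}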
@@ -611,12 +593,12 @@ func (p *SceneFilenameParser) queryTag(tagName string) *models.Tag {
|
||||
return ret
|
||||
}
|
||||
|
||||
func (p *SceneFilenameParser) setPerformers(h sceneHolder, result *models.SceneParserResult) {
|
||||
func (p *SceneFilenameParser) setPerformers(qb models.PerformerReader, h sceneHolder, result *models.SceneParserResult) {
|
||||
// query for each performer
|
||||
performersSet := make(map[int]bool)
|
||||
for _, performerName := range h.performers {
|
||||
if performerName != "" {
|
||||
performer := p.queryPerformer(performerName)
|
||||
performer := p.queryPerformer(qb, performerName)
|
||||
if performer != nil {
|
||||
if _, found := performersSet[performer.ID]; !found {
|
||||
result.PerformerIds = append(result.PerformerIds, strconv.Itoa(performer.ID))
|
||||
@@ -627,12 +609,12 @@ func (p *SceneFilenameParser) setPerformers(h sceneHolder, result *models.SceneP
|
||||
}
|
||||
}
|
||||
|
||||
func (p *SceneFilenameParser) setTags(h sceneHolder, result *models.SceneParserResult) {
|
||||
func (p *SceneFilenameParser) setTags(qb models.TagReader, h sceneHolder, result *models.SceneParserResult) {
|
||||
// query for each performer
|
||||
tagsSet := make(map[int]bool)
|
||||
for _, tagName := range h.tags {
|
||||
if tagName != "" {
|
||||
tag := p.queryTag(tagName)
|
||||
tag := p.queryTag(qb, tagName)
|
||||
if tag != nil {
|
||||
if _, found := tagsSet[tag.ID]; !found {
|
||||
result.TagIds = append(result.TagIds, strconv.Itoa(tag.ID))
|
||||
@@ -643,23 +625,23 @@ func (p *SceneFilenameParser) setTags(h sceneHolder, result *models.SceneParserR
|
||||
}
|
||||
}
|
||||
|
||||
func (p *SceneFilenameParser) setStudio(h sceneHolder, result *models.SceneParserResult) {
|
||||
func (p *SceneFilenameParser) setStudio(qb models.StudioReader, h sceneHolder, result *models.SceneParserResult) {
|
||||
// query for each performer
|
||||
if h.studio != "" {
|
||||
studio := p.queryStudio(h.studio)
|
||||
studio := p.queryStudio(qb, h.studio)
|
||||
if studio != nil {
|
||||
studioId := strconv.Itoa(studio.ID)
|
||||
result.StudioID = &studioId
|
||||
studioID := strconv.Itoa(studio.ID)
|
||||
result.StudioID = &studioID
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (p *SceneFilenameParser) setMovies(h sceneHolder, result *models.SceneParserResult) {
|
||||
func (p *SceneFilenameParser) setMovies(qb models.MovieReader, h sceneHolder, result *models.SceneParserResult) {
|
||||
// query for each movie
|
||||
moviesSet := make(map[int]bool)
|
||||
for _, movieName := range h.movies {
|
||||
if movieName != "" {
|
||||
movie := p.queryMovie(movieName)
|
||||
movie := p.queryMovie(qb, movieName)
|
||||
if movie != nil {
|
||||
if _, found := moviesSet[movie.ID]; !found {
|
||||
result.Movies = append(result.Movies, &models.SceneMovieID{
|
||||
@@ -672,7 +654,7 @@ func (p *SceneFilenameParser) setMovies(h sceneHolder, result *models.SceneParse
|
||||
}
|
||||
}
|
||||
|
||||
func (p *SceneFilenameParser) setParserResult(h sceneHolder, result *models.SceneParserResult) {
|
||||
func (p *SceneFilenameParser) setParserResult(repo models.ReaderRepository, h sceneHolder, result *models.SceneParserResult) {
|
||||
if h.result.Title.Valid {
|
||||
title := h.result.Title.String
|
||||
title = p.replaceWhitespaceCharacters(title)
|
||||
@@ -694,15 +676,15 @@ func (p *SceneFilenameParser) setParserResult(h sceneHolder, result *models.Scen
|
||||
}
|
||||
|
||||
if len(h.performers) > 0 {
|
||||
p.setPerformers(h, result)
|
||||
p.setPerformers(repo.Performer(), h, result)
|
||||
}
|
||||
if len(h.tags) > 0 {
|
||||
p.setTags(h, result)
|
||||
p.setTags(repo.Tag(), h, result)
|
||||
}
|
||||
p.setStudio(h, result)
|
||||
p.setStudio(repo.Studio(), h, result)
|
||||
|
||||
if len(h.movies) > 0 {
|
||||
p.setMovies(h, result)
|
||||
p.setMovies(repo.Movie(), h, result)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -5,43 +5,11 @@ import (
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
// DestroyImage deletes an image and its associated relationships from the
|
||||
// database.
|
||||
func DestroyImage(imageID int, tx *sqlx.Tx) error {
|
||||
qb := models.NewImageQueryBuilder()
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
|
||||
_, err := qb.Find(imageID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := jqb.DestroyImagesTags(imageID, tx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := jqb.DestroyPerformersImages(imageID, tx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := jqb.DestroyImageGalleries(imageID, tx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := qb.Destroy(imageID, tx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// DeleteGeneratedImageFiles deletes generated files for the provided image.
|
||||
func DeleteGeneratedImageFiles(image *models.Image) {
|
||||
thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(image.Checksum, models.DefaultGthumbWidth)
|
||||
|
||||
@@ -10,8 +10,10 @@ import (
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/manager/config"
|
||||
"github.com/stashapp/stash/pkg/manager/paths"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/plugin"
|
||||
"github.com/stashapp/stash/pkg/scraper"
|
||||
"github.com/stashapp/stash/pkg/sqlite"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
@@ -26,6 +28,8 @@ type singleton struct {
|
||||
ScraperCache *scraper.Cache
|
||||
|
||||
DownloadStore *DownloadStore
|
||||
|
||||
TxnManager models.TransactionManager
|
||||
}
|
||||
|
||||
var instance *singleton
|
||||
@@ -54,10 +58,11 @@ func Initialize() *singleton {
|
||||
Paths: paths.NewPaths(),
|
||||
|
||||
PluginCache: initPluginCache(),
|
||||
ScraperCache: initScraperCache(),
|
||||
|
||||
DownloadStore: NewDownloadStore(),
|
||||
TxnManager: sqlite.NewTransactionManager(),
|
||||
}
|
||||
instance.ScraperCache = instance.initScraperCache()
|
||||
|
||||
instance.RefreshConfig()
|
||||
|
||||
@@ -180,13 +185,13 @@ func initPluginCache() *plugin.Cache {
|
||||
}
|
||||
|
||||
// initScraperCache initializes a new scraper cache and returns it.
|
||||
func initScraperCache() *scraper.Cache {
|
||||
func (s *singleton) initScraperCache() *scraper.Cache {
|
||||
scraperConfig := scraper.GlobalConfig{
|
||||
Path: config.GetScrapersPath(),
|
||||
UserAgent: config.GetScraperUserAgent(),
|
||||
CDPPath: config.GetScraperCDPPath(),
|
||||
}
|
||||
ret, err := scraper.NewCache(scraperConfig)
|
||||
ret, err := scraper.NewCache(scraperConfig, s.TxnManager)
|
||||
|
||||
if err != nil {
|
||||
logger.Errorf("Error reading scraper configs: %s", err.Error())
|
||||
@@ -210,5 +215,5 @@ func (s *singleton) RefreshConfig() {
|
||||
// RefreshScraperCache refreshes the scraper cache. Call this when scraper
|
||||
// configuration changes.
|
||||
func (s *singleton) RefreshScraperCache() {
|
||||
s.ScraperCache = initScraperCache()
|
||||
s.ScraperCache = s.initScraperCache()
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package manager
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
@@ -119,7 +120,7 @@ func (s *singleton) neededScan(paths []*models.StashConfig) (total *int, newFile
|
||||
for _, sp := range paths {
|
||||
err := walkFilesToScan(sp, func(path string, info os.FileInfo, err error) error {
|
||||
t++
|
||||
task := ScanTask{FilePath: path}
|
||||
task := ScanTask{FilePath: path, TxnManager: s.TxnManager}
|
||||
if !task.doesPathExist() {
|
||||
n++
|
||||
}
|
||||
@@ -211,7 +212,17 @@ func (s *singleton) Scan(input models.ScanMetadataInput) {
|
||||
instance.Paths.Generated.EnsureTmpDir()
|
||||
|
||||
wg.Add()
|
||||
task := ScanTask{FilePath: path, UseFileMetadata: input.UseFileMetadata, StripFileExtension: input.StripFileExtension, fileNamingAlgorithm: fileNamingAlgo, calculateMD5: calculateMD5, GeneratePreview: input.ScanGeneratePreviews, GenerateImagePreview: input.ScanGenerateImagePreviews, GenerateSprite: input.ScanGenerateSprites}
|
||||
task := ScanTask{
|
||||
TxnManager: s.TxnManager,
|
||||
FilePath: path,
|
||||
UseFileMetadata: input.UseFileMetadata,
|
||||
StripFileExtension: input.StripFileExtension,
|
||||
fileNamingAlgorithm: fileNamingAlgo,
|
||||
calculateMD5: calculateMD5,
|
||||
GeneratePreview: input.ScanGeneratePreviews,
|
||||
GenerateImagePreview: input.ScanGenerateImagePreviews,
|
||||
GenerateSprite: input.ScanGenerateSprites,
|
||||
}
|
||||
go task.Start(&wg)
|
||||
|
||||
return nil
|
||||
@@ -240,7 +251,11 @@ func (s *singleton) Scan(input models.ScanMetadataInput) {
|
||||
|
||||
for _, path := range galleries {
|
||||
wg.Add()
|
||||
task := ScanTask{FilePath: path, UseFileMetadata: false}
|
||||
task := ScanTask{
|
||||
TxnManager: s.TxnManager,
|
||||
FilePath: path,
|
||||
UseFileMetadata: false,
|
||||
}
|
||||
go task.associateGallery(&wg)
|
||||
wg.Wait()
|
||||
}
|
||||
@@ -261,6 +276,7 @@ func (s *singleton) Import() {
|
||||
var wg sync.WaitGroup
|
||||
wg.Add(1)
|
||||
task := ImportTask{
|
||||
txnManager: s.TxnManager,
|
||||
BaseDir: config.GetMetadataPath(),
|
||||
Reset: true,
|
||||
DuplicateBehaviour: models.ImportDuplicateEnumFail,
|
||||
@@ -284,7 +300,11 @@ func (s *singleton) Export() {
|
||||
|
||||
var wg sync.WaitGroup
|
||||
wg.Add(1)
|
||||
task := ExportTask{full: true, fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm()}
|
||||
task := ExportTask{
|
||||
txnManager: s.TxnManager,
|
||||
full: true,
|
||||
fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(),
|
||||
}
|
||||
go task.Start(&wg)
|
||||
wg.Wait()
|
||||
}()
|
||||
@@ -344,21 +364,27 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
|
||||
s.Status.SetStatus(Generate)
|
||||
s.Status.indefiniteProgress()
|
||||
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
mqb := models.NewSceneMarkerQueryBuilder()
|
||||
|
||||
//this.job.total = await ObjectionUtils.getCount(Scene);
|
||||
instance.Paths.Generated.EnsureTmpDir()
|
||||
|
||||
sceneIDs := utils.StringSliceToIntSlice(input.SceneIDs)
|
||||
markerIDs := utils.StringSliceToIntSlice(input.MarkerIDs)
|
||||
sceneIDs, err := utils.StringSliceToIntSlice(input.SceneIDs)
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
}
|
||||
markerIDs, err := utils.StringSliceToIntSlice(input.MarkerIDs)
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
}
|
||||
|
||||
go func() {
|
||||
defer s.returnToIdleState()
|
||||
|
||||
var scenes []*models.Scene
|
||||
var err error
|
||||
var markers []*models.SceneMarker
|
||||
|
||||
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
qb := r.Scene()
|
||||
if len(sceneIDs) > 0 {
|
||||
scenes, err = qb.FindMany(sceneIDs)
|
||||
} else {
|
||||
@@ -366,7 +392,19 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
}

if err != nil {
logger.Errorf("failed to get scenes for generate")
return err
}

if len(markerIDs) > 0 {
markers, err = r.SceneMarker().FindMany(markerIDs)
if err != nil {
return err
}
}

return nil
}); err != nil {
logger.Error(err.Error())
return
}

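Earlier in this function, the scene and marker ID conversions now check the error from utils.StringSliceToIntSlice, matching the changelog entry for this commit. The reworked helper itself is not shown in the diff; a hedged sketch of what the new signature presumably implies:

	// hedged sketch of utils.StringSliceToIntSlice with the added error return;
	// the exact implementation in pkg/utils is not shown here (uses strconv)
	func StringSliceToIntSlice(ss []string) ([]int, error) {
		ret := make([]int, len(ss))
		for i, v := range ss {
			var err error
			ret[i], err = strconv.Atoi(v)
			if err != nil {
				return nil, err
			}
		}
		return ret, nil
	}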
@@ -377,14 +415,7 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
|
||||
|
||||
s.Status.Progress = 0
|
||||
lenScenes := len(scenes)
|
||||
total := lenScenes
|
||||
|
||||
var markers []*models.SceneMarker
|
||||
if len(markerIDs) > 0 {
|
||||
markers, err = mqb.FindMany(markerIDs)
|
||||
|
||||
total += len(markers)
|
||||
}
|
||||
total := lenScenes + len(markers)
|
||||
|
||||
if s.Status.stopping {
|
||||
logger.Info("Stopping due to user request")
|
||||
@@ -429,7 +460,11 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
|
||||
}
|
||||
|
||||
if input.Sprites {
|
||||
task := GenerateSpriteTask{Scene: *scene, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
|
||||
task := GenerateSpriteTask{
|
||||
Scene: *scene,
|
||||
Overwrite: overwrite,
|
||||
fileNamingAlgorithm: fileNamingAlgo,
|
||||
}
|
||||
wg.Add()
|
||||
go task.Start(&wg)
|
||||
}
|
||||
@@ -448,13 +483,22 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
|
||||
|
||||
if input.Markers {
|
||||
wg.Add()
|
||||
task := GenerateMarkersTask{Scene: scene, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
|
||||
task := GenerateMarkersTask{
|
||||
TxnManager: s.TxnManager,
|
||||
Scene: scene,
|
||||
Overwrite: overwrite,
|
||||
fileNamingAlgorithm: fileNamingAlgo,
|
||||
}
|
||||
go task.Start(&wg)
|
||||
}
|
||||
|
||||
if input.Transcodes {
|
||||
wg.Add()
|
||||
task := GenerateTranscodeTask{Scene: *scene, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
|
||||
task := GenerateTranscodeTask{
|
||||
Scene: *scene,
|
||||
Overwrite: overwrite,
|
||||
fileNamingAlgorithm: fileNamingAlgo,
|
||||
}
|
||||
go task.Start(&wg)
|
||||
}
|
||||
}
|
||||
@@ -474,7 +518,12 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
|
||||
}
|
||||
|
||||
wg.Add()
|
||||
task := GenerateMarkersTask{Marker: marker, Overwrite: overwrite, fileNamingAlgorithm: fileNamingAlgo}
|
||||
task := GenerateMarkersTask{
|
||||
TxnManager: s.TxnManager,
|
||||
Marker: marker,
|
||||
Overwrite: overwrite,
|
||||
fileNamingAlgorithm: fileNamingAlgo,
|
||||
}
|
||||
go task.Start(&wg)
|
||||
}
|
||||
|
||||
@@ -502,7 +551,6 @@ func (s *singleton) generateScreenshot(sceneId string, at *float64) {
|
||||
s.Status.SetStatus(Generate)
|
||||
s.Status.indefiniteProgress()
|
||||
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
instance.Paths.Generated.EnsureTmpDir()
|
||||
|
||||
go func() {
|
||||
@@ -514,13 +562,18 @@ func (s *singleton) generateScreenshot(sceneId string, at *float64) {
|
||||
return
|
||||
}
|
||||
|
||||
scene, err := qb.Find(sceneIdInt)
|
||||
if err != nil || scene == nil {
|
||||
logger.Errorf("failed to get scene for generate")
|
||||
var scene *models.Scene
|
||||
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
var err error
|
||||
scene, err = r.Scene().Find(sceneIdInt)
|
||||
return err
|
||||
}); err != nil || scene == nil {
|
||||
logger.Errorf("failed to get scene for generate: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
task := GenerateScreenshotTask{
|
||||
txnManager: s.TxnManager,
|
||||
Scene: *scene,
|
||||
ScreenshotAt: at,
|
||||
fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(),
|
||||
@@ -551,31 +604,38 @@ func (s *singleton) AutoTag(performerIds []string, studioIds []string, tagIds []
|
||||
studioCount := len(studioIds)
|
||||
tagCount := len(tagIds)
|
||||
|
||||
performerQuery := models.NewPerformerQueryBuilder()
|
||||
studioQuery := models.NewTagQueryBuilder()
|
||||
tagQuery := models.NewTagQueryBuilder()
|
||||
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
performerQuery := r.Performer()
|
||||
studioQuery := r.Studio()
|
||||
tagQuery := r.Tag()
|
||||
|
||||
const wildcard = "*"
|
||||
var err error
|
||||
if performerCount == 1 && performerIds[0] == wildcard {
|
||||
performerCount, err = performerQuery.Count()
|
||||
if err != nil {
|
||||
logger.Errorf("Error getting performer count: %s", err.Error())
|
||||
return fmt.Errorf("Error getting performer count: %s", err.Error())
|
||||
}
|
||||
}
|
||||
if studioCount == 1 && studioIds[0] == wildcard {
|
||||
studioCount, err = studioQuery.Count()
|
||||
if err != nil {
|
||||
logger.Errorf("Error getting studio count: %s", err.Error())
|
||||
return fmt.Errorf("Error getting studio count: %s", err.Error())
|
||||
}
|
||||
}
|
||||
if tagCount == 1 && tagIds[0] == wildcard {
|
||||
tagCount, err = tagQuery.Count()
|
||||
if err != nil {
|
||||
logger.Errorf("Error getting tag count: %s", err.Error())
|
||||
return fmt.Errorf("Error getting tag count: %s", err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
total := performerCount + studioCount + tagCount
|
||||
s.Status.setProgress(0, total)
|
||||
|
||||
@@ -586,36 +646,44 @@ func (s *singleton) AutoTag(performerIds []string, studioIds []string, tagIds []
|
||||
}
|
||||
|
||||
func (s *singleton) autoTagPerformers(performerIds []string) {
|
||||
performerQuery := models.NewPerformerQueryBuilder()
|
||||
|
||||
var wg sync.WaitGroup
|
||||
for _, performerId := range performerIds {
|
||||
var performers []*models.Performer
|
||||
|
||||
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
performerQuery := r.Performer()
|
||||
|
||||
if performerId == "*" {
|
||||
var err error
|
||||
performers, err = performerQuery.All()
|
||||
if err != nil {
|
||||
logger.Errorf("Error querying performers: %s", err.Error())
|
||||
continue
|
||||
return fmt.Errorf("Error querying performers: %s", err.Error())
|
||||
}
|
||||
} else {
|
||||
performerIdInt, err := strconv.Atoi(performerId)
|
||||
if err != nil {
|
||||
logger.Errorf("Error parsing performer id %s: %s", performerId, err.Error())
|
||||
continue
|
||||
return fmt.Errorf("Error parsing performer id %s: %s", performerId, err.Error())
|
||||
}
|
||||
|
||||
performer, err := performerQuery.Find(performerIdInt)
|
||||
if err != nil {
|
||||
logger.Errorf("Error finding performer id %s: %s", performerId, err.Error())
|
||||
continue
|
||||
return fmt.Errorf("Error finding performer id %s: %s", performerId, err.Error())
|
||||
}
|
||||
performers = append(performers, performer)
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
continue
|
||||
}
|
||||
|
||||
for _, performer := range performers {
|
||||
wg.Add(1)
|
||||
task := AutoTagPerformerTask{performer: performer}
|
||||
task := AutoTagPerformerTask{
|
||||
txnManager: s.TxnManager,
|
||||
performer: performer,
|
||||
}
|
||||
go task.Start(&wg)
|
||||
wg.Wait()
|
||||
|
||||
@@ -625,36 +693,43 @@ func (s *singleton) autoTagPerformers(performerIds []string) {
|
||||
}
|
||||
|
||||
func (s *singleton) autoTagStudios(studioIds []string) {
|
||||
studioQuery := models.NewStudioQueryBuilder()
|
||||
|
||||
var wg sync.WaitGroup
|
||||
for _, studioId := range studioIds {
|
||||
var studios []*models.Studio
|
||||
|
||||
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
studioQuery := r.Studio()
|
||||
if studioId == "*" {
|
||||
var err error
|
||||
studios, err = studioQuery.All()
|
||||
if err != nil {
|
||||
logger.Errorf("Error querying studios: %s", err.Error())
|
||||
continue
|
||||
return fmt.Errorf("Error querying studios: %s", err.Error())
|
||||
}
|
||||
} else {
|
||||
studioIdInt, err := strconv.Atoi(studioId)
|
||||
if err != nil {
|
||||
logger.Errorf("Error parsing studio id %s: %s", studioId, err.Error())
|
||||
continue
|
||||
return fmt.Errorf("Error parsing studio id %s: %s", studioId, err.Error())
|
||||
}
|
||||
|
||||
studio, err := studioQuery.Find(studioIdInt, nil)
|
||||
studio, err := studioQuery.Find(studioIdInt)
|
||||
if err != nil {
|
||||
logger.Errorf("Error finding studio id %s: %s", studioId, err.Error())
|
||||
continue
|
||||
return fmt.Errorf("Error finding studio id %s: %s", studioId, err.Error())
|
||||
}
|
||||
studios = append(studios, studio)
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
continue
|
||||
}
|
||||
|
||||
for _, studio := range studios {
|
||||
wg.Add(1)
|
||||
task := AutoTagStudioTask{studio: studio}
|
||||
task := AutoTagStudioTask{
|
||||
studio: studio,
|
||||
txnManager: s.TxnManager,
|
||||
}
|
||||
go task.Start(&wg)
|
||||
wg.Wait()
|
||||
|
||||
@@ -664,36 +739,42 @@ func (s *singleton) autoTagStudios(studioIds []string) {
|
||||
}
|
||||
|
||||
func (s *singleton) autoTagTags(tagIds []string) {
|
||||
tagQuery := models.NewTagQueryBuilder()
|
||||
|
||||
var wg sync.WaitGroup
|
||||
for _, tagId := range tagIds {
|
||||
var tags []*models.Tag
|
||||
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
tagQuery := r.Tag()
|
||||
if tagId == "*" {
|
||||
var err error
|
||||
tags, err = tagQuery.All()
|
||||
if err != nil {
|
||||
logger.Errorf("Error querying tags: %s", err.Error())
|
||||
continue
|
||||
return fmt.Errorf("Error querying tags: %s", err.Error())
|
||||
}
|
||||
} else {
|
||||
tagIdInt, err := strconv.Atoi(tagId)
|
||||
if err != nil {
|
||||
logger.Errorf("Error parsing tag id %s: %s", tagId, err.Error())
|
||||
continue
|
||||
return fmt.Errorf("Error parsing tag id %s: %s", tagId, err.Error())
|
||||
}
|
||||
|
||||
tag, err := tagQuery.Find(tagIdInt, nil)
|
||||
tag, err := tagQuery.Find(tagIdInt)
|
||||
if err != nil {
|
||||
logger.Errorf("Error finding tag id %s: %s", tagId, err.Error())
|
||||
continue
|
||||
return fmt.Errorf("Error finding tag id %s: %s", tagId, err.Error())
|
||||
}
|
||||
tags = append(tags, tag)
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
continue
|
||||
}
|
||||
|
||||
for _, tag := range tags {
|
||||
wg.Add(1)
|
||||
task := AutoTagTagTask{tag: tag}
|
||||
task := AutoTagTagTask{
|
||||
txnManager: s.TxnManager,
|
||||
tag: tag,
|
||||
}
|
||||
go task.Start(&wg)
|
||||
wg.Wait()
|
||||
|
||||
@@ -709,28 +790,38 @@ func (s *singleton) Clean() {
|
||||
s.Status.SetStatus(Clean)
|
||||
s.Status.indefiniteProgress()
|
||||
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
iqb := models.NewImageQueryBuilder()
|
||||
gqb := models.NewGalleryQueryBuilder()
|
||||
go func() {
|
||||
defer s.returnToIdleState()
|
||||
|
||||
var scenes []*models.Scene
|
||||
var images []*models.Image
|
||||
var galleries []*models.Gallery
|
||||
|
||||
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
qb := r.Scene()
|
||||
iqb := r.Image()
|
||||
gqb := r.Gallery()
|
||||
|
||||
logger.Infof("Starting cleaning of tracked files")
|
||||
scenes, err := qb.All()
|
||||
var err error
|
||||
scenes, err = qb.All()
|
||||
if err != nil {
|
||||
logger.Errorf("failed to fetch list of scenes for cleaning")
|
||||
return
|
||||
return errors.New("failed to fetch list of scenes for cleaning")
|
||||
}
|
||||
|
||||
images, err := iqb.All()
|
||||
images, err = iqb.All()
|
||||
if err != nil {
|
||||
logger.Errorf("failed to fetch list of images for cleaning")
|
||||
return
|
||||
return errors.New("failed to fetch list of images for cleaning")
|
||||
}
|
||||
|
||||
galleries, err := gqb.All()
|
||||
galleries, err = gqb.All()
|
||||
if err != nil {
|
||||
logger.Errorf("failed to fetch list of galleries for cleaning")
|
||||
return errors.New("failed to fetch list of galleries for cleaning")
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
@@ -757,7 +848,11 @@ func (s *singleton) Clean() {
|
||||
|
||||
wg.Add(1)
|
||||
|
||||
task := CleanTask{Scene: scene, fileNamingAlgorithm: fileNamingAlgo}
|
||||
task := CleanTask{
|
||||
TxnManager: s.TxnManager,
|
||||
Scene: scene,
|
||||
fileNamingAlgorithm: fileNamingAlgo,
|
||||
}
|
||||
go task.Start(&wg)
|
||||
wg.Wait()
|
||||
}
|
||||
@@ -776,7 +871,10 @@ func (s *singleton) Clean() {
|
||||
|
||||
wg.Add(1)
|
||||
|
||||
task := CleanTask{Image: img}
|
||||
task := CleanTask{
|
||||
TxnManager: s.TxnManager,
|
||||
Image: img,
|
||||
}
|
||||
go task.Start(&wg)
|
||||
wg.Wait()
|
||||
}
|
||||
@@ -795,7 +893,10 @@ func (s *singleton) Clean() {
|
||||
|
||||
wg.Add(1)
|
||||
|
||||
task := CleanTask{Gallery: gallery}
|
||||
task := CleanTask{
|
||||
TxnManager: s.TxnManager,
|
||||
Gallery: gallery,
|
||||
}
|
||||
go task.Start(&wg)
|
||||
wg.Wait()
|
||||
}
|
||||
@@ -811,17 +912,19 @@ func (s *singleton) MigrateHash() {
|
||||
s.Status.SetStatus(Migrate)
|
||||
s.Status.indefiniteProgress()
|
||||
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
|
||||
go func() {
|
||||
defer s.returnToIdleState()
|
||||
|
||||
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
|
||||
logger.Infof("Migrating generated files for %s naming hash", fileNamingAlgo.String())
|
||||
|
||||
scenes, err := qb.All()
|
||||
if err != nil {
|
||||
logger.Errorf("failed to fetch list of scenes for migration")
|
||||
var scenes []*models.Scene
|
||||
if err := s.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
var err error
|
||||
scenes, err = r.Scene().All()
|
||||
return err
|
||||
}); err != nil {
|
||||
logger.Errorf("failed to fetch list of scenes for migration: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
@@ -926,6 +1029,7 @@ func (s *singleton) neededGenerate(scenes []*models.Scene, input models.Generate
|
||||
|
||||
if input.Markers {
|
||||
task := GenerateMarkersTask{
|
||||
TxnManager: s.TxnManager,
|
||||
Scene: scene,
|
||||
Overwrite: overwrite,
|
||||
fileNamingAlgorithm: fileNamingAlgo,
|
||||
|
||||
@@ -2,5 +2,5 @@ package manager
|
||||
|
||||
// PostMigrate is executed after migrations have been executed.
|
||||
func (s *singleton) PostMigrate() {
|
||||
setInitialMD5Config()
|
||||
setInitialMD5Config(s.TxnManager)
|
||||
}
|
||||
|
||||
@@ -4,9 +4,6 @@ import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"github.com/stashapp/stash/pkg/ffmpeg"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
@@ -16,37 +13,54 @@ import (
|
||||
)
|
||||
|
||||
// DestroyScene deletes a scene and its associated relationships from the
|
||||
// database.
|
||||
func DestroyScene(sceneID int, tx *sqlx.Tx) error {
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
// database. Returns a function to perform any post-commit actions.
|
||||
func DestroyScene(scene *models.Scene, repo models.Repository) (func(), error) {
|
||||
qb := repo.Scene()
|
||||
mqb := repo.SceneMarker()
|
||||
gqb := repo.Gallery()
|
||||
|
||||
_, err := qb.Find(sceneID)
|
||||
if err := gqb.ClearGalleryId(scene.ID); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
markers, err := mqb.FindBySceneID(scene.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := jqb.DestroyScenesTags(sceneID, tx); err != nil {
|
||||
return err
|
||||
var funcs []func()
|
||||
for _, m := range markers {
|
||||
f, err := DestroySceneMarker(scene, m, mqb)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
funcs = append(funcs, f)
|
||||
}
|
||||
|
||||
if err := jqb.DestroyPerformersScenes(sceneID, tx); err != nil {
|
||||
return err
|
||||
if err := qb.Destroy(scene.ID); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := jqb.DestroyScenesMarkers(sceneID, tx); err != nil {
|
||||
return err
|
||||
return func() {
for _, f := range funcs {
f()
}
}, nil
}

// DestroySceneMarker deletes the scene marker from the database and returns a
// function that removes the generated files, to be executed after the
// transaction is successfully committed.
func DestroySceneMarker(scene *models.Scene, sceneMarker *models.SceneMarker, qb models.SceneMarkerWriter) (func(), error) {
if err := qb.Destroy(sceneMarker.ID); err != nil {
return nil, err
}

if err := jqb.DestroyScenesGalleries(sceneID, tx); err != nil {
return err
}

if err := qb.Destroy(strconv.Itoa(sceneID), tx); err != nil {
return err
}

return nil
// delete the preview for the marker
return func() {
seconds := int(sceneMarker.Seconds)
DeleteSceneMarkerFiles(scene, seconds, config.GetVideoFileNamingAlgorithm())
}, nil
}

// DeleteGeneratedSceneFiles deletes generated files for the provided scene.

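DestroyScene and DestroySceneMarker now return a post-commit callback instead of deleting generated files inside the transaction. A hypothetical call site, assuming resolver-side wiring consistent with the rest of this change:

	// hypothetical caller; scene, ctx and txnManager are assumed to be in scope
	var postCommit func()
	if err := txnManager.WithTxn(ctx, func(repo models.Repository) error {
		var err error
		postCommit, err = DestroyScene(scene, repo)
		return err
	}); err != nil {
		return err
	}
	// only remove generated files once the transaction has committed
	postCommit()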
@@ -4,18 +4,16 @@ import (
"errors"
"fmt"

"github.com/jmoiron/sqlx"
"github.com/stashapp/stash/pkg/models"
)

func ValidateModifyStudio(studio models.StudioPartial, tx *sqlx.Tx) error {
func ValidateModifyStudio(studio models.StudioPartial, qb models.StudioReader) error {
if studio.ParentID == nil || !studio.ParentID.Valid {
return nil
}

// ensure there is no cyclic dependency
thisID := studio.ID
qb := models.NewStudioQueryBuilder()

currentParentID := *studio.ParentID

@@ -24,7 +22,7 @@ func ValidateModifyStudio(studio models.StudioPartial, tx *sqlx.Tx) error {
return errors.New("studio cannot be an ancestor of itself")
}

currentStudio, err := qb.Find(int(currentParentID.Int64), tx)
currentStudio, err := qb.Find(int(currentParentID.Int64))
if err != nil {
return fmt.Errorf("error finding parent studio: %s", err.Error())
}

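ValidateModifyStudio now takes a StudioReader instead of an open *sqlx.Tx, so a mutation presumably passes the reader from its transaction repository. A minimal hedged sketch of that call:

	// hypothetical caller inside a write transaction; repo is the models.Repository
	// provided by WithTxn, and updatedStudio is an assumed models.StudioPartial
	if err := ValidateModifyStudio(updatedStudio, repo.Studio()); err != nil {
		return err
	}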
@@ -3,17 +3,13 @@ package manager
import (
"fmt"

"github.com/jmoiron/sqlx"
"github.com/stashapp/stash/pkg/models"
)

func EnsureTagNameUnique(tag models.Tag, tx *sqlx.Tx) error {
qb := models.NewTagQueryBuilder()

func EnsureTagNameUnique(tag models.Tag, qb models.TagReader) error {
// ensure name is unique
sameNameTag, err := qb.FindByName(tag.Name, tx, true)
sameNameTag, err := qb.FindByName(tag.Name, true)
if err != nil {
_ = tx.Rollback()
return err
}

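With EnsureTagNameUnique taking a TagReader, the tag mutations presumably validate inside the same write transaction that performs the create. A hedged sketch (the mutation wiring and the newTag value are assumptions):

	// hypothetical mutation body; newTag is an assumed models.Tag value
	if err := txnManager.WithTxn(ctx, func(repo models.Repository) error {
		if err := EnsureTagNameUnique(newTag, repo.Tag()); err != nil {
			return err
		}
		_, err := repo.Tag().Create(newTag)
		return err
	}); err != nil {
		return err
	}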
@@ -3,16 +3,18 @@ package manager
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/scene"
|
||||
)
|
||||
|
||||
type AutoTagPerformerTask struct {
|
||||
performer *models.Performer
|
||||
txnManager models.TransactionManager
|
||||
}
|
||||
|
||||
func (t *AutoTagPerformerTask) Start(wg *sync.WaitGroup) {
|
||||
@@ -32,44 +34,38 @@ func getQueryRegex(name string) string {
}

func (t *AutoTagPerformerTask) autoTagPerformer() {
qb := models.NewSceneQueryBuilder()
jqb := models.NewJoinsQueryBuilder()

regex := getQueryRegex(t.performer.Name.String)

if err := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error {
qb := r.Scene()
const ignoreOrganized = true
scenes, err := qb.QueryAllByPathRegex(regex, ignoreOrganized)

if err != nil {
logger.Infof("Error querying scenes with regex '%s': %s", regex, err.Error())
return
return fmt.Errorf("Error querying scenes with regex '%s': %s", regex, err.Error())
}

ctx := context.TODO()
tx := database.DB.MustBeginTx(ctx, nil)

for _, scene := range scenes {
added, err := jqb.AddPerformerScene(scene.ID, t.performer.ID, tx)
for _, s := range scenes {
added, err := scene.AddPerformer(qb, s.ID, t.performer.ID)

if err != nil {
logger.Infof("Error adding performer '%s' to scene '%s': %s", t.performer.Name.String, scene.GetTitle(), err.Error())
tx.Rollback()
return
return fmt.Errorf("Error adding performer '%s' to scene '%s': %s", t.performer.Name.String, s.GetTitle(), err.Error())
}

if added {
logger.Infof("Added performer '%s' to scene '%s'", t.performer.Name.String, scene.GetTitle())
logger.Infof("Added performer '%s' to scene '%s'", t.performer.Name.String, s.GetTitle())
}
}

if err := tx.Commit(); err != nil {
logger.Infof("Error adding performer to scene: %s", err.Error())
return
return nil
}); err != nil {
logger.Error(err.Error())
}
}

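The task now goes through scene.AddPerformer instead of writing the join table directly. That helper is not included in this diff; by analogy with gallery.AddImage above, it presumably follows the same append-unique shape, roughly:

	// hedged sketch only: scene.AddPerformer is not shown in this diff, and the
	// GetPerformerIDs/UpdatePerformers method names on the scene reader-writer
	// are assumed by analogy with the gallery and image interfaces used above
	func AddPerformer(qb models.SceneReaderWriter, sceneID int, performerID int) (bool, error) {
		performerIDs, err := qb.GetPerformerIDs(sceneID)
		if err != nil {
			return false, err
		}

		oldLen := len(performerIDs)
		performerIDs = utils.IntAppendUnique(performerIDs, performerID)
		if len(performerIDs) == oldLen {
			// already associated; report that nothing was added
			return false, nil
		}

		return true, qb.UpdatePerformers(sceneID, performerIDs)
	}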
type AutoTagStudioTask struct {
|
||||
studio *models.Studio
|
||||
txnManager models.TransactionManager
|
||||
}
|
||||
|
||||
func (t *AutoTagStudioTask) Start(wg *sync.WaitGroup) {
|
||||
@@ -79,21 +75,17 @@ func (t *AutoTagStudioTask) Start(wg *sync.WaitGroup) {
|
||||
}
|
||||
|
||||
func (t *AutoTagStudioTask) autoTagStudio() {
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
|
||||
regex := getQueryRegex(t.studio.Name.String)
|
||||
|
||||
if err := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
qb := r.Scene()
|
||||
const ignoreOrganized = true
|
||||
scenes, err := qb.QueryAllByPathRegex(regex, ignoreOrganized)
|
||||
|
||||
if err != nil {
|
||||
logger.Infof("Error querying scenes with regex '%s': %s", regex, err.Error())
|
||||
return
|
||||
return fmt.Errorf("Error querying scenes with regex '%s': %s", regex, err.Error())
|
||||
}
|
||||
|
||||
ctx := context.TODO()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
|
||||
for _, scene := range scenes {
|
||||
// #306 - don't overwrite studio if already present
|
||||
if scene.StudioID.Valid {
|
||||
@@ -110,23 +102,20 @@ func (t *AutoTagStudioTask) autoTagStudio() {
|
||||
StudioID: &studioID,
|
||||
}
|
||||
|
||||
_, err := qb.Update(scenePartial, tx)
|
||||
|
||||
if err != nil {
|
||||
logger.Infof("Error adding studio to scene: %s", err.Error())
|
||||
tx.Rollback()
|
||||
return
|
||||
if _, err := qb.Update(scenePartial); err != nil {
|
||||
return fmt.Errorf("Error adding studio to scene: %s", err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
logger.Infof("Error adding studio to scene: %s", err.Error())
|
||||
return
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
type AutoTagTagTask struct {
|
||||
tag *models.Tag
|
||||
txnManager models.TransactionManager
|
||||
}
|
||||
|
||||
func (t *AutoTagTagTask) Start(wg *sync.WaitGroup) {
|
||||
@@ -136,38 +125,31 @@ func (t *AutoTagTagTask) Start(wg *sync.WaitGroup) {
|
||||
}
|
||||
|
||||
func (t *AutoTagTagTask) autoTagTag() {
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
|
||||
regex := getQueryRegex(t.tag.Name)
|
||||
|
||||
if err := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
qb := r.Scene()
|
||||
const ignoreOrganized = true
|
||||
scenes, err := qb.QueryAllByPathRegex(regex, ignoreOrganized)
|
||||
|
||||
if err != nil {
|
||||
logger.Infof("Error querying scenes with regex '%s': %s", regex, err.Error())
|
||||
return
|
||||
return fmt.Errorf("Error querying scenes with regex '%s': %s", regex, err.Error())
|
||||
}
|
||||
|
||||
ctx := context.TODO()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
|
||||
for _, scene := range scenes {
|
||||
added, err := jqb.AddSceneTag(scene.ID, t.tag.ID, tx)
|
||||
for _, s := range scenes {
|
||||
added, err := scene.AddTag(qb, s.ID, t.tag.ID)
|
||||
|
||||
if err != nil {
|
||||
logger.Infof("Error adding tag '%s' to scene '%s': %s", t.tag.Name, scene.GetTitle(), err.Error())
|
||||
tx.Rollback()
|
||||
return
|
||||
return fmt.Errorf("Error adding tag '%s' to scene '%s': %s", t.tag.Name, s.GetTitle(), err.Error())
|
||||
}
|
||||
|
||||
if added {
|
||||
logger.Infof("Added tag '%s' to scene '%s'", t.tag.Name, scene.GetTitle())
|
||||
logger.Infof("Added tag '%s' to scene '%s'", t.tag.Name, s.GetTitle())
|
||||
}
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
logger.Infof("Error adding tag to scene: %s", err.Error())
|
||||
return
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
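For orientation, a minimal sketch (not part of this commit) of the transaction pattern the auto-tag tasks above now share: the callback returns an error to trigger a rollback and nil to commit, replacing the manual MustBeginTx/Commit/Rollback calls removed above. The runAutoTag name and the autoTagWork callback are hypothetical.

package manager

import (
	"context"

	"github.com/stashapp/stash/pkg/logger"
	"github.com/stashapp/stash/pkg/models"
)

// Sketch only: run one auto-tag pass inside a single transaction.
func runAutoTag(m models.TransactionManager, autoTagWork func(r models.Repository) error) {
	if err := m.WithTxn(context.TODO(), func(r models.Repository) error {
		// returning a non-nil error rolls the transaction back
		return autoTagWork(r)
	}); err != nil {
		logger.Error(err.Error())
	}
}
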
@@ -14,11 +14,11 @@ import (
|
||||
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/sqlite"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
|
||||
_ "github.com/golang-migrate/migrate/v4/database/sqlite3"
|
||||
_ "github.com/golang-migrate/migrate/v4/source/file"
|
||||
"github.com/jmoiron/sqlx"
|
||||
)
|
||||
|
||||
const testName = "Foo's Bar"
|
||||
@@ -106,17 +106,15 @@ func TestMain(m *testing.M) {
|
||||
os.Exit(ret)
|
||||
}
|
||||
|
||||
func createPerformer(tx *sqlx.Tx) error {
|
||||
func createPerformer(pqb models.PerformerWriter) error {
|
||||
// create the performer
|
||||
pqb := models.NewPerformerQueryBuilder()
|
||||
|
||||
performer := models.Performer{
|
||||
Checksum: testName,
|
||||
Name: sql.NullString{Valid: true, String: testName},
|
||||
Favorite: sql.NullBool{Valid: true, Bool: false},
|
||||
}
|
||||
|
||||
_, err := pqb.Create(performer, tx)
|
||||
_, err := pqb.Create(performer)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -124,27 +122,23 @@ func createPerformer(tx *sqlx.Tx) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func createStudio(tx *sqlx.Tx, name string) (*models.Studio, error) {
|
||||
func createStudio(qb models.StudioWriter, name string) (*models.Studio, error) {
|
||||
// create the studio
|
||||
qb := models.NewStudioQueryBuilder()
|
||||
|
||||
studio := models.Studio{
|
||||
Checksum: name,
|
||||
Name: sql.NullString{Valid: true, String: testName},
|
||||
}
|
||||
|
||||
return qb.Create(studio, tx)
|
||||
return qb.Create(studio)
|
||||
}
|
||||
|
||||
func createTag(tx *sqlx.Tx) error {
|
||||
func createTag(qb models.TagWriter) error {
|
||||
// create the studio
|
||||
qb := models.NewTagQueryBuilder()
|
||||
|
||||
tag := models.Tag{
|
||||
Name: testName,
|
||||
}
|
||||
|
||||
_, err := qb.Create(tag, tx)
|
||||
_, err := qb.Create(tag)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -152,9 +146,7 @@ func createTag(tx *sqlx.Tx) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func createScenes(tx *sqlx.Tx) error {
|
||||
sqb := models.NewSceneQueryBuilder()
|
||||
|
||||
func createScenes(sqb models.SceneReaderWriter) error {
|
||||
// create the scenes
|
||||
var scenePatterns []string
|
||||
var falseScenePatterns []string
|
||||
@@ -175,13 +167,13 @@ func createScenes(tx *sqlx.Tx) error {
|
||||
}
|
||||
|
||||
for _, fn := range scenePatterns {
|
||||
err := createScene(sqb, tx, makeScene(fn, true))
|
||||
err := createScene(sqb, makeScene(fn, true))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
for _, fn := range falseScenePatterns {
|
||||
err := createScene(sqb, tx, makeScene(fn, false))
|
||||
err := createScene(sqb, makeScene(fn, false))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -191,7 +183,7 @@ func createScenes(tx *sqlx.Tx) error {
|
||||
for _, fn := range scenePatterns {
|
||||
s := makeScene("organized"+fn, false)
|
||||
s.Organized = true
|
||||
err := createScene(sqb, tx, s)
|
||||
err := createScene(sqb, s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -200,7 +192,7 @@ func createScenes(tx *sqlx.Tx) error {
|
||||
// create scene with existing studio io
|
||||
studioScene := makeScene(existingStudioSceneName, true)
|
||||
studioScene.StudioID = sql.NullInt64{Valid: true, Int64: int64(existingStudioID)}
|
||||
err := createScene(sqb, tx, studioScene)
|
||||
err := createScene(sqb, studioScene)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -222,8 +214,8 @@ func makeScene(name string, expectedResult bool) *models.Scene {
|
||||
return scene
|
||||
}
|
||||
|
||||
func createScene(sqb models.SceneQueryBuilder, tx *sqlx.Tx, scene *models.Scene) error {
|
||||
_, err := sqb.Create(*scene, tx)
|
||||
func createScene(sqb models.SceneWriter, scene *models.Scene) error {
|
||||
_, err := sqb.Create(*scene)
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to create scene with name '%s': %s", scene.Path, err.Error())
|
||||
@@ -232,39 +224,43 @@ func createScene(sqb models.SceneQueryBuilder, tx *sqlx.Tx, scene *models.Scene)
|
||||
return nil
|
||||
}
|
||||
|
||||
func populateDB() error {
ctx := context.TODO()
tx := database.DB.MustBeginTx(ctx, nil)
func withTxn(f func(r models.Repository) error) error {
t := sqlite.NewTransactionManager()
return t.WithTxn(context.TODO(), f)
}

err := createPerformer(tx)
func populateDB() error {
if err := withTxn(func(r models.Repository) error {
err := createPerformer(r.Performer())
if err != nil {
return err
}

_, err = createStudio(tx, testName)
_, err = createStudio(r.Studio(), testName)
if err != nil {
return err
}

// create existing studio
existingStudio, err := createStudio(tx, existingStudioName)
existingStudio, err := createStudio(r.Studio(), existingStudioName)
if err != nil {
return err
}

existingStudioID = existingStudio.ID

err = createTag(tx)
err = createTag(r.Tag())
if err != nil {
return err
}

err = createScenes(tx)
err = createScenes(r.Scene())
if err != nil {
return err
}

if err := tx.Commit(); err != nil {
return nil
}); err != nil {
return err
}

@@ -272,16 +268,19 @@ func populateDB() error {
}

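A minimal sketch (not part of this commit) of how the tests below use the withTxn helper defined above to read data back; readAllPerformers is a hypothetical name and assumes it lives in the same test file.

// Sketch only: wrap reads in a transaction via the test helper.
func readAllPerformers() ([]*models.Performer, error) {
	var performers []*models.Performer
	err := withTxn(func(r models.Repository) error {
		var err error
		performers, err = r.Performer().All()
		return err
	})
	return performers, err
}
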
func TestParsePerformers(t *testing.T) {
|
||||
pqb := models.NewPerformerQueryBuilder()
|
||||
performers, err := pqb.All()
|
||||
|
||||
if err != nil {
|
||||
var performers []*models.Performer
|
||||
if err := withTxn(func(r models.Repository) error {
|
||||
var err error
|
||||
performers, err = r.Performer().All()
|
||||
return err
|
||||
}); err != nil {
|
||||
t.Errorf("Error getting performer: %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
task := AutoTagPerformerTask{
|
||||
performer: performers[0],
|
||||
txnManager: sqlite.NewTransactionManager(),
|
||||
}
|
||||
|
||||
var wg sync.WaitGroup
|
||||
@@ -289,16 +288,19 @@ func TestParsePerformers(t *testing.T) {
|
||||
task.Start(&wg)
|
||||
|
||||
// verify that scenes were tagged correctly
|
||||
sqb := models.NewSceneQueryBuilder()
|
||||
withTxn(func(r models.Repository) error {
|
||||
pqb := r.Performer()
|
||||
|
||||
scenes, err := sqb.All()
|
||||
scenes, err := r.Scene().All()
|
||||
if err != nil {
|
||||
t.Error(err.Error())
|
||||
}
|
||||
|
||||
for _, scene := range scenes {
|
||||
performers, err := pqb.FindBySceneID(scene.ID, nil)
|
||||
performers, err := pqb.FindBySceneID(scene.ID)
|
||||
|
||||
if err != nil {
|
||||
t.Errorf("Error getting scene performers: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
// title is only set on scenes where we expect performer to be set
|
||||
@@ -308,19 +310,25 @@ func TestParsePerformers(t *testing.T) {
|
||||
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, scene.Path)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
func TestParseStudios(t *testing.T) {
|
||||
studioQuery := models.NewStudioQueryBuilder()
|
||||
studios, err := studioQuery.All()
|
||||
|
||||
if err != nil {
|
||||
var studios []*models.Studio
|
||||
if err := withTxn(func(r models.Repository) error {
|
||||
var err error
|
||||
studios, err = r.Studio().All()
|
||||
return err
|
||||
}); err != nil {
|
||||
t.Errorf("Error getting studio: %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
task := AutoTagStudioTask{
|
||||
studio: studios[0],
|
||||
txnManager: sqlite.NewTransactionManager(),
|
||||
}
|
||||
|
||||
var wg sync.WaitGroup
|
||||
@@ -328,9 +336,11 @@ func TestParseStudios(t *testing.T) {
|
||||
task.Start(&wg)
|
||||
|
||||
// verify that scenes were tagged correctly
|
||||
sqb := models.NewSceneQueryBuilder()
|
||||
|
||||
scenes, err := sqb.All()
|
||||
withTxn(func(r models.Repository) error {
|
||||
scenes, err := r.Scene().All()
|
||||
if err != nil {
|
||||
t.Error(err.Error())
|
||||
}
|
||||
|
||||
for _, scene := range scenes {
|
||||
// check for existing studio id scene first
|
||||
@@ -347,19 +357,25 @@ func TestParseStudios(t *testing.T) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
func TestParseTags(t *testing.T) {
|
||||
tagQuery := models.NewTagQueryBuilder()
|
||||
tags, err := tagQuery.All()
|
||||
|
||||
if err != nil {
|
||||
var tags []*models.Tag
|
||||
if err := withTxn(func(r models.Repository) error {
|
||||
var err error
|
||||
tags, err = r.Tag().All()
|
||||
return err
|
||||
}); err != nil {
|
||||
t.Errorf("Error getting performer: %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
task := AutoTagTagTask{
|
||||
tag: tags[0],
|
||||
txnManager: sqlite.NewTransactionManager(),
|
||||
}
|
||||
|
||||
var wg sync.WaitGroup
|
||||
@@ -367,16 +383,19 @@ func TestParseTags(t *testing.T) {
|
||||
task.Start(&wg)
|
||||
|
||||
// verify that scenes were tagged correctly
|
||||
sqb := models.NewSceneQueryBuilder()
|
||||
withTxn(func(r models.Repository) error {
|
||||
scenes, err := r.Scene().All()
|
||||
if err != nil {
|
||||
t.Error(err.Error())
|
||||
}
|
||||
|
||||
scenes, err := sqb.All()
|
||||
tqb := r.Tag()
|
||||
|
||||
for _, scene := range scenes {
|
||||
tags, err := tagQuery.FindBySceneID(scene.ID, nil)
|
||||
tags, err := tqb.FindBySceneID(scene.ID)
|
||||
|
||||
if err != nil {
|
||||
t.Errorf("Error getting scene tags: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
// title is only set on scenes where we expect performer to be set
|
||||
@@ -386,4 +405,7 @@ func TestParseTags(t *testing.T) {
|
||||
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, scene.Path)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
@@ -7,7 +7,6 @@ import (
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/image"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/manager/config"
|
||||
@@ -15,6 +14,7 @@ import (
|
||||
)
|
||||
|
||||
type CleanTask struct {
|
||||
TxnManager models.TransactionManager
|
||||
Scene *models.Scene
|
||||
Gallery *models.Gallery
|
||||
Image *models.Image
|
||||
@@ -133,60 +133,45 @@ func (t *CleanTask) shouldCleanImage(s *models.Image) bool {
|
||||
}
|
||||
|
||||
func (t *CleanTask) deleteScene(sceneID int) {
ctx := context.TODO()
qb := models.NewSceneQueryBuilder()
tx := database.DB.MustBeginTx(ctx, nil)

scene, err := qb.Find(sceneID)
err = DestroyScene(sceneID, tx)
var postCommitFunc func()
var scene *models.Scene
if err := t.TxnManager.WithTxn(context.TODO(), func(repo models.Repository) error {
qb := repo.Scene()

var err error
scene, err = qb.Find(sceneID)
if err != nil {
return err
}
postCommitFunc, err = DestroyScene(scene, repo)
return err
}); err != nil {
logger.Errorf("Error deleting scene from database: %s", err.Error())
tx.Rollback()
return
}

if err := tx.Commit(); err != nil {
logger.Errorf("Error deleting scene from database: %s", err.Error())
return
}
postCommitFunc()

DeleteGeneratedSceneFiles(scene, t.fileNamingAlgorithm)
}

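A brief sketch (not part of this commit) of the pattern deleteScene now follows: the database work happens inside the transaction, a callback is returned, and filesystem cleanup only runs once the transaction has committed. The deleteWithPostCommit and destroy names are hypothetical.

package manager

import (
	"context"

	"github.com/stashapp/stash/pkg/logger"
	"github.com/stashapp/stash/pkg/models"
)

// Sketch only: defer side effects until the transaction has committed.
func deleteWithPostCommit(m models.TransactionManager, destroy func(models.Repository) (func(), error)) {
	var postCommit func()
	if err := m.WithTxn(context.TODO(), func(repo models.Repository) error {
		var err error
		postCommit, err = destroy(repo) // database changes happen inside the txn
		return err
	}); err != nil {
		logger.Error(err.Error())
		return // nothing was committed, so no post-commit cleanup runs
	}
	postCommit() // e.g. delete generated files now that the DB change is durable
}
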
func (t *CleanTask) deleteGallery(galleryID int) {
|
||||
ctx := context.TODO()
|
||||
qb := models.NewGalleryQueryBuilder()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
|
||||
err := qb.Destroy(galleryID, tx)
|
||||
|
||||
if err != nil {
|
||||
logger.Errorf("Error deleting gallery from database: %s", err.Error())
|
||||
tx.Rollback()
|
||||
return
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(repo models.Repository) error {
|
||||
qb := repo.Gallery()
|
||||
return qb.Destroy(galleryID)
|
||||
}); err != nil {
|
||||
logger.Errorf("Error deleting gallery from database: %s", err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func (t *CleanTask) deleteImage(imageID int) {
|
||||
ctx := context.TODO()
|
||||
qb := models.NewImageQueryBuilder()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
|
||||
err := qb.Destroy(imageID, tx)
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(repo models.Repository) error {
|
||||
qb := repo.Image()
|
||||
|
||||
if err != nil {
|
||||
logger.Errorf("Error deleting image from database: %s", err.Error())
|
||||
tx.Rollback()
|
||||
return
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
return qb.Destroy(imageID)
|
||||
}); err != nil {
|
||||
logger.Errorf("Error deleting image from database: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package manager
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
@@ -27,6 +28,7 @@ import (
|
||||
)
|
||||
|
||||
type ExportTask struct {
|
||||
txnManager models.TransactionManager
|
||||
full bool
|
||||
|
||||
baseDir string
|
||||
@@ -58,8 +60,10 @@ func newExportSpec(input *models.ExportObjectTypeInput) *exportSpec {
|
||||
return &exportSpec{}
|
||||
}
|
||||
|
||||
ids, _ := utils.StringSliceToIntSlice(input.Ids)
|
||||
|
||||
ret := &exportSpec{
|
||||
IDs: utils.StringSliceToIntSlice(input.Ids),
|
||||
IDs: ids,
|
||||
}
|
||||
|
||||
if input.All != nil {
|
||||
@@ -76,6 +80,7 @@ func CreateExportTask(a models.HashAlgorithm, input models.ExportObjectsInput) *
|
||||
}
|
||||
|
||||
return &ExportTask{
|
||||
txnManager: GetInstance().TxnManager,
|
||||
fileNamingAlgorithm: a,
|
||||
scenes: newExportSpec(input.Scenes),
|
||||
images: newExportSpec(input.Images),
|
||||
@@ -125,34 +130,40 @@ func (t *ExportTask) Start(wg *sync.WaitGroup) {
|
||||
|
||||
paths.EnsureJSONDirs(t.baseDir)
|
||||
|
||||
t.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
// include movie scenes and gallery images
|
||||
if !t.full {
|
||||
// only include movie scenes if includeDependencies is also set
|
||||
if !t.scenes.all && t.includeDependencies {
|
||||
t.populateMovieScenes()
|
||||
t.populateMovieScenes(r)
|
||||
}
|
||||
|
||||
// always export gallery images
|
||||
if !t.images.all {
|
||||
t.populateGalleryImages()
|
||||
t.populateGalleryImages(r)
|
||||
}
|
||||
}
|
||||
|
||||
t.ExportScenes(workerCount)
|
||||
t.ExportImages(workerCount)
|
||||
t.ExportGalleries(workerCount)
|
||||
t.ExportMovies(workerCount)
|
||||
t.ExportPerformers(workerCount)
|
||||
t.ExportStudios(workerCount)
|
||||
t.ExportTags(workerCount)
|
||||
t.ExportScenes(workerCount, r)
|
||||
t.ExportImages(workerCount, r)
|
||||
t.ExportGalleries(workerCount, r)
|
||||
t.ExportMovies(workerCount, r)
|
||||
t.ExportPerformers(workerCount, r)
|
||||
t.ExportStudios(workerCount, r)
|
||||
t.ExportTags(workerCount, r)
|
||||
|
||||
if t.full {
|
||||
t.ExportScrapedItems(r)
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
if err := t.json.saveMappings(t.Mappings); err != nil {
|
||||
logger.Errorf("[mappings] failed to save json: %s", err.Error())
|
||||
}
|
||||
|
||||
if t.full {
|
||||
t.ExportScrapedItems()
|
||||
} else {
|
||||
if !t.full {
|
||||
err := t.generateDownload()
|
||||
if err != nil {
|
||||
logger.Errorf("error generating download link: %s", err.Error())
|
||||
@@ -242,9 +253,9 @@ func (t *ExportTask) zipFile(fn, outDir string, z *zip.Writer) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (t *ExportTask) populateMovieScenes() {
|
||||
reader := models.NewMovieReaderWriter(nil)
|
||||
sceneReader := models.NewSceneReaderWriter(nil)
|
||||
func (t *ExportTask) populateMovieScenes(repo models.ReaderRepository) {
|
||||
reader := repo.Movie()
|
||||
sceneReader := repo.Scene()
|
||||
|
||||
var movies []*models.Movie
|
||||
var err error
|
||||
@@ -272,9 +283,9 @@ func (t *ExportTask) populateMovieScenes() {
|
||||
}
|
||||
}
|
||||
|
||||
func (t *ExportTask) populateGalleryImages() {
|
||||
reader := models.NewGalleryReaderWriter(nil)
|
||||
imageReader := models.NewImageReaderWriter(nil)
|
||||
func (t *ExportTask) populateGalleryImages(repo models.ReaderRepository) {
|
||||
reader := repo.Gallery()
|
||||
imageReader := repo.Image()
|
||||
|
||||
var galleries []*models.Gallery
|
||||
var err error
|
||||
@@ -302,10 +313,10 @@ func (t *ExportTask) populateGalleryImages() {
|
||||
}
|
||||
}
|
||||
|
||||
func (t *ExportTask) ExportScenes(workers int) {
|
||||
func (t *ExportTask) ExportScenes(workers int, repo models.ReaderRepository) {
|
||||
var scenesWg sync.WaitGroup
|
||||
|
||||
sceneReader := models.NewSceneReaderWriter(nil)
|
||||
sceneReader := repo.Scene()
|
||||
|
||||
var scenes []*models.Scene
|
||||
var err error
|
||||
@@ -327,7 +338,7 @@ func (t *ExportTask) ExportScenes(workers int) {
|
||||
|
||||
for w := 0; w < workers; w++ { // create export Scene workers
|
||||
scenesWg.Add(1)
|
||||
go exportScene(&scenesWg, jobCh, t)
|
||||
go exportScene(&scenesWg, jobCh, repo, t)
|
||||
}
|
||||
|
||||
for i, scene := range scenes {
|
||||
@@ -346,16 +357,15 @@ func (t *ExportTask) ExportScenes(workers int) {
|
||||
logger.Infof("[scenes] export complete in %s. %d workers used.", time.Since(startTime), workers)
|
||||
}
|
||||
|
||||
func exportScene(wg *sync.WaitGroup, jobChan <-chan *models.Scene, t *ExportTask) {
|
||||
func exportScene(wg *sync.WaitGroup, jobChan <-chan *models.Scene, repo models.ReaderRepository, t *ExportTask) {
|
||||
defer wg.Done()
|
||||
sceneReader := models.NewSceneReaderWriter(nil)
|
||||
studioReader := models.NewStudioReaderWriter(nil)
|
||||
movieReader := models.NewMovieReaderWriter(nil)
|
||||
galleryReader := models.NewGalleryReaderWriter(nil)
|
||||
performerReader := models.NewPerformerReaderWriter(nil)
|
||||
tagReader := models.NewTagReaderWriter(nil)
|
||||
sceneMarkerReader := models.NewSceneMarkerReaderWriter(nil)
|
||||
joinReader := models.NewJoinReaderWriter(nil)
|
||||
sceneReader := repo.Scene()
|
||||
studioReader := repo.Studio()
|
||||
movieReader := repo.Movie()
|
||||
galleryReader := repo.Gallery()
|
||||
performerReader := repo.Performer()
|
||||
tagReader := repo.Tag()
|
||||
sceneMarkerReader := repo.SceneMarker()
|
||||
|
||||
for s := range jobChan {
|
||||
sceneHash := s.GetHash(t.fileNamingAlgorithm)
|
||||
@@ -402,7 +412,7 @@ func exportScene(wg *sync.WaitGroup, jobChan <-chan *models.Scene, t *ExportTask
|
||||
continue
|
||||
}
|
||||
|
||||
newSceneJSON.Movies, err = scene.GetSceneMoviesJSON(movieReader, joinReader, s)
|
||||
newSceneJSON.Movies, err = scene.GetSceneMoviesJSON(movieReader, sceneReader, s)
|
||||
if err != nil {
|
||||
logger.Errorf("[scenes] <%s> error getting scene movies JSON: %s", sceneHash, err.Error())
|
||||
continue
|
||||
@@ -417,14 +427,14 @@ func exportScene(wg *sync.WaitGroup, jobChan <-chan *models.Scene, t *ExportTask
|
||||
t.galleries.IDs = utils.IntAppendUnique(t.galleries.IDs, sceneGallery.ID)
|
||||
}
|
||||
|
||||
tagIDs, err := scene.GetDependentTagIDs(tagReader, joinReader, sceneMarkerReader, s)
|
||||
tagIDs, err := scene.GetDependentTagIDs(tagReader, sceneMarkerReader, s)
|
||||
if err != nil {
|
||||
logger.Errorf("[scenes] <%s> error getting scene tags: %s", sceneHash, err.Error())
|
||||
continue
|
||||
}
|
||||
t.tags.IDs = utils.IntAppendUniques(t.tags.IDs, tagIDs)
|
||||
|
||||
movieIDs, err := scene.GetDependentMovieIDs(joinReader, s)
|
||||
movieIDs, err := scene.GetDependentMovieIDs(sceneReader, s)
|
||||
if err != nil {
|
||||
logger.Errorf("[scenes] <%s> error getting scene movies: %s", sceneHash, err.Error())
|
||||
continue
|
||||
@@ -445,10 +455,10 @@ func exportScene(wg *sync.WaitGroup, jobChan <-chan *models.Scene, t *ExportTask
|
||||
}
|
||||
}
|
||||
|
||||
func (t *ExportTask) ExportImages(workers int) {
|
||||
func (t *ExportTask) ExportImages(workers int, repo models.ReaderRepository) {
|
||||
var imagesWg sync.WaitGroup
|
||||
|
||||
imageReader := models.NewImageReaderWriter(nil)
|
||||
imageReader := repo.Image()
|
||||
|
||||
var images []*models.Image
|
||||
var err error
|
||||
@@ -470,7 +480,7 @@ func (t *ExportTask) ExportImages(workers int) {
|
||||
|
||||
for w := 0; w < workers; w++ { // create export Image workers
|
||||
imagesWg.Add(1)
|
||||
go exportImage(&imagesWg, jobCh, t)
|
||||
go exportImage(&imagesWg, jobCh, repo, t)
|
||||
}
|
||||
|
||||
for i, image := range images {
|
||||
@@ -489,12 +499,12 @@ func (t *ExportTask) ExportImages(workers int) {
|
||||
logger.Infof("[images] export complete in %s. %d workers used.", time.Since(startTime), workers)
|
||||
}
|
||||
|
||||
func exportImage(wg *sync.WaitGroup, jobChan <-chan *models.Image, t *ExportTask) {
|
||||
func exportImage(wg *sync.WaitGroup, jobChan <-chan *models.Image, repo models.ReaderRepository, t *ExportTask) {
|
||||
defer wg.Done()
|
||||
studioReader := models.NewStudioReaderWriter(nil)
|
||||
galleryReader := models.NewGalleryReaderWriter(nil)
|
||||
performerReader := models.NewPerformerReaderWriter(nil)
|
||||
tagReader := models.NewTagReaderWriter(nil)
|
||||
studioReader := repo.Studio()
|
||||
galleryReader := repo.Gallery()
|
||||
performerReader := repo.Performer()
|
||||
tagReader := repo.Tag()
|
||||
|
||||
for s := range jobChan {
|
||||
imageHash := s.Checksum
|
||||
@@ -560,10 +570,10 @@ func (t *ExportTask) getGalleryChecksums(galleries []*models.Gallery) (ret []str
|
||||
return
|
||||
}
|
||||
|
||||
func (t *ExportTask) ExportGalleries(workers int) {
|
||||
func (t *ExportTask) ExportGalleries(workers int, repo models.ReaderRepository) {
|
||||
var galleriesWg sync.WaitGroup
|
||||
|
||||
reader := models.NewGalleryReaderWriter(nil)
|
||||
reader := repo.Gallery()
|
||||
|
||||
var galleries []*models.Gallery
|
||||
var err error
|
||||
@@ -585,7 +595,7 @@ func (t *ExportTask) ExportGalleries(workers int) {
|
||||
|
||||
for w := 0; w < workers; w++ { // create export Scene workers
|
||||
galleriesWg.Add(1)
|
||||
go exportGallery(&galleriesWg, jobCh, t)
|
||||
go exportGallery(&galleriesWg, jobCh, repo, t)
|
||||
}
|
||||
|
||||
for i, gallery := range galleries {
|
||||
@@ -609,11 +619,11 @@ func (t *ExportTask) ExportGalleries(workers int) {
|
||||
logger.Infof("[galleries] export complete in %s. %d workers used.", time.Since(startTime), workers)
|
||||
}
|
||||
|
||||
func exportGallery(wg *sync.WaitGroup, jobChan <-chan *models.Gallery, t *ExportTask) {
|
||||
func exportGallery(wg *sync.WaitGroup, jobChan <-chan *models.Gallery, repo models.ReaderRepository, t *ExportTask) {
|
||||
defer wg.Done()
|
||||
studioReader := models.NewStudioReaderWriter(nil)
|
||||
performerReader := models.NewPerformerReaderWriter(nil)
|
||||
tagReader := models.NewTagReaderWriter(nil)
|
||||
studioReader := repo.Studio()
|
||||
performerReader := repo.Performer()
|
||||
tagReader := repo.Tag()
|
||||
|
||||
for g := range jobChan {
|
||||
galleryHash := g.Checksum
|
||||
@@ -666,10 +676,10 @@ func exportGallery(wg *sync.WaitGroup, jobChan <-chan *models.Gallery, t *Export
|
||||
}
|
||||
}
|
||||
|
||||
func (t *ExportTask) ExportPerformers(workers int) {
|
||||
func (t *ExportTask) ExportPerformers(workers int, repo models.ReaderRepository) {
|
||||
var performersWg sync.WaitGroup
|
||||
|
||||
reader := models.NewPerformerReaderWriter(nil)
|
||||
reader := repo.Performer()
|
||||
var performers []*models.Performer
|
||||
var err error
|
||||
all := t.full || (t.performers != nil && t.performers.all)
|
||||
@@ -689,7 +699,7 @@ func (t *ExportTask) ExportPerformers(workers int) {
|
||||
|
||||
for w := 0; w < workers; w++ { // create export Performer workers
|
||||
performersWg.Add(1)
|
||||
go t.exportPerformer(&performersWg, jobCh)
|
||||
go t.exportPerformer(&performersWg, jobCh, repo)
|
||||
}
|
||||
|
||||
for i, performer := range performers {
|
||||
@@ -706,10 +716,10 @@ func (t *ExportTask) ExportPerformers(workers int) {
|
||||
logger.Infof("[performers] export complete in %s. %d workers used.", time.Since(startTime), workers)
|
||||
}
|
||||
|
||||
func (t *ExportTask) exportPerformer(wg *sync.WaitGroup, jobChan <-chan *models.Performer) {
|
||||
func (t *ExportTask) exportPerformer(wg *sync.WaitGroup, jobChan <-chan *models.Performer, repo models.ReaderRepository) {
|
||||
defer wg.Done()
|
||||
|
||||
performerReader := models.NewPerformerReaderWriter(nil)
|
||||
performerReader := repo.Performer()
|
||||
|
||||
for p := range jobChan {
|
||||
newPerformerJSON, err := performer.ToJSON(performerReader, p)
|
||||
@@ -732,10 +742,10 @@ func (t *ExportTask) exportPerformer(wg *sync.WaitGroup, jobChan <-chan *models.
|
||||
}
|
||||
}
|
||||
|
||||
func (t *ExportTask) ExportStudios(workers int) {
|
||||
func (t *ExportTask) ExportStudios(workers int, repo models.ReaderRepository) {
|
||||
var studiosWg sync.WaitGroup
|
||||
|
||||
reader := models.NewStudioReaderWriter(nil)
|
||||
reader := repo.Studio()
|
||||
var studios []*models.Studio
|
||||
var err error
|
||||
all := t.full || (t.studios != nil && t.studios.all)
|
||||
@@ -756,7 +766,7 @@ func (t *ExportTask) ExportStudios(workers int) {
|
||||
|
||||
for w := 0; w < workers; w++ { // create export Studio workers
|
||||
studiosWg.Add(1)
|
||||
go t.exportStudio(&studiosWg, jobCh)
|
||||
go t.exportStudio(&studiosWg, jobCh, repo)
|
||||
}
|
||||
|
||||
for i, studio := range studios {
|
||||
@@ -773,10 +783,10 @@ func (t *ExportTask) ExportStudios(workers int) {
|
||||
logger.Infof("[studios] export complete in %s. %d workers used.", time.Since(startTime), workers)
|
||||
}
|
||||
|
||||
func (t *ExportTask) exportStudio(wg *sync.WaitGroup, jobChan <-chan *models.Studio) {
|
||||
func (t *ExportTask) exportStudio(wg *sync.WaitGroup, jobChan <-chan *models.Studio, repo models.ReaderRepository) {
|
||||
defer wg.Done()
|
||||
|
||||
studioReader := models.NewStudioReaderWriter(nil)
|
||||
studioReader := repo.Studio()
|
||||
|
||||
for s := range jobChan {
|
||||
newStudioJSON, err := studio.ToJSON(studioReader, s)
|
||||
@@ -797,10 +807,10 @@ func (t *ExportTask) exportStudio(wg *sync.WaitGroup, jobChan <-chan *models.Stu
|
||||
}
|
||||
}
|
||||
|
||||
func (t *ExportTask) ExportTags(workers int) {
|
||||
func (t *ExportTask) ExportTags(workers int, repo models.ReaderRepository) {
|
||||
var tagsWg sync.WaitGroup
|
||||
|
||||
reader := models.NewTagReaderWriter(nil)
|
||||
reader := repo.Tag()
|
||||
var tags []*models.Tag
|
||||
var err error
|
||||
all := t.full || (t.tags != nil && t.tags.all)
|
||||
@@ -821,7 +831,7 @@ func (t *ExportTask) ExportTags(workers int) {
|
||||
|
||||
for w := 0; w < workers; w++ { // create export Tag workers
|
||||
tagsWg.Add(1)
|
||||
go t.exportTag(&tagsWg, jobCh)
|
||||
go t.exportTag(&tagsWg, jobCh, repo)
|
||||
}
|
||||
|
||||
for i, tag := range tags {
|
||||
@@ -841,10 +851,10 @@ func (t *ExportTask) ExportTags(workers int) {
|
||||
logger.Infof("[tags] export complete in %s. %d workers used.", time.Since(startTime), workers)
|
||||
}
|
||||
|
||||
func (t *ExportTask) exportTag(wg *sync.WaitGroup, jobChan <-chan *models.Tag) {
|
||||
func (t *ExportTask) exportTag(wg *sync.WaitGroup, jobChan <-chan *models.Tag, repo models.ReaderRepository) {
|
||||
defer wg.Done()
|
||||
|
||||
tagReader := models.NewTagReaderWriter(nil)
|
||||
tagReader := repo.Tag()
|
||||
|
||||
for thisTag := range jobChan {
|
||||
newTagJSON, err := tag.ToJSON(tagReader, thisTag)
|
||||
@@ -868,10 +878,10 @@ func (t *ExportTask) exportTag(wg *sync.WaitGroup, jobChan <-chan *models.Tag) {
|
||||
}
|
||||
}
|
||||
|
||||
func (t *ExportTask) ExportMovies(workers int) {
|
||||
func (t *ExportTask) ExportMovies(workers int, repo models.ReaderRepository) {
|
||||
var moviesWg sync.WaitGroup
|
||||
|
||||
reader := models.NewMovieReaderWriter(nil)
|
||||
reader := repo.Movie()
|
||||
var movies []*models.Movie
|
||||
var err error
|
||||
all := t.full || (t.movies != nil && t.movies.all)
|
||||
@@ -892,7 +902,7 @@ func (t *ExportTask) ExportMovies(workers int) {
|
||||
|
||||
for w := 0; w < workers; w++ { // create export Studio workers
|
||||
moviesWg.Add(1)
|
||||
go t.exportMovie(&moviesWg, jobCh)
|
||||
go t.exportMovie(&moviesWg, jobCh, repo)
|
||||
}
|
||||
|
||||
for i, movie := range movies {
|
||||
@@ -909,11 +919,11 @@ func (t *ExportTask) ExportMovies(workers int) {
|
||||
logger.Infof("[movies] export complete in %s. %d workers used.", time.Since(startTime), workers)
|
||||
|
||||
}
|
||||
func (t *ExportTask) exportMovie(wg *sync.WaitGroup, jobChan <-chan *models.Movie) {
|
||||
func (t *ExportTask) exportMovie(wg *sync.WaitGroup, jobChan <-chan *models.Movie, repo models.ReaderRepository) {
|
||||
defer wg.Done()
|
||||
|
||||
movieReader := models.NewMovieReaderWriter(nil)
|
||||
studioReader := models.NewStudioReaderWriter(nil)
|
||||
movieReader := repo.Movie()
|
||||
studioReader := repo.Studio()
|
||||
|
||||
for m := range jobChan {
|
||||
newMovieJSON, err := movie.ToJSON(movieReader, studioReader, m)
|
||||
@@ -942,9 +952,9 @@ func (t *ExportTask) exportMovie(wg *sync.WaitGroup, jobChan <-chan *models.Movi
|
||||
}
|
||||
}
|
||||
|
||||
func (t *ExportTask) ExportScrapedItems() {
|
||||
qb := models.NewScrapedItemQueryBuilder()
|
||||
sqb := models.NewStudioQueryBuilder()
|
||||
func (t *ExportTask) ExportScrapedItems(repo models.ReaderRepository) {
|
||||
qb := repo.ScrapedItem()
|
||||
sqb := repo.Studio()
|
||||
scrapedItems, err := qb.All()
|
||||
if err != nil {
|
||||
logger.Errorf("[scraped sites] failed to fetch all items: %s", err.Error())
|
||||
@@ -960,7 +970,7 @@ func (t *ExportTask) ExportScrapedItems() {
|
||||
|
||||
var studioName string
|
||||
if scrapedItem.StudioID.Valid {
|
||||
studio, _ := sqb.Find(int(scrapedItem.StudioID.Int64), nil)
|
||||
studio, _ := sqb.Find(int(scrapedItem.StudioID.Int64))
|
||||
if studio != nil {
|
||||
studioName = studio.Name.String
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package manager
|
||||
|
||||
import (
|
||||
"context"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
|
||||
@@ -13,6 +14,7 @@ import (
|
||||
)
|
||||
|
||||
type GenerateMarkersTask struct {
|
||||
TxnManager models.TransactionManager
|
||||
Scene *models.Scene
|
||||
Marker *models.SceneMarker
|
||||
Overwrite bool
|
||||
@@ -27,13 +29,21 @@ func (t *GenerateMarkersTask) Start(wg *sizedwaitgroup.SizedWaitGroup) {
|
||||
}
|
||||
|
||||
if t.Marker != nil {
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
scene, err := qb.Find(int(t.Marker.SceneID.Int64))
|
||||
if err != nil {
|
||||
var scene *models.Scene
|
||||
if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
var err error
|
||||
scene, err = r.Scene().Find(int(t.Marker.SceneID.Int64))
|
||||
return err
|
||||
}); err != nil {
|
||||
logger.Errorf("error finding scene for marker: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
if scene == nil {
|
||||
logger.Errorf("scene not found for id %d", t.Marker.SceneID.Int64)
|
||||
return
|
||||
}
|
||||
|
||||
videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false)
|
||||
if err != nil {
|
||||
logger.Errorf("error reading video file: %s", err.Error())
|
||||
@@ -45,8 +55,16 @@ func (t *GenerateMarkersTask) Start(wg *sizedwaitgroup.SizedWaitGroup) {
|
||||
}
|
||||
|
||||
func (t *GenerateMarkersTask) generateSceneMarkers() {
|
||||
qb := models.NewSceneMarkerQueryBuilder()
|
||||
sceneMarkers, _ := qb.FindBySceneID(t.Scene.ID, nil)
|
||||
var sceneMarkers []*models.SceneMarker
|
||||
if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
var err error
|
||||
sceneMarkers, err = r.SceneMarker().FindBySceneID(t.Scene.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
logger.Errorf("error getting scene markers: %s", err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
if len(sceneMarkers) == 0 {
|
||||
return
|
||||
}
|
||||
@@ -117,8 +135,16 @@ func (t *GenerateMarkersTask) generateMarker(videoFile *ffmpeg.VideoFile, scene
|
||||
|
||||
func (t *GenerateMarkersTask) isMarkerNeeded() int {
|
||||
markers := 0
|
||||
qb := models.NewSceneMarkerQueryBuilder()
|
||||
sceneMarkers, _ := qb.FindBySceneID(t.Scene.ID, nil)
|
||||
var sceneMarkers []*models.SceneMarker
|
||||
if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
var err error
|
||||
sceneMarkers, err = r.SceneMarker().FindBySceneID(t.Scene.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
logger.Errorf("errror finding scene markers: %s", err.Error())
|
||||
return 0
|
||||
}
|
||||
|
||||
if len(sceneMarkers) == 0 {
|
||||
return 0
|
||||
}
|
||||
|
||||
@@ -2,12 +2,12 @@ package manager
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/ffmpeg"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
@@ -17,6 +17,7 @@ type GenerateScreenshotTask struct {
|
||||
Scene models.Scene
|
||||
ScreenshotAt *float64
|
||||
fileNamingAlgorithm models.HashAlgorithm
|
||||
txnManager models.TransactionManager
|
||||
}
|
||||
|
||||
func (t *GenerateScreenshotTask) Start(wg *sync.WaitGroup) {
@@ -60,10 +61,8 @@ func (t *GenerateScreenshotTask) Start(wg *sync.WaitGroup) {
return
}

ctx := context.TODO()
tx := database.DB.MustBeginTx(ctx, nil)

qb := models.NewSceneQueryBuilder()
if err := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error {
qb := r.Scene()
updatedTime := time.Now()
updatedScene := models.ScenePartial{
ID: t.Scene.ID,
@@ -71,28 +70,22 @@ func (t *GenerateScreenshotTask) Start(wg *sync.WaitGroup) {
}

if err := SetSceneScreenshot(checksum, coverImageData); err != nil {
logger.Errorf("Error writing screenshot: %s", err.Error())
tx.Rollback()
return
return fmt.Errorf("Error writing screenshot: %s", err.Error())
}

// update the scene cover table
if err := qb.UpdateSceneCover(t.Scene.ID, coverImageData, tx); err != nil {
logger.Errorf("Error setting screenshot: %s", err.Error())
tx.Rollback()
return
if err := qb.UpdateCover(t.Scene.ID, coverImageData); err != nil {
return fmt.Errorf("Error setting screenshot: %s", err.Error())
}

// update the scene with the update date
_, err = qb.Update(updatedScene, tx)
_, err = qb.Update(updatedScene)
if err != nil {
logger.Errorf("Error updating scene: %s", err.Error())
tx.Rollback()
return
return fmt.Errorf("Error updating scene: %s", err.Error())
}

if err := tx.Commit(); err != nil {
logger.Errorf("Error setting screenshot: %s", err.Error())
return
return nil
}); err != nil {
logger.Error(err.Error())
}
}

@@ -11,7 +11,6 @@ import (
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/gallery"
|
||||
"github.com/stashapp/stash/pkg/image"
|
||||
@@ -29,6 +28,7 @@ import (
|
||||
)
|
||||
|
||||
type ImportTask struct {
|
||||
txnManager models.TransactionManager
|
||||
json jsonUtils
|
||||
|
||||
BaseDir string
|
||||
@@ -44,6 +44,7 @@ type ImportTask struct {
|
||||
|
||||
func CreateImportTask(a models.HashAlgorithm, input models.ImportObjectsInput) *ImportTask {
|
||||
return &ImportTask{
|
||||
txnManager: GetInstance().TxnManager,
|
||||
ZipFile: input.File.File,
|
||||
Reset: false,
|
||||
DuplicateBehaviour: input.DuplicateBehaviour,
|
||||
@@ -200,22 +201,16 @@ func (t *ImportTask) ImportPerformers(ctx context.Context) {
|
||||
|
||||
logger.Progressf("[performers] %d of %d", index, len(t.mappings.Performers))

tx := database.DB.MustBeginTx(ctx, nil)
readerWriter := models.NewPerformerReaderWriter(tx)
if err := t.txnManager.WithTxn(ctx, func(r models.Repository) error {
readerWriter := r.Performer()
importer := &performer.Importer{
ReaderWriter: readerWriter,
Input: *performerJSON,
}

if err := performImport(importer, t.DuplicateBehaviour); err != nil {
tx.Rollback()
logger.Errorf("[performers] <%s> failed to import: %s", mappingJSON.Checksum, err.Error())
continue
}

if err := tx.Commit(); err != nil {
tx.Rollback()
logger.Errorf("[performers] <%s> import failed to commit: %s", mappingJSON.Checksum, err.Error())
return performImport(importer, t.DuplicateBehaviour)
}); err != nil {
logger.Errorf("[performers] <%s> import failed: %s", mappingJSON.Checksum, err.Error())
}
}

@@ -237,12 +232,9 @@ func (t *ImportTask) ImportStudios(ctx context.Context) {
|
||||
|
||||
logger.Progressf("[studios] %d of %d", index, len(t.mappings.Studios))
|
||||
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
|
||||
// fail on missing parent studio to begin with
|
||||
if err := t.ImportStudio(studioJSON, pendingParent, tx); err != nil {
|
||||
tx.Rollback()
|
||||
|
||||
if err := t.txnManager.WithTxn(ctx, func(r models.Repository) error {
|
||||
return t.ImportStudio(studioJSON, pendingParent, r.Studio())
|
||||
}); err != nil {
|
||||
if err == studio.ErrParentStudioNotExist {
|
||||
// add to the pending parent list so that it is created after the parent
|
||||
s := pendingParent[studioJSON.ParentStudio]
|
||||
@@ -254,11 +246,6 @@ func (t *ImportTask) ImportStudios(ctx context.Context) {
|
||||
logger.Errorf("[studios] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
|
||||
continue
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
logger.Errorf("[studios] import failed to commit: %s", err.Error())
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// create the leftover studios, warning for missing parents
|
||||
@@ -267,18 +254,12 @@ func (t *ImportTask) ImportStudios(ctx context.Context) {
|
||||
|
||||
for _, s := range pendingParent {
|
||||
for _, orphanStudioJSON := range s {
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
|
||||
if err := t.ImportStudio(orphanStudioJSON, nil, tx); err != nil {
|
||||
tx.Rollback()
|
||||
if err := t.txnManager.WithTxn(ctx, func(r models.Repository) error {
|
||||
return t.ImportStudio(orphanStudioJSON, nil, r.Studio())
|
||||
}); err != nil {
|
||||
logger.Errorf("[studios] <%s> failed to create: %s", orphanStudioJSON.Name, err.Error())
|
||||
continue
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
logger.Errorf("[studios] import failed to commit: %s", err.Error())
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -286,8 +267,7 @@ func (t *ImportTask) ImportStudios(ctx context.Context) {
|
||||
logger.Info("[studios] import complete")
|
||||
}
|
||||
|
||||
func (t *ImportTask) ImportStudio(studioJSON *jsonschema.Studio, pendingParent map[string][]*jsonschema.Studio, tx *sqlx.Tx) error {
|
||||
readerWriter := models.NewStudioReaderWriter(tx)
|
||||
func (t *ImportTask) ImportStudio(studioJSON *jsonschema.Studio, pendingParent map[string][]*jsonschema.Studio, readerWriter models.StudioReaderWriter) error {
|
||||
importer := &studio.Importer{
|
||||
ReaderWriter: readerWriter,
|
||||
Input: *studioJSON,
|
||||
@@ -307,7 +287,7 @@ func (t *ImportTask) ImportStudio(studioJSON *jsonschema.Studio, pendingParent m
|
||||
s := pendingParent[studioJSON.Name]
|
||||
for _, childStudioJSON := range s {
|
||||
// map is nil since we're not checking parent studios at this point
|
||||
if err := t.ImportStudio(childStudioJSON, nil, tx); err != nil {
|
||||
if err := t.ImportStudio(childStudioJSON, nil, readerWriter); err != nil {
|
||||
return fmt.Errorf("failed to create child studio <%s>: %s", childStudioJSON.Name, err.Error())
|
||||
}
|
||||
}
|
||||
@@ -331,9 +311,9 @@ func (t *ImportTask) ImportMovies(ctx context.Context) {
|
||||
|
||||
logger.Progressf("[movies] %d of %d", index, len(t.mappings.Movies))
|
||||
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
readerWriter := models.NewMovieReaderWriter(tx)
|
||||
studioReaderWriter := models.NewStudioReaderWriter(tx)
|
||||
if err := t.txnManager.WithTxn(ctx, func(r models.Repository) error {
|
||||
readerWriter := r.Movie()
|
||||
studioReaderWriter := r.Studio()
|
||||
|
||||
movieImporter := &movie.Importer{
|
||||
ReaderWriter: readerWriter,
|
||||
@@ -342,15 +322,9 @@ func (t *ImportTask) ImportMovies(ctx context.Context) {
|
||||
MissingRefBehaviour: t.MissingRefBehaviour,
|
||||
}
|
||||
|
||||
if err := performImport(movieImporter, t.DuplicateBehaviour); err != nil {
|
||||
tx.Rollback()
|
||||
logger.Errorf("[movies] <%s> failed to import: %s", mappingJSON.Checksum, err.Error())
|
||||
continue
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
tx.Rollback()
|
||||
logger.Errorf("[movies] <%s> import failed to commit: %s", mappingJSON.Checksum, err.Error())
|
||||
return performImport(movieImporter, t.DuplicateBehaviour)
|
||||
}); err != nil {
|
||||
logger.Errorf("[movies] <%s> import failed: %s", mappingJSON.Checksum, err.Error())
|
||||
continue
|
||||
}
|
||||
}
|
||||
@@ -371,31 +345,23 @@ func (t *ImportTask) ImportGalleries(ctx context.Context) {
|
||||
|
||||
logger.Progressf("[galleries] %d of %d", index, len(t.mappings.Galleries))
|
||||
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
readerWriter := models.NewGalleryReaderWriter(tx)
|
||||
tagWriter := models.NewTagReaderWriter(tx)
|
||||
joinWriter := models.NewJoinReaderWriter(tx)
|
||||
performerWriter := models.NewPerformerReaderWriter(tx)
|
||||
studioWriter := models.NewStudioReaderWriter(tx)
|
||||
if err := t.txnManager.WithTxn(ctx, func(r models.Repository) error {
|
||||
readerWriter := r.Gallery()
|
||||
tagWriter := r.Tag()
|
||||
performerWriter := r.Performer()
|
||||
studioWriter := r.Studio()
|
||||
|
||||
galleryImporter := &gallery.Importer{
|
||||
ReaderWriter: readerWriter,
|
||||
PerformerWriter: performerWriter,
|
||||
StudioWriter: studioWriter,
|
||||
TagWriter: tagWriter,
|
||||
JoinWriter: joinWriter,
|
||||
Input: *galleryJSON,
|
||||
MissingRefBehaviour: t.MissingRefBehaviour,
|
||||
}
|
||||
|
||||
if err := performImport(galleryImporter, t.DuplicateBehaviour); err != nil {
|
||||
tx.Rollback()
|
||||
logger.Errorf("[galleries] <%s> failed to import: %s", mappingJSON.Checksum, err.Error())
|
||||
continue
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
tx.Rollback()
|
||||
return performImport(galleryImporter, t.DuplicateBehaviour)
|
||||
}); err != nil {
|
||||
logger.Errorf("[galleries] <%s> import failed to commit: %s", mappingJSON.Checksum, err.Error())
|
||||
continue
|
||||
}
|
||||
@@ -417,33 +383,29 @@ func (t *ImportTask) ImportTags(ctx context.Context) {
|
||||
|
||||
logger.Progressf("[tags] %d of %d", index, len(t.mappings.Tags))
|
||||
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
readerWriter := models.NewTagReaderWriter(tx)
|
||||
if err := t.txnManager.WithTxn(ctx, func(r models.Repository) error {
|
||||
readerWriter := r.Tag()
|
||||
|
||||
tagImporter := &tag.Importer{
|
||||
ReaderWriter: readerWriter,
|
||||
Input: *tagJSON,
|
||||
}
|
||||
|
||||
if err := performImport(tagImporter, t.DuplicateBehaviour); err != nil {
|
||||
tx.Rollback()
|
||||
return performImport(tagImporter, t.DuplicateBehaviour)
|
||||
}); err != nil {
|
||||
logger.Errorf("[tags] <%s> failed to import: %s", mappingJSON.Checksum, err.Error())
|
||||
continue
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
tx.Rollback()
|
||||
logger.Errorf("[tags] <%s> import failed to commit: %s", mappingJSON.Checksum, err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
logger.Info("[tags] import complete")
|
||||
}
|
||||
|
||||
func (t *ImportTask) ImportScrapedItems(ctx context.Context) {
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
qb := models.NewScrapedItemQueryBuilder()
|
||||
sqb := models.NewStudioQueryBuilder()
|
||||
if err := t.txnManager.WithTxn(ctx, func(r models.Repository) error {
|
||||
logger.Info("[scraped sites] importing")
|
||||
qb := r.ScrapedItem()
|
||||
sqb := r.Studio()
|
||||
currentTime := time.Now()
|
||||
|
||||
for i, mappingJSON := range t.scraped {
|
||||
@@ -467,7 +429,7 @@ func (t *ImportTask) ImportScrapedItems(ctx context.Context) {
|
||||
UpdatedAt: models.SQLiteTimestamp{Timestamp: t.getTimeFromJSONTime(mappingJSON.UpdatedAt)},
|
||||
}
|
||||
|
||||
studio, err := sqb.FindByName(mappingJSON.Studio, tx, false)
|
||||
studio, err := sqb.FindByName(mappingJSON.Studio, false)
|
||||
if err != nil {
|
||||
logger.Errorf("[scraped sites] failed to fetch studio: %s", err.Error())
|
||||
}
|
||||
@@ -475,16 +437,17 @@ func (t *ImportTask) ImportScrapedItems(ctx context.Context) {
|
||||
newScrapedItem.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true}
|
||||
}
|
||||
|
||||
_, err = qb.Create(newScrapedItem, tx)
|
||||
_, err = qb.Create(newScrapedItem)
|
||||
if err != nil {
|
||||
logger.Errorf("[scraped sites] <%s> failed to create: %s", newScrapedItem.Title.String, err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
logger.Info("[scraped sites] importing")
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Errorf("[scraped sites] import failed to commit: %s", err.Error())
|
||||
}
|
||||
|
||||
logger.Info("[scraped sites] import complete")
|
||||
}
|
||||
|
||||
@@ -504,15 +467,14 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
|
||||
|
||||
sceneHash := mappingJSON.Checksum
|
||||
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
readerWriter := models.NewSceneReaderWriter(tx)
|
||||
tagWriter := models.NewTagReaderWriter(tx)
|
||||
galleryWriter := models.NewGalleryReaderWriter(tx)
|
||||
joinWriter := models.NewJoinReaderWriter(tx)
|
||||
movieWriter := models.NewMovieReaderWriter(tx)
|
||||
performerWriter := models.NewPerformerReaderWriter(tx)
|
||||
studioWriter := models.NewStudioReaderWriter(tx)
|
||||
markerWriter := models.NewSceneMarkerReaderWriter(tx)
|
||||
if err := t.txnManager.WithTxn(ctx, func(r models.Repository) error {
|
||||
readerWriter := r.Scene()
|
||||
tagWriter := r.Tag()
|
||||
galleryWriter := r.Gallery()
|
||||
movieWriter := r.Movie()
|
||||
performerWriter := r.Performer()
|
||||
studioWriter := r.Studio()
|
||||
markerWriter := r.SceneMarker()
|
||||
|
||||
sceneImporter := &scene.Importer{
|
||||
ReaderWriter: readerWriter,
|
||||
@@ -523,7 +485,6 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
|
||||
MissingRefBehaviour: t.MissingRefBehaviour,
|
||||
|
||||
GalleryWriter: galleryWriter,
|
||||
JoinWriter: joinWriter,
|
||||
MovieWriter: movieWriter,
|
||||
PerformerWriter: performerWriter,
|
||||
StudioWriter: studioWriter,
|
||||
@@ -531,38 +492,27 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
|
||||
}
|
||||
|
||||
if err := performImport(sceneImporter, t.DuplicateBehaviour); err != nil {
|
||||
tx.Rollback()
|
||||
logger.Errorf("[scenes] <%s> failed to import: %s", sceneHash, err.Error())
|
||||
continue
|
||||
return err
|
||||
}
|
||||
|
||||
// import the scene markers
|
||||
failedMarkers := false
|
||||
for _, m := range sceneJSON.Markers {
|
||||
markerImporter := &scene.MarkerImporter{
|
||||
SceneID: sceneImporter.ID,
|
||||
Input: m,
|
||||
MissingRefBehaviour: t.MissingRefBehaviour,
|
||||
ReaderWriter: markerWriter,
|
||||
JoinWriter: joinWriter,
|
||||
TagWriter: tagWriter,
|
||||
}
|
||||
|
||||
if err := performImport(markerImporter, t.DuplicateBehaviour); err != nil {
|
||||
failedMarkers = true
|
||||
logger.Errorf("[scenes] <%s> failed to import markers: %s", sceneHash, err.Error())
|
||||
break
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if failedMarkers {
|
||||
tx.Rollback()
|
||||
continue
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
tx.Rollback()
|
||||
logger.Errorf("[scenes] <%s> import failed to commit: %s", sceneHash, err.Error())
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Errorf("[scenes] <%s> import failed: %s", sceneHash, err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -585,13 +535,12 @@ func (t *ImportTask) ImportImages(ctx context.Context) {
|
||||
|
||||
imageHash := mappingJSON.Checksum
|
||||
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
readerWriter := models.NewImageReaderWriter(tx)
|
||||
tagWriter := models.NewTagReaderWriter(tx)
|
||||
galleryWriter := models.NewGalleryReaderWriter(tx)
|
||||
joinWriter := models.NewJoinReaderWriter(tx)
|
||||
performerWriter := models.NewPerformerReaderWriter(tx)
|
||||
studioWriter := models.NewStudioReaderWriter(tx)
|
||||
if err := t.txnManager.WithTxn(ctx, func(r models.Repository) error {
|
||||
readerWriter := r.Image()
|
||||
tagWriter := r.Tag()
|
||||
galleryWriter := r.Gallery()
|
||||
performerWriter := r.Performer()
|
||||
studioWriter := r.Studio()
|
||||
|
||||
imageImporter := &image.Importer{
|
||||
ReaderWriter: readerWriter,
|
||||
@@ -601,30 +550,22 @@ func (t *ImportTask) ImportImages(ctx context.Context) {
|
||||
MissingRefBehaviour: t.MissingRefBehaviour,
|
||||
|
||||
GalleryWriter: galleryWriter,
|
||||
JoinWriter: joinWriter,
|
||||
PerformerWriter: performerWriter,
|
||||
StudioWriter: studioWriter,
|
||||
TagWriter: tagWriter,
|
||||
}
|
||||
|
||||
if err := performImport(imageImporter, t.DuplicateBehaviour); err != nil {
|
||||
tx.Rollback()
|
||||
logger.Errorf("[images] <%s> failed to import: %s", imageHash, err.Error())
|
||||
continue
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
tx.Rollback()
|
||||
logger.Errorf("[images] <%s> import failed to commit: %s", imageHash, err.Error())
|
||||
return performImport(imageImporter, t.DuplicateBehaviour)
|
||||
}); err != nil {
|
||||
logger.Errorf("[images] <%s> import failed: %s", imageHash, err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
logger.Info("[images] import complete")
|
||||
}
|
||||
|
||||
func (t *ImportTask) getPerformers(names []string, tx *sqlx.Tx) ([]*models.Performer, error) {
|
||||
pqb := models.NewPerformerQueryBuilder()
|
||||
performers, err := pqb.FindByNames(names, tx, false)
|
||||
func (t *ImportTask) getPerformers(names []string, qb models.PerformerReader) ([]*models.Performer, error) {
|
||||
performers, err := qb.FindByNames(names, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -648,12 +589,10 @@ func (t *ImportTask) getPerformers(names []string, tx *sqlx.Tx) ([]*models.Perfo
|
||||
return performers, nil
|
||||
}
|
||||
|
||||
func (t *ImportTask) getMoviesScenes(input []jsonschema.SceneMovie, sceneID int, tx *sqlx.Tx) ([]models.MoviesScenes, error) {
|
||||
mqb := models.NewMovieQueryBuilder()
|
||||
|
||||
func (t *ImportTask) getMoviesScenes(input []jsonschema.SceneMovie, sceneID int, mqb models.MovieReader) ([]models.MoviesScenes, error) {
|
||||
var movies []models.MoviesScenes
|
||||
for _, inputMovie := range input {
|
||||
movie, err := mqb.FindByName(inputMovie.MovieName, tx, false)
|
||||
movie, err := mqb.FindByName(inputMovie.MovieName, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -680,9 +619,8 @@ func (t *ImportTask) getMoviesScenes(input []jsonschema.SceneMovie, sceneID int,
|
||||
return movies, nil
|
||||
}
|
||||
|
||||
func (t *ImportTask) getTags(sceneChecksum string, names []string, tx *sqlx.Tx) ([]*models.Tag, error) {
|
||||
tqb := models.NewTagQueryBuilder()
|
||||
tags, err := tqb.FindByNames(names, tx, false)
|
||||
func (t *ImportTask) getTags(sceneChecksum string, names []string, tqb models.TagReader) ([]*models.Tag, error) {
|
||||
tags, err := tqb.FindByNames(names, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -11,19 +11,20 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/remeh/sizedwaitgroup"
|
||||
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/ffmpeg"
|
||||
"github.com/stashapp/stash/pkg/gallery"
|
||||
"github.com/stashapp/stash/pkg/image"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/manager/config"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/scene"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
type ScanTask struct {
|
||||
TxnManager models.TransactionManager
|
||||
FilePath string
|
||||
UseFileMetadata bool
|
||||
StripFileExtension bool
|
||||
@@ -39,14 +40,18 @@ func (t *ScanTask) Start(wg *sizedwaitgroup.SizedWaitGroup) {
|
||||
if isGallery(t.FilePath) {
|
||||
t.scanGallery()
|
||||
} else if isVideo(t.FilePath) {
|
||||
scene := t.scanScene()
|
||||
s := t.scanScene()
|
||||
|
||||
if scene != nil {
|
||||
if s != nil {
|
||||
iwg := sizedwaitgroup.New(2)
|
||||
|
||||
if t.GenerateSprite {
|
||||
iwg.Add()
|
||||
taskSprite := GenerateSpriteTask{Scene: *scene, Overwrite: false, fileNamingAlgorithm: t.fileNamingAlgorithm}
|
||||
taskSprite := GenerateSpriteTask{
|
||||
Scene: *s,
|
||||
Overwrite: false,
|
||||
fileNamingAlgorithm: t.fileNamingAlgorithm,
|
||||
}
|
||||
go taskSprite.Start(&iwg)
|
||||
}
|
||||
|
||||
@@ -69,7 +74,7 @@ func (t *ScanTask) Start(wg *sizedwaitgroup.SizedWaitGroup) {
|
||||
}
|
||||
|
||||
taskPreview := GeneratePreviewTask{
|
||||
Scene: *scene,
|
||||
Scene: *s,
|
||||
ImagePreview: t.GenerateImagePreview,
|
||||
Options: previewOptions,
|
||||
Overwrite: false,
|
||||
@@ -88,8 +93,26 @@ func (t *ScanTask) Start(wg *sizedwaitgroup.SizedWaitGroup) {
|
||||
}
|
||||
|
||||
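The scanGallery function below opens a read-only transaction before deciding whether a rescan is needed. A minimal sketch (not part of this commit) of that primitive, assuming only the WithReadTxn signature used in this diff; findGalleryByPath is a hypothetical helper.

package manager

import (
	"context"

	"github.com/stashapp/stash/pkg/models"
)

// Sketch only: reads use WithReadTxn and the ReaderRepository, which exposes
// reader interfaces only, so no writes can occur inside the callback.
func findGalleryByPath(m models.TransactionManager, path string) (*models.Gallery, error) {
	var g *models.Gallery
	err := m.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
		var err error
		g, err = r.Gallery().FindByPath(path)
		return err
	})
	return g, err
}
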
func (t *ScanTask) scanGallery() {
qb := models.NewGalleryQueryBuilder()
gallery, _ := qb.FindByPath(t.FilePath)
var g *models.Gallery
images := 0
scanImages := false

if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
var err error
g, err = r.Gallery().FindByPath(t.FilePath)

if g != nil && err != nil {
images, err = r.Image().CountByGalleryID(g.ID)
if err != nil {
return fmt.Errorf("error getting images for zip gallery %s: %s", t.FilePath, err.Error())
}
}

return err
}); err != nil {
logger.Error(err.Error())
return
}

fileModTime, err := t.getFileModTime()
|
||||
if err != nil {
|
||||
@@ -97,20 +120,29 @@ func (t *ScanTask) scanGallery() {
|
||||
return
|
||||
}
|
||||
|
||||
if gallery != nil {
|
||||
if g != nil {
|
||||
// We already have this item in the database, keep going
|
||||
|
||||
// if file mod time is not set, set it now
|
||||
if !g.FileModTime.Valid {
|
||||
// we will also need to rescan the zip contents
|
||||
updateModTime := false
|
||||
if !gallery.FileModTime.Valid {
|
||||
updateModTime = true
|
||||
t.updateFileModTime(gallery.ID, fileModTime, &qb)
|
||||
scanImages = true
|
||||
logger.Infof("setting file modification time on %s", t.FilePath)
|
||||
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
qb := r.Gallery()
|
||||
if _, err := gallery.UpdateFileModTime(qb, g.ID, models.NullSQLiteTimestamp{
|
||||
Timestamp: fileModTime,
|
||||
Valid: true,
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// update our copy of the gallery
|
||||
var err error
|
||||
gallery, err = qb.Find(gallery.ID, nil)
|
||||
if err != nil {
|
||||
g, err = qb.Find(g.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
return
|
||||
}
|
||||
@@ -118,8 +150,9 @@ func (t *ScanTask) scanGallery() {
|
||||
|
||||
// if the mod time of the zip file is different than that of the associated
|
||||
// gallery, then recalculate the checksum
|
||||
modified := t.isFileModified(fileModTime, gallery.FileModTime)
|
||||
modified := t.isFileModified(fileModTime, g.FileModTime)
|
||||
if modified {
|
||||
scanImages = true
|
||||
logger.Infof("%s has been updated: rescanning", t.FilePath)
|
||||
|
||||
// update the checksum and the modification time
|
||||
@@ -131,7 +164,7 @@ func (t *ScanTask) scanGallery() {
|
||||
|
||||
currentTime := time.Now()
|
||||
galleryPartial := models.GalleryPartial{
|
||||
ID: gallery.ID,
|
||||
ID: g.ID,
|
||||
Checksum: &checksum,
|
||||
FileModTime: &models.NullSQLiteTimestamp{
|
||||
Timestamp: fileModTime,
|
||||
@@ -140,32 +173,18 @@ func (t *ScanTask) scanGallery() {
|
||||
UpdatedAt: &models.SQLiteTimestamp{Timestamp: currentTime},
|
||||
}
|
||||
|
||||
err = database.WithTxn(func(tx *sqlx.Tx) error {
|
||||
_, err := qb.UpdatePartial(galleryPartial, tx)
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
_, err := r.Gallery().UpdatePartial(galleryPartial)
|
||||
return err
|
||||
})
|
||||
if err != nil {
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// scan the zip files if the gallery has no images
|
||||
iqb := models.NewImageQueryBuilder()
|
||||
images, err := iqb.CountByGalleryID(gallery.ID)
|
||||
if err != nil {
|
||||
logger.Errorf("error getting images for zip gallery %s: %s", t.FilePath, err.Error())
|
||||
}
|
||||
|
||||
if images == 0 || modified || updateModTime {
|
||||
t.scanZipImages(gallery)
|
||||
scanImages = scanImages || images == 0
|
||||
} else {
|
||||
// in case thumbnails have been deleted, regenerate them
|
||||
t.regenerateZipImages(gallery)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Ignore directories.
|
||||
if isDir, _ := utils.DirExists(t.FilePath); isDir {
|
||||
return
|
||||
@@ -177,20 +196,23 @@ func (t *ScanTask) scanGallery() {
|
||||
return
|
||||
}
|
||||
|
||||
ctx := context.TODO()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
gallery, _ = qb.FindByChecksum(checksum, tx)
|
||||
if gallery != nil {
|
||||
exists, _ := utils.FileExists(gallery.Path.String)
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
qb := r.Gallery()
|
||||
g, _ = qb.FindByChecksum(checksum)
|
||||
if g != nil {
|
||||
exists, _ := utils.FileExists(g.Path.String)
|
||||
if exists {
|
||||
logger.Infof("%s already exists. Duplicate of %s ", t.FilePath, gallery.Path.String)
|
||||
logger.Infof("%s already exists. Duplicate of %s ", t.FilePath, g.Path.String)
|
||||
} else {
|
||||
logger.Infof("%s already exists. Updating path...", t.FilePath)
|
||||
gallery.Path = sql.NullString{
|
||||
g.Path = sql.NullString{
|
||||
String: t.FilePath,
|
||||
Valid: true,
|
||||
}
|
||||
gallery, err = qb.Update(*gallery, tx)
|
||||
g, err = qb.Update(*g)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
} else {
|
||||
currentTime := time.Now()
|
||||
@@ -219,47 +241,29 @@ func (t *ScanTask) scanGallery() {
|
||||
}
|
||||
|
||||
logger.Infof("%s doesn't exist. Creating new item...", t.FilePath)
|
||||
gallery, err = qb.Create(newGallery, tx)
|
||||
}
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
tx.Rollback()
|
||||
return
|
||||
}
|
||||
|
||||
err = tx.Commit()
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
// if the gallery has no associated images, then scan the zip for images
|
||||
if gallery != nil {
|
||||
t.scanZipImages(gallery)
|
||||
}
|
||||
}
|
||||
|
||||
type fileModTimeUpdater interface {
|
||||
UpdateFileModTime(id int, modTime models.NullSQLiteTimestamp, tx *sqlx.Tx) error
|
||||
}
|
||||
|
||||
func (t *ScanTask) updateFileModTime(id int, fileModTime time.Time, updater fileModTimeUpdater) error {
|
||||
logger.Infof("setting file modification time on %s", t.FilePath)
|
||||
|
||||
err := database.WithTxn(func(tx *sqlx.Tx) error {
|
||||
return updater.UpdateFileModTime(id, models.NullSQLiteTimestamp{
|
||||
Timestamp: fileModTime,
|
||||
Valid: true,
|
||||
}, tx)
|
||||
})
|
||||
|
||||
g, err = qb.Create(newGallery)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
scanImages = true
|
||||
}
|
||||
}
|
||||
|
||||
			return nil
		}); err != nil {
			logger.Error(err.Error())
			return
		}
	}

	if g != nil {
		if scanImages {
			t.scanZipImages(g)
		} else {
			// in case thumbnails have been deleted, regenerate them
			t.regenerateZipImages(g)
		}
	}
}

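Note: the restructured scanGallery follows the pattern used throughout this commit: look the record up inside WithReadTxn, then apply any change inside a separate WithTxn. A minimal sketch of that pattern (findOrCreateGallery is a hypothetical helper written for illustration only; it uses the TransactionManager and repository interfaces introduced in this change and assumes the context and models imports already present in this file):

// Sketch only: the read-then-write pattern scanGallery now follows.
// findOrCreateGallery is a hypothetical helper, not part of this commit.
func findOrCreateGallery(txnManager models.TransactionManager, path string, newGallery models.Gallery) (*models.Gallery, error) {
	var g *models.Gallery

	// look the gallery up in a read-only transaction
	if err := txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
		var err error
		g, err = r.Gallery().FindByPath(path)
		return err
	}); err != nil {
		return nil, err
	}

	if g != nil {
		return g, nil
	}

	// create it in a separate write transaction
	if err := txnManager.WithTxn(context.TODO(), func(r models.Repository) error {
		var err error
		g, err = r.Gallery().Create(newGallery)
		return err
	}); err != nil {
		return nil, err
	}

	return g, nil
}
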
func (t *ScanTask) getFileModTime() (time.Time, error) {
|
||||
@@ -281,18 +285,23 @@ func (t *ScanTask) isFileModified(fileModTime time.Time, modTime models.NullSQLi
|
||||
|
||||
// associates a gallery to a scene with the same basename
|
||||
func (t *ScanTask) associateGallery(wg *sizedwaitgroup.SizedWaitGroup) {
|
||||
qb := models.NewGalleryQueryBuilder()
|
||||
gallery, _ := qb.FindByPath(t.FilePath)
|
||||
if gallery == nil {
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
qb := r.Gallery()
|
||||
sqb := r.Scene()
|
||||
g, err := qb.FindByPath(t.FilePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if g == nil {
|
||||
// associate is run after scan is finished
|
||||
// should only happen if gallery is a directory or an io error occurs during hashing
|
||||
logger.Warnf("associate: gallery %s not found in DB", t.FilePath)
|
||||
wg.Done()
|
||||
return
|
||||
return nil
|
||||
}
|
||||
|
||||
// gallery has no SceneID
|
||||
if !gallery.SceneID.Valid {
|
||||
if !g.SceneID.Valid {
|
||||
basename := strings.TrimSuffix(t.FilePath, filepath.Ext(t.FilePath))
|
||||
var relatedFiles []string
|
||||
vExt := config.GetVideoExtensions()
|
||||
@@ -305,24 +314,21 @@ func (t *ScanTask) associateGallery(wg *sizedwaitgroup.SizedWaitGroup) {
|
||||
}
|
||||
}
|
||||
for _, scenePath := range relatedFiles {
|
||||
qbScene := models.NewSceneQueryBuilder()
|
||||
scene, _ := qbScene.FindByPath(scenePath)
|
||||
// found related Scene
|
||||
if scene != nil {
|
||||
logger.Infof("associate: Gallery %s is related to scene: %d", t.FilePath, scene.ID)
|
||||
|
||||
gallery.SceneID.Int64 = int64(scene.ID)
|
||||
gallery.SceneID.Valid = true
|
||||
|
||||
ctx := context.TODO()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
|
||||
_, err := qb.Update(*gallery, tx)
|
||||
s, err := sqb.FindByPath(scenePath)
|
||||
if err != nil {
|
||||
logger.Errorf("associate: Error updating gallery sceneId %s", err)
|
||||
_ = tx.Rollback()
|
||||
} else if err := tx.Commit(); err != nil {
|
||||
logger.Error(err.Error())
|
||||
return err
|
||||
}
|
||||
|
||||
// found related Scene
|
||||
if s != nil {
|
||||
logger.Infof("associate: Gallery %s is related to scene: %d", t.FilePath, s.ID)
|
||||
|
||||
g.SceneID.Int64 = int64(s.ID)
|
||||
g.SceneID.Valid = true
|
||||
|
||||
_, err = qb.Update(*g)
|
||||
if err != nil {
|
||||
return fmt.Errorf("associate: Error updating gallery sceneId %s", err)
|
||||
}
|
||||
|
||||
// since a gallery can have only one related scene
|
||||
@@ -331,120 +337,133 @@ func (t *ScanTask) associateGallery(wg *sizedwaitgroup.SizedWaitGroup) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
}
|
||||
wg.Done()
|
||||
}
|
||||
|
||||
func (t *ScanTask) scanScene() *models.Scene {
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
scene, _ := qb.FindByPath(t.FilePath)
|
||||
|
||||
fileModTime, err := t.getFileModTime()
|
||||
if err != nil {
|
||||
logError := func(err error) *models.Scene {
|
||||
logger.Error(err.Error())
|
||||
return nil
|
||||
}
|
||||
|
||||
if scene != nil {
|
||||
var retScene *models.Scene
|
||||
var s *models.Scene
|
||||
|
||||
if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
var err error
|
||||
s, err = r.Scene().FindByPath(t.FilePath)
|
||||
return err
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
return nil
|
||||
}
|
||||
|
||||
fileModTime, err := t.getFileModTime()
|
||||
if err != nil {
|
||||
return logError(err)
|
||||
}
|
||||
|
||||
if s != nil {
|
||||
// if file mod time is not set, set it now
|
||||
if !scene.FileModTime.Valid {
|
||||
t.updateFileModTime(scene.ID, fileModTime, &qb)
|
||||
if !s.FileModTime.Valid {
|
||||
logger.Infof("setting file modification time on %s", t.FilePath)
|
||||
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
qb := r.Scene()
|
||||
if _, err := scene.UpdateFileModTime(qb, s.ID, models.NullSQLiteTimestamp{
|
||||
Timestamp: fileModTime,
|
||||
Valid: true,
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// update our copy of the scene
|
||||
var err error
|
||||
scene, err = qb.Find(scene.ID)
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
return nil
|
||||
s, err = qb.Find(s.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
return logError(err)
|
||||
}
|
||||
}
|
||||
|
||||
// if the mod time of the file is different than that of the associated
|
||||
// scene, then recalculate the checksum and regenerate the thumbnail
|
||||
modified := t.isFileModified(fileModTime, scene.FileModTime)
|
||||
if modified || !scene.Size.Valid {
|
||||
scene, err = t.rescanScene(scene, fileModTime)
|
||||
modified := t.isFileModified(fileModTime, s.FileModTime)
|
||||
if modified || !s.Size.Valid {
|
||||
s, err = t.rescanScene(s, fileModTime)
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
return nil
|
||||
return logError(err)
|
||||
}
|
||||
}
|
||||
|
||||
// We already have this item in the database
|
||||
// check for thumbnails,screenshots
|
||||
t.makeScreenshots(nil, scene.GetHash(t.fileNamingAlgorithm))
|
||||
t.makeScreenshots(nil, s.GetHash(t.fileNamingAlgorithm))
|
||||
|
||||
// check for container
|
||||
if !scene.Format.Valid {
|
||||
if !s.Format.Valid {
|
||||
videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.FilePath, t.StripFileExtension)
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
return nil
|
||||
return logError(err)
|
||||
}
|
||||
container := ffmpeg.MatchContainer(videoFile.Container, t.FilePath)
|
||||
logger.Infof("Adding container %s to file %s", container, t.FilePath)
|
||||
|
||||
ctx := context.TODO()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
err = qb.UpdateFormat(scene.ID, string(container), tx)
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
_ = tx.Rollback()
|
||||
} else if err := tx.Commit(); err != nil {
|
||||
logger.Error(err.Error())
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
_, err := scene.UpdateFormat(r.Scene(), s.ID, string(container))
|
||||
return err
|
||||
}); err != nil {
|
||||
return logError(err)
|
||||
}
|
||||
}
|
||||
|
||||
// check if oshash is set
|
||||
if !scene.OSHash.Valid {
|
||||
if !s.OSHash.Valid {
|
||||
logger.Infof("Calculating oshash for existing file %s ...", t.FilePath)
|
||||
oshash, err := utils.OSHashFromFilePath(t.FilePath)
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
qb := r.Scene()
|
||||
// check if oshash clashes with existing scene
|
||||
dupe, _ := qb.FindByOSHash(oshash)
|
||||
if dupe != nil {
|
||||
logger.Errorf("OSHash for file %s is the same as that of %s", t.FilePath, dupe.Path)
|
||||
return nil
|
||||
return fmt.Errorf("OSHash for file %s is the same as that of %s", t.FilePath, dupe.Path)
|
||||
}
|
||||
|
||||
ctx := context.TODO()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
err = qb.UpdateOSHash(scene.ID, oshash, tx)
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
tx.Rollback()
|
||||
return nil
|
||||
} else if err := tx.Commit(); err != nil {
|
||||
logger.Error(err.Error())
|
||||
_, err := scene.UpdateOSHash(qb, s.ID, oshash)
|
||||
return err
|
||||
}); err != nil {
|
||||
return logError(err)
|
||||
}
|
||||
}
|
||||
|
||||
// check if MD5 is set, if calculateMD5 is true
|
||||
if t.calculateMD5 && !scene.Checksum.Valid {
|
||||
if t.calculateMD5 && !s.Checksum.Valid {
|
||||
checksum, err := t.calculateChecksum()
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
return nil
|
||||
return logError(err)
|
||||
}
|
||||
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
qb := r.Scene()
|
||||
// check if checksum clashes with existing scene
|
||||
dupe, _ := qb.FindByChecksum(checksum)
|
||||
if dupe != nil {
|
||||
logger.Errorf("MD5 for file %s is the same as that of %s", t.FilePath, dupe.Path)
|
||||
return nil
|
||||
return fmt.Errorf("MD5 for file %s is the same as that of %s", t.FilePath, dupe.Path)
|
||||
}
|
||||
|
||||
ctx := context.TODO()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
err = qb.UpdateChecksum(scene.ID, checksum, tx)
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
_ = tx.Rollback()
|
||||
} else if err := tx.Commit(); err != nil {
|
||||
logger.Error(err.Error())
|
||||
_, err := scene.UpdateChecksum(qb, s.ID, checksum)
|
||||
return err
|
||||
}); err != nil {
|
||||
return logError(err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -473,28 +492,31 @@ func (t *ScanTask) scanScene() *models.Scene {
|
||||
logger.Infof("%s not found. Calculating oshash...", t.FilePath)
|
||||
oshash, err := utils.OSHashFromFilePath(t.FilePath)
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
return nil
|
||||
return logError(err)
|
||||
}
|
||||
|
||||
if t.fileNamingAlgorithm == models.HashAlgorithmMd5 || t.calculateMD5 {
|
||||
checksum, err = t.calculateChecksum()
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
return nil
|
||||
return logError(err)
|
||||
}
|
||||
}
|
||||
|
||||
// check for scene by checksum and oshash - MD5 should be
|
||||
// redundant, but check both
|
||||
t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
qb := r.Scene()
|
||||
if checksum != "" {
|
||||
scene, _ = qb.FindByChecksum(checksum)
|
||||
s, _ = qb.FindByChecksum(checksum)
|
||||
}
|
||||
|
||||
if scene == nil {
|
||||
scene, _ = qb.FindByOSHash(oshash)
|
||||
if s == nil {
|
||||
s, _ = qb.FindByOSHash(oshash)
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
sceneHash := oshash
|
||||
|
||||
if t.fileNamingAlgorithm == models.HashAlgorithmMd5 {
|
||||
@@ -503,21 +525,22 @@ func (t *ScanTask) scanScene() *models.Scene {
|
||||
|
||||
t.makeScreenshots(videoFile, sceneHash)
|
||||
|
||||
var retScene *models.Scene
|
||||
|
||||
ctx := context.TODO()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
if scene != nil {
|
||||
exists, _ := utils.FileExists(scene.Path)
|
||||
if s != nil {
|
||||
exists, _ := utils.FileExists(s.Path)
|
||||
if exists {
|
||||
logger.Infof("%s already exists. Duplicate of %s", t.FilePath, scene.Path)
|
||||
logger.Infof("%s already exists. Duplicate of %s", t.FilePath, s.Path)
|
||||
} else {
|
||||
logger.Infof("%s already exists. Updating path...", t.FilePath)
|
||||
scenePartial := models.ScenePartial{
|
||||
ID: scene.ID,
|
||||
ID: s.ID,
|
||||
Path: &t.FilePath,
|
||||
}
|
||||
_, err = qb.Update(scenePartial, tx)
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
_, err := r.Scene().Update(scenePartial)
|
||||
return err
|
||||
}); err != nil {
|
||||
return logError(err)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
logger.Infof("%s doesn't exist. Creating new item...", t.FilePath)
|
||||
@@ -549,23 +572,19 @@ func (t *ScanTask) scanScene() *models.Scene {
|
||||
newScene.Date = models.SQLiteDate{String: videoFile.CreationTime.Format("2006-01-02")}
|
||||
}
|
||||
|
||||
retScene, err = qb.Create(newScene, tx)
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
var err error
|
||||
retScene, err = r.Scene().Create(newScene)
|
||||
return err
|
||||
}); err != nil {
|
||||
return logError(err)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
_ = tx.Rollback()
|
||||
return nil
|
||||
|
||||
} else if err := tx.Commit(); err != nil {
|
||||
logger.Error(err.Error())
|
||||
return nil
|
||||
}
|
||||
|
||||
return retScene
|
||||
}
|
||||
|
||||
func (t *ScanTask) rescanScene(scene *models.Scene, fileModTime time.Time) (*models.Scene, error) {
|
||||
func (t *ScanTask) rescanScene(s *models.Scene, fileModTime time.Time) (*models.Scene, error) {
|
||||
logger.Infof("%s has been updated: rescanning", t.FilePath)
|
||||
|
||||
// update the oshash/checksum and the modification time
|
||||
@@ -597,7 +616,7 @@ func (t *ScanTask) rescanScene(scene *models.Scene, fileModTime time.Time) (*mod
|
||||
|
||||
currentTime := time.Now()
|
||||
scenePartial := models.ScenePartial{
|
||||
ID: scene.ID,
|
||||
ID: s.ID,
|
||||
Checksum: checksum,
|
||||
OSHash: &sql.NullString{
|
||||
String: oshash,
|
||||
@@ -620,13 +639,11 @@ func (t *ScanTask) rescanScene(scene *models.Scene, fileModTime time.Time) (*mod
|
||||
}
|
||||
|
||||
var ret *models.Scene
|
||||
err = database.WithTxn(func(tx *sqlx.Tx) error {
|
||||
qb := models.NewSceneQueryBuilder()
|
||||
var txnErr error
|
||||
ret, txnErr = qb.Update(scenePartial, tx)
|
||||
return txnErr
|
||||
})
|
||||
if err != nil {
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
var err error
|
||||
ret, err = r.Scene().Update(scenePartial)
|
||||
return err
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
return nil, err
|
||||
}
|
||||
@@ -692,10 +709,14 @@ func (t *ScanTask) scanZipImages(zipGallery *models.Gallery) {
|
||||
}
|
||||
|
||||
func (t *ScanTask) regenerateZipImages(zipGallery *models.Gallery) {
|
||||
iqb := models.NewImageQueryBuilder()
|
||||
var images []*models.Image
|
||||
if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
iqb := r.Image()
|
||||
|
||||
images, err := iqb.FindByGalleryID(zipGallery.ID)
|
||||
if err != nil {
|
||||
var err error
|
||||
images, err = iqb.FindByGalleryID(zipGallery.ID)
|
||||
return err
|
||||
}); err != nil {
|
||||
logger.Warnf("failed to find gallery images: %s", err.Error())
|
||||
return
|
||||
}
|
||||
@@ -706,8 +727,16 @@ func (t *ScanTask) regenerateZipImages(zipGallery *models.Gallery) {
|
||||
}
|
||||
|
||||
func (t *ScanTask) scanImage() {
|
||||
qb := models.NewImageQueryBuilder()
|
||||
i, _ := qb.FindByPath(t.FilePath)
|
||||
var i *models.Image
|
||||
|
||||
if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
var err error
|
||||
i, err = r.Image().FindByPath(t.FilePath)
|
||||
return err
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
fileModTime, err := image.GetFileModTime(t.FilePath)
|
||||
if err != nil {
|
||||
@@ -718,12 +747,22 @@ func (t *ScanTask) scanImage() {
|
||||
if i != nil {
|
||||
// if file mod time is not set, set it now
|
||||
if !i.FileModTime.Valid {
|
||||
t.updateFileModTime(i.ID, fileModTime, &qb)
|
||||
logger.Infof("setting file modification time on %s", t.FilePath)
|
||||
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
qb := r.Image()
|
||||
if _, err := image.UpdateFileModTime(qb, i.ID, models.NullSQLiteTimestamp{
|
||||
Timestamp: fileModTime,
|
||||
Valid: true,
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// update our copy of the gallery
|
||||
var err error
|
||||
i, err = qb.Find(i.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
return
|
||||
}
|
||||
@@ -743,10 +782,7 @@ func (t *ScanTask) scanImage() {
|
||||
// We already have this item in the database
|
||||
// check for thumbnails
|
||||
t.generateThumbnail(i)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
} else {
|
||||
// Ignore directories.
|
||||
if isDir, _ := utils.DirExists(t.FilePath); isDir {
|
||||
return
|
||||
@@ -763,10 +799,15 @@ func (t *ScanTask) scanImage() {
|
||||
|
||||
// check for scene by checksum and oshash - MD5 should be
|
||||
// redundant, but check both
|
||||
i, _ = qb.FindByChecksum(checksum)
|
||||
if err := t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
|
||||
var err error
|
||||
i, err = r.Image().FindByChecksum(checksum)
|
||||
return err
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
ctx := context.TODO()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
if i != nil {
|
||||
exists := image.FileExists(i.Path)
|
||||
if exists {
|
||||
@@ -777,7 +818,14 @@ func (t *ScanTask) scanImage() {
|
||||
ID: i.ID,
|
||||
Path: &t.FilePath,
|
||||
}
|
||||
_, err = qb.Update(imagePartial, tx)
|
||||
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
_, err := r.Image().Update(imagePartial)
|
||||
return err
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
} else {
|
||||
logger.Infof("%s doesn't exist. Creating new item...", image.PathDisplayName(t.FilePath))
|
||||
@@ -792,34 +840,44 @@ func (t *ScanTask) scanImage() {
|
||||
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
||||
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
||||
}
|
||||
err = image.SetFileDetails(&newImage)
|
||||
if err == nil {
|
||||
i, err = qb.Create(newImage, tx)
|
||||
if err := image.SetFileDetails(&newImage); err != nil {
|
||||
logger.Error(err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
var err error
|
||||
i, err = r.Image().Create(newImage)
|
||||
return err
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
if t.zipGallery != nil {
|
||||
// associate with gallery
|
||||
_, err = jqb.AddImageGallery(i.ID, t.zipGallery.ID, tx)
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
return gallery.AddImage(r.Gallery(), t.zipGallery.ID, i.ID)
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
return
|
||||
}
|
||||
} else if config.GetCreateGalleriesFromFolders() {
|
||||
// create gallery from folder or associate with existing gallery
|
||||
logger.Infof("Associating image %s with folder gallery", i.Path)
|
||||
err = t.associateImageWithFolderGallery(i.ID, tx)
|
||||
}
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
logger.Error(err.Error())
|
||||
_ = tx.Rollback()
|
||||
return
|
||||
} else if err := tx.Commit(); err != nil {
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
return t.associateImageWithFolderGallery(i.ID, r.Gallery())
|
||||
}); err != nil {
|
||||
logger.Error(err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if i != nil {
|
||||
t.generateThumbnail(i)
|
||||
}
|
||||
}
|
||||
|
||||
func (t *ScanTask) rescanImage(i *models.Image, fileModTime time.Time) (*models.Image, error) {
|
||||
@@ -854,13 +912,11 @@ func (t *ScanTask) rescanImage(i *models.Image, fileModTime time.Time) (*models.
|
||||
}
|
||||
|
||||
var ret *models.Image
|
||||
err = database.WithTxn(func(tx *sqlx.Tx) error {
|
||||
qb := models.NewImageQueryBuilder()
|
||||
var txnErr error
|
||||
ret, txnErr = qb.Update(imagePartial, tx)
|
||||
return txnErr
|
||||
})
|
||||
if err != nil {
|
||||
if err := t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
|
||||
var err error
|
||||
ret, err = r.Image().Update(imagePartial)
|
||||
return err
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -875,12 +931,10 @@ func (t *ScanTask) rescanImage(i *models.Image, fileModTime time.Time) (*models.
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (t *ScanTask) associateImageWithFolderGallery(imageID int, tx *sqlx.Tx) error {
|
||||
func (t *ScanTask) associateImageWithFolderGallery(imageID int, qb models.GalleryReaderWriter) error {
|
||||
// find a gallery with the path specified
|
||||
path := filepath.Dir(t.FilePath)
|
||||
gqb := models.NewGalleryQueryBuilder()
|
||||
jqb := models.NewJoinsQueryBuilder()
|
||||
g, err := gqb.FindByPath(path)
|
||||
g, err := qb.FindByPath(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -902,14 +956,14 @@ func (t *ScanTask) associateImageWithFolderGallery(imageID int, tx *sqlx.Tx) err
|
||||
}
|
||||
|
||||
logger.Infof("Creating gallery for folder %s", path)
|
||||
g, err = gqb.Create(newGallery, tx)
|
||||
g, err = qb.Create(newGallery)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// associate image with gallery
|
||||
_, err = jqb.AddImageGallery(imageID, g.ID, tx)
|
||||
err = gallery.AddImage(qb, g.ID, imageID)
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -966,27 +1020,29 @@ func (t *ScanTask) doesPathExist() bool {
	imgExt := config.GetImageExtensions()
	gExt := config.GetGalleryExtensions()

	ret := false
	t.TxnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
		if matchExtension(t.FilePath, gExt) {
			qb := models.NewGalleryQueryBuilder()
			gallery, _ := qb.FindByPath(t.FilePath)
			gallery, _ := r.Gallery().FindByPath(t.FilePath)
			if gallery != nil {
				return true
				ret = true
			}
		} else if matchExtension(t.FilePath, vidExt) {
			qb := models.NewSceneQueryBuilder()
			scene, _ := qb.FindByPath(t.FilePath)
			if scene != nil {
				return true
			s, _ := r.Scene().FindByPath(t.FilePath)
			if s != nil {
				ret = true
			}
		} else if matchExtension(t.FilePath, imgExt) {
			qb := models.NewImageQueryBuilder()
			i, _ := qb.FindByPath(t.FilePath)
			i, _ := r.Image().FindByPath(t.FilePath)
			if i != nil {
				return true
				ret = true
			}
		}

		return false
		return nil
	})

	return ret
}

func walkFilesToScan(s *models.StashConfig, f filepath.WalkFunc) error {

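Because the query builders are no longer constructed inline, each ScanTask now carries its TransactionManager. A rough sketch of how a caller might construct the task; the exact wiring (instance.TxnManager, fileNamingAlgo and the other field values) is assumed here for illustration, not taken from this diff:

wg.Add()
task := ScanTask{
	TxnManager:          instance.TxnManager, // assumed accessor on the singleton manager
	FilePath:            path,
	UseFileMetadata:     false,
	StripFileExtension:  false,
	fileNamingAlgorithm: fileNamingAlgo, // assumed to be resolved by the caller
}
go task.Start(&wg)
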
@@ -1,74 +1,34 @@
package models

import (
	"github.com/jmoiron/sqlx"
)

type GalleryReader interface {
	// Find(id int) (*Gallery, error)
	Find(id int) (*Gallery, error)
	FindMany(ids []int) ([]*Gallery, error)
	FindByChecksum(checksum string) (*Gallery, error)
	FindByPath(path string) (*Gallery, error)
	FindBySceneID(sceneID int) (*Gallery, error)
	FindByImageID(imageID int) ([]*Gallery, error)
	// ValidGalleriesForScenePath(scenePath string) ([]*Gallery, error)
	// Count() (int, error)
	ValidGalleriesForScenePath(scenePath string) ([]*Gallery, error)
	Count() (int, error)
	All() ([]*Gallery, error)
	// Query(galleryFilter *GalleryFilterType, findFilter *FindFilterType) ([]*Gallery, int)
	Query(galleryFilter *GalleryFilterType, findFilter *FindFilterType) ([]*Gallery, int, error)
	GetPerformerIDs(galleryID int) ([]int, error)
	GetTagIDs(galleryID int) ([]int, error)
	GetImageIDs(galleryID int) ([]int, error)
}

type GalleryWriter interface {
	Create(newGallery Gallery) (*Gallery, error)
	Update(updatedGallery Gallery) (*Gallery, error)
	// Destroy(id int) error
	// ClearGalleryId(sceneID int) error
	UpdatePartial(updatedGallery GalleryPartial) (*Gallery, error)
	UpdateFileModTime(id int, modTime NullSQLiteTimestamp) error
	Destroy(id int) error
	ClearGalleryId(sceneID int) error
	UpdatePerformers(galleryID int, performerIDs []int) error
	UpdateTags(galleryID int, tagIDs []int) error
	UpdateImages(galleryID int, imageIDs []int) error
}

type GalleryReaderWriter interface {
	GalleryReader
	GalleryWriter
}

func NewGalleryReaderWriter(tx *sqlx.Tx) GalleryReaderWriter {
|
||||
return &galleryReaderWriter{
|
||||
tx: tx,
|
||||
qb: NewGalleryQueryBuilder(),
|
||||
}
|
||||
}
|
||||
|
||||
type galleryReaderWriter struct {
|
||||
tx *sqlx.Tx
|
||||
qb GalleryQueryBuilder
|
||||
}
|
||||
|
||||
func (t *galleryReaderWriter) FindMany(ids []int) ([]*Gallery, error) {
|
||||
return t.qb.FindMany(ids)
|
||||
}
|
||||
|
||||
func (t *galleryReaderWriter) FindByChecksum(checksum string) (*Gallery, error) {
|
||||
return t.qb.FindByChecksum(checksum, t.tx)
|
||||
}
|
||||
|
||||
func (t *galleryReaderWriter) All() ([]*Gallery, error) {
|
||||
return t.qb.All()
|
||||
}
|
||||
|
||||
func (t *galleryReaderWriter) FindByPath(path string) (*Gallery, error) {
|
||||
return t.qb.FindByPath(path)
|
||||
}
|
||||
|
||||
func (t *galleryReaderWriter) FindBySceneID(sceneID int) (*Gallery, error) {
|
||||
return t.qb.FindBySceneID(sceneID, t.tx)
|
||||
}
|
||||
|
||||
func (t *galleryReaderWriter) FindByImageID(imageID int) ([]*Gallery, error) {
|
||||
return t.qb.FindByImageID(imageID, t.tx)
|
||||
}
|
||||
|
||||
func (t *galleryReaderWriter) Create(newGallery Gallery) (*Gallery, error) {
|
||||
return t.qb.Create(newGallery, t.tx)
|
||||
}
|
||||
|
||||
func (t *galleryReaderWriter) Update(updatedGallery Gallery) (*Gallery, error) {
|
||||
return t.qb.Update(updatedGallery, t.tx)
|
||||
}
|
||||
|
||||
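The split into GalleryReader, GalleryWriter and GalleryReaderWriter lets helpers depend only on the interface they need. As an illustration of that style, a helper like the gallery.AddImage call used in the scan task above could be written against the interface roughly like this (a sketch, not necessarily the implementation in this commit):

// AddImage sketch: appends an image to a gallery through the GalleryReaderWriter interface.
func AddImage(qb models.GalleryReaderWriter, galleryID int, imageID int) error {
	imageIDs, err := qb.GetImageIDs(galleryID)
	if err != nil {
		return err
	}

	imageIDs = append(imageIDs, imageID)
	return qb.UpdateImages(galleryID, imageIDs)
}
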
@@ -1,77 +1,41 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"github.com/jmoiron/sqlx"
|
||||
)
|
||||
|
||||
type ImageReader interface {
|
||||
// Find(id int) (*Image, error)
|
||||
Find(id int) (*Image, error)
|
||||
FindMany(ids []int) ([]*Image, error)
|
||||
FindByChecksum(checksum string) (*Image, error)
|
||||
FindByGalleryID(galleryID int) ([]*Image, error)
|
||||
// FindByPath(path string) (*Image, error)
|
||||
CountByGalleryID(galleryID int) (int, error)
|
||||
FindByPath(path string) (*Image, error)
|
||||
// FindByPerformerID(performerID int) ([]*Image, error)
|
||||
// CountByPerformerID(performerID int) (int, error)
|
||||
// FindByStudioID(studioID int) ([]*Image, error)
|
||||
// Count() (int, error)
|
||||
Count() (int, error)
|
||||
Size() (float64, error)
|
||||
// SizeCount() (string, error)
|
||||
// CountByStudioID(studioID int) (int, error)
|
||||
// CountByTagID(tagID int) (int, error)
|
||||
All() ([]*Image, error)
|
||||
// Query(imageFilter *ImageFilterType, findFilter *FindFilterType) ([]*Image, int)
|
||||
Query(imageFilter *ImageFilterType, findFilter *FindFilterType) ([]*Image, int, error)
|
||||
GetGalleryIDs(imageID int) ([]int, error)
|
||||
GetTagIDs(imageID int) ([]int, error)
|
||||
GetPerformerIDs(imageID int) ([]int, error)
|
||||
}
|
||||
|
||||
type ImageWriter interface {
|
||||
Create(newImage Image) (*Image, error)
|
||||
Update(updatedImage ImagePartial) (*Image, error)
|
||||
UpdateFull(updatedImage Image) (*Image, error)
|
||||
// IncrementOCounter(id int) (int, error)
|
||||
// DecrementOCounter(id int) (int, error)
|
||||
// ResetOCounter(id int) (int, error)
|
||||
// Destroy(id string) error
|
||||
IncrementOCounter(id int) (int, error)
|
||||
DecrementOCounter(id int) (int, error)
|
||||
ResetOCounter(id int) (int, error)
|
||||
Destroy(id int) error
|
||||
UpdateGalleries(imageID int, galleryIDs []int) error
|
||||
UpdatePerformers(imageID int, performerIDs []int) error
|
||||
UpdateTags(imageID int, tagIDs []int) error
|
||||
}
|
||||
|
||||
type ImageReaderWriter interface {
|
||||
ImageReader
|
||||
ImageWriter
|
||||
}
|
||||
|
||||
func NewImageReaderWriter(tx *sqlx.Tx) ImageReaderWriter {
|
||||
return &imageReaderWriter{
|
||||
tx: tx,
|
||||
qb: NewImageQueryBuilder(),
|
||||
}
|
||||
}
|
||||
|
||||
type imageReaderWriter struct {
|
||||
tx *sqlx.Tx
|
||||
qb ImageQueryBuilder
|
||||
}
|
||||
|
||||
func (t *imageReaderWriter) FindMany(ids []int) ([]*Image, error) {
|
||||
return t.qb.FindMany(ids)
|
||||
}
|
||||
|
||||
func (t *imageReaderWriter) FindByChecksum(checksum string) (*Image, error) {
|
||||
return t.qb.FindByChecksum(checksum)
|
||||
}
|
||||
|
||||
func (t *imageReaderWriter) FindByGalleryID(galleryID int) ([]*Image, error) {
|
||||
return t.qb.FindByGalleryID(galleryID)
|
||||
}
|
||||
|
||||
func (t *imageReaderWriter) All() ([]*Image, error) {
|
||||
return t.qb.All()
|
||||
}
|
||||
|
||||
func (t *imageReaderWriter) Create(newImage Image) (*Image, error) {
|
||||
return t.qb.Create(newImage, t.tx)
|
||||
}
|
||||
|
||||
func (t *imageReaderWriter) Update(updatedImage ImagePartial) (*Image, error) {
|
||||
return t.qb.Update(updatedImage, t.tx)
|
||||
}
|
||||
|
||||
func (t *imageReaderWriter) UpdateFull(updatedImage Image) (*Image, error) {
|
||||
return t.qb.UpdateFull(updatedImage, t.tx)
|
||||
}
|
||||
|
||||
@@ -1,101 +0,0 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"github.com/jmoiron/sqlx"
|
||||
)
|
||||
|
||||
type JoinReader interface {
|
||||
// GetScenePerformers(sceneID int) ([]PerformersScenes, error)
|
||||
GetSceneMovies(sceneID int) ([]MoviesScenes, error)
|
||||
// GetSceneTags(sceneID int) ([]ScenesTags, error)
|
||||
}
|
||||
|
||||
type JoinWriter interface {
|
||||
CreatePerformersScenes(newJoins []PerformersScenes) error
|
||||
// AddPerformerScene(sceneID int, performerID int) (bool, error)
|
||||
UpdatePerformersScenes(sceneID int, updatedJoins []PerformersScenes) error
|
||||
// DestroyPerformersScenes(sceneID int) error
|
||||
CreateMoviesScenes(newJoins []MoviesScenes) error
|
||||
// AddMoviesScene(sceneID int, movieID int, sceneIdx *int) (bool, error)
|
||||
UpdateMoviesScenes(sceneID int, updatedJoins []MoviesScenes) error
|
||||
// DestroyMoviesScenes(sceneID int) error
|
||||
// CreateScenesTags(newJoins []ScenesTags) error
|
||||
UpdateScenesTags(sceneID int, updatedJoins []ScenesTags) error
|
||||
// AddSceneTag(sceneID int, tagID int) (bool, error)
|
||||
// DestroyScenesTags(sceneID int) error
|
||||
// CreateSceneMarkersTags(newJoins []SceneMarkersTags) error
|
||||
UpdateSceneMarkersTags(sceneMarkerID int, updatedJoins []SceneMarkersTags) error
|
||||
// DestroySceneMarkersTags(sceneMarkerID int, updatedJoins []SceneMarkersTags) error
|
||||
// DestroyScenesGalleries(sceneID int) error
|
||||
// DestroyScenesMarkers(sceneID int) error
|
||||
UpdatePerformersGalleries(galleryID int, updatedJoins []PerformersGalleries) error
|
||||
UpdateGalleriesTags(galleryID int, updatedJoins []GalleriesTags) error
|
||||
UpdateGalleriesImages(imageID int, updatedJoins []GalleriesImages) error
|
||||
UpdatePerformersImages(imageID int, updatedJoins []PerformersImages) error
|
||||
UpdateImagesTags(imageID int, updatedJoins []ImagesTags) error
|
||||
}
|
||||
|
||||
type JoinReaderWriter interface {
|
||||
JoinReader
|
||||
JoinWriter
|
||||
}
|
||||
|
||||
func NewJoinReaderWriter(tx *sqlx.Tx) JoinReaderWriter {
|
||||
return &joinReaderWriter{
|
||||
tx: tx,
|
||||
qb: NewJoinsQueryBuilder(),
|
||||
}
|
||||
}
|
||||
|
||||
type joinReaderWriter struct {
|
||||
tx *sqlx.Tx
|
||||
qb JoinsQueryBuilder
|
||||
}
|
||||
|
||||
func (t *joinReaderWriter) GetSceneMovies(sceneID int) ([]MoviesScenes, error) {
|
||||
return t.qb.GetSceneMovies(sceneID, t.tx)
|
||||
}
|
||||
|
||||
func (t *joinReaderWriter) CreatePerformersScenes(newJoins []PerformersScenes) error {
|
||||
return t.qb.CreatePerformersScenes(newJoins, t.tx)
|
||||
}
|
||||
|
||||
func (t *joinReaderWriter) UpdatePerformersScenes(sceneID int, updatedJoins []PerformersScenes) error {
|
||||
return t.qb.UpdatePerformersScenes(sceneID, updatedJoins, t.tx)
|
||||
}
|
||||
|
||||
func (t *joinReaderWriter) CreateMoviesScenes(newJoins []MoviesScenes) error {
|
||||
return t.qb.CreateMoviesScenes(newJoins, t.tx)
|
||||
}
|
||||
|
||||
func (t *joinReaderWriter) UpdateMoviesScenes(sceneID int, updatedJoins []MoviesScenes) error {
|
||||
return t.qb.UpdateMoviesScenes(sceneID, updatedJoins, t.tx)
|
||||
}
|
||||
|
||||
func (t *joinReaderWriter) UpdateScenesTags(sceneID int, updatedJoins []ScenesTags) error {
|
||||
return t.qb.UpdateScenesTags(sceneID, updatedJoins, t.tx)
|
||||
}
|
||||
|
||||
func (t *joinReaderWriter) UpdateSceneMarkersTags(sceneMarkerID int, updatedJoins []SceneMarkersTags) error {
|
||||
return t.qb.UpdateSceneMarkersTags(sceneMarkerID, updatedJoins, t.tx)
|
||||
}
|
||||
|
||||
func (t *joinReaderWriter) UpdatePerformersGalleries(galleryID int, updatedJoins []PerformersGalleries) error {
|
||||
return t.qb.UpdatePerformersGalleries(galleryID, updatedJoins, t.tx)
|
||||
}
|
||||
|
||||
func (t *joinReaderWriter) UpdateGalleriesTags(galleryID int, updatedJoins []GalleriesTags) error {
|
||||
return t.qb.UpdateGalleriesTags(galleryID, updatedJoins, t.tx)
|
||||
}
|
||||
|
||||
func (t *joinReaderWriter) UpdateGalleriesImages(imageID int, updatedJoins []GalleriesImages) error {
|
||||
return t.qb.UpdateGalleriesImages(imageID, updatedJoins, t.tx)
|
||||
}
|
||||
|
||||
func (t *joinReaderWriter) UpdatePerformersImages(imageID int, updatedJoins []PerformersImages) error {
|
||||
return t.qb.UpdatePerformersImages(imageID, updatedJoins, t.tx)
|
||||
}
|
||||
|
||||
func (t *joinReaderWriter) UpdateImagesTags(imageID int, updatedJoins []ImagesTags) error {
|
||||
return t.qb.UpdateImagesTags(imageID, updatedJoins, t.tx)
|
||||
}
|
||||
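With joins.go removed, associations are written through the entity reader/writers instead of a dedicated JoinsQueryBuilder. Condensed from the scan task diff above, the same image-to-gallery association before and after (galleryID and imageID stand in for the concrete fields):

// before: explicit *sqlx.Tx and a separate joins query builder
jqb := models.NewJoinsQueryBuilder()
_, err = jqb.AddImageGallery(imageID, galleryID, tx)

// after: a managed transaction and the gallery reader/writer
err = t.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
	return gallery.AddImage(r.Gallery(), galleryID, imageID)
})
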
@@ -35,6 +35,41 @@ func (_m *GalleryReaderWriter) All() ([]*models.Gallery, error) {
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// ClearGalleryId provides a mock function with given fields: sceneID
|
||||
func (_m *GalleryReaderWriter) ClearGalleryId(sceneID int) error {
|
||||
ret := _m.Called(sceneID)
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(int) error); ok {
|
||||
r0 = rf(sceneID)
|
||||
} else {
|
||||
r0 = ret.Error(0)
|
||||
}
|
||||
|
||||
return r0
|
||||
}
|
||||
|
||||
// Count provides a mock function with given fields:
|
||||
func (_m *GalleryReaderWriter) Count() (int, error) {
|
||||
ret := _m.Called()
|
||||
|
||||
var r0 int
|
||||
if rf, ok := ret.Get(0).(func() int); ok {
|
||||
r0 = rf()
|
||||
} else {
|
||||
r0 = ret.Get(0).(int)
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func() error); ok {
|
||||
r1 = rf()
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// Create provides a mock function with given fields: newGallery
|
||||
func (_m *GalleryReaderWriter) Create(newGallery models.Gallery) (*models.Gallery, error) {
|
||||
ret := _m.Called(newGallery)
|
||||
@@ -58,6 +93,43 @@ func (_m *GalleryReaderWriter) Create(newGallery models.Gallery) (*models.Galler
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// Destroy provides a mock function with given fields: id
|
||||
func (_m *GalleryReaderWriter) Destroy(id int) error {
|
||||
ret := _m.Called(id)
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(int) error); ok {
|
||||
r0 = rf(id)
|
||||
} else {
|
||||
r0 = ret.Error(0)
|
||||
}
|
||||
|
||||
return r0
|
||||
}
|
||||
|
||||
// Find provides a mock function with given fields: id
|
||||
func (_m *GalleryReaderWriter) Find(id int) (*models.Gallery, error) {
|
||||
ret := _m.Called(id)
|
||||
|
||||
var r0 *models.Gallery
|
||||
if rf, ok := ret.Get(0).(func(int) *models.Gallery); ok {
|
||||
r0 = rf(id)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).(*models.Gallery)
|
||||
}
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(int) error); ok {
|
||||
r1 = rf(id)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// FindByChecksum provides a mock function with given fields: checksum
|
||||
func (_m *GalleryReaderWriter) FindByChecksum(checksum string) (*models.Gallery, error) {
|
||||
ret := _m.Called(checksum)
|
||||
@@ -173,6 +245,105 @@ func (_m *GalleryReaderWriter) FindMany(ids []int) ([]*models.Gallery, error) {
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// GetImageIDs provides a mock function with given fields: galleryID
|
||||
func (_m *GalleryReaderWriter) GetImageIDs(galleryID int) ([]int, error) {
|
||||
ret := _m.Called(galleryID)
|
||||
|
||||
var r0 []int
|
||||
if rf, ok := ret.Get(0).(func(int) []int); ok {
|
||||
r0 = rf(galleryID)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]int)
|
||||
}
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(int) error); ok {
|
||||
r1 = rf(galleryID)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// GetPerformerIDs provides a mock function with given fields: galleryID
|
||||
func (_m *GalleryReaderWriter) GetPerformerIDs(galleryID int) ([]int, error) {
|
||||
ret := _m.Called(galleryID)
|
||||
|
||||
var r0 []int
|
||||
if rf, ok := ret.Get(0).(func(int) []int); ok {
|
||||
r0 = rf(galleryID)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]int)
|
||||
}
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(int) error); ok {
|
||||
r1 = rf(galleryID)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// GetTagIDs provides a mock function with given fields: galleryID
|
||||
func (_m *GalleryReaderWriter) GetTagIDs(galleryID int) ([]int, error) {
|
||||
ret := _m.Called(galleryID)
|
||||
|
||||
var r0 []int
|
||||
if rf, ok := ret.Get(0).(func(int) []int); ok {
|
||||
r0 = rf(galleryID)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]int)
|
||||
}
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(int) error); ok {
|
||||
r1 = rf(galleryID)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// Query provides a mock function with given fields: galleryFilter, findFilter
|
||||
func (_m *GalleryReaderWriter) Query(galleryFilter *models.GalleryFilterType, findFilter *models.FindFilterType) ([]*models.Gallery, int, error) {
|
||||
ret := _m.Called(galleryFilter, findFilter)
|
||||
|
||||
var r0 []*models.Gallery
|
||||
if rf, ok := ret.Get(0).(func(*models.GalleryFilterType, *models.FindFilterType) []*models.Gallery); ok {
|
||||
r0 = rf(galleryFilter, findFilter)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]*models.Gallery)
|
||||
}
|
||||
}
|
||||
|
||||
var r1 int
|
||||
if rf, ok := ret.Get(1).(func(*models.GalleryFilterType, *models.FindFilterType) int); ok {
|
||||
r1 = rf(galleryFilter, findFilter)
|
||||
} else {
|
||||
r1 = ret.Get(1).(int)
|
||||
}
|
||||
|
||||
var r2 error
|
||||
if rf, ok := ret.Get(2).(func(*models.GalleryFilterType, *models.FindFilterType) error); ok {
|
||||
r2 = rf(galleryFilter, findFilter)
|
||||
} else {
|
||||
r2 = ret.Error(2)
|
||||
}
|
||||
|
||||
return r0, r1, r2
|
||||
}
|
||||
|
||||
// Update provides a mock function with given fields: updatedGallery
|
||||
func (_m *GalleryReaderWriter) Update(updatedGallery models.Gallery) (*models.Gallery, error) {
|
||||
ret := _m.Called(updatedGallery)
|
||||
@@ -195,3 +366,105 @@ func (_m *GalleryReaderWriter) Update(updatedGallery models.Gallery) (*models.Ga
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// UpdateFileModTime provides a mock function with given fields: id, modTime
|
||||
func (_m *GalleryReaderWriter) UpdateFileModTime(id int, modTime models.NullSQLiteTimestamp) error {
|
||||
ret := _m.Called(id, modTime)
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(int, models.NullSQLiteTimestamp) error); ok {
|
||||
r0 = rf(id, modTime)
|
||||
} else {
|
||||
r0 = ret.Error(0)
|
||||
}
|
||||
|
||||
return r0
|
||||
}
|
||||
|
||||
// UpdateImages provides a mock function with given fields: galleryID, imageIDs
|
||||
func (_m *GalleryReaderWriter) UpdateImages(galleryID int, imageIDs []int) error {
|
||||
ret := _m.Called(galleryID, imageIDs)
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(int, []int) error); ok {
|
||||
r0 = rf(galleryID, imageIDs)
|
||||
} else {
|
||||
r0 = ret.Error(0)
|
||||
}
|
||||
|
||||
return r0
|
||||
}
|
||||
|
||||
// UpdatePartial provides a mock function with given fields: updatedGallery
|
||||
func (_m *GalleryReaderWriter) UpdatePartial(updatedGallery models.GalleryPartial) (*models.Gallery, error) {
|
||||
ret := _m.Called(updatedGallery)
|
||||
|
||||
var r0 *models.Gallery
|
||||
if rf, ok := ret.Get(0).(func(models.GalleryPartial) *models.Gallery); ok {
|
||||
r0 = rf(updatedGallery)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).(*models.Gallery)
|
||||
}
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(models.GalleryPartial) error); ok {
|
||||
r1 = rf(updatedGallery)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// UpdatePerformers provides a mock function with given fields: galleryID, performerIDs
|
||||
func (_m *GalleryReaderWriter) UpdatePerformers(galleryID int, performerIDs []int) error {
|
||||
ret := _m.Called(galleryID, performerIDs)
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(int, []int) error); ok {
|
||||
r0 = rf(galleryID, performerIDs)
|
||||
} else {
|
||||
r0 = ret.Error(0)
|
||||
}
|
||||
|
||||
return r0
|
||||
}
|
||||
|
||||
// UpdateTags provides a mock function with given fields: galleryID, tagIDs
|
||||
func (_m *GalleryReaderWriter) UpdateTags(galleryID int, tagIDs []int) error {
|
||||
ret := _m.Called(galleryID, tagIDs)
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(int, []int) error); ok {
|
||||
r0 = rf(galleryID, tagIDs)
|
||||
} else {
|
||||
r0 = ret.Error(0)
|
||||
}
|
||||
|
||||
return r0
|
||||
}
|
||||
|
||||
// ValidGalleriesForScenePath provides a mock function with given fields: scenePath
|
||||
func (_m *GalleryReaderWriter) ValidGalleriesForScenePath(scenePath string) ([]*models.Gallery, error) {
|
||||
ret := _m.Called(scenePath)
|
||||
|
||||
var r0 []*models.Gallery
|
||||
if rf, ok := ret.Get(0).(func(string) []*models.Gallery); ok {
|
||||
r0 = rf(scenePath)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]*models.Gallery)
|
||||
}
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(string) error); ok {
|
||||
r1 = rf(scenePath)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
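The regenerated mocks exist so that code written against these interfaces can be unit tested without a database. A minimal sketch of their use with testify (the test package name and import paths are assumed):

package manager_test

import (
	"testing"

	"github.com/stashapp/stash/pkg/models/mocks"
	"github.com/stretchr/testify/assert"
)

func TestGalleryMock(t *testing.T) {
	qb := &mocks.GalleryReaderWriter{}

	// the code under test receives qb as a models.GalleryReaderWriter
	qb.On("FindByPath", "/galleries/example.zip").Return(nil, nil)

	g, err := qb.FindByPath("/galleries/example.zip")
	assert.Nil(t, err)
	assert.Nil(t, g)

	qb.AssertExpectations(t)
}
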
@@ -35,6 +35,48 @@ func (_m *ImageReaderWriter) All() ([]*models.Image, error) {
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// Count provides a mock function with given fields:
|
||||
func (_m *ImageReaderWriter) Count() (int, error) {
|
||||
ret := _m.Called()
|
||||
|
||||
var r0 int
|
||||
if rf, ok := ret.Get(0).(func() int); ok {
|
||||
r0 = rf()
|
||||
} else {
|
||||
r0 = ret.Get(0).(int)
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func() error); ok {
|
||||
r1 = rf()
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// CountByGalleryID provides a mock function with given fields: galleryID
|
||||
func (_m *ImageReaderWriter) CountByGalleryID(galleryID int) (int, error) {
|
||||
ret := _m.Called(galleryID)
|
||||
|
||||
var r0 int
|
||||
if rf, ok := ret.Get(0).(func(int) int); ok {
|
||||
r0 = rf(galleryID)
|
||||
} else {
|
||||
r0 = ret.Get(0).(int)
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(int) error); ok {
|
||||
r1 = rf(galleryID)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// Create provides a mock function with given fields: newImage
|
||||
func (_m *ImageReaderWriter) Create(newImage models.Image) (*models.Image, error) {
|
||||
ret := _m.Called(newImage)
|
||||
@@ -58,6 +100,64 @@ func (_m *ImageReaderWriter) Create(newImage models.Image) (*models.Image, error
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// DecrementOCounter provides a mock function with given fields: id
|
||||
func (_m *ImageReaderWriter) DecrementOCounter(id int) (int, error) {
|
||||
ret := _m.Called(id)
|
||||
|
||||
var r0 int
|
||||
if rf, ok := ret.Get(0).(func(int) int); ok {
|
||||
r0 = rf(id)
|
||||
} else {
|
||||
r0 = ret.Get(0).(int)
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(int) error); ok {
|
||||
r1 = rf(id)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// Destroy provides a mock function with given fields: id
|
||||
func (_m *ImageReaderWriter) Destroy(id int) error {
|
||||
ret := _m.Called(id)
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(int) error); ok {
|
||||
r0 = rf(id)
|
||||
} else {
|
||||
r0 = ret.Error(0)
|
||||
}
|
||||
|
||||
return r0
|
||||
}
|
||||
|
||||
// Find provides a mock function with given fields: id
|
||||
func (_m *ImageReaderWriter) Find(id int) (*models.Image, error) {
|
||||
ret := _m.Called(id)
|
||||
|
||||
var r0 *models.Image
|
||||
if rf, ok := ret.Get(0).(func(int) *models.Image); ok {
|
||||
r0 = rf(id)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).(*models.Image)
|
||||
}
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(int) error); ok {
|
||||
r1 = rf(id)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// FindByChecksum provides a mock function with given fields: checksum
|
||||
func (_m *ImageReaderWriter) FindByChecksum(checksum string) (*models.Image, error) {
|
||||
ret := _m.Called(checksum)
|
||||
@@ -104,6 +204,29 @@ func (_m *ImageReaderWriter) FindByGalleryID(galleryID int) ([]*models.Image, er
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// FindByPath provides a mock function with given fields: path
|
||||
func (_m *ImageReaderWriter) FindByPath(path string) (*models.Image, error) {
|
||||
ret := _m.Called(path)
|
||||
|
||||
var r0 *models.Image
|
||||
if rf, ok := ret.Get(0).(func(string) *models.Image); ok {
|
||||
r0 = rf(path)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).(*models.Image)
|
||||
}
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(string) error); ok {
|
||||
r1 = rf(path)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// FindMany provides a mock function with given fields: ids
|
||||
func (_m *ImageReaderWriter) FindMany(ids []int) ([]*models.Image, error) {
|
||||
ret := _m.Called(ids)
|
||||
@@ -127,6 +250,168 @@ func (_m *ImageReaderWriter) FindMany(ids []int) ([]*models.Image, error) {
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// GetGalleryIDs provides a mock function with given fields: imageID
|
||||
func (_m *ImageReaderWriter) GetGalleryIDs(imageID int) ([]int, error) {
|
||||
ret := _m.Called(imageID)
|
||||
|
||||
var r0 []int
|
||||
if rf, ok := ret.Get(0).(func(int) []int); ok {
|
||||
r0 = rf(imageID)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]int)
|
||||
}
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(int) error); ok {
|
||||
r1 = rf(imageID)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// GetPerformerIDs provides a mock function with given fields: imageID
|
||||
func (_m *ImageReaderWriter) GetPerformerIDs(imageID int) ([]int, error) {
|
||||
ret := _m.Called(imageID)
|
||||
|
||||
var r0 []int
|
||||
if rf, ok := ret.Get(0).(func(int) []int); ok {
|
||||
r0 = rf(imageID)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]int)
|
||||
}
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(int) error); ok {
|
||||
r1 = rf(imageID)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// GetTagIDs provides a mock function with given fields: imageID
|
||||
func (_m *ImageReaderWriter) GetTagIDs(imageID int) ([]int, error) {
|
||||
ret := _m.Called(imageID)
|
||||
|
||||
var r0 []int
|
||||
if rf, ok := ret.Get(0).(func(int) []int); ok {
|
||||
r0 = rf(imageID)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]int)
|
||||
}
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(int) error); ok {
|
||||
r1 = rf(imageID)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// IncrementOCounter provides a mock function with given fields: id
|
||||
func (_m *ImageReaderWriter) IncrementOCounter(id int) (int, error) {
|
||||
ret := _m.Called(id)
|
||||
|
||||
var r0 int
|
||||
if rf, ok := ret.Get(0).(func(int) int); ok {
|
||||
r0 = rf(id)
|
||||
} else {
|
||||
r0 = ret.Get(0).(int)
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(int) error); ok {
|
||||
r1 = rf(id)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// Query provides a mock function with given fields: imageFilter, findFilter
|
||||
func (_m *ImageReaderWriter) Query(imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) ([]*models.Image, int, error) {
ret := _m.Called(imageFilter, findFilter)

var r0 []*models.Image
if rf, ok := ret.Get(0).(func(*models.ImageFilterType, *models.FindFilterType) []*models.Image); ok {
r0 = rf(imageFilter, findFilter)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Image)
}
}

var r1 int
if rf, ok := ret.Get(1).(func(*models.ImageFilterType, *models.FindFilterType) int); ok {
r1 = rf(imageFilter, findFilter)
} else {
r1 = ret.Get(1).(int)
}

var r2 error
if rf, ok := ret.Get(2).(func(*models.ImageFilterType, *models.FindFilterType) error); ok {
r2 = rf(imageFilter, findFilter)
} else {
r2 = ret.Error(2)
}

return r0, r1, r2
}

// ResetOCounter provides a mock function with given fields: id
func (_m *ImageReaderWriter) ResetOCounter(id int) (int, error) {
ret := _m.Called(id)

var r0 int
if rf, ok := ret.Get(0).(func(int) int); ok {
r0 = rf(id)
} else {
r0 = ret.Get(0).(int)
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(id)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Size provides a mock function with given fields:
func (_m *ImageReaderWriter) Size() (float64, error) {
ret := _m.Called()

var r0 float64
if rf, ok := ret.Get(0).(func() float64); ok {
r0 = rf()
} else {
r0 = ret.Get(0).(float64)
}

var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Update provides a mock function with given fields: updatedImage
func (_m *ImageReaderWriter) Update(updatedImage models.ImagePartial) (*models.Image, error) {
ret := _m.Called(updatedImage)
@@ -172,3 +457,45 @@ func (_m *ImageReaderWriter) UpdateFull(updatedImage models.Image) (*models.Imag

return r0, r1
}

// UpdateGalleries provides a mock function with given fields: imageID, galleryIDs
func (_m *ImageReaderWriter) UpdateGalleries(imageID int, galleryIDs []int) error {
ret := _m.Called(imageID, galleryIDs)

var r0 error
if rf, ok := ret.Get(0).(func(int, []int) error); ok {
r0 = rf(imageID, galleryIDs)
} else {
r0 = ret.Error(0)
}

return r0
}

// UpdatePerformers provides a mock function with given fields: imageID, performerIDs
func (_m *ImageReaderWriter) UpdatePerformers(imageID int, performerIDs []int) error {
ret := _m.Called(imageID, performerIDs)

var r0 error
if rf, ok := ret.Get(0).(func(int, []int) error); ok {
r0 = rf(imageID, performerIDs)
} else {
r0 = ret.Error(0)
}

return r0
}

// UpdateTags provides a mock function with given fields: imageID, tagIDs
func (_m *ImageReaderWriter) UpdateTags(imageID int, tagIDs []int) error {
ret := _m.Called(imageID, tagIDs)

var r0 error
if rf, ok := ret.Get(0).(func(int, []int) error); ok {
r0 = rf(imageID, tagIDs)
} else {
r0 = ret.Error(0)
}

return r0
}

@@ -1,190 +0,0 @@
// Code generated by mockery v0.0.0-dev. DO NOT EDIT.

package mocks

import (
models "github.com/stashapp/stash/pkg/models"
mock "github.com/stretchr/testify/mock"
)

// JoinReaderWriter is an autogenerated mock type for the JoinReaderWriter type
type JoinReaderWriter struct {
mock.Mock
}

// CreateMoviesScenes provides a mock function with given fields: newJoins
func (_m *JoinReaderWriter) CreateMoviesScenes(newJoins []models.MoviesScenes) error {
ret := _m.Called(newJoins)

var r0 error
if rf, ok := ret.Get(0).(func([]models.MoviesScenes) error); ok {
r0 = rf(newJoins)
} else {
r0 = ret.Error(0)
}

return r0
}

// CreatePerformersScenes provides a mock function with given fields: newJoins
func (_m *JoinReaderWriter) CreatePerformersScenes(newJoins []models.PerformersScenes) error {
ret := _m.Called(newJoins)

var r0 error
if rf, ok := ret.Get(0).(func([]models.PerformersScenes) error); ok {
r0 = rf(newJoins)
} else {
r0 = ret.Error(0)
}

return r0
}

// GetSceneMovies provides a mock function with given fields: sceneID
func (_m *JoinReaderWriter) GetSceneMovies(sceneID int) ([]models.MoviesScenes, error) {
ret := _m.Called(sceneID)

var r0 []models.MoviesScenes
if rf, ok := ret.Get(0).(func(int) []models.MoviesScenes); ok {
r0 = rf(sceneID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]models.MoviesScenes)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(sceneID)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// UpdateGalleriesImages provides a mock function with given fields: imageID, updatedJoins
func (_m *JoinReaderWriter) UpdateGalleriesImages(imageID int, updatedJoins []models.GalleriesImages) error {
ret := _m.Called(imageID, updatedJoins)

var r0 error
if rf, ok := ret.Get(0).(func(int, []models.GalleriesImages) error); ok {
r0 = rf(imageID, updatedJoins)
} else {
r0 = ret.Error(0)
}

return r0
}

// UpdateGalleriesTags provides a mock function with given fields: galleryID, updatedJoins
func (_m *JoinReaderWriter) UpdateGalleriesTags(galleryID int, updatedJoins []models.GalleriesTags) error {
ret := _m.Called(galleryID, updatedJoins)

var r0 error
if rf, ok := ret.Get(0).(func(int, []models.GalleriesTags) error); ok {
r0 = rf(galleryID, updatedJoins)
} else {
r0 = ret.Error(0)
}

return r0
}

// UpdateImagesTags provides a mock function with given fields: imageID, updatedJoins
func (_m *JoinReaderWriter) UpdateImagesTags(imageID int, updatedJoins []models.ImagesTags) error {
ret := _m.Called(imageID, updatedJoins)

var r0 error
if rf, ok := ret.Get(0).(func(int, []models.ImagesTags) error); ok {
r0 = rf(imageID, updatedJoins)
} else {
r0 = ret.Error(0)
}

return r0
}

// UpdateMoviesScenes provides a mock function with given fields: sceneID, updatedJoins
func (_m *JoinReaderWriter) UpdateMoviesScenes(sceneID int, updatedJoins []models.MoviesScenes) error {
ret := _m.Called(sceneID, updatedJoins)

var r0 error
if rf, ok := ret.Get(0).(func(int, []models.MoviesScenes) error); ok {
r0 = rf(sceneID, updatedJoins)
} else {
r0 = ret.Error(0)
}

return r0
}

// UpdatePerformersGalleries provides a mock function with given fields: galleryID, updatedJoins
func (_m *JoinReaderWriter) UpdatePerformersGalleries(galleryID int, updatedJoins []models.PerformersGalleries) error {
ret := _m.Called(galleryID, updatedJoins)

var r0 error
if rf, ok := ret.Get(0).(func(int, []models.PerformersGalleries) error); ok {
r0 = rf(galleryID, updatedJoins)
} else {
r0 = ret.Error(0)
}

return r0
}

// UpdatePerformersImages provides a mock function with given fields: imageID, updatedJoins
func (_m *JoinReaderWriter) UpdatePerformersImages(imageID int, updatedJoins []models.PerformersImages) error {
ret := _m.Called(imageID, updatedJoins)

var r0 error
if rf, ok := ret.Get(0).(func(int, []models.PerformersImages) error); ok {
r0 = rf(imageID, updatedJoins)
} else {
r0 = ret.Error(0)
}

return r0
}

// UpdatePerformersScenes provides a mock function with given fields: sceneID, updatedJoins
func (_m *JoinReaderWriter) UpdatePerformersScenes(sceneID int, updatedJoins []models.PerformersScenes) error {
ret := _m.Called(sceneID, updatedJoins)

var r0 error
if rf, ok := ret.Get(0).(func(int, []models.PerformersScenes) error); ok {
r0 = rf(sceneID, updatedJoins)
} else {
r0 = ret.Error(0)
}

return r0
}

// UpdateSceneMarkersTags provides a mock function with given fields: sceneMarkerID, updatedJoins
func (_m *JoinReaderWriter) UpdateSceneMarkersTags(sceneMarkerID int, updatedJoins []models.SceneMarkersTags) error {
ret := _m.Called(sceneMarkerID, updatedJoins)

var r0 error
if rf, ok := ret.Get(0).(func(int, []models.SceneMarkersTags) error); ok {
r0 = rf(sceneMarkerID, updatedJoins)
} else {
r0 = ret.Error(0)
}

return r0
}

// UpdateScenesTags provides a mock function with given fields: sceneID, updatedJoins
func (_m *JoinReaderWriter) UpdateScenesTags(sceneID int, updatedJoins []models.ScenesTags) error {
ret := _m.Called(sceneID, updatedJoins)

var r0 error
if rf, ok := ret.Get(0).(func(int, []models.ScenesTags) error); ok {
r0 = rf(sceneID, updatedJoins)
} else {
r0 = ret.Error(0)
}

return r0
}

@@ -35,6 +35,50 @@ func (_m *MovieReaderWriter) All() ([]*models.Movie, error) {
return r0, r1
}

// AllSlim provides a mock function with given fields:
func (_m *MovieReaderWriter) AllSlim() ([]*models.Movie, error) {
ret := _m.Called()

var r0 []*models.Movie
if rf, ok := ret.Get(0).(func() []*models.Movie); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Movie)
}
}

var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Count provides a mock function with given fields:
func (_m *MovieReaderWriter) Count() (int, error) {
ret := _m.Called()

var r0 int
if rf, ok := ret.Get(0).(func() int); ok {
r0 = rf()
} else {
r0 = ret.Get(0).(int)
}

var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Create provides a mock function with given fields: newMovie
func (_m *MovieReaderWriter) Create(newMovie models.Movie) (*models.Movie, error) {
ret := _m.Called(newMovie)
@@ -58,6 +102,34 @@ func (_m *MovieReaderWriter) Create(newMovie models.Movie) (*models.Movie, error
return r0, r1
}

// Destroy provides a mock function with given fields: id
func (_m *MovieReaderWriter) Destroy(id int) error {
ret := _m.Called(id)

var r0 error
if rf, ok := ret.Get(0).(func(int) error); ok {
r0 = rf(id)
} else {
r0 = ret.Error(0)
}

return r0
}

// DestroyImages provides a mock function with given fields: movieID
func (_m *MovieReaderWriter) DestroyImages(movieID int) error {
ret := _m.Called(movieID)

var r0 error
if rf, ok := ret.Get(0).(func(int) error); ok {
r0 = rf(movieID)
} else {
r0 = ret.Error(0)
}

return r0
}

// Find provides a mock function with given fields: id
func (_m *MovieReaderWriter) Find(id int) (*models.Movie, error) {
ret := _m.Called(id)
@@ -196,6 +268,36 @@ func (_m *MovieReaderWriter) GetFrontImage(movieID int) ([]byte, error) {
return r0, r1
}

// Query provides a mock function with given fields: movieFilter, findFilter
func (_m *MovieReaderWriter) Query(movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) ([]*models.Movie, int, error) {
ret := _m.Called(movieFilter, findFilter)

var r0 []*models.Movie
if rf, ok := ret.Get(0).(func(*models.MovieFilterType, *models.FindFilterType) []*models.Movie); ok {
r0 = rf(movieFilter, findFilter)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Movie)
}
}

var r1 int
if rf, ok := ret.Get(1).(func(*models.MovieFilterType, *models.FindFilterType) int); ok {
r1 = rf(movieFilter, findFilter)
} else {
r1 = ret.Get(1).(int)
}

var r2 error
if rf, ok := ret.Get(2).(func(*models.MovieFilterType, *models.FindFilterType) error); ok {
r2 = rf(movieFilter, findFilter)
} else {
r2 = ret.Error(2)
}

return r0, r1, r2
}

// Update provides a mock function with given fields: updatedMovie
func (_m *MovieReaderWriter) Update(updatedMovie models.MoviePartial) (*models.Movie, error) {
ret := _m.Called(updatedMovie)
@@ -242,8 +344,8 @@ func (_m *MovieReaderWriter) UpdateFull(updatedMovie models.Movie) (*models.Movi
return r0, r1
}

// UpdateMovieImages provides a mock function with given fields: movieID, frontImage, backImage
func (_m *MovieReaderWriter) UpdateMovieImages(movieID int, frontImage []byte, backImage []byte) error {
// UpdateImages provides a mock function with given fields: movieID, frontImage, backImage
func (_m *MovieReaderWriter) UpdateImages(movieID int, frontImage []byte, backImage []byte) error {
ret := _m.Called(movieID, frontImage, backImage)

var r0 error

@@ -35,6 +35,50 @@ func (_m *PerformerReaderWriter) All() ([]*models.Performer, error) {
return r0, r1
}

// AllSlim provides a mock function with given fields:
func (_m *PerformerReaderWriter) AllSlim() ([]*models.Performer, error) {
ret := _m.Called()

var r0 []*models.Performer
if rf, ok := ret.Get(0).(func() []*models.Performer); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Performer)
}
}

var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Count provides a mock function with given fields:
func (_m *PerformerReaderWriter) Count() (int, error) {
ret := _m.Called()

var r0 int
if rf, ok := ret.Get(0).(func() int); ok {
r0 = rf()
} else {
r0 = ret.Get(0).(int)
}

var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Create provides a mock function with given fields: newPerformer
func (_m *PerformerReaderWriter) Create(newPerformer models.Performer) (*models.Performer, error) {
ret := _m.Called(newPerformer)
@@ -58,6 +102,57 @@ func (_m *PerformerReaderWriter) Create(newPerformer models.Performer) (*models.
return r0, r1
}

// Destroy provides a mock function with given fields: id
func (_m *PerformerReaderWriter) Destroy(id int) error {
ret := _m.Called(id)

var r0 error
if rf, ok := ret.Get(0).(func(int) error); ok {
r0 = rf(id)
} else {
r0 = ret.Error(0)
}

return r0
}

// DestroyImage provides a mock function with given fields: performerID
func (_m *PerformerReaderWriter) DestroyImage(performerID int) error {
ret := _m.Called(performerID)

var r0 error
if rf, ok := ret.Get(0).(func(int) error); ok {
r0 = rf(performerID)
} else {
r0 = ret.Error(0)
}

return r0
}

// Find provides a mock function with given fields: id
func (_m *PerformerReaderWriter) Find(id int) (*models.Performer, error) {
ret := _m.Called(id)

var r0 *models.Performer
if rf, ok := ret.Get(0).(func(int) *models.Performer); ok {
r0 = rf(id)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*models.Performer)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(id)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// FindByGalleryID provides a mock function with given fields: galleryID
func (_m *PerformerReaderWriter) FindByGalleryID(galleryID int) ([]*models.Performer, error) {
ret := _m.Called(galleryID)
@@ -196,8 +291,8 @@ func (_m *PerformerReaderWriter) FindNamesBySceneID(sceneID int) ([]*models.Perf
return r0, r1
}

// GetPerformerImage provides a mock function with given fields: performerID
func (_m *PerformerReaderWriter) GetPerformerImage(performerID int) ([]byte, error) {
// GetImage provides a mock function with given fields: performerID
func (_m *PerformerReaderWriter) GetImage(performerID int) ([]byte, error) {
ret := _m.Called(performerID)

var r0 []byte
@@ -219,6 +314,59 @@ func (_m *PerformerReaderWriter) GetPerformerImage(performerID int) ([]byte, err
return r0, r1
}

// GetStashIDs provides a mock function with given fields: performerID
func (_m *PerformerReaderWriter) GetStashIDs(performerID int) ([]*models.StashID, error) {
ret := _m.Called(performerID)

var r0 []*models.StashID
if rf, ok := ret.Get(0).(func(int) []*models.StashID); ok {
r0 = rf(performerID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.StashID)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(performerID)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Query provides a mock function with given fields: performerFilter, findFilter
func (_m *PerformerReaderWriter) Query(performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) ([]*models.Performer, int, error) {
ret := _m.Called(performerFilter, findFilter)

var r0 []*models.Performer
if rf, ok := ret.Get(0).(func(*models.PerformerFilterType, *models.FindFilterType) []*models.Performer); ok {
r0 = rf(performerFilter, findFilter)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Performer)
}
}

var r1 int
if rf, ok := ret.Get(1).(func(*models.PerformerFilterType, *models.FindFilterType) int); ok {
r1 = rf(performerFilter, findFilter)
} else {
r1 = ret.Get(1).(int)
}

var r2 error
if rf, ok := ret.Get(2).(func(*models.PerformerFilterType, *models.FindFilterType) error); ok {
r2 = rf(performerFilter, findFilter)
} else {
r2 = ret.Error(2)
}

return r0, r1, r2
}

// Update provides a mock function with given fields: updatedPerformer
func (_m *PerformerReaderWriter) Update(updatedPerformer models.PerformerPartial) (*models.Performer, error) {
ret := _m.Called(updatedPerformer)
@@ -265,8 +413,8 @@ func (_m *PerformerReaderWriter) UpdateFull(updatedPerformer models.Performer) (
return r0, r1
}

// UpdatePerformerImage provides a mock function with given fields: performerID, image
func (_m *PerformerReaderWriter) UpdatePerformerImage(performerID int, image []byte) error {
// UpdateImage provides a mock function with given fields: performerID, image
func (_m *PerformerReaderWriter) UpdateImage(performerID int, image []byte) error {
ret := _m.Called(performerID, image)

var r0 error
@@ -278,3 +426,17 @@ func (_m *PerformerReaderWriter) UpdatePerformerImage(performerID int, image []b

return r0
}

// UpdateStashIDs provides a mock function with given fields: performerID, stashIDs
func (_m *PerformerReaderWriter) UpdateStashIDs(performerID int, stashIDs []models.StashID) error {
ret := _m.Called(performerID, stashIDs)

var r0 error
if rf, ok := ret.Get(0).(func(int, []models.StashID) error); ok {
r0 = rf(performerID, stashIDs)
} else {
r0 = ret.Error(0)
}

return r0
}

@@ -12,6 +12,27 @@ type SceneMarkerReaderWriter struct {
mock.Mock
}

// CountByTagID provides a mock function with given fields: tagID
func (_m *SceneMarkerReaderWriter) CountByTagID(tagID int) (int, error) {
ret := _m.Called(tagID)

var r0 int
if rf, ok := ret.Get(0).(func(int) int); ok {
r0 = rf(tagID)
} else {
r0 = ret.Get(0).(int)
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(tagID)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Create provides a mock function with given fields: newSceneMarker
func (_m *SceneMarkerReaderWriter) Create(newSceneMarker models.SceneMarker) (*models.SceneMarker, error) {
ret := _m.Called(newSceneMarker)
@@ -35,6 +56,43 @@ func (_m *SceneMarkerReaderWriter) Create(newSceneMarker models.SceneMarker) (*m
return r0, r1
}

// Destroy provides a mock function with given fields: id
func (_m *SceneMarkerReaderWriter) Destroy(id int) error {
ret := _m.Called(id)

var r0 error
if rf, ok := ret.Get(0).(func(int) error); ok {
r0 = rf(id)
} else {
r0 = ret.Error(0)
}

return r0
}

// Find provides a mock function with given fields: id
func (_m *SceneMarkerReaderWriter) Find(id int) (*models.SceneMarker, error) {
ret := _m.Called(id)

var r0 *models.SceneMarker
if rf, ok := ret.Get(0).(func(int) *models.SceneMarker); ok {
r0 = rf(id)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*models.SceneMarker)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(id)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// FindBySceneID provides a mock function with given fields: sceneID
func (_m *SceneMarkerReaderWriter) FindBySceneID(sceneID int) ([]*models.SceneMarker, error) {
ret := _m.Called(sceneID)
@@ -58,6 +116,105 @@ func (_m *SceneMarkerReaderWriter) FindBySceneID(sceneID int) ([]*models.SceneMa
return r0, r1
}

// FindMany provides a mock function with given fields: ids
func (_m *SceneMarkerReaderWriter) FindMany(ids []int) ([]*models.SceneMarker, error) {
ret := _m.Called(ids)

var r0 []*models.SceneMarker
if rf, ok := ret.Get(0).(func([]int) []*models.SceneMarker); ok {
r0 = rf(ids)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.SceneMarker)
}
}

var r1 error
if rf, ok := ret.Get(1).(func([]int) error); ok {
r1 = rf(ids)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// GetMarkerStrings provides a mock function with given fields: q, sort
func (_m *SceneMarkerReaderWriter) GetMarkerStrings(q *string, sort *string) ([]*models.MarkerStringsResultType, error) {
ret := _m.Called(q, sort)

var r0 []*models.MarkerStringsResultType
if rf, ok := ret.Get(0).(func(*string, *string) []*models.MarkerStringsResultType); ok {
r0 = rf(q, sort)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.MarkerStringsResultType)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(*string, *string) error); ok {
r1 = rf(q, sort)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// GetTagIDs provides a mock function with given fields: imageID
func (_m *SceneMarkerReaderWriter) GetTagIDs(imageID int) ([]int, error) {
ret := _m.Called(imageID)

var r0 []int
if rf, ok := ret.Get(0).(func(int) []int); ok {
r0 = rf(imageID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]int)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(imageID)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Query provides a mock function with given fields: sceneMarkerFilter, findFilter
func (_m *SceneMarkerReaderWriter) Query(sceneMarkerFilter *models.SceneMarkerFilterType, findFilter *models.FindFilterType) ([]*models.SceneMarker, int, error) {
ret := _m.Called(sceneMarkerFilter, findFilter)

var r0 []*models.SceneMarker
if rf, ok := ret.Get(0).(func(*models.SceneMarkerFilterType, *models.FindFilterType) []*models.SceneMarker); ok {
r0 = rf(sceneMarkerFilter, findFilter)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.SceneMarker)
}
}

var r1 int
if rf, ok := ret.Get(1).(func(*models.SceneMarkerFilterType, *models.FindFilterType) int); ok {
r1 = rf(sceneMarkerFilter, findFilter)
} else {
r1 = ret.Get(1).(int)
}

var r2 error
if rf, ok := ret.Get(2).(func(*models.SceneMarkerFilterType, *models.FindFilterType) error); ok {
r2 = rf(sceneMarkerFilter, findFilter)
} else {
r2 = ret.Error(2)
}

return r0, r1, r2
}

// Update provides a mock function with given fields: updatedSceneMarker
func (_m *SceneMarkerReaderWriter) Update(updatedSceneMarker models.SceneMarker) (*models.SceneMarker, error) {
ret := _m.Called(updatedSceneMarker)
@@ -80,3 +237,40 @@ func (_m *SceneMarkerReaderWriter) Update(updatedSceneMarker models.SceneMarker)

return r0, r1
}

// UpdateTags provides a mock function with given fields: markerID, tagIDs
func (_m *SceneMarkerReaderWriter) UpdateTags(markerID int, tagIDs []int) error {
ret := _m.Called(markerID, tagIDs)

var r0 error
if rf, ok := ret.Get(0).(func(int, []int) error); ok {
r0 = rf(markerID, tagIDs)
} else {
r0 = ret.Error(0)
}

return r0
}

// Wall provides a mock function with given fields: q
func (_m *SceneMarkerReaderWriter) Wall(q *string) ([]*models.SceneMarker, error) {
ret := _m.Called(q)

var r0 []*models.SceneMarker
if rf, ok := ret.Get(0).(func(*string) []*models.SceneMarker); ok {
r0 = rf(q)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.SceneMarker)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(*string) error); ok {
r1 = rf(q)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

@@ -35,6 +35,153 @@ func (_m *SceneReaderWriter) All() ([]*models.Scene, error) {
return r0, r1
}

// Count provides a mock function with given fields:
func (_m *SceneReaderWriter) Count() (int, error) {
ret := _m.Called()

var r0 int
if rf, ok := ret.Get(0).(func() int); ok {
r0 = rf()
} else {
r0 = ret.Get(0).(int)
}

var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// CountByMovieID provides a mock function with given fields: movieID
func (_m *SceneReaderWriter) CountByMovieID(movieID int) (int, error) {
ret := _m.Called(movieID)

var r0 int
if rf, ok := ret.Get(0).(func(int) int); ok {
r0 = rf(movieID)
} else {
r0 = ret.Get(0).(int)
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(movieID)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// CountByPerformerID provides a mock function with given fields: performerID
func (_m *SceneReaderWriter) CountByPerformerID(performerID int) (int, error) {
ret := _m.Called(performerID)

var r0 int
if rf, ok := ret.Get(0).(func(int) int); ok {
r0 = rf(performerID)
} else {
r0 = ret.Get(0).(int)
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(performerID)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// CountByStudioID provides a mock function with given fields: studioID
func (_m *SceneReaderWriter) CountByStudioID(studioID int) (int, error) {
ret := _m.Called(studioID)

var r0 int
if rf, ok := ret.Get(0).(func(int) int); ok {
r0 = rf(studioID)
} else {
r0 = ret.Get(0).(int)
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(studioID)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// CountByTagID provides a mock function with given fields: tagID
func (_m *SceneReaderWriter) CountByTagID(tagID int) (int, error) {
ret := _m.Called(tagID)

var r0 int
if rf, ok := ret.Get(0).(func(int) int); ok {
r0 = rf(tagID)
} else {
r0 = ret.Get(0).(int)
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(tagID)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// CountMissingChecksum provides a mock function with given fields:
func (_m *SceneReaderWriter) CountMissingChecksum() (int, error) {
ret := _m.Called()

var r0 int
if rf, ok := ret.Get(0).(func() int); ok {
r0 = rf()
} else {
r0 = ret.Get(0).(int)
}

var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// CountMissingOSHash provides a mock function with given fields:
func (_m *SceneReaderWriter) CountMissingOSHash() (int, error) {
ret := _m.Called()

var r0 int
if rf, ok := ret.Get(0).(func() int); ok {
r0 = rf()
} else {
r0 = ret.Get(0).(int)
}

var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Create provides a mock function with given fields: newScene
func (_m *SceneReaderWriter) Create(newScene models.Scene) (*models.Scene, error) {
ret := _m.Called(newScene)
@@ -58,6 +205,78 @@ func (_m *SceneReaderWriter) Create(newScene models.Scene) (*models.Scene, error
return r0, r1
}

// DecrementOCounter provides a mock function with given fields: id
func (_m *SceneReaderWriter) DecrementOCounter(id int) (int, error) {
ret := _m.Called(id)

var r0 int
if rf, ok := ret.Get(0).(func(int) int); ok {
r0 = rf(id)
} else {
r0 = ret.Get(0).(int)
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(id)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Destroy provides a mock function with given fields: id
func (_m *SceneReaderWriter) Destroy(id int) error {
ret := _m.Called(id)

var r0 error
if rf, ok := ret.Get(0).(func(int) error); ok {
r0 = rf(id)
} else {
r0 = ret.Error(0)
}

return r0
}

// DestroyCover provides a mock function with given fields: sceneID
func (_m *SceneReaderWriter) DestroyCover(sceneID int) error {
ret := _m.Called(sceneID)

var r0 error
if rf, ok := ret.Get(0).(func(int) error); ok {
r0 = rf(sceneID)
} else {
r0 = ret.Error(0)
}

return r0
}

// Find provides a mock function with given fields: id
func (_m *SceneReaderWriter) Find(id int) (*models.Scene, error) {
ret := _m.Called(id)

var r0 *models.Scene
if rf, ok := ret.Get(0).(func(int) *models.Scene); ok {
r0 = rf(id)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*models.Scene)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(id)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// FindByChecksum provides a mock function with given fields: checksum
func (_m *SceneReaderWriter) FindByChecksum(checksum string) (*models.Scene, error) {
ret := _m.Called(checksum)
@@ -127,6 +346,52 @@ func (_m *SceneReaderWriter) FindByOSHash(oshash string) (*models.Scene, error)
return r0, r1
}

// FindByPath provides a mock function with given fields: path
func (_m *SceneReaderWriter) FindByPath(path string) (*models.Scene, error) {
ret := _m.Called(path)

var r0 *models.Scene
if rf, ok := ret.Get(0).(func(string) *models.Scene); ok {
r0 = rf(path)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*models.Scene)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(string) error); ok {
r1 = rf(path)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// FindByPerformerID provides a mock function with given fields: performerID
func (_m *SceneReaderWriter) FindByPerformerID(performerID int) ([]*models.Scene, error) {
ret := _m.Called(performerID)

var r0 []*models.Scene
if rf, ok := ret.Get(0).(func(int) []*models.Scene); ok {
r0 = rf(performerID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Scene)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(performerID)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// FindMany provides a mock function with given fields: ids
func (_m *SceneReaderWriter) FindMany(ids []int) ([]*models.Scene, error) {
ret := _m.Called(ids)
@@ -150,8 +415,8 @@ func (_m *SceneReaderWriter) FindMany(ids []int) ([]*models.Scene, error) {
return r0, r1
}

// GetSceneCover provides a mock function with given fields: sceneID
func (_m *SceneReaderWriter) GetSceneCover(sceneID int) ([]byte, error) {
// GetCover provides a mock function with given fields: sceneID
func (_m *SceneReaderWriter) GetCover(sceneID int) ([]byte, error) {
ret := _m.Called(sceneID)

var r0 []byte
@@ -173,6 +438,244 @@ func (_m *SceneReaderWriter) GetSceneCover(sceneID int) ([]byte, error) {
return r0, r1
}

// GetMovies provides a mock function with given fields: sceneID
func (_m *SceneReaderWriter) GetMovies(sceneID int) ([]models.MoviesScenes, error) {
ret := _m.Called(sceneID)

var r0 []models.MoviesScenes
if rf, ok := ret.Get(0).(func(int) []models.MoviesScenes); ok {
r0 = rf(sceneID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]models.MoviesScenes)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(sceneID)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// GetPerformerIDs provides a mock function with given fields: imageID
func (_m *SceneReaderWriter) GetPerformerIDs(imageID int) ([]int, error) {
ret := _m.Called(imageID)

var r0 []int
if rf, ok := ret.Get(0).(func(int) []int); ok {
r0 = rf(imageID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]int)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(imageID)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// GetStashIDs provides a mock function with given fields: performerID
func (_m *SceneReaderWriter) GetStashIDs(performerID int) ([]*models.StashID, error) {
ret := _m.Called(performerID)

var r0 []*models.StashID
if rf, ok := ret.Get(0).(func(int) []*models.StashID); ok {
r0 = rf(performerID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.StashID)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(performerID)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// GetTagIDs provides a mock function with given fields: imageID
func (_m *SceneReaderWriter) GetTagIDs(imageID int) ([]int, error) {
ret := _m.Called(imageID)

var r0 []int
if rf, ok := ret.Get(0).(func(int) []int); ok {
r0 = rf(imageID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]int)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(imageID)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// IncrementOCounter provides a mock function with given fields: id
func (_m *SceneReaderWriter) IncrementOCounter(id int) (int, error) {
ret := _m.Called(id)

var r0 int
if rf, ok := ret.Get(0).(func(int) int); ok {
r0 = rf(id)
} else {
r0 = ret.Get(0).(int)
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(id)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Query provides a mock function with given fields: sceneFilter, findFilter
func (_m *SceneReaderWriter) Query(sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, int, error) {
ret := _m.Called(sceneFilter, findFilter)

var r0 []*models.Scene
if rf, ok := ret.Get(0).(func(*models.SceneFilterType, *models.FindFilterType) []*models.Scene); ok {
r0 = rf(sceneFilter, findFilter)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Scene)
}
}

var r1 int
if rf, ok := ret.Get(1).(func(*models.SceneFilterType, *models.FindFilterType) int); ok {
r1 = rf(sceneFilter, findFilter)
} else {
r1 = ret.Get(1).(int)
}

var r2 error
if rf, ok := ret.Get(2).(func(*models.SceneFilterType, *models.FindFilterType) error); ok {
r2 = rf(sceneFilter, findFilter)
} else {
r2 = ret.Error(2)
}

return r0, r1, r2
}

// QueryAllByPathRegex provides a mock function with given fields: regex, ignoreOrganized
func (_m *SceneReaderWriter) QueryAllByPathRegex(regex string, ignoreOrganized bool) ([]*models.Scene, error) {
ret := _m.Called(regex, ignoreOrganized)

var r0 []*models.Scene
if rf, ok := ret.Get(0).(func(string, bool) []*models.Scene); ok {
r0 = rf(regex, ignoreOrganized)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Scene)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(string, bool) error); ok {
r1 = rf(regex, ignoreOrganized)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// QueryByPathRegex provides a mock function with given fields: findFilter
func (_m *SceneReaderWriter) QueryByPathRegex(findFilter *models.FindFilterType) ([]*models.Scene, int, error) {
ret := _m.Called(findFilter)

var r0 []*models.Scene
if rf, ok := ret.Get(0).(func(*models.FindFilterType) []*models.Scene); ok {
r0 = rf(findFilter)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Scene)
}
}

var r1 int
if rf, ok := ret.Get(1).(func(*models.FindFilterType) int); ok {
r1 = rf(findFilter)
} else {
r1 = ret.Get(1).(int)
}

var r2 error
if rf, ok := ret.Get(2).(func(*models.FindFilterType) error); ok {
r2 = rf(findFilter)
} else {
r2 = ret.Error(2)
}

return r0, r1, r2
}

// ResetOCounter provides a mock function with given fields: id
func (_m *SceneReaderWriter) ResetOCounter(id int) (int, error) {
ret := _m.Called(id)

var r0 int
if rf, ok := ret.Get(0).(func(int) int); ok {
r0 = rf(id)
} else {
r0 = ret.Get(0).(int)
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(id)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Size provides a mock function with given fields:
func (_m *SceneReaderWriter) Size() (float64, error) {
ret := _m.Called()

var r0 float64
if rf, ok := ret.Get(0).(func() float64); ok {
r0 = rf()
} else {
r0 = ret.Get(0).(float64)
}

var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Update provides a mock function with given fields: updatedScene
func (_m *SceneReaderWriter) Update(updatedScene models.ScenePartial) (*models.Scene, error) {
ret := _m.Called(updatedScene)
@@ -196,6 +699,34 @@ func (_m *SceneReaderWriter) Update(updatedScene models.ScenePartial) (*models.S
return r0, r1
}

// UpdateCover provides a mock function with given fields: sceneID, cover
func (_m *SceneReaderWriter) UpdateCover(sceneID int, cover []byte) error {
ret := _m.Called(sceneID, cover)

var r0 error
if rf, ok := ret.Get(0).(func(int, []byte) error); ok {
r0 = rf(sceneID, cover)
} else {
r0 = ret.Error(0)
}

return r0
}

// UpdateFileModTime provides a mock function with given fields: id, modTime
func (_m *SceneReaderWriter) UpdateFileModTime(id int, modTime models.NullSQLiteTimestamp) error {
ret := _m.Called(id, modTime)

var r0 error
if rf, ok := ret.Get(0).(func(int, models.NullSQLiteTimestamp) error); ok {
r0 = rf(id, modTime)
} else {
r0 = ret.Error(0)
}

return r0
}

// UpdateFull provides a mock function with given fields: updatedScene
func (_m *SceneReaderWriter) UpdateFull(updatedScene models.Scene) (*models.Scene, error) {
ret := _m.Called(updatedScene)
@@ -219,16 +750,81 @@ func (_m *SceneReaderWriter) UpdateFull(updatedScene models.Scene) (*models.Scen
return r0, r1
}

// UpdateSceneCover provides a mock function with given fields: sceneID, cover
func (_m *SceneReaderWriter) UpdateSceneCover(sceneID int, cover []byte) error {
ret := _m.Called(sceneID, cover)
// UpdateMovies provides a mock function with given fields: sceneID, movies
func (_m *SceneReaderWriter) UpdateMovies(sceneID int, movies []models.MoviesScenes) error {
ret := _m.Called(sceneID, movies)

var r0 error
if rf, ok := ret.Get(0).(func(int, []byte) error); ok {
r0 = rf(sceneID, cover)
if rf, ok := ret.Get(0).(func(int, []models.MoviesScenes) error); ok {
r0 = rf(sceneID, movies)
} else {
r0 = ret.Error(0)
}

return r0
}

// UpdatePerformers provides a mock function with given fields: sceneID, performerIDs
func (_m *SceneReaderWriter) UpdatePerformers(sceneID int, performerIDs []int) error {
ret := _m.Called(sceneID, performerIDs)

var r0 error
if rf, ok := ret.Get(0).(func(int, []int) error); ok {
r0 = rf(sceneID, performerIDs)
} else {
r0 = ret.Error(0)
}

return r0
}

// UpdateStashIDs provides a mock function with given fields: sceneID, stashIDs
func (_m *SceneReaderWriter) UpdateStashIDs(sceneID int, stashIDs []models.StashID) error {
ret := _m.Called(sceneID, stashIDs)

var r0 error
if rf, ok := ret.Get(0).(func(int, []models.StashID) error); ok {
r0 = rf(sceneID, stashIDs)
} else {
r0 = ret.Error(0)
}

return r0
}

// UpdateTags provides a mock function with given fields: sceneID, tagIDs
func (_m *SceneReaderWriter) UpdateTags(sceneID int, tagIDs []int) error {
ret := _m.Called(sceneID, tagIDs)

var r0 error
if rf, ok := ret.Get(0).(func(int, []int) error); ok {
r0 = rf(sceneID, tagIDs)
} else {
r0 = ret.Error(0)
}

return r0
}

// Wall provides a mock function with given fields: q
func (_m *SceneReaderWriter) Wall(q *string) ([]*models.Scene, error) {
ret := _m.Called(q)

var r0 []*models.Scene
if rf, ok := ret.Get(0).(func(*string) []*models.Scene); ok {
r0 = rf(q)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Scene)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(*string) error); ok {
r1 = rf(q)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

59 pkg/models/mocks/ScrapedItemReaderWriter.go Normal file
@@ -0,0 +1,59 @@
// Code generated by mockery v0.0.0-dev. DO NOT EDIT.

package mocks

import (
models "github.com/stashapp/stash/pkg/models"
mock "github.com/stretchr/testify/mock"
)

// ScrapedItemReaderWriter is an autogenerated mock type for the ScrapedItemReaderWriter type
type ScrapedItemReaderWriter struct {
mock.Mock
}

// All provides a mock function with given fields:
func (_m *ScrapedItemReaderWriter) All() ([]*models.ScrapedItem, error) {
ret := _m.Called()

var r0 []*models.ScrapedItem
if rf, ok := ret.Get(0).(func() []*models.ScrapedItem); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.ScrapedItem)
}
}

var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Create provides a mock function with given fields: newObject
func (_m *ScrapedItemReaderWriter) Create(newObject models.ScrapedItem) (*models.ScrapedItem, error) {
ret := _m.Called(newObject)

var r0 *models.ScrapedItem
if rf, ok := ret.Get(0).(func(models.ScrapedItem) *models.ScrapedItem); ok {
r0 = rf(newObject)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*models.ScrapedItem)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(models.ScrapedItem) error); ok {
r1 = rf(newObject)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

@@ -35,6 +35,50 @@ func (_m *StudioReaderWriter) All() ([]*models.Studio, error) {
return r0, r1
}

// AllSlim provides a mock function with given fields:
func (_m *StudioReaderWriter) AllSlim() ([]*models.Studio, error) {
ret := _m.Called()

var r0 []*models.Studio
if rf, ok := ret.Get(0).(func() []*models.Studio); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Studio)
}
}

var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Count provides a mock function with given fields:
func (_m *StudioReaderWriter) Count() (int, error) {
ret := _m.Called()

var r0 int
if rf, ok := ret.Get(0).(func() int); ok {
r0 = rf()
} else {
r0 = ret.Get(0).(int)
}

var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Create provides a mock function with given fields: newStudio
func (_m *StudioReaderWriter) Create(newStudio models.Studio) (*models.Studio, error) {
ret := _m.Called(newStudio)
@@ -58,6 +102,34 @@ func (_m *StudioReaderWriter) Create(newStudio models.Studio) (*models.Studio, e
return r0, r1
}

// Destroy provides a mock function with given fields: id
func (_m *StudioReaderWriter) Destroy(id int) error {
ret := _m.Called(id)

var r0 error
if rf, ok := ret.Get(0).(func(int) error); ok {
r0 = rf(id)
} else {
r0 = ret.Error(0)
}

return r0
}

// DestroyImage provides a mock function with given fields: studioID
func (_m *StudioReaderWriter) DestroyImage(studioID int) error {
ret := _m.Called(studioID)

var r0 error
if rf, ok := ret.Get(0).(func(int) error); ok {
r0 = rf(studioID)
} else {
r0 = ret.Error(0)
}

return r0
}

// Find provides a mock function with given fields: id
func (_m *StudioReaderWriter) Find(id int) (*models.Studio, error) {
ret := _m.Called(id)
@@ -104,6 +176,29 @@ func (_m *StudioReaderWriter) FindByName(name string, nocase bool) (*models.Stud
return r0, r1
}

// FindChildren provides a mock function with given fields: id
func (_m *StudioReaderWriter) FindChildren(id int) ([]*models.Studio, error) {
ret := _m.Called(id)

var r0 []*models.Studio
if rf, ok := ret.Get(0).(func(int) []*models.Studio); ok {
r0 = rf(id)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Studio)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(id)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// FindMany provides a mock function with given fields: ids
func (_m *StudioReaderWriter) FindMany(ids []int) ([]*models.Studio, error) {
ret := _m.Called(ids)
@@ -127,8 +222,8 @@ func (_m *StudioReaderWriter) FindMany(ids []int) ([]*models.Studio, error) {
return r0, r1
}

// GetStudioImage provides a mock function with given fields: studioID
func (_m *StudioReaderWriter) GetStudioImage(studioID int) ([]byte, error) {
// GetImage provides a mock function with given fields: studioID
func (_m *StudioReaderWriter) GetImage(studioID int) ([]byte, error) {
ret := _m.Called(studioID)

var r0 []byte
@@ -150,6 +245,80 @@ func (_m *StudioReaderWriter) GetStudioImage(studioID int) ([]byte, error) {
return r0, r1
}

// GetStashIDs provides a mock function with given fields: studioID
func (_m *StudioReaderWriter) GetStashIDs(studioID int) ([]*models.StashID, error) {
ret := _m.Called(studioID)

var r0 []*models.StashID
if rf, ok := ret.Get(0).(func(int) []*models.StashID); ok {
r0 = rf(studioID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.StashID)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(studioID)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// HasImage provides a mock function with given fields: studioID
func (_m *StudioReaderWriter) HasImage(studioID int) (bool, error) {
ret := _m.Called(studioID)

var r0 bool
if rf, ok := ret.Get(0).(func(int) bool); ok {
r0 = rf(studioID)
} else {
r0 = ret.Get(0).(bool)
}

var r1 error
if rf, ok := ret.Get(1).(func(int) error); ok {
r1 = rf(studioID)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Query provides a mock function with given fields: studioFilter, findFilter
func (_m *StudioReaderWriter) Query(studioFilter *models.StudioFilterType, findFilter *models.FindFilterType) ([]*models.Studio, int, error) {
ret := _m.Called(studioFilter, findFilter)

var r0 []*models.Studio
if rf, ok := ret.Get(0).(func(*models.StudioFilterType, *models.FindFilterType) []*models.Studio); ok {
r0 = rf(studioFilter, findFilter)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Studio)
}
}

var r1 int
if rf, ok := ret.Get(1).(func(*models.StudioFilterType, *models.FindFilterType) int); ok {
r1 = rf(studioFilter, findFilter)
} else {
r1 = ret.Get(1).(int)
}

var r2 error
if rf, ok := ret.Get(2).(func(*models.StudioFilterType, *models.FindFilterType) error); ok {
r2 = rf(studioFilter, findFilter)
} else {
r2 = ret.Error(2)
}

return r0, r1, r2
}

// Update provides a mock function with given fields: updatedStudio
func (_m *StudioReaderWriter) Update(updatedStudio models.StudioPartial) (*models.Studio, error) {
ret := _m.Called(updatedStudio)
@@ -196,8 +365,8 @@ func (_m *StudioReaderWriter) UpdateFull(updatedStudio models.Studio) (*models.S
return r0, r1
}

// UpdateStudioImage provides a mock function with given fields: studioID, image
func (_m *StudioReaderWriter) UpdateStudioImage(studioID int, image []byte) error {
// UpdateImage provides a mock function with given fields: studioID, image
func (_m *StudioReaderWriter) UpdateImage(studioID int, image []byte) error {
ret := _m.Called(studioID, image)

var r0 error
@@ -209,3 +378,17 @@ func (_m *StudioReaderWriter) UpdateStudioImage(studioID int, image []byte) erro

return r0
}

// UpdateStashIDs provides a mock function with given fields: studioID, stashIDs
func (_m *StudioReaderWriter) UpdateStashIDs(studioID int, stashIDs []models.StashID) error {
ret := _m.Called(studioID, stashIDs)

var r0 error
if rf, ok := ret.Get(0).(func(int, []models.StashID) error); ok {
r0 = rf(studioID, stashIDs)
} else {
r0 = ret.Error(0)
}

return r0
}

@@ -35,6 +35,50 @@ func (_m *TagReaderWriter) All() ([]*models.Tag, error) {
return r0, r1
}

// AllSlim provides a mock function with given fields:
func (_m *TagReaderWriter) AllSlim() ([]*models.Tag, error) {
ret := _m.Called()

var r0 []*models.Tag
if rf, ok := ret.Get(0).(func() []*models.Tag); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Tag)
}
}

var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Count provides a mock function with given fields:
func (_m *TagReaderWriter) Count() (int, error) {
ret := _m.Called()

var r0 int
if rf, ok := ret.Get(0).(func() int); ok {
r0 = rf()
} else {
r0 = ret.Get(0).(int)
}

var r1 error
if rf, ok := ret.Get(1).(func() error); ok {
r1 = rf()
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// Create provides a mock function with given fields: newTag
func (_m *TagReaderWriter) Create(newTag models.Tag) (*models.Tag, error) {
ret := _m.Called(newTag)
@@ -58,6 +102,34 @@ func (_m *TagReaderWriter) Create(newTag models.Tag) (*models.Tag, error) {
return r0, r1
}

// Destroy provides a mock function with given fields: id
func (_m *TagReaderWriter) Destroy(id int) error {
ret := _m.Called(id)

var r0 error
if rf, ok := ret.Get(0).(func(int) error); ok {
r0 = rf(id)
} else {
r0 = ret.Error(0)
}

return r0
}

// DestroyImage provides a mock function with given fields: tagID
func (_m *TagReaderWriter) DestroyImage(tagID int) error {
ret := _m.Called(tagID)

var r0 error
if rf, ok := ret.Get(0).(func(int) error); ok {
r0 = rf(tagID)
} else {
r0 = ret.Error(0)
}

return r0
}

// Find provides a mock function with given fields: id
func (_m *TagReaderWriter) Find(id int) (*models.Tag, error) {
ret := _m.Called(id)
@@ -242,8 +314,8 @@ func (_m *TagReaderWriter) FindMany(ids []int) ([]*models.Tag, error) {
return r0, r1
}

// GetTagImage provides a mock function with given fields: tagID
func (_m *TagReaderWriter) GetTagImage(tagID int) ([]byte, error) {
// GetImage provides a mock function with given fields: tagID
func (_m *TagReaderWriter) GetImage(tagID int) ([]byte, error) {
ret := _m.Called(tagID)

var r0 []byte
@@ -265,6 +337,36 @@ func (_m *TagReaderWriter) GetTagImage(tagID int) ([]byte, error) {
return r0, r1
}

// Query provides a mock function with given fields: tagFilter, findFilter
func (_m *TagReaderWriter) Query(tagFilter *models.TagFilterType, findFilter *models.FindFilterType) ([]*models.Tag, int, error) {
ret := _m.Called(tagFilter, findFilter)

var r0 []*models.Tag
if rf, ok := ret.Get(0).(func(*models.TagFilterType, *models.FindFilterType) []*models.Tag); ok {
r0 = rf(tagFilter, findFilter)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*models.Tag)
}
}

var r1 int
if rf, ok := ret.Get(1).(func(*models.TagFilterType, *models.FindFilterType) int); ok {
r1 = rf(tagFilter, findFilter)
} else {
r1 = ret.Get(1).(int)
}

var r2 error
if rf, ok := ret.Get(2).(func(*models.TagFilterType, *models.FindFilterType) error); ok {
r2 = rf(tagFilter, findFilter)
} else {
r2 = ret.Error(2)
}

return r0, r1, r2
}

// Update provides a mock function with given fields: updatedTag
func (_m *TagReaderWriter) Update(updatedTag models.Tag) (*models.Tag, error) {
ret := _m.Called(updatedTag)
@@ -288,8 +390,8 @@ func (_m *TagReaderWriter) Update(updatedTag models.Tag) (*models.Tag, error) {
return r0, r1
}

// UpdateTagImage provides a mock function with given fields: tagID, image
func (_m *TagReaderWriter) UpdateTagImage(tagID int, image []byte) error {
// UpdateImage provides a mock function with given fields: tagID, image
func (_m *TagReaderWriter) UpdateImage(tagID int, image []byte) error {
ret := _m.Called(tagID, image)

var r0 error

pkg/models/mocks/transaction.go (new file, 117 lines)
@@ -0,0 +1,117 @@
|
||||
package mocks
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
models "github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
type TransactionManager struct {
|
||||
gallery models.GalleryReaderWriter
|
||||
image models.ImageReaderWriter
|
||||
movie models.MovieReaderWriter
|
||||
performer models.PerformerReaderWriter
|
||||
scene models.SceneReaderWriter
|
||||
sceneMarker models.SceneMarkerReaderWriter
|
||||
scrapedItem models.ScrapedItemReaderWriter
|
||||
studio models.StudioReaderWriter
|
||||
tag models.TagReaderWriter
|
||||
}
|
||||
|
||||
func NewTransactionManager() *TransactionManager {
|
||||
return &TransactionManager{
|
||||
gallery: &GalleryReaderWriter{},
|
||||
image: &ImageReaderWriter{},
|
||||
movie: &MovieReaderWriter{},
|
||||
performer: &PerformerReaderWriter{},
|
||||
scene: &SceneReaderWriter{},
|
||||
sceneMarker: &SceneMarkerReaderWriter{},
|
||||
scrapedItem: &ScrapedItemReaderWriter{},
|
||||
studio: &StudioReaderWriter{},
|
||||
tag: &TagReaderWriter{},
|
||||
}
|
||||
}
|
||||
|
||||
func (t *TransactionManager) WithTxn(ctx context.Context, fn func(r models.Repository) error) error {
|
||||
return fn(t)
|
||||
}
|
||||
|
||||
func (t *TransactionManager) Gallery() models.GalleryReaderWriter {
|
||||
return t.gallery
|
||||
}
|
||||
|
||||
func (t *TransactionManager) Image() models.ImageReaderWriter {
|
||||
return t.image
|
||||
}
|
||||
|
||||
func (t *TransactionManager) Movie() models.MovieReaderWriter {
|
||||
return t.movie
|
||||
}
|
||||
|
||||
func (t *TransactionManager) Performer() models.PerformerReaderWriter {
|
||||
return t.performer
|
||||
}
|
||||
|
||||
func (t *TransactionManager) SceneMarker() models.SceneMarkerReaderWriter {
|
||||
return t.sceneMarker
|
||||
}
|
||||
|
||||
func (t *TransactionManager) Scene() models.SceneReaderWriter {
|
||||
return t.scene
|
||||
}
|
||||
|
||||
func (t *TransactionManager) ScrapedItem() models.ScrapedItemReaderWriter {
|
||||
return t.scrapedItem
|
||||
}
|
||||
|
||||
func (t *TransactionManager) Studio() models.StudioReaderWriter {
|
||||
return t.studio
|
||||
}
|
||||
|
||||
func (t *TransactionManager) Tag() models.TagReaderWriter {
|
||||
return t.tag
|
||||
}
|
||||
|
||||
type ReadTransaction struct {
|
||||
t *TransactionManager
|
||||
}
|
||||
|
||||
func (t *TransactionManager) WithReadTxn(ctx context.Context, fn func(r models.ReaderRepository) error) error {
|
||||
return fn(&ReadTransaction{t: t})
|
||||
}
|
||||
|
||||
func (r *ReadTransaction) Gallery() models.GalleryReader {
|
||||
return r.t.gallery
|
||||
}
|
||||
|
||||
func (r *ReadTransaction) Image() models.ImageReader {
|
||||
return r.t.image
|
||||
}
|
||||
|
||||
func (r *ReadTransaction) Movie() models.MovieReader {
|
||||
return r.t.movie
|
||||
}
|
||||
|
||||
func (r *ReadTransaction) Performer() models.PerformerReader {
|
||||
return r.t.performer
|
||||
}
|
||||
|
||||
func (r *ReadTransaction) SceneMarker() models.SceneMarkerReader {
|
||||
return r.t.sceneMarker
|
||||
}
|
||||
|
||||
func (r *ReadTransaction) Scene() models.SceneReader {
|
||||
return r.t.scene
|
||||
}
|
||||
|
||||
func (r *ReadTransaction) ScrapedItem() models.ScrapedItemReader {
|
||||
return r.t.scrapedItem
|
||||
}
|
||||
|
||||
func (r *ReadTransaction) Studio() models.StudioReader {
|
||||
return r.t.studio
|
||||
}
|
||||
|
||||
func (r *ReadTransaction) Tag() models.TagReader {
|
||||
return r.t.tag
|
||||
}
|
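Because WithTxn and WithReadTxn simply invoke the callback against the mock-backed repository, transaction-wrapped code can be unit tested without a database. A small illustrative sketch (same package; the test name is assumed):

package mocks

import (
	"context"
	"testing"

	"github.com/stashapp/stash/pkg/models"
)

func TestWithReadTxnPassthrough(t *testing.T) {
	m := NewTransactionManager()

	called := false
	err := m.WithReadTxn(context.Background(), func(r models.ReaderRepository) error {
		// the reader repository handed to the callback is backed by the mocks above
		called = true
		if r.Tag() == nil {
			t.Error("expected a mock-backed tag reader")
		}
		return nil
	})

	if err != nil {
		t.Errorf("unexpected error: %v", err)
	}
	if !called {
		t.Error("callback was not invoked")
	}
}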
||||
@@ -42,3 +42,13 @@ type GalleryPartial struct {
|
||||
}
|
||||
|
||||
const DefaultGthumbWidth int = 640
|
||||
|
||||
type Galleries []*Gallery
|
||||
|
||||
func (g *Galleries) Append(o interface{}) {
|
||||
*g = append(*g, o.(*Gallery))
|
||||
}
|
||||
|
||||
func (g *Galleries) New() interface{} {
|
||||
return &Gallery{}
|
||||
}
|
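Galleries, like the other slice types added below, exposes an Append/New pair so that shared query or scan helpers can build a typed result slice without knowing the concrete element type. A minimal sketch of such a consumer — the builder interface and helper are illustrative, not part of this change:

package example

import "github.com/stashapp/stash/pkg/models"

// builder is a hypothetical view of the Append/New pair shared by these slice types.
type builder interface {
	New() interface{}
	Append(o interface{})
}

// fill allocates n fresh elements via New and appends them, mirroring a row-scan loop.
func fill(b builder, n int) {
	for i := 0; i < n; i++ {
		b.Append(b.New())
	}
}

func exampleFill() models.Galleries {
	var galleries models.Galleries
	fill(&galleries, 3) // three empty *models.Gallery entries
	return galleries
}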
||||
|
||||
@@ -46,3 +46,13 @@ type ImageFileType struct {
|
||||
Width *int `graphql:"width" json:"width"`
|
||||
Height *int `graphql:"height" json:"height"`
|
||||
}
|
||||
|
||||
type Images []*Image
|
||||
|
||||
func (i *Images) Append(o interface{}) {
|
||||
*i = append(*i, o.(*Image))
|
||||
}
|
||||
|
||||
func (i *Images) New() interface{} {
|
||||
return &Image{}
|
||||
}
|
||||
|
||||
@@ -2,53 +2,13 @@ package models
|
||||
|
||||
import "database/sql"
|
||||
|
||||
type PerformersScenes struct {
|
||||
PerformerID int `db:"performer_id" json:"performer_id"`
|
||||
SceneID int `db:"scene_id" json:"scene_id"`
|
||||
}
|
||||
|
||||
type MoviesScenes struct {
|
||||
MovieID int `db:"movie_id" json:"movie_id"`
|
||||
SceneID int `db:"scene_id" json:"scene_id"`
|
||||
SceneIndex sql.NullInt64 `db:"scene_index" json:"scene_index"`
|
||||
}
|
||||
|
||||
type ScenesTags struct {
|
||||
SceneID int `db:"scene_id" json:"scene_id"`
|
||||
TagID int `db:"tag_id" json:"tag_id"`
|
||||
}
|
||||
|
||||
type SceneMarkersTags struct {
|
||||
SceneMarkerID int `db:"scene_marker_id" json:"scene_marker_id"`
|
||||
TagID int `db:"tag_id" json:"tag_id"`
|
||||
}
|
||||
|
||||
type StashID struct {
|
||||
StashID string `db:"stash_id" json:"stash_id"`
|
||||
Endpoint string `db:"endpoint" json:"endpoint"`
|
||||
}
|
||||
|
||||
type PerformersImages struct {
|
||||
PerformerID int `db:"performer_id" json:"performer_id"`
|
||||
ImageID int `db:"image_id" json:"image_id"`
|
||||
}
|
||||
|
||||
type ImagesTags struct {
|
||||
ImageID int `db:"image_id" json:"image_id"`
|
||||
TagID int `db:"tag_id" json:"tag_id"`
|
||||
}
|
||||
|
||||
type GalleriesImages struct {
|
||||
GalleryID int `db:"gallery_id" json:"gallery_id"`
|
||||
ImageID int `db:"image_id" json:"image_id"`
|
||||
}
|
||||
|
||||
type PerformersGalleries struct {
|
||||
PerformerID int `db:"performer_id" json:"performer_id"`
|
||||
GalleryID int `db:"gallery_id" json:"gallery_id"`
|
||||
}
|
||||
|
||||
type GalleriesTags struct {
|
||||
TagID int `db:"tag_id" json:"tag_id"`
|
||||
GalleryID int `db:"gallery_id" json:"gallery_id"`
|
||||
}
|
||||
|
||||
@@ -50,3 +50,13 @@ func NewMovie(name string) *Movie {
|
||||
UpdatedAt: SQLiteTimestamp{Timestamp: currentTime},
|
||||
}
|
||||
}
|
||||
|
||||
type Movies []*Movie
|
||||
|
||||
func (m *Movies) Append(o interface{}) {
|
||||
*m = append(*m, o.(*Movie))
|
||||
}
|
||||
|
||||
func (m *Movies) New() interface{} {
|
||||
return &Movie{}
|
||||
}
|
||||
|
||||
@@ -65,3 +65,13 @@ func NewPerformer(name string) *Performer {
|
||||
UpdatedAt: SQLiteTimestamp{Timestamp: currentTime},
|
||||
}
|
||||
}
|
||||
|
||||
type Performers []*Performer
|
||||
|
||||
func (p *Performers) Append(o interface{}) {
|
||||
*p = append(*p, o.(*Performer))
|
||||
}
|
||||
|
||||
func (p *Performers) New() interface{} {
|
||||
return &Performer{}
|
||||
}
|
||||
|
||||
@@ -95,3 +95,13 @@ type SceneFileType struct {
|
||||
Framerate *float64 `graphql:"framerate" json:"framerate"`
|
||||
Bitrate *int `graphql:"bitrate" json:"bitrate"`
|
||||
}
|
||||
|
||||
type Scenes []*Scene
|
||||
|
||||
func (s *Scenes) Append(o interface{}) {
|
||||
*s = append(*s, o.(*Scene))
|
||||
}
|
||||
|
||||
func (m *Scenes) New() interface{} {
|
||||
return &Scene{}
|
||||
}
|
||||
|
||||
@@ -13,3 +13,13 @@ type SceneMarker struct {
|
||||
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||
}
|
||||
|
||||
type SceneMarkers []*SceneMarker
|
||||
|
||||
func (m *SceneMarkers) Append(o interface{}) {
|
||||
*m = append(*m, o.(*SceneMarker))
|
||||
}
|
||||
|
||||
func (m *SceneMarkers) New() interface{} {
|
||||
return &SceneMarker{}
|
||||
}
|
||||
|
||||
@@ -174,3 +174,13 @@ type ScrapedMovieStudio struct {
|
||||
Name string `graphql:"name" json:"name"`
|
||||
URL *string `graphql:"url" json:"url"`
|
||||
}
|
||||
|
||||
type ScrapedItems []*ScrapedItem
|
||||
|
||||
func (s *ScrapedItems) Append(o interface{}) {
|
||||
*s = append(*s, o.(*ScrapedItem))
|
||||
}
|
||||
|
||||
func (s *ScrapedItems) New() interface{} {
|
||||
return &ScrapedItem{}
|
||||
}
|
||||
|
||||
@@ -38,3 +38,13 @@ func NewStudio(name string) *Studio {
|
||||
UpdatedAt: SQLiteTimestamp{Timestamp: currentTime},
|
||||
}
|
||||
}
|
||||
|
||||
type Studios []*Studio
|
||||
|
||||
func (s *Studios) Append(o interface{}) {
|
||||
*s = append(*s, o.(*Studio))
|
||||
}
|
||||
|
||||
func (s *Studios) New() interface{} {
|
||||
return &Studio{}
|
||||
}
|
||||
|
||||
@@ -18,6 +18,16 @@ func NewTag(name string) *Tag {
|
||||
}
|
||||
}
|
||||
|
||||
type Tags []*Tag
|
||||
|
||||
func (t *Tags) Append(o interface{}) {
|
||||
*t = append(*t, o.(*Tag))
|
||||
}
|
||||
|
||||
func (t *Tags) New() interface{} {
|
||||
return &Tag{}
|
||||
}
|
||||
|
||||
// Original Tag image from: https://fontawesome.com/icons/tag?style=solid
|
||||
// Modified to change color and rotate
|
||||
// Licensed under CC Attribution 4.0: https://fontawesome.com/license
|
||||
|
||||
@@ -1,9 +1,5 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"github.com/jmoiron/sqlx"
|
||||
)
|
||||
|
||||
type MovieReader interface {
|
||||
Find(id int) (*Movie, error)
|
||||
FindMany(ids []int) ([]*Movie, error)
|
||||
@@ -11,8 +7,9 @@ type MovieReader interface {
|
||||
FindByName(name string, nocase bool) (*Movie, error)
|
||||
FindByNames(names []string, nocase bool) ([]*Movie, error)
|
||||
All() ([]*Movie, error)
|
||||
// AllSlim() ([]*Movie, error)
|
||||
// Query(movieFilter *MovieFilterType, findFilter *FindFilterType) ([]*Movie, int)
|
||||
Count() (int, error)
|
||||
AllSlim() ([]*Movie, error)
|
||||
Query(movieFilter *MovieFilterType, findFilter *FindFilterType) ([]*Movie, int, error)
|
||||
GetFrontImage(movieID int) ([]byte, error)
|
||||
GetBackImage(movieID int) ([]byte, error)
|
||||
}
|
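Query now reports the underlying error as a third return value instead of discarding it. A brief caller sketch under that assumption (the helper is hypothetical, and it assumes the reader repository exposes Movie() as the mocks above suggest):

package example

import "github.com/stashapp/stash/pkg/models"

// movieCount is a hypothetical helper that surfaces the new error return from Query.
func movieCount(r models.ReaderRepository, filter *models.MovieFilterType) (int, error) {
	_, count, err := r.Movie().Query(filter, nil)
	if err != nil {
		return 0, err
	}
	return count, nil
}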
||||
@@ -21,68 +18,12 @@ type MovieWriter interface {
|
||||
Create(newMovie Movie) (*Movie, error)
|
||||
Update(updatedMovie MoviePartial) (*Movie, error)
|
||||
UpdateFull(updatedMovie Movie) (*Movie, error)
|
||||
// Destroy(id string) error
|
||||
UpdateMovieImages(movieID int, frontImage []byte, backImage []byte) error
|
||||
// DestroyMovieImages(movieID int) error
|
||||
Destroy(id int) error
|
||||
UpdateImages(movieID int, frontImage []byte, backImage []byte) error
|
||||
DestroyImages(movieID int) error
|
||||
}
|
||||
|
||||
type MovieReaderWriter interface {
|
||||
MovieReader
|
||||
MovieWriter
|
||||
}
|
||||
|
||||
func NewMovieReaderWriter(tx *sqlx.Tx) MovieReaderWriter {
|
||||
return &movieReaderWriter{
|
||||
tx: tx,
|
||||
qb: NewMovieQueryBuilder(),
|
||||
}
|
||||
}
|
||||
|
||||
type movieReaderWriter struct {
|
||||
tx *sqlx.Tx
|
||||
qb MovieQueryBuilder
|
||||
}
|
||||
|
||||
func (t *movieReaderWriter) Find(id int) (*Movie, error) {
|
||||
return t.qb.Find(id, t.tx)
|
||||
}
|
||||
|
||||
func (t *movieReaderWriter) FindMany(ids []int) ([]*Movie, error) {
|
||||
return t.qb.FindMany(ids)
|
||||
}
|
||||
|
||||
func (t *movieReaderWriter) FindByName(name string, nocase bool) (*Movie, error) {
|
||||
return t.qb.FindByName(name, t.tx, nocase)
|
||||
}
|
||||
|
||||
func (t *movieReaderWriter) FindByNames(names []string, nocase bool) ([]*Movie, error) {
|
||||
return t.qb.FindByNames(names, t.tx, nocase)
|
||||
}
|
||||
|
||||
func (t *movieReaderWriter) All() ([]*Movie, error) {
|
||||
return t.qb.All()
|
||||
}
|
||||
|
||||
func (t *movieReaderWriter) GetFrontImage(movieID int) ([]byte, error) {
|
||||
return t.qb.GetFrontImage(movieID, t.tx)
|
||||
}
|
||||
|
||||
func (t *movieReaderWriter) GetBackImage(movieID int) ([]byte, error) {
|
||||
return t.qb.GetBackImage(movieID, t.tx)
|
||||
}
|
||||
|
||||
func (t *movieReaderWriter) Create(newMovie Movie) (*Movie, error) {
|
||||
return t.qb.Create(newMovie, t.tx)
|
||||
}
|
||||
|
||||
func (t *movieReaderWriter) Update(updatedMovie MoviePartial) (*Movie, error) {
|
||||
return t.qb.Update(updatedMovie, t.tx)
|
||||
}
|
||||
|
||||
func (t *movieReaderWriter) UpdateFull(updatedMovie Movie) (*Movie, error) {
|
||||
return t.qb.UpdateFull(updatedMovie, t.tx)
|
||||
}
|
||||
|
||||
func (t *movieReaderWriter) UpdateMovieImages(movieID int, frontImage []byte, backImage []byte) error {
|
||||
return t.qb.UpdateMovieImages(movieID, frontImage, backImage, t.tx)
|
||||
}
|
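With the sqlx-backed wrapper above removed, callers reach the movie reader/writer through the transaction manager's repository instead of constructing it around a *sqlx.Tx. An illustrative caller under that assumption (the helper itself is hypothetical):

package example

import (
	"context"
	"database/sql"
	"fmt"

	"github.com/stashapp/stash/pkg/models"
)

// renameMovie is a hypothetical helper showing the transaction-scoped repository.
func renameMovie(ctx context.Context, txnManager models.TransactionManager, id int, name string) error {
	return txnManager.WithTxn(ctx, func(repo models.Repository) error {
		qb := repo.Movie()

		movie, err := qb.Find(id)
		if err != nil {
			return err
		}
		if movie == nil {
			return fmt.Errorf("movie with id %d not found", id)
		}

		// update the name and persist the full record
		movie.Name = sql.NullString{String: name, Valid: true}
		_, err = qb.UpdateFull(*movie)
		return err
	})
}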
||||
|
||||
@@ -1,94 +1,32 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"github.com/jmoiron/sqlx"
|
||||
)
|
||||
|
||||
type PerformerReader interface {
|
||||
// Find(id int) (*Performer, error)
|
||||
Find(id int) (*Performer, error)
|
||||
FindMany(ids []int) ([]*Performer, error)
|
||||
FindBySceneID(sceneID int) ([]*Performer, error)
|
||||
FindNamesBySceneID(sceneID int) ([]*Performer, error)
|
||||
FindByImageID(imageID int) ([]*Performer, error)
|
||||
FindByGalleryID(galleryID int) ([]*Performer, error)
|
||||
FindByNames(names []string, nocase bool) ([]*Performer, error)
|
||||
// Count() (int, error)
|
||||
Count() (int, error)
|
||||
All() ([]*Performer, error)
|
||||
// AllSlim() ([]*Performer, error)
|
||||
// Query(performerFilter *PerformerFilterType, findFilter *FindFilterType) ([]*Performer, int)
|
||||
GetPerformerImage(performerID int) ([]byte, error)
|
||||
AllSlim() ([]*Performer, error)
|
||||
Query(performerFilter *PerformerFilterType, findFilter *FindFilterType) ([]*Performer, int, error)
|
||||
GetImage(performerID int) ([]byte, error)
|
||||
GetStashIDs(performerID int) ([]*StashID, error)
|
||||
}
|
||||
|
||||
type PerformerWriter interface {
|
||||
Create(newPerformer Performer) (*Performer, error)
|
||||
Update(updatedPerformer PerformerPartial) (*Performer, error)
|
||||
UpdateFull(updatedPerformer Performer) (*Performer, error)
|
||||
// Destroy(id string) error
|
||||
UpdatePerformerImage(performerID int, image []byte) error
|
||||
// DestroyPerformerImage(performerID int) error
|
||||
Destroy(id int) error
|
||||
UpdateImage(performerID int, image []byte) error
|
||||
DestroyImage(performerID int) error
|
||||
UpdateStashIDs(performerID int, stashIDs []StashID) error
|
||||
}
|
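UpdateStashIDs sets the stored stash IDs for a performer. A short, illustrative caller using the write transaction (the helper name and endpoint value are assumed):

package example

import (
	"context"

	"github.com/stashapp/stash/pkg/models"
)

// setPerformerStashID is a hypothetical helper that overwrites a performer's stash IDs.
func setPerformerStashID(ctx context.Context, txnManager models.TransactionManager, performerID int, endpoint, id string) error {
	return txnManager.WithTxn(ctx, func(repo models.Repository) error {
		return repo.Performer().UpdateStashIDs(performerID, []models.StashID{
			{StashID: id, Endpoint: endpoint},
		})
	})
}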
||||
|
||||
type PerformerReaderWriter interface {
|
||||
PerformerReader
|
||||
PerformerWriter
|
||||
}
|
||||
|
||||
func NewPerformerReaderWriter(tx *sqlx.Tx) PerformerReaderWriter {
|
||||
return &performerReaderWriter{
|
||||
tx: tx,
|
||||
qb: NewPerformerQueryBuilder(),
|
||||
}
|
||||
}
|
||||
|
||||
type performerReaderWriter struct {
|
||||
tx *sqlx.Tx
|
||||
qb PerformerQueryBuilder
|
||||
}
|
||||
|
||||
func (t *performerReaderWriter) FindMany(ids []int) ([]*Performer, error) {
|
||||
return t.qb.FindMany(ids)
|
||||
}
|
||||
|
||||
func (t *performerReaderWriter) FindByNames(names []string, nocase bool) ([]*Performer, error) {
|
||||
return t.qb.FindByNames(names, t.tx, nocase)
|
||||
}
|
||||
|
||||
func (t *performerReaderWriter) All() ([]*Performer, error) {
|
||||
return t.qb.All()
|
||||
}
|
||||
|
||||
func (t *performerReaderWriter) GetPerformerImage(performerID int) ([]byte, error) {
|
||||
return t.qb.GetPerformerImage(performerID, t.tx)
|
||||
}
|
||||
|
||||
func (t *performerReaderWriter) FindBySceneID(id int) ([]*Performer, error) {
|
||||
return t.qb.FindBySceneID(id, t.tx)
|
||||
}
|
||||
|
||||
func (t *performerReaderWriter) FindNamesBySceneID(sceneID int) ([]*Performer, error) {
|
||||
return t.qb.FindNameBySceneID(sceneID, t.tx)
|
||||
}
|
||||
|
||||
func (t *performerReaderWriter) FindByImageID(id int) ([]*Performer, error) {
|
||||
return t.qb.FindByImageID(id, t.tx)
|
||||
}
|
||||
|
||||
func (t *performerReaderWriter) FindByGalleryID(id int) ([]*Performer, error) {
|
||||
return t.qb.FindByGalleryID(id, t.tx)
|
||||
}
|
||||
|
||||
func (t *performerReaderWriter) Create(newPerformer Performer) (*Performer, error) {
|
||||
return t.qb.Create(newPerformer, t.tx)
|
||||
}
|
||||
|
||||
func (t *performerReaderWriter) Update(updatedPerformer PerformerPartial) (*Performer, error) {
|
||||
return t.qb.Update(updatedPerformer, t.tx)
|
||||
}
|
||||
|
||||
func (t *performerReaderWriter) UpdateFull(updatedPerformer Performer) (*Performer, error) {
|
||||
return t.qb.UpdateFull(updatedPerformer, t.tx)
|
||||
}
|
||||
|
||||
func (t *performerReaderWriter) UpdatePerformerImage(performerID int, image []byte) error {
|
||||
return t.qb.UpdatePerformerImage(performerID, image, t.tx)
|
||||
}
|
||||
|
||||
@@ -1,225 +0,0 @@
|
||||
// +build integration
|
||||
|
||||
package models_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func TestGalleryFind(t *testing.T) {
|
||||
gqb := models.NewGalleryQueryBuilder()
|
||||
|
||||
const galleryIdx = 0
|
||||
gallery, err := gqb.Find(galleryIDs[galleryIdx], nil)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding gallery: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Equal(t, getGalleryStringValue(galleryIdx, "Path"), gallery.Path.String)
|
||||
|
||||
gallery, err = gqb.Find(0, nil)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding gallery: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Nil(t, gallery)
|
||||
}
|
||||
|
||||
func TestGalleryFindByChecksum(t *testing.T) {
|
||||
gqb := models.NewGalleryQueryBuilder()
|
||||
|
||||
const galleryIdx = 0
|
||||
galleryChecksum := getGalleryStringValue(galleryIdx, "Checksum")
|
||||
gallery, err := gqb.FindByChecksum(galleryChecksum, nil)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding gallery: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Equal(t, getGalleryStringValue(galleryIdx, "Path"), gallery.Path.String)
|
||||
|
||||
galleryChecksum = "not exist"
|
||||
gallery, err = gqb.FindByChecksum(galleryChecksum, nil)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding gallery: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Nil(t, gallery)
|
||||
}
|
||||
|
||||
func TestGalleryFindByPath(t *testing.T) {
|
||||
gqb := models.NewGalleryQueryBuilder()
|
||||
|
||||
const galleryIdx = 0
|
||||
galleryPath := getGalleryStringValue(galleryIdx, "Path")
|
||||
gallery, err := gqb.FindByPath(galleryPath)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding gallery: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Equal(t, galleryPath, gallery.Path.String)
|
||||
|
||||
galleryPath = "not exist"
|
||||
gallery, err = gqb.FindByPath(galleryPath)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding gallery: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Nil(t, gallery)
|
||||
}
|
||||
|
||||
func TestGalleryFindBySceneID(t *testing.T) {
|
||||
gqb := models.NewGalleryQueryBuilder()
|
||||
|
||||
sceneID := sceneIDs[sceneIdxWithGallery]
|
||||
gallery, err := gqb.FindBySceneID(sceneID, nil)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding gallery: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Equal(t, getGalleryStringValue(galleryIdxWithScene, "Path"), gallery.Path.String)
|
||||
|
||||
gallery, err = gqb.FindBySceneID(0, nil)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding gallery: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Nil(t, gallery)
|
||||
}
|
||||
|
||||
func TestGalleryQueryQ(t *testing.T) {
|
||||
const galleryIdx = 0
|
||||
|
||||
q := getGalleryStringValue(galleryIdx, pathField)
|
||||
|
||||
sqb := models.NewGalleryQueryBuilder()
|
||||
|
||||
galleryQueryQ(t, sqb, q, galleryIdx)
|
||||
}
|
||||
|
||||
func galleryQueryQ(t *testing.T, qb models.GalleryQueryBuilder, q string, expectedGalleryIdx int) {
|
||||
filter := models.FindFilterType{
|
||||
Q: &q,
|
||||
}
|
||||
galleries, _ := qb.Query(nil, &filter)
|
||||
|
||||
assert.Len(t, galleries, 1)
|
||||
gallery := galleries[0]
|
||||
assert.Equal(t, galleryIDs[expectedGalleryIdx], gallery.ID)
|
||||
|
||||
// no Q should return all results
|
||||
filter.Q = nil
|
||||
galleries, _ = qb.Query(nil, &filter)
|
||||
|
||||
assert.Len(t, galleries, totalGalleries)
|
||||
}
|
||||
|
||||
func TestGalleryQueryPath(t *testing.T) {
|
||||
const galleryIdx = 1
|
||||
galleryPath := getGalleryStringValue(galleryIdx, "Path")
|
||||
|
||||
pathCriterion := models.StringCriterionInput{
|
||||
Value: galleryPath,
|
||||
Modifier: models.CriterionModifierEquals,
|
||||
}
|
||||
|
||||
verifyGalleriesPath(t, pathCriterion)
|
||||
|
||||
pathCriterion.Modifier = models.CriterionModifierNotEquals
|
||||
verifyGalleriesPath(t, pathCriterion)
|
||||
}
|
||||
|
||||
func verifyGalleriesPath(t *testing.T, pathCriterion models.StringCriterionInput) {
|
||||
sqb := models.NewGalleryQueryBuilder()
|
||||
galleryFilter := models.GalleryFilterType{
|
||||
Path: &pathCriterion,
|
||||
}
|
||||
|
||||
galleries, _ := sqb.Query(&galleryFilter, nil)
|
||||
|
||||
for _, gallery := range galleries {
|
||||
verifyNullString(t, gallery.Path, pathCriterion)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGalleryQueryRating(t *testing.T) {
|
||||
const rating = 3
|
||||
ratingCriterion := models.IntCriterionInput{
|
||||
Value: rating,
|
||||
Modifier: models.CriterionModifierEquals,
|
||||
}
|
||||
|
||||
verifyGalleriesRating(t, ratingCriterion)
|
||||
|
||||
ratingCriterion.Modifier = models.CriterionModifierNotEquals
|
||||
verifyGalleriesRating(t, ratingCriterion)
|
||||
|
||||
ratingCriterion.Modifier = models.CriterionModifierGreaterThan
|
||||
verifyGalleriesRating(t, ratingCriterion)
|
||||
|
||||
ratingCriterion.Modifier = models.CriterionModifierLessThan
|
||||
verifyGalleriesRating(t, ratingCriterion)
|
||||
|
||||
ratingCriterion.Modifier = models.CriterionModifierIsNull
|
||||
verifyGalleriesRating(t, ratingCriterion)
|
||||
|
||||
ratingCriterion.Modifier = models.CriterionModifierNotNull
|
||||
verifyGalleriesRating(t, ratingCriterion)
|
||||
}
|
||||
|
||||
func verifyGalleriesRating(t *testing.T, ratingCriterion models.IntCriterionInput) {
|
||||
sqb := models.NewGalleryQueryBuilder()
|
||||
galleryFilter := models.GalleryFilterType{
|
||||
Rating: &ratingCriterion,
|
||||
}
|
||||
|
||||
galleries, _ := sqb.Query(&galleryFilter, nil)
|
||||
|
||||
for _, gallery := range galleries {
|
||||
verifyInt64(t, gallery.Rating, ratingCriterion)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGalleryQueryIsMissingScene(t *testing.T) {
|
||||
qb := models.NewGalleryQueryBuilder()
|
||||
isMissing := "scene"
|
||||
galleryFilter := models.GalleryFilterType{
|
||||
IsMissing: &isMissing,
|
||||
}
|
||||
|
||||
q := getGalleryStringValue(galleryIdxWithScene, titleField)
|
||||
findFilter := models.FindFilterType{
|
||||
Q: &q,
|
||||
}
|
||||
|
||||
galleries, _ := qb.Query(&galleryFilter, &findFilter)
|
||||
|
||||
assert.Len(t, galleries, 0)
|
||||
|
||||
findFilter.Q = nil
|
||||
galleries, _ = qb.Query(&galleryFilter, &findFilter)
|
||||
|
||||
// ensure none of the ids equal the one with the gallery
|
||||
for _, gallery := range galleries {
|
||||
assert.NotEqual(t, galleryIDs[galleryIdxWithScene], gallery.ID)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO ValidGalleriesForScenePath
|
||||
// TODO Count
|
||||
// TODO All
|
||||
// TODO Query
|
||||
// TODO Update
|
||||
// TODO Destroy
|
||||
// TODO ClearGalleryId
|
||||
@@ -1,652 +0,0 @@
|
||||
// +build integration
|
||||
|
||||
package models_test
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func TestImageFind(t *testing.T) {
|
||||
// assume that the first image is imageWithGalleryPath
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
|
||||
const imageIdx = 0
|
||||
imageID := imageIDs[imageIdx]
|
||||
image, err := sqb.Find(imageID)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding image: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Equal(t, getImageStringValue(imageIdx, "Path"), image.Path)
|
||||
|
||||
imageID = 0
|
||||
image, err = sqb.Find(imageID)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding image: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Nil(t, image)
|
||||
}
|
||||
|
||||
func TestImageFindByPath(t *testing.T) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
|
||||
const imageIdx = 1
|
||||
imagePath := getImageStringValue(imageIdx, "Path")
|
||||
image, err := sqb.FindByPath(imagePath)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding image: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Equal(t, imageIDs[imageIdx], image.ID)
|
||||
assert.Equal(t, imagePath, image.Path)
|
||||
|
||||
imagePath = "not exist"
|
||||
image, err = sqb.FindByPath(imagePath)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding image: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Nil(t, image)
|
||||
}
|
||||
|
||||
func TestImageCountByPerformerID(t *testing.T) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
count, err := sqb.CountByPerformerID(performerIDs[performerIdxWithImage])
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error counting images: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Equal(t, 1, count)
|
||||
|
||||
count, err = sqb.CountByPerformerID(0)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error counting images: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Equal(t, 0, count)
|
||||
}
|
||||
|
||||
func TestImageQueryQ(t *testing.T) {
|
||||
const imageIdx = 2
|
||||
|
||||
q := getImageStringValue(imageIdx, titleField)
|
||||
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
|
||||
imageQueryQ(t, sqb, q, imageIdx)
|
||||
}
|
||||
|
||||
func imageQueryQ(t *testing.T, sqb models.ImageQueryBuilder, q string, expectedImageIdx int) {
|
||||
filter := models.FindFilterType{
|
||||
Q: &q,
|
||||
}
|
||||
images, _ := sqb.Query(nil, &filter)
|
||||
|
||||
assert.Len(t, images, 1)
|
||||
image := images[0]
|
||||
assert.Equal(t, imageIDs[expectedImageIdx], image.ID)
|
||||
|
||||
// no Q should return all results
|
||||
filter.Q = nil
|
||||
images, _ = sqb.Query(nil, &filter)
|
||||
|
||||
assert.Len(t, images, totalImages)
|
||||
}
|
||||
|
||||
func TestImageQueryPath(t *testing.T) {
|
||||
const imageIdx = 1
|
||||
imagePath := getImageStringValue(imageIdx, "Path")
|
||||
|
||||
pathCriterion := models.StringCriterionInput{
|
||||
Value: imagePath,
|
||||
Modifier: models.CriterionModifierEquals,
|
||||
}
|
||||
|
||||
verifyImagePath(t, pathCriterion)
|
||||
|
||||
pathCriterion.Modifier = models.CriterionModifierNotEquals
|
||||
verifyImagePath(t, pathCriterion)
|
||||
}
|
||||
|
||||
func verifyImagePath(t *testing.T, pathCriterion models.StringCriterionInput) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
imageFilter := models.ImageFilterType{
|
||||
Path: &pathCriterion,
|
||||
}
|
||||
|
||||
images, _ := sqb.Query(&imageFilter, nil)
|
||||
|
||||
for _, image := range images {
|
||||
verifyString(t, image.Path, pathCriterion)
|
||||
}
|
||||
}
|
||||
|
||||
func TestImageQueryRating(t *testing.T) {
|
||||
const rating = 3
|
||||
ratingCriterion := models.IntCriterionInput{
|
||||
Value: rating,
|
||||
Modifier: models.CriterionModifierEquals,
|
||||
}
|
||||
|
||||
verifyImagesRating(t, ratingCriterion)
|
||||
|
||||
ratingCriterion.Modifier = models.CriterionModifierNotEquals
|
||||
verifyImagesRating(t, ratingCriterion)
|
||||
|
||||
ratingCriterion.Modifier = models.CriterionModifierGreaterThan
|
||||
verifyImagesRating(t, ratingCriterion)
|
||||
|
||||
ratingCriterion.Modifier = models.CriterionModifierLessThan
|
||||
verifyImagesRating(t, ratingCriterion)
|
||||
|
||||
ratingCriterion.Modifier = models.CriterionModifierIsNull
|
||||
verifyImagesRating(t, ratingCriterion)
|
||||
|
||||
ratingCriterion.Modifier = models.CriterionModifierNotNull
|
||||
verifyImagesRating(t, ratingCriterion)
|
||||
}
|
||||
|
||||
func verifyImagesRating(t *testing.T, ratingCriterion models.IntCriterionInput) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
imageFilter := models.ImageFilterType{
|
||||
Rating: &ratingCriterion,
|
||||
}
|
||||
|
||||
images, _ := sqb.Query(&imageFilter, nil)
|
||||
|
||||
for _, image := range images {
|
||||
verifyInt64(t, image.Rating, ratingCriterion)
|
||||
}
|
||||
}
|
||||
|
||||
func TestImageQueryOCounter(t *testing.T) {
|
||||
const oCounter = 1
|
||||
oCounterCriterion := models.IntCriterionInput{
|
||||
Value: oCounter,
|
||||
Modifier: models.CriterionModifierEquals,
|
||||
}
|
||||
|
||||
verifyImagesOCounter(t, oCounterCriterion)
|
||||
|
||||
oCounterCriterion.Modifier = models.CriterionModifierNotEquals
|
||||
verifyImagesOCounter(t, oCounterCriterion)
|
||||
|
||||
oCounterCriterion.Modifier = models.CriterionModifierGreaterThan
|
||||
verifyImagesOCounter(t, oCounterCriterion)
|
||||
|
||||
oCounterCriterion.Modifier = models.CriterionModifierLessThan
|
||||
verifyImagesOCounter(t, oCounterCriterion)
|
||||
}
|
||||
|
||||
func verifyImagesOCounter(t *testing.T, oCounterCriterion models.IntCriterionInput) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
imageFilter := models.ImageFilterType{
|
||||
OCounter: &oCounterCriterion,
|
||||
}
|
||||
|
||||
images, _ := sqb.Query(&imageFilter, nil)
|
||||
|
||||
for _, image := range images {
|
||||
verifyInt(t, image.OCounter, oCounterCriterion)
|
||||
}
|
||||
}
|
||||
|
||||
func TestImageQueryResolution(t *testing.T) {
|
||||
verifyImagesResolution(t, models.ResolutionEnumLow)
|
||||
verifyImagesResolution(t, models.ResolutionEnumStandard)
|
||||
verifyImagesResolution(t, models.ResolutionEnumStandardHd)
|
||||
verifyImagesResolution(t, models.ResolutionEnumFullHd)
|
||||
verifyImagesResolution(t, models.ResolutionEnumFourK)
|
||||
verifyImagesResolution(t, models.ResolutionEnum("unknown"))
|
||||
}
|
||||
|
||||
func verifyImagesResolution(t *testing.T, resolution models.ResolutionEnum) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
imageFilter := models.ImageFilterType{
|
||||
Resolution: &resolution,
|
||||
}
|
||||
|
||||
images, _ := sqb.Query(&imageFilter, nil)
|
||||
|
||||
for _, image := range images {
|
||||
verifyImageResolution(t, image.Height, resolution)
|
||||
}
|
||||
}
|
||||
|
||||
func verifyImageResolution(t *testing.T, height sql.NullInt64, resolution models.ResolutionEnum) {
|
||||
assert := assert.New(t)
|
||||
h := height.Int64
|
||||
|
||||
switch resolution {
|
||||
case models.ResolutionEnumLow:
|
||||
assert.True(h < 480)
|
||||
case models.ResolutionEnumStandard:
|
||||
assert.True(h >= 480 && h < 720)
|
||||
case models.ResolutionEnumStandardHd:
|
||||
assert.True(h >= 720 && h < 1080)
|
||||
case models.ResolutionEnumFullHd:
|
||||
assert.True(h >= 1080 && h < 2160)
|
||||
case models.ResolutionEnumFourK:
|
||||
assert.True(h >= 2160)
|
||||
}
|
||||
}
|
||||
|
||||
func TestImageQueryIsMissingGalleries(t *testing.T) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
isMissing := "galleries"
|
||||
imageFilter := models.ImageFilterType{
|
||||
IsMissing: &isMissing,
|
||||
}
|
||||
|
||||
q := getImageStringValue(imageIdxWithGallery, titleField)
|
||||
findFilter := models.FindFilterType{
|
||||
Q: &q,
|
||||
}
|
||||
|
||||
images, _ := sqb.Query(&imageFilter, &findFilter)
|
||||
|
||||
assert.Len(t, images, 0)
|
||||
|
||||
findFilter.Q = nil
|
||||
images, _ = sqb.Query(&imageFilter, &findFilter)
|
||||
|
||||
// ensure none of the ids equal the one with the gallery
|
||||
for _, image := range images {
|
||||
assert.NotEqual(t, imageIDs[imageIdxWithGallery], image.ID)
|
||||
}
|
||||
}
|
||||
|
||||
func TestImageQueryIsMissingStudio(t *testing.T) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
isMissing := "studio"
|
||||
imageFilter := models.ImageFilterType{
|
||||
IsMissing: &isMissing,
|
||||
}
|
||||
|
||||
q := getImageStringValue(imageIdxWithStudio, titleField)
|
||||
findFilter := models.FindFilterType{
|
||||
Q: &q,
|
||||
}
|
||||
|
||||
images, _ := sqb.Query(&imageFilter, &findFilter)
|
||||
|
||||
assert.Len(t, images, 0)
|
||||
|
||||
findFilter.Q = nil
|
||||
images, _ = sqb.Query(&imageFilter, &findFilter)
|
||||
|
||||
// ensure none of the ids equal the one with the studio
|
||||
for _, image := range images {
|
||||
assert.NotEqual(t, imageIDs[imageIdxWithStudio], image.ID)
|
||||
}
|
||||
}
|
||||
|
||||
func TestImageQueryIsMissingPerformers(t *testing.T) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
isMissing := "performers"
|
||||
imageFilter := models.ImageFilterType{
|
||||
IsMissing: &isMissing,
|
||||
}
|
||||
|
||||
q := getImageStringValue(imageIdxWithPerformer, titleField)
|
||||
findFilter := models.FindFilterType{
|
||||
Q: &q,
|
||||
}
|
||||
|
||||
images, _ := sqb.Query(&imageFilter, &findFilter)
|
||||
|
||||
assert.Len(t, images, 0)
|
||||
|
||||
findFilter.Q = nil
|
||||
images, _ = sqb.Query(&imageFilter, &findFilter)
|
||||
|
||||
assert.True(t, len(images) > 0)
|
||||
|
||||
// ensure none of the ids equal the one with the performer
|
||||
for _, image := range images {
|
||||
assert.NotEqual(t, imageIDs[imageIdxWithPerformer], image.ID)
|
||||
}
|
||||
}
|
||||
|
||||
func TestImageQueryIsMissingTags(t *testing.T) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
isMissing := "tags"
|
||||
imageFilter := models.ImageFilterType{
|
||||
IsMissing: &isMissing,
|
||||
}
|
||||
|
||||
q := getImageStringValue(imageIdxWithTwoTags, titleField)
|
||||
findFilter := models.FindFilterType{
|
||||
Q: &q,
|
||||
}
|
||||
|
||||
images, _ := sqb.Query(&imageFilter, &findFilter)
|
||||
|
||||
assert.Len(t, images, 0)
|
||||
|
||||
findFilter.Q = nil
|
||||
images, _ = sqb.Query(&imageFilter, &findFilter)
|
||||
|
||||
assert.True(t, len(images) > 0)
|
||||
}
|
||||
|
||||
func TestImageQueryIsMissingRating(t *testing.T) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
isMissing := "rating"
|
||||
imageFilter := models.ImageFilterType{
|
||||
IsMissing: &isMissing,
|
||||
}
|
||||
|
||||
images, _ := sqb.Query(&imageFilter, nil)
|
||||
|
||||
assert.True(t, len(images) > 0)
|
||||
|
||||
// ensure the rating is null
|
||||
for _, image := range images {
|
||||
assert.True(t, !image.Rating.Valid)
|
||||
}
|
||||
}
|
||||
|
||||
func TestImageQueryPerformers(t *testing.T) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
performerCriterion := models.MultiCriterionInput{
|
||||
Value: []string{
|
||||
strconv.Itoa(performerIDs[performerIdxWithImage]),
|
||||
strconv.Itoa(performerIDs[performerIdx1WithImage]),
|
||||
},
|
||||
Modifier: models.CriterionModifierIncludes,
|
||||
}
|
||||
|
||||
imageFilter := models.ImageFilterType{
|
||||
Performers: &performerCriterion,
|
||||
}
|
||||
|
||||
images, _ := sqb.Query(&imageFilter, nil)
|
||||
|
||||
assert.Len(t, images, 2)
|
||||
|
||||
// ensure ids are correct
|
||||
for _, image := range images {
|
||||
assert.True(t, image.ID == imageIDs[imageIdxWithPerformer] || image.ID == imageIDs[imageIdxWithTwoPerformers])
|
||||
}
|
||||
|
||||
performerCriterion = models.MultiCriterionInput{
|
||||
Value: []string{
|
||||
strconv.Itoa(performerIDs[performerIdx1WithImage]),
|
||||
strconv.Itoa(performerIDs[performerIdx2WithImage]),
|
||||
},
|
||||
Modifier: models.CriterionModifierIncludesAll,
|
||||
}
|
||||
|
||||
images, _ = sqb.Query(&imageFilter, nil)
|
||||
|
||||
assert.Len(t, images, 1)
|
||||
assert.Equal(t, imageIDs[imageIdxWithTwoPerformers], images[0].ID)
|
||||
|
||||
performerCriterion = models.MultiCriterionInput{
|
||||
Value: []string{
|
||||
strconv.Itoa(performerIDs[performerIdx1WithImage]),
|
||||
},
|
||||
Modifier: models.CriterionModifierExcludes,
|
||||
}
|
||||
|
||||
q := getImageStringValue(imageIdxWithTwoPerformers, titleField)
|
||||
findFilter := models.FindFilterType{
|
||||
Q: &q,
|
||||
}
|
||||
|
||||
images, _ = sqb.Query(&imageFilter, &findFilter)
|
||||
assert.Len(t, images, 0)
|
||||
}
|
||||
|
||||
func TestImageQueryTags(t *testing.T) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
tagCriterion := models.MultiCriterionInput{
|
||||
Value: []string{
|
||||
strconv.Itoa(tagIDs[tagIdxWithImage]),
|
||||
strconv.Itoa(tagIDs[tagIdx1WithImage]),
|
||||
},
|
||||
Modifier: models.CriterionModifierIncludes,
|
||||
}
|
||||
|
||||
imageFilter := models.ImageFilterType{
|
||||
Tags: &tagCriterion,
|
||||
}
|
||||
|
||||
images, _ := sqb.Query(&imageFilter, nil)
|
||||
|
||||
assert.Len(t, images, 2)
|
||||
|
||||
// ensure ids are correct
|
||||
for _, image := range images {
|
||||
assert.True(t, image.ID == imageIDs[imageIdxWithTag] || image.ID == imageIDs[imageIdxWithTwoTags])
|
||||
}
|
||||
|
||||
tagCriterion = models.MultiCriterionInput{
|
||||
Value: []string{
|
||||
strconv.Itoa(tagIDs[tagIdx1WithImage]),
|
||||
strconv.Itoa(tagIDs[tagIdx2WithImage]),
|
||||
},
|
||||
Modifier: models.CriterionModifierIncludesAll,
|
||||
}
|
||||
|
||||
images, _ = sqb.Query(&imageFilter, nil)
|
||||
|
||||
assert.Len(t, images, 1)
|
||||
assert.Equal(t, imageIDs[imageIdxWithTwoTags], images[0].ID)
|
||||
|
||||
tagCriterion = models.MultiCriterionInput{
|
||||
Value: []string{
|
||||
strconv.Itoa(tagIDs[tagIdx1WithImage]),
|
||||
},
|
||||
Modifier: models.CriterionModifierExcludes,
|
||||
}
|
||||
|
||||
q := getImageStringValue(imageIdxWithTwoTags, titleField)
|
||||
findFilter := models.FindFilterType{
|
||||
Q: &q,
|
||||
}
|
||||
|
||||
images, _ = sqb.Query(&imageFilter, &findFilter)
|
||||
assert.Len(t, images, 0)
|
||||
}
|
||||
|
||||
func TestImageQueryStudio(t *testing.T) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
studioCriterion := models.MultiCriterionInput{
|
||||
Value: []string{
|
||||
strconv.Itoa(studioIDs[studioIdxWithImage]),
|
||||
},
|
||||
Modifier: models.CriterionModifierIncludes,
|
||||
}
|
||||
|
||||
imageFilter := models.ImageFilterType{
|
||||
Studios: &studioCriterion,
|
||||
}
|
||||
|
||||
images, _ := sqb.Query(&imageFilter, nil)
|
||||
|
||||
assert.Len(t, images, 1)
|
||||
|
||||
// ensure id is correct
|
||||
assert.Equal(t, imageIDs[imageIdxWithStudio], images[0].ID)
|
||||
|
||||
studioCriterion = models.MultiCriterionInput{
|
||||
Value: []string{
|
||||
strconv.Itoa(studioIDs[studioIdxWithImage]),
|
||||
},
|
||||
Modifier: models.CriterionModifierExcludes,
|
||||
}
|
||||
|
||||
q := getImageStringValue(imageIdxWithStudio, titleField)
|
||||
findFilter := models.FindFilterType{
|
||||
Q: &q,
|
||||
}
|
||||
|
||||
images, _ = sqb.Query(&imageFilter, &findFilter)
|
||||
assert.Len(t, images, 0)
|
||||
}
|
||||
|
||||
func TestImageQuerySorting(t *testing.T) {
|
||||
sort := titleField
|
||||
direction := models.SortDirectionEnumAsc
|
||||
findFilter := models.FindFilterType{
|
||||
Sort: &sort,
|
||||
Direction: &direction,
|
||||
}
|
||||
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
images, _ := sqb.Query(nil, &findFilter)
|
||||
|
||||
// images should be in same order as indexes
|
||||
firstImage := images[0]
|
||||
lastImage := images[len(images)-1]
|
||||
|
||||
assert.Equal(t, imageIDs[0], firstImage.ID)
|
||||
assert.Equal(t, imageIDs[len(imageIDs)-1], lastImage.ID)
|
||||
|
||||
// sort in descending order
|
||||
direction = models.SortDirectionEnumDesc
|
||||
|
||||
images, _ = sqb.Query(nil, &findFilter)
|
||||
firstImage = images[0]
|
||||
lastImage = images[len(images)-1]
|
||||
|
||||
assert.Equal(t, imageIDs[len(imageIDs)-1], firstImage.ID)
|
||||
assert.Equal(t, imageIDs[0], lastImage.ID)
|
||||
}
|
||||
|
||||
func TestImageQueryPagination(t *testing.T) {
|
||||
perPage := 1
|
||||
findFilter := models.FindFilterType{
|
||||
PerPage: &perPage,
|
||||
}
|
||||
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
images, _ := sqb.Query(nil, &findFilter)
|
||||
|
||||
assert.Len(t, images, 1)
|
||||
|
||||
firstID := images[0].ID
|
||||
|
||||
page := 2
|
||||
findFilter.Page = &page
|
||||
images, _ = sqb.Query(nil, &findFilter)
|
||||
|
||||
assert.Len(t, images, 1)
|
||||
secondID := images[0].ID
|
||||
assert.NotEqual(t, firstID, secondID)
|
||||
|
||||
perPage = 2
|
||||
page = 1
|
||||
|
||||
images, _ = sqb.Query(nil, &findFilter)
|
||||
assert.Len(t, images, 2)
|
||||
assert.Equal(t, firstID, images[0].ID)
|
||||
assert.Equal(t, secondID, images[1].ID)
|
||||
}
|
||||
|
||||
func TestImageCountByTagID(t *testing.T) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
|
||||
imageCount, err := sqb.CountByTagID(tagIDs[tagIdxWithImage])
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("error calling CountByTagID: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Equal(t, 1, imageCount)
|
||||
|
||||
imageCount, err = sqb.CountByTagID(0)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("error calling CountByTagID: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Equal(t, 0, imageCount)
|
||||
}
|
||||
|
||||
func TestImageCountByStudioID(t *testing.T) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
|
||||
imageCount, err := sqb.CountByStudioID(studioIDs[studioIdxWithImage])
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("error calling CountByStudioID: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Equal(t, 1, imageCount)
|
||||
|
||||
imageCount, err = sqb.CountByStudioID(0)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("error calling CountByStudioID: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Equal(t, 0, imageCount)
|
||||
}
|
||||
|
||||
func TestImageFindByPerformerID(t *testing.T) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
|
||||
images, err := sqb.FindByPerformerID(performerIDs[performerIdxWithImage])
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("error calling FindByPerformerID: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Len(t, images, 1)
|
||||
assert.Equal(t, imageIDs[imageIdxWithPerformer], images[0].ID)
|
||||
|
||||
images, err = sqb.FindByPerformerID(0)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("error calling FindByPerformerID: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Len(t, images, 0)
|
||||
}
|
||||
|
||||
func TestImageFindByStudioID(t *testing.T) {
|
||||
sqb := models.NewImageQueryBuilder()
|
||||
|
||||
images, err := sqb.FindByStudioID(studioIDs[studioIdxWithImage])
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("error calling FindByStudioID: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Len(t, images, 1)
|
||||
assert.Equal(t, imageIDs[imageIdxWithStudio], images[0].ID)
|
||||
|
||||
images, err = sqb.FindByStudioID(0)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("error calling FindByStudioID: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Len(t, images, 0)
|
||||
}
|
||||
|
||||
// TODO Update
|
||||
// TODO IncrementOCounter
|
||||
// TODO DecrementOCounter
|
||||
// TODO ResetOCounter
|
||||
// TODO Destroy
|
||||
// TODO FindByChecksum
|
||||
// TODO Count
|
||||
// TODO SizeCount
|
||||
// TODO All
|
||||
File diff suppressed because it is too large
@@ -1,312 +0,0 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"fmt"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
)
|
||||
|
||||
type MovieQueryBuilder struct{}
|
||||
|
||||
func NewMovieQueryBuilder() MovieQueryBuilder {
|
||||
return MovieQueryBuilder{}
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) Create(newMovie Movie, tx *sqlx.Tx) (*Movie, error) {
|
||||
ensureTx(tx)
|
||||
result, err := tx.NamedExec(
|
||||
`INSERT INTO movies (checksum, name, aliases, duration, date, rating, studio_id, director, synopsis, url, created_at, updated_at)
|
||||
VALUES (:checksum, :name, :aliases, :duration, :date, :rating, :studio_id, :director, :synopsis, :url, :created_at, :updated_at)
|
||||
`,
|
||||
newMovie,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
movieID, err := result.LastInsertId()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := tx.Get(&newMovie, `SELECT * FROM movies WHERE id = ? LIMIT 1`, movieID); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &newMovie, nil
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) Update(updatedMovie MoviePartial, tx *sqlx.Tx) (*Movie, error) {
|
||||
ensureTx(tx)
|
||||
_, err := tx.NamedExec(
|
||||
`UPDATE movies SET `+SQLGenKeysPartial(updatedMovie)+` WHERE movies.id = :id`,
|
||||
updatedMovie,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return qb.Find(updatedMovie.ID, tx)
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) UpdateFull(updatedMovie Movie, tx *sqlx.Tx) (*Movie, error) {
|
||||
ensureTx(tx)
|
||||
_, err := tx.NamedExec(
|
||||
`UPDATE movies SET `+SQLGenKeys(updatedMovie)+` WHERE movies.id = :id`,
|
||||
updatedMovie,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return qb.Find(updatedMovie.ID, tx)
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) Destroy(id string, tx *sqlx.Tx) error {
|
||||
// delete movie from movies_scenes
|
||||
|
||||
_, err := tx.Exec("DELETE FROM movies_scenes WHERE movie_id = ?", id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// // remove movie from scraped items
|
||||
// _, err = tx.Exec("UPDATE scraped_items SET movie_id = null WHERE movie_id = ?", id)
|
||||
// if err != nil {
|
||||
// return err
|
||||
// }
|
||||
|
||||
return executeDeleteQuery("movies", id, tx)
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) Find(id int, tx *sqlx.Tx) (*Movie, error) {
|
||||
query := "SELECT * FROM movies WHERE id = ? LIMIT 1"
|
||||
args := []interface{}{id}
|
||||
return qb.queryMovie(query, args, tx)
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) FindMany(ids []int) ([]*Movie, error) {
|
||||
var movies []*Movie
|
||||
for _, id := range ids {
|
||||
movie, err := qb.Find(id, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if movie == nil {
|
||||
return nil, fmt.Errorf("movie with id %d not found", id)
|
||||
}
|
||||
|
||||
movies = append(movies, movie)
|
||||
}
|
||||
|
||||
return movies, nil
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]*Movie, error) {
|
||||
query := `
|
||||
SELECT movies.* FROM movies
|
||||
LEFT JOIN movies_scenes as scenes_join on scenes_join.movie_id = movies.id
|
||||
WHERE scenes_join.scene_id = ?
|
||||
GROUP BY movies.id
|
||||
`
|
||||
args := []interface{}{sceneID}
|
||||
return qb.queryMovies(query, args, tx)
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) FindByName(name string, tx *sqlx.Tx, nocase bool) (*Movie, error) {
|
||||
query := "SELECT * FROM movies WHERE name = ?"
|
||||
if nocase {
|
||||
query += " COLLATE NOCASE"
|
||||
}
|
||||
query += " LIMIT 1"
|
||||
args := []interface{}{name}
|
||||
return qb.queryMovie(query, args, tx)
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) FindByNames(names []string, tx *sqlx.Tx, nocase bool) ([]*Movie, error) {
|
||||
query := "SELECT * FROM movies WHERE name"
|
||||
if nocase {
|
||||
query += " COLLATE NOCASE"
|
||||
}
|
||||
query += " IN " + getInBinding(len(names))
|
||||
var args []interface{}
|
||||
for _, name := range names {
|
||||
args = append(args, name)
|
||||
}
|
||||
return qb.queryMovies(query, args, tx)
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) Count() (int, error) {
|
||||
return runCountQuery(buildCountQuery("SELECT movies.id FROM movies"), nil)
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) All() ([]*Movie, error) {
|
||||
return qb.queryMovies(selectAll("movies")+qb.getMovieSort(nil), nil, nil)
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) AllSlim() ([]*Movie, error) {
|
||||
return qb.queryMovies("SELECT movies.id, movies.name FROM movies "+qb.getMovieSort(nil), nil, nil)
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) Query(movieFilter *MovieFilterType, findFilter *FindFilterType) ([]*Movie, int) {
|
||||
if findFilter == nil {
|
||||
findFilter = &FindFilterType{}
|
||||
}
|
||||
if movieFilter == nil {
|
||||
movieFilter = &MovieFilterType{}
|
||||
}
|
||||
|
||||
var whereClauses []string
|
||||
var havingClauses []string
|
||||
var args []interface{}
|
||||
body := selectDistinctIDs("movies")
|
||||
body += `
|
||||
left join movies_scenes as scenes_join on scenes_join.movie_id = movies.id
|
||||
left join scenes on scenes_join.scene_id = scenes.id
|
||||
left join studios as studio on studio.id = movies.studio_id
|
||||
`
|
||||
|
||||
if q := findFilter.Q; q != nil && *q != "" {
|
||||
searchColumns := []string{"movies.name"}
|
||||
clause, thisArgs := getSearchBinding(searchColumns, *q, false)
|
||||
whereClauses = append(whereClauses, clause)
|
||||
args = append(args, thisArgs...)
|
||||
}
|
||||
|
||||
if studiosFilter := movieFilter.Studios; studiosFilter != nil && len(studiosFilter.Value) > 0 {
|
||||
for _, studioID := range studiosFilter.Value {
|
||||
args = append(args, studioID)
|
||||
}
|
||||
|
||||
whereClause, havingClause := getMultiCriterionClause("movies", "studio", "", "", "studio_id", studiosFilter)
|
||||
whereClauses = appendClause(whereClauses, whereClause)
|
||||
havingClauses = appendClause(havingClauses, havingClause)
|
||||
}
|
||||
|
||||
if isMissingFilter := movieFilter.IsMissing; isMissingFilter != nil && *isMissingFilter != "" {
|
||||
switch *isMissingFilter {
|
||||
case "front_image":
|
||||
body += `left join movies_images on movies_images.movie_id = movies.id
|
||||
`
|
||||
whereClauses = appendClause(whereClauses, "movies_images.front_image IS NULL")
|
||||
case "back_image":
|
||||
body += `left join movies_images on movies_images.movie_id = movies.id
|
||||
`
|
||||
whereClauses = appendClause(whereClauses, "movies_images.back_image IS NULL")
|
||||
case "scenes":
|
||||
body += `left join movies_scenes on movies_scenes.movie_id = movies.id
|
||||
`
|
||||
whereClauses = appendClause(whereClauses, "movies_scenes.scene_id IS NULL")
|
||||
default:
|
||||
whereClauses = appendClause(whereClauses, "movies."+*isMissingFilter+" IS NULL")
|
||||
}
|
||||
}
|
||||
|
||||
sortAndPagination := qb.getMovieSort(findFilter) + getPagination(findFilter)
|
||||
idsResult, countResult := executeFindQuery("movies", body, args, sortAndPagination, whereClauses, havingClauses)
|
||||
|
||||
var movies []*Movie
|
||||
for _, id := range idsResult {
|
||||
movie, _ := qb.Find(id, nil)
|
||||
movies = append(movies, movie)
|
||||
}
|
||||
|
||||
return movies, countResult
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) getMovieSort(findFilter *FindFilterType) string {
|
||||
var sort string
|
||||
var direction string
|
||||
if findFilter == nil {
|
||||
sort = "name"
|
||||
direction = "ASC"
|
||||
} else {
|
||||
sort = findFilter.GetSort("name")
|
||||
direction = findFilter.GetDirection()
|
||||
}
|
||||
|
||||
// #943 - override name sorting to use natural sort
|
||||
if sort == "name" {
|
||||
return " ORDER BY " + getColumn("movies", sort) + " COLLATE NATURAL_CS " + direction
|
||||
}
|
||||
|
||||
return getSort(sort, direction, "movies")
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) queryMovie(query string, args []interface{}, tx *sqlx.Tx) (*Movie, error) {
|
||||
results, err := qb.queryMovies(query, args, tx)
|
||||
if err != nil || len(results) < 1 {
|
||||
return nil, err
|
||||
}
|
||||
return results[0], nil
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) queryMovies(query string, args []interface{}, tx *sqlx.Tx) ([]*Movie, error) {
|
||||
var rows *sqlx.Rows
|
||||
var err error
|
||||
if tx != nil {
|
||||
rows, err = tx.Queryx(query, args...)
|
||||
} else {
|
||||
rows, err = database.DB.Queryx(query, args...)
|
||||
}
|
||||
|
||||
if err != nil && err != sql.ErrNoRows {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
movies := make([]*Movie, 0)
|
||||
for rows.Next() {
|
||||
movie := Movie{}
|
||||
if err := rows.StructScan(&movie); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
movies = append(movies, &movie)
|
||||
}
|
||||
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return movies, nil
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) UpdateMovieImages(movieID int, frontImage []byte, backImage []byte, tx *sqlx.Tx) error {
|
||||
ensureTx(tx)
|
||||
|
||||
// Delete the existing cover and then create new
|
||||
if err := qb.DestroyMovieImages(movieID, tx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err := tx.Exec(
|
||||
`INSERT INTO movies_images (movie_id, front_image, back_image) VALUES (?, ?, ?)`,
|
||||
movieID,
|
||||
frontImage,
|
||||
backImage,
|
||||
)
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) DestroyMovieImages(movieID int, tx *sqlx.Tx) error {
|
||||
ensureTx(tx)
|
||||
|
||||
// Delete the existing joins
|
||||
_, err := tx.Exec("DELETE FROM movies_images WHERE movie_id = ?", movieID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) GetFrontImage(movieID int, tx *sqlx.Tx) ([]byte, error) {
|
||||
query := `SELECT front_image from movies_images WHERE movie_id = ?`
|
||||
return getImage(tx, query, movieID)
|
||||
}
|
||||
|
||||
func (qb *MovieQueryBuilder) GetBackImage(movieID int, tx *sqlx.Tx) ([]byte, error) {
|
||||
query := `SELECT back_image from movies_images WHERE movie_id = ?`
|
||||
return getImage(tx, query, movieID)
|
||||
}
|
||||
@@ -1,275 +0,0 @@
|
||||
// +build integration
|
||||
|
||||
package models_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"strconv"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
func TestMovieFindBySceneID(t *testing.T) {
|
||||
mqb := models.NewMovieQueryBuilder()
|
||||
sceneID := sceneIDs[sceneIdxWithMovie]
|
||||
|
||||
movies, err := mqb.FindBySceneID(sceneID, nil)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding movie: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Equal(t, 1, len(movies), "expect 1 movie")
|
||||
|
||||
movie := movies[0]
|
||||
assert.Equal(t, getMovieStringValue(movieIdxWithScene, "Name"), movie.Name.String)
|
||||
|
||||
movies, err = mqb.FindBySceneID(0, nil)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding movie: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Equal(t, 0, len(movies))
|
||||
}
|
||||
|
||||
func TestMovieFindByName(t *testing.T) {
|
||||
|
||||
mqb := models.NewMovieQueryBuilder()
|
||||
|
||||
name := movieNames[movieIdxWithScene] // find a movie by name
|
||||
|
||||
movie, err := mqb.FindByName(name, nil, false)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding movies: %s", err.Error())
|
||||
}
|
||||
|
||||
assert.Equal(t, movieNames[movieIdxWithScene], movie.Name.String)
|
||||
|
||||
name = movieNames[movieIdxWithDupName] // find a movie by name nocase
|
||||
|
||||
movie, err = mqb.FindByName(name, nil, true)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding movies: %s", err.Error())
|
||||
}
|
||||
// movieIdxWithDupName and movieIdxWithScene should have similar names (only diff should be Name vs NaMe)
|
||||
// movie.Name should match with movieIdxWithScene since its ID is before movieIdxWithDupName
|
||||
assert.Equal(t, movieNames[movieIdxWithScene], movie.Name.String)
|
||||
// movie.Name should match with movieIdxWithDupName if the check is not case sensitive
|
||||
assert.Equal(t, strings.ToLower(movieNames[movieIdxWithDupName]), strings.ToLower(movie.Name.String))
|
||||
}
|
||||
|
||||
func TestMovieFindByNames(t *testing.T) {
|
||||
var names []string
|
||||
|
||||
mqb := models.NewMovieQueryBuilder()
|
||||
|
||||
names = append(names, movieNames[movieIdxWithScene]) // find movies by names
|
||||
|
||||
movies, err := mqb.FindByNames(names, nil, false)
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding movies: %s", err.Error())
|
||||
}
|
||||
assert.Len(t, movies, 1)
|
||||
assert.Equal(t, movieNames[movieIdxWithScene], movies[0].Name.String)
|
||||
|
||||
movies, err = mqb.FindByNames(names, nil, true) // find movies by names nocase
|
||||
if err != nil {
|
||||
t.Fatalf("Error finding movies: %s", err.Error())
|
||||
}
|
||||
assert.Len(t, movies, 2) // movieIdxWithScene and movieIdxWithDupName
|
||||
assert.Equal(t, strings.ToLower(movieNames[movieIdxWithScene]), strings.ToLower(movies[0].Name.String))
|
||||
assert.Equal(t, strings.ToLower(movieNames[movieIdxWithScene]), strings.ToLower(movies[1].Name.String))
|
||||
}
|
||||
|
||||
func TestMovieQueryStudio(t *testing.T) {
|
||||
mqb := models.NewMovieQueryBuilder()
|
||||
studioCriterion := models.MultiCriterionInput{
|
||||
Value: []string{
|
||||
strconv.Itoa(studioIDs[studioIdxWithMovie]),
|
||||
},
|
||||
Modifier: models.CriterionModifierIncludes,
|
||||
}
|
||||
|
||||
movieFilter := models.MovieFilterType{
|
||||
Studios: &studioCriterion,
|
||||
}
|
||||
|
||||
movies, _ := mqb.Query(&movieFilter, nil)
|
||||
|
||||
assert.Len(t, movies, 1)
|
||||
|
||||
// ensure id is correct
|
||||
assert.Equal(t, movieIDs[movieIdxWithStudio], movies[0].ID)
|
||||
|
||||
studioCriterion = models.MultiCriterionInput{
|
||||
Value: []string{
|
||||
strconv.Itoa(studioIDs[studioIdxWithMovie]),
|
||||
},
|
||||
Modifier: models.CriterionModifierExcludes,
|
||||
}
|
||||
|
||||
q := getMovieStringValue(movieIdxWithStudio, titleField)
|
||||
findFilter := models.FindFilterType{
|
||||
Q: &q,
|
||||
}
|
||||
|
||||
movies, _ = mqb.Query(&movieFilter, &findFilter)
|
||||
assert.Len(t, movies, 0)
|
||||
}
|
||||
|
||||
func TestMovieUpdateMovieImages(t *testing.T) {
|
||||
mqb := models.NewMovieQueryBuilder()
|
||||
|
||||
// create movie to test against
|
||||
ctx := context.TODO()
|
||||
tx := database.DB.MustBeginTx(ctx, nil)
|
||||
|
||||
const name = "TestMovieUpdateMovieImages"
|
||||
movie := models.Movie{
|
||||
Name: sql.NullString{String: name, Valid: true},
|
||||
Checksum: utils.MD5FromString(name),
|
||||
}
|
||||
created, err := mqb.Create(movie, tx)
|
||||
if err != nil {
|
||||
tx.Rollback()
|
||||
t.Fatalf("Error creating movie: %s", err.Error())
|
||||
}
|
||||
|
||||
frontImage := []byte("frontImage")
|
||||
backImage := []byte("backImage")
|
||||
err = mqb.UpdateMovieImages(created.ID, frontImage, backImage, tx)
|
||||
if err != nil {
|
||||
tx.Rollback()
|
||||
t.Fatalf("Error updating movie images: %s", err.Error())
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
tx.Rollback()
|
||||
t.Fatalf("Error committing: %s", err.Error())
|
||||
}
|
||||
|
||||
// ensure images are set
|
||||
storedFront, err := mqb.GetFrontImage(created.ID, nil)
|
||||
if err != nil {
|
||||
t.Fatalf("Error getting front image: %s", err.Error())
|
||||
}
|
||||
assert.Equal(t, storedFront, frontImage)
|
||||
|
||||
storedBack, err := mqb.GetBackImage(created.ID, nil)
|
||||
if err != nil {
|
||||
t.Fatalf("Error getting back image: %s", err.Error())
|
||||
}
|
||||
assert.Equal(t, storedBack, backImage)
|
||||
|
||||
// set front image only
|
||||
newImage := []byte("newImage")
|
||||
tx = database.DB.MustBeginTx(ctx, nil)
|
||||
err = mqb.UpdateMovieImages(created.ID, newImage, nil, tx)
|
||||
if err != nil {
|
||||
tx.Rollback()
|
||||
t.Fatalf("Error updating movie images: %s", err.Error())
|
||||
}
|
||||
|
||||
storedFront, err = mqb.GetFrontImage(created.ID, tx)
|
||||
if err != nil {
|
||||
tx.Rollback()
|
||||
t.Fatalf("Error getting front image: %s", err.Error())
|
||||
}
|
||||
assert.Equal(t, storedFront, newImage)
|
||||
|
||||
// back image should be nil
|
||||
storedBack, err = mqb.GetBackImage(created.ID, tx)
|
||||
if err != nil {
|
||||
tx.Rollback()
|
||||
t.Fatalf("Error getting back image: %s", err.Error())
|
||||
}
|
||||
	assert.Nil(t, storedBack)

	// set back image only
	err = mqb.UpdateMovieImages(created.ID, nil, newImage, tx)
	if err == nil {
		tx.Rollback()
		t.Fatalf("Expected error setting nil front image")
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}
}

func TestMovieDestroyMovieImages(t *testing.T) {
	mqb := models.NewMovieQueryBuilder()

	// create movie to test against
	ctx := context.TODO()
	tx := database.DB.MustBeginTx(ctx, nil)

	const name = "TestMovieDestroyMovieImages"
	movie := models.Movie{
		Name:     sql.NullString{String: name, Valid: true},
		Checksum: utils.MD5FromString(name),
	}
	created, err := mqb.Create(movie, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error creating movie: %s", err.Error())
	}

	frontImage := []byte("frontImage")
	backImage := []byte("backImage")
	err = mqb.UpdateMovieImages(created.ID, frontImage, backImage, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error updating movie images: %s", err.Error())
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}

	tx = database.DB.MustBeginTx(ctx, nil)

	err = mqb.DestroyMovieImages(created.ID, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error destroying movie images: %s", err.Error())
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}

	// front image should be nil
	storedFront, err := mqb.GetFrontImage(created.ID, nil)
	if err != nil {
		t.Fatalf("Error getting front image: %s", err.Error())
	}
	assert.Nil(t, storedFront)

	// back image should be nil
	storedBack, err := mqb.GetBackImage(created.ID, nil)
	if err != nil {
		t.Fatalf("Error getting back image: %s", err.Error())
	}
	assert.Nil(t, storedBack)
}

// TODO Update
// TODO Destroy
// TODO Find
// TODO Count
// TODO All
// TODO Query
@@ -1,257 +0,0 @@
// +build integration

package models_test

import (
	"context"
	"database/sql"
	"strings"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"

	"github.com/stashapp/stash/pkg/database"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/utils"
)

func TestPerformerFindBySceneID(t *testing.T) {
	pqb := models.NewPerformerQueryBuilder()
	sceneID := sceneIDs[sceneIdxWithPerformer]

	performers, err := pqb.FindBySceneID(sceneID, nil)

	if err != nil {
		t.Fatalf("Error finding performer: %s", err.Error())
	}

	assert.Equal(t, 1, len(performers))
	performer := performers[0]

	assert.Equal(t, getPerformerStringValue(performerIdxWithScene, "Name"), performer.Name.String)

	performers, err = pqb.FindBySceneID(0, nil)

	if err != nil {
		t.Fatalf("Error finding performer: %s", err.Error())
	}

	assert.Equal(t, 0, len(performers))
}

func TestPerformerFindNameBySceneID(t *testing.T) {
	pqb := models.NewPerformerQueryBuilder()
	sceneID := sceneIDs[sceneIdxWithPerformer]

	performers, err := pqb.FindNameBySceneID(sceneID, nil)

	if err != nil {
		t.Fatalf("Error finding performer: %s", err.Error())
	}

	assert.Equal(t, 1, len(performers))
	performer := performers[0]

	assert.Equal(t, getPerformerStringValue(performerIdxWithScene, "Name"), performer.Name.String)

	performers, err = pqb.FindBySceneID(0, nil)

	if err != nil {
		t.Fatalf("Error finding performer: %s", err.Error())
	}

	assert.Equal(t, 0, len(performers))
}

func TestPerformerFindByNames(t *testing.T) {
	var names []string

	pqb := models.NewPerformerQueryBuilder()

	names = append(names, performerNames[performerIdxWithScene]) // find performers by names

	performers, err := pqb.FindByNames(names, nil, false)
	if err != nil {
		t.Fatalf("Error finding performers: %s", err.Error())
	}
	assert.Len(t, performers, 1)
	assert.Equal(t, performerNames[performerIdxWithScene], performers[0].Name.String)

	performers, err = pqb.FindByNames(names, nil, true) // find performers by names nocase
	if err != nil {
		t.Fatalf("Error finding performers: %s", err.Error())
	}
	assert.Len(t, performers, 2) // performerIdxWithScene and performerIdxWithDupName
	assert.Equal(t, strings.ToLower(performerNames[performerIdxWithScene]), strings.ToLower(performers[0].Name.String))
	assert.Equal(t, strings.ToLower(performerNames[performerIdxWithScene]), strings.ToLower(performers[1].Name.String))

	names = append(names, performerNames[performerIdx1WithScene]) // find performers by names ( 2 names )

	performers, err = pqb.FindByNames(names, nil, false)
	if err != nil {
		t.Fatalf("Error finding performers: %s", err.Error())
	}
	assert.Len(t, performers, 2) // performerIdxWithScene and performerIdx1WithScene
	assert.Equal(t, performerNames[performerIdxWithScene], performers[0].Name.String)
	assert.Equal(t, performerNames[performerIdx1WithScene], performers[1].Name.String)

	performers, err = pqb.FindByNames(names, nil, true) // find performers by names ( 2 names nocase)
	if err != nil {
		t.Fatalf("Error finding performers: %s", err.Error())
	}
	assert.Len(t, performers, 4) // performerIdxWithScene and performerIdxWithDupName , performerIdx1WithScene and performerIdx1WithDupName
	assert.Equal(t, performerNames[performerIdxWithScene], performers[0].Name.String)
	assert.Equal(t, performerNames[performerIdx1WithScene], performers[1].Name.String)
	assert.Equal(t, performerNames[performerIdx1WithDupName], performers[2].Name.String)
	assert.Equal(t, performerNames[performerIdxWithDupName], performers[3].Name.String)

}

func TestPerformerUpdatePerformerImage(t *testing.T) {
	qb := models.NewPerformerQueryBuilder()

	// create performer to test against
	ctx := context.TODO()
	tx := database.DB.MustBeginTx(ctx, nil)

	const name = "TestPerformerUpdatePerformerImage"
	performer := models.Performer{
		Name:     sql.NullString{String: name, Valid: true},
		Checksum: utils.MD5FromString(name),
		Favorite: sql.NullBool{Bool: false, Valid: true},
	}
	created, err := qb.Create(performer, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error creating performer: %s", err.Error())
	}

	image := []byte("image")
	err = qb.UpdatePerformerImage(created.ID, image, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error updating performer image: %s", err.Error())
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}

	// ensure image set
	storedImage, err := qb.GetPerformerImage(created.ID, nil)
	if err != nil {
		t.Fatalf("Error getting image: %s", err.Error())
	}
	assert.Equal(t, storedImage, image)

	// set nil image
	tx = database.DB.MustBeginTx(ctx, nil)
	err = qb.UpdatePerformerImage(created.ID, nil, tx)
	if err == nil {
		t.Fatalf("Expected error setting nil image")
	}

	tx.Rollback()
}

func TestPerformerDestroyPerformerImage(t *testing.T) {
	qb := models.NewPerformerQueryBuilder()

	// create performer to test against
	ctx := context.TODO()
	tx := database.DB.MustBeginTx(ctx, nil)

	const name = "TestPerformerDestroyPerformerImage"
	performer := models.Performer{
		Name:     sql.NullString{String: name, Valid: true},
		Checksum: utils.MD5FromString(name),
		Favorite: sql.NullBool{Bool: false, Valid: true},
	}
	created, err := qb.Create(performer, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error creating performer: %s", err.Error())
	}

	image := []byte("image")
	err = qb.UpdatePerformerImage(created.ID, image, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error updating performer image: %s", err.Error())
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}

	tx = database.DB.MustBeginTx(ctx, nil)

	err = qb.DestroyPerformerImage(created.ID, tx)
	if err != nil {
		tx.Rollback()
		t.Fatalf("Error destroying performer image: %s", err.Error())
	}

	if err := tx.Commit(); err != nil {
		tx.Rollback()
		t.Fatalf("Error committing: %s", err.Error())
	}

	// image should be nil
	storedImage, err := qb.GetPerformerImage(created.ID, nil)
	if err != nil {
		t.Fatalf("Error getting image: %s", err.Error())
	}
	assert.Nil(t, storedImage)
}

func TestPerformerQueryAge(t *testing.T) {
	const age = 19
	ageCriterion := models.IntCriterionInput{
		Value:    age,
		Modifier: models.CriterionModifierEquals,
	}

	verifyPerformerAge(t, ageCriterion)

	ageCriterion.Modifier = models.CriterionModifierNotEquals
	verifyPerformerAge(t, ageCriterion)

	ageCriterion.Modifier = models.CriterionModifierGreaterThan
	verifyPerformerAge(t, ageCriterion)

	ageCriterion.Modifier = models.CriterionModifierLessThan
	verifyPerformerAge(t, ageCriterion)
}

func verifyPerformerAge(t *testing.T, ageCriterion models.IntCriterionInput) {
	qb := models.NewPerformerQueryBuilder()
	performerFilter := models.PerformerFilterType{
		Age: &ageCriterion,
	}

	performers, _ := qb.Query(&performerFilter, nil)

	now := time.Now()
	for _, performer := range performers {
		bd := performer.Birthdate.String
		d, _ := time.Parse("2006-01-02", bd)
		age := now.Year() - d.Year()
		if now.YearDay() < d.YearDay() {
			age = age - 1
		}

		verifyInt(t, age, ageCriterion)
	}
}

// TODO Update
// TODO Destroy
// TODO Find
// TODO Count
// TODO All
// TODO AllSlim
// TODO Query
@@ -1,68 +0,0 @@
// +build integration

package models_test

import (
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/stashapp/stash/pkg/models"
)

func TestMarkerFindBySceneID(t *testing.T) {
	mqb := models.NewSceneMarkerQueryBuilder()

	sceneID := sceneIDs[sceneIdxWithMarker]
	markers, err := mqb.FindBySceneID(sceneID, nil)

	if err != nil {
		t.Fatalf("Error finding markers: %s", err.Error())
	}

	assert.Len(t, markers, 1)
	assert.Equal(t, markerIDs[markerIdxWithScene], markers[0].ID)

	markers, err = mqb.FindBySceneID(0, nil)

	if err != nil {
		t.Fatalf("Error finding marker: %s", err.Error())
	}

	assert.Len(t, markers, 0)
}

func TestMarkerCountByTagID(t *testing.T) {
	mqb := models.NewSceneMarkerQueryBuilder()

	markerCount, err := mqb.CountByTagID(tagIDs[tagIdxWithPrimaryMarker])

	if err != nil {
		t.Fatalf("error calling CountByTagID: %s", err.Error())
	}

	assert.Equal(t, 1, markerCount)

	markerCount, err = mqb.CountByTagID(tagIDs[tagIdxWithMarker])

	if err != nil {
		t.Fatalf("error calling CountByTagID: %s", err.Error())
	}

	assert.Equal(t, 1, markerCount)

	markerCount, err = mqb.CountByTagID(0)

	if err != nil {
		t.Fatalf("error calling CountByTagID: %s", err.Error())
	}

	assert.Equal(t, 0, markerCount)
}

// TODO Update
// TODO Destroy
// TODO Find
// TODO GetMarkerStrings
// TODO Wall
// TODO Query
File diff suppressed because it is too large
@@ -1,113 +0,0 @@
package models

import (
	"database/sql"
	"github.com/jmoiron/sqlx"
	"github.com/stashapp/stash/pkg/database"
)

type ScrapedItemQueryBuilder struct{}

func NewScrapedItemQueryBuilder() ScrapedItemQueryBuilder {
	return ScrapedItemQueryBuilder{}
}

func (qb *ScrapedItemQueryBuilder) Create(newScrapedItem ScrapedItem, tx *sqlx.Tx) (*ScrapedItem, error) {
	ensureTx(tx)
	result, err := tx.NamedExec(
		`INSERT INTO scraped_items (title, description, url, date, rating, tags, models, episode, gallery_filename,
			gallery_url, video_filename, video_url, studio_id, created_at, updated_at)
		VALUES (:title, :description, :url, :date, :rating, :tags, :models, :episode, :gallery_filename,
			:gallery_url, :video_filename, :video_url, :studio_id, :created_at, :updated_at)
		`,
		newScrapedItem,
	)
	if err != nil {
		return nil, err
	}
	scrapedItemID, err := result.LastInsertId()
	if err != nil {
		return nil, err
	}
	if err := tx.Get(&newScrapedItem, `SELECT * FROM scraped_items WHERE id = ? LIMIT 1`, scrapedItemID); err != nil {
		return nil, err
	}
	return &newScrapedItem, nil
}

func (qb *ScrapedItemQueryBuilder) Update(updatedScrapedItem ScrapedItem, tx *sqlx.Tx) (*ScrapedItem, error) {
	ensureTx(tx)
	_, err := tx.NamedExec(
		`UPDATE scraped_items SET `+SQLGenKeys(updatedScrapedItem)+` WHERE scraped_items.id = :id`,
		updatedScrapedItem,
	)
	if err != nil {
		return nil, err
	}

	if err := tx.Get(&updatedScrapedItem, `SELECT * FROM scraped_items WHERE id = ? LIMIT 1`, updatedScrapedItem.ID); err != nil {
		return nil, err
	}
	return &updatedScrapedItem, nil
}

func (qb *ScrapedItemQueryBuilder) Find(id int) (*ScrapedItem, error) {
	query := "SELECT * FROM scraped_items WHERE id = ? LIMIT 1"
	args := []interface{}{id}
	return qb.queryScrapedItem(query, args, nil)
}

func (qb *ScrapedItemQueryBuilder) All() ([]*ScrapedItem, error) {
	return qb.queryScrapedItems(selectAll("scraped_items")+qb.getScrapedItemsSort(nil), nil, nil)
}

func (qb *ScrapedItemQueryBuilder) getScrapedItemsSort(findFilter *FindFilterType) string {
	var sort string
	var direction string
	if findFilter == nil {
		sort = "id" // TODO studio_id and title
		direction = "ASC"
	} else {
		sort = findFilter.GetSort("id")
		direction = findFilter.GetDirection()
	}
	return getSort(sort, direction, "scraped_items")
}

func (qb *ScrapedItemQueryBuilder) queryScrapedItem(query string, args []interface{}, tx *sqlx.Tx) (*ScrapedItem, error) {
	results, err := qb.queryScrapedItems(query, args, tx)
	if err != nil || len(results) < 1 {
		return nil, err
	}
	return results[0], nil
}

func (qb *ScrapedItemQueryBuilder) queryScrapedItems(query string, args []interface{}, tx *sqlx.Tx) ([]*ScrapedItem, error) {
	var rows *sqlx.Rows
	var err error
	if tx != nil {
		rows, err = tx.Queryx(query, args...)
	} else {
		rows, err = database.DB.Queryx(query, args...)
	}

	if err != nil && err != sql.ErrNoRows {
		return nil, err
	}
	defer rows.Close()

	scrapedItems := make([]*ScrapedItem, 0)
	for rows.Next() {
		scrapedItem := ScrapedItem{}
		if err := rows.StructScan(&scrapedItem); err != nil {
			return nil, err
		}
		scrapedItems = append(scrapedItems, &scrapedItem)
	}

	if err := rows.Err(); err != nil {
		return nil, err
	}

	return scrapedItems, nil
}
@@ -1,484 +0,0 @@
package models

import (
	"database/sql"
	"fmt"
	"math/rand"
	"reflect"
	"strconv"
	"strings"

	"github.com/jmoiron/sqlx"
	"github.com/stashapp/stash/pkg/database"
	"github.com/stashapp/stash/pkg/logger"
)

type queryBuilder struct {
	tableName string
	body      string

	whereClauses  []string
	havingClauses []string
	args          []interface{}

	sortAndPagination string
}

func (qb queryBuilder) executeFind() ([]int, int) {
	return executeFindQuery(qb.tableName, qb.body, qb.args, qb.sortAndPagination, qb.whereClauses, qb.havingClauses)
}

func (qb *queryBuilder) addWhere(clauses ...string) {
	for _, clause := range clauses {
		if len(clause) > 0 {
			qb.whereClauses = append(qb.whereClauses, clause)
		}
	}
}

func (qb *queryBuilder) addHaving(clauses ...string) {
	for _, clause := range clauses {
		if len(clause) > 0 {
			qb.havingClauses = append(qb.havingClauses, clause)
		}
	}
}

func (qb *queryBuilder) addArg(args ...interface{}) {
	qb.args = append(qb.args, args...)
}

func (qb *queryBuilder) handleIntCriterionInput(c *IntCriterionInput, column string) {
	if c != nil {
		clause, count := getIntCriterionWhereClause(column, *c)
		qb.addWhere(clause)
		if count == 1 {
			qb.addArg(c.Value)
		}
	}
}

func (qb *queryBuilder) handleStringCriterionInput(c *StringCriterionInput, column string) {
	if c != nil {
		if modifier := c.Modifier; c.Modifier.IsValid() {
			switch modifier {
			case CriterionModifierIncludes:
				clause, thisArgs := getSearchBinding([]string{column}, c.Value, false)
				qb.addWhere(clause)
				qb.addArg(thisArgs...)
			case CriterionModifierExcludes:
				clause, thisArgs := getSearchBinding([]string{column}, c.Value, true)
				qb.addWhere(clause)
				qb.addArg(thisArgs...)
			case CriterionModifierEquals:
				qb.addWhere(column + " LIKE ?")
				qb.addArg(c.Value)
			case CriterionModifierNotEquals:
				qb.addWhere(column + " NOT LIKE ?")
				qb.addArg(c.Value)
			default:
				clause, count := getSimpleCriterionClause(modifier, "?")
				qb.addWhere(column + " " + clause)
				if count == 1 {
					qb.addArg(c.Value)
				}
			}
		}
	}
}

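Illustrative sketch, not part of this commit: how the queryBuilder above accumulates clauses. exampleSceneQuery is hypothetical; the helpers (selectDistinctIDs, getSort, getPagination, handleIntCriterionInput, executeFind) are the ones defined in this file.

	func exampleSceneQuery(rating *IntCriterionInput) ([]int, int) {
		qb := queryBuilder{
			tableName: "scenes",
			body:      selectDistinctIDs("scenes"),
		}
		// adds "scenes.rating <op> ?" to whereClauses and the value to args, if a criterion is set
		qb.handleIntCriterionInput(rating, "scenes.rating")
		qb.sortAndPagination = getSort("title", "ASC", "scenes") + getPagination(&FindFilterType{})
		// returns the matching ids plus the unpaginated total count
		return qb.executeFind()
	}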
var randomSortFloat = rand.Float64()

func selectAll(tableName string) string {
	idColumn := getColumn(tableName, "*")
	return "SELECT " + idColumn + " FROM " + tableName + " "
}

func selectDistinctIDs(tableName string) string {
	idColumn := getColumn(tableName, "id")
	return "SELECT DISTINCT " + idColumn + " FROM " + tableName + " "
}

func buildCountQuery(query string) string {
	return "SELECT COUNT(*) as count FROM (" + query + ") as temp"
}

func getColumn(tableName string, columnName string) string {
	return tableName + "." + columnName
}

func getPagination(findFilter *FindFilterType) string {
	if findFilter == nil {
		panic("nil find filter for pagination")
	}

	var page int
	if findFilter.Page == nil || *findFilter.Page < 1 {
		page = 1
	} else {
		page = *findFilter.Page
	}

	var perPage int
	if findFilter.PerPage == nil {
		perPage = 25
	} else {
		perPage = *findFilter.PerPage
	}

	if perPage > 1000 {
		perPage = 1000
	} else if perPage < 1 {
		perPage = 1
	}

	page = (page - 1) * perPage
	return " LIMIT " + strconv.Itoa(perPage) + " OFFSET " + strconv.Itoa(page) + " "
}

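Illustrative sketch, not part of this commit: what getPagination yields for a couple of representative filters, following the clamping rules above. examplePagination is a hypothetical helper.

	func examplePagination() string {
		page, perPage := 3, 40
		// -> " LIMIT 40 OFFSET 80 "; a filter with nil Page/PerPage yields " LIMIT 25 OFFSET 0 "
		return getPagination(&FindFilterType{Page: &page, PerPage: &perPage})
	}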
|
||||
func getSort(sort string, direction string, tableName string) string {
|
||||
if direction != "ASC" && direction != "DESC" {
|
||||
direction = "ASC"
|
||||
}
|
||||
|
||||
const randomSeedPrefix = "random_"
|
||||
|
||||
if strings.HasSuffix(sort, "_count") {
|
||||
var relationTableName = strings.TrimSuffix(sort, "_count") // TODO: pluralize?
|
||||
colName := getColumn(relationTableName, "id")
|
||||
return " ORDER BY COUNT(distinct " + colName + ") " + direction
|
||||
} else if strings.Compare(sort, "filesize") == 0 {
|
||||
colName := getColumn(tableName, "size")
|
||||
return " ORDER BY cast(" + colName + " as integer) " + direction
|
||||
} else if strings.HasPrefix(sort, randomSeedPrefix) {
|
||||
// seed as a parameter from the UI
|
||||
// turn the provided seed into a float
|
||||
seedStr := "0." + sort[len(randomSeedPrefix):]
|
||||
seed, err := strconv.ParseFloat(seedStr, 32)
|
||||
if err != nil {
|
||||
// fallback to default seed
|
||||
seed = randomSortFloat
|
||||
}
|
||||
return getRandomSort(tableName, direction, seed)
|
||||
} else if strings.Compare(sort, "random") == 0 {
|
||||
return getRandomSort(tableName, direction, randomSortFloat)
|
||||
} else {
|
||||
colName := getColumn(tableName, sort)
|
||||
var additional string
|
||||
if tableName == "scenes" {
|
||||
additional = ", bitrate DESC, framerate DESC, scenes.rating DESC, scenes.duration DESC"
|
||||
} else if tableName == "scene_markers" {
|
||||
additional = ", scene_markers.scene_id ASC, scene_markers.seconds ASC"
|
||||
}
|
||||
if strings.Compare(sort, "name") == 0 {
|
||||
return " ORDER BY " + colName + " COLLATE NOCASE " + direction + additional
|
||||
}
|
||||
if strings.Compare(sort, "title") == 0 {
|
||||
return " ORDER BY " + colName + " COLLATE NATURAL_CS " + direction + additional
|
||||
}
|
||||
|
||||
return " ORDER BY " + colName + " " + direction + additional
|
||||
}
|
||||
}
|
||||
|
||||
func getRandomSort(tableName string, direction string, seed float64) string {
|
||||
// https://stackoverflow.com/a/24511461
|
||||
colName := getColumn(tableName, "id")
|
||||
randomSortString := strconv.FormatFloat(seed, 'f', 16, 32)
|
||||
return " ORDER BY " + "(substr(" + colName + " * " + randomSortString + ", length(" + colName + ") + 2))" + " " + direction
|
||||
}
|
||||
|
||||
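Illustrative sketch, not part of this commit: representative return values of getSort, derived from the branches above. exampleSceneOrdering is a hypothetical helper.

	func exampleSceneOrdering() string {
		// "tags_count"/"DESC" -> " ORDER BY COUNT(distinct tags.id) DESC"
		// "filesize"/"ASC"    -> " ORDER BY cast(scenes.size as integer) ASC"
		// "title"/"ASC"       -> " ORDER BY scenes.title COLLATE NATURAL_CS ASC, bitrate DESC, ..." (scene tiebreakers appended)
		return getSort("title", "ASC", "scenes")
	}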
func getSearchBinding(columns []string, q string, not bool) (string, []interface{}) {
	var likeClauses []string
	var args []interface{}

	notStr := ""
	binaryType := " OR "
	if not {
		notStr = " NOT "
		binaryType = " AND "
	}

	queryWords := strings.Split(q, " ")
	trimmedQuery := strings.Trim(q, "\"")
	if trimmedQuery == q {
		// Search for any word
		for _, word := range queryWords {
			for _, column := range columns {
				likeClauses = append(likeClauses, column+notStr+" LIKE ?")
				args = append(args, "%"+word+"%")
			}
		}
	} else {
		// Search the exact query
		for _, column := range columns {
			likeClauses = append(likeClauses, column+notStr+" LIKE ?")
			args = append(args, "%"+trimmedQuery+"%")
		}
	}
	likes := strings.Join(likeClauses, binaryType)

	return "(" + likes + ")", args
}

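Illustrative sketch, not part of this commit: what getSearchBinding produces for a two-word query over a single column. exampleTitleSearch is a hypothetical helper.

	func exampleTitleSearch() (string, []interface{}) {
		// "foo bar"     -> ("(scenes.title LIKE ? OR scenes.title LIKE ?)", ["%foo%", "%bar%"])
		// "\"foo bar\"" -> exact-phrase match: ("(scenes.title LIKE ?)", ["%foo bar%"])
		return getSearchBinding([]string{"scenes.title"}, "foo bar", false)
	}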
func getInBinding(length int) string {
	bindings := strings.Repeat("?, ", length)
	bindings = strings.TrimRight(bindings, ", ")
	return "(" + bindings + ")"
}

func getCriterionModifierBinding(criterionModifier CriterionModifier, value interface{}) (string, int) {
	var length int
	switch x := value.(type) {
	case []string:
		length = len(x)
	case []int:
		length = len(x)
	default:
		length = 1
	}
	if modifier := criterionModifier.String(); criterionModifier.IsValid() {
		switch modifier {
		case "EQUALS", "NOT_EQUALS", "GREATER_THAN", "LESS_THAN", "IS_NULL", "NOT_NULL":
			return getSimpleCriterionClause(criterionModifier, "?")
		case "INCLUDES":
			return "IN " + getInBinding(length), length // TODO?
		case "EXCLUDES":
			return "NOT IN " + getInBinding(length), length // TODO?
		default:
			logger.Errorf("todo")
			return "= ?", 1 // TODO
		}
	}
	return "= ?", 1 // TODO
}

func getSimpleCriterionClause(criterionModifier CriterionModifier, rhs string) (string, int) {
	if modifier := criterionModifier.String(); criterionModifier.IsValid() {
		switch modifier {
		case "EQUALS":
			return "= " + rhs, 1
		case "NOT_EQUALS":
			return "!= " + rhs, 1
		case "GREATER_THAN":
			return "> " + rhs, 1
		case "LESS_THAN":
			return "< " + rhs, 1
		case "IS_NULL":
			return "IS NULL", 0
		case "NOT_NULL":
			return "IS NOT NULL", 0
		default:
			logger.Errorf("todo")
			return "= ?", 1 // TODO
		}
	}

	return "= ?", 1 // TODO
}

func getIntCriterionWhereClause(column string, input IntCriterionInput) (string, int) {
	binding, count := getCriterionModifierBinding(input.Modifier, input.Value)
	return column + " " + binding, count
}

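Illustrative sketch, not part of this commit: a single criterion resolved into a where fragment by the helpers above. exampleAgeClause is a hypothetical helper.

	func exampleAgeClause() (string, int) {
		// -> ("performers.age > ?", 1): one placeholder left to bind with the value 30
		return getIntCriterionWhereClause("performers.age", IntCriterionInput{
			Value:    30,
			Modifier: CriterionModifierGreaterThan,
		})
	}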
// returns where clause and having clause
func getMultiCriterionClause(primaryTable, foreignTable, joinTable, primaryFK, foreignFK string, criterion *MultiCriterionInput) (string, string) {
	whereClause := ""
	havingClause := ""
	if criterion.Modifier == CriterionModifierIncludes {
		// includes any of the provided ids
		whereClause = foreignTable + ".id IN " + getInBinding(len(criterion.Value))
	} else if criterion.Modifier == CriterionModifierIncludesAll {
		// includes all of the provided ids
		whereClause = foreignTable + ".id IN " + getInBinding(len(criterion.Value))
		havingClause = "count(distinct " + foreignTable + ".id) IS " + strconv.Itoa(len(criterion.Value))
	} else if criterion.Modifier == CriterionModifierExcludes {
		// excludes all of the provided ids
		if joinTable != "" {
			whereClause = "not exists (select " + joinTable + "." + primaryFK + " from " + joinTable + " where " + joinTable + "." + primaryFK + " = " + primaryTable + ".id and " + joinTable + "." + foreignFK + " in " + getInBinding(len(criterion.Value)) + ")"
		} else {
			whereClause = "not exists (select s.id from " + primaryTable + " as s where s.id = " + primaryTable + ".id and s." + foreignFK + " in " + getInBinding(len(criterion.Value)) + ")"
		}
	}

	return whereClause, havingClause
}

func runIdsQuery(query string, args []interface{}) ([]int, error) {
	var result []struct {
		Int int `db:"id"`
	}
	if err := database.DB.Select(&result, query, args...); err != nil && err != sql.ErrNoRows {
		return []int{}, err
	}

	vsm := make([]int, len(result))
	for i, v := range result {
		vsm[i] = v.Int
	}
	return vsm, nil
}

func runCountQuery(query string, args []interface{}) (int, error) {
	// Perform query and fetch result
	result := struct {
		Int int `db:"count"`
	}{0}
	if err := database.DB.Get(&result, query, args...); err != nil && err != sql.ErrNoRows {
		return 0, err
	}

	return result.Int, nil
}

func runSumQuery(query string, args []interface{}) (float64, error) {
	// Perform query and fetch result
	result := struct {
		Float64 float64 `db:"sum"`
	}{0}
	if err := database.DB.Get(&result, query, args...); err != nil && err != sql.ErrNoRows {
		return 0, err
	}

	return result.Float64, nil
}

func executeFindQuery(tableName string, body string, args []interface{}, sortAndPagination string, whereClauses []string, havingClauses []string) ([]int, int) {
	if len(whereClauses) > 0 {
		body = body + " WHERE " + strings.Join(whereClauses, " AND ") // TODO handle AND or OR
	}
	body = body + " GROUP BY " + tableName + ".id "
	if len(havingClauses) > 0 {
		body = body + " HAVING " + strings.Join(havingClauses, " AND ") // TODO handle AND or OR
	}

	countQuery := buildCountQuery(body)
	idsQuery := body + sortAndPagination

	// Perform query and fetch result
	logger.Tracef("SQL: %s, args: %v", idsQuery, args)

	countResult, countErr := runCountQuery(countQuery, args)
	idsResult, idsErr := runIdsQuery(idsQuery, args)

	if countErr != nil {
		logger.Errorf("Error executing count query with SQL: %s, args: %v, error: %s", countQuery, args, countErr.Error())
		panic(countErr)
	}
	if idsErr != nil {
		logger.Errorf("Error executing find query with SQL: %s, args: %v, error: %s", idsQuery, args, idsErr.Error())
		panic(idsErr)
	}

	return idsResult, countResult
}

func executeDeleteQuery(tableName string, id string, tx *sqlx.Tx) error {
	if tx == nil {
		panic("must use a transaction")
	}
	idColumnName := getColumn(tableName, "id")
	_, err := tx.Exec(
		`DELETE FROM `+tableName+` WHERE `+idColumnName+` = ?`,
		id,
	)
	return err
}

func ensureTx(tx *sqlx.Tx) {
	if tx == nil {
		panic("must use a transaction")
	}
}

// https://github.com/jmoiron/sqlx/issues/410
// sqlGenKeys is used for passing a struct and returning a string
// of keys for non-empty key:values. These keys are formatted
// keyname=:keyname with a comma separating them
func SQLGenKeys(i interface{}) string {
	return sqlGenKeys(i, false)
}

// support a partial interface. When a partial interface is provided,
// keys will always be included if the value is not null. The partial
// interface must therefore consist of pointers
func SQLGenKeysPartial(i interface{}) string {
	return sqlGenKeys(i, true)
}

func sqlGenKeys(i interface{}, partial bool) string {
	var query []string
	v := reflect.ValueOf(i)
	for i := 0; i < v.NumField(); i++ {
		// get key for struct tag
		rawKey := v.Type().Field(i).Tag.Get("db")
		key := strings.Split(rawKey, ",")[0]
		if key == "id" {
			continue
		}

		var add bool
		switch t := v.Field(i).Interface().(type) {
		case string:
			add = partial || t != ""
		case int:
			add = partial || t != 0
		case float64:
			add = partial || t != 0
		case bool:
			add = true
		case SQLiteTimestamp:
			add = partial || !t.Timestamp.IsZero()
		case NullSQLiteTimestamp:
			add = partial || t.Valid
		case SQLiteDate:
			add = partial || t.Valid
		case sql.NullString:
			add = partial || t.Valid
		case sql.NullBool:
			add = partial || t.Valid
		case sql.NullInt64:
			add = partial || t.Valid
		case sql.NullFloat64:
			add = partial || t.Valid
		default:
			reflectValue := reflect.ValueOf(t)
			isNil := reflectValue.IsNil()
			add = !isNil
		}

		if add {
			query = append(query, fmt.Sprintf("%s=:%s", key, key))
		}
	}
	return strings.Join(query, ", ")
}

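Illustrative sketch, not part of this commit: how SQLGenKeys is typically spliced into a named UPDATE statement, mirroring the studio query builder below. exampleStudioUpdateSQL is a hypothetical helper; the example SET clause in the comment depends on which fields are populated.

	func exampleStudioUpdateSQL(updatedStudio Studio) string {
		// Only fields with non-zero values (or Valid sql.Null* / SQLite* wrappers) are emitted,
		// and "id" is always skipped, so a sparsely populated struct yields a minimal SET clause
		// such as "name=:name, updated_at=:updated_at".
		return "UPDATE studios SET " + SQLGenKeys(updatedStudio) + " WHERE studios.id = :id"
	}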
func getImage(tx *sqlx.Tx, query string, args ...interface{}) ([]byte, error) {
	var rows *sqlx.Rows
	var err error
	if tx != nil {
		rows, err = tx.Queryx(query, args...)
	} else {
		rows, err = database.DB.Queryx(query, args...)
	}

	if err != nil && err != sql.ErrNoRows {
		return nil, err
	}
	defer rows.Close()

	var ret []byte
	if rows.Next() {
		if err := rows.Scan(&ret); err != nil {
			return nil, err
		}
	}

	if err := rows.Err(); err != nil {
		return nil, err
	}

	return ret, nil
}
@@ -1,307 +0,0 @@
package models

import (
	"database/sql"
	"fmt"

	"github.com/jmoiron/sqlx"
	"github.com/stashapp/stash/pkg/database"
)

type StudioQueryBuilder struct{}

func NewStudioQueryBuilder() StudioQueryBuilder {
	return StudioQueryBuilder{}
}

func (qb *StudioQueryBuilder) Create(newStudio Studio, tx *sqlx.Tx) (*Studio, error) {
	ensureTx(tx)
	result, err := tx.NamedExec(
		`INSERT INTO studios (checksum, name, url, parent_id, created_at, updated_at)
			VALUES (:checksum, :name, :url, :parent_id, :created_at, :updated_at)
		`,
		newStudio,
	)
	if err != nil {
		return nil, err
	}
	studioID, err := result.LastInsertId()
	if err != nil {
		return nil, err
	}

	if err := tx.Get(&newStudio, `SELECT * FROM studios WHERE id = ? LIMIT 1`, studioID); err != nil {
		return nil, err
	}
	return &newStudio, nil
}

func (qb *StudioQueryBuilder) Update(updatedStudio StudioPartial, tx *sqlx.Tx) (*Studio, error) {
	ensureTx(tx)
	_, err := tx.NamedExec(
		`UPDATE studios SET `+SQLGenKeysPartial(updatedStudio)+` WHERE studios.id = :id`,
		updatedStudio,
	)
	if err != nil {
		return nil, err
	}

	var ret Studio
	if err := tx.Get(&ret, `SELECT * FROM studios WHERE id = ? LIMIT 1`, updatedStudio.ID); err != nil {
		return nil, err
	}
	return &ret, nil
}

func (qb *StudioQueryBuilder) UpdateFull(updatedStudio Studio, tx *sqlx.Tx) (*Studio, error) {
	ensureTx(tx)
	_, err := tx.NamedExec(
		`UPDATE studios SET `+SQLGenKeys(updatedStudio)+` WHERE studios.id = :id`,
		updatedStudio,
	)
	if err != nil {
		return nil, err
	}

	var ret Studio
	if err := tx.Get(&ret, `SELECT * FROM studios WHERE id = ? LIMIT 1`, updatedStudio.ID); err != nil {
		return nil, err
	}
	return &ret, nil
}

func (qb *StudioQueryBuilder) Destroy(id string, tx *sqlx.Tx) error {
	// remove studio from scenes
	_, err := tx.Exec("UPDATE scenes SET studio_id = null WHERE studio_id = ?", id)
	if err != nil {
		return err
	}

	// remove studio from scraped items
	_, err = tx.Exec("UPDATE scraped_items SET studio_id = null WHERE studio_id = ?", id)
	if err != nil {
		return err
	}

	return executeDeleteQuery("studios", id, tx)
}

func (qb *StudioQueryBuilder) Find(id int, tx *sqlx.Tx) (*Studio, error) {
	query := "SELECT * FROM studios WHERE id = ? LIMIT 1"
	args := []interface{}{id}
	return qb.queryStudio(query, args, tx)
}

func (qb *StudioQueryBuilder) FindMany(ids []int) ([]*Studio, error) {
	var studios []*Studio
	for _, id := range ids {
		studio, err := qb.Find(id, nil)
		if err != nil {
			return nil, err
		}

		if studio == nil {
			return nil, fmt.Errorf("studio with id %d not found", id)
		}

		studios = append(studios, studio)
	}

	return studios, nil
}

func (qb *StudioQueryBuilder) FindChildren(id int, tx *sqlx.Tx) ([]*Studio, error) {
	query := "SELECT studios.* FROM studios WHERE studios.parent_id = ?"
	args := []interface{}{id}
	return qb.queryStudios(query, args, tx)
}

func (qb *StudioQueryBuilder) FindBySceneID(sceneID int) (*Studio, error) {
	query := "SELECT studios.* FROM studios JOIN scenes ON studios.id = scenes.studio_id WHERE scenes.id = ? LIMIT 1"
	args := []interface{}{sceneID}
	return qb.queryStudio(query, args, nil)
}

func (qb *StudioQueryBuilder) FindByName(name string, tx *sqlx.Tx, nocase bool) (*Studio, error) {
	query := "SELECT * FROM studios WHERE name = ?"
	if nocase {
		query += " COLLATE NOCASE"
	}
	query += " LIMIT 1"
	args := []interface{}{name}
	return qb.queryStudio(query, args, tx)
}

func (qb *StudioQueryBuilder) Count() (int, error) {
	return runCountQuery(buildCountQuery("SELECT studios.id FROM studios"), nil)
}

func (qb *StudioQueryBuilder) All() ([]*Studio, error) {
	return qb.queryStudios(selectAll("studios")+qb.getStudioSort(nil), nil, nil)
}

func (qb *StudioQueryBuilder) AllSlim() ([]*Studio, error) {
	return qb.queryStudios("SELECT studios.id, studios.name, studios.parent_id FROM studios "+qb.getStudioSort(nil), nil, nil)
}

func (qb *StudioQueryBuilder) Query(studioFilter *StudioFilterType, findFilter *FindFilterType) ([]*Studio, int) {
	if studioFilter == nil {
		studioFilter = &StudioFilterType{}
	}
	if findFilter == nil {
		findFilter = &FindFilterType{}
	}

	var whereClauses []string
	var havingClauses []string
	var args []interface{}
	body := selectDistinctIDs("studios")
	body += `
		left join scenes on studios.id = scenes.studio_id
		left join studio_stash_ids on studio_stash_ids.studio_id = studios.id
	`

	if q := findFilter.Q; q != nil && *q != "" {
		searchColumns := []string{"studios.name"}

		clause, thisArgs := getSearchBinding(searchColumns, *q, false)
		whereClauses = append(whereClauses, clause)
		args = append(args, thisArgs...)
	}

	if parentsFilter := studioFilter.Parents; parentsFilter != nil && len(parentsFilter.Value) > 0 {
		body += `
			left join studios as parent_studio on parent_studio.id = studios.parent_id
		`

		for _, studioID := range parentsFilter.Value {
			args = append(args, studioID)
		}

		whereClause, havingClause := getMultiCriterionClause("studios", "parent_studio", "", "", "parent_id", parentsFilter)
		whereClauses = appendClause(whereClauses, whereClause)
		havingClauses = appendClause(havingClauses, havingClause)
	}

	if stashIDFilter := studioFilter.StashID; stashIDFilter != nil {
		whereClauses = append(whereClauses, "studio_stash_ids.stash_id = ?")
		args = append(args, stashIDFilter)
	}

	if isMissingFilter := studioFilter.IsMissing; isMissingFilter != nil && *isMissingFilter != "" {
		switch *isMissingFilter {
		case "image":
			body += `left join studios_image on studios_image.studio_id = studios.id
			`
			whereClauses = appendClause(whereClauses, "studios_image.studio_id IS NULL")
		case "stash_id":
			whereClauses = appendClause(whereClauses, "studio_stash_ids.studio_id IS NULL")
		default:
			whereClauses = appendClause(whereClauses, "studios."+*isMissingFilter+" IS NULL")
		}
	}

	sortAndPagination := qb.getStudioSort(findFilter) + getPagination(findFilter)
	idsResult, countResult := executeFindQuery("studios", body, args, sortAndPagination, whereClauses, havingClauses)

	var studios []*Studio
	for _, id := range idsResult {
		studio, _ := qb.Find(id, nil)
		studios = append(studios, studio)
	}

	return studios, countResult
}

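Illustrative sketch, not part of this commit: filtering studios by parent through Query above. findChildStudiosExample is hypothetical; the filter types and modifiers are the ones already used in this file.

	func findChildStudiosExample(parentID int, nameQuery string) ([]*Studio, int) {
		qb := NewStudioQueryBuilder()
		parents := MultiCriterionInput{
			Value:    []string{strconv.Itoa(parentID)},
			Modifier: CriterionModifierIncludes,
		}
		// returns the page of matching studios plus the total match count
		return qb.Query(
			&StudioFilterType{Parents: &parents},
			&FindFilterType{Q: &nameQuery},
		)
	}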
func (qb *StudioQueryBuilder) getStudioSort(findFilter *FindFilterType) string {
	var sort string
	var direction string
	if findFilter == nil {
		sort = "name"
		direction = "ASC"
	} else {
		sort = findFilter.GetSort("name")
		direction = findFilter.GetDirection()
	}
	return getSort(sort, direction, "studios")
}

func (qb *StudioQueryBuilder) queryStudio(query string, args []interface{}, tx *sqlx.Tx) (*Studio, error) {
	results, err := qb.queryStudios(query, args, tx)
	if err != nil || len(results) < 1 {
		return nil, err
	}
	return results[0], nil
}

func (qb *StudioQueryBuilder) queryStudios(query string, args []interface{}, tx *sqlx.Tx) ([]*Studio, error) {
	var rows *sqlx.Rows
	var err error
	if tx != nil {
		rows, err = tx.Queryx(query, args...)
	} else {
		rows, err = database.DB.Queryx(query, args...)
	}

	if err != nil && err != sql.ErrNoRows {
		return nil, err
	}
	defer rows.Close()

	studios := make([]*Studio, 0)
	for rows.Next() {
		studio := Studio{}
		if err := rows.StructScan(&studio); err != nil {
			return nil, err
		}
		studios = append(studios, &studio)
	}

	if err := rows.Err(); err != nil {
		return nil, err
	}

	return studios, nil
}

func (qb *StudioQueryBuilder) UpdateStudioImage(studioID int, image []byte, tx *sqlx.Tx) error {
	ensureTx(tx)

	// Delete the existing cover and then create new
	if err := qb.DestroyStudioImage(studioID, tx); err != nil {
		return err
	}

	_, err := tx.Exec(
		`INSERT INTO studios_image (studio_id, image) VALUES (?, ?)`,
		studioID,
		image,
	)

	return err
}

func (qb *StudioQueryBuilder) DestroyStudioImage(studioID int, tx *sqlx.Tx) error {
	ensureTx(tx)

	// Delete the existing joins
	_, err := tx.Exec("DELETE FROM studios_image WHERE studio_id = ?", studioID)
	if err != nil {
		return err
	}
	return err
}

func (qb *StudioQueryBuilder) GetStudioImage(studioID int, tx *sqlx.Tx) ([]byte, error) {
	query := `SELECT image from studios_image WHERE studio_id = ?`
	return getImage(tx, query, studioID)
}

func (qb *StudioQueryBuilder) HasStudioImage(studioID int) (bool, error) {
	ret, err := runCountQuery(buildCountQuery("SELECT studio_id from studios_image WHERE studio_id = ?"), []interface{}{studioID})
	if err != nil {
		return false, err
	}

	return ret == 1, nil
}
Some files were not shown because too many files have changed in this diff