diff --git a/go.mod b/go.mod index 08c335f7e..56e694c6b 100644 --- a/go.mod +++ b/go.mod @@ -8,6 +8,7 @@ require ( github.com/chromedp/cdproto v0.0.0-20200608134039-8a80cdaf865c github.com/chromedp/chromedp v0.5.3 github.com/disintegration/imaging v1.6.0 + github.com/facebookgo/symwalk v0.0.0-20150726040526-42004b9f3222 github.com/go-chi/chi v4.0.2+incompatible github.com/gobuffalo/packr/v2 v2.0.2 github.com/golang-migrate/migrate/v4 v4.3.1 diff --git a/go.sum b/go.sum index 7a51679a7..237496e4e 100644 --- a/go.sum +++ b/go.sum @@ -121,6 +121,8 @@ github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5m github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= github.com/edsrzf/mmap-go v0.0.0-20170320065105-0bce6a688712/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M= +github.com/facebookgo/symwalk v0.0.0-20150726040526-42004b9f3222 h1:ivxAxcE9py2xLAqpcEwN7sN711aLfEWgh3cY0aha7uY= +github.com/facebookgo/symwalk v0.0.0-20150726040526-42004b9f3222/go.mod h1:PgrCjL2+FgkITqxQI+erRTONtAv4JkpOzun5ozKW/Jg= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/structs v1.0.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= diff --git a/gqlgen.yml b/gqlgen.yml index 0d1a780e5..05551cb7b 100644 --- a/gqlgen.yml +++ b/gqlgen.yml @@ -16,6 +16,10 @@ struct_tag: gqlgen models: Gallery: model: github.com/stashapp/stash/pkg/models.Gallery + Image: + model: github.com/stashapp/stash/pkg/models.Image + ImageFileType: + model: github.com/stashapp/stash/pkg/models.ImageFileType Performer: model: github.com/stashapp/stash/pkg/models.Performer Scene: diff --git a/graphql/documents/data/config.graphql b/graphql/documents/data/config.graphql index 384ba85ee..663788299 100644 --- a/graphql/documents/data/config.graphql +++ b/graphql/documents/data/config.graphql @@ -1,5 +1,9 @@ fragment ConfigGeneralData on ConfigGeneralResult { - stashes + stashes { + path + excludeVideo + excludeImage + } databasePath generatedPath cachePath @@ -19,7 +23,12 @@ fragment ConfigGeneralData on ConfigGeneralResult { logOut logLevel logAccess + createGalleriesFromFolders + videoExtensions + imageExtensions + galleryExtensions excludes + imageExcludes scraperUserAgent scraperCDPPath stashBoxes { diff --git a/graphql/documents/data/gallery.graphql b/graphql/documents/data/gallery.graphql index 2f77518b4..af99c77f7 100644 --- a/graphql/documents/data/gallery.graphql +++ b/graphql/documents/data/gallery.graphql @@ -3,10 +3,25 @@ fragment GalleryData on Gallery { checksum path title - files { - index - name - path + date + url + details + rating + images { + ...SlimImageData + } + cover { + ...SlimImageData + } + studio { + ...StudioData + } + tags { + ...TagData + } + + performers { + ...PerformerData } scene { id diff --git a/graphql/documents/data/image-slim.graphql b/graphql/documents/data/image-slim.graphql new file mode 100644 index 000000000..41789f5f5 --- /dev/null +++ b/graphql/documents/data/image-slim.graphql @@ -0,0 +1,43 @@ +fragment SlimImageData on Image { + id + checksum + title + rating + o_counter + path + + file { + size + width + height + } + + paths { + thumbnail + image + } + + galleries { + id + path + title + } + + studio { + id + name + image_path + } + + tags { + id + name + } + + performers { + 
id + name + favorite + image_path + } +} diff --git a/graphql/documents/data/image.graphql b/graphql/documents/data/image.graphql new file mode 100644 index 000000000..9bd94b633 --- /dev/null +++ b/graphql/documents/data/image.graphql @@ -0,0 +1,35 @@ +fragment ImageData on Image { + id + checksum + title + rating + o_counter + path + + file { + size + width + height + } + + paths { + thumbnail + image + } + + galleries { + ...GalleryData + } + + studio { + ...StudioData + } + + tags { + ...TagData + } + + performers { + ...PerformerData + } +} diff --git a/graphql/documents/mutations/gallery.graphql b/graphql/documents/mutations/gallery.graphql new file mode 100644 index 000000000..8cb5f82b8 --- /dev/null +++ b/graphql/documents/mutations/gallery.graphql @@ -0,0 +1,97 @@ +mutation GalleryCreate( + $title: String!, + $details: String, + $url: String, + $date: String, + $rating: Int, + $scene_id: ID, + $studio_id: ID, + $performer_ids: [ID!] = [], + $tag_ids: [ID!] = []) { + + galleryCreate(input: { + title: $title, + details: $details, + url: $url, + date: $date, + rating: $rating, + scene_id: $scene_id, + studio_id: $studio_id, + tag_ids: $tag_ids, + performer_ids: $performer_ids + }) { + ...GalleryData + } +} + +mutation GalleryUpdate( + $id: ID!, + $title: String, + $details: String, + $url: String, + $date: String, + $rating: Int, + $scene_id: ID, + $studio_id: ID, + $performer_ids: [ID!] = [], + $tag_ids: [ID!] = []) { + + galleryUpdate(input: { + id: $id, + title: $title, + details: $details, + url: $url, + date: $date, + rating: $rating, + scene_id: $scene_id, + studio_id: $studio_id, + tag_ids: $tag_ids, + performer_ids: $performer_ids + }) { + ...GalleryData + } +} + +mutation BulkGalleryUpdate( + $ids: [ID!] = [], + $url: String, + $date: String, + $details: String, + $rating: Int, + $scene_id: ID, + $studio_id: ID, + $tag_ids: BulkUpdateIds, + $performer_ids: BulkUpdateIds) { + + bulkGalleryUpdate(input: { + ids: $ids, + details: $details, + url: $url, + date: $date, + rating: $rating, + scene_id: $scene_id, + studio_id: $studio_id, + tag_ids: $tag_ids, + performer_ids: $performer_ids + }) { + ...GalleryData + } +} + +mutation GalleriesUpdate($input : [GalleryUpdateInput!]!) { + galleriesUpdate(input: $input) { + ...GalleryData + } +} + +mutation GalleryDestroy($ids: [ID!]!, $delete_file: Boolean, $delete_generated : Boolean) { + galleryDestroy(input: {ids: $ids, delete_file: $delete_file, delete_generated: $delete_generated}) +} + +mutation AddGalleryImages($gallery_id: ID!, $image_ids: [ID!]!) { + addGalleryImages(input: {gallery_id: $gallery_id, image_ids: $image_ids}) +} + +mutation RemoveGalleryImages($gallery_id: ID!, $image_ids: [ID!]!) { + removeGalleryImages(input: {gallery_id: $gallery_id, image_ids: $image_ids}) +} diff --git a/graphql/documents/mutations/image.graphql b/graphql/documents/mutations/image.graphql new file mode 100644 index 000000000..7e08efa73 --- /dev/null +++ b/graphql/documents/mutations/image.graphql @@ -0,0 +1,69 @@ +mutation ImageUpdate( + $id: ID!, + $title: String, + $rating: Int, + $studio_id: ID, + $gallery_ids: [ID!] = [], + $performer_ids: [ID!] = [], + $tag_ids: [ID!] = []) { + + imageUpdate(input: { + id: $id, + title: $title, + rating: $rating, + studio_id: $studio_id, + gallery_ids: $gallery_ids, + performer_ids: $performer_ids, + tag_ids: $tag_ids + }) { + ...ImageData + } +} + +mutation BulkImageUpdate( + $ids: [ID!] 
= [],
+  $title: String,
+  $rating: Int,
+  $studio_id: ID,
+  $gallery_ids: BulkUpdateIds,
+  $performer_ids: BulkUpdateIds,
+  $tag_ids: BulkUpdateIds) {
+
+  bulkImageUpdate(input: {
+    ids: $ids,
+    title: $title,
+    rating: $rating,
+    studio_id: $studio_id,
+    gallery_ids: $gallery_ids,
+    performer_ids: $performer_ids,
+    tag_ids: $tag_ids
+  }) {
+    ...ImageData
+  }
+}
+
+mutation ImagesUpdate($input : [ImageUpdateInput!]!) {
+  imagesUpdate(input: $input) {
+    ...ImageData
+  }
+}
+
+mutation ImageIncrementO($id: ID!) {
+  imageIncrementO(id: $id)
+}
+
+mutation ImageDecrementO($id: ID!) {
+  imageDecrementO(id: $id)
+}
+
+mutation ImageResetO($id: ID!) {
+  imageResetO(id: $id)
+}
+
+mutation ImageDestroy($id: ID!, $delete_file: Boolean, $delete_generated : Boolean) {
+  imageDestroy(input: {id: $id, delete_file: $delete_file, delete_generated: $delete_generated})
+}
+
+mutation ImagesDestroy($ids: [ID!]!, $delete_file: Boolean, $delete_generated : Boolean) {
+  imagesDestroy(input: {ids: $ids, delete_file: $delete_file, delete_generated: $delete_generated})
+}
diff --git a/graphql/documents/queries/image.graphql b/graphql/documents/queries/image.graphql
new file mode 100644
index 000000000..4d35bc69b
--- /dev/null
+++ b/graphql/documents/queries/image.graphql
@@ -0,0 +1,14 @@
+query FindImages($filter: FindFilterType, $image_filter: ImageFilterType, $image_ids: [Int!]) {
+  findImages(filter: $filter, image_filter: $image_filter, image_ids: $image_ids) {
+    count
+    images {
+      ...SlimImageData
+    }
+  }
+}
+
+query FindImage($id: ID!, $checksum: String) {
+  findImage(id: $id, checksum: $checksum) {
+    ...ImageData
+  }
+}
diff --git a/graphql/documents/queries/misc.graphql b/graphql/documents/queries/misc.graphql
index fe9e85da7..6cb271310 100644
--- a/graphql/documents/queries/misc.graphql
+++ b/graphql/documents/queries/misc.graphql
@@ -40,6 +40,7 @@ query ValidGalleriesForScene($scene_id: ID!) {
   validGalleriesForScene(scene_id: $scene_id) {
     id
     path
+    title
   }
 }
 
diff --git a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql
index e0dfea489..fa27e783c 100644
--- a/graphql/schema/schema.graphql
+++ b/graphql/schema/schema.graphql
@@ -17,6 +17,11 @@ type Query {
   """A function which queries SceneMarker objects"""
   findSceneMarkers(scene_marker_filter: SceneMarkerFilterType filter: FindFilterType): FindSceneMarkersResultType!
 
+  findImage(id: ID, checksum: String): Image
+
+  """A function which queries Image objects"""
+  findImages(image_filter: ImageFilterType, image_ids: [Int!], filter: FindFilterType): FindImagesResultType!
+
   """Find a performer by ID"""
   findPerformer(id: ID!): Performer
   """A function which queries Performer objects"""
@@ -141,6 +146,28 @@ type Mutation {
   sceneMarkerUpdate(input: SceneMarkerUpdateInput!): SceneMarker
   sceneMarkerDestroy(id: ID!): Boolean!
 
+  imageUpdate(input: ImageUpdateInput!): Image
+  bulkImageUpdate(input: BulkImageUpdateInput!): [Image!]
+  imageDestroy(input: ImageDestroyInput!): Boolean!
+  imagesDestroy(input: ImagesDestroyInput!): Boolean!
+  imagesUpdate(input: [ImageUpdateInput!]!): [Image]
+
+  """Increments the o-counter for an image. Returns the new value"""
+  imageIncrementO(id: ID!): Int!
+  """Decrements the o-counter for an image. Returns the new value"""
+  imageDecrementO(id: ID!): Int!
+  """Resets the o-counter for an image to 0. Returns the new value"""
+  imageResetO(id: ID!): Int!
+
+  galleryCreate(input: GalleryCreateInput!): Gallery
+  galleryUpdate(input: GalleryUpdateInput!): Gallery
+  bulkGalleryUpdate(input: BulkGalleryUpdateInput!): [Gallery!]
+ galleryDestroy(input: GalleryDestroyInput!): Boolean! + galleriesUpdate(input: [GalleryUpdateInput!]!): [Gallery] + + addGalleryImages(input: GalleryAddInput!): Boolean! + removeGalleryImages(input: GalleryRemoveInput!): Boolean! + performerCreate(input: PerformerCreateInput!): Performer performerUpdate(input: PerformerUpdateInput!): Performer performerDestroy(input: PerformerDestroyInput!): Boolean! diff --git a/graphql/schema/types/config.graphql b/graphql/schema/types/config.graphql index 94831d1a5..819977d24 100644 --- a/graphql/schema/types/config.graphql +++ b/graphql/schema/types/config.graphql @@ -24,7 +24,7 @@ enum HashAlgorithm { input ConfigGeneralInput { """Array of file paths to content""" - stashes: [String!] + stashes: [StashConfigInput!] """Path to the SQLite database""" databasePath: String """Path to generated files""" @@ -63,8 +63,18 @@ input ConfigGeneralInput { logLevel: String! """Whether to log http access""" logAccess: Boolean! - """Array of file regexp to exclude from Scan""" + """True if galleries should be created from folders with images""" + createGalleriesFromFolders: Boolean! + """Array of video file extensions""" + videoExtensions: [String!] + """Array of image file extensions""" + imageExtensions: [String!] + """Array of gallery zip file extensions""" + galleryExtensions: [String!] + """Array of file regexp to exclude from Video Scans""" excludes: [String!] + """Array of file regexp to exclude from Image Scans""" + imageExcludes: [String!] """Scraper user agent string""" scraperUserAgent: String """Scraper CDP path. Path to chrome executable or remote address""" @@ -75,7 +85,7 @@ input ConfigGeneralInput { type ConfigGeneralResult { """Array of file paths to content""" - stashes: [String!]! + stashes: [StashConfig!]! """Path to the SQLite database""" databasePath: String! """Path to generated files""" @@ -114,8 +124,18 @@ type ConfigGeneralResult { logLevel: String! """Whether to log http access""" logAccess: Boolean! - """Array of file regexp to exclude from Scan""" + """Array of video file extensions""" + videoExtensions: [String!]! + """Array of image file extensions""" + imageExtensions: [String!]! + """Array of gallery zip file extensions""" + galleryExtensions: [String!]! + """True if galleries should be created from folders with images""" + createGalleriesFromFolders: Boolean! + """Array of file regexp to exclude from Video Scans""" excludes: [String!]! + """Array of file regexp to exclude from Image Scans""" + imageExcludes: [String!]! """Scraper user agent string""" scraperUserAgent: String """Scraper CDP path. Path to chrome executable or remote address""" @@ -175,3 +195,16 @@ type Directory { parent: String directories: [String!]! } + +"""Stash configuration details""" +input StashConfigInput { + path: String! + excludeVideo: Boolean! + excludeImage: Boolean! +} + +type StashConfig { + path: String! + excludeVideo: Boolean! + excludeImage: Boolean! 
+} diff --git a/graphql/schema/types/filters.graphql b/graphql/schema/types/filters.graphql index 2f77975b6..9ffeb0167 100644 --- a/graphql/schema/types/filters.graphql +++ b/graphql/schema/types/filters.graphql @@ -107,6 +107,18 @@ input GalleryFilterType { path: StringCriterionInput """Filter to only include galleries missing this property""" is_missing: String + """Filter to include/exclude galleries that were created from zip""" + is_zip: Boolean + """Filter by rating""" + rating: IntCriterionInput + """Filter to only include scenes with this studio""" + studios: MultiCriterionInput + """Filter to only include scenes with these tags""" + tags: MultiCriterionInput + """Filter to only include scenes with these performers""" + performers: MultiCriterionInput + """Filter by number of images in this gallery""" + image_count: IntCriterionInput } input TagFilterType { @@ -120,6 +132,25 @@ input TagFilterType { marker_count: IntCriterionInput } +input ImageFilterType { + """Filter by rating""" + rating: IntCriterionInput + """Filter by o-counter""" + o_counter: IntCriterionInput + """Filter by resolution""" + resolution: ResolutionEnum + """Filter to only include images missing this property""" + is_missing: String + """Filter to only include images with this studio""" + studios: MultiCriterionInput + """Filter to only include images with these tags""" + tags: MultiCriterionInput + """Filter to only include images with these performers""" + performers: MultiCriterionInput + """Filter to only include images with these galleries""" + galleries: MultiCriterionInput +} + enum CriterionModifier { """=""" EQUALS, diff --git a/graphql/schema/types/gallery.graphql b/graphql/schema/types/gallery.graphql index e94be7be1..1e35b0d21 100644 --- a/graphql/schema/types/gallery.graphql +++ b/graphql/schema/types/gallery.graphql @@ -2,12 +2,20 @@ type Gallery { id: ID! checksum: String! - path: String! + path: String title: String + url: String + date: String + details: String + rating: Int scene: Scene + studio: Studio + tags: [Tag!]! + performers: [Performer!]! - """The files in the gallery""" - files: [GalleryFilesType!]! # Resolver + """The images in the gallery""" + images: [Image!]! # Resolver + cover: Image } type GalleryFilesType { @@ -16,7 +24,62 @@ type GalleryFilesType { path: String } +input GalleryCreateInput { + title: String! + url: String + date: String + details: String + rating: Int + scene_id: ID + studio_id: ID + tag_ids: [ID!] + performer_ids: [ID!] +} + +input GalleryUpdateInput { + clientMutationId: String + id: ID! + title: String + url: String + date: String + details: String + rating: Int + scene_id: ID + studio_id: ID + tag_ids: [ID!] + performer_ids: [ID!] +} + +input BulkGalleryUpdateInput { + clientMutationId: String + ids: [ID!] + url: String + date: String + details: String + rating: Int + scene_id: ID + studio_id: ID + tag_ids: BulkUpdateIds + performer_ids: BulkUpdateIds +} + +input GalleryDestroyInput { + ids: [ID!]! + delete_file: Boolean + delete_generated: Boolean +} + type FindGalleriesResultType { count: Int! galleries: [Gallery!]! -} \ No newline at end of file +} + +input GalleryAddInput { + gallery_id: ID! + image_ids: [ID!]! +} + +input GalleryRemoveInput { + gallery_id: ID! + image_ids: [ID!]! +} diff --git a/graphql/schema/types/image.graphql b/graphql/schema/types/image.graphql new file mode 100644 index 000000000..efb90ffaf --- /dev/null +++ b/graphql/schema/types/image.graphql @@ -0,0 +1,68 @@ +type Image { + id: ID! 
+ checksum: String + title: String + rating: Int + o_counter: Int + path: String! + + file: ImageFileType! # Resolver + paths: ImagePathsType! # Resolver + + galleries: [Gallery!]! + studio: Studio + tags: [Tag!]! + performers: [Performer!]! +} + +type ImageFileType { + size: Int + width: Int + height: Int +} + +type ImagePathsType { + thumbnail: String # Resolver + image: String # Resolver +} + +input ImageUpdateInput { + clientMutationId: String + id: ID! + title: String + rating: Int + + studio_id: ID + performer_ids: [ID!] + tag_ids: [ID!] + gallery_ids: [ID!] +} + +input BulkImageUpdateInput { + clientMutationId: String + ids: [ID!] + title: String + rating: Int + + studio_id: ID + performer_ids: BulkUpdateIds + tag_ids: BulkUpdateIds + gallery_ids: BulkUpdateIds +} + +input ImageDestroyInput { + id: ID! + delete_file: Boolean + delete_generated: Boolean +} + +input ImagesDestroyInput { + ids: [ID!]! + delete_file: Boolean + delete_generated: Boolean +} + +type FindImagesResultType { + count: Int! + images: [Image!]! +} \ No newline at end of file diff --git a/graphql/schema/types/metadata.graphql b/graphql/schema/types/metadata.graphql index 41b952587..f87e6406d 100644 --- a/graphql/schema/types/metadata.graphql +++ b/graphql/schema/types/metadata.graphql @@ -7,15 +7,11 @@ input GenerateMetadataInput { previewOptions: GeneratePreviewOptionsInput markers: Boolean! transcodes: Boolean! - """gallery thumbnails for cache usage""" - thumbnails: Boolean! """scene ids to generate for""" sceneIDs: [ID!] """marker ids to generate for""" markerIDs: [ID!] - """gallery ids to generate for""" - galleryIDs: [ID!] """overwrite existing media""" overwrite: Boolean @@ -60,6 +56,7 @@ input ExportObjectTypeInput { input ExportObjectsInput { scenes: ExportObjectTypeInput + images: ExportObjectTypeInput studios: ExportObjectTypeInput performers: ExportObjectTypeInput tags: ExportObjectTypeInput diff --git a/pkg/api/cache_thumbs.go b/pkg/api/cache_thumbs.go deleted file mode 100644 index 0bcbd616c..000000000 --- a/pkg/api/cache_thumbs.go +++ /dev/null @@ -1,72 +0,0 @@ -package api - -import ( - "github.com/stashapp/stash/pkg/logger" - "github.com/stashapp/stash/pkg/manager/paths" - "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/utils" - "io/ioutil" -) - -type thumbBuffer struct { - path string - dir string - data []byte -} - -func newCacheThumb(dir string, path string, data []byte) *thumbBuffer { - t := thumbBuffer{dir: dir, path: path, data: data} - return &t -} - -var writeChan chan *thumbBuffer -var touchChan chan *string - -func startThumbCache() { // TODO add extra wait, close chan code if/when stash gets a stop mode - - writeChan = make(chan *thumbBuffer, 20) - go thumbnailCacheWriter() -} - -//serialize file writes to avoid race conditions -func thumbnailCacheWriter() { - - for thumb := range writeChan { - exists, _ := utils.FileExists(thumb.path) - if !exists { - err := utils.WriteFile(thumb.path, thumb.data) - if err != nil { - logger.Errorf("Write error for thumbnail %s: %s ", thumb.path, err) - } - } - } - -} - -// get thumbnail from cache, otherwise create it and store to cache -func cacheGthumb(gallery *models.Gallery, index int, width int) []byte { - thumbPath := paths.GetGthumbPath(gallery.Checksum, index, width) - exists, _ := utils.FileExists(thumbPath) - if exists { // if thumbnail exists in cache return that - content, err := ioutil.ReadFile(thumbPath) - if err == nil { - return content - } else { - logger.Errorf("Read Error for file %s : %s", thumbPath, err) - 
} - - } - data := gallery.GetThumbnail(index, width) - thumbDir := paths.GetGthumbDir(gallery.Checksum) - t := newCacheThumb(thumbDir, thumbPath, data) - writeChan <- t // write the file to cache - return data -} - -// create all thumbs for a given gallery -func CreateGthumbs(gallery *models.Gallery) { - count := gallery.ImageCount() - for i := 0; i < count; i++ { - cacheGthumb(gallery, i, models.DefaultGthumbWidth) - } -} diff --git a/pkg/api/context_keys.go b/pkg/api/context_keys.go index f568009d9..95eb0fd6a 100644 --- a/pkg/api/context_keys.go +++ b/pkg/api/context_keys.go @@ -13,4 +13,5 @@ const ( ContextUser key = 5 tagKey key = 6 downloadKey key = 7 + imageKey key = 8 ) diff --git a/pkg/api/resolver.go b/pkg/api/resolver.go index 63ef59e87..208d2da74 100644 --- a/pkg/api/resolver.go +++ b/pkg/api/resolver.go @@ -27,6 +27,9 @@ func (r *Resolver) Query() models.QueryResolver { func (r *Resolver) Scene() models.SceneResolver { return &sceneResolver{r} } +func (r *Resolver) Image() models.ImageResolver { + return &imageResolver{r} +} func (r *Resolver) SceneMarker() models.SceneMarkerResolver { return &sceneMarkerResolver{r} } @@ -67,6 +70,7 @@ type galleryResolver struct{ *Resolver } type performerResolver struct{ *Resolver } type sceneResolver struct{ *Resolver } type sceneMarkerResolver struct{ *Resolver } +type imageResolver struct{ *Resolver } type studioResolver struct{ *Resolver } type movieResolver struct{ *Resolver } type tagResolver struct{ *Resolver } diff --git a/pkg/api/resolver_model_gallery.go b/pkg/api/resolver_model_gallery.go index ebee45420..3061d2d01 100644 --- a/pkg/api/resolver_model_gallery.go +++ b/pkg/api/resolver_model_gallery.go @@ -3,16 +3,82 @@ package api import ( "context" + "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" ) -func (r *galleryResolver) Title(ctx context.Context, obj *models.Gallery) (*string, error) { - return nil, nil // TODO remove this from schema +func (r *galleryResolver) Path(ctx context.Context, obj *models.Gallery) (*string, error) { + if obj.Path.Valid { + return &obj.Path.String, nil + } + return nil, nil } -func (r *galleryResolver) Files(ctx context.Context, obj *models.Gallery) ([]*models.GalleryFilesType, error) { - baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - return obj.GetFiles(baseURL), nil +func (r *galleryResolver) Title(ctx context.Context, obj *models.Gallery) (*string, error) { + if obj.Title.Valid { + return &obj.Title.String, nil + } + return nil, nil +} + +func (r *galleryResolver) Images(ctx context.Context, obj *models.Gallery) ([]*models.Image, error) { + qb := models.NewImageQueryBuilder() + + return qb.FindByGalleryID(obj.ID) +} + +func (r *galleryResolver) Cover(ctx context.Context, obj *models.Gallery) (*models.Image, error) { + qb := models.NewImageQueryBuilder() + + imgs, err := qb.FindByGalleryID(obj.ID) + if err != nil { + return nil, err + } + + var ret *models.Image + if len(imgs) > 0 { + ret = imgs[0] + } + + for _, img := range imgs { + if image.IsCover(img) { + ret = img + break + } + } + + return ret, nil +} + +func (r *galleryResolver) Date(ctx context.Context, obj *models.Gallery) (*string, error) { + if obj.Date.Valid { + result := utils.GetYMDFromDatabaseDate(obj.Date.String) + return &result, nil + } + return nil, nil +} + +func (r *galleryResolver) URL(ctx context.Context, obj *models.Gallery) (*string, error) { + if obj.URL.Valid { + return &obj.URL.String, nil + } + return nil, nil +} + +func (r *galleryResolver) 
Details(ctx context.Context, obj *models.Gallery) (*string, error) { + if obj.Details.Valid { + return &obj.Details.String, nil + } + return nil, nil +} + +func (r *galleryResolver) Rating(ctx context.Context, obj *models.Gallery) (*int, error) { + if obj.Rating.Valid { + rating := int(obj.Rating.Int64) + return &rating, nil + } + return nil, nil } func (r *galleryResolver) Scene(ctx context.Context, obj *models.Gallery) (*models.Scene, error) { @@ -23,3 +89,22 @@ func (r *galleryResolver) Scene(ctx context.Context, obj *models.Gallery) (*mode qb := models.NewSceneQueryBuilder() return qb.Find(int(obj.SceneID.Int64)) } + +func (r *galleryResolver) Studio(ctx context.Context, obj *models.Gallery) (*models.Studio, error) { + if !obj.StudioID.Valid { + return nil, nil + } + + qb := models.NewStudioQueryBuilder() + return qb.Find(int(obj.StudioID.Int64), nil) +} + +func (r *galleryResolver) Tags(ctx context.Context, obj *models.Gallery) ([]*models.Tag, error) { + qb := models.NewTagQueryBuilder() + return qb.FindByGalleryID(obj.ID, nil) +} + +func (r *galleryResolver) Performers(ctx context.Context, obj *models.Gallery) ([]*models.Performer, error) { + qb := models.NewPerformerQueryBuilder() + return qb.FindByGalleryID(obj.ID, nil) +} diff --git a/pkg/api/resolver_model_image.go b/pkg/api/resolver_model_image.go new file mode 100644 index 000000000..6ca679f89 --- /dev/null +++ b/pkg/api/resolver_model_image.go @@ -0,0 +1,68 @@ +package api + +import ( + "context" + + "github.com/stashapp/stash/pkg/api/urlbuilders" + "github.com/stashapp/stash/pkg/image" + "github.com/stashapp/stash/pkg/models" +) + +func (r *imageResolver) Title(ctx context.Context, obj *models.Image) (*string, error) { + ret := image.GetTitle(obj) + return &ret, nil +} + +func (r *imageResolver) Rating(ctx context.Context, obj *models.Image) (*int, error) { + if obj.Rating.Valid { + rating := int(obj.Rating.Int64) + return &rating, nil + } + return nil, nil +} + +func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*models.ImageFileType, error) { + width := int(obj.Width.Int64) + height := int(obj.Height.Int64) + size := int(obj.Size.Int64) + return &models.ImageFileType{ + Size: &size, + Width: &width, + Height: &height, + }, nil +} + +func (r *imageResolver) Paths(ctx context.Context, obj *models.Image) (*models.ImagePathsType, error) { + baseURL, _ := ctx.Value(BaseURLCtxKey).(string) + builder := urlbuilders.NewImageURLBuilder(baseURL, obj.ID) + thumbnailPath := builder.GetThumbnailURL() + imagePath := builder.GetImageURL() + return &models.ImagePathsType{ + Image: &imagePath, + Thumbnail: &thumbnailPath, + }, nil +} + +func (r *imageResolver) Galleries(ctx context.Context, obj *models.Image) ([]*models.Gallery, error) { + qb := models.NewGalleryQueryBuilder() + return qb.FindByImageID(obj.ID, nil) +} + +func (r *imageResolver) Studio(ctx context.Context, obj *models.Image) (*models.Studio, error) { + if !obj.StudioID.Valid { + return nil, nil + } + + qb := models.NewStudioQueryBuilder() + return qb.Find(int(obj.StudioID.Int64), nil) +} + +func (r *imageResolver) Tags(ctx context.Context, obj *models.Image) ([]*models.Tag, error) { + qb := models.NewTagQueryBuilder() + return qb.FindByImageID(obj.ID, nil) +} + +func (r *imageResolver) Performers(ctx context.Context, obj *models.Image) ([]*models.Performer, error) { + qb := models.NewPerformerQueryBuilder() + return qb.FindByImageID(obj.ID, nil) +} diff --git a/pkg/api/resolver_mutation_configure.go b/pkg/api/resolver_mutation_configure.go index 
31faeab01..c95eb17ef 100644
--- a/pkg/api/resolver_mutation_configure.go
+++ b/pkg/api/resolver_mutation_configure.go
@@ -15,8 +15,8 @@ import (
 
 func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.ConfigGeneralInput) (*models.ConfigGeneralResult, error) {
 	if len(input.Stashes) > 0 {
-		for _, stashPath := range input.Stashes {
-			exists, err := utils.DirExists(stashPath)
+		for _, s := range input.Stashes {
+			exists, err := utils.DirExists(s.Path)
 			if !exists {
 				return makeConfigGeneralResult(), err
 			}
@@ -119,6 +119,24 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
 		config.Set(config.Exclude, input.Excludes)
 	}
 
+	if input.ImageExcludes != nil {
+		config.Set(config.ImageExclude, input.ImageExcludes)
+	}
+
+	if input.VideoExtensions != nil {
+		config.Set(config.VideoExtensions, input.VideoExtensions)
+	}
+
+	if input.ImageExtensions != nil {
+		config.Set(config.ImageExtensions, input.ImageExtensions)
+	}
+
+	if input.GalleryExtensions != nil {
+		config.Set(config.GalleryExtensions, input.GalleryExtensions)
+	}
+
+	config.Set(config.CreateGalleriesFromFolders, input.CreateGalleriesFromFolders)
+
 	refreshScraperCache := false
 	if input.ScraperUserAgent != nil {
 		config.Set(config.ScraperUserAgent, input.ScraperUserAgent)
diff --git a/pkg/api/resolver_mutation_gallery.go b/pkg/api/resolver_mutation_gallery.go
new file mode 100644
index 000000000..16cbec30d
--- /dev/null
+++ b/pkg/api/resolver_mutation_gallery.go
@@ -0,0 +1,544 @@
+package api
+
+import (
+	"context"
+	"database/sql"
+	"errors"
+	"strconv"
+	"time"
+
+	"github.com/jmoiron/sqlx"
+	"github.com/stashapp/stash/pkg/database"
+	"github.com/stashapp/stash/pkg/manager"
+	"github.com/stashapp/stash/pkg/models"
+	"github.com/stashapp/stash/pkg/utils"
+)
+
+func (r *mutationResolver) GalleryCreate(ctx context.Context, input models.GalleryCreateInput) (*models.Gallery, error) {
+	// title must be provided
+	if input.Title == "" {
+		return nil, errors.New("title must not be empty")
+	}
+
+	// for manually created galleries, generate checksum from title
+	checksum := utils.MD5FromString(input.Title)
+
+	// Populate a new gallery from the input
+	currentTime := time.Now()
+	newGallery := models.Gallery{
+		Title: sql.NullString{
+			String: input.Title,
+			Valid:  true,
+		},
+		Checksum:  checksum,
+		CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
+		UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
+	}
+	if input.URL != nil {
+		newGallery.URL = sql.NullString{String: *input.URL, Valid: true}
+	}
+	if input.Details != nil {
+		newGallery.Details = sql.NullString{String: *input.Details, Valid: true}
+	}
+	if input.Date != nil {
+		newGallery.Date = models.SQLiteDate{String: *input.Date, Valid: true}
+	}
+	if input.Rating != nil {
+		newGallery.Rating = sql.NullInt64{Int64: int64(*input.Rating), Valid: true}
+	} else {
+		// rating must be nullable
+		newGallery.Rating = sql.NullInt64{Valid: false}
+	}
+
+	if input.StudioID != nil {
+		studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64)
+		newGallery.StudioID = sql.NullInt64{Int64: studioID, Valid: true}
+	} else {
+		// studio must be nullable
+		newGallery.StudioID = sql.NullInt64{Valid: false}
+	}
+
+	if input.SceneID != nil {
+		sceneID, _ := strconv.ParseInt(*input.SceneID, 10, 64)
+		newGallery.SceneID = sql.NullInt64{Int64: sceneID, Valid: true}
+	} else {
+		// scene must be nullable
+		newGallery.SceneID = sql.NullInt64{Valid: false}
+
} + + // Start the transaction and save the performer + tx := database.DB.MustBeginTx(ctx, nil) + qb := models.NewGalleryQueryBuilder() + jqb := models.NewJoinsQueryBuilder() + gallery, err := qb.Create(newGallery, tx) + if err != nil { + _ = tx.Rollback() + return nil, err + } + + // Save the performers + var performerJoins []models.PerformersGalleries + for _, pid := range input.PerformerIds { + performerID, _ := strconv.Atoi(pid) + performerJoin := models.PerformersGalleries{ + PerformerID: performerID, + GalleryID: gallery.ID, + } + performerJoins = append(performerJoins, performerJoin) + } + if err := jqb.UpdatePerformersGalleries(gallery.ID, performerJoins, tx); err != nil { + return nil, err + } + + // Save the tags + var tagJoins []models.GalleriesTags + for _, tid := range input.TagIds { + tagID, _ := strconv.Atoi(tid) + tagJoin := models.GalleriesTags{ + GalleryID: gallery.ID, + TagID: tagID, + } + tagJoins = append(tagJoins, tagJoin) + } + if err := jqb.UpdateGalleriesTags(gallery.ID, tagJoins, tx); err != nil { + return nil, err + } + + // Commit + if err := tx.Commit(); err != nil { + return nil, err + } + + return gallery, nil +} + +func (r *mutationResolver) GalleryUpdate(ctx context.Context, input models.GalleryUpdateInput) (*models.Gallery, error) { + // Start the transaction and save the gallery + tx := database.DB.MustBeginTx(ctx, nil) + + ret, err := r.galleryUpdate(input, tx) + + if err != nil { + _ = tx.Rollback() + return nil, err + } + + // Commit + if err := tx.Commit(); err != nil { + return nil, err + } + + return ret, nil +} + +func (r *mutationResolver) GalleriesUpdate(ctx context.Context, input []*models.GalleryUpdateInput) ([]*models.Gallery, error) { + // Start the transaction and save the gallery + tx := database.DB.MustBeginTx(ctx, nil) + + var ret []*models.Gallery + + for _, gallery := range input { + thisGallery, err := r.galleryUpdate(*gallery, tx) + ret = append(ret, thisGallery) + + if err != nil { + _ = tx.Rollback() + return nil, err + } + } + + // Commit + if err := tx.Commit(); err != nil { + return nil, err + } + + return ret, nil +} + +func (r *mutationResolver) galleryUpdate(input models.GalleryUpdateInput, tx *sqlx.Tx) (*models.Gallery, error) { + qb := models.NewGalleryQueryBuilder() + // Populate gallery from the input + galleryID, _ := strconv.Atoi(input.ID) + originalGallery, err := qb.Find(galleryID, nil) + if err != nil { + return nil, err + } + + if originalGallery == nil { + return nil, errors.New("not found") + } + + updatedTime := time.Now() + updatedGallery := models.GalleryPartial{ + ID: galleryID, + UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime}, + } + if input.Title != nil { + // ensure title is not empty + if *input.Title == "" { + return nil, errors.New("title must not be empty") + } + + // if gallery is not zip-based, then generate the checksum from the title + if !originalGallery.Path.Valid { + checksum := utils.MD5FromString(*input.Title) + updatedGallery.Checksum = &checksum + } + + updatedGallery.Title = &sql.NullString{String: *input.Title, Valid: true} + } + if input.Details != nil { + updatedGallery.Details = &sql.NullString{String: *input.Details, Valid: true} + } + if input.URL != nil { + updatedGallery.URL = &sql.NullString{String: *input.URL, Valid: true} + } + if input.Date != nil { + updatedGallery.Date = &models.SQLiteDate{String: *input.Date, Valid: true} + } + + if input.Rating != nil { + updatedGallery.Rating = &sql.NullInt64{Int64: int64(*input.Rating), Valid: true} + } else { + // rating must be 
nullable + updatedGallery.Rating = &sql.NullInt64{Valid: false} + } + + if input.StudioID != nil { + studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64) + updatedGallery.StudioID = &sql.NullInt64{Int64: studioID, Valid: true} + } else { + // studio must be nullable + updatedGallery.StudioID = &sql.NullInt64{Valid: false} + } + + // gallery scene is set from the scene only + + jqb := models.NewJoinsQueryBuilder() + gallery, err := qb.UpdatePartial(updatedGallery, tx) + if err != nil { + return nil, err + } + + // Save the performers + var performerJoins []models.PerformersGalleries + for _, pid := range input.PerformerIds { + performerID, _ := strconv.Atoi(pid) + performerJoin := models.PerformersGalleries{ + PerformerID: performerID, + GalleryID: galleryID, + } + performerJoins = append(performerJoins, performerJoin) + } + if err := jqb.UpdatePerformersGalleries(galleryID, performerJoins, tx); err != nil { + return nil, err + } + + // Save the tags + var tagJoins []models.GalleriesTags + for _, tid := range input.TagIds { + tagID, _ := strconv.Atoi(tid) + tagJoin := models.GalleriesTags{ + GalleryID: galleryID, + TagID: tagID, + } + tagJoins = append(tagJoins, tagJoin) + } + if err := jqb.UpdateGalleriesTags(galleryID, tagJoins, tx); err != nil { + return nil, err + } + + return gallery, nil +} + +func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input models.BulkGalleryUpdateInput) ([]*models.Gallery, error) { + // Populate gallery from the input + updatedTime := time.Now() + + // Start the transaction and save the gallery marker + tx := database.DB.MustBeginTx(ctx, nil) + qb := models.NewGalleryQueryBuilder() + jqb := models.NewJoinsQueryBuilder() + + updatedGallery := models.GalleryPartial{ + UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime}, + } + if input.Details != nil { + updatedGallery.Details = &sql.NullString{String: *input.Details, Valid: true} + } + if input.URL != nil { + updatedGallery.URL = &sql.NullString{String: *input.URL, Valid: true} + } + if input.Date != nil { + updatedGallery.Date = &models.SQLiteDate{String: *input.Date, Valid: true} + } + if input.Rating != nil { + // a rating of 0 means unset the rating + if *input.Rating == 0 { + updatedGallery.Rating = &sql.NullInt64{Int64: 0, Valid: false} + } else { + updatedGallery.Rating = &sql.NullInt64{Int64: int64(*input.Rating), Valid: true} + } + } + if input.StudioID != nil { + // empty string means unset the studio + if *input.StudioID == "" { + updatedGallery.StudioID = &sql.NullInt64{Int64: 0, Valid: false} + } else { + studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64) + updatedGallery.StudioID = &sql.NullInt64{Int64: studioID, Valid: true} + } + } + if input.SceneID != nil { + // empty string means unset the studio + if *input.SceneID == "" { + updatedGallery.SceneID = &sql.NullInt64{Int64: 0, Valid: false} + } else { + sceneID, _ := strconv.ParseInt(*input.SceneID, 10, 64) + updatedGallery.SceneID = &sql.NullInt64{Int64: sceneID, Valid: true} + } + } + + ret := []*models.Gallery{} + + for _, galleryIDStr := range input.Ids { + galleryID, _ := strconv.Atoi(galleryIDStr) + updatedGallery.ID = galleryID + + gallery, err := qb.UpdatePartial(updatedGallery, tx) + if err != nil { + _ = tx.Rollback() + return nil, err + } + + ret = append(ret, gallery) + + // Save the performers + if wasFieldIncluded(ctx, "performer_ids") { + performerIDs, err := adjustGalleryPerformerIDs(tx, galleryID, *input.PerformerIds) + if err != nil { + _ = tx.Rollback() + return nil, err + } + + var 
performerJoins []models.PerformersGalleries + for _, performerID := range performerIDs { + performerJoin := models.PerformersGalleries{ + PerformerID: performerID, + GalleryID: galleryID, + } + performerJoins = append(performerJoins, performerJoin) + } + if err := jqb.UpdatePerformersGalleries(galleryID, performerJoins, tx); err != nil { + _ = tx.Rollback() + return nil, err + } + } + + // Save the tags + if wasFieldIncluded(ctx, "tag_ids") { + tagIDs, err := adjustGalleryTagIDs(tx, galleryID, *input.TagIds) + if err != nil { + _ = tx.Rollback() + return nil, err + } + + var tagJoins []models.GalleriesTags + for _, tagID := range tagIDs { + tagJoin := models.GalleriesTags{ + GalleryID: galleryID, + TagID: tagID, + } + tagJoins = append(tagJoins, tagJoin) + } + if err := jqb.UpdateGalleriesTags(galleryID, tagJoins, tx); err != nil { + _ = tx.Rollback() + return nil, err + } + } + } + + // Commit + if err := tx.Commit(); err != nil { + return nil, err + } + + return ret, nil +} + +func adjustGalleryPerformerIDs(tx *sqlx.Tx, galleryID int, ids models.BulkUpdateIds) ([]int, error) { + var ret []int + + jqb := models.NewJoinsQueryBuilder() + if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove { + // adding to the joins + performerJoins, err := jqb.GetGalleryPerformers(galleryID, tx) + + if err != nil { + return nil, err + } + + for _, join := range performerJoins { + ret = append(ret, join.PerformerID) + } + } + + return adjustIDs(ret, ids), nil +} + +func adjustGalleryTagIDs(tx *sqlx.Tx, galleryID int, ids models.BulkUpdateIds) ([]int, error) { + var ret []int + + jqb := models.NewJoinsQueryBuilder() + if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove { + // adding to the joins + tagJoins, err := jqb.GetGalleryTags(galleryID, tx) + + if err != nil { + return nil, err + } + + for _, join := range tagJoins { + ret = append(ret, join.TagID) + } + } + + return adjustIDs(ret, ids), nil +} + +func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.GalleryDestroyInput) (bool, error) { + qb := models.NewGalleryQueryBuilder() + tx := database.DB.MustBeginTx(ctx, nil) + + var galleries []*models.Gallery + var imgsToPostProcess []*models.Image + for _, id := range input.Ids { + galleryID, _ := strconv.Atoi(id) + + gallery, err := qb.Find(galleryID, tx) + if gallery != nil { + galleries = append(galleries, gallery) + } + err = qb.Destroy(galleryID, tx) + + if err != nil { + tx.Rollback() + return false, err + } + + // if this is a zip-based gallery, delete the images as well + if gallery.Path.Valid { + iqb := models.NewImageQueryBuilder() + imgs, err := iqb.FindByGalleryID(galleryID) + if err != nil { + tx.Rollback() + return false, err + } + + for _, img := range imgs { + err = qb.Destroy(img.ID, tx) + if err != nil { + tx.Rollback() + return false, err + } + + imgsToPostProcess = append(imgsToPostProcess, img) + } + } + } + + if err := tx.Commit(); err != nil { + return false, err + } + + // if delete file is true, then delete the file as well + // if it fails, just log a message + if input.DeleteFile != nil && *input.DeleteFile { + for _, gallery := range galleries { + manager.DeleteGalleryFile(gallery) + } + } + + // if delete generated is true, then delete the generated files + // for the gallery + if input.DeleteGenerated != nil && *input.DeleteGenerated { + for _, img := range imgsToPostProcess { + manager.DeleteGeneratedImageFiles(img) + } + } + + return true, nil +} + +func (r *mutationResolver) 
AddGalleryImages(ctx context.Context, input models.GalleryAddInput) (bool, error) { + galleryID, _ := strconv.Atoi(input.GalleryID) + qb := models.NewGalleryQueryBuilder() + gallery, err := qb.Find(galleryID, nil) + if err != nil { + return false, err + } + + if gallery == nil { + return false, errors.New("gallery not found") + } + + if gallery.Zip { + return false, errors.New("cannot modify zip gallery images") + } + + jqb := models.NewJoinsQueryBuilder() + tx := database.DB.MustBeginTx(ctx, nil) + + for _, id := range input.ImageIds { + imageID, _ := strconv.Atoi(id) + _, err := jqb.AddImageGallery(imageID, galleryID, tx) + if err != nil { + tx.Rollback() + return false, err + } + } + + if err := tx.Commit(); err != nil { + return false, err + } + + return true, nil +} + +func (r *mutationResolver) RemoveGalleryImages(ctx context.Context, input models.GalleryRemoveInput) (bool, error) { + galleryID, _ := strconv.Atoi(input.GalleryID) + qb := models.NewGalleryQueryBuilder() + gallery, err := qb.Find(galleryID, nil) + if err != nil { + return false, err + } + + if gallery == nil { + return false, errors.New("gallery not found") + } + + if gallery.Zip { + return false, errors.New("cannot modify zip gallery images") + } + + jqb := models.NewJoinsQueryBuilder() + tx := database.DB.MustBeginTx(ctx, nil) + + for _, id := range input.ImageIds { + imageID, _ := strconv.Atoi(id) + _, err := jqb.RemoveImageGallery(imageID, galleryID, tx) + if err != nil { + tx.Rollback() + return false, err + } + } + + if err := tx.Commit(); err != nil { + return false, err + } + + return true, nil +} diff --git a/pkg/api/resolver_mutation_image.go b/pkg/api/resolver_mutation_image.go new file mode 100644 index 000000000..a71c617f1 --- /dev/null +++ b/pkg/api/resolver_mutation_image.go @@ -0,0 +1,439 @@ +package api + +import ( + "context" + "database/sql" + "strconv" + "time" + + "github.com/jmoiron/sqlx" + + "github.com/stashapp/stash/pkg/database" + "github.com/stashapp/stash/pkg/manager" + "github.com/stashapp/stash/pkg/models" +) + +func (r *mutationResolver) ImageUpdate(ctx context.Context, input models.ImageUpdateInput) (*models.Image, error) { + // Start the transaction and save the image + tx := database.DB.MustBeginTx(ctx, nil) + + ret, err := r.imageUpdate(input, tx) + + if err != nil { + _ = tx.Rollback() + return nil, err + } + + // Commit + if err := tx.Commit(); err != nil { + return nil, err + } + + return ret, nil +} + +func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*models.ImageUpdateInput) ([]*models.Image, error) { + // Start the transaction and save the image + tx := database.DB.MustBeginTx(ctx, nil) + + var ret []*models.Image + + for _, image := range input { + thisImage, err := r.imageUpdate(*image, tx) + ret = append(ret, thisImage) + + if err != nil { + _ = tx.Rollback() + return nil, err + } + } + + // Commit + if err := tx.Commit(); err != nil { + return nil, err + } + + return ret, nil +} + +func (r *mutationResolver) imageUpdate(input models.ImageUpdateInput, tx *sqlx.Tx) (*models.Image, error) { + // Populate image from the input + imageID, _ := strconv.Atoi(input.ID) + + updatedTime := time.Now() + updatedImage := models.ImagePartial{ + ID: imageID, + UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime}, + } + if input.Title != nil { + updatedImage.Title = &sql.NullString{String: *input.Title, Valid: true} + } + + if input.Rating != nil { + updatedImage.Rating = &sql.NullInt64{Int64: int64(*input.Rating), Valid: true} + } else { + // rating must be 
nullable + updatedImage.Rating = &sql.NullInt64{Valid: false} + } + + if input.StudioID != nil { + studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64) + updatedImage.StudioID = &sql.NullInt64{Int64: studioID, Valid: true} + } else { + // studio must be nullable + updatedImage.StudioID = &sql.NullInt64{Valid: false} + } + + qb := models.NewImageQueryBuilder() + jqb := models.NewJoinsQueryBuilder() + image, err := qb.Update(updatedImage, tx) + if err != nil { + return nil, err + } + + // don't set the galleries directly. Use add/remove gallery images interface instead + + // Save the performers + var performerJoins []models.PerformersImages + for _, pid := range input.PerformerIds { + performerID, _ := strconv.Atoi(pid) + performerJoin := models.PerformersImages{ + PerformerID: performerID, + ImageID: imageID, + } + performerJoins = append(performerJoins, performerJoin) + } + if err := jqb.UpdatePerformersImages(imageID, performerJoins, tx); err != nil { + return nil, err + } + + // Save the tags + var tagJoins []models.ImagesTags + for _, tid := range input.TagIds { + tagID, _ := strconv.Atoi(tid) + tagJoin := models.ImagesTags{ + ImageID: imageID, + TagID: tagID, + } + tagJoins = append(tagJoins, tagJoin) + } + if err := jqb.UpdateImagesTags(imageID, tagJoins, tx); err != nil { + return nil, err + } + + return image, nil +} + +func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input models.BulkImageUpdateInput) ([]*models.Image, error) { + // Populate image from the input + updatedTime := time.Now() + + // Start the transaction and save the image marker + tx := database.DB.MustBeginTx(ctx, nil) + qb := models.NewImageQueryBuilder() + jqb := models.NewJoinsQueryBuilder() + + updatedImage := models.ImagePartial{ + UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime}, + } + if input.Title != nil { + updatedImage.Title = &sql.NullString{String: *input.Title, Valid: true} + } + if input.Rating != nil { + // a rating of 0 means unset the rating + if *input.Rating == 0 { + updatedImage.Rating = &sql.NullInt64{Int64: 0, Valid: false} + } else { + updatedImage.Rating = &sql.NullInt64{Int64: int64(*input.Rating), Valid: true} + } + } + if input.StudioID != nil { + // empty string means unset the studio + if *input.StudioID == "" { + updatedImage.StudioID = &sql.NullInt64{Int64: 0, Valid: false} + } else { + studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64) + updatedImage.StudioID = &sql.NullInt64{Int64: studioID, Valid: true} + } + } + + ret := []*models.Image{} + + for _, imageIDStr := range input.Ids { + imageID, _ := strconv.Atoi(imageIDStr) + updatedImage.ID = imageID + + image, err := qb.Update(updatedImage, tx) + if err != nil { + _ = tx.Rollback() + return nil, err + } + + ret = append(ret, image) + + // Save the galleries + if wasFieldIncluded(ctx, "gallery_ids") { + galleryIDs, err := adjustImageGalleryIDs(tx, imageID, *input.GalleryIds) + if err != nil { + _ = tx.Rollback() + return nil, err + } + + var galleryJoins []models.GalleriesImages + for _, gid := range galleryIDs { + galleryJoin := models.GalleriesImages{ + GalleryID: gid, + ImageID: imageID, + } + galleryJoins = append(galleryJoins, galleryJoin) + } + if err := jqb.UpdateGalleriesImages(imageID, galleryJoins, tx); err != nil { + return nil, err + } + } + + // Save the performers + if wasFieldIncluded(ctx, "performer_ids") { + performerIDs, err := adjustImagePerformerIDs(tx, imageID, *input.PerformerIds) + if err != nil { + _ = tx.Rollback() + return nil, err + } + + var performerJoins 
[]models.PerformersImages + for _, performerID := range performerIDs { + performerJoin := models.PerformersImages{ + PerformerID: performerID, + ImageID: imageID, + } + performerJoins = append(performerJoins, performerJoin) + } + if err := jqb.UpdatePerformersImages(imageID, performerJoins, tx); err != nil { + _ = tx.Rollback() + return nil, err + } + } + + // Save the tags + if wasFieldIncluded(ctx, "tag_ids") { + tagIDs, err := adjustImageTagIDs(tx, imageID, *input.TagIds) + if err != nil { + _ = tx.Rollback() + return nil, err + } + + var tagJoins []models.ImagesTags + for _, tagID := range tagIDs { + tagJoin := models.ImagesTags{ + ImageID: imageID, + TagID: tagID, + } + tagJoins = append(tagJoins, tagJoin) + } + if err := jqb.UpdateImagesTags(imageID, tagJoins, tx); err != nil { + _ = tx.Rollback() + return nil, err + } + } + } + + // Commit + if err := tx.Commit(); err != nil { + return nil, err + } + + return ret, nil +} + +func adjustImageGalleryIDs(tx *sqlx.Tx, imageID int, ids models.BulkUpdateIds) ([]int, error) { + var ret []int + + jqb := models.NewJoinsQueryBuilder() + if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove { + // adding to the joins + galleryJoins, err := jqb.GetImageGalleries(imageID, tx) + + if err != nil { + return nil, err + } + + for _, join := range galleryJoins { + ret = append(ret, join.GalleryID) + } + } + + return adjustIDs(ret, ids), nil +} + +func adjustImagePerformerIDs(tx *sqlx.Tx, imageID int, ids models.BulkUpdateIds) ([]int, error) { + var ret []int + + jqb := models.NewJoinsQueryBuilder() + if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove { + // adding to the joins + performerJoins, err := jqb.GetImagePerformers(imageID, tx) + + if err != nil { + return nil, err + } + + for _, join := range performerJoins { + ret = append(ret, join.PerformerID) + } + } + + return adjustIDs(ret, ids), nil +} + +func adjustImageTagIDs(tx *sqlx.Tx, imageID int, ids models.BulkUpdateIds) ([]int, error) { + var ret []int + + jqb := models.NewJoinsQueryBuilder() + if ids.Mode == models.BulkUpdateIDModeAdd || ids.Mode == models.BulkUpdateIDModeRemove { + // adding to the joins + tagJoins, err := jqb.GetImageTags(imageID, tx) + + if err != nil { + return nil, err + } + + for _, join := range tagJoins { + ret = append(ret, join.TagID) + } + } + + return adjustIDs(ret, ids), nil +} + +func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageDestroyInput) (bool, error) { + qb := models.NewImageQueryBuilder() + tx := database.DB.MustBeginTx(ctx, nil) + + imageID, _ := strconv.Atoi(input.ID) + image, err := qb.Find(imageID) + err = qb.Destroy(imageID, tx) + + if err != nil { + tx.Rollback() + return false, err + } + + if err := tx.Commit(); err != nil { + return false, err + } + + // if delete generated is true, then delete the generated files + // for the image + if input.DeleteGenerated != nil && *input.DeleteGenerated { + manager.DeleteGeneratedImageFiles(image) + } + + // if delete file is true, then delete the file as well + // if it fails, just log a message + if input.DeleteFile != nil && *input.DeleteFile { + manager.DeleteImageFile(image) + } + + return true, nil +} + +func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.ImagesDestroyInput) (bool, error) { + qb := models.NewImageQueryBuilder() + tx := database.DB.MustBeginTx(ctx, nil) + + var images []*models.Image + for _, id := range input.Ids { + imageID, _ := strconv.Atoi(id) + + image, err := 
qb.Find(imageID) + if image != nil { + images = append(images, image) + } + err = qb.Destroy(imageID, tx) + + if err != nil { + tx.Rollback() + return false, err + } + } + + if err := tx.Commit(); err != nil { + return false, err + } + + for _, image := range images { + // if delete generated is true, then delete the generated files + // for the image + if input.DeleteGenerated != nil && *input.DeleteGenerated { + manager.DeleteGeneratedImageFiles(image) + } + + // if delete file is true, then delete the file as well + // if it fails, just log a message + if input.DeleteFile != nil && *input.DeleteFile { + manager.DeleteImageFile(image) + } + } + + return true, nil +} + +func (r *mutationResolver) ImageIncrementO(ctx context.Context, id string) (int, error) { + imageID, _ := strconv.Atoi(id) + + tx := database.DB.MustBeginTx(ctx, nil) + qb := models.NewImageQueryBuilder() + + newVal, err := qb.IncrementOCounter(imageID, tx) + if err != nil { + _ = tx.Rollback() + return 0, err + } + + // Commit + if err := tx.Commit(); err != nil { + return 0, err + } + + return newVal, nil +} + +func (r *mutationResolver) ImageDecrementO(ctx context.Context, id string) (int, error) { + imageID, _ := strconv.Atoi(id) + + tx := database.DB.MustBeginTx(ctx, nil) + qb := models.NewImageQueryBuilder() + + newVal, err := qb.DecrementOCounter(imageID, tx) + if err != nil { + _ = tx.Rollback() + return 0, err + } + + // Commit + if err := tx.Commit(); err != nil { + return 0, err + } + + return newVal, nil +} + +func (r *mutationResolver) ImageResetO(ctx context.Context, id string) (int, error) { + imageID, _ := strconv.Atoi(id) + + tx := database.DB.MustBeginTx(ctx, nil) + qb := models.NewImageQueryBuilder() + + newVal, err := qb.ResetOCounter(imageID, tx) + if err != nil { + _ = tx.Rollback() + return 0, err + } + + // Commit + if err := tx.Commit(); err != nil { + return 0, err + } + + return newVal, nil +} diff --git a/pkg/api/resolver_query_configuration.go b/pkg/api/resolver_query_configuration.go index 190439fa0..6f1bc4741 100644 --- a/pkg/api/resolver_query_configuration.go +++ b/pkg/api/resolver_query_configuration.go @@ -43,30 +43,35 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult { scraperCDPPath := config.GetScraperCDPPath() return &models.ConfigGeneralResult{ - Stashes: config.GetStashPaths(), - DatabasePath: config.GetDatabasePath(), - GeneratedPath: config.GetGeneratedPath(), - CachePath: config.GetCachePath(), - CalculateMd5: config.IsCalculateMD5(), - VideoFileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(), - PreviewSegments: config.GetPreviewSegments(), - PreviewSegmentDuration: config.GetPreviewSegmentDuration(), - PreviewExcludeStart: config.GetPreviewExcludeStart(), - PreviewExcludeEnd: config.GetPreviewExcludeEnd(), - PreviewPreset: config.GetPreviewPreset(), - MaxTranscodeSize: &maxTranscodeSize, - MaxStreamingTranscodeSize: &maxStreamingTranscodeSize, - Username: config.GetUsername(), - Password: config.GetPasswordHash(), - MaxSessionAge: config.GetMaxSessionAge(), - LogFile: &logFile, - LogOut: config.GetLogOut(), - LogLevel: config.GetLogLevel(), - LogAccess: config.GetLogAccess(), - Excludes: config.GetExcludes(), - ScraperUserAgent: &scraperUserAgent, - ScraperCDPPath: &scraperCDPPath, - StashBoxes: config.GetStashBoxes(), + Stashes: config.GetStashPaths(), + DatabasePath: config.GetDatabasePath(), + GeneratedPath: config.GetGeneratedPath(), + CachePath: config.GetCachePath(), + CalculateMd5: config.IsCalculateMD5(), + VideoFileNamingAlgorithm: 
config.GetVideoFileNamingAlgorithm(), + PreviewSegments: config.GetPreviewSegments(), + PreviewSegmentDuration: config.GetPreviewSegmentDuration(), + PreviewExcludeStart: config.GetPreviewExcludeStart(), + PreviewExcludeEnd: config.GetPreviewExcludeEnd(), + PreviewPreset: config.GetPreviewPreset(), + MaxTranscodeSize: &maxTranscodeSize, + MaxStreamingTranscodeSize: &maxStreamingTranscodeSize, + Username: config.GetUsername(), + Password: config.GetPasswordHash(), + MaxSessionAge: config.GetMaxSessionAge(), + LogFile: &logFile, + LogOut: config.GetLogOut(), + LogLevel: config.GetLogLevel(), + LogAccess: config.GetLogAccess(), + VideoExtensions: config.GetVideoExtensions(), + ImageExtensions: config.GetImageExtensions(), + GalleryExtensions: config.GetGalleryExtensions(), + CreateGalleriesFromFolders: config.GetCreateGalleriesFromFolders(), + Excludes: config.GetExcludes(), + ImageExcludes: config.GetImageExcludes(), + ScraperUserAgent: &scraperUserAgent, + ScraperCDPPath: &scraperCDPPath, + StashBoxes: config.GetStashBoxes(), } } diff --git a/pkg/api/resolver_query_find_gallery.go b/pkg/api/resolver_query_find_gallery.go index 9468b73bc..de5437efe 100644 --- a/pkg/api/resolver_query_find_gallery.go +++ b/pkg/api/resolver_query_find_gallery.go @@ -10,7 +10,7 @@ import ( func (r *queryResolver) FindGallery(ctx context.Context, id string) (*models.Gallery, error) { qb := models.NewGalleryQueryBuilder() idInt, _ := strconv.Atoi(id) - return qb.Find(idInt) + return qb.Find(idInt, nil) } func (r *queryResolver) FindGalleries(ctx context.Context, galleryFilter *models.GalleryFilterType, filter *models.FindFilterType) (*models.FindGalleriesResultType, error) { diff --git a/pkg/api/resolver_query_find_image.go b/pkg/api/resolver_query_find_image.go new file mode 100644 index 000000000..471e80853 --- /dev/null +++ b/pkg/api/resolver_query_find_image.go @@ -0,0 +1,30 @@ +package api + +import ( + "context" + "strconv" + + "github.com/stashapp/stash/pkg/models" +) + +func (r *queryResolver) FindImage(ctx context.Context, id *string, checksum *string) (*models.Image, error) { + qb := models.NewImageQueryBuilder() + var image *models.Image + var err error + if id != nil { + idInt, _ := strconv.Atoi(*id) + image, err = qb.Find(idInt) + } else if checksum != nil { + image, err = qb.FindByChecksum(*checksum) + } + return image, err +} + +func (r *queryResolver) FindImages(ctx context.Context, imageFilter *models.ImageFilterType, imageIds []int, filter *models.FindFilterType) (*models.FindImagesResultType, error) { + qb := models.NewImageQueryBuilder() + images, total := qb.Query(imageFilter, filter) + return &models.FindImagesResultType{ + Count: total, + Images: images, + }, nil +} diff --git a/pkg/api/routes_gallery.go b/pkg/api/routes_gallery.go deleted file mode 100644 index 4b5a7f4cd..000000000 --- a/pkg/api/routes_gallery.go +++ /dev/null @@ -1,65 +0,0 @@ -package api - -import ( - "context" - "github.com/go-chi/chi" - "github.com/stashapp/stash/pkg/models" - "net/http" - "strconv" -) - -type galleryRoutes struct{} - -func (rs galleryRoutes) Routes() chi.Router { - r := chi.NewRouter() - - r.Route("/{galleryId}", func(r chi.Router) { - r.Use(GalleryCtx) - r.Get("/{fileIndex}", rs.File) - }) - - return r -} - -func (rs galleryRoutes) File(w http.ResponseWriter, r *http.Request) { - gallery := r.Context().Value(galleryKey).(*models.Gallery) - if gallery == nil { - http.Error(w, http.StatusText(404), 404) - return - } - fileIndex, _ := strconv.Atoi(chi.URLParam(r, "fileIndex")) - thumb := 
r.URL.Query().Get("thumb") - w.Header().Add("Cache-Control", "max-age=604800000") // 1 Week - if thumb == "true" { - _, _ = w.Write(cacheGthumb(gallery, fileIndex, models.DefaultGthumbWidth)) - } else if thumb == "" { - _, _ = w.Write(gallery.GetImage(fileIndex)) - } else { - width, err := strconv.ParseInt(thumb, 0, 64) - if err != nil { - http.Error(w, http.StatusText(400), 400) - return - } - _, _ = w.Write(cacheGthumb(gallery, fileIndex, int(width))) - } -} - -func GalleryCtx(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - galleryID, err := strconv.Atoi(chi.URLParam(r, "galleryId")) - if err != nil { - http.Error(w, http.StatusText(404), 404) - return - } - - qb := models.NewGalleryQueryBuilder() - gallery, err := qb.Find(galleryID) - if err != nil { - http.Error(w, http.StatusText(404), 404) - return - } - - ctx := context.WithValue(r.Context(), galleryKey, gallery) - next.ServeHTTP(w, r.WithContext(ctx)) - }) -} diff --git a/pkg/api/routes_image.go b/pkg/api/routes_image.go new file mode 100644 index 000000000..cc182fb0a --- /dev/null +++ b/pkg/api/routes_image.go @@ -0,0 +1,75 @@ +package api + +import ( + "context" + "net/http" + "strconv" + + "github.com/go-chi/chi" + "github.com/stashapp/stash/pkg/image" + "github.com/stashapp/stash/pkg/manager" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) + +type imageRoutes struct{} + +func (rs imageRoutes) Routes() chi.Router { + r := chi.NewRouter() + + r.Route("/{imageId}", func(r chi.Router) { + r.Use(ImageCtx) + + r.Get("/image", rs.Image) + r.Get("/thumbnail", rs.Thumbnail) + }) + + return r +} + +// region Handlers + +func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) { + image := r.Context().Value(imageKey).(*models.Image) + filepath := manager.GetInstance().Paths.Generated.GetThumbnailPath(image.Checksum, models.DefaultGthumbWidth) + + // if the thumbnail doesn't exist, fall back to the original file + exists, _ := utils.FileExists(filepath) + if exists { + http.ServeFile(w, r, filepath) + } else { + rs.Image(w, r) + } +} + +func (rs imageRoutes) Image(w http.ResponseWriter, r *http.Request) { + i := r.Context().Value(imageKey).(*models.Image) + + // if image is in a zip file, we need to serve it specifically + image.Serve(w, r, i.Path) +} + +// endregion + +func ImageCtx(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + imageIdentifierQueryParam := chi.URLParam(r, "imageId") + imageID, _ := strconv.Atoi(imageIdentifierQueryParam) + + var image *models.Image + qb := models.NewImageQueryBuilder() + if imageID == 0 { + image, _ = qb.FindByChecksum(imageIdentifierQueryParam) + } else { + image, _ = qb.Find(imageID) + } + + if image == nil { + http.Error(w, http.StatusText(404), 404) + return + } + + ctx := context.WithValue(r.Context(), imageKey, image) + next.ServeHTTP(w, r.WithContext(ctx)) + }) +} diff --git a/pkg/api/server.go b/pkg/api/server.go index 02b44c07d..75f5ab1aa 100644 --- a/pkg/api/server.go +++ b/pkg/api/server.go @@ -145,9 +145,9 @@ func Start() { r.Get(loginEndPoint, getLoginHandler) - r.Mount("/gallery", galleryRoutes{}.Routes()) r.Mount("/performer", performerRoutes{}.Routes()) r.Mount("/scene", sceneRoutes{}.Routes()) + r.Mount("/image", imageRoutes{}.Routes()) r.Mount("/studio", studioRoutes{}.Routes()) r.Mount("/movie", movieRoutes{}.Routes()) r.Mount("/tag", tagRoutes{}.Routes()) @@ -248,8 +248,6 @@ func Start() { http.Redirect(w, r, "/", 
301) }) - startThumbCache() - // Serve static folders customServedFolders := config.GetCustomServedFolders() if customServedFolders != nil { diff --git a/pkg/api/urlbuilders/image.go b/pkg/api/urlbuilders/image.go new file mode 100644 index 000000000..e81dd446e --- /dev/null +++ b/pkg/api/urlbuilders/image.go @@ -0,0 +1,25 @@ +package urlbuilders + +import ( + "strconv" +) + +type ImageURLBuilder struct { + BaseURL string + ImageID string +} + +func NewImageURLBuilder(baseURL string, imageID int) ImageURLBuilder { + return ImageURLBuilder{ + BaseURL: baseURL, + ImageID: strconv.Itoa(imageID), + } +} + +func (b ImageURLBuilder) GetImageURL() string { + return b.BaseURL + "/image/" + b.ImageID + "/image" +} + +func (b ImageURLBuilder) GetThumbnailURL() string { + return b.BaseURL + "/image/" + b.ImageID + "/thumbnail" +} diff --git a/pkg/database/database.go b/pkg/database/database.go index e02aaca91..083cea314 100644 --- a/pkg/database/database.go +++ b/pkg/database/database.go @@ -19,7 +19,7 @@ import ( var DB *sqlx.DB var dbPath string -var appSchemaVersion uint = 12 +var appSchemaVersion uint = 13 var databaseSchemaVersion uint const sqlite3Driver = "sqlite3ex" diff --git a/pkg/database/migrations/13_images.up.sql b/pkg/database/migrations/13_images.up.sql new file mode 100644 index 000000000..23aef81cd --- /dev/null +++ b/pkg/database/migrations/13_images.up.sql @@ -0,0 +1,117 @@ +CREATE TABLE `images` ( + `id` integer not null primary key autoincrement, + `path` varchar(510) not null, + `checksum` varchar(255) not null, + `title` varchar(255), + `rating` tinyint, + `size` integer, + `width` tinyint, + `height` tinyint, + `studio_id` integer, + `o_counter` tinyint not null default 0, + `created_at` datetime not null, + `updated_at` datetime not null, + foreign key(`studio_id`) references `studios`(`id`) on delete SET NULL +); + +CREATE INDEX `index_images_on_studio_id` on `images` (`studio_id`); + +CREATE TABLE `performers_images` ( + `performer_id` integer, + `image_id` integer, + foreign key(`performer_id`) references `performers`(`id`) on delete CASCADE, + foreign key(`image_id`) references `images`(`id`) on delete CASCADE +); + +CREATE INDEX `index_performers_images_on_image_id` on `performers_images` (`image_id`); +CREATE INDEX `index_performers_images_on_performer_id` on `performers_images` (`performer_id`); + +CREATE TABLE `images_tags` ( + `image_id` integer, + `tag_id` integer, + foreign key(`image_id`) references `images`(`id`) on delete CASCADE, + foreign key(`tag_id`) references `tags`(`id`) on delete CASCADE +); + +CREATE INDEX `index_images_tags_on_tag_id` on `images_tags` (`tag_id`); +CREATE INDEX `index_images_tags_on_image_id` on `images_tags` (`image_id`); + +-- need to recreate galleries to add foreign key +ALTER TABLE `galleries` rename to `_galleries_old`; + +CREATE TABLE `galleries` ( + `id` integer not null primary key autoincrement, + `path` varchar(510), + `checksum` varchar(255) not null, + `zip` boolean not null default '0', + `title` varchar(255), + `url` varchar(255), + `date` date, + `details` text, + `studio_id` integer, + `rating` tinyint, + `scene_id` integer, + `created_at` datetime not null, + `updated_at` datetime not null, + foreign key(`scene_id`) references `scenes`(`id`) on delete SET NULL, + foreign key(`studio_id`) references `studios`(`id`) on delete SET NULL +); + +DROP INDEX IF EXISTS `index_galleries_on_scene_id`; +DROP INDEX IF EXISTS `galleries_path_unique`; +DROP INDEX IF EXISTS `galleries_checksum_unique`; + +CREATE INDEX 
`index_galleries_on_scene_id` on `galleries` (`scene_id`); +CREATE UNIQUE INDEX `galleries_path_unique` on `galleries` (`path`); +CREATE UNIQUE INDEX `galleries_checksum_unique` on `galleries` (`checksum`); +CREATE INDEX `index_galleries_on_studio_id` on `galleries` (`studio_id`); + +CREATE TABLE `galleries_images` ( + `gallery_id` integer, + `image_id` integer, + foreign key(`gallery_id`) references `galleries`(`id`) on delete CASCADE, + foreign key(`image_id`) references `images`(`id`) on delete CASCADE +); + +CREATE INDEX `index_galleries_images_on_image_id` on `galleries_images` (`image_id`); +CREATE INDEX `index_galleries_images_on_gallery_id` on `galleries_images` (`gallery_id`); + +CREATE TABLE `performers_galleries` ( + `performer_id` integer, + `gallery_id` integer, + foreign key(`performer_id`) references `performers`(`id`) on delete CASCADE, + foreign key(`gallery_id`) references `galleries`(`id`) on delete CASCADE +); + +CREATE INDEX `index_performers_galleries_on_gallery_id` on `performers_galleries` (`gallery_id`); +CREATE INDEX `index_performers_galleries_on_performer_id` on `performers_galleries` (`performer_id`); + +CREATE TABLE `galleries_tags` ( + `gallery_id` integer, + `tag_id` integer, + foreign key(`gallery_id`) references `galleries`(`id`) on delete CASCADE, + foreign key(`tag_id`) references `tags`(`id`) on delete CASCADE +); + +CREATE INDEX `index_galleries_tags_on_tag_id` on `galleries_tags` (`tag_id`); +CREATE INDEX `index_galleries_tags_on_gallery_id` on `galleries_tags` (`gallery_id`); + +INSERT INTO `galleries` + ( + `id`, + `path`, + `checksum`, + `scene_id`, + `created_at`, + `updated_at` + ) + SELECT + `id`, + `path`, + `checksum`, + `scene_id`, + `created_at`, + `updated_at` + FROM `_galleries_old`; + +DROP TABLE `_galleries_old`; diff --git a/pkg/gallery/export.go b/pkg/gallery/export.go new file mode 100644 index 000000000..085dc543f --- /dev/null +++ b/pkg/gallery/export.go @@ -0,0 +1,61 @@ +package gallery + +import ( + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) + +// ToBasicJSON converts a gallery object into its JSON object equivalent. It +// does not convert the relationships to other objects. +func ToBasicJSON(gallery *models.Gallery) (*jsonschema.Gallery, error) { + newGalleryJSON := jsonschema.Gallery{ + Checksum: gallery.Checksum, + Zip: gallery.Zip, + CreatedAt: models.JSONTime{Time: gallery.CreatedAt.Timestamp}, + UpdatedAt: models.JSONTime{Time: gallery.UpdatedAt.Timestamp}, + } + + if gallery.Path.Valid { + newGalleryJSON.Path = gallery.Path.String + } + + if gallery.Title.Valid { + newGalleryJSON.Title = gallery.Title.String + } + + if gallery.URL.Valid { + newGalleryJSON.URL = gallery.URL.String + } + + if gallery.Date.Valid { + newGalleryJSON.Date = utils.GetYMDFromDatabaseDate(gallery.Date.String) + } + + if gallery.Rating.Valid { + newGalleryJSON.Rating = int(gallery.Rating.Int64) + } + + if gallery.Details.Valid { + newGalleryJSON.Details = gallery.Details.String + } + + return &newGalleryJSON, nil +} + +// GetStudioName returns the name of the provided gallery's studio. It returns an +// empty string if there is no studio assigned to the gallery. 
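As a usage sketch only (not part of this patch): ToBasicJSON above and GetStudioName below are meant to be composed by an export task. The helper below is hypothetical; the `Studio` string field on `jsonschema.Gallery` is inferred from the gallery importer changes later in this diff.

```go
package gallery

import (
	"github.com/stashapp/stash/pkg/manager/jsonschema"
	"github.com/stashapp/stash/pkg/models"
)

// exportWithStudio is a hypothetical helper showing how a caller might combine
// ToBasicJSON with GetStudioName; the real export task would also populate the
// other relationships (performers, tags, images).
func exportWithStudio(reader models.StudioReader, g *models.Gallery) (*jsonschema.Gallery, error) {
	out, err := ToBasicJSON(g)
	if err != nil {
		return nil, err
	}

	// resolve the studio name from the gallery's studio ID;
	// an empty string means no studio is assigned
	studioName, err := GetStudioName(reader, g)
	if err != nil {
		return nil, err
	}
	out.Studio = studioName

	return out, nil
}
```
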
+func GetStudioName(reader models.StudioReader, gallery *models.Gallery) (string, error) { + if gallery.StudioID.Valid { + studio, err := reader.Find(int(gallery.StudioID.Int64)) + if err != nil { + return "", err + } + + if studio != nil { + return studio.Name.String, nil + } + } + + return "", nil +} diff --git a/pkg/gallery/export_test.go b/pkg/gallery/export_test.go new file mode 100644 index 000000000..439a116de --- /dev/null +++ b/pkg/gallery/export_test.go @@ -0,0 +1,199 @@ +package gallery + +import ( + "errors" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/models/modelstest" + "github.com/stretchr/testify/assert" + + "testing" + "time" +) + +const ( + galleryID = 1 + + studioID = 4 + missingStudioID = 5 + errStudioID = 6 + + noTagsID = 11 + errTagsID = 12 +) + +const ( + path = "path" + zip = true + url = "url" + checksum = "checksum" + title = "title" + date = "2001-01-01" + rating = 5 + details = "details" +) + +const ( + studioName = "studioName" +) + +var names = []string{ + "name1", + "name2", +} + +var createTime time.Time = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC) +var updateTime time.Time = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC) + +func createFullGallery(id int) models.Gallery { + return models.Gallery{ + ID: id, + Path: modelstest.NullString(path), + Zip: zip, + Title: modelstest.NullString(title), + Checksum: checksum, + Date: models.SQLiteDate{ + String: date, + Valid: true, + }, + Details: modelstest.NullString(details), + Rating: modelstest.NullInt64(rating), + URL: modelstest.NullString(url), + CreatedAt: models.SQLiteTimestamp{ + Timestamp: createTime, + }, + UpdatedAt: models.SQLiteTimestamp{ + Timestamp: updateTime, + }, + } +} + +func createEmptyGallery(id int) models.Gallery { + return models.Gallery{ + ID: id, + CreatedAt: models.SQLiteTimestamp{ + Timestamp: createTime, + }, + UpdatedAt: models.SQLiteTimestamp{ + Timestamp: updateTime, + }, + } +} + +func createFullJSONGallery() *jsonschema.Gallery { + return &jsonschema.Gallery{ + Title: title, + Path: path, + Zip: zip, + Checksum: checksum, + Date: date, + Details: details, + Rating: rating, + URL: url, + CreatedAt: models.JSONTime{ + Time: createTime, + }, + UpdatedAt: models.JSONTime{ + Time: updateTime, + }, + } +} + +func createEmptyJSONGallery() *jsonschema.Gallery { + return &jsonschema.Gallery{ + CreatedAt: models.JSONTime{ + Time: createTime, + }, + UpdatedAt: models.JSONTime{ + Time: updateTime, + }, + } +} + +type basicTestScenario struct { + input models.Gallery + expected *jsonschema.Gallery + err bool +} + +var scenarios = []basicTestScenario{ + { + createFullGallery(galleryID), + createFullJSONGallery(), + false, + }, +} + +func TestToJSON(t *testing.T) { + for i, s := range scenarios { + gallery := s.input + json, err := ToBasicJSON(&gallery) + + if !s.err && err != nil { + t.Errorf("[%d] unexpected error: %s", i, err.Error()) + } else if s.err && err == nil { + t.Errorf("[%d] expected error not returned", i) + } else { + assert.Equal(t, s.expected, json, "[%d]", i) + } + } +} + +func createStudioGallery(studioID int) models.Gallery { + return models.Gallery{ + StudioID: modelstest.NullInt64(int64(studioID)), + } +} + +type stringTestScenario struct { + input models.Gallery + expected string + err bool +} + +var getStudioScenarios = []stringTestScenario{ + { + createStudioGallery(studioID), + studioName, + false, + }, + { + 
createStudioGallery(missingStudioID), + "", + false, + }, + { + createStudioGallery(errStudioID), + "", + true, + }, +} + +func TestGetStudioName(t *testing.T) { + mockStudioReader := &mocks.StudioReaderWriter{} + + studioErr := errors.New("error getting image") + + mockStudioReader.On("Find", studioID).Return(&models.Studio{ + Name: modelstest.NullString(studioName), + }, nil).Once() + mockStudioReader.On("Find", missingStudioID).Return(nil, nil).Once() + mockStudioReader.On("Find", errStudioID).Return(nil, studioErr).Once() + + for i, s := range getStudioScenarios { + gallery := s.input + json, err := GetStudioName(mockStudioReader, &gallery) + + if !s.err && err != nil { + t.Errorf("[%d] unexpected error: %s", i, err.Error()) + } else if s.err && err == nil { + t.Errorf("[%d] expected error not returned", i) + } else { + assert.Equal(t, s.expected, json, "[%d]", i) + } + } + + mockStudioReader.AssertExpectations(t) +} diff --git a/pkg/gallery/images.go b/pkg/gallery/images.go new file mode 100644 index 000000000..5487c797e --- /dev/null +++ b/pkg/gallery/images.go @@ -0,0 +1,30 @@ +package gallery + +import ( + "github.com/stashapp/stash/pkg/api/urlbuilders" + "github.com/stashapp/stash/pkg/models" +) + +func GetFiles(g *models.Gallery, baseURL string) []*models.GalleryFilesType { + var galleryFiles []*models.GalleryFilesType + + qb := models.NewImageQueryBuilder() + images, err := qb.FindByGalleryID(g.ID) + if err != nil { + return nil + } + + for i, img := range images { + builder := urlbuilders.NewImageURLBuilder(baseURL, img.ID) + imageURL := builder.GetImageURL() + + galleryFile := models.GalleryFilesType{ + Index: i, + Name: &img.Title.String, + Path: &imageURL, + } + galleryFiles = append(galleryFiles, &galleryFile) + } + + return galleryFiles +} diff --git a/pkg/gallery/import.go b/pkg/gallery/import.go index f671181fb..3643a2346 100644 --- a/pkg/gallery/import.go +++ b/pkg/gallery/import.go @@ -1,34 +1,268 @@ package gallery import ( + "database/sql" "fmt" - "time" + "strings" "github.com/stashapp/stash/pkg/manager/jsonschema" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" ) type Importer struct { - ReaderWriter models.GalleryReaderWriter - Input jsonschema.PathMapping + ReaderWriter models.GalleryReaderWriter + StudioWriter models.StudioReaderWriter + PerformerWriter models.PerformerReaderWriter + TagWriter models.TagReaderWriter + JoinWriter models.JoinReaderWriter + Input jsonschema.Gallery + MissingRefBehaviour models.ImportMissingRefEnum - gallery models.Gallery - imageData []byte + gallery models.Gallery + performers []*models.Performer + tags []*models.Tag } func (i *Importer) PreImport() error { - currentTime := time.Now() - i.gallery = models.Gallery{ - Checksum: i.Input.Checksum, - Path: i.Input.Path, - CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, - UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + i.gallery = i.galleryJSONToGallery(i.Input) + + if err := i.populateStudio(); err != nil { + return err + } + + if err := i.populatePerformers(); err != nil { + return err + } + + if err := i.populateTags(); err != nil { + return err } return nil } +func (i *Importer) galleryJSONToGallery(galleryJSON jsonschema.Gallery) models.Gallery { + newGallery := models.Gallery{ + Checksum: galleryJSON.Checksum, + Zip: galleryJSON.Zip, + } + + if galleryJSON.Path != "" { + newGallery.Path = sql.NullString{String: galleryJSON.Path, Valid: true} + } + + if galleryJSON.Title != "" { + newGallery.Title = sql.NullString{String: 
galleryJSON.Title, Valid: true} + } + if galleryJSON.Details != "" { + newGallery.Details = sql.NullString{String: galleryJSON.Details, Valid: true} + } + if galleryJSON.URL != "" { + newGallery.URL = sql.NullString{String: galleryJSON.URL, Valid: true} + } + if galleryJSON.Date != "" { + newGallery.Date = models.SQLiteDate{String: galleryJSON.Date, Valid: true} + } + if galleryJSON.Rating != 0 { + newGallery.Rating = sql.NullInt64{Int64: int64(galleryJSON.Rating), Valid: true} + } + + newGallery.CreatedAt = models.SQLiteTimestamp{Timestamp: galleryJSON.CreatedAt.GetTime()} + newGallery.UpdatedAt = models.SQLiteTimestamp{Timestamp: galleryJSON.UpdatedAt.GetTime()} + + return newGallery +} + +func (i *Importer) populateStudio() error { + if i.Input.Studio != "" { + studio, err := i.StudioWriter.FindByName(i.Input.Studio, false) + if err != nil { + return fmt.Errorf("error finding studio by name: %s", err.Error()) + } + + if studio == nil { + if i.MissingRefBehaviour == models.ImportMissingRefEnumFail { + return fmt.Errorf("gallery studio '%s' not found", i.Input.Studio) + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumIgnore { + return nil + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { + studioID, err := i.createStudio(i.Input.Studio) + if err != nil { + return err + } + i.gallery.StudioID = sql.NullInt64{ + Int64: int64(studioID), + Valid: true, + } + } + } else { + i.gallery.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true} + } + } + + return nil +} + +func (i *Importer) createStudio(name string) (int, error) { + newStudio := *models.NewStudio(name) + + created, err := i.StudioWriter.Create(newStudio) + if err != nil { + return 0, err + } + + return created.ID, nil +} + +func (i *Importer) populatePerformers() error { + if len(i.Input.Performers) > 0 { + names := i.Input.Performers + performers, err := i.PerformerWriter.FindByNames(names, false) + if err != nil { + return err + } + + var pluckedNames []string + for _, performer := range performers { + if !performer.Name.Valid { + continue + } + pluckedNames = append(pluckedNames, performer.Name.String) + } + + missingPerformers := utils.StrFilter(names, func(name string) bool { + return !utils.StrInclude(pluckedNames, name) + }) + + if len(missingPerformers) > 0 { + if i.MissingRefBehaviour == models.ImportMissingRefEnumFail { + return fmt.Errorf("gallery performers [%s] not found", strings.Join(missingPerformers, ", ")) + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { + createdPerformers, err := i.createPerformers(missingPerformers) + if err != nil { + return fmt.Errorf("error creating gallery performers: %s", err.Error()) + } + + performers = append(performers, createdPerformers...) 
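An aside on this block (not patch content): the same find-or-create-by-name handling for performers is repeated almost verbatim in the image importer added later in this diff, while tags there already go through a shared importTags helper. A sketch of what the performer equivalent could look like, hypothetical and built only from the calls used above:

```go
package gallery

import (
	"fmt"
	"strings"

	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/utils"
)

// importPerformers is a hypothetical shared helper, not part of this change:
// it resolves performer names to records and applies the missing-reference
// behaviour the same way the inline code above does.
func importPerformers(w models.PerformerReaderWriter, names []string, behaviour models.ImportMissingRefEnum) ([]*models.Performer, error) {
	performers, err := w.FindByNames(names, false)
	if err != nil {
		return nil, err
	}

	// pluck the names that were actually found
	var found []string
	for _, p := range performers {
		if p.Name.Valid {
			found = append(found, p.Name.String)
		}
	}

	missing := utils.StrFilter(names, func(name string) bool {
		return !utils.StrInclude(found, name)
	})

	if len(missing) > 0 {
		switch behaviour {
		case models.ImportMissingRefEnumFail:
			return nil, fmt.Errorf("performers [%s] not found", strings.Join(missing, ", "))
		case models.ImportMissingRefEnumCreate:
			for _, name := range missing {
				created, err := w.Create(*models.NewPerformer(name))
				if err != nil {
					return nil, err
				}
				performers = append(performers, created)
			}
		}
		// models.ImportMissingRefEnumIgnore: keep only the performers that were found
	}

	return performers, nil
}
```
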
+ } + + // ignore if MissingRefBehaviour set to Ignore + } + + i.performers = performers + } + + return nil +} + +func (i *Importer) createPerformers(names []string) ([]*models.Performer, error) { + var ret []*models.Performer + for _, name := range names { + newPerformer := *models.NewPerformer(name) + + created, err := i.PerformerWriter.Create(newPerformer) + if err != nil { + return nil, err + } + + ret = append(ret, created) + } + + return ret, nil +} + +func (i *Importer) populateTags() error { + if len(i.Input.Tags) > 0 { + names := i.Input.Tags + tags, err := i.TagWriter.FindByNames(names, false) + if err != nil { + return err + } + + var pluckedNames []string + for _, tag := range tags { + pluckedNames = append(pluckedNames, tag.Name) + } + + missingTags := utils.StrFilter(names, func(name string) bool { + return !utils.StrInclude(pluckedNames, name) + }) + + if len(missingTags) > 0 { + if i.MissingRefBehaviour == models.ImportMissingRefEnumFail { + return fmt.Errorf("gallery tags [%s] not found", strings.Join(missingTags, ", ")) + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { + createdTags, err := i.createTags(missingTags) + if err != nil { + return fmt.Errorf("error creating gallery tags: %s", err.Error()) + } + + tags = append(tags, createdTags...) + } + + // ignore if MissingRefBehaviour set to Ignore + } + + i.tags = tags + } + + return nil +} + +func (i *Importer) createTags(names []string) ([]*models.Tag, error) { + var ret []*models.Tag + for _, name := range names { + newTag := *models.NewTag(name) + + created, err := i.TagWriter.Create(newTag) + if err != nil { + return nil, err + } + + ret = append(ret, created) + } + + return ret, nil +} + func (i *Importer) PostImport(id int) error { + if len(i.performers) > 0 { + var performerJoins []models.PerformersGalleries + for _, performer := range i.performers { + join := models.PerformersGalleries{ + PerformerID: performer.ID, + GalleryID: id, + } + performerJoins = append(performerJoins, join) + } + if err := i.JoinWriter.UpdatePerformersGalleries(id, performerJoins); err != nil { + return fmt.Errorf("failed to associate performers: %s", err.Error()) + } + } + + if len(i.tags) > 0 { + var tagJoins []models.GalleriesTags + for _, tag := range i.tags { + join := models.GalleriesTags{ + GalleryID: id, + TagID: tag.ID, + } + tagJoins = append(tagJoins, join) + } + if err := i.JoinWriter.UpdateGalleriesTags(id, tagJoins); err != nil { + return fmt.Errorf("failed to associate tags: %s", err.Error()) + } + } + return nil } @@ -37,7 +271,7 @@ func (i *Importer) Name() string { } func (i *Importer) FindExistingID() (*int, error) { - existing, err := i.ReaderWriter.FindByPath(i.Name()) + existing, err := i.ReaderWriter.FindByChecksum(i.Input.Checksum) if err != nil { return nil, err } diff --git a/pkg/gallery/import_test.go b/pkg/gallery/import_test.go index 29dc1cb8a..6cbcbc32a 100644 --- a/pkg/gallery/import_test.go +++ b/pkg/gallery/import_test.go @@ -3,42 +3,414 @@ package gallery import ( "errors" "testing" + "time" "github.com/stashapp/stash/pkg/manager/jsonschema" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/models/modelstest" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" ) const ( - galleryPath = "galleryPath" - galleryPathErr = "galleryPathErr" - existingGalleryPath = "existingGalleryPath" + galleryNameErr = "galleryNameErr" + existingGalleryName = "existingGalleryName" - galleryID = 1 - idErr = 2 - 
existingGalleryID = 100 + existingGalleryID = 100 + existingStudioID = 101 + existingPerformerID = 103 + existingTagID = 105 + + existingStudioName = "existingStudioName" + existingStudioErr = "existingStudioErr" + missingStudioName = "missingStudioName" + + existingPerformerName = "existingPerformerName" + existingPerformerErr = "existingPerformerErr" + missingPerformerName = "missingPerformerName" + + existingTagName = "existingTagName" + existingTagErr = "existingTagErr" + missingTagName = "missingTagName" + + errPerformersID = 200 + + missingChecksum = "missingChecksum" + errChecksum = "errChecksum" ) +var createdAt time.Time = time.Date(2001, time.January, 2, 1, 2, 3, 4, time.Local) +var updatedAt time.Time = time.Date(2002, time.January, 2, 1, 2, 3, 4, time.Local) + func TestImporterName(t *testing.T) { i := Importer{ - Input: jsonschema.PathMapping{ - Path: galleryPath, + Input: jsonschema.Gallery{ + Path: path, }, } - assert.Equal(t, galleryPath, i.Name()) + assert.Equal(t, path, i.Name()) } func TestImporterPreImport(t *testing.T) { i := Importer{ - Input: jsonschema.PathMapping{ - Path: galleryPath, + Input: jsonschema.Gallery{ + Path: path, + Checksum: checksum, + Title: title, + Date: date, + Details: details, + Rating: rating, + URL: url, + CreatedAt: models.JSONTime{ + Time: createdAt, + }, + UpdatedAt: models.JSONTime{ + Time: updatedAt, + }, }, } err := i.PreImport() assert.Nil(t, err) + + expectedGallery := models.Gallery{ + Path: modelstest.NullString(path), + Checksum: checksum, + Title: modelstest.NullString(title), + Date: models.SQLiteDate{ + String: date, + Valid: true, + }, + Details: modelstest.NullString(details), + Rating: modelstest.NullInt64(rating), + URL: modelstest.NullString(url), + CreatedAt: models.SQLiteTimestamp{ + Timestamp: createdAt, + }, + UpdatedAt: models.SQLiteTimestamp{ + Timestamp: updatedAt, + }, + } + + assert.Equal(t, expectedGallery, i.gallery) +} + +func TestImporterPreImportWithStudio(t *testing.T) { + studioReaderWriter := &mocks.StudioReaderWriter{} + + i := Importer{ + StudioWriter: studioReaderWriter, + Input: jsonschema.Gallery{ + Studio: existingStudioName, + Path: path, + }, + } + + studioReaderWriter.On("FindByName", existingStudioName, false).Return(&models.Studio{ + ID: existingStudioID, + }, nil).Once() + studioReaderWriter.On("FindByName", existingStudioErr, false).Return(nil, errors.New("FindByName error")).Once() + + err := i.PreImport() + assert.Nil(t, err) + assert.Equal(t, int64(existingStudioID), i.gallery.StudioID.Int64) + + i.Input.Studio = existingStudioErr + err = i.PreImport() + assert.NotNil(t, err) + + studioReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingStudio(t *testing.T) { + studioReaderWriter := &mocks.StudioReaderWriter{} + + i := Importer{ + StudioWriter: studioReaderWriter, + Input: jsonschema.Gallery{ + Path: path, + Studio: missingStudioName, + }, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + } + + studioReaderWriter.On("FindByName", missingStudioName, false).Return(nil, nil).Times(3) + studioReaderWriter.On("Create", mock.AnythingOfType("models.Studio")).Return(&models.Studio{ + ID: existingStudioID, + }, nil) + + err := i.PreImport() + assert.NotNil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore + err = i.PreImport() + assert.Nil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumCreate + err = i.PreImport() + assert.Nil(t, err) + assert.Equal(t, int64(existingStudioID), i.gallery.StudioID.Int64) + + 
studioReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) { + studioReaderWriter := &mocks.StudioReaderWriter{} + + i := Importer{ + StudioWriter: studioReaderWriter, + Input: jsonschema.Gallery{ + Path: path, + Studio: missingStudioName, + }, + MissingRefBehaviour: models.ImportMissingRefEnumCreate, + } + + studioReaderWriter.On("FindByName", missingStudioName, false).Return(nil, nil).Once() + studioReaderWriter.On("Create", mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error")) + + err := i.PreImport() + assert.NotNil(t, err) +} + +func TestImporterPreImportWithPerformer(t *testing.T) { + performerReaderWriter := &mocks.PerformerReaderWriter{} + + i := Importer{ + PerformerWriter: performerReaderWriter, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + Input: jsonschema.Gallery{ + Path: path, + Performers: []string{ + existingPerformerName, + }, + }, + } + + performerReaderWriter.On("FindByNames", []string{existingPerformerName}, false).Return([]*models.Performer{ + { + ID: existingPerformerID, + Name: modelstest.NullString(existingPerformerName), + }, + }, nil).Once() + performerReaderWriter.On("FindByNames", []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once() + + err := i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingPerformerID, i.performers[0].ID) + + i.Input.Performers = []string{existingPerformerErr} + err = i.PreImport() + assert.NotNil(t, err) + + performerReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingPerformer(t *testing.T) { + performerReaderWriter := &mocks.PerformerReaderWriter{} + + i := Importer{ + PerformerWriter: performerReaderWriter, + Input: jsonschema.Gallery{ + Path: path, + Performers: []string{ + missingPerformerName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + } + + performerReaderWriter.On("FindByNames", []string{missingPerformerName}, false).Return(nil, nil).Times(3) + performerReaderWriter.On("Create", mock.AnythingOfType("models.Performer")).Return(&models.Performer{ + ID: existingPerformerID, + }, nil) + + err := i.PreImport() + assert.NotNil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore + err = i.PreImport() + assert.Nil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumCreate + err = i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingPerformerID, i.performers[0].ID) + + performerReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) { + performerReaderWriter := &mocks.PerformerReaderWriter{} + + i := Importer{ + PerformerWriter: performerReaderWriter, + Input: jsonschema.Gallery{ + Path: path, + Performers: []string{ + missingPerformerName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumCreate, + } + + performerReaderWriter.On("FindByNames", []string{missingPerformerName}, false).Return(nil, nil).Once() + performerReaderWriter.On("Create", mock.AnythingOfType("models.Performer")).Return(nil, errors.New("Create error")) + + err := i.PreImport() + assert.NotNil(t, err) +} + +func TestImporterPreImportWithTag(t *testing.T) { + tagReaderWriter := &mocks.TagReaderWriter{} + + i := Importer{ + TagWriter: tagReaderWriter, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + Input: jsonschema.Gallery{ + Path: path, + Tags: []string{ + existingTagName, + }, + }, + } + + tagReaderWriter.On("FindByNames", []string{existingTagName}, 
false).Return([]*models.Tag{ + { + ID: existingTagID, + Name: existingTagName, + }, + }, nil).Once() + tagReaderWriter.On("FindByNames", []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once() + + err := i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingTagID, i.tags[0].ID) + + i.Input.Tags = []string{existingTagErr} + err = i.PreImport() + assert.NotNil(t, err) + + tagReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingTag(t *testing.T) { + tagReaderWriter := &mocks.TagReaderWriter{} + + i := Importer{ + TagWriter: tagReaderWriter, + Input: jsonschema.Gallery{ + Path: path, + Tags: []string{ + missingTagName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + } + + tagReaderWriter.On("FindByNames", []string{missingTagName}, false).Return(nil, nil).Times(3) + tagReaderWriter.On("Create", mock.AnythingOfType("models.Tag")).Return(&models.Tag{ + ID: existingTagID, + }, nil) + + err := i.PreImport() + assert.NotNil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore + err = i.PreImport() + assert.Nil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumCreate + err = i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingTagID, i.tags[0].ID) + + tagReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { + tagReaderWriter := &mocks.TagReaderWriter{} + + i := Importer{ + TagWriter: tagReaderWriter, + Input: jsonschema.Gallery{ + Path: path, + Tags: []string{ + missingTagName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumCreate, + } + + tagReaderWriter.On("FindByNames", []string{missingTagName}, false).Return(nil, nil).Once() + tagReaderWriter.On("Create", mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error")) + + err := i.PreImport() + assert.NotNil(t, err) +} + +func TestImporterPostImportUpdatePerformers(t *testing.T) { + joinReaderWriter := &mocks.JoinReaderWriter{} + + i := Importer{ + JoinWriter: joinReaderWriter, + performers: []*models.Performer{ + { + ID: existingPerformerID, + }, + }, + } + + updateErr := errors.New("UpdatePerformersGalleries error") + + joinReaderWriter.On("UpdatePerformersGalleries", galleryID, []models.PerformersGalleries{ + { + PerformerID: existingPerformerID, + GalleryID: galleryID, + }, + }).Return(nil).Once() + joinReaderWriter.On("UpdatePerformersGalleries", errPerformersID, mock.AnythingOfType("[]models.PerformersGalleries")).Return(updateErr).Once() + + err := i.PostImport(galleryID) + assert.Nil(t, err) + + err = i.PostImport(errPerformersID) + assert.NotNil(t, err) + + joinReaderWriter.AssertExpectations(t) +} + +func TestImporterPostImportUpdateTags(t *testing.T) { + joinReaderWriter := &mocks.JoinReaderWriter{} + + i := Importer{ + JoinWriter: joinReaderWriter, + tags: []*models.Tag{ + { + ID: existingTagID, + }, + }, + } + + updateErr := errors.New("UpdateGalleriesTags error") + + joinReaderWriter.On("UpdateGalleriesTags", galleryID, []models.GalleriesTags{ + { + TagID: existingTagID, + GalleryID: galleryID, + }, + }).Return(nil).Once() + joinReaderWriter.On("UpdateGalleriesTags", errTagsID, mock.AnythingOfType("[]models.GalleriesTags")).Return(updateErr).Once() + + err := i.PostImport(galleryID) + assert.Nil(t, err) + + err = i.PostImport(errTagsID) + assert.NotNil(t, err) + + joinReaderWriter.AssertExpectations(t) } func TestImporterFindExistingID(t *testing.T) { @@ -46,28 +418,29 @@ func TestImporterFindExistingID(t *testing.T) { i := Importer{ 
ReaderWriter: readerWriter, - Input: jsonschema.PathMapping{ - Path: galleryPath, + Input: jsonschema.Gallery{ + Path: path, + Checksum: missingChecksum, }, } - errFindByPath := errors.New("FindByPath error") - readerWriter.On("FindByPath", galleryPath).Return(nil, nil).Once() - readerWriter.On("FindByPath", existingGalleryPath).Return(&models.Gallery{ + expectedErr := errors.New("FindBy* error") + readerWriter.On("FindByChecksum", missingChecksum).Return(nil, nil).Once() + readerWriter.On("FindByChecksum", checksum).Return(&models.Gallery{ ID: existingGalleryID, }, nil).Once() - readerWriter.On("FindByPath", galleryPathErr).Return(nil, errFindByPath).Once() + readerWriter.On("FindByChecksum", errChecksum).Return(nil, expectedErr).Once() id, err := i.FindExistingID() assert.Nil(t, id) assert.Nil(t, err) - i.Input.Path = existingGalleryPath + i.Input.Checksum = checksum id, err = i.FindExistingID() assert.Equal(t, existingGalleryID, *id) assert.Nil(t, err) - i.Input.Path = galleryPathErr + i.Input.Checksum = errChecksum id, err = i.FindExistingID() assert.Nil(t, id) assert.NotNil(t, err) @@ -79,11 +452,11 @@ func TestCreate(t *testing.T) { readerWriter := &mocks.GalleryReaderWriter{} gallery := models.Gallery{ - Path: galleryPath, + Title: modelstest.NullString(title), } galleryErr := models.Gallery{ - Path: galleryPathErr, + Title: modelstest.NullString(galleryNameErr), } i := Importer{ @@ -113,11 +486,7 @@ func TestUpdate(t *testing.T) { readerWriter := &mocks.GalleryReaderWriter{} gallery := models.Gallery{ - Path: galleryPath, - } - - galleryErr := models.Gallery{ - Path: galleryPathErr, + Title: modelstest.NullString(title), } i := Importer{ @@ -125,8 +494,6 @@ func TestUpdate(t *testing.T) { gallery: gallery, } - errUpdate := errors.New("Update error") - // id needs to be set for the mock input gallery.ID = galleryID readerWriter.On("Update", gallery).Return(nil, nil).Once() @@ -134,14 +501,5 @@ func TestUpdate(t *testing.T) { err := i.Update(galleryID) assert.Nil(t, err) - i.gallery = galleryErr - - // need to set id separately - galleryErr.ID = idErr - readerWriter.On("Update", galleryErr).Return(nil, errUpdate).Once() - - err = i.Update(idErr) - assert.NotNil(t, err) - readerWriter.AssertExpectations(t) } diff --git a/pkg/image/export.go b/pkg/image/export.go new file mode 100644 index 000000000..c2c560db5 --- /dev/null +++ b/pkg/image/export.go @@ -0,0 +1,81 @@ +package image + +import ( + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" +) + +// ToBasicJSON converts a image object into its JSON object equivalent. It +// does not convert the relationships to other objects, with the exception +// of cover image. 
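A rough sketch (not part of this patch) of how the function below is expected to be fed: CalculateMD5 and SetFileDetails from pkg/image/image.go, added later in this diff, populate the model that ToBasicJSON then serialises. The helper name is hypothetical, and the real scan task sets more fields (title, timestamps) than shown here.

```go
package image

import (
	"github.com/stashapp/stash/pkg/manager/jsonschema"
	"github.com/stashapp/stash/pkg/models"
)

// buildAndExport is a hypothetical helper: hash the file (zip-aware), fill in
// size/width/height, then convert the model to its JSON schema object.
func buildAndExport(path string) (*jsonschema.Image, error) {
	checksum, err := CalculateMD5(path)
	if err != nil {
		return nil, err
	}

	img := models.Image{
		Path:     path,
		Checksum: checksum,
	}

	// reads dimensions and file size, handling images inside zip files
	if err := SetFileDetails(&img); err != nil {
		return nil, err
	}

	return ToBasicJSON(&img), nil
}
```
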
+func ToBasicJSON(image *models.Image) *jsonschema.Image { + newImageJSON := jsonschema.Image{ + Checksum: image.Checksum, + CreatedAt: models.JSONTime{Time: image.CreatedAt.Timestamp}, + UpdatedAt: models.JSONTime{Time: image.UpdatedAt.Timestamp}, + } + + if image.Title.Valid { + newImageJSON.Title = image.Title.String + } + + if image.Rating.Valid { + newImageJSON.Rating = int(image.Rating.Int64) + } + + newImageJSON.OCounter = image.OCounter + + newImageJSON.File = getImageFileJSON(image) + + return &newImageJSON +} + +func getImageFileJSON(image *models.Image) *jsonschema.ImageFile { + ret := &jsonschema.ImageFile{} + + if image.Size.Valid { + ret.Size = int(image.Size.Int64) + } + + if image.Width.Valid { + ret.Width = int(image.Width.Int64) + } + + if image.Height.Valid { + ret.Height = int(image.Height.Int64) + } + + return ret +} + +// GetStudioName returns the name of the provided image's studio. It returns an +// empty string if there is no studio assigned to the image. +func GetStudioName(reader models.StudioReader, image *models.Image) (string, error) { + if image.StudioID.Valid { + studio, err := reader.Find(int(image.StudioID.Int64)) + if err != nil { + return "", err + } + + if studio != nil { + return studio.Name.String, nil + } + } + + return "", nil +} + +// GetGalleryChecksum returns the checksum of the provided image. It returns an +// empty string if there is no gallery assigned to the image. +// func GetGalleryChecksum(reader models.GalleryReader, image *models.Image) (string, error) { +// gallery, err := reader.FindByImageID(image.ID) +// if err != nil { +// return "", fmt.Errorf("error getting image gallery: %s", err.Error()) +// } + +// if gallery != nil { +// return gallery.Checksum, nil +// } + +// return "", nil +// } diff --git a/pkg/image/export_test.go b/pkg/image/export_test.go new file mode 100644 index 000000000..8bbc198b0 --- /dev/null +++ b/pkg/image/export_test.go @@ -0,0 +1,248 @@ +package image + +import ( + "errors" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/models/modelstest" + "github.com/stretchr/testify/assert" + + "testing" + "time" +) + +const ( + imageID = 1 + noImageID = 2 + errImageID = 3 + + studioID = 4 + missingStudioID = 5 + errStudioID = 6 + + // noGalleryID = 7 + // errGalleryID = 8 + + noTagsID = 11 + errTagsID = 12 + + noMoviesID = 13 + errMoviesID = 14 + errFindMovieID = 15 + + noMarkersID = 16 + errMarkersID = 17 + errFindPrimaryTagID = 18 + errFindByMarkerID = 19 +) + +const ( + checksum = "checksum" + title = "title" + rating = 5 + ocounter = 2 + size = 123 + width = 100 + height = 100 +) + +const ( + studioName = "studioName" + //galleryChecksum = "galleryChecksum" +) + +var names = []string{ + "name1", + "name2", +} + +var createTime time.Time = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC) +var updateTime time.Time = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC) + +func createFullImage(id int) models.Image { + return models.Image{ + ID: id, + Title: modelstest.NullString(title), + Checksum: checksum, + Height: modelstest.NullInt64(height), + OCounter: ocounter, + Rating: modelstest.NullInt64(rating), + Size: modelstest.NullInt64(int64(size)), + Width: modelstest.NullInt64(width), + CreatedAt: models.SQLiteTimestamp{ + Timestamp: createTime, + }, + UpdatedAt: models.SQLiteTimestamp{ + Timestamp: updateTime, + }, + } +} + +func createEmptyImage(id int) models.Image { + return models.Image{ + ID: 
id, + CreatedAt: models.SQLiteTimestamp{ + Timestamp: createTime, + }, + UpdatedAt: models.SQLiteTimestamp{ + Timestamp: updateTime, + }, + } +} + +func createFullJSONImage() *jsonschema.Image { + return &jsonschema.Image{ + Title: title, + Checksum: checksum, + OCounter: ocounter, + Rating: rating, + File: &jsonschema.ImageFile{ + Height: height, + Size: size, + Width: width, + }, + CreatedAt: models.JSONTime{ + Time: createTime, + }, + UpdatedAt: models.JSONTime{ + Time: updateTime, + }, + } +} + +func createEmptyJSONImage() *jsonschema.Image { + return &jsonschema.Image{ + File: &jsonschema.ImageFile{}, + CreatedAt: models.JSONTime{ + Time: createTime, + }, + UpdatedAt: models.JSONTime{ + Time: updateTime, + }, + } +} + +type basicTestScenario struct { + input models.Image + expected *jsonschema.Image +} + +var scenarios = []basicTestScenario{ + { + createFullImage(imageID), + createFullJSONImage(), + }, +} + +func TestToJSON(t *testing.T) { + for i, s := range scenarios { + image := s.input + json := ToBasicJSON(&image) + + assert.Equal(t, s.expected, json, "[%d]", i) + } +} + +func createStudioImage(studioID int) models.Image { + return models.Image{ + StudioID: modelstest.NullInt64(int64(studioID)), + } +} + +type stringTestScenario struct { + input models.Image + expected string + err bool +} + +var getStudioScenarios = []stringTestScenario{ + { + createStudioImage(studioID), + studioName, + false, + }, + { + createStudioImage(missingStudioID), + "", + false, + }, + { + createStudioImage(errStudioID), + "", + true, + }, +} + +func TestGetStudioName(t *testing.T) { + mockStudioReader := &mocks.StudioReaderWriter{} + + studioErr := errors.New("error getting image") + + mockStudioReader.On("Find", studioID).Return(&models.Studio{ + Name: modelstest.NullString(studioName), + }, nil).Once() + mockStudioReader.On("Find", missingStudioID).Return(nil, nil).Once() + mockStudioReader.On("Find", errStudioID).Return(nil, studioErr).Once() + + for i, s := range getStudioScenarios { + image := s.input + json, err := GetStudioName(mockStudioReader, &image) + + if !s.err && err != nil { + t.Errorf("[%d] unexpected error: %s", i, err.Error()) + } else if s.err && err == nil { + t.Errorf("[%d] expected error not returned", i) + } else { + assert.Equal(t, s.expected, json, "[%d]", i) + } + } + + mockStudioReader.AssertExpectations(t) +} + +// var getGalleryChecksumScenarios = []stringTestScenario{ +// { +// createEmptyImage(imageID), +// galleryChecksum, +// false, +// }, +// { +// createEmptyImage(noGalleryID), +// "", +// false, +// }, +// { +// createEmptyImage(errGalleryID), +// "", +// true, +// }, +// } + +// func TestGetGalleryChecksum(t *testing.T) { +// mockGalleryReader := &mocks.GalleryReaderWriter{} + +// galleryErr := errors.New("error getting gallery") + +// mockGalleryReader.On("FindByImageID", imageID).Return(&models.Gallery{ +// Checksum: galleryChecksum, +// }, nil).Once() +// mockGalleryReader.On("FindByImageID", noGalleryID).Return(nil, nil).Once() +// mockGalleryReader.On("FindByImageID", errGalleryID).Return(nil, galleryErr).Once() + +// for i, s := range getGalleryChecksumScenarios { +// image := s.input +// json, err := GetGalleryChecksum(mockGalleryReader, &image) + +// if !s.err && err != nil { +// t.Errorf("[%d] unexpected error: %s", i, err.Error()) +// } else if s.err && err == nil { +// t.Errorf("[%d] expected error not returned", i) +// } else { +// assert.Equal(t, s.expected, json, "[%d]", i) +// } +// } + +// mockGalleryReader.AssertExpectations(t) +// } diff --git 
a/pkg/image/image.go b/pkg/image/image.go new file mode 100644 index 000000000..55e5a4162 --- /dev/null +++ b/pkg/image/image.go @@ -0,0 +1,216 @@ +package image + +import ( + "archive/zip" + "database/sql" + "fmt" + "image" + "io" + "io/ioutil" + "net/http" + "os" + "path/filepath" + "strings" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" + _ "golang.org/x/image/webp" +) + +const zipSeparator = "\x00" + +func GetSourceImage(i *models.Image) (image.Image, error) { + f, err := openSourceImage(i.Path) + if err != nil { + return nil, err + } + defer f.Close() + + srcImage, _, err := image.Decode(f) + if err != nil { + return nil, err + } + + return srcImage, nil +} + +func CalculateMD5(path string) (string, error) { + f, err := openSourceImage(path) + if err != nil { + return "", err + } + defer f.Close() + + return utils.MD5FromReader(f) +} + +func FileExists(path string) bool { + _, err := openSourceImage(path) + if err != nil { + return false + } + + return true +} + +func ZipFilename(zipFilename, filenameInZip string) string { + return zipFilename + zipSeparator + filenameInZip +} + +type imageReadCloser struct { + src io.ReadCloser + zrc *zip.ReadCloser +} + +func (i *imageReadCloser) Read(p []byte) (n int, err error) { + return i.src.Read(p) +} + +func (i *imageReadCloser) Close() error { + err := i.src.Close() + var err2 error + if i.zrc != nil { + err2 = i.zrc.Close() + } + + if err != nil { + return err + } + return err2 +} + +func openSourceImage(path string) (io.ReadCloser, error) { + // may need to read from a zip file + zipFilename, filename := getFilePath(path) + if zipFilename != "" { + r, err := zip.OpenReader(zipFilename) + if err != nil { + return nil, err + } + + // find the file matching the filename + for _, f := range r.File { + if f.Name == filename { + src, err := f.Open() + if err != nil { + return nil, err + } + return &imageReadCloser{ + src: src, + zrc: r, + }, nil + } + } + + return nil, fmt.Errorf("file with name '%s' not found in zip file '%s'", filename, zipFilename) + } + + return os.Open(filename) +} + +func getFilePath(path string) (zipFilename, filename string) { + nullIndex := strings.Index(path, zipSeparator) + if nullIndex != -1 { + zipFilename = path[0:nullIndex] + filename = path[nullIndex+1:] + } else { + filename = path + } + return +} + +func SetFileDetails(i *models.Image) error { + f, err := stat(i.Path) + if err != nil { + return err + } + + src, _ := GetSourceImage(i) + + if src != nil { + i.Width = sql.NullInt64{ + Int64: int64(src.Bounds().Max.X), + Valid: true, + } + i.Height = sql.NullInt64{ + Int64: int64(src.Bounds().Max.Y), + Valid: true, + } + } + + i.Size = sql.NullInt64{ + Int64: int64(f.Size()), + Valid: true, + } + + return nil +} + +func stat(path string) (os.FileInfo, error) { + // may need to read from a zip file + zipFilename, filename := getFilePath(path) + if zipFilename != "" { + r, err := zip.OpenReader(zipFilename) + if err != nil { + return nil, err + } + defer r.Close() + + // find the file matching the filename + for _, f := range r.File { + if f.Name == filename { + return f.FileInfo(), nil + } + } + + return nil, fmt.Errorf("file with name '%s' not found in zip file '%s'", filename, zipFilename) + } + + return os.Stat(filename) +} + +// PathDisplayName converts an image path for display. It translates the zip +// file separator character into '/', since this character is also used for +// path separators within zip files. 
It returns the original provided path +// if it does not contain the zip file separator character. +func PathDisplayName(path string) string { + return strings.Replace(path, zipSeparator, "/", -1) +} + +func Serve(w http.ResponseWriter, r *http.Request, path string) { + zipFilename, _ := getFilePath(path) + w.Header().Add("Cache-Control", "max-age=604800000") // 1 Week + if zipFilename == "" { + http.ServeFile(w, r, path) + } else { + rc, err := openSourceImage(path) + if err != nil { + // assume not found + http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound) + return + } + defer rc.Close() + + data, err := ioutil.ReadAll(rc) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + w.Write(data) + } +} + +func IsCover(img *models.Image) bool { + _, fn := getFilePath(img.Path) + return fn == "cover.jpg" +} + +func GetTitle(s *models.Image) string { + if s.Title.String != "" { + return s.Title.String + } + + _, fn := getFilePath(s.Path) + return filepath.Base(fn) +} diff --git a/pkg/image/import.go b/pkg/image/import.go new file mode 100644 index 000000000..b970be83d --- /dev/null +++ b/pkg/image/import.go @@ -0,0 +1,366 @@ +package image + +import ( + "database/sql" + "fmt" + "strings" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) + +type Importer struct { + ReaderWriter models.ImageReaderWriter + StudioWriter models.StudioReaderWriter + GalleryWriter models.GalleryReaderWriter + PerformerWriter models.PerformerReaderWriter + TagWriter models.TagReaderWriter + JoinWriter models.JoinReaderWriter + Input jsonschema.Image + Path string + MissingRefBehaviour models.ImportMissingRefEnum + + ID int + image models.Image + galleries []*models.Gallery + performers []*models.Performer + tags []*models.Tag +} + +func (i *Importer) PreImport() error { + i.image = i.imageJSONToImage(i.Input) + + if err := i.populateStudio(); err != nil { + return err + } + + if err := i.populateGalleries(); err != nil { + return err + } + + if err := i.populatePerformers(); err != nil { + return err + } + + if err := i.populateTags(); err != nil { + return err + } + + return nil +} + +func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image { + newImage := models.Image{ + Checksum: imageJSON.Checksum, + Path: i.Path, + } + + if imageJSON.Title != "" { + newImage.Title = sql.NullString{String: imageJSON.Title, Valid: true} + } + if imageJSON.Rating != 0 { + newImage.Rating = sql.NullInt64{Int64: int64(imageJSON.Rating), Valid: true} + } + + newImage.OCounter = imageJSON.OCounter + newImage.CreatedAt = models.SQLiteTimestamp{Timestamp: imageJSON.CreatedAt.GetTime()} + newImage.UpdatedAt = models.SQLiteTimestamp{Timestamp: imageJSON.UpdatedAt.GetTime()} + + if imageJSON.File != nil { + if imageJSON.File.Size != 0 { + newImage.Size = sql.NullInt64{Int64: int64(imageJSON.File.Size), Valid: true} + } + if imageJSON.File.Width != 0 { + newImage.Width = sql.NullInt64{Int64: int64(imageJSON.File.Width), Valid: true} + } + if imageJSON.File.Height != 0 { + newImage.Height = sql.NullInt64{Int64: int64(imageJSON.File.Height), Valid: true} + } + } + + return newImage +} + +func (i *Importer) populateStudio() error { + if i.Input.Studio != "" { + studio, err := i.StudioWriter.FindByName(i.Input.Studio, false) + if err != nil { + return fmt.Errorf("error finding studio by name: %s", err.Error()) + } + + if studio == nil { + if i.MissingRefBehaviour == 
models.ImportMissingRefEnumFail { + return fmt.Errorf("image studio '%s' not found", i.Input.Studio) + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumIgnore { + return nil + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { + studioID, err := i.createStudio(i.Input.Studio) + if err != nil { + return err + } + i.image.StudioID = sql.NullInt64{ + Int64: int64(studioID), + Valid: true, + } + } + } else { + i.image.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true} + } + } + + return nil +} + +func (i *Importer) createStudio(name string) (int, error) { + newStudio := *models.NewStudio(name) + + created, err := i.StudioWriter.Create(newStudio) + if err != nil { + return 0, err + } + + return created.ID, nil +} + +func (i *Importer) populateGalleries() error { + for _, checksum := range i.Input.Galleries { + gallery, err := i.GalleryWriter.FindByChecksum(checksum) + if err != nil { + return fmt.Errorf("error finding gallery: %s", err.Error()) + } + + if gallery == nil { + if i.MissingRefBehaviour == models.ImportMissingRefEnumFail { + return fmt.Errorf("image gallery '%s' not found", i.Input.Studio) + } + + // we don't create galleries - just ignore + if i.MissingRefBehaviour == models.ImportMissingRefEnumIgnore || i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { + continue + } + } else { + i.galleries = append(i.galleries, gallery) + } + } + + return nil +} + +func (i *Importer) populatePerformers() error { + if len(i.Input.Performers) > 0 { + names := i.Input.Performers + performers, err := i.PerformerWriter.FindByNames(names, false) + if err != nil { + return err + } + + var pluckedNames []string + for _, performer := range performers { + if !performer.Name.Valid { + continue + } + pluckedNames = append(pluckedNames, performer.Name.String) + } + + missingPerformers := utils.StrFilter(names, func(name string) bool { + return !utils.StrInclude(pluckedNames, name) + }) + + if len(missingPerformers) > 0 { + if i.MissingRefBehaviour == models.ImportMissingRefEnumFail { + return fmt.Errorf("image performers [%s] not found", strings.Join(missingPerformers, ", ")) + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { + createdPerformers, err := i.createPerformers(missingPerformers) + if err != nil { + return fmt.Errorf("error creating image performers: %s", err.Error()) + } + + performers = append(performers, createdPerformers...) 
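Zooming out from the individual populate helpers (an aside, not patch content): the Importer methods defined in this file are intended to be driven in a fixed order, which the tests exercise piecemeal. A hypothetical driver, assuming only the method signatures shown below (PreImport, FindExistingID, Create, Update, PostImport):

```go
package image

// runImport is a hypothetical driver, not part of this change: resolve
// references, look for an existing image by checksum, create or update the
// row, then write the gallery/performer/tag joins for the resulting ID.
func runImport(i *Importer) error {
	if err := i.PreImport(); err != nil {
		return err
	}

	existingID, err := i.FindExistingID()
	if err != nil {
		return err
	}

	var id int
	if existingID != nil {
		// an image with this checksum already exists - update it in place
		id = *existingID
		if err := i.Update(id); err != nil {
			return err
		}
	} else {
		createdID, err := i.Create()
		if err != nil {
			return err
		}
		id = *createdID
	}

	// associate galleries, performers and tags once the image ID is known
	return i.PostImport(id)
}
```
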
+ } + + // ignore if MissingRefBehaviour set to Ignore + } + + i.performers = performers + } + + return nil +} + +func (i *Importer) createPerformers(names []string) ([]*models.Performer, error) { + var ret []*models.Performer + for _, name := range names { + newPerformer := *models.NewPerformer(name) + + created, err := i.PerformerWriter.Create(newPerformer) + if err != nil { + return nil, err + } + + ret = append(ret, created) + } + + return ret, nil +} + +func (i *Importer) populateTags() error { + if len(i.Input.Tags) > 0 { + + tags, err := importTags(i.TagWriter, i.Input.Tags, i.MissingRefBehaviour) + if err != nil { + return err + } + + i.tags = tags + } + + return nil +} + +func (i *Importer) PostImport(id int) error { + if len(i.galleries) > 0 { + var galleryJoins []models.GalleriesImages + for _, gallery := range i.galleries { + join := models.GalleriesImages{ + GalleryID: gallery.ID, + ImageID: id, + } + galleryJoins = append(galleryJoins, join) + } + if err := i.JoinWriter.UpdateGalleriesImages(id, galleryJoins); err != nil { + return fmt.Errorf("failed to associate galleries: %s", err.Error()) + } + } + + if len(i.performers) > 0 { + var performerJoins []models.PerformersImages + for _, performer := range i.performers { + join := models.PerformersImages{ + PerformerID: performer.ID, + ImageID: id, + } + performerJoins = append(performerJoins, join) + } + if err := i.JoinWriter.UpdatePerformersImages(id, performerJoins); err != nil { + return fmt.Errorf("failed to associate performers: %s", err.Error()) + } + } + + if len(i.tags) > 0 { + var tagJoins []models.ImagesTags + for _, tag := range i.tags { + join := models.ImagesTags{ + ImageID: id, + TagID: tag.ID, + } + tagJoins = append(tagJoins, join) + } + if err := i.JoinWriter.UpdateImagesTags(id, tagJoins); err != nil { + return fmt.Errorf("failed to associate tags: %s", err.Error()) + } + } + + return nil +} + +func (i *Importer) Name() string { + return i.Path +} + +func (i *Importer) FindExistingID() (*int, error) { + var existing *models.Image + var err error + existing, err = i.ReaderWriter.FindByChecksum(i.Input.Checksum) + + if err != nil { + return nil, err + } + + if existing != nil { + id := existing.ID + return &id, nil + } + + return nil, nil +} + +func (i *Importer) Create() (*int, error) { + created, err := i.ReaderWriter.Create(i.image) + if err != nil { + return nil, fmt.Errorf("error creating image: %s", err.Error()) + } + + id := created.ID + i.ID = id + return &id, nil +} + +func (i *Importer) Update(id int) error { + image := i.image + image.ID = id + i.ID = id + _, err := i.ReaderWriter.UpdateFull(image) + if err != nil { + return fmt.Errorf("error updating existing image: %s", err.Error()) + } + + return nil +} + +func importTags(tagWriter models.TagReaderWriter, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { + tags, err := tagWriter.FindByNames(names, false) + if err != nil { + return nil, err + } + + var pluckedNames []string + for _, tag := range tags { + pluckedNames = append(pluckedNames, tag.Name) + } + + missingTags := utils.StrFilter(names, func(name string) bool { + return !utils.StrInclude(pluckedNames, name) + }) + + if len(missingTags) > 0 { + if missingRefBehaviour == models.ImportMissingRefEnumFail { + return nil, fmt.Errorf("tags [%s] not found", strings.Join(missingTags, ", ")) + } + + if missingRefBehaviour == models.ImportMissingRefEnumCreate { + createdTags, err := createTags(tagWriter, missingTags) + if err != nil { + return nil, 
fmt.Errorf("error creating tags: %s", err.Error()) + } + + tags = append(tags, createdTags...) + } + + // ignore if MissingRefBehaviour set to Ignore + } + + return tags, nil +} + +func createTags(tagWriter models.TagWriter, names []string) ([]*models.Tag, error) { + var ret []*models.Tag + for _, name := range names { + newTag := *models.NewTag(name) + + created, err := tagWriter.Create(newTag) + if err != nil { + return nil, err + } + + ret = append(ret, created) + } + + return ret, nil +} diff --git a/pkg/image/import_test.go b/pkg/image/import_test.go new file mode 100644 index 000000000..53414666e --- /dev/null +++ b/pkg/image/import_test.go @@ -0,0 +1,588 @@ +package image + +import ( + "errors" + "testing" + + "github.com/stashapp/stash/pkg/manager/jsonschema" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/models/modelstest" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +const invalidImage = "aW1hZ2VCeXRlcw&&" + +const ( + path = "path" + + imageNameErr = "imageNameErr" + existingImageName = "existingImageName" + + existingImageID = 100 + existingStudioID = 101 + existingGalleryID = 102 + existingPerformerID = 103 + existingMovieID = 104 + existingTagID = 105 + + existingStudioName = "existingStudioName" + existingStudioErr = "existingStudioErr" + missingStudioName = "missingStudioName" + + existingGalleryChecksum = "existingGalleryChecksum" + existingGalleryErr = "existingGalleryErr" + missingGalleryChecksum = "missingGalleryChecksum" + + existingPerformerName = "existingPerformerName" + existingPerformerErr = "existingPerformerErr" + missingPerformerName = "missingPerformerName" + + existingTagName = "existingTagName" + existingTagErr = "existingTagErr" + missingTagName = "missingTagName" + + errPerformersID = 200 + errGalleriesID = 201 + + missingChecksum = "missingChecksum" + errChecksum = "errChecksum" +) + +func TestImporterName(t *testing.T) { + i := Importer{ + Path: path, + Input: jsonschema.Image{}, + } + + assert.Equal(t, path, i.Name()) +} + +func TestImporterPreImport(t *testing.T) { + i := Importer{ + Path: path, + } + + err := i.PreImport() + assert.Nil(t, err) +} + +func TestImporterPreImportWithStudio(t *testing.T) { + studioReaderWriter := &mocks.StudioReaderWriter{} + + i := Importer{ + StudioWriter: studioReaderWriter, + Path: path, + Input: jsonschema.Image{ + Studio: existingStudioName, + }, + } + + studioReaderWriter.On("FindByName", existingStudioName, false).Return(&models.Studio{ + ID: existingStudioID, + }, nil).Once() + studioReaderWriter.On("FindByName", existingStudioErr, false).Return(nil, errors.New("FindByName error")).Once() + + err := i.PreImport() + assert.Nil(t, err) + assert.Equal(t, int64(existingStudioID), i.image.StudioID.Int64) + + i.Input.Studio = existingStudioErr + err = i.PreImport() + assert.NotNil(t, err) + + studioReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingStudio(t *testing.T) { + studioReaderWriter := &mocks.StudioReaderWriter{} + + i := Importer{ + Path: path, + StudioWriter: studioReaderWriter, + Input: jsonschema.Image{ + Studio: missingStudioName, + }, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + } + + studioReaderWriter.On("FindByName", missingStudioName, false).Return(nil, nil).Times(3) + studioReaderWriter.On("Create", mock.AnythingOfType("models.Studio")).Return(&models.Studio{ + ID: existingStudioID, + }, nil) + + err := i.PreImport() + assert.NotNil(t, err) + + 
i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore + err = i.PreImport() + assert.Nil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumCreate + err = i.PreImport() + assert.Nil(t, err) + assert.Equal(t, int64(existingStudioID), i.image.StudioID.Int64) + + studioReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) { + studioReaderWriter := &mocks.StudioReaderWriter{} + + i := Importer{ + StudioWriter: studioReaderWriter, + Path: path, + Input: jsonschema.Image{ + Studio: missingStudioName, + }, + MissingRefBehaviour: models.ImportMissingRefEnumCreate, + } + + studioReaderWriter.On("FindByName", missingStudioName, false).Return(nil, nil).Once() + studioReaderWriter.On("Create", mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error")) + + err := i.PreImport() + assert.NotNil(t, err) +} + +func TestImporterPreImportWithGallery(t *testing.T) { + galleryReaderWriter := &mocks.GalleryReaderWriter{} + + i := Importer{ + GalleryWriter: galleryReaderWriter, + Path: path, + Input: jsonschema.Image{ + Galleries: []string{ + existingGalleryChecksum, + }, + }, + } + + galleryReaderWriter.On("FindByChecksum", existingGalleryChecksum).Return(&models.Gallery{ + ID: existingGalleryID, + }, nil).Once() + galleryReaderWriter.On("FindByChecksum", existingGalleryErr).Return(nil, errors.New("FindByChecksum error")).Once() + + err := i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingGalleryID, i.galleries[0].ID) + + i.Input.Galleries = []string{ + existingGalleryErr, + } + + err = i.PreImport() + assert.NotNil(t, err) + + galleryReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingGallery(t *testing.T) { + galleryReaderWriter := &mocks.GalleryReaderWriter{} + + i := Importer{ + Path: path, + GalleryWriter: galleryReaderWriter, + Input: jsonschema.Image{ + Galleries: []string{ + missingGalleryChecksum, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + } + + galleryReaderWriter.On("FindByChecksum", missingGalleryChecksum).Return(nil, nil).Times(3) + + err := i.PreImport() + assert.NotNil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore + err = i.PreImport() + assert.Nil(t, err) + assert.Nil(t, i.galleries) + + i.MissingRefBehaviour = models.ImportMissingRefEnumCreate + err = i.PreImport() + assert.Nil(t, err) + assert.Nil(t, i.galleries) + + galleryReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithPerformer(t *testing.T) { + performerReaderWriter := &mocks.PerformerReaderWriter{} + + i := Importer{ + PerformerWriter: performerReaderWriter, + Path: path, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + Input: jsonschema.Image{ + Performers: []string{ + existingPerformerName, + }, + }, + } + + performerReaderWriter.On("FindByNames", []string{existingPerformerName}, false).Return([]*models.Performer{ + { + ID: existingPerformerID, + Name: modelstest.NullString(existingPerformerName), + }, + }, nil).Once() + performerReaderWriter.On("FindByNames", []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once() + + err := i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingPerformerID, i.performers[0].ID) + + i.Input.Performers = []string{existingPerformerErr} + err = i.PreImport() + assert.NotNil(t, err) + + performerReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingPerformer(t *testing.T) { + performerReaderWriter := &mocks.PerformerReaderWriter{} + + i := 
Importer{ + Path: path, + PerformerWriter: performerReaderWriter, + Input: jsonschema.Image{ + Performers: []string{ + missingPerformerName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + } + + performerReaderWriter.On("FindByNames", []string{missingPerformerName}, false).Return(nil, nil).Times(3) + performerReaderWriter.On("Create", mock.AnythingOfType("models.Performer")).Return(&models.Performer{ + ID: existingPerformerID, + }, nil) + + err := i.PreImport() + assert.NotNil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore + err = i.PreImport() + assert.Nil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumCreate + err = i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingPerformerID, i.performers[0].ID) + + performerReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) { + performerReaderWriter := &mocks.PerformerReaderWriter{} + + i := Importer{ + PerformerWriter: performerReaderWriter, + Path: path, + Input: jsonschema.Image{ + Performers: []string{ + missingPerformerName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumCreate, + } + + performerReaderWriter.On("FindByNames", []string{missingPerformerName}, false).Return(nil, nil).Once() + performerReaderWriter.On("Create", mock.AnythingOfType("models.Performer")).Return(nil, errors.New("Create error")) + + err := i.PreImport() + assert.NotNil(t, err) +} + +func TestImporterPreImportWithTag(t *testing.T) { + tagReaderWriter := &mocks.TagReaderWriter{} + + i := Importer{ + TagWriter: tagReaderWriter, + Path: path, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + Input: jsonschema.Image{ + Tags: []string{ + existingTagName, + }, + }, + } + + tagReaderWriter.On("FindByNames", []string{existingTagName}, false).Return([]*models.Tag{ + { + ID: existingTagID, + Name: existingTagName, + }, + }, nil).Once() + tagReaderWriter.On("FindByNames", []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once() + + err := i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingTagID, i.tags[0].ID) + + i.Input.Tags = []string{existingTagErr} + err = i.PreImport() + assert.NotNil(t, err) + + tagReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingTag(t *testing.T) { + tagReaderWriter := &mocks.TagReaderWriter{} + + i := Importer{ + Path: path, + TagWriter: tagReaderWriter, + Input: jsonschema.Image{ + Tags: []string{ + missingTagName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + } + + tagReaderWriter.On("FindByNames", []string{missingTagName}, false).Return(nil, nil).Times(3) + tagReaderWriter.On("Create", mock.AnythingOfType("models.Tag")).Return(&models.Tag{ + ID: existingTagID, + }, nil) + + err := i.PreImport() + assert.NotNil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore + err = i.PreImport() + assert.Nil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumCreate + err = i.PreImport() + assert.Nil(t, err) + assert.Equal(t, existingTagID, i.tags[0].ID) + + tagReaderWriter.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { + tagReaderWriter := &mocks.TagReaderWriter{} + + i := Importer{ + TagWriter: tagReaderWriter, + Path: path, + Input: jsonschema.Image{ + Tags: []string{ + missingTagName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumCreate, + } + + tagReaderWriter.On("FindByNames", []string{missingTagName}, false).Return(nil, nil).Once() + 
tagReaderWriter.On("Create", mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error")) + + err := i.PreImport() + assert.NotNil(t, err) +} + +func TestImporterPostImportUpdateGallery(t *testing.T) { + joinReaderWriter := &mocks.JoinReaderWriter{} + + i := Importer{ + JoinWriter: joinReaderWriter, + galleries: []*models.Gallery{ + { + ID: existingGalleryID, + }, + }, + } + + updateErr := errors.New("UpdateGalleriesImages error") + + joinReaderWriter.On("UpdateGalleriesImages", imageID, []models.GalleriesImages{ + { + GalleryID: existingGalleryID, + ImageID: imageID, + }, + }).Return(nil).Once() + joinReaderWriter.On("UpdateGalleriesImages", errGalleriesID, mock.AnythingOfType("[]models.GalleriesImages")).Return(updateErr).Once() + + err := i.PostImport(imageID) + assert.Nil(t, err) + + err = i.PostImport(errGalleriesID) + assert.NotNil(t, err) + + joinReaderWriter.AssertExpectations(t) +} + +func TestImporterPostImportUpdatePerformers(t *testing.T) { + joinReaderWriter := &mocks.JoinReaderWriter{} + + i := Importer{ + JoinWriter: joinReaderWriter, + performers: []*models.Performer{ + { + ID: existingPerformerID, + }, + }, + } + + updateErr := errors.New("UpdatePerformersImages error") + + joinReaderWriter.On("UpdatePerformersImages", imageID, []models.PerformersImages{ + { + PerformerID: existingPerformerID, + ImageID: imageID, + }, + }).Return(nil).Once() + joinReaderWriter.On("UpdatePerformersImages", errPerformersID, mock.AnythingOfType("[]models.PerformersImages")).Return(updateErr).Once() + + err := i.PostImport(imageID) + assert.Nil(t, err) + + err = i.PostImport(errPerformersID) + assert.NotNil(t, err) + + joinReaderWriter.AssertExpectations(t) +} + +func TestImporterPostImportUpdateTags(t *testing.T) { + joinReaderWriter := &mocks.JoinReaderWriter{} + + i := Importer{ + JoinWriter: joinReaderWriter, + tags: []*models.Tag{ + { + ID: existingTagID, + }, + }, + } + + updateErr := errors.New("UpdateImagesTags error") + + joinReaderWriter.On("UpdateImagesTags", imageID, []models.ImagesTags{ + { + TagID: existingTagID, + ImageID: imageID, + }, + }).Return(nil).Once() + joinReaderWriter.On("UpdateImagesTags", errTagsID, mock.AnythingOfType("[]models.ImagesTags")).Return(updateErr).Once() + + err := i.PostImport(imageID) + assert.Nil(t, err) + + err = i.PostImport(errTagsID) + assert.NotNil(t, err) + + joinReaderWriter.AssertExpectations(t) +} + +func TestImporterFindExistingID(t *testing.T) { + readerWriter := &mocks.ImageReaderWriter{} + + i := Importer{ + ReaderWriter: readerWriter, + Path: path, + Input: jsonschema.Image{ + Checksum: missingChecksum, + }, + } + + expectedErr := errors.New("FindBy* error") + readerWriter.On("FindByChecksum", missingChecksum).Return(nil, nil).Once() + readerWriter.On("FindByChecksum", checksum).Return(&models.Image{ + ID: existingImageID, + }, nil).Once() + readerWriter.On("FindByChecksum", errChecksum).Return(nil, expectedErr).Once() + + id, err := i.FindExistingID() + assert.Nil(t, id) + assert.Nil(t, err) + + i.Input.Checksum = checksum + id, err = i.FindExistingID() + assert.Equal(t, existingImageID, *id) + assert.Nil(t, err) + + i.Input.Checksum = errChecksum + id, err = i.FindExistingID() + assert.Nil(t, id) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestCreate(t *testing.T) { + readerWriter := &mocks.ImageReaderWriter{} + + image := models.Image{ + Title: modelstest.NullString(title), + } + + imageErr := models.Image{ + Title: modelstest.NullString(imageNameErr), + } + + i := Importer{ + ReaderWriter: 
readerWriter, + image: image, + } + + errCreate := errors.New("Create error") + readerWriter.On("Create", image).Return(&models.Image{ + ID: imageID, + }, nil).Once() + readerWriter.On("Create", imageErr).Return(nil, errCreate).Once() + + id, err := i.Create() + assert.Equal(t, imageID, *id) + assert.Nil(t, err) + assert.Equal(t, imageID, i.ID) + + i.image = imageErr + id, err = i.Create() + assert.Nil(t, id) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} + +func TestUpdate(t *testing.T) { + readerWriter := &mocks.ImageReaderWriter{} + + image := models.Image{ + Title: modelstest.NullString(title), + } + + imageErr := models.Image{ + Title: modelstest.NullString(imageNameErr), + } + + i := Importer{ + ReaderWriter: readerWriter, + image: image, + } + + errUpdate := errors.New("Update error") + + // id needs to be set for the mock input + image.ID = imageID + readerWriter.On("UpdateFull", image).Return(nil, nil).Once() + + err := i.Update(imageID) + assert.Nil(t, err) + assert.Equal(t, imageID, i.ID) + + i.image = imageErr + + // need to set id separately + imageErr.ID = errImageID + readerWriter.On("UpdateFull", imageErr).Return(nil, errUpdate).Once() + + err = i.Update(errImageID) + assert.NotNil(t, err) + + readerWriter.AssertExpectations(t) +} diff --git a/pkg/image/thumbnail.go b/pkg/image/thumbnail.go new file mode 100644 index 000000000..107b77143 --- /dev/null +++ b/pkg/image/thumbnail.go @@ -0,0 +1,40 @@ +package image + +import ( + "bytes" + "image" + "image/jpeg" + + "github.com/disintegration/imaging" +) + +func ThumbnailNeeded(srcImage image.Image, maxSize int) bool { + dim := srcImage.Bounds().Max + w := dim.X + h := dim.Y + + return w > maxSize || h > maxSize +} + +// GetThumbnail returns the thumbnail image of the provided image resized to +// the provided max size. It resizes based on the largest X/Y direction. +// It returns nil and an error if an error occurs reading, decoding or encoding +// the image. +func GetThumbnail(srcImage image.Image, maxSize int) ([]byte, error) { + var resizedImage image.Image + + // if height is longer then resize by height instead of width + dim := srcImage.Bounds().Max + if dim.Y > dim.X { + resizedImage = imaging.Resize(srcImage, 0, maxSize, imaging.Box) + } else { + resizedImage = imaging.Resize(srcImage, maxSize, 0, imaging.Box) + } + + buf := new(bytes.Buffer) + err := jpeg.Encode(buf, resizedImage, nil) + if err != nil { + return nil, err + } + return buf.Bytes(), nil +} diff --git a/pkg/manager/config/config.go b/pkg/manager/config/config.go index a5a746f44..770b9da38 100644 --- a/pkg/manager/config/config.go +++ b/pkg/manager/config/config.go @@ -27,6 +27,21 @@ const DefaultMaxSessionAge = 60 * 60 * 1 // 1 hours const Database = "database" const Exclude = "exclude" +const ImageExclude = "image_exclude" + +const VideoExtensions = "video_extensions" + +var defaultVideoExtensions = []string{"m4v", "mp4", "mov", "wmv", "avi", "mpg", "mpeg", "rmvb", "rm", "flv", "asf", "mkv", "webm"} + +const ImageExtensions = "image_extensions" + +var defaultImageExtensions = []string{"png", "jpg", "jpeg", "gif", "webp"} + +const GalleryExtensions = "gallery_extensions" + +var defaultGalleryExtensions = []string{"zip", "cbz"} + +const CreateGalleriesFromFolders = "create_galleries_from_folders" // CalculateMD5 is the config key used to determine if MD5 should be calculated // for video files. 
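For reference, a minimal usage sketch of the new pkg/image thumbnail helpers shown above (ThumbnailNeeded and GetThumbnail). This is not part of the patch: the standalone main wrapper, the example.jpg / example.thumb.jpg paths, and the 640 max-size literal are illustrative assumptions only; the task code itself uses models.DefaultGthumbWidth and the generated-paths helpers to decide where thumbnails are written.

package main

import (
	"image"
	_ "image/jpeg" // register decoders for the formats being read
	_ "image/png"
	"io/ioutil"
	"log"
	"os"

	stashimage "github.com/stashapp/stash/pkg/image"
)

func main() {
	const maxSize = 640 // placeholder; the scan task passes models.DefaultGthumbWidth

	f, err := os.Open("example.jpg") // placeholder input path
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	srcImage, _, err := image.Decode(f)
	if err != nil {
		log.Fatal(err)
	}

	// Skip images that are already within the requested bounds.
	if !stashimage.ThumbnailNeeded(srcImage, maxSize) {
		return
	}

	// GetThumbnail resizes on the longest edge and returns JPEG bytes.
	data, err := stashimage.GetThumbnail(srcImage, maxSize)
	if err != nil {
		log.Fatal(err)
	}

	if err := ioutil.WriteFile("example.thumb.jpg", data, 0644); err != nil { // placeholder output path
		log.Fatal(err)
	}
}

Note that GetThumbnail passes nil options to jpeg.Encode, so thumbnails are written at the encoder's default quality; a caller that needs smaller files could supply &jpeg.Options{Quality: ...} instead.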
@@ -118,8 +133,21 @@ func GetConfigPath() string { return filepath.Dir(configFileUsed) } -func GetStashPaths() []string { - return viper.GetStringSlice(Stash) +func GetStashPaths() []*models.StashConfig { + var ret []*models.StashConfig + if err := viper.UnmarshalKey(Stash, &ret); err != nil || len(ret) == 0 { + // fallback to legacy format + ss := viper.GetStringSlice(Stash) + ret = nil + for _, path := range ss { + toAdd := &models.StashConfig{ + Path: path, + } + ret = append(ret, toAdd) + } + } + + return ret } func GetCachePath() string { @@ -158,6 +186,38 @@ func GetExcludes() []string { return viper.GetStringSlice(Exclude) } +func GetImageExcludes() []string { + return viper.GetStringSlice(ImageExclude) +} + +func GetVideoExtensions() []string { + ret := viper.GetStringSlice(VideoExtensions) + if ret == nil { + ret = defaultVideoExtensions + } + return ret +} + +func GetImageExtensions() []string { + ret := viper.GetStringSlice(ImageExtensions) + if ret == nil { + ret = defaultImageExtensions + } + return ret +} + +func GetGalleryExtensions() []string { + ret := viper.GetStringSlice(GalleryExtensions) + if ret == nil { + ret = defaultGalleryExtensions + } + return ret +} + +func GetCreateGalleriesFromFolders() bool { + return viper.GetBool(CreateGalleriesFromFolders) +} + func GetLanguage() string { ret := viper.GetString(Language) @@ -204,7 +264,7 @@ func GetScraperCDPPath() string { func GetStashBoxes() []*models.StashBox { var boxes []*models.StashBox - _ = viper.UnmarshalKey(StashBoxes, &boxes) + viper.UnmarshalKey(StashBoxes, &boxes) return boxes } diff --git a/pkg/manager/exclude_files.go b/pkg/manager/exclude_files.go index 6d5a28f9f..8bf50e153 100644 --- a/pkg/manager/exclude_files.go +++ b/pkg/manager/exclude_files.go @@ -1,6 +1,7 @@ package manager import ( + "path/filepath" "regexp" "strings" @@ -80,3 +81,14 @@ func matchFileSimple(file string, regExps []*regexp.Regexp) bool { } return false } + +func matchExtension(path string, extensions []string) bool { + ext := filepath.Ext(path) + for _, e := range extensions { + if strings.ToLower(ext) == strings.ToLower("."+e) { + return true + } + } + + return false +} diff --git a/pkg/manager/gallery.go b/pkg/manager/gallery.go new file mode 100644 index 000000000..b7929ee67 --- /dev/null +++ b/pkg/manager/gallery.go @@ -0,0 +1,17 @@ +package manager + +import ( + "os" + + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" +) + +func DeleteGalleryFile(gallery *models.Gallery) { + if gallery.Path.Valid { + err := os.Remove(gallery.Path.String) + if err != nil { + logger.Warnf("Could not delete file %s: %s", gallery.Path.String, err.Error()) + } + } +} diff --git a/pkg/manager/image.go b/pkg/manager/image.go new file mode 100644 index 000000000..35ee3d654 --- /dev/null +++ b/pkg/manager/image.go @@ -0,0 +1,101 @@ +package manager + +import ( + "archive/zip" + "os" + "strings" + + "github.com/jmoiron/sqlx" + + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) + +// DestroyImage deletes an image and its associated relationships from the +// database. 
+func DestroyImage(imageID int, tx *sqlx.Tx) error {
+	qb := models.NewImageQueryBuilder()
+	jqb := models.NewJoinsQueryBuilder()
+
+	_, err := qb.Find(imageID)
+	if err != nil {
+		return err
+	}
+
+	if err := jqb.DestroyImagesTags(imageID, tx); err != nil {
+		return err
+	}
+
+	if err := jqb.DestroyPerformersImages(imageID, tx); err != nil {
+		return err
+	}
+
+	if err := jqb.DestroyImageGalleries(imageID, tx); err != nil {
+		return err
+	}
+
+	if err := qb.Destroy(imageID, tx); err != nil {
+		return err
+	}
+
+	return nil
+}
+
+// DeleteGeneratedImageFiles deletes generated files for the provided image.
+func DeleteGeneratedImageFiles(image *models.Image) {
+	thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(image.Checksum, models.DefaultGthumbWidth)
+	exists, _ := utils.FileExists(thumbPath)
+	if exists {
+		err := os.Remove(thumbPath)
+		if err != nil {
+			logger.Warnf("Could not delete file %s: %s", thumbPath, err.Error())
+		}
+	}
+}
+
+// DeleteImageFile deletes the image file from the filesystem.
+func DeleteImageFile(image *models.Image) {
+	err := os.Remove(image.Path)
+	if err != nil {
+		logger.Warnf("Could not delete file %s: %s", image.Path, err.Error())
+	}
+}
+
+func walkGalleryZip(path string, walkFunc func(file *zip.File) error) error {
+	readCloser, err := zip.OpenReader(path)
+	if err != nil {
+		return err
+	}
+	defer readCloser.Close()
+
+	for _, file := range readCloser.File {
+		if file.FileInfo().IsDir() {
+			continue
+		}
+
+		if strings.Contains(file.Name, "__MACOSX") {
+			continue
+		}
+
+		if !isImage(file.Name) {
+			continue
+		}
+
+		err := walkFunc(file)
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+func countImagesInZip(path string) int {
+	ret := 0
+	walkGalleryZip(path, func(file *zip.File) error {
+		ret++
+		return nil
+	})
+
+	return ret
+}
diff --git a/pkg/manager/json_utils.go b/pkg/manager/json_utils.go
index b72a4c60e..9a04e45cf 100644
--- a/pkg/manager/json_utils.go
+++ b/pkg/manager/json_utils.go
@@ -64,3 +64,19 @@ func (jp *jsonUtils) getScene(checksum string) (*jsonschema.Scene, error) {
 func (jp *jsonUtils) saveScene(checksum string, scene *jsonschema.Scene) error {
 	return jsonschema.SaveSceneFile(jp.json.SceneJSONPath(checksum), scene)
 }
+
+func (jp *jsonUtils) getImage(checksum string) (*jsonschema.Image, error) {
+	return jsonschema.LoadImageFile(jp.json.ImageJSONPath(checksum))
+}
+
+func (jp *jsonUtils) saveImage(checksum string, image *jsonschema.Image) error {
+	return jsonschema.SaveImageFile(jp.json.ImageJSONPath(checksum), image)
+}
+
+func (jp *jsonUtils) getGallery(checksum string) (*jsonschema.Gallery, error) {
+	return jsonschema.LoadGalleryFile(jp.json.GalleryJSONPath(checksum))
+}
+
+func (jp *jsonUtils) saveGallery(checksum string, gallery *jsonschema.Gallery) error {
+	return jsonschema.SaveGalleryFile(jp.json.GalleryJSONPath(checksum), gallery)
+}
diff --git a/pkg/manager/jsonschema/gallery.go b/pkg/manager/jsonschema/gallery.go
new file mode 100644
index 000000000..2f463620b
--- /dev/null
+++ b/pkg/manager/jsonschema/gallery.go
@@ -0,0 +1,48 @@
+package jsonschema
+
+import (
+	"fmt"
+	"os"
+
+	jsoniter "github.com/json-iterator/go"
+	"github.com/stashapp/stash/pkg/models"
+)
+
+type Gallery struct {
+	Path string `json:"path,omitempty"`
+	Checksum string `json:"checksum,omitempty"`
+	Zip bool `json:"zip,omitempty"`
+	Title string `json:"title,omitempty"`
+	URL string `json:"url,omitempty"`
+	Date string `json:"date,omitempty"`
+	Details string `json:"details,omitempty"`
+	Rating int `json:"rating,omitempty"`
+	Studio string
`json:"studio,omitempty"` + Performers []string `json:"performers,omitempty"` + Tags []string `json:"tags,omitempty"` + CreatedAt models.JSONTime `json:"created_at,omitempty"` + UpdatedAt models.JSONTime `json:"updated_at,omitempty"` +} + +func LoadGalleryFile(filePath string) (*Gallery, error) { + var gallery Gallery + file, err := os.Open(filePath) + defer file.Close() + if err != nil { + return nil, err + } + var json = jsoniter.ConfigCompatibleWithStandardLibrary + jsonParser := json.NewDecoder(file) + err = jsonParser.Decode(&gallery) + if err != nil { + return nil, err + } + return &gallery, nil +} + +func SaveGalleryFile(filePath string, gallery *Gallery) error { + if gallery == nil { + return fmt.Errorf("gallery must not be nil") + } + return marshalToFile(filePath, gallery) +} diff --git a/pkg/manager/jsonschema/image.go b/pkg/manager/jsonschema/image.go new file mode 100644 index 000000000..5b032765d --- /dev/null +++ b/pkg/manager/jsonschema/image.go @@ -0,0 +1,52 @@ +package jsonschema + +import ( + "fmt" + "os" + + jsoniter "github.com/json-iterator/go" + "github.com/stashapp/stash/pkg/models" +) + +type ImageFile struct { + Size int `json:"size"` + Width int `json:"width"` + Height int `json:"height"` +} + +type Image struct { + Title string `json:"title,omitempty"` + Checksum string `json:"checksum,omitempty"` + Studio string `json:"studio,omitempty"` + Rating int `json:"rating,omitempty"` + OCounter int `json:"o_counter,omitempty"` + Galleries []string `json:"galleries,omitempty"` + Performers []string `json:"performers,omitempty"` + Tags []string `json:"tags,omitempty"` + File *ImageFile `json:"file,omitempty"` + CreatedAt models.JSONTime `json:"created_at,omitempty"` + UpdatedAt models.JSONTime `json:"updated_at,omitempty"` +} + +func LoadImageFile(filePath string) (*Image, error) { + var image Image + file, err := os.Open(filePath) + defer file.Close() + if err != nil { + return nil, err + } + var json = jsoniter.ConfigCompatibleWithStandardLibrary + jsonParser := json.NewDecoder(file) + err = jsonParser.Decode(&image) + if err != nil { + return nil, err + } + return &image, nil +} + +func SaveImageFile(filePath string, image *Image) error { + if image == nil { + return fmt.Errorf("image must not be nil") + } + return marshalToFile(filePath, image) +} diff --git a/pkg/manager/jsonschema/mappings.go b/pkg/manager/jsonschema/mappings.go index 2e65f1023..1622e52a4 100644 --- a/pkg/manager/jsonschema/mappings.go +++ b/pkg/manager/jsonschema/mappings.go @@ -7,23 +7,20 @@ import ( jsoniter "github.com/json-iterator/go" ) -type NameMapping struct { - Name string `json:"name"` - Checksum string `json:"checksum"` -} - -type PathMapping struct { - Path string `json:"path"` +type PathNameMapping struct { + Path string `json:"path,omitempty"` + Name string `json:"name,omitempty"` Checksum string `json:"checksum"` } type Mappings struct { - Tags []NameMapping `json:"tags"` - Performers []NameMapping `json:"performers"` - Studios []NameMapping `json:"studios"` - Movies []NameMapping `json:"movies"` - Galleries []PathMapping `json:"galleries"` - Scenes []PathMapping `json:"scenes"` + Tags []PathNameMapping `json:"tags"` + Performers []PathNameMapping `json:"performers"` + Studios []PathNameMapping `json:"studios"` + Movies []PathNameMapping `json:"movies"` + Galleries []PathNameMapping `json:"galleries"` + Scenes []PathNameMapping `json:"scenes"` + Images []PathNameMapping `json:"images"` } func LoadMappingsFile(filePath string) (*Mappings, error) { diff --git 
a/pkg/manager/manager_tasks.go b/pkg/manager/manager_tasks.go index af47e2f63..4b2c39899 100644 --- a/pkg/manager/manager_tasks.go +++ b/pkg/manager/manager_tasks.go @@ -2,38 +2,30 @@ package manager import ( "errors" - "path/filepath" + "os" "strconv" - "strings" "sync" "time" - "github.com/bmatcuk/doublestar/v2" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" ) -var extensionsToScan = []string{"zip", "cbz", "m4v", "mp4", "mov", "wmv", "avi", "mpg", "mpeg", "rmvb", "rm", "flv", "asf", "mkv", "webm"} -var extensionsGallery = []string{"zip", "cbz"} - -func constructGlob() string { // create a sequence for glob doublestar from our extensions - var extList []string - for _, ext := range extensionsToScan { - extList = append(extList, strings.ToLower(ext)) - extList = append(extList, strings.ToUpper(ext)) - } - return "{" + strings.Join(extList, ",") + "}" +func isGallery(pathname string) bool { + gExt := config.GetGalleryExtensions() + return matchExtension(pathname, gExt) } -func isGallery(pathname string) bool { - for _, ext := range extensionsGallery { - if strings.ToLower(filepath.Ext(pathname)) == "."+strings.ToLower(ext) { - return true - } - } - return false +func isVideo(pathname string) bool { + vidExt := config.GetVideoExtensions() + return matchExtension(pathname, vidExt) +} + +func isImage(pathname string) bool { + imgExt := config.GetImageExtensions() + return matchExtension(pathname, imgExt) } type TaskStatus struct { @@ -86,6 +78,55 @@ func (t *TaskStatus) updated() { t.LastUpdate = time.Now() } +func (s *singleton) neededScan() (total *int, newFiles *int) { + const timeout = 90 * time.Second + + // create a control channel through which to signal the counting loop when the timeout is reached + chTimeout := time.After(timeout) + + logger.Infof("Counting files to scan...") + + t := 0 + n := 0 + + timeoutErr := errors.New("timed out") + + for _, sp := range config.GetStashPaths() { + err := walkFilesToScan(sp, func(path string, info os.FileInfo, err error) error { + t++ + task := ScanTask{FilePath: path} + if !task.doesPathExist() { + n++ + } + + //check for timeout + select { + case <-chTimeout: + return timeoutErr + default: + } + + // check stop + if s.Status.stopping { + return timeoutErr + } + + return nil + }) + + if err == timeoutErr { + break + } + + if err != nil { + logger.Errorf("Error encountered counting files to scan: %s", err.Error()) + return nil, nil + } + } + + return &t, &n +} + func (s *singleton) Scan(useFileMetadata bool) { if s.Status.Status != Idle { return @@ -96,11 +137,61 @@ func (s *singleton) Scan(useFileMetadata bool) { go func() { defer s.returnToIdleState() - var results []string - for _, path := range config.GetStashPaths() { - globPath := filepath.Join(path, "**/*."+constructGlob()) - globResults, _ := doublestar.Glob(globPath) - results = append(results, globResults...) + total, newFiles := s.neededScan() + + if s.Status.stopping { + logger.Info("Stopping due to user request") + return + } + + if total == nil || newFiles == nil { + logger.Infof("Taking too long to count content. Skipping...") + logger.Infof("Starting scan") + } else { + logger.Infof("Starting scan of %d files. 
%d New files found", *total, *newFiles) + } + + var wg sync.WaitGroup + s.Status.Progress = 0 + fileNamingAlgo := config.GetVideoFileNamingAlgorithm() + calculateMD5 := config.IsCalculateMD5() + + i := 0 + stoppingErr := errors.New("stopping") + + var galleries []string + + for _, sp := range config.GetStashPaths() { + err := walkFilesToScan(sp, func(path string, info os.FileInfo, err error) error { + if total != nil { + s.Status.setProgress(i, *total) + i++ + } + + if s.Status.stopping { + return stoppingErr + } + + if isGallery(path) { + galleries = append(galleries, path) + } + + wg.Add(1) + task := ScanTask{FilePath: path, UseFileMetadata: useFileMetadata, fileNamingAlgorithm: fileNamingAlgo, calculateMD5: calculateMD5} + go task.Start(&wg) + wg.Wait() + + return nil + }) + + if err == stoppingErr { + break + } + + if err != nil { + logger.Errorf("Error encountered scanning files: %s", err.Error()) + return + } } if s.Status.stopping { @@ -108,34 +199,12 @@ func (s *singleton) Scan(useFileMetadata bool) { return } - results, _ = excludeFiles(results, config.GetExcludes()) - total := len(results) - logger.Infof("Starting scan of %d files. %d New files found", total, s.neededScan(results)) - - var wg sync.WaitGroup - s.Status.Progress = 0 - fileNamingAlgo := config.GetVideoFileNamingAlgorithm() - calculateMD5 := config.IsCalculateMD5() - for i, path := range results { - s.Status.setProgress(i, total) - if s.Status.stopping { - logger.Info("Stopping due to user request") - return - } - wg.Add(1) - task := ScanTask{FilePath: path, UseFileMetadata: useFileMetadata, fileNamingAlgorithm: fileNamingAlgo, calculateMD5: calculateMD5} - go task.Start(&wg) - wg.Wait() - } - logger.Info("Finished scan") - for _, path := range results { - if isGallery(path) { - wg.Add(1) - task := ScanTask{FilePath: path, UseFileMetadata: false} - go task.associateGallery(&wg) - wg.Wait() - } + for _, path := range galleries { + wg.Add(1) + task := ScanTask{FilePath: path, UseFileMetadata: false} + go task.associateGallery(&wg) + wg.Wait() } logger.Info("Finished gallery association") }() @@ -238,13 +307,11 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) { s.Status.indefiniteProgress() qb := models.NewSceneQueryBuilder() - qg := models.NewGalleryQueryBuilder() mqb := models.NewSceneMarkerQueryBuilder() //this.job.total = await ObjectionUtils.getCount(Scene); instance.Paths.Generated.EnsureTmpDir() - galleryIDs := utils.StringSliceToIntSlice(input.GalleryIDs) sceneIDs := utils.StringSliceToIntSlice(input.SceneIDs) markerIDs := utils.StringSliceToIntSlice(input.MarkerIDs) @@ -272,21 +339,6 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) { lenScenes := len(scenes) total := lenScenes - var galleries []*models.Gallery - if input.Thumbnails { - if len(galleryIDs) > 0 { - galleries, err = qg.FindMany(galleryIDs) - } else { - galleries, err = qg.All() - } - - if err != nil { - logger.Errorf("failed to get galleries for generate") - return - } - total += len(galleries) - } - var markers []*models.SceneMarker if len(markerIDs) > 0 { markers, err = mqb.FindMany(markerIDs) @@ -368,29 +420,8 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) { wg.Wait() } - if input.Thumbnails { - logger.Infof("Generating thumbnails for the galleries") - for i, gallery := range galleries { - s.Status.setProgress(lenScenes+i, total) - if s.Status.stopping { - logger.Info("Stopping due to user request") - return - } - - if gallery == nil { - logger.Errorf("nil gallery, skipping generate") - 
continue - } - - wg.Add(1) - task := GenerateGthumbsTask{Gallery: *gallery, Overwrite: overwrite} - go task.Start(&wg) - wg.Wait() - } - } - for i, marker := range markers { - s.Status.setProgress(lenScenes+len(galleries)+i, total) + s.Status.setProgress(lenScenes+i, total) if s.Status.stopping { logger.Info("Stopping due to user request") return @@ -635,6 +666,7 @@ func (s *singleton) Clean() { s.Status.indefiniteProgress() qb := models.NewSceneQueryBuilder() + iqb := models.NewImageQueryBuilder() gqb := models.NewGalleryQueryBuilder() go func() { defer s.returnToIdleState() @@ -646,6 +678,12 @@ func (s *singleton) Clean() { return } + images, err := iqb.All() + if err != nil { + logger.Errorf("failed to fetch list of images for cleaning") + return + } + galleries, err := gqb.All() if err != nil { logger.Errorf("failed to fetch list of galleries for cleaning") @@ -659,7 +697,7 @@ func (s *singleton) Clean() { var wg sync.WaitGroup s.Status.Progress = 0 - total := len(scenes) + len(galleries) + total := len(scenes) + len(images) + len(galleries) fileNamingAlgo := config.GetVideoFileNamingAlgorithm() for i, scene := range scenes { s.Status.setProgress(i, total) @@ -680,13 +718,32 @@ func (s *singleton) Clean() { wg.Wait() } - for i, gallery := range galleries { + for i, img := range images { s.Status.setProgress(len(scenes)+i, total) if s.Status.stopping { logger.Info("Stopping due to user request") return } + if img == nil { + logger.Errorf("nil image, skipping Clean") + continue + } + + wg.Add(1) + + task := CleanTask{Image: img} + go task.Start(&wg) + wg.Wait() + } + + for i, gallery := range galleries { + s.Status.setProgress(len(scenes)+len(galleries)+i, total) + if s.Status.stopping { + logger.Info("Stopping due to user request") + return + } + if gallery == nil { logger.Errorf("nil gallery, skipping Clean") continue @@ -764,18 +821,6 @@ func (s *singleton) returnToIdleState() { s.Status.stopping = false } -func (s *singleton) neededScan(paths []string) int64 { - var neededScans int64 - - for _, path := range paths { - task := ScanTask{FilePath: path} - if !task.doesPathExist() { - neededScans++ - } - } - return neededScans -} - type totalsGenerate struct { sprites int64 previews int64 diff --git a/pkg/manager/paths/paths.go b/pkg/manager/paths/paths.go index e379d22ea..459c60943 100644 --- a/pkg/manager/paths/paths.go +++ b/pkg/manager/paths/paths.go @@ -9,7 +9,6 @@ import ( type Paths struct { Generated *generatedPaths - Gallery *galleryPaths Scene *scenePaths SceneMarkers *sceneMarkerPaths } @@ -18,7 +17,6 @@ func NewPaths() *Paths { p := Paths{} p.Generated = newGeneratedPaths() - p.Gallery = newGalleryPaths() p.Scene = newScenePaths(p) p.SceneMarkers = newSceneMarkerPaths(p) return &p diff --git a/pkg/manager/paths/paths_gallery.go b/pkg/manager/paths/paths_gallery.go deleted file mode 100644 index 9e4665c95..000000000 --- a/pkg/manager/paths/paths_gallery.go +++ /dev/null @@ -1,39 +0,0 @@ -package paths - -import ( - "fmt" - "github.com/stashapp/stash/pkg/manager/config" - "github.com/stashapp/stash/pkg/utils" - "path/filepath" -) - -type galleryPaths struct{} - -const thumbDir = "gthumbs" -const thumbDirDepth int = 2 -const thumbDirLength int = 2 // thumbDirDepth * thumbDirLength must be smaller than the length of checksum - -func newGalleryPaths() *galleryPaths { - return &galleryPaths{} -} - -func (gp *galleryPaths) GetExtractedPath(checksum string) string { - return filepath.Join(config.GetCachePath(), checksum) -} - -func GetGthumbCache() string { - return 
filepath.Join(config.GetCachePath(), thumbDir) -} - -func GetGthumbDir(checksum string) string { - return filepath.Join(config.GetCachePath(), thumbDir, utils.GetIntraDir(checksum, thumbDirDepth, thumbDirLength), checksum) -} - -func GetGthumbPath(checksum string, index int, width int) string { - fname := fmt.Sprintf("%s_%d_%d.jpg", checksum, index, width) - return filepath.Join(config.GetCachePath(), thumbDir, utils.GetIntraDir(checksum, thumbDirDepth, thumbDirLength), checksum, fname) -} - -func (gp *galleryPaths) GetExtractedFilePath(checksum string, fileName string) string { - return filepath.Join(config.GetCachePath(), checksum, fileName) -} diff --git a/pkg/manager/paths/paths_generated.go b/pkg/manager/paths/paths_generated.go index 76ca06c20..91bbabed7 100644 --- a/pkg/manager/paths/paths_generated.go +++ b/pkg/manager/paths/paths_generated.go @@ -1,6 +1,7 @@ package paths import ( + "fmt" "io/ioutil" "path/filepath" @@ -8,8 +9,12 @@ import ( "github.com/stashapp/stash/pkg/utils" ) +const thumbDirDepth int = 2 +const thumbDirLength int = 2 // thumbDirDepth * thumbDirLength must be smaller than the length of checksum + type generatedPaths struct { Screenshots string + Thumbnails string Vtt string Markers string Transcodes string @@ -20,6 +25,7 @@ type generatedPaths struct { func newGeneratedPaths() *generatedPaths { gp := generatedPaths{} gp.Screenshots = filepath.Join(config.GetGeneratedPath(), "screenshots") + gp.Thumbnails = filepath.Join(config.GetGeneratedPath(), "thumbnails") gp.Vtt = filepath.Join(config.GetGeneratedPath(), "vtt") gp.Markers = filepath.Join(config.GetGeneratedPath(), "markers") gp.Transcodes = filepath.Join(config.GetGeneratedPath(), "transcodes") @@ -55,3 +61,8 @@ func (gp *generatedPaths) TempDir(pattern string) (string, error) { return ret, nil } + +func (gp *generatedPaths) GetThumbnailPath(checksum string, width int) string { + fname := fmt.Sprintf("%s_%d.jpg", checksum, width) + return filepath.Join(gp.Thumbnails, utils.GetIntraDir(checksum, thumbDirDepth, thumbDirLength), fname) +} diff --git a/pkg/manager/paths/paths_json.go b/pkg/manager/paths/paths_json.go index 9a4aceab6..c7f3b7490 100644 --- a/pkg/manager/paths/paths_json.go +++ b/pkg/manager/paths/paths_json.go @@ -14,6 +14,7 @@ type JSONPaths struct { Performers string Scenes string + Images string Galleries string Studios string Tags string @@ -27,6 +28,7 @@ func newJSONPaths(baseDir string) *JSONPaths { jp.ScrapedFile = filepath.Join(baseDir, "scraped.json") jp.Performers = filepath.Join(baseDir, "performers") jp.Scenes = filepath.Join(baseDir, "scenes") + jp.Images = filepath.Join(baseDir, "images") jp.Galleries = filepath.Join(baseDir, "galleries") jp.Studios = filepath.Join(baseDir, "studios") jp.Movies = filepath.Join(baseDir, "movies") @@ -43,6 +45,7 @@ func EnsureJSONDirs(baseDir string) { jsonPaths := GetJSONPaths(baseDir) utils.EnsureDir(jsonPaths.Metadata) utils.EnsureDir(jsonPaths.Scenes) + utils.EnsureDir(jsonPaths.Images) utils.EnsureDir(jsonPaths.Galleries) utils.EnsureDir(jsonPaths.Performers) utils.EnsureDir(jsonPaths.Studios) @@ -58,6 +61,14 @@ func (jp *JSONPaths) SceneJSONPath(checksum string) string { return filepath.Join(jp.Scenes, checksum+".json") } +func (jp *JSONPaths) ImageJSONPath(checksum string) string { + return filepath.Join(jp.Images, checksum+".json") +} + +func (jp *JSONPaths) GalleryJSONPath(checksum string) string { + return filepath.Join(jp.Galleries, checksum+".json") +} + func (jp *JSONPaths) StudioJSONPath(checksum string) string { return 
filepath.Join(jp.Studios, checksum+".json") } diff --git a/pkg/manager/task_clean.go b/pkg/manager/task_clean.go index 80b5f72e0..43ec3772a 100644 --- a/pkg/manager/task_clean.go +++ b/pkg/manager/task_clean.go @@ -8,38 +8,40 @@ import ( "sync" "github.com/stashapp/stash/pkg/database" + "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/manager/config" - "github.com/stashapp/stash/pkg/manager/paths" "github.com/stashapp/stash/pkg/models" ) type CleanTask struct { Scene *models.Scene Gallery *models.Gallery + Image *models.Image fileNamingAlgorithm models.HashAlgorithm } func (t *CleanTask) Start(wg *sync.WaitGroup) { defer wg.Done() - if t.Scene != nil && t.shouldClean(t.Scene.Path) { + if t.Scene != nil && t.shouldCleanScene(t.Scene) { t.deleteScene(t.Scene.ID) } if t.Gallery != nil && t.shouldCleanGallery(t.Gallery) { t.deleteGallery(t.Gallery.ID) } + + if t.Image != nil && t.shouldCleanImage(t.Image) { + t.deleteImage(t.Image.ID) + } } func (t *CleanTask) shouldClean(path string) bool { - fileExists, err := t.fileExists(path) - if err != nil { - logger.Errorf("Error checking existence of %s: %s", path, err.Error()) - return false - } + // use image.FileExists for zip file checking + fileExists := image.FileExists(path) - if fileExists && t.pathInStash(path) { + if fileExists && t.getStashFromPath(path) != nil { logger.Debugf("File Found: %s", path) if matchFile(path, config.GetExcludes()) { logger.Infof("File matched regex. Cleaning: \"%s\"", path) @@ -53,13 +55,68 @@ func (t *CleanTask) shouldClean(path string) bool { return false } -func (t *CleanTask) shouldCleanGallery(g *models.Gallery) bool { - if t.shouldClean(g.Path) { +func (t *CleanTask) shouldCleanScene(s *models.Scene) bool { + if t.shouldClean(s.Path) { return true } - if t.Gallery.CountFiles() == 0 { - logger.Infof("Gallery has 0 images. Cleaning: \"%s\"", g.Path) + stash := t.getStashFromPath(s.Path) + if stash.ExcludeVideo { + logger.Infof("File in stash library that excludes video. Cleaning: \"%s\"", s.Path) + return true + } + + if !matchExtension(s.Path, config.GetVideoExtensions()) { + logger.Infof("File extension does not match video extensions. Cleaning: \"%s\"", s.Path) + return true + } + + return false +} + +func (t *CleanTask) shouldCleanGallery(g *models.Gallery) bool { + // never clean manually created galleries + if !g.Zip { + return false + } + + path := g.Path.String + if t.shouldClean(path) { + return true + } + + stash := t.getStashFromPath(path) + if stash.ExcludeImage { + logger.Infof("File in stash library that excludes images. Cleaning: \"%s\"", path) + return true + } + + if !matchExtension(path, config.GetGalleryExtensions()) { + logger.Infof("File extension does not match gallery extensions. Cleaning: \"%s\"", path) + return true + } + + if countImagesInZip(path) == 0 { + logger.Infof("Gallery has 0 images. Cleaning: \"%s\"", path) + return true + } + + return false +} + +func (t *CleanTask) shouldCleanImage(s *models.Image) bool { + if t.shouldClean(s.Path) { + return true + } + + stash := t.getStashFromPath(s.Path) + if stash.ExcludeImage { + logger.Infof("File in stash library that excludes images. Cleaning: \"%s\"", s.Path) + return true + } + + if !matchExtension(s.Path, config.GetImageExtensions()) { + logger.Infof("File extension does not match image extensions. 
Cleaning: \"%s\"", s.Path) return true } @@ -105,10 +162,29 @@ func (t *CleanTask) deleteGallery(galleryID int) { logger.Errorf("Error deleting gallery from database: %s", err.Error()) return } +} - pathErr := os.RemoveAll(paths.GetGthumbDir(t.Gallery.Checksum)) // remove cache dir of gallery +func (t *CleanTask) deleteImage(imageID int) { + ctx := context.TODO() + qb := models.NewImageQueryBuilder() + tx := database.DB.MustBeginTx(ctx, nil) + + err := qb.Destroy(imageID, tx) + + if err != nil { + logger.Errorf("Error deleting image from database: %s", err.Error()) + tx.Rollback() + return + } + + if err := tx.Commit(); err != nil { + logger.Errorf("Error deleting image from database: %s", err.Error()) + return + } + + pathErr := os.Remove(GetInstance().Paths.Generated.GetThumbnailPath(t.Image.Checksum, models.DefaultGthumbWidth)) // remove cache dir of gallery if pathErr != nil { - logger.Errorf("Error deleting gallery directory from cache: %s", pathErr) + logger.Errorf("Error deleting thumbnail image from cache: %s", pathErr) } } @@ -126,19 +202,17 @@ func (t *CleanTask) fileExists(filename string) (bool, error) { return !info.IsDir(), nil } -func (t *CleanTask) pathInStash(pathToCheck string) bool { - for _, path := range config.GetStashPaths() { +func (t *CleanTask) getStashFromPath(pathToCheck string) *models.StashConfig { + for _, s := range config.GetStashPaths() { - rel, error := filepath.Rel(path, filepath.Dir(pathToCheck)) + rel, error := filepath.Rel(s.Path, filepath.Dir(pathToCheck)) if error == nil { if !strings.HasPrefix(rel, ".."+string(filepath.Separator)) { - logger.Debugf("File %s belongs to stash path %s", pathToCheck, path) - return true + return s } } } - logger.Debugf("File %s is out from stash path", pathToCheck) - return false + return nil } diff --git a/pkg/manager/task_export.go b/pkg/manager/task_export.go index 41874a3ff..b8f0306d6 100644 --- a/pkg/manager/task_export.go +++ b/pkg/manager/task_export.go @@ -11,6 +11,8 @@ import ( "sync" "time" + "github.com/stashapp/stash/pkg/gallery" + "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/manager/jsonschema" @@ -34,6 +36,7 @@ type ExportTask struct { fileNamingAlgorithm models.HashAlgorithm scenes *exportSpec + images *exportSpec performers *exportSpec movies *exportSpec tags *exportSpec @@ -75,6 +78,7 @@ func CreateExportTask(a models.HashAlgorithm, input models.ExportObjectsInput) * return &ExportTask{ fileNamingAlgorithm: a, scenes: newExportSpec(input.Scenes), + images: newExportSpec(input.Images), performers: newExportSpec(input.Performers), movies: newExportSpec(input.Movies), tags: newExportSpec(input.Tags), @@ -122,7 +126,8 @@ func (t *ExportTask) Start(wg *sync.WaitGroup) { paths.EnsureJSONDirs(t.baseDir) t.ExportScenes(workerCount) - t.ExportGalleries() + t.ExportImages(workerCount) + t.ExportGalleries(workerCount) t.ExportPerformers(workerCount) t.ExportStudios(workerCount) t.ExportMovies(workerCount) @@ -183,6 +188,7 @@ func (t *ExportTask) zipFiles(w io.Writer) error { filepath.Walk(t.json.json.Studios, t.zipWalkFunc(u.json.Studios, z)) filepath.Walk(t.json.json.Movies, t.zipWalkFunc(u.json.Movies, z)) filepath.Walk(t.json.json.Scenes, t.zipWalkFunc(u.json.Scenes, z)) + filepath.Walk(t.json.json.Images, t.zipWalkFunc(u.json.Images, z)) return nil } @@ -257,7 +263,7 @@ func (t *ExportTask) ExportScenes(workers int) { if (i % 100) == 0 { // make progress easier to read logger.Progressf("[scenes] %d of 
%d", index, len(scenes)) } - t.Mappings.Scenes = append(t.Mappings.Scenes, jsonschema.PathMapping{Path: scene.Path, Checksum: scene.GetHash(t.fileNamingAlgorithm)}) + t.Mappings.Scenes = append(t.Mappings.Scenes, jsonschema.PathNameMapping{Path: scene.Path, Checksum: scene.GetHash(t.fileNamingAlgorithm)}) jobCh <- scene // feed workers } @@ -366,7 +372,127 @@ func exportScene(wg *sync.WaitGroup, jobChan <-chan *models.Scene, t *ExportTask } } -func (t *ExportTask) ExportGalleries() { +func (t *ExportTask) ExportImages(workers int) { + var imagesWg sync.WaitGroup + + imageReader := models.NewImageReaderWriter(nil) + + var images []*models.Image + var err error + all := t.full || (t.images != nil && t.images.all) + if all { + images, err = imageReader.All() + } else if t.images != nil && len(t.images.IDs) > 0 { + images, err = imageReader.FindMany(t.images.IDs) + } + + if err != nil { + logger.Errorf("[images] failed to fetch images: %s", err.Error()) + } + + jobCh := make(chan *models.Image, workers*2) // make a buffered channel to feed workers + + logger.Info("[images] exporting") + startTime := time.Now() + + for w := 0; w < workers; w++ { // create export Image workers + imagesWg.Add(1) + go exportImage(&imagesWg, jobCh, t) + } + + for i, image := range images { + index := i + 1 + + if (i % 100) == 0 { // make progress easier to read + logger.Progressf("[images] %d of %d", index, len(images)) + } + t.Mappings.Images = append(t.Mappings.Images, jsonschema.PathNameMapping{Path: image.Path, Checksum: image.Checksum}) + jobCh <- image // feed workers + } + + close(jobCh) // close channel so that workers will know no more jobs are available + imagesWg.Wait() + + logger.Infof("[images] export complete in %s. %d workers used.", time.Since(startTime), workers) +} + +func exportImage(wg *sync.WaitGroup, jobChan <-chan *models.Image, t *ExportTask) { + defer wg.Done() + studioReader := models.NewStudioReaderWriter(nil) + galleryReader := models.NewGalleryReaderWriter(nil) + performerReader := models.NewPerformerReaderWriter(nil) + tagReader := models.NewTagReaderWriter(nil) + + for s := range jobChan { + imageHash := s.Checksum + + newImageJSON := image.ToBasicJSON(s) + + var err error + newImageJSON.Studio, err = image.GetStudioName(studioReader, s) + if err != nil { + logger.Errorf("[images] <%s> error getting image studio name: %s", imageHash, err.Error()) + continue + } + + imageGalleries, err := galleryReader.FindByImageID(s.ID) + if err != nil { + logger.Errorf("[images] <%s> error getting image galleries: %s", imageHash, err.Error()) + continue + } + + newImageJSON.Galleries = t.getGalleryChecksums(imageGalleries) + + performers, err := performerReader.FindByImageID(s.ID) + if err != nil { + logger.Errorf("[images] <%s> error getting image performer names: %s", imageHash, err.Error()) + continue + } + + newImageJSON.Performers = performer.GetNames(performers) + + tags, err := tagReader.FindByImageID(s.ID) + if err != nil { + logger.Errorf("[images] <%s> error getting image tag names: %s", imageHash, err.Error()) + continue + } + + newImageJSON.Tags = tag.GetNames(tags) + + if t.includeDependencies { + if s.StudioID.Valid { + t.studios.IDs = utils.IntAppendUnique(t.studios.IDs, int(s.StudioID.Int64)) + } + + // if imageGallery != nil { + // t.galleries.IDs = utils.IntAppendUnique(t.galleries.IDs, imageGallery.ID) + // } + + t.tags.IDs = utils.IntAppendUniques(t.tags.IDs, tag.GetIDs(tags)) + t.performers.IDs = utils.IntAppendUniques(t.performers.IDs, performer.GetIDs(performers)) + } + + 
imageJSON, err := t.json.getImage(imageHash) + if err == nil && jsonschema.CompareJSON(*imageJSON, *newImageJSON) { + continue + } + + if err := t.json.saveImage(imageHash, newImageJSON); err != nil { + logger.Errorf("[images] <%s> failed to save json: %s", imageHash, err.Error()) + } + } +} + +func (t *ExportTask) getGalleryChecksums(galleries []*models.Gallery) (ret []string) { + for _, g := range galleries { + ret = append(ret, g.Checksum) + } + return +} + +func (t *ExportTask) ExportGalleries(workers int) { + var galleriesWg sync.WaitGroup + reader := models.NewGalleryReaderWriter(nil) var galleries []*models.Gallery @@ -382,15 +508,92 @@ func (t *ExportTask) ExportGalleries() { logger.Errorf("[galleries] failed to fetch galleries: %s", err.Error()) } + jobCh := make(chan *models.Gallery, workers*2) // make a buffered channel to feed workers + logger.Info("[galleries] exporting") + startTime := time.Now() + + for w := 0; w < workers; w++ { // create export Scene workers + galleriesWg.Add(1) + go exportGallery(&galleriesWg, jobCh, t) + } for i, gallery := range galleries { index := i + 1 - logger.Progressf("[galleries] %d of %d", index, len(galleries)) - t.Mappings.Galleries = append(t.Mappings.Galleries, jsonschema.PathMapping{Path: gallery.Path, Checksum: gallery.Checksum}) + + if (i % 100) == 0 { // make progress easier to read + logger.Progressf("[galleries] %d of %d", index, len(galleries)) + } + + t.Mappings.Galleries = append(t.Mappings.Galleries, jsonschema.PathNameMapping{ + Path: gallery.Path.String, + Name: gallery.Title.String, + Checksum: gallery.Checksum, + }) + jobCh <- gallery } - logger.Infof("[galleries] export complete") + close(jobCh) // close channel so that workers will know no more jobs are available + galleriesWg.Wait() + + logger.Infof("[galleries] export complete in %s. 
%d workers used.", time.Since(startTime), workers) +} + +func exportGallery(wg *sync.WaitGroup, jobChan <-chan *models.Gallery, t *ExportTask) { + defer wg.Done() + studioReader := models.NewStudioReaderWriter(nil) + performerReader := models.NewPerformerReaderWriter(nil) + tagReader := models.NewTagReaderWriter(nil) + + for g := range jobChan { + galleryHash := g.Checksum + + newGalleryJSON, err := gallery.ToBasicJSON(g) + if err != nil { + logger.Errorf("[galleries] <%s> error getting gallery JSON: %s", galleryHash, err.Error()) + continue + } + + newGalleryJSON.Studio, err = gallery.GetStudioName(studioReader, g) + if err != nil { + logger.Errorf("[galleries] <%s> error getting gallery studio name: %s", galleryHash, err.Error()) + continue + } + + performers, err := performerReader.FindByGalleryID(g.ID) + if err != nil { + logger.Errorf("[galleries] <%s> error getting gallery performer names: %s", galleryHash, err.Error()) + continue + } + + newGalleryJSON.Performers = performer.GetNames(performers) + + tags, err := tagReader.FindByGalleryID(g.ID) + if err != nil { + logger.Errorf("[galleries] <%s> error getting gallery tag names: %s", galleryHash, err.Error()) + continue + } + + newGalleryJSON.Tags = tag.GetNames(tags) + + if t.includeDependencies { + if g.StudioID.Valid { + t.studios.IDs = utils.IntAppendUnique(t.studios.IDs, int(g.StudioID.Int64)) + } + + t.tags.IDs = utils.IntAppendUniques(t.tags.IDs, tag.GetIDs(tags)) + t.performers.IDs = utils.IntAppendUniques(t.performers.IDs, performer.GetIDs(performers)) + } + + galleryJSON, err := t.json.getGallery(galleryHash) + if err == nil && jsonschema.CompareJSON(*galleryJSON, *newGalleryJSON) { + continue + } + + if err := t.json.saveGallery(galleryHash, newGalleryJSON); err != nil { + logger.Errorf("[galleries] <%s> failed to save json: %s", galleryHash, err.Error()) + } + } } func (t *ExportTask) ExportPerformers(workers int) { @@ -423,7 +626,7 @@ func (t *ExportTask) ExportPerformers(workers int) { index := i + 1 logger.Progressf("[performers] %d of %d", index, len(performers)) - t.Mappings.Performers = append(t.Mappings.Performers, jsonschema.NameMapping{Name: performer.Name.String, Checksum: performer.Checksum}) + t.Mappings.Performers = append(t.Mappings.Performers, jsonschema.PathNameMapping{Name: performer.Name.String, Checksum: performer.Checksum}) jobCh <- performer // feed workers } @@ -490,7 +693,7 @@ func (t *ExportTask) ExportStudios(workers int) { index := i + 1 logger.Progressf("[studios] %d of %d", index, len(studios)) - t.Mappings.Studios = append(t.Mappings.Studios, jsonschema.NameMapping{Name: studio.Name.String, Checksum: studio.Checksum}) + t.Mappings.Studios = append(t.Mappings.Studios, jsonschema.PathNameMapping{Name: studio.Name.String, Checksum: studio.Checksum}) jobCh <- studio // feed workers } @@ -558,7 +761,7 @@ func (t *ExportTask) ExportTags(workers int) { // generate checksum on the fly by name, since we don't store it checksum := utils.MD5FromString(tag.Name) - t.Mappings.Tags = append(t.Mappings.Tags, jsonschema.NameMapping{Name: tag.Name, Checksum: checksum}) + t.Mappings.Tags = append(t.Mappings.Tags, jsonschema.PathNameMapping{Name: tag.Name, Checksum: checksum}) jobCh <- tag // feed workers } @@ -626,7 +829,7 @@ func (t *ExportTask) ExportMovies(workers int) { index := i + 1 logger.Progressf("[movies] %d of %d", index, len(movies)) - t.Mappings.Movies = append(t.Mappings.Movies, jsonschema.NameMapping{Name: movie.Name.String, Checksum: movie.Checksum}) + t.Mappings.Movies = 
append(t.Mappings.Movies, jsonschema.PathNameMapping{Name: movie.Name.String, Checksum: movie.Checksum}) jobCh <- movie // feed workers } diff --git a/pkg/manager/task_generate_gallery_thumbs.go b/pkg/manager/task_generate_gallery_thumbs.go deleted file mode 100644 index 6aad9c80d..000000000 --- a/pkg/manager/task_generate_gallery_thumbs.go +++ /dev/null @@ -1,39 +0,0 @@ -package manager - -import ( - "sync" - - "github.com/stashapp/stash/pkg/logger" - "github.com/stashapp/stash/pkg/manager/paths" - "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/utils" -) - -type GenerateGthumbsTask struct { - Gallery models.Gallery - Overwrite bool -} - -func (t *GenerateGthumbsTask) Start(wg *sync.WaitGroup) { - defer wg.Done() - generated := 0 - count := t.Gallery.ImageCount() - for i := 0; i < count; i++ { - thumbPath := paths.GetGthumbPath(t.Gallery.Checksum, i, models.DefaultGthumbWidth) - exists, _ := utils.FileExists(thumbPath) - if !t.Overwrite && exists { - continue - } - data := t.Gallery.GetThumbnail(i, models.DefaultGthumbWidth) - err := utils.WriteFile(thumbPath, data) - if err != nil { - logger.Errorf("error writing gallery thumbnail: %s", err) - } else { - generated++ - } - - } - if generated > 0 { - logger.Infof("Generated %d thumbnails for %s", generated, t.Gallery.Path) - } -} diff --git a/pkg/manager/task_import.go b/pkg/manager/task_import.go index d6da28523..600497e48 100644 --- a/pkg/manager/task_import.go +++ b/pkg/manager/task_import.go @@ -14,6 +14,7 @@ import ( "github.com/jmoiron/sqlx" "github.com/stashapp/stash/pkg/database" "github.com/stashapp/stash/pkg/gallery" + "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/manager/jsonschema" @@ -122,6 +123,7 @@ func (t *ImportTask) Start(wg *sync.WaitGroup) { t.ImportScrapedItems(ctx) t.ImportScenes(ctx) + t.ImportImages(ctx) } func (t *ImportTask) unzipFile() error { @@ -361,15 +363,29 @@ func (t *ImportTask) ImportGalleries(ctx context.Context) { for i, mappingJSON := range t.mappings.Galleries { index := i + 1 + galleryJSON, err := t.json.getGallery(mappingJSON.Checksum) + if err != nil { + logger.Errorf("[galleries] failed to read json: %s", err.Error()) + continue + } logger.Progressf("[galleries] %d of %d", index, len(t.mappings.Galleries)) tx := database.DB.MustBeginTx(ctx, nil) readerWriter := models.NewGalleryReaderWriter(tx) + tagWriter := models.NewTagReaderWriter(tx) + joinWriter := models.NewJoinReaderWriter(tx) + performerWriter := models.NewPerformerReaderWriter(tx) + studioWriter := models.NewStudioReaderWriter(tx) galleryImporter := &gallery.Importer{ - ReaderWriter: readerWriter, - Input: mappingJSON, + ReaderWriter: readerWriter, + PerformerWriter: performerWriter, + StudioWriter: studioWriter, + TagWriter: tagWriter, + JoinWriter: joinWriter, + Input: *galleryJSON, + MissingRefBehaviour: t.MissingRefBehaviour, } if err := performImport(galleryImporter, t.DuplicateBehaviour); err != nil { @@ -553,6 +569,59 @@ func (t *ImportTask) ImportScenes(ctx context.Context) { logger.Info("[scenes] import complete") } +func (t *ImportTask) ImportImages(ctx context.Context) { + logger.Info("[images] importing") + + for i, mappingJSON := range t.mappings.Images { + index := i + 1 + + logger.Progressf("[images] %d of %d", index, len(t.mappings.Images)) + + imageJSON, err := t.json.getImage(mappingJSON.Checksum) + if err != nil { + logger.Infof("[images] <%s> json parse failure: %s", 
mappingJSON.Checksum, err.Error()) + continue + } + + imageHash := mappingJSON.Checksum + + tx := database.DB.MustBeginTx(ctx, nil) + readerWriter := models.NewImageReaderWriter(tx) + tagWriter := models.NewTagReaderWriter(tx) + galleryWriter := models.NewGalleryReaderWriter(tx) + joinWriter := models.NewJoinReaderWriter(tx) + performerWriter := models.NewPerformerReaderWriter(tx) + studioWriter := models.NewStudioReaderWriter(tx) + + imageImporter := &image.Importer{ + ReaderWriter: readerWriter, + Input: *imageJSON, + Path: mappingJSON.Path, + + MissingRefBehaviour: t.MissingRefBehaviour, + + GalleryWriter: galleryWriter, + JoinWriter: joinWriter, + PerformerWriter: performerWriter, + StudioWriter: studioWriter, + TagWriter: tagWriter, + } + + if err := performImport(imageImporter, t.DuplicateBehaviour); err != nil { + tx.Rollback() + logger.Errorf("[images] <%s> failed to import: %s", imageHash, err.Error()) + continue + } + + if err := tx.Commit(); err != nil { + tx.Rollback() + logger.Errorf("[images] <%s> import failed to commit: %s", imageHash, err.Error()) + } + } + + logger.Info("[images] import complete") +} + func (t *ImportTask) getPerformers(names []string, tx *sqlx.Tx) ([]*models.Performer, error) { pqb := models.NewPerformerQueryBuilder() performers, err := pqb.FindByNames(names, tx, false) diff --git a/pkg/manager/task_scan.go b/pkg/manager/task_scan.go index 8536d109f..de9011209 100644 --- a/pkg/manager/task_scan.go +++ b/pkg/manager/task_scan.go @@ -1,17 +1,24 @@ package manager import ( + "archive/zip" "context" "database/sql" + "os" "path/filepath" "strconv" "strings" "sync" "time" + "github.com/facebookgo/symwalk" + "github.com/jmoiron/sqlx" + "github.com/stashapp/stash/pkg/database" "github.com/stashapp/stash/pkg/ffmpeg" + "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/manager/config" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" ) @@ -21,13 +28,17 @@ type ScanTask struct { UseFileMetadata bool calculateMD5 bool fileNamingAlgorithm models.HashAlgorithm + + zipGallery *models.Gallery } func (t *ScanTask) Start(wg *sync.WaitGroup) { if isGallery(t.FilePath) { t.scanGallery() - } else { + } else if isVideo(t.FilePath) { t.scanScene() + } else if isImage(t.FilePath) { + t.scanImage() } wg.Done() @@ -39,6 +50,20 @@ func (t *ScanTask) scanGallery() { if gallery != nil { // We already have this item in the database, keep going + + // scan the zip files if the gallery has no images + iqb := models.NewImageQueryBuilder() + images, err := iqb.CountByGalleryID(gallery.ID) + if err != nil { + logger.Errorf("error getting images for zip gallery %s: %s", t.FilePath, err.Error()) + } + + if images == 0 { + t.scanZipImages(gallery) + } else { + // in case thumbnails have been deleted, regenerate them + t.regenerateZipImages(gallery) + } return } @@ -57,27 +82,34 @@ func (t *ScanTask) scanGallery() { tx := database.DB.MustBeginTx(ctx, nil) gallery, _ = qb.FindByChecksum(checksum, tx) if gallery != nil { - exists, _ := utils.FileExists(gallery.Path) + exists, _ := utils.FileExists(gallery.Path.String) if exists { - logger.Infof("%s already exists. Duplicate of %s ", t.FilePath, gallery.Path) + logger.Infof("%s already exists. Duplicate of %s ", t.FilePath, gallery.Path.String) } else { logger.Infof("%s already exists. 
Updating path...", t.FilePath) - gallery.Path = t.FilePath - _, err = qb.Update(*gallery, tx) + gallery.Path = sql.NullString{ + String: t.FilePath, + Valid: true, + } + gallery, err = qb.Update(*gallery, tx) } } else { currentTime := time.Now() newGallery := models.Gallery{ - Checksum: checksum, - Path: t.FilePath, + Checksum: checksum, + Zip: true, + Path: sql.NullString{ + String: t.FilePath, + Valid: true, + }, CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, } // don't create gallery if it has no images - if newGallery.CountFiles() > 0 { + if countImagesInZip(t.FilePath) > 0 { // only warn when creating the gallery ok, err := utils.IsZipFileUncompressed(t.FilePath) if err == nil && !ok { @@ -85,15 +117,25 @@ func (t *ScanTask) scanGallery() { } logger.Infof("%s doesn't exist. Creating new item...", t.FilePath) - _, err = qb.Create(newGallery, tx) + gallery, err = qb.Create(newGallery, tx) } } if err != nil { logger.Error(err.Error()) - _ = tx.Rollback() - } else if err := tx.Commit(); err != nil { + tx.Rollback() + return + } + + err = tx.Commit() + if err != nil { logger.Error(err.Error()) + return + } + + // if the gallery has no associated images, then scan the zip for images + if gallery != nil { + t.scanZipImages(gallery) } } @@ -109,19 +151,24 @@ func (t *ScanTask) associateGallery(wg *sync.WaitGroup) { return } - if !gallery.SceneID.Valid { // gallery has no SceneID + // gallery has no SceneID + if !gallery.SceneID.Valid { basename := strings.TrimSuffix(t.FilePath, filepath.Ext(t.FilePath)) var relatedFiles []string - for _, ext := range extensionsToScan { // make a list of media files that can be related to the gallery + vExt := config.GetVideoExtensions() + // make a list of media files that can be related to the gallery + for _, ext := range vExt { related := basename + "." 
+ ext - if !isGallery(related) { //exclude gallery extensions from the related files + // exclude gallery extensions from the related files + if !isGallery(related) { relatedFiles = append(relatedFiles, related) } } for _, scenePath := range relatedFiles { qbScene := models.NewSceneQueryBuilder() scene, _ := qbScene.FindByPath(scenePath) - if scene != nil { // found related Scene + // found related Scene + if scene != nil { logger.Infof("associate: Gallery %s is related to scene: %d", t.FilePath, scene.ID) gallery.SceneID.Int64 = int64(scene.ID) @@ -138,12 +185,11 @@ func (t *ScanTask) associateGallery(wg *sync.WaitGroup) { logger.Error(err.Error()) } - break // since a gallery can have only one related scene + // since a gallery can have only one related scene // only first found is associated + break } - } - } wg.Done() } @@ -371,6 +417,188 @@ func (t *ScanTask) makeScreenshots(probeResult *ffmpeg.VideoFile, checksum strin } } +func (t *ScanTask) scanZipImages(zipGallery *models.Gallery) { + err := walkGalleryZip(zipGallery.Path.String, func(file *zip.File) error { + // copy this task and change the filename + subTask := *t + + // filepath is the zip file and the internal file name, separated by a null byte + subTask.FilePath = image.ZipFilename(zipGallery.Path.String, file.Name) + subTask.zipGallery = zipGallery + + // run the subtask and wait for it to complete + var wg sync.WaitGroup + wg.Add(1) + subTask.Start(&wg) + return nil + }) + if err != nil { + logger.Warnf("failed to scan zip file images for %s: %s", zipGallery.Path.String, err.Error()) + } +} + +func (t *ScanTask) regenerateZipImages(zipGallery *models.Gallery) { + iqb := models.NewImageQueryBuilder() + + images, err := iqb.FindByGalleryID(zipGallery.ID) + if err != nil { + logger.Warnf("failed to find gallery images: %s", err.Error()) + return + } + + for _, img := range images { + t.generateThumbnail(img) + } +} + +func (t *ScanTask) scanImage() { + qb := models.NewImageQueryBuilder() + i, _ := qb.FindByPath(t.FilePath) + if i != nil { + // We already have this item in the database + // check for thumbnails + t.generateThumbnail(i) + + return + } + + // Ignore directories. + if isDir, _ := utils.DirExists(t.FilePath); isDir { + return + } + + var checksum string + + logger.Infof("%s not found. Calculating checksum...", t.FilePath) + checksum, err := t.calculateImageChecksum() + if err != nil { + logger.Errorf("error calculating checksum for %s: %s", t.FilePath, err.Error()) + return + } + + // check for scene by checksum and oshash - MD5 should be + // redundant, but check both + i, _ = qb.FindByChecksum(checksum) + + ctx := context.TODO() + tx := database.DB.MustBeginTx(ctx, nil) + if i != nil { + exists := image.FileExists(i.Path) + if exists { + logger.Infof("%s already exists. Duplicate of %s ", image.PathDisplayName(t.FilePath), image.PathDisplayName(i.Path)) + } else { + logger.Infof("%s already exists. Updating path...", image.PathDisplayName(t.FilePath)) + imagePartial := models.ImagePartial{ + ID: i.ID, + Path: &t.FilePath, + } + _, err = qb.Update(imagePartial, tx) + } + } else { + logger.Infof("%s doesn't exist. 
Creating new item...", image.PathDisplayName(t.FilePath)) + currentTime := time.Now() + newImage := models.Image{ + Checksum: checksum, + Path: t.FilePath, + CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + } + err = image.SetFileDetails(&newImage) + if err == nil { + i, err = qb.Create(newImage, tx) + } + } + + if err == nil { + jqb := models.NewJoinsQueryBuilder() + if t.zipGallery != nil { + // associate with gallery + _, err = jqb.AddImageGallery(i.ID, t.zipGallery.ID, tx) + } else if config.GetCreateGalleriesFromFolders() { + // create gallery from folder or associate with existing gallery + logger.Infof("Associating image %s with folder gallery", i.Path) + err = t.associateImageWithFolderGallery(i.ID, tx) + } + } + + if err != nil { + logger.Error(err.Error()) + _ = tx.Rollback() + return + } else if err := tx.Commit(); err != nil { + logger.Error(err.Error()) + return + } + + t.generateThumbnail(i) +} + +func (t *ScanTask) associateImageWithFolderGallery(imageID int, tx *sqlx.Tx) error { + // find a gallery with the path specified + path := filepath.Dir(t.FilePath) + gqb := models.NewGalleryQueryBuilder() + jqb := models.NewJoinsQueryBuilder() + g, err := gqb.FindByPath(path) + if err != nil { + return err + } + + if g == nil { + checksum := utils.MD5FromString(path) + + // create the gallery + currentTime := time.Now() + + newGallery := models.Gallery{ + Checksum: checksum, + Path: sql.NullString{ + String: path, + Valid: true, + }, + CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime}, + } + + logger.Infof("Creating gallery for folder %s", path) + g, err = gqb.Create(newGallery, tx) + if err != nil { + return err + } + } + + // associate image with gallery + _, err = jqb.AddImageGallery(imageID, g.ID, tx) + return err +} + +func (t *ScanTask) generateThumbnail(i *models.Image) { + thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum, models.DefaultGthumbWidth) + exists, _ := utils.FileExists(thumbPath) + if exists { + logger.Debug("Thumbnail already exists for this path... 
skipping") + return + } + + srcImage, err := image.GetSourceImage(i) + if err != nil { + logger.Errorf("error reading image %s: %s", i.Path, err.Error()) + return + } + + if image.ThumbnailNeeded(srcImage, models.DefaultGthumbWidth) { + data, err := image.GetThumbnail(srcImage, models.DefaultGthumbWidth) + if err != nil { + logger.Errorf("error getting thumbnail for image %s: %s", i.Path, err.Error()) + return + } + + err = utils.WriteFile(thumbPath, data) + if err != nil { + logger.Errorf("error writing thumbnail for image %s: %s", i.Path, err) + } + } +} + func (t *ScanTask) calculateChecksum() (string, error) { logger.Infof("Calculating checksum for %s...", t.FilePath) checksum, err := utils.MD5FromFilePath(t.FilePath) @@ -381,19 +609,67 @@ func (t *ScanTask) calculateChecksum() (string, error) { return checksum, nil } +func (t *ScanTask) calculateImageChecksum() (string, error) { + logger.Infof("Calculating checksum for %s...", image.PathDisplayName(t.FilePath)) + // uses image.CalculateMD5 to read files in zips + checksum, err := image.CalculateMD5(t.FilePath) + if err != nil { + return "", err + } + logger.Debugf("Checksum calculated: %s", checksum) + return checksum, nil +} + func (t *ScanTask) doesPathExist() bool { - if filepath.Ext(t.FilePath) == ".zip" { + vidExt := config.GetVideoExtensions() + imgExt := config.GetImageExtensions() + gExt := config.GetGalleryExtensions() + + if matchExtension(t.FilePath, gExt) { qb := models.NewGalleryQueryBuilder() gallery, _ := qb.FindByPath(t.FilePath) if gallery != nil { return true } - } else { + } else if matchExtension(t.FilePath, vidExt) { qb := models.NewSceneQueryBuilder() scene, _ := qb.FindByPath(t.FilePath) if scene != nil { return true } + } else if matchExtension(t.FilePath, imgExt) { + qb := models.NewImageQueryBuilder() + i, _ := qb.FindByPath(t.FilePath) + if i != nil { + return true + } } + return false } + +func walkFilesToScan(s *models.StashConfig, f filepath.WalkFunc) error { + vidExt := config.GetVideoExtensions() + imgExt := config.GetImageExtensions() + gExt := config.GetGalleryExtensions() + excludeVid := config.GetExcludes() + excludeImg := config.GetImageExcludes() + + return symwalk.Walk(s.Path, func(path string, info os.FileInfo, err error) error { + if info.IsDir() { + return nil + } + + if !s.ExcludeVideo && matchExtension(path, vidExt) && !matchFile(path, excludeVid) { + return f(path, info, err) + } + + if !s.ExcludeImage { + if (matchExtension(path, imgExt) || matchExtension(path, gExt)) && !matchFile(path, excludeImg) { + return f(path, info, err) + } + } + + return nil + }) +} diff --git a/pkg/models/gallery.go b/pkg/models/gallery.go index 6ab87ac80..0aa5f3cc3 100644 --- a/pkg/models/gallery.go +++ b/pkg/models/gallery.go @@ -10,6 +10,7 @@ type GalleryReader interface { FindByChecksum(checksum string) (*Gallery, error) FindByPath(path string) (*Gallery, error) FindBySceneID(sceneID int) (*Gallery, error) + FindByImageID(imageID int) ([]*Gallery, error) // ValidGalleriesForScenePath(scenePath string) ([]*Gallery, error) // Count() (int, error) All() ([]*Gallery, error) @@ -60,6 +61,10 @@ func (t *galleryReaderWriter) FindBySceneID(sceneID int) (*Gallery, error) { return t.qb.FindBySceneID(sceneID, t.tx) } +func (t *galleryReaderWriter) FindByImageID(imageID int) ([]*Gallery, error) { + return t.qb.FindByImageID(imageID, t.tx) +} + func (t *galleryReaderWriter) Create(newGallery Gallery) (*Gallery, error) { return t.qb.Create(newGallery, t.tx) } diff --git a/pkg/models/image.go b/pkg/models/image.go new file 
mode 100644 index 000000000..ed1735817 --- /dev/null +++ b/pkg/models/image.go @@ -0,0 +1,72 @@ +package models + +import ( + "github.com/jmoiron/sqlx" +) + +type ImageReader interface { + // Find(id int) (*Image, error) + FindMany(ids []int) ([]*Image, error) + FindByChecksum(checksum string) (*Image, error) + // FindByPath(path string) (*Image, error) + // FindByPerformerID(performerID int) ([]*Image, error) + // CountByPerformerID(performerID int) (int, error) + // FindByStudioID(studioID int) ([]*Image, error) + // Count() (int, error) + // SizeCount() (string, error) + // CountByStudioID(studioID int) (int, error) + // CountByTagID(tagID int) (int, error) + All() ([]*Image, error) + // Query(imageFilter *ImageFilterType, findFilter *FindFilterType) ([]*Image, int) +} + +type ImageWriter interface { + Create(newImage Image) (*Image, error) + Update(updatedImage ImagePartial) (*Image, error) + UpdateFull(updatedImage Image) (*Image, error) + // IncrementOCounter(id int) (int, error) + // DecrementOCounter(id int) (int, error) + // ResetOCounter(id int) (int, error) + // Destroy(id string) error +} + +type ImageReaderWriter interface { + ImageReader + ImageWriter +} + +func NewImageReaderWriter(tx *sqlx.Tx) ImageReaderWriter { + return &imageReaderWriter{ + tx: tx, + qb: NewImageQueryBuilder(), + } +} + +type imageReaderWriter struct { + tx *sqlx.Tx + qb ImageQueryBuilder +} + +func (t *imageReaderWriter) FindMany(ids []int) ([]*Image, error) { + return t.qb.FindMany(ids) +} + +func (t *imageReaderWriter) FindByChecksum(checksum string) (*Image, error) { + return t.qb.FindByChecksum(checksum) +} + +func (t *imageReaderWriter) All() ([]*Image, error) { + return t.qb.All() +} + +func (t *imageReaderWriter) Create(newImage Image) (*Image, error) { + return t.qb.Create(newImage, t.tx) +} + +func (t *imageReaderWriter) Update(updatedImage ImagePartial) (*Image, error) { + return t.qb.Update(updatedImage, t.tx) +} + +func (t *imageReaderWriter) UpdateFull(updatedImage Image) (*Image, error) { + return t.qb.UpdateFull(updatedImage, t.tx) +} diff --git a/pkg/models/join.go b/pkg/models/join.go index 443dcf218..3b0d259ba 100644 --- a/pkg/models/join.go +++ b/pkg/models/join.go @@ -28,6 +28,11 @@ type JoinWriter interface { // DestroySceneMarkersTags(sceneMarkerID int, updatedJoins []SceneMarkersTags) error // DestroyScenesGalleries(sceneID int) error // DestroyScenesMarkers(sceneID int) error + UpdatePerformersGalleries(galleryID int, updatedJoins []PerformersGalleries) error + UpdateGalleriesTags(galleryID int, updatedJoins []GalleriesTags) error + UpdateGalleriesImages(imageID int, updatedJoins []GalleriesImages) error + UpdatePerformersImages(imageID int, updatedJoins []PerformersImages) error + UpdateImagesTags(imageID int, updatedJoins []ImagesTags) error } type JoinReaderWriter interface { @@ -74,3 +79,23 @@ func (t *joinReaderWriter) UpdateScenesTags(sceneID int, updatedJoins []ScenesTa func (t *joinReaderWriter) UpdateSceneMarkersTags(sceneMarkerID int, updatedJoins []SceneMarkersTags) error { return t.qb.UpdateSceneMarkersTags(sceneMarkerID, updatedJoins, t.tx) } + +func (t *joinReaderWriter) UpdatePerformersGalleries(galleryID int, updatedJoins []PerformersGalleries) error { + return t.qb.UpdatePerformersGalleries(galleryID, updatedJoins, t.tx) +} + +func (t *joinReaderWriter) UpdateGalleriesTags(galleryID int, updatedJoins []GalleriesTags) error { + return t.qb.UpdateGalleriesTags(galleryID, updatedJoins, t.tx) +} + +func (t *joinReaderWriter) UpdateGalleriesImages(imageID int, 
updatedJoins []GalleriesImages) error { + return t.qb.UpdateGalleriesImages(imageID, updatedJoins, t.tx) +} + +func (t *joinReaderWriter) UpdatePerformersImages(imageID int, updatedJoins []PerformersImages) error { + return t.qb.UpdatePerformersImages(imageID, updatedJoins, t.tx) +} + +func (t *joinReaderWriter) UpdateImagesTags(imageID int, updatedJoins []ImagesTags) error { + return t.qb.UpdateImagesTags(imageID, updatedJoins, t.tx) +} diff --git a/pkg/models/mocks/GalleryReaderWriter.go b/pkg/models/mocks/GalleryReaderWriter.go index 31c23fadf..dcc37fc47 100644 --- a/pkg/models/mocks/GalleryReaderWriter.go +++ b/pkg/models/mocks/GalleryReaderWriter.go @@ -81,6 +81,29 @@ func (_m *GalleryReaderWriter) FindByChecksum(checksum string) (*models.Gallery, return r0, r1 } +// FindByImageID provides a mock function with given fields: imageID +func (_m *GalleryReaderWriter) FindByImageID(imageID int) ([]*models.Gallery, error) { + ret := _m.Called(imageID) + + var r0 []*models.Gallery + if rf, ok := ret.Get(0).(func(int) []*models.Gallery); ok { + r0 = rf(imageID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Gallery) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(int) error); ok { + r1 = rf(imageID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByPath provides a mock function with given fields: path func (_m *GalleryReaderWriter) FindByPath(path string) (*models.Gallery, error) { ret := _m.Called(path) diff --git a/pkg/models/mocks/ImageReaderWriter.go b/pkg/models/mocks/ImageReaderWriter.go new file mode 100644 index 000000000..bed6fbd5d --- /dev/null +++ b/pkg/models/mocks/ImageReaderWriter.go @@ -0,0 +1,151 @@ +// Code generated by mockery v0.0.0-dev. DO NOT EDIT. + +package mocks + +import ( + models "github.com/stashapp/stash/pkg/models" + mock "github.com/stretchr/testify/mock" +) + +// ImageReaderWriter is an autogenerated mock type for the ImageReaderWriter type +type ImageReaderWriter struct { + mock.Mock +} + +// All provides a mock function with given fields: +func (_m *ImageReaderWriter) All() ([]*models.Image, error) { + ret := _m.Called() + + var r0 []*models.Image + if rf, ok := ret.Get(0).(func() []*models.Image); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Create provides a mock function with given fields: newImage +func (_m *ImageReaderWriter) Create(newImage models.Image) (*models.Image, error) { + ret := _m.Called(newImage) + + var r0 *models.Image + if rf, ok := ret.Get(0).(func(models.Image) *models.Image); ok { + r0 = rf(newImage) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.Image) error); ok { + r1 = rf(newImage) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByChecksum provides a mock function with given fields: checksum +func (_m *ImageReaderWriter) FindByChecksum(checksum string) (*models.Image, error) { + ret := _m.Called(checksum) + + var r0 *models.Image + if rf, ok := ret.Get(0).(func(string) *models.Image); ok { + r0 = rf(checksum) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(checksum) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindMany provides a mock function with given fields: ids +func (_m 
*ImageReaderWriter) FindMany(ids []int) ([]*models.Image, error) { + ret := _m.Called(ids) + + var r0 []*models.Image + if rf, ok := ret.Get(0).(func([]int) []*models.Image); ok { + r0 = rf(ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func([]int) error); ok { + r1 = rf(ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Update provides a mock function with given fields: updatedImage +func (_m *ImageReaderWriter) Update(updatedImage models.ImagePartial) (*models.Image, error) { + ret := _m.Called(updatedImage) + + var r0 *models.Image + if rf, ok := ret.Get(0).(func(models.ImagePartial) *models.Image); ok { + r0 = rf(updatedImage) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.ImagePartial) error); ok { + r1 = rf(updatedImage) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateFull provides a mock function with given fields: updatedImage +func (_m *ImageReaderWriter) UpdateFull(updatedImage models.Image) (*models.Image, error) { + ret := _m.Called(updatedImage) + + var r0 *models.Image + if rf, ok := ret.Get(0).(func(models.Image) *models.Image); ok { + r0 = rf(updatedImage) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(models.Image) error); ok { + r1 = rf(updatedImage) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} diff --git a/pkg/models/mocks/JoinReaderWriter.go b/pkg/models/mocks/JoinReaderWriter.go index a3c94c1a4..6fc20057e 100644 --- a/pkg/models/mocks/JoinReaderWriter.go +++ b/pkg/models/mocks/JoinReaderWriter.go @@ -63,6 +63,48 @@ func (_m *JoinReaderWriter) GetSceneMovies(sceneID int) ([]models.MoviesScenes, return r0, r1 } +// UpdateGalleriesImages provides a mock function with given fields: imageID, updatedJoins +func (_m *JoinReaderWriter) UpdateGalleriesImages(imageID int, updatedJoins []models.GalleriesImages) error { + ret := _m.Called(imageID, updatedJoins) + + var r0 error + if rf, ok := ret.Get(0).(func(int, []models.GalleriesImages) error); ok { + r0 = rf(imageID, updatedJoins) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateGalleriesTags provides a mock function with given fields: galleryID, updatedJoins +func (_m *JoinReaderWriter) UpdateGalleriesTags(galleryID int, updatedJoins []models.GalleriesTags) error { + ret := _m.Called(galleryID, updatedJoins) + + var r0 error + if rf, ok := ret.Get(0).(func(int, []models.GalleriesTags) error); ok { + r0 = rf(galleryID, updatedJoins) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateImagesTags provides a mock function with given fields: imageID, updatedJoins +func (_m *JoinReaderWriter) UpdateImagesTags(imageID int, updatedJoins []models.ImagesTags) error { + ret := _m.Called(imageID, updatedJoins) + + var r0 error + if rf, ok := ret.Get(0).(func(int, []models.ImagesTags) error); ok { + r0 = rf(imageID, updatedJoins) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // UpdateMoviesScenes provides a mock function with given fields: sceneID, updatedJoins func (_m *JoinReaderWriter) UpdateMoviesScenes(sceneID int, updatedJoins []models.MoviesScenes) error { ret := _m.Called(sceneID, updatedJoins) @@ -77,6 +119,34 @@ func (_m *JoinReaderWriter) UpdateMoviesScenes(sceneID int, updatedJoins []model return r0 } +// UpdatePerformersGalleries provides a mock function with given fields: galleryID, updatedJoins 
+func (_m *JoinReaderWriter) UpdatePerformersGalleries(galleryID int, updatedJoins []models.PerformersGalleries) error { + ret := _m.Called(galleryID, updatedJoins) + + var r0 error + if rf, ok := ret.Get(0).(func(int, []models.PerformersGalleries) error); ok { + r0 = rf(galleryID, updatedJoins) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdatePerformersImages provides a mock function with given fields: imageID, updatedJoins +func (_m *JoinReaderWriter) UpdatePerformersImages(imageID int, updatedJoins []models.PerformersImages) error { + ret := _m.Called(imageID, updatedJoins) + + var r0 error + if rf, ok := ret.Get(0).(func(int, []models.PerformersImages) error); ok { + r0 = rf(imageID, updatedJoins) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // UpdatePerformersScenes provides a mock function with given fields: sceneID, updatedJoins func (_m *JoinReaderWriter) UpdatePerformersScenes(sceneID int, updatedJoins []models.PerformersScenes) error { ret := _m.Called(sceneID, updatedJoins) diff --git a/pkg/models/mocks/PerformerReaderWriter.go b/pkg/models/mocks/PerformerReaderWriter.go index 2f7db7bba..908529b2a 100644 --- a/pkg/models/mocks/PerformerReaderWriter.go +++ b/pkg/models/mocks/PerformerReaderWriter.go @@ -58,6 +58,52 @@ func (_m *PerformerReaderWriter) Create(newPerformer models.Performer) (*models. return r0, r1 } +// FindByGalleryID provides a mock function with given fields: galleryID +func (_m *PerformerReaderWriter) FindByGalleryID(galleryID int) ([]*models.Performer, error) { + ret := _m.Called(galleryID) + + var r0 []*models.Performer + if rf, ok := ret.Get(0).(func(int) []*models.Performer); ok { + r0 = rf(galleryID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Performer) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(int) error); ok { + r1 = rf(galleryID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByImageID provides a mock function with given fields: imageID +func (_m *PerformerReaderWriter) FindByImageID(imageID int) ([]*models.Performer, error) { + ret := _m.Called(imageID) + + var r0 []*models.Performer + if rf, ok := ret.Get(0).(func(int) []*models.Performer); ok { + r0 = rf(imageID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Performer) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(int) error); ok { + r1 = rf(imageID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByNames provides a mock function with given fields: names, nocase func (_m *PerformerReaderWriter) FindByNames(names []string, nocase bool) ([]*models.Performer, error) { ret := _m.Called(names, nocase) diff --git a/pkg/models/mocks/TagReaderWriter.go b/pkg/models/mocks/TagReaderWriter.go index 43d8a299f..ddeea97bf 100644 --- a/pkg/models/mocks/TagReaderWriter.go +++ b/pkg/models/mocks/TagReaderWriter.go @@ -81,6 +81,52 @@ func (_m *TagReaderWriter) Find(id int) (*models.Tag, error) { return r0, r1 } +// FindByGalleryID provides a mock function with given fields: galleryID +func (_m *TagReaderWriter) FindByGalleryID(galleryID int) ([]*models.Tag, error) { + ret := _m.Called(galleryID) + + var r0 []*models.Tag + if rf, ok := ret.Get(0).(func(int) []*models.Tag); ok { + r0 = rf(galleryID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Tag) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(int) error); ok { + r1 = rf(galleryID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByImageID provides a mock function with given fields: imageID +func 
(_m *TagReaderWriter) FindByImageID(imageID int) ([]*models.Tag, error) { + ret := _m.Called(imageID) + + var r0 []*models.Tag + if rf, ok := ret.Get(0).(func(int) []*models.Tag); ok { + r0 = rf(imageID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Tag) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(int) error); ok { + r1 = rf(imageID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByName provides a mock function with given fields: name, nocase func (_m *TagReaderWriter) FindByName(name string, nocase bool) (*models.Tag, error) { ret := _m.Called(name, nocase) diff --git a/pkg/models/model_gallery.go b/pkg/models/model_gallery.go index fe864b866..86d411b93 100644 --- a/pkg/models/model_gallery.go +++ b/pkg/models/model_gallery.go @@ -1,175 +1,40 @@ package models import ( - "archive/zip" - "bytes" "database/sql" - "image" - "image/jpeg" - "io/ioutil" - "path/filepath" - "sort" - "strings" - - "github.com/disintegration/imaging" - "github.com/stashapp/stash/pkg/api/urlbuilders" - "github.com/stashapp/stash/pkg/logger" - "github.com/stashapp/stash/pkg/utils" - _ "golang.org/x/image/webp" ) type Gallery struct { ID int `db:"id" json:"id"` - Path string `db:"path" json:"path"` + Path sql.NullString `db:"path" json:"path"` Checksum string `db:"checksum" json:"checksum"` + Zip bool `db:"zip" json:"zip"` + Title sql.NullString `db:"title" json:"title"` + URL sql.NullString `db:"url" json:"url"` + Date SQLiteDate `db:"date" json:"date"` + Details sql.NullString `db:"details" json:"details"` + Rating sql.NullInt64 `db:"rating" json:"rating"` + StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"` SceneID sql.NullInt64 `db:"scene_id,omitempty" json:"scene_id"` CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"` UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"` } -const DefaultGthumbWidth int = 200 - -func (g *Gallery) CountFiles() int { - filteredFiles, readCloser, err := g.listZipContents() - if err != nil { - return 0 - } - defer readCloser.Close() - - return len(filteredFiles) +// GalleryPartial represents part of a Gallery object. It is used to update +// the database entry. Only non-nil fields will be updated. 
+type GalleryPartial struct { + ID int `db:"id" json:"id"` + Path *sql.NullString `db:"path" json:"path"` + Checksum *string `db:"checksum" json:"checksum"` + Title *sql.NullString `db:"title" json:"title"` + URL *sql.NullString `db:"url" json:"url"` + Date *SQLiteDate `db:"date" json:"date"` + Details *sql.NullString `db:"details" json:"details"` + Rating *sql.NullInt64 `db:"rating" json:"rating"` + StudioID *sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"` + SceneID *sql.NullInt64 `db:"scene_id,omitempty" json:"scene_id"` + CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"` + UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"` } -func (g *Gallery) GetFiles(baseURL string) []*GalleryFilesType { - var galleryFiles []*GalleryFilesType - filteredFiles, readCloser, err := g.listZipContents() - if err != nil { - return nil - } - defer readCloser.Close() - - builder := urlbuilders.NewGalleryURLBuilder(baseURL, g.ID) - for i, file := range filteredFiles { - galleryURL := builder.GetGalleryImageURL(i) - galleryFile := GalleryFilesType{ - Index: i, - Name: &file.Name, - Path: &galleryURL, - } - galleryFiles = append(galleryFiles, &galleryFile) - } - - return galleryFiles -} - -func (g *Gallery) GetImage(index int) []byte { - data, _ := g.readZipFile(index) - return data -} - -func (g *Gallery) GetThumbnail(index int, width int) []byte { - data, _ := g.readZipFile(index) - srcImage, _, err := image.Decode(bytes.NewReader(data)) - if err != nil { - return data - } - resizedImage := imaging.Resize(srcImage, width, 0, imaging.Box) - buf := new(bytes.Buffer) - err = jpeg.Encode(buf, resizedImage, nil) - if err != nil { - return data - } - return buf.Bytes() -} - -func (g *Gallery) readZipFile(index int) ([]byte, error) { - filteredFiles, readCloser, err := g.listZipContents() - if err != nil { - return nil, err - } - defer readCloser.Close() - - zipFile := filteredFiles[index] - zipFileReadCloser, err := zipFile.Open() - if err != nil { - logger.Warn("failed to read file inside zip file") - return nil, err - } - defer zipFileReadCloser.Close() - - return ioutil.ReadAll(zipFileReadCloser) -} - -func (g *Gallery) listZipContents() ([]*zip.File, *zip.ReadCloser, error) { - readCloser, err := zip.OpenReader(g.Path) - if err != nil { - logger.Warnf("failed to read zip file %s", g.Path) - return nil, nil, err - } - - filteredFiles := make([]*zip.File, 0) - for _, file := range readCloser.File { - if file.FileInfo().IsDir() { - continue - } - ext := filepath.Ext(file.Name) - ext = strings.ToLower(ext) - if ext != ".jpg" && ext != ".jpeg" && ext != ".png" && ext != ".gif" && ext != ".webp" { - continue - } - if strings.Contains(file.Name, "__MACOSX") { - continue - } - filteredFiles = append(filteredFiles, file) - } - sort.Slice(filteredFiles, func(i, j int) bool { - a := filteredFiles[i] - b := filteredFiles[j] - return utils.NaturalCompare(a.Name, b.Name) - }) - - cover := contains(filteredFiles, "cover.jpg") // first image with cover.jpg in the name - if cover >= 0 { // will be moved to the start - reorderedFiles := reorder(filteredFiles, cover) - if reorderedFiles != nil { - return reorderedFiles, readCloser, nil - } - } - - return filteredFiles, readCloser, nil -} - -// return index of first occurrenece of string x ( case insensitive ) in name of zip contents, -1 otherwise -func contains(a []*zip.File, x string) int { - for i, n := range a { - if strings.Contains(strings.ToLower(n.Name), strings.ToLower(x)) { - return i - } - } - return -1 -} - -// reorder slice so that 
element with position toFirst gets at the start -func reorder(a []*zip.File, toFirst int) []*zip.File { - var first *zip.File - switch { - case toFirst < 0 || toFirst >= len(a): - return nil - case toFirst == 0: - return a - default: - first = a[toFirst] - copy(a[toFirst:], a[toFirst+1:]) // Shift a[toFirst+1:] left one index removing a[toFirst] element - a[len(a)-1] = nil // Nil now unused element for garbage collection - a = a[:len(a)-1] // Truncate slice - a = append([]*zip.File{first}, a...) // Push first to the start of the slice - } - return a -} - -func (g *Gallery) ImageCount() int { - images, _, _ := g.listZipContents() - if images == nil { - return 0 - } - return len(images) -} +const DefaultGthumbWidth int = 640 diff --git a/pkg/models/model_image.go b/pkg/models/model_image.go new file mode 100644 index 000000000..88d311730 --- /dev/null +++ b/pkg/models/model_image.go @@ -0,0 +1,44 @@ +package models + +import ( + "database/sql" +) + +// Image stores the metadata for a single image. +type Image struct { + ID int `db:"id" json:"id"` + Checksum string `db:"checksum" json:"checksum"` + Path string `db:"path" json:"path"` + Title sql.NullString `db:"title" json:"title"` + Rating sql.NullInt64 `db:"rating" json:"rating"` + OCounter int `db:"o_counter" json:"o_counter"` + Size sql.NullInt64 `db:"size" json:"size"` + Width sql.NullInt64 `db:"width" json:"width"` + Height sql.NullInt64 `db:"height" json:"height"` + StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"` + CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"` + UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"` +} + +// ImagePartial represents part of a Image object. It is used to update +// the database entry. Only non-nil fields will be updated. +type ImagePartial struct { + ID int `db:"id" json:"id"` + Checksum *string `db:"checksum" json:"checksum"` + Path *string `db:"path" json:"path"` + Title *sql.NullString `db:"title" json:"title"` + Rating *sql.NullInt64 `db:"rating" json:"rating"` + Size *sql.NullInt64 `db:"size" json:"size"` + Width *sql.NullInt64 `db:"width" json:"width"` + Height *sql.NullInt64 `db:"height" json:"height"` + StudioID *sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"` + CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"` + UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"` +} + +// ImageFileType represents the file metadata for an image. 
+type ImageFileType struct { + Size *int `graphql:"size" json:"size"` + Width *int `graphql:"width" json:"width"` + Height *int `graphql:"height" json:"height"` +} diff --git a/pkg/models/model_joins.go b/pkg/models/model_joins.go index f69be8946..09c98131e 100644 --- a/pkg/models/model_joins.go +++ b/pkg/models/model_joins.go @@ -22,3 +22,28 @@ type SceneMarkersTags struct { SceneMarkerID int `db:"scene_marker_id" json:"scene_marker_id"` TagID int `db:"tag_id" json:"tag_id"` } + +type PerformersImages struct { + PerformerID int `db:"performer_id" json:"performer_id"` + ImageID int `db:"image_id" json:"image_id"` +} + +type ImagesTags struct { + ImageID int `db:"image_id" json:"image_id"` + TagID int `db:"tag_id" json:"tag_id"` +} + +type GalleriesImages struct { + GalleryID int `db:"gallery_id" json:"gallery_id"` + ImageID int `db:"image_id" json:"image_id"` +} + +type PerformersGalleries struct { + PerformerID int `db:"performer_id" json:"performer_id"` + GalleryID int `db:"gallery_id" json:"gallery_id"` +} + +type GalleriesTags struct { + TagID int `db:"tag_id" json:"tag_id"` + GalleryID int `db:"gallery_id" json:"gallery_id"` +} diff --git a/pkg/models/performer.go b/pkg/models/performer.go index b4e4eaddc..d3956b3bb 100644 --- a/pkg/models/performer.go +++ b/pkg/models/performer.go @@ -9,6 +9,8 @@ type PerformerReader interface { FindMany(ids []int) ([]*Performer, error) FindBySceneID(sceneID int) ([]*Performer, error) FindNamesBySceneID(sceneID int) ([]*Performer, error) + FindByImageID(imageID int) ([]*Performer, error) + FindByGalleryID(galleryID int) ([]*Performer, error) FindByNames(names []string, nocase bool) ([]*Performer, error) // Count() (int, error) All() ([]*Performer, error) @@ -66,6 +68,14 @@ func (t *performerReaderWriter) FindNamesBySceneID(sceneID int) ([]*Performer, e return t.qb.FindNameBySceneID(sceneID, t.tx) } +func (t *performerReaderWriter) FindByImageID(id int) ([]*Performer, error) { + return t.qb.FindByImageID(id, t.tx) +} + +func (t *performerReaderWriter) FindByGalleryID(id int) ([]*Performer, error) { + return t.qb.FindByGalleryID(id, t.tx) +} + func (t *performerReaderWriter) Create(newPerformer Performer) (*Performer, error) { return t.qb.Create(newPerformer, t.tx) } diff --git a/pkg/models/querybuilder_gallery.go b/pkg/models/querybuilder_gallery.go index fdf9fd73c..ffe3e54a6 100644 --- a/pkg/models/querybuilder_gallery.go +++ b/pkg/models/querybuilder_gallery.go @@ -21,8 +21,8 @@ func NewGalleryQueryBuilder() GalleryQueryBuilder { func (qb *GalleryQueryBuilder) Create(newGallery Gallery, tx *sqlx.Tx) (*Gallery, error) { ensureTx(tx) result, err := tx.NamedExec( - `INSERT INTO galleries (path, checksum, scene_id, created_at, updated_at) - VALUES (:path, :checksum, :scene_id, :created_at, :updated_at) + `INSERT INTO galleries (path, checksum, zip, title, date, details, url, studio_id, rating, scene_id, created_at, updated_at) + VALUES (:path, :checksum, :zip, :title, :date, :details, :url, :studio_id, :rating, :scene_id, :created_at, :updated_at) `, newGallery, ) @@ -55,6 +55,19 @@ func (qb *GalleryQueryBuilder) Update(updatedGallery Gallery, tx *sqlx.Tx) (*Gal return &updatedGallery, nil } +func (qb *GalleryQueryBuilder) UpdatePartial(updatedGallery GalleryPartial, tx *sqlx.Tx) (*Gallery, error) { + ensureTx(tx) + _, err := tx.NamedExec( + `UPDATE galleries SET `+SQLGenKeysPartial(updatedGallery)+` WHERE galleries.id = :id`, + updatedGallery, + ) + if err != nil { + return nil, err + } + + return qb.Find(updatedGallery.ID, tx) +} + func (qb 
*GalleryQueryBuilder) Destroy(id int, tx *sqlx.Tx) error { return executeDeleteQuery("galleries", strconv.Itoa(id), tx) } @@ -77,16 +90,16 @@ func (qb *GalleryQueryBuilder) ClearGalleryId(sceneID int, tx *sqlx.Tx) error { return err } -func (qb *GalleryQueryBuilder) Find(id int) (*Gallery, error) { +func (qb *GalleryQueryBuilder) Find(id int, tx *sqlx.Tx) (*Gallery, error) { query := "SELECT * FROM galleries WHERE id = ? LIMIT 1" args := []interface{}{id} - return qb.queryGallery(query, args, nil) + return qb.queryGallery(query, args, tx) } func (qb *GalleryQueryBuilder) FindMany(ids []int) ([]*Gallery, error) { var galleries []*Gallery for _, id := range ids { - gallery, err := qb.Find(id) + gallery, err := qb.Find(id, nil) if err != nil { return nil, err } @@ -125,6 +138,24 @@ func (qb *GalleryQueryBuilder) ValidGalleriesForScenePath(scenePath string) ([]* return qb.queryGalleries(query, nil, nil) } +func (qb *GalleryQueryBuilder) FindByImageID(imageID int, tx *sqlx.Tx) ([]*Gallery, error) { + query := selectAll(galleryTable) + ` + LEFT JOIN galleries_images as images_join on images_join.gallery_id = galleries.id + WHERE images_join.image_id = ? + GROUP BY galleries.id + ` + args := []interface{}{imageID} + return qb.queryGalleries(query, args, tx) +} + +func (qb *GalleryQueryBuilder) CountByImageID(imageID int) (int, error) { + query := `SELECT image_id FROM galleries_images + WHERE image_id = ? + GROUP BY gallery_id` + args := []interface{}{imageID} + return runCountQuery(buildCountQuery(query), args) +} + func (qb *GalleryQueryBuilder) Count() (int, error) { return runCountQuery(buildCountQuery("SELECT galleries.id FROM galleries"), nil) } @@ -146,6 +177,11 @@ func (qb *GalleryQueryBuilder) Query(galleryFilter *GalleryFilterType, findFilte } query.body = selectDistinctIDs("galleries") + query.body += ` + left join performers_galleries as performers_join on performers_join.gallery_id = galleries.id + left join studios as studio on studio.id = galleries.studio_id + left join galleries_tags as tags_join on tags_join.gallery_id = galleries.id + ` if q := findFilter.Q; q != nil && *q != "" { searchColumns := []string{"galleries.path", "galleries.checksum"} @@ -154,21 +190,73 @@ func (qb *GalleryQueryBuilder) Query(galleryFilter *GalleryFilterType, findFilte query.addArg(thisArgs...) } + if zipFilter := galleryFilter.IsZip; zipFilter != nil { + var favStr string + if *zipFilter == true { + favStr = "1" + } else { + favStr = "0" + } + query.addWhere("galleries.zip = " + favStr) + } + query.handleStringCriterionInput(galleryFilter.Path, "galleries.path") if isMissingFilter := galleryFilter.IsMissing; isMissingFilter != nil && *isMissingFilter != "" { switch *isMissingFilter { case "scene": query.addWhere("galleries.scene_id IS NULL") + case "studio": + query.addWhere("galleries.studio_id IS NULL") + case "performers": + query.addWhere("performers_join.gallery_id IS NULL") + case "date": + query.addWhere("galleries.date IS \"\" OR galleries.date IS \"0001-01-01\"") + case "tags": + query.addWhere("tags_join.gallery_id IS NULL") + default: + query.addWhere("galleries." 
+ *isMissingFilter + " IS NULL") } } + if tagsFilter := galleryFilter.Tags; tagsFilter != nil && len(tagsFilter.Value) > 0 { + for _, tagID := range tagsFilter.Value { + query.addArg(tagID) + } + + query.body += " LEFT JOIN tags on tags_join.tag_id = tags.id" + whereClause, havingClause := getMultiCriterionClause("galleries", "tags", "tags_join", "gallery_id", "tag_id", tagsFilter) + query.addWhere(whereClause) + query.addHaving(havingClause) + } + + if performersFilter := galleryFilter.Performers; performersFilter != nil && len(performersFilter.Value) > 0 { + for _, performerID := range performersFilter.Value { + query.addArg(performerID) + } + + query.body += " LEFT JOIN performers ON performers_join.performer_id = performers.id" + whereClause, havingClause := getMultiCriterionClause("galleries", "performers", "performers_join", "gallery_id", "performer_id", performersFilter) + query.addWhere(whereClause) + query.addHaving(havingClause) + } + + if studiosFilter := galleryFilter.Studios; studiosFilter != nil && len(studiosFilter.Value) > 0 { + for _, studioID := range studiosFilter.Value { + query.addArg(studioID) + } + + whereClause, havingClause := getMultiCriterionClause("galleries", "studio", "", "", "studio_id", studiosFilter) + query.addWhere(whereClause) + query.addHaving(havingClause) + } + query.sortAndPagination = qb.getGallerySort(findFilter) + getPagination(findFilter) idsResult, countResult := query.executeFind() var galleries []*Gallery for _, id := range idsResult { - gallery, _ := qb.Find(id) + gallery, _ := qb.Find(id, nil) galleries = append(galleries, gallery) } diff --git a/pkg/models/querybuilder_gallery_test.go b/pkg/models/querybuilder_gallery_test.go index 16b04d009..16e9a50a4 100644 --- a/pkg/models/querybuilder_gallery_test.go +++ b/pkg/models/querybuilder_gallery_test.go @@ -14,15 +14,15 @@ func TestGalleryFind(t *testing.T) { gqb := models.NewGalleryQueryBuilder() const galleryIdx = 0 - gallery, err := gqb.Find(galleryIDs[galleryIdx]) + gallery, err := gqb.Find(galleryIDs[galleryIdx], nil) if err != nil { t.Fatalf("Error finding gallery: %s", err.Error()) } - assert.Equal(t, getGalleryStringValue(galleryIdx, "Path"), gallery.Path) + assert.Equal(t, getGalleryStringValue(galleryIdx, "Path"), gallery.Path.String) - gallery, err = gqb.Find(0) + gallery, err = gqb.Find(0, nil) if err != nil { t.Fatalf("Error finding gallery: %s", err.Error()) @@ -42,7 +42,7 @@ func TestGalleryFindByChecksum(t *testing.T) { t.Fatalf("Error finding gallery: %s", err.Error()) } - assert.Equal(t, getGalleryStringValue(galleryIdx, "Path"), gallery.Path) + assert.Equal(t, getGalleryStringValue(galleryIdx, "Path"), gallery.Path.String) galleryChecksum = "not exist" gallery, err = gqb.FindByChecksum(galleryChecksum, nil) @@ -65,7 +65,7 @@ func TestGalleryFindByPath(t *testing.T) { t.Fatalf("Error finding gallery: %s", err.Error()) } - assert.Equal(t, galleryPath, gallery.Path) + assert.Equal(t, galleryPath, gallery.Path.String) galleryPath = "not exist" gallery, err = gqb.FindByPath(galleryPath) @@ -87,7 +87,7 @@ func TestGalleryFindBySceneID(t *testing.T) { t.Fatalf("Error finding gallery: %s", err.Error()) } - assert.Equal(t, getGalleryStringValue(galleryIdxWithScene, "Path"), gallery.Path) + assert.Equal(t, getGalleryStringValue(galleryIdxWithScene, "Path"), gallery.Path.String) gallery, err = gqb.FindBySceneID(0, nil) @@ -149,7 +149,7 @@ func verifyGalleriesPath(t *testing.T, pathCriterion models.StringCriterionInput galleries, _ := sqb.Query(&galleryFilter, nil) for _, gallery := 
range galleries { - verifyString(t, gallery.Path, pathCriterion) + verifyNullString(t, gallery.Path, pathCriterion) } } diff --git a/pkg/models/querybuilder_image.go b/pkg/models/querybuilder_image.go new file mode 100644 index 000000000..bd9095080 --- /dev/null +++ b/pkg/models/querybuilder_image.go @@ -0,0 +1,434 @@ +package models + +import ( + "database/sql" + "fmt" + "strconv" + + "github.com/jmoiron/sqlx" + "github.com/stashapp/stash/pkg/database" + "github.com/stashapp/stash/pkg/utils" +) + +const imageTable = "images" + +var imagesForPerformerQuery = selectAll(imageTable) + ` +LEFT JOIN performers_images as performers_join on performers_join.image_id = images.id +WHERE performers_join.performer_id = ? +GROUP BY images.id +` + +var countImagesForPerformerQuery = ` +SELECT performer_id FROM performers_images as performers_join +WHERE performer_id = ? +GROUP BY image_id +` + +var imagesForStudioQuery = selectAll(imageTable) + ` +JOIN studios ON studios.id = images.studio_id +WHERE studios.id = ? +GROUP BY images.id +` +var imagesForMovieQuery = selectAll(imageTable) + ` +LEFT JOIN movies_images as movies_join on movies_join.image_id = images.id +WHERE movies_join.movie_id = ? +GROUP BY images.id +` + +var countImagesForTagQuery = ` +SELECT tag_id AS id FROM images_tags +WHERE images_tags.tag_id = ? +GROUP BY images_tags.image_id +` + +var imagesForGalleryQuery = selectAll(imageTable) + ` +LEFT JOIN galleries_images as galleries_join on galleries_join.image_id = images.id +WHERE galleries_join.gallery_id = ? +GROUP BY images.id +` + +var countImagesForGalleryQuery = ` +SELECT gallery_id FROM galleries_images +WHERE gallery_id = ? +GROUP BY image_id +` + +type ImageQueryBuilder struct{} + +func NewImageQueryBuilder() ImageQueryBuilder { + return ImageQueryBuilder{} +} + +func (qb *ImageQueryBuilder) Create(newImage Image, tx *sqlx.Tx) (*Image, error) { + ensureTx(tx) + result, err := tx.NamedExec( + `INSERT INTO images (checksum, path, title, rating, o_counter, size, + width, height, studio_id, created_at, updated_at) + VALUES (:checksum, :path, :title, :rating, :o_counter, :size, + :width, :height, :studio_id, :created_at, :updated_at) + `, + newImage, + ) + if err != nil { + return nil, err + } + imageID, err := result.LastInsertId() + if err != nil { + return nil, err + } + if err := tx.Get(&newImage, `SELECT * FROM images WHERE id = ? 
LIMIT 1`, imageID); err != nil { + return nil, err + } + return &newImage, nil +} + +func (qb *ImageQueryBuilder) Update(updatedImage ImagePartial, tx *sqlx.Tx) (*Image, error) { + ensureTx(tx) + _, err := tx.NamedExec( + `UPDATE images SET `+SQLGenKeysPartial(updatedImage)+` WHERE images.id = :id`, + updatedImage, + ) + if err != nil { + return nil, err + } + + return qb.find(updatedImage.ID, tx) +} + +func (qb *ImageQueryBuilder) UpdateFull(updatedImage Image, tx *sqlx.Tx) (*Image, error) { + ensureTx(tx) + _, err := tx.NamedExec( + `UPDATE images SET `+SQLGenKeys(updatedImage)+` WHERE images.id = :id`, + updatedImage, + ) + if err != nil { + return nil, err + } + + return qb.find(updatedImage.ID, tx) +} + +func (qb *ImageQueryBuilder) IncrementOCounter(id int, tx *sqlx.Tx) (int, error) { + ensureTx(tx) + _, err := tx.Exec( + `UPDATE images SET o_counter = o_counter + 1 WHERE images.id = ?`, + id, + ) + if err != nil { + return 0, err + } + + image, err := qb.find(id, tx) + if err != nil { + return 0, err + } + + return image.OCounter, nil +} + +func (qb *ImageQueryBuilder) DecrementOCounter(id int, tx *sqlx.Tx) (int, error) { + ensureTx(tx) + _, err := tx.Exec( + `UPDATE images SET o_counter = o_counter - 1 WHERE images.id = ? and images.o_counter > 0`, + id, + ) + if err != nil { + return 0, err + } + + image, err := qb.find(id, tx) + if err != nil { + return 0, err + } + + return image.OCounter, nil +} + +func (qb *ImageQueryBuilder) ResetOCounter(id int, tx *sqlx.Tx) (int, error) { + ensureTx(tx) + _, err := tx.Exec( + `UPDATE images SET o_counter = 0 WHERE images.id = ?`, + id, + ) + if err != nil { + return 0, err + } + + image, err := qb.find(id, tx) + if err != nil { + return 0, err + } + + return image.OCounter, nil +} + +func (qb *ImageQueryBuilder) Destroy(id int, tx *sqlx.Tx) error { + return executeDeleteQuery("images", strconv.Itoa(id), tx) +} +func (qb *ImageQueryBuilder) Find(id int) (*Image, error) { + return qb.find(id, nil) +} + +func (qb *ImageQueryBuilder) FindMany(ids []int) ([]*Image, error) { + var images []*Image + for _, id := range ids { + image, err := qb.Find(id) + if err != nil { + return nil, err + } + + if image == nil { + return nil, fmt.Errorf("image with id %d not found", id) + } + + images = append(images, image) + } + + return images, nil +} + +func (qb *ImageQueryBuilder) find(id int, tx *sqlx.Tx) (*Image, error) { + query := selectAll(imageTable) + "WHERE id = ? LIMIT 1" + args := []interface{}{id} + return qb.queryImage(query, args, tx) +} + +func (qb *ImageQueryBuilder) FindByChecksum(checksum string) (*Image, error) { + query := "SELECT * FROM images WHERE checksum = ? LIMIT 1" + args := []interface{}{checksum} + return qb.queryImage(query, args, nil) +} + +func (qb *ImageQueryBuilder) FindByPath(path string) (*Image, error) { + query := selectAll(imageTable) + "WHERE path = ? 
LIMIT 1" + args := []interface{}{path} + return qb.queryImage(query, args, nil) +} + +func (qb *ImageQueryBuilder) FindByPerformerID(performerID int) ([]*Image, error) { + args := []interface{}{performerID} + return qb.queryImages(imagesForPerformerQuery, args, nil) +} + +func (qb *ImageQueryBuilder) CountByPerformerID(performerID int) (int, error) { + args := []interface{}{performerID} + return runCountQuery(buildCountQuery(countImagesForPerformerQuery), args) +} + +func (qb *ImageQueryBuilder) FindByStudioID(studioID int) ([]*Image, error) { + args := []interface{}{studioID} + return qb.queryImages(imagesForStudioQuery, args, nil) +} + +func (qb *ImageQueryBuilder) FindByGalleryID(galleryID int) ([]*Image, error) { + args := []interface{}{galleryID} + return qb.queryImages(imagesForGalleryQuery, args, nil) +} + +func (qb *ImageQueryBuilder) CountByGalleryID(galleryID int) (int, error) { + args := []interface{}{galleryID} + return runCountQuery(buildCountQuery(countImagesForGalleryQuery), args) +} + +func (qb *ImageQueryBuilder) Count() (int, error) { + return runCountQuery(buildCountQuery("SELECT images.id FROM images"), nil) +} + +func (qb *ImageQueryBuilder) SizeCount() (string, error) { + sum, err := runSumQuery("SELECT SUM(size) as sum FROM images", nil) + if err != nil { + return "0 B", err + } + return utils.HumanizeBytes(sum), err +} + +func (qb *ImageQueryBuilder) CountByStudioID(studioID int) (int, error) { + args := []interface{}{studioID} + return runCountQuery(buildCountQuery(imagesForStudioQuery), args) +} + +func (qb *ImageQueryBuilder) CountByTagID(tagID int) (int, error) { + args := []interface{}{tagID} + return runCountQuery(buildCountQuery(countImagesForTagQuery), args) +} + +func (qb *ImageQueryBuilder) All() ([]*Image, error) { + return qb.queryImages(selectAll(imageTable)+qb.getImageSort(nil), nil, nil) +} + +func (qb *ImageQueryBuilder) Query(imageFilter *ImageFilterType, findFilter *FindFilterType) ([]*Image, int) { + if imageFilter == nil { + imageFilter = &ImageFilterType{} + } + if findFilter == nil { + findFilter = &FindFilterType{} + } + + query := queryBuilder{ + tableName: imageTable, + } + + query.body = selectDistinctIDs(imageTable) + query.body += ` + left join performers_images as performers_join on performers_join.image_id = images.id + left join studios as studio on studio.id = images.studio_id + left join images_tags as tags_join on tags_join.image_id = images.id + left join galleries_images as galleries_join on galleries_join.image_id = images.id + ` + + if q := findFilter.Q; q != nil && *q != "" { + searchColumns := []string{"images.title", "images.path", "images.checksum"} + clause, thisArgs := getSearchBinding(searchColumns, *q, false) + query.addWhere(clause) + query.addArg(thisArgs...) 
+ } + + if rating := imageFilter.Rating; rating != nil { + clause, count := getIntCriterionWhereClause("images.rating", *imageFilter.Rating) + query.addWhere(clause) + if count == 1 { + query.addArg(imageFilter.Rating.Value) + } + } + + if oCounter := imageFilter.OCounter; oCounter != nil { + clause, count := getIntCriterionWhereClause("images.o_counter", *imageFilter.OCounter) + query.addWhere(clause) + if count == 1 { + query.addArg(imageFilter.OCounter.Value) + } + } + + if resolutionFilter := imageFilter.Resolution; resolutionFilter != nil { + if resolution := resolutionFilter.String(); resolutionFilter.IsValid() { + switch resolution { + case "LOW": + query.addWhere("images.height < 480") + case "STANDARD": + query.addWhere("(images.height >= 480 AND images.height < 720)") + case "STANDARD_HD": + query.addWhere("(images.height >= 720 AND images.height < 1080)") + case "FULL_HD": + query.addWhere("(images.height >= 1080 AND images.height < 2160)") + case "FOUR_K": + query.addWhere("images.height >= 2160") + } + } + } + + if isMissingFilter := imageFilter.IsMissing; isMissingFilter != nil && *isMissingFilter != "" { + switch *isMissingFilter { + case "studio": + query.addWhere("images.studio_id IS NULL") + case "performers": + query.addWhere("performers_join.image_id IS NULL") + case "galleries": + query.addWhere("galleries_join.image_id IS NULL") + case "tags": + query.addWhere("tags_join.image_id IS NULL") + default: + query.addWhere("images." + *isMissingFilter + " IS NULL") + } + } + + if tagsFilter := imageFilter.Tags; tagsFilter != nil && len(tagsFilter.Value) > 0 { + for _, tagID := range tagsFilter.Value { + query.addArg(tagID) + } + + query.body += " LEFT JOIN tags on tags_join.tag_id = tags.id" + whereClause, havingClause := getMultiCriterionClause("images", "tags", "images_tags", "image_id", "tag_id", tagsFilter) + query.addWhere(whereClause) + query.addHaving(havingClause) + } + + if galleriesFilter := imageFilter.Galleries; galleriesFilter != nil && len(galleriesFilter.Value) > 0 { + for _, galleryID := range galleriesFilter.Value { + query.addArg(galleryID) + } + + query.body += " LEFT JOIN galleries ON galleries_join.gallery_id = galleries.id" + whereClause, havingClause := getMultiCriterionClause("images", "galleries", "galleries_images", "image_id", "gallery_id", galleriesFilter) + query.addWhere(whereClause) + query.addHaving(havingClause) + } + + if performersFilter := imageFilter.Performers; performersFilter != nil && len(performersFilter.Value) > 0 { + for _, performerID := range performersFilter.Value { + query.addArg(performerID) + } + + query.body += " LEFT JOIN performers ON performers_join.performer_id = performers.id" + whereClause, havingClause := getMultiCriterionClause("images", "performers", "performers_images", "image_id", "performer_id", performersFilter) + query.addWhere(whereClause) + query.addHaving(havingClause) + } + + if studiosFilter := imageFilter.Studios; studiosFilter != nil && len(studiosFilter.Value) > 0 { + for _, studioID := range studiosFilter.Value { + query.addArg(studioID) + } + + whereClause, havingClause := getMultiCriterionClause("images", "studio", "", "", "studio_id", studiosFilter) + query.addWhere(whereClause) + query.addHaving(havingClause) + } + + query.sortAndPagination = qb.getImageSort(findFilter) + getPagination(findFilter) + idsResult, countResult := query.executeFind() + + var images []*Image + for _, id := range idsResult { + image, _ := qb.Find(id) + images = append(images, image) + } + + return images, countResult +} + 
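For orientation, here is a minimal sketch of how a caller might exercise ImageQueryBuilder.Query with the filter types used above. It assumes the database layer has already been initialised elsewhere (so database.DB is available), and the rating threshold, sort field and paging values are illustrative placeholders only; they are not part of this patch.

package main

import (
	"fmt"

	"github.com/stashapp/stash/pkg/models"
)

func main() {
	// Illustrative filter: images rated above 3, sorted by title, 40 per page.
	// Any IntCriterionInput / FindFilterType combination accepted by Query
	// can be supplied in the same way.
	rating := 3
	sort := "title"
	perPage := 40

	imageFilter := models.ImageFilterType{
		Rating: &models.IntCriterionInput{
			Value:    rating,
			Modifier: models.CriterionModifierGreaterThan,
		},
	}
	findFilter := models.FindFilterType{
		Sort:    &sort,
		PerPage: &perPage,
	}

	qb := models.NewImageQueryBuilder()
	images, total := qb.Query(&imageFilter, &findFilter)
	fmt.Printf("matched %d images, %d returned on this page\n", total, len(images))
}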
+func (qb *ImageQueryBuilder) getImageSort(findFilter *FindFilterType) string { + if findFilter == nil { + return " ORDER BY images.path ASC " + } + sort := findFilter.GetSort("title") + direction := findFilter.GetDirection() + return getSort(sort, direction, "images") +} + +func (qb *ImageQueryBuilder) queryImage(query string, args []interface{}, tx *sqlx.Tx) (*Image, error) { + results, err := qb.queryImages(query, args, tx) + if err != nil || len(results) < 1 { + return nil, err + } + return results[0], nil +} + +func (qb *ImageQueryBuilder) queryImages(query string, args []interface{}, tx *sqlx.Tx) ([]*Image, error) { + var rows *sqlx.Rows + var err error + if tx != nil { + rows, err = tx.Queryx(query, args...) + } else { + rows, err = database.DB.Queryx(query, args...) + } + + if err != nil && err != sql.ErrNoRows { + return nil, err + } + defer rows.Close() + + images := make([]*Image, 0) + for rows.Next() { + image := Image{} + if err := rows.StructScan(&image); err != nil { + return nil, err + } + images = append(images, &image) + } + + if err := rows.Err(); err != nil { + return nil, err + } + + return images, nil +} diff --git a/pkg/models/querybuilder_image_test.go b/pkg/models/querybuilder_image_test.go new file mode 100644 index 000000000..46855ae0d --- /dev/null +++ b/pkg/models/querybuilder_image_test.go @@ -0,0 +1,624 @@ +// +build integration + +package models_test + +import ( + "database/sql" + "strconv" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/stashapp/stash/pkg/models" +) + +func TestImageFind(t *testing.T) { + // assume that the first image is imageWithGalleryPath + sqb := models.NewImageQueryBuilder() + + const imageIdx = 0 + imageID := imageIDs[imageIdx] + image, err := sqb.Find(imageID) + + if err != nil { + t.Fatalf("Error finding image: %s", err.Error()) + } + + assert.Equal(t, getImageStringValue(imageIdx, "Path"), image.Path) + + imageID = 0 + image, err = sqb.Find(imageID) + + if err != nil { + t.Fatalf("Error finding image: %s", err.Error()) + } + + assert.Nil(t, image) +} + +func TestImageFindByPath(t *testing.T) { + sqb := models.NewImageQueryBuilder() + + const imageIdx = 1 + imagePath := getImageStringValue(imageIdx, "Path") + image, err := sqb.FindByPath(imagePath) + + if err != nil { + t.Fatalf("Error finding image: %s", err.Error()) + } + + assert.Equal(t, imageIDs[imageIdx], image.ID) + assert.Equal(t, imagePath, image.Path) + + imagePath = "not exist" + image, err = sqb.FindByPath(imagePath) + + if err != nil { + t.Fatalf("Error finding image: %s", err.Error()) + } + + assert.Nil(t, image) +} + +func TestImageCountByPerformerID(t *testing.T) { + sqb := models.NewImageQueryBuilder() + count, err := sqb.CountByPerformerID(performerIDs[performerIdxWithImage]) + + if err != nil { + t.Fatalf("Error counting images: %s", err.Error()) + } + + assert.Equal(t, 1, count) + + count, err = sqb.CountByPerformerID(0) + + if err != nil { + t.Fatalf("Error counting images: %s", err.Error()) + } + + assert.Equal(t, 0, count) +} + +func TestImageQueryQ(t *testing.T) { + const imageIdx = 2 + + q := getImageStringValue(imageIdx, titleField) + + sqb := models.NewImageQueryBuilder() + + imageQueryQ(t, sqb, q, imageIdx) +} + +func imageQueryQ(t *testing.T, sqb models.ImageQueryBuilder, q string, expectedImageIdx int) { + filter := models.FindFilterType{ + Q: &q, + } + images, _ := sqb.Query(nil, &filter) + + assert.Len(t, images, 1) + image := images[0] + assert.Equal(t, imageIDs[expectedImageIdx], image.ID) + + // no Q should return all results 
+ filter.Q = nil + images, _ = sqb.Query(nil, &filter) + + assert.Len(t, images, totalImages) +} + +func TestImageQueryRating(t *testing.T) { + const rating = 3 + ratingCriterion := models.IntCriterionInput{ + Value: rating, + Modifier: models.CriterionModifierEquals, + } + + verifyImagesRating(t, ratingCriterion) + + ratingCriterion.Modifier = models.CriterionModifierNotEquals + verifyImagesRating(t, ratingCriterion) + + ratingCriterion.Modifier = models.CriterionModifierGreaterThan + verifyImagesRating(t, ratingCriterion) + + ratingCriterion.Modifier = models.CriterionModifierLessThan + verifyImagesRating(t, ratingCriterion) + + ratingCriterion.Modifier = models.CriterionModifierIsNull + verifyImagesRating(t, ratingCriterion) + + ratingCriterion.Modifier = models.CriterionModifierNotNull + verifyImagesRating(t, ratingCriterion) +} + +func verifyImagesRating(t *testing.T, ratingCriterion models.IntCriterionInput) { + sqb := models.NewImageQueryBuilder() + imageFilter := models.ImageFilterType{ + Rating: &ratingCriterion, + } + + images, _ := sqb.Query(&imageFilter, nil) + + for _, image := range images { + verifyInt64(t, image.Rating, ratingCriterion) + } +} + +func TestImageQueryOCounter(t *testing.T) { + const oCounter = 1 + oCounterCriterion := models.IntCriterionInput{ + Value: oCounter, + Modifier: models.CriterionModifierEquals, + } + + verifyImagesOCounter(t, oCounterCriterion) + + oCounterCriterion.Modifier = models.CriterionModifierNotEquals + verifyImagesOCounter(t, oCounterCriterion) + + oCounterCriterion.Modifier = models.CriterionModifierGreaterThan + verifyImagesOCounter(t, oCounterCriterion) + + oCounterCriterion.Modifier = models.CriterionModifierLessThan + verifyImagesOCounter(t, oCounterCriterion) +} + +func verifyImagesOCounter(t *testing.T, oCounterCriterion models.IntCriterionInput) { + sqb := models.NewImageQueryBuilder() + imageFilter := models.ImageFilterType{ + OCounter: &oCounterCriterion, + } + + images, _ := sqb.Query(&imageFilter, nil) + + for _, image := range images { + verifyInt(t, image.OCounter, oCounterCriterion) + } +} + +func TestImageQueryResolution(t *testing.T) { + verifyImagesResolution(t, models.ResolutionEnumLow) + verifyImagesResolution(t, models.ResolutionEnumStandard) + verifyImagesResolution(t, models.ResolutionEnumStandardHd) + verifyImagesResolution(t, models.ResolutionEnumFullHd) + verifyImagesResolution(t, models.ResolutionEnumFourK) + verifyImagesResolution(t, models.ResolutionEnum("unknown")) +} + +func verifyImagesResolution(t *testing.T, resolution models.ResolutionEnum) { + sqb := models.NewImageQueryBuilder() + imageFilter := models.ImageFilterType{ + Resolution: &resolution, + } + + images, _ := sqb.Query(&imageFilter, nil) + + for _, image := range images { + verifyImageResolution(t, image.Height, resolution) + } +} + +func verifyImageResolution(t *testing.T, height sql.NullInt64, resolution models.ResolutionEnum) { + assert := assert.New(t) + h := height.Int64 + + switch resolution { + case models.ResolutionEnumLow: + assert.True(h < 480) + case models.ResolutionEnumStandard: + assert.True(h >= 480 && h < 720) + case models.ResolutionEnumStandardHd: + assert.True(h >= 720 && h < 1080) + case models.ResolutionEnumFullHd: + assert.True(h >= 1080 && h < 2160) + case models.ResolutionEnumFourK: + assert.True(h >= 2160) + } +} + +func TestImageQueryIsMissingGalleries(t *testing.T) { + sqb := models.NewImageQueryBuilder() + isMissing := "galleries" + imageFilter := models.ImageFilterType{ + IsMissing: &isMissing, + } + + q := 
getImageStringValue(imageIdxWithGallery, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + images, _ := sqb.Query(&imageFilter, &findFilter) + + assert.Len(t, images, 0) + + findFilter.Q = nil + images, _ = sqb.Query(&imageFilter, &findFilter) + + // ensure none of the ids equal the one with gallery + for _, image := range images { + assert.NotEqual(t, imageIDs[imageIdxWithGallery], image.ID) + } +} + +func TestImageQueryIsMissingStudio(t *testing.T) { + sqb := models.NewImageQueryBuilder() + isMissing := "studio" + imageFilter := models.ImageFilterType{ + IsMissing: &isMissing, + } + + q := getImageStringValue(imageIdxWithStudio, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + images, _ := sqb.Query(&imageFilter, &findFilter) + + assert.Len(t, images, 0) + + findFilter.Q = nil + images, _ = sqb.Query(&imageFilter, &findFilter) + + // ensure none of the ids equal the one with studio + for _, image := range images { + assert.NotEqual(t, imageIDs[imageIdxWithStudio], image.ID) + } +} + +func TestImageQueryIsMissingPerformers(t *testing.T) { + sqb := models.NewImageQueryBuilder() + isMissing := "performers" + imageFilter := models.ImageFilterType{ + IsMissing: &isMissing, + } + + q := getImageStringValue(imageIdxWithPerformer, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + images, _ := sqb.Query(&imageFilter, &findFilter) + + assert.Len(t, images, 0) + + findFilter.Q = nil + images, _ = sqb.Query(&imageFilter, &findFilter) + + assert.True(t, len(images) > 0) + + // ensure none of the ids equal the one with a performer + for _, image := range images { + assert.NotEqual(t, imageIDs[imageIdxWithPerformer], image.ID) + } +} + +func TestImageQueryIsMissingTags(t *testing.T) { + sqb := models.NewImageQueryBuilder() + isMissing := "tags" + imageFilter := models.ImageFilterType{ + IsMissing: &isMissing, + } + + q := getImageStringValue(imageIdxWithTwoTags, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + images, _ := sqb.Query(&imageFilter, &findFilter) + + assert.Len(t, images, 0) + + findFilter.Q = nil + images, _ = sqb.Query(&imageFilter, &findFilter) + + assert.True(t, len(images) > 0) +} + +func TestImageQueryIsMissingRating(t *testing.T) { + sqb := models.NewImageQueryBuilder() + isMissing := "rating" + imageFilter := models.ImageFilterType{ + IsMissing: &isMissing, + } + + images, _ := sqb.Query(&imageFilter, nil) + + assert.True(t, len(images) > 0) + + // ensure rating is null + for _, image := range images { + assert.True(t, !image.Rating.Valid) + } +} + +func TestImageQueryPerformers(t *testing.T) { + sqb := models.NewImageQueryBuilder() + performerCriterion := models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(performerIDs[performerIdxWithImage]), + strconv.Itoa(performerIDs[performerIdx1WithImage]), + }, + Modifier: models.CriterionModifierIncludes, + } + + imageFilter := models.ImageFilterType{ + Performers: &performerCriterion, + } + + images, _ := sqb.Query(&imageFilter, nil) + + assert.Len(t, images, 2) + + // ensure ids are correct + for _, image := range images { + assert.True(t, image.ID == imageIDs[imageIdxWithPerformer] || image.ID == imageIDs[imageIdxWithTwoPerformers]) + } + + performerCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(performerIDs[performerIdx1WithImage]), + strconv.Itoa(performerIDs[performerIdx2WithImage]), + }, + Modifier: models.CriterionModifierIncludesAll, + } + + images, _ = sqb.Query(&imageFilter, nil) + + assert.Len(t, images, 
1) + assert.Equal(t, imageIDs[imageIdxWithTwoPerformers], images[0].ID) + + performerCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(performerIDs[performerIdx1WithImage]), + }, + Modifier: models.CriterionModifierExcludes, + } + + q := getImageStringValue(imageIdxWithTwoPerformers, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + images, _ = sqb.Query(&imageFilter, &findFilter) + assert.Len(t, images, 0) +} + +func TestImageQueryTags(t *testing.T) { + sqb := models.NewImageQueryBuilder() + tagCriterion := models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithImage]), + strconv.Itoa(tagIDs[tagIdx1WithImage]), + }, + Modifier: models.CriterionModifierIncludes, + } + + imageFilter := models.ImageFilterType{ + Tags: &tagCriterion, + } + + images, _ := sqb.Query(&imageFilter, nil) + + assert.Len(t, images, 2) + + // ensure ids are correct + for _, image := range images { + assert.True(t, image.ID == imageIDs[imageIdxWithTag] || image.ID == imageIDs[imageIdxWithTwoTags]) + } + + tagCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithImage]), + strconv.Itoa(tagIDs[tagIdx2WithImage]), + }, + Modifier: models.CriterionModifierIncludesAll, + } + + images, _ = sqb.Query(&imageFilter, nil) + + assert.Len(t, images, 1) + assert.Equal(t, imageIDs[imageIdxWithTwoTags], images[0].ID) + + tagCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithImage]), + }, + Modifier: models.CriterionModifierExcludes, + } + + q := getImageStringValue(imageIdxWithTwoTags, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + images, _ = sqb.Query(&imageFilter, &findFilter) + assert.Len(t, images, 0) +} + +func TestImageQueryStudio(t *testing.T) { + sqb := models.NewImageQueryBuilder() + studioCriterion := models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithImage]), + }, + Modifier: models.CriterionModifierIncludes, + } + + imageFilter := models.ImageFilterType{ + Studios: &studioCriterion, + } + + images, _ := sqb.Query(&imageFilter, nil) + + assert.Len(t, images, 1) + + // ensure id is correct + assert.Equal(t, imageIDs[imageIdxWithStudio], images[0].ID) + + studioCriterion = models.MultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithImage]), + }, + Modifier: models.CriterionModifierExcludes, + } + + q := getImageStringValue(imageIdxWithStudio, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + images, _ = sqb.Query(&imageFilter, &findFilter) + assert.Len(t, images, 0) +} + +func TestImageQuerySorting(t *testing.T) { + sort := titleField + direction := models.SortDirectionEnumAsc + findFilter := models.FindFilterType{ + Sort: &sort, + Direction: &direction, + } + + sqb := models.NewImageQueryBuilder() + images, _ := sqb.Query(nil, &findFilter) + + // images should be in same order as indexes + firstImage := images[0] + lastImage := images[len(images)-1] + + assert.Equal(t, imageIDs[0], firstImage.ID) + assert.Equal(t, imageIDs[len(imageIDs)-1], lastImage.ID) + + // sort in descending order + direction = models.SortDirectionEnumDesc + + images, _ = sqb.Query(nil, &findFilter) + firstImage = images[0] + lastImage = images[len(images)-1] + + assert.Equal(t, imageIDs[len(imageIDs)-1], firstImage.ID) + assert.Equal(t, imageIDs[0], lastImage.ID) +} + +func TestImageQueryPagination(t *testing.T) { + perPage := 1 + findFilter := models.FindFilterType{ + PerPage: &perPage, + } + + sqb := 
models.NewImageQueryBuilder() + images, _ := sqb.Query(nil, &findFilter) + + assert.Len(t, images, 1) + + firstID := images[0].ID + + page := 2 + findFilter.Page = &page + images, _ = sqb.Query(nil, &findFilter) + + assert.Len(t, images, 1) + secondID := images[0].ID + assert.NotEqual(t, firstID, secondID) + + perPage = 2 + page = 1 + + images, _ = sqb.Query(nil, &findFilter) + assert.Len(t, images, 2) + assert.Equal(t, firstID, images[0].ID) + assert.Equal(t, secondID, images[1].ID) +} + +func TestImageCountByTagID(t *testing.T) { + sqb := models.NewImageQueryBuilder() + + imageCount, err := sqb.CountByTagID(tagIDs[tagIdxWithImage]) + + if err != nil { + t.Fatalf("error calling CountByTagID: %s", err.Error()) + } + + assert.Equal(t, 1, imageCount) + + imageCount, err = sqb.CountByTagID(0) + + if err != nil { + t.Fatalf("error calling CountByTagID: %s", err.Error()) + } + + assert.Equal(t, 0, imageCount) +} + +func TestImageCountByStudioID(t *testing.T) { + sqb := models.NewImageQueryBuilder() + + imageCount, err := sqb.CountByStudioID(studioIDs[studioIdxWithImage]) + + if err != nil { + t.Fatalf("error calling CountByStudioID: %s", err.Error()) + } + + assert.Equal(t, 1, imageCount) + + imageCount, err = sqb.CountByStudioID(0) + + if err != nil { + t.Fatalf("error calling CountByStudioID: %s", err.Error()) + } + + assert.Equal(t, 0, imageCount) +} + +func TestImageFindByPerformerID(t *testing.T) { + sqb := models.NewImageQueryBuilder() + + images, err := sqb.FindByPerformerID(performerIDs[performerIdxWithImage]) + + if err != nil { + t.Fatalf("error calling FindByPerformerID: %s", err.Error()) + } + + assert.Len(t, images, 1) + assert.Equal(t, imageIDs[imageIdxWithPerformer], images[0].ID) + + images, err = sqb.FindByPerformerID(0) + + if err != nil { + t.Fatalf("error calling FindByPerformerID: %s", err.Error()) + } + + assert.Len(t, images, 0) +} + +func TestImageFindByStudioID(t *testing.T) { + sqb := models.NewImageQueryBuilder() + + images, err := sqb.FindByStudioID(studioIDs[studioIdxWithImage]) + + if err != nil { + t.Fatalf("error calling FindByStudioID: %s", err.Error()) + } + + assert.Len(t, images, 1) + assert.Equal(t, imageIDs[imageIdxWithStudio], images[0].ID) + + images, err = sqb.FindByStudioID(0) + + if err != nil { + t.Fatalf("error calling FindByStudioID: %s", err.Error()) + } + + assert.Len(t, images, 0) +} + +// TODO Update +// TODO IncrementOCounter +// TODO DecrementOCounter +// TODO ResetOCounter +// TODO Destroy +// TODO FindByChecksum +// TODO Count +// TODO SizeCount +// TODO All diff --git a/pkg/models/querybuilder_joins.go b/pkg/models/querybuilder_joins.go index 416b85a18..db9973da9 100644 --- a/pkg/models/querybuilder_joins.go +++ b/pkg/models/querybuilder_joins.go @@ -365,3 +365,523 @@ func (qb *JoinsQueryBuilder) DestroyScenesMarkers(sceneID int, tx *sqlx.Tx) erro return err } + +func (qb *JoinsQueryBuilder) GetImagePerformers(imageID int, tx *sqlx.Tx) ([]PerformersImages, error) { + ensureTx(tx) + + // Select the existing performer joins for the image + query := `SELECT * from performers_images WHERE image_id = ?` + + var rows *sqlx.Rows + var err error + if tx != nil { + rows, err = tx.Queryx(query, imageID) + } else { + rows, err = database.DB.Queryx(query, imageID) + } + + if err != nil && err != sql.ErrNoRows { + return nil, err + } + defer rows.Close() + + performerImages := make([]PerformersImages, 0) + for rows.Next() { + performerImage := PerformersImages{} + if err := rows.StructScan(&performerImage); err != nil { + return nil, err + } + 
performerImages = append(performerImages, performerImage) + } + + if err := rows.Err(); err != nil { + return nil, err + } + + return performerImages, nil +} + +func (qb *JoinsQueryBuilder) CreatePerformersImages(newJoins []PerformersImages, tx *sqlx.Tx) error { + ensureTx(tx) + for _, join := range newJoins { + _, err := tx.NamedExec( + `INSERT INTO performers_images (performer_id, image_id) VALUES (:performer_id, :image_id)`, + join, + ) + if err != nil { + return err + } + } + return nil +} + +// AddPerformerImage adds a performer to an image. It does not make any change +// if the performer already exists on the image. It returns true if the +// performer was added. +func (qb *JoinsQueryBuilder) AddPerformerImage(imageID int, performerID int, tx *sqlx.Tx) (bool, error) { + ensureTx(tx) + + existingPerformers, err := qb.GetImagePerformers(imageID, tx) + + if err != nil { + return false, err + } + + // ensure not already present + for _, p := range existingPerformers { + if p.PerformerID == performerID && p.ImageID == imageID { + return false, nil + } + } + + performerJoin := PerformersImages{ + PerformerID: performerID, + ImageID: imageID, + } + performerJoins := append(existingPerformers, performerJoin) + + err = qb.UpdatePerformersImages(imageID, performerJoins, tx) + + return err == nil, err +} + +func (qb *JoinsQueryBuilder) UpdatePerformersImages(imageID int, updatedJoins []PerformersImages, tx *sqlx.Tx) error { + ensureTx(tx) + + // Delete the existing joins and then create new ones + _, err := tx.Exec("DELETE FROM performers_images WHERE image_id = ?", imageID) + if err != nil { + return err + } + return qb.CreatePerformersImages(updatedJoins, tx) +} + +func (qb *JoinsQueryBuilder) DestroyPerformersImages(imageID int, tx *sqlx.Tx) error { + ensureTx(tx) + + // Delete the existing joins + _, err := tx.Exec("DELETE FROM performers_images WHERE image_id = ?", imageID) + return err +} + +func (qb *JoinsQueryBuilder) GetImageTags(imageID int, tx *sqlx.Tx) ([]ImagesTags, error) { + ensureTx(tx) + + // Select the existing tag joins for the image + query := `SELECT * from images_tags WHERE image_id = ?` + + var rows *sqlx.Rows + var err error + if tx != nil { + rows, err = tx.Queryx(query, imageID) + } else { + rows, err = database.DB.Queryx(query, imageID) + } + + if err != nil && err != sql.ErrNoRows { + return nil, err + } + defer rows.Close() + + imageTags := make([]ImagesTags, 0) + for rows.Next() { + imageTag := ImagesTags{} + if err := rows.StructScan(&imageTag); err != nil { + return nil, err + } + imageTags = append(imageTags, imageTag) + } + + if err := rows.Err(); err != nil { + return nil, err + } + + return imageTags, nil +} + +func (qb *JoinsQueryBuilder) CreateImagesTags(newJoins []ImagesTags, tx *sqlx.Tx) error { + ensureTx(tx) + for _, join := range newJoins { + _, err := tx.NamedExec( + `INSERT INTO images_tags (image_id, tag_id) VALUES (:image_id, :tag_id)`, + join, + ) + if err != nil { + return err + } + } + return nil +} + +func (qb *JoinsQueryBuilder) UpdateImagesTags(imageID int, updatedJoins []ImagesTags, tx *sqlx.Tx) error { + ensureTx(tx) + + // Delete the existing joins and then create new ones + _, err := tx.Exec("DELETE FROM images_tags WHERE image_id = ?", imageID) + if err != nil { + return err + } + return qb.CreateImagesTags(updatedJoins, tx) +} + +// AddImageTag adds a tag to an image. It does not make any change if the tag +// already exists on the image. It returns true if the tag was added. 
+func (qb *JoinsQueryBuilder) AddImageTag(imageID int, tagID int, tx *sqlx.Tx) (bool, error) { + ensureTx(tx) + + existingTags, err := qb.GetImageTags(imageID, tx) + + if err != nil { + return false, err + } + + // ensure not already present + for _, p := range existingTags { + if p.TagID == tagID && p.ImageID == imageID { + return false, nil + } + } + + tagJoin := ImagesTags{ + TagID: tagID, + ImageID: imageID, + } + tagJoins := append(existingTags, tagJoin) + + err = qb.UpdateImagesTags(imageID, tagJoins, tx) + + return err == nil, err +} + +func (qb *JoinsQueryBuilder) DestroyImagesTags(imageID int, tx *sqlx.Tx) error { + ensureTx(tx) + + // Delete the existing joins + _, err := tx.Exec("DELETE FROM images_tags WHERE image_id = ?", imageID) + + return err +} + +func (qb *JoinsQueryBuilder) GetImageGalleries(imageID int, tx *sqlx.Tx) ([]GalleriesImages, error) { + ensureTx(tx) + + // Select the existing gallery joins for the image + query := `SELECT * from galleries_images WHERE image_id = ?` + + var rows *sqlx.Rows + var err error + if tx != nil { + rows, err = tx.Queryx(query, imageID) + } else { + rows, err = database.DB.Queryx(query, imageID) + } + + if err != nil && err != sql.ErrNoRows { + return nil, err + } + defer rows.Close() + + galleryImages := make([]GalleriesImages, 0) + for rows.Next() { + galleriesImages := GalleriesImages{} + if err := rows.StructScan(&galleriesImages); err != nil { + return nil, err + } + galleryImages = append(galleryImages, galleriesImages) + } + + if err := rows.Err(); err != nil { + return nil, err + } + + return galleryImages, nil +} + +func (qb *JoinsQueryBuilder) CreateGalleriesImages(newJoins []GalleriesImages, tx *sqlx.Tx) error { + ensureTx(tx) + for _, join := range newJoins { + _, err := tx.NamedExec( + `INSERT INTO galleries_images (gallery_id, image_id) VALUES (:gallery_id, :image_id)`, + join, + ) + if err != nil { + return err + } + } + return nil +} + +func (qb *JoinsQueryBuilder) UpdateGalleriesImages(imageID int, updatedJoins []GalleriesImages, tx *sqlx.Tx) error { + ensureTx(tx) + + // Delete the existing joins and then create new ones + _, err := tx.Exec("DELETE FROM galleries_images WHERE image_id = ?", imageID) + if err != nil { + return err + } + return qb.CreateGalleriesImages(updatedJoins, tx) +} + +// AddImageGallery adds a gallery to an image. It does not make any change if the gallery +// already exists on the image. It returns true if the gallery was added. +func (qb *JoinsQueryBuilder) AddImageGallery(imageID int, galleryID int, tx *sqlx.Tx) (bool, error) { + ensureTx(tx) + + existingGalleries, err := qb.GetImageGalleries(imageID, tx) + + if err != nil { + return false, err + } + + // ensure not already present + for _, p := range existingGalleries { + if p.GalleryID == galleryID && p.ImageID == imageID { + return false, nil + } + } + + galleryJoin := GalleriesImages{ + GalleryID: galleryID, + ImageID: imageID, + } + galleryJoins := append(existingGalleries, galleryJoin) + + err = qb.UpdateGalleriesImages(imageID, galleryJoins, tx) + + return err == nil, err +} + +// RemoveImageGallery removes a gallery from an image. Returns true if the join +// was removed. 
+func (qb *JoinsQueryBuilder) RemoveImageGallery(imageID int, galleryID int, tx *sqlx.Tx) (bool, error) { + ensureTx(tx) + + existingGalleries, err := qb.GetImageGalleries(imageID, tx) + + if err != nil { + return false, err + } + + // remove the join + var updatedJoins []GalleriesImages + found := false + for _, p := range existingGalleries { + if p.GalleryID == galleryID && p.ImageID == imageID { + found = true + continue + } + + updatedJoins = append(updatedJoins, p) + } + + if found { + err = qb.UpdateGalleriesImages(imageID, updatedJoins, tx) + } + + return found && err == nil, err +} + +func (qb *JoinsQueryBuilder) DestroyImageGalleries(imageID int, tx *sqlx.Tx) error { + ensureTx(tx) + + // Delete the existing joins + _, err := tx.Exec("DELETE FROM galleries_images WHERE image_id = ?", imageID) + + return err +} + +func (qb *JoinsQueryBuilder) GetGalleryPerformers(galleryID int, tx *sqlx.Tx) ([]PerformersGalleries, error) { + ensureTx(tx) + + // Delete the existing joins and then create new ones + query := `SELECT * from performers_galleries WHERE gallery_id = ?` + + var rows *sqlx.Rows + var err error + if tx != nil { + rows, err = tx.Queryx(query, galleryID) + } else { + rows, err = database.DB.Queryx(query, galleryID) + } + + if err != nil && err != sql.ErrNoRows { + return nil, err + } + defer rows.Close() + + performerGalleries := make([]PerformersGalleries, 0) + for rows.Next() { + performerGallery := PerformersGalleries{} + if err := rows.StructScan(&performerGallery); err != nil { + return nil, err + } + performerGalleries = append(performerGalleries, performerGallery) + } + + if err := rows.Err(); err != nil { + return nil, err + } + + return performerGalleries, nil +} + +func (qb *JoinsQueryBuilder) CreatePerformersGalleries(newJoins []PerformersGalleries, tx *sqlx.Tx) error { + ensureTx(tx) + for _, join := range newJoins { + _, err := tx.NamedExec( + `INSERT INTO performers_galleries (performer_id, gallery_id) VALUES (:performer_id, :gallery_id)`, + join, + ) + if err != nil { + return err + } + } + return nil +} + +// AddPerformerGallery adds a performer to a gallery. It does not make any change +// if the performer already exists on the gallery. It returns true if gallery +// performer was added. 
+func (qb *JoinsQueryBuilder) AddPerformerGallery(galleryID int, performerID int, tx *sqlx.Tx) (bool, error) { + ensureTx(tx) + + existingPerformers, err := qb.GetGalleryPerformers(galleryID, tx) + + if err != nil { + return false, err + } + + // ensure not already present + for _, p := range existingPerformers { + if p.PerformerID == performerID && p.GalleryID == galleryID { + return false, nil + } + } + + performerJoin := PerformersGalleries{ + PerformerID: performerID, + GalleryID: galleryID, + } + performerJoins := append(existingPerformers, performerJoin) + + err = qb.UpdatePerformersGalleries(galleryID, performerJoins, tx) + + return err == nil, err +} + +func (qb *JoinsQueryBuilder) UpdatePerformersGalleries(galleryID int, updatedJoins []PerformersGalleries, tx *sqlx.Tx) error { + ensureTx(tx) + + // Delete the existing joins and then create new ones + _, err := tx.Exec("DELETE FROM performers_galleries WHERE gallery_id = ?", galleryID) + if err != nil { + return err + } + return qb.CreatePerformersGalleries(updatedJoins, tx) +} + +func (qb *JoinsQueryBuilder) DestroyPerformersGalleries(galleryID int, tx *sqlx.Tx) error { + ensureTx(tx) + + // Delete the existing joins + _, err := tx.Exec("DELETE FROM performers_galleries WHERE gallery_id = ?", galleryID) + return err +} + +func (qb *JoinsQueryBuilder) GetGalleryTags(galleryID int, tx *sqlx.Tx) ([]GalleriesTags, error) { + ensureTx(tx) + + // Delete the existing joins and then create new ones + query := `SELECT * from galleries_tags WHERE gallery_id = ?` + + var rows *sqlx.Rows + var err error + if tx != nil { + rows, err = tx.Queryx(query, galleryID) + } else { + rows, err = database.DB.Queryx(query, galleryID) + } + + if err != nil && err != sql.ErrNoRows { + return nil, err + } + defer rows.Close() + + galleryTags := make([]GalleriesTags, 0) + for rows.Next() { + galleryTag := GalleriesTags{} + if err := rows.StructScan(&galleryTag); err != nil { + return nil, err + } + galleryTags = append(galleryTags, galleryTag) + } + + if err := rows.Err(); err != nil { + return nil, err + } + + return galleryTags, nil +} + +func (qb *JoinsQueryBuilder) CreateGalleriesTags(newJoins []GalleriesTags, tx *sqlx.Tx) error { + ensureTx(tx) + for _, join := range newJoins { + _, err := tx.NamedExec( + `INSERT INTO galleries_tags (gallery_id, tag_id) VALUES (:gallery_id, :tag_id)`, + join, + ) + if err != nil { + return err + } + } + return nil +} + +func (qb *JoinsQueryBuilder) UpdateGalleriesTags(galleryID int, updatedJoins []GalleriesTags, tx *sqlx.Tx) error { + ensureTx(tx) + + // Delete the existing joins and then create new ones + _, err := tx.Exec("DELETE FROM galleries_tags WHERE gallery_id = ?", galleryID) + if err != nil { + return err + } + return qb.CreateGalleriesTags(updatedJoins, tx) +} + +// AddGalleryTag adds a tag to a gallery. It does not make any change if the tag +// already exists on the gallery. It returns true if gallery tag was added. 
+func (qb *JoinsQueryBuilder) AddGalleryTag(galleryID int, tagID int, tx *sqlx.Tx) (bool, error) { + ensureTx(tx) + + existingTags, err := qb.GetGalleryTags(galleryID, tx) + + if err != nil { + return false, err + } + + // ensure not already present + for _, p := range existingTags { + if p.TagID == tagID && p.GalleryID == galleryID { + return false, nil + } + } + + tagJoin := GalleriesTags{ + TagID: tagID, + GalleryID: galleryID, + } + tagJoins := append(existingTags, tagJoin) + + err = qb.UpdateGalleriesTags(galleryID, tagJoins, tx) + + return err == nil, err +} + +func (qb *JoinsQueryBuilder) DestroyGalleriesTags(galleryID int, tx *sqlx.Tx) error { + ensureTx(tx) + + // Delete the existing joins + _, err := tx.Exec("DELETE FROM galleries_tags WHERE gallery_id = ?", galleryID) + + return err +} diff --git a/pkg/models/querybuilder_performer.go b/pkg/models/querybuilder_performer.go index 321b38e11..4c50e3699 100644 --- a/pkg/models/querybuilder_performer.go +++ b/pkg/models/querybuilder_performer.go @@ -104,6 +104,24 @@ func (qb *PerformerQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]*Per return qb.queryPerformers(query, args, tx) } +func (qb *PerformerQueryBuilder) FindByImageID(imageID int, tx *sqlx.Tx) ([]*Performer, error) { + query := selectAll("performers") + ` + LEFT JOIN performers_images as images_join on images_join.performer_id = performers.id + WHERE images_join.image_id = ? + ` + args := []interface{}{imageID} + return qb.queryPerformers(query, args, tx) +} + +func (qb *PerformerQueryBuilder) FindByGalleryID(galleryID int, tx *sqlx.Tx) ([]*Performer, error) { + query := selectAll("performers") + ` + LEFT JOIN performers_galleries as galleries_join on galleries_join.performer_id = performers.id + WHERE galleries_join.gallery_id = ? + ` + args := []interface{}{galleryID} + return qb.queryPerformers(query, args, tx) +} + func (qb *PerformerQueryBuilder) FindNameBySceneID(sceneID int, tx *sqlx.Tx) ([]*Performer, error) { query := ` SELECT performers.name FROM performers diff --git a/pkg/models/querybuilder_sql.go b/pkg/models/querybuilder_sql.go index 78e719292..79f9f5135 100644 --- a/pkg/models/querybuilder_sql.go +++ b/pkg/models/querybuilder_sql.go @@ -418,6 +418,8 @@ func sqlGenKeys(i interface{}, partial bool) string { if partial || t != 0 { query = append(query, fmt.Sprintf("%s=:%s", key, key)) } + case bool: + query = append(query, fmt.Sprintf("%s=:%s", key, key)) case SQLiteTimestamp: if partial || !t.Timestamp.IsZero() { query = append(query, fmt.Sprintf("%s=:%s", key, key)) diff --git a/pkg/models/querybuilder_tag.go b/pkg/models/querybuilder_tag.go index 53292da03..de6d78b2f 100644 --- a/pkg/models/querybuilder_tag.go +++ b/pkg/models/querybuilder_tag.go @@ -120,6 +120,30 @@ func (qb *TagQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]*Tag, erro return qb.queryTags(query, args, tx) } +func (qb *TagQueryBuilder) FindByImageID(imageID int, tx *sqlx.Tx) ([]*Tag, error) { + query := ` + SELECT tags.* FROM tags + LEFT JOIN images_tags as images_join on images_join.tag_id = tags.id + WHERE images_join.image_id = ? + GROUP BY tags.id + ` + query += qb.getTagSort(nil) + args := []interface{}{imageID} + return qb.queryTags(query, args, tx) +} + +func (qb *TagQueryBuilder) FindByGalleryID(galleryID int, tx *sqlx.Tx) ([]*Tag, error) { + query := ` + SELECT tags.* FROM tags + LEFT JOIN galleries_tags as galleries_join on galleries_join.tag_id = tags.id + WHERE galleries_join.gallery_id = ? 
+ GROUP BY tags.id + ` + query += qb.getTagSort(nil) + args := []interface{}{galleryID} + return qb.queryTags(query, args, tx) +} + func (qb *TagQueryBuilder) FindBySceneMarkerID(sceneMarkerID int, tx *sqlx.Tx) ([]*Tag, error) { query := ` SELECT tags.* FROM tags diff --git a/pkg/models/querybuilder_tag_test.go b/pkg/models/querybuilder_tag_test.go index 83357600f..02b431ce7 100644 --- a/pkg/models/querybuilder_tag_test.go +++ b/pkg/models/querybuilder_tag_test.go @@ -116,7 +116,7 @@ func TestTagQueryIsMissingImage(t *testing.T) { IsMissing: &isMissing, } - q := getTagStringValue(tagIdxWithImage, "name") + q := getTagStringValue(tagIdxWithCoverImage, "name") findFilter := models.FindFilterType{ Q: &q, } @@ -130,7 +130,7 @@ func TestTagQueryIsMissingImage(t *testing.T) { // ensure non of the ids equal the one with image for _, tag := range tags { - assert.NotEqual(t, tagIDs[tagIdxWithImage], tag.ID) + assert.NotEqual(t, tagIDs[tagIdxWithCoverImage], tag.ID) } } diff --git a/pkg/models/setup_test.go b/pkg/models/setup_test.go index 36c53905d..489cf54fa 100644 --- a/pkg/models/setup_test.go +++ b/pkg/models/setup_test.go @@ -17,21 +17,24 @@ import ( "github.com/stashapp/stash/pkg/database" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/modelstest" "github.com/stashapp/stash/pkg/utils" ) const totalScenes = 12 -const performersNameCase = 3 +const totalImages = 6 +const performersNameCase = 6 const performersNameNoCase = 2 const moviesNameCase = 2 const moviesNameNoCase = 1 -const totalGalleries = 2 +const totalGalleries = 3 const tagsNameNoCase = 2 -const tagsNameCase = 6 -const studiosNameCase = 4 +const tagsNameCase = 9 +const studiosNameCase = 5 const studiosNameNoCase = 1 var sceneIDs []int +var imageIDs []int var performerIDs []int var movieIDs []int var galleryIDs []int @@ -53,13 +56,23 @@ const sceneIdxWithTwoTags = 5 const sceneIdxWithStudio = 6 const sceneIdxWithMarker = 7 +const imageIdxWithGallery = 0 +const imageIdxWithPerformer = 1 +const imageIdxWithTwoPerformers = 2 +const imageIdxWithTag = 3 +const imageIdxWithTwoTags = 4 +const imageIdxWithStudio = 5 + const performerIdxWithScene = 0 const performerIdx1WithScene = 1 const performerIdx2WithScene = 2 +const performerIdxWithImage = 3 +const performerIdx1WithImage = 4 +const performerIdx2WithImage = 5 // performers with dup names start from the end -const performerIdx1WithDupName = 3 -const performerIdxWithDupName = 4 +const performerIdx1WithDupName = 6 +const performerIdxWithDupName = 7 const movieIdxWithScene = 0 const movieIdxWithStudio = 1 @@ -68,25 +81,30 @@ const movieIdxWithStudio = 1 const movieIdxWithDupName = 2 const galleryIdxWithScene = 0 +const galleryIdxWithImage = 1 const tagIdxWithScene = 0 const tagIdx1WithScene = 1 const tagIdx2WithScene = 2 const tagIdxWithPrimaryMarker = 3 const tagIdxWithMarker = 4 -const tagIdxWithImage = 5 +const tagIdxWithCoverImage = 5 +const tagIdxWithImage = 6 +const tagIdx1WithImage = 7 +const tagIdx2WithImage = 8 // tags with dup names start from the end -const tagIdx1WithDupName = 6 -const tagIdxWithDupName = 7 +const tagIdx1WithDupName = 9 +const tagIdxWithDupName = 10 const studioIdxWithScene = 0 const studioIdxWithMovie = 1 const studioIdxWithChildStudio = 2 const studioIdxWithParentStudio = 3 +const studioIdxWithImage = 4 // studios with dup names start from the end -const studioIdxWithDupName = 4 +const studioIdxWithDupName = 5 const markerIdxWithScene = 0 @@ -144,6 +162,11 @@ func populateDB() error { return err } + if err := createImages(tx, 
totalImages); err != nil { + tx.Rollback() + return err + } + if err := createGalleries(tx, totalGalleries); err != nil { tx.Rollback() return err @@ -164,7 +187,7 @@ func populateDB() error { return err } - if err := addTagImage(tx, tagIdxWithImage); err != nil { + if err := addTagImage(tx, tagIdxWithCoverImage); err != nil { tx.Rollback() return err } @@ -207,6 +230,26 @@ func populateDB() error { return err } + if err := linkImageGallery(tx, imageIdxWithGallery, galleryIdxWithImage); err != nil { + tx.Rollback() + return err + } + + if err := linkImagePerformers(tx); err != nil { + tx.Rollback() + return err + } + + if err := linkImageTags(tx); err != nil { + tx.Rollback() + return err + } + + if err := linkImageStudio(tx, imageIdxWithStudio, studioIdxWithImage); err != nil { + tx.Rollback() + return err + } + if err := linkMovieStudio(tx, movieIdxWithStudio, studioIdxWithMovie); err != nil { tx.Rollback() return err @@ -233,12 +276,12 @@ func getSceneStringValue(index int, field string) string { return fmt.Sprintf("scene_%04d_%s", index, field) } -func getSceneRating(index int) sql.NullInt64 { +func getRating(index int) sql.NullInt64 { rating := index % 6 return sql.NullInt64{Int64: int64(rating), Valid: rating > 0} } -func getSceneOCounter(index int) int { +func getOCounter(index int) int { return index % 3 } @@ -252,7 +295,7 @@ func getSceneDuration(index int) sql.NullFloat64 { } } -func getSceneHeight(index int) sql.NullInt64 { +func getHeight(index int) sql.NullInt64 { heights := []int64{0, 200, 240, 300, 480, 700, 720, 800, 1080, 1500, 2160, 3000} height := heights[index%len(heights)] return sql.NullInt64{ @@ -279,10 +322,10 @@ func createScenes(tx *sqlx.Tx, n int) error { Title: sql.NullString{String: getSceneStringValue(i, titleField), Valid: true}, Checksum: sql.NullString{String: getSceneStringValue(i, checksumField), Valid: true}, Details: sql.NullString{String: getSceneStringValue(i, "Details"), Valid: true}, - Rating: getSceneRating(i), - OCounter: getSceneOCounter(i), + Rating: getRating(i), + OCounter: getOCounter(i), Duration: getSceneDuration(i), - Height: getSceneHeight(i), + Height: getHeight(i), Date: getSceneDate(i), } @@ -298,6 +341,35 @@ func createScenes(tx *sqlx.Tx, n int) error { return nil } +func getImageStringValue(index int, field string) string { + return fmt.Sprintf("image_%04d_%s", index, field) +} + +func createImages(tx *sqlx.Tx, n int) error { + qb := models.NewImageQueryBuilder() + + for i := 0; i < n; i++ { + image := models.Image{ + Path: getImageStringValue(i, pathField), + Title: sql.NullString{String: getImageStringValue(i, titleField), Valid: true}, + Checksum: getImageStringValue(i, checksumField), + Rating: getRating(i), + OCounter: getOCounter(i), + Height: getHeight(i), + } + + created, err := qb.Create(image, tx) + + if err != nil { + return fmt.Errorf("Error creating image %v+: %s", image, err.Error()) + } + + imageIDs = append(imageIDs, created.ID) + } + + return nil +} + func getGalleryStringValue(index int, field string) string { return "gallery_" + strconv.FormatInt(int64(index), 10) + "_" + field } @@ -307,7 +379,7 @@ func createGalleries(tx *sqlx.Tx, n int) error { for i := 0; i < n; i++ { gallery := models.Gallery{ - Path: getGalleryStringValue(i, pathField), + Path: modelstest.NullString(getGalleryStringValue(i, pathField)), Checksum: getGalleryStringValue(i, checksumField), } @@ -591,7 +663,7 @@ func linkScenePerformer(tx *sqlx.Tx, sceneIndex, performerIndex int) error { func linkSceneGallery(tx *sqlx.Tx, sceneIndex, galleryIndex 
int) error { gqb := models.NewGalleryQueryBuilder() - gallery, err := gqb.Find(galleryIDs[galleryIndex]) + gallery, err := gqb.Find(galleryIDs[galleryIndex], nil) if err != nil { return fmt.Errorf("error finding gallery: %s", err.Error()) @@ -640,6 +712,68 @@ func linkSceneStudio(tx *sqlx.Tx, sceneIndex, studioIndex int) error { return err } +func linkImageGallery(tx *sqlx.Tx, imageIndex, galleryIndex int) error { + jqb := models.NewJoinsQueryBuilder() + + _, err := jqb.AddImageGallery(imageIDs[imageIndex], galleryIDs[galleryIndex], tx) + + return err +} + +func linkImageTags(tx *sqlx.Tx) error { + if err := linkImageTag(tx, imageIdxWithTag, tagIdxWithImage); err != nil { + return err + } + if err := linkImageTag(tx, imageIdxWithTwoTags, tagIdx1WithImage); err != nil { + return err + } + if err := linkImageTag(tx, imageIdxWithTwoTags, tagIdx2WithImage); err != nil { + return err + } + + return nil +} + +func linkImageTag(tx *sqlx.Tx, imageIndex, tagIndex int) error { + jqb := models.NewJoinsQueryBuilder() + + _, err := jqb.AddImageTag(imageIDs[imageIndex], tagIDs[tagIndex], tx) + return err +} + +func linkImageStudio(tx *sqlx.Tx, imageIndex, studioIndex int) error { + sqb := models.NewImageQueryBuilder() + + image := models.ImagePartial{ + ID: imageIDs[imageIndex], + StudioID: &sql.NullInt64{Int64: int64(studioIDs[studioIndex]), Valid: true}, + } + _, err := sqb.Update(image, tx) + + return err +} + +func linkImagePerformers(tx *sqlx.Tx) error { + if err := linkImagePerformer(tx, imageIdxWithPerformer, performerIdxWithImage); err != nil { + return err + } + if err := linkImagePerformer(tx, imageIdxWithTwoPerformers, performerIdx1WithImage); err != nil { + return err + } + if err := linkImagePerformer(tx, imageIdxWithTwoPerformers, performerIdx2WithImage); err != nil { + return err + } + + return nil +} + +func linkImagePerformer(tx *sqlx.Tx, imageIndex, performerIndex int) error { + jqb := models.NewJoinsQueryBuilder() + + _, err := jqb.AddPerformerImage(imageIDs[imageIndex], performerIDs[performerIndex], tx) + return err +} + func linkMovieStudio(tx *sqlx.Tx, movieIndex, studioIndex int) error { mqb := models.NewMovieQueryBuilder() diff --git a/pkg/models/tag.go b/pkg/models/tag.go index 044b667e4..5816f60b7 100644 --- a/pkg/models/tag.go +++ b/pkg/models/tag.go @@ -9,6 +9,8 @@ type TagReader interface { FindMany(ids []int) ([]*Tag, error) FindBySceneID(sceneID int) ([]*Tag, error) FindBySceneMarkerID(sceneMarkerID int) ([]*Tag, error) + FindByImageID(imageID int) ([]*Tag, error) + FindByGalleryID(galleryID int) ([]*Tag, error) FindByName(name string, nocase bool) (*Tag, error) FindByNames(names []string, nocase bool) ([]*Tag, error) // Count() (int, error) @@ -75,6 +77,14 @@ func (t *tagReaderWriter) FindBySceneID(sceneID int) ([]*Tag, error) { return t.qb.FindBySceneID(sceneID, t.tx) } +func (t *tagReaderWriter) FindByImageID(imageID int) ([]*Tag, error) { + return t.qb.FindByImageID(imageID, t.tx) +} + +func (t *tagReaderWriter) FindByGalleryID(imageID int) ([]*Tag, error) { + return t.qb.FindByGalleryID(imageID, t.tx) +} + func (t *tagReaderWriter) Create(newTag Tag) (*Tag, error) { return t.qb.Create(newTag, t.tx) } diff --git a/pkg/scene/export.go b/pkg/scene/export.go index 8068a0be8..1274c4415 100644 --- a/pkg/scene/export.go +++ b/pkg/scene/export.go @@ -122,7 +122,7 @@ func GetStudioName(reader models.StudioReader, scene *models.Scene) (string, err return "", nil } -// GetGalleryChecksum returns the checksum of the provided scene. 
It returns an +// GetGalleryChecksum returns the checksum of the provided gallery. It returns an // empty string if there is no gallery assigned to the scene. func GetGalleryChecksum(reader models.GalleryReader, scene *models.Scene) (string, error) { gallery, err := reader.FindBySceneID(scene.ID) diff --git a/pkg/tag/export.go b/pkg/tag/export.go index 39b28185f..9671ee869 100644 --- a/pkg/tag/export.go +++ b/pkg/tag/export.go @@ -27,3 +27,21 @@ func ToJSON(reader models.TagReader, tag *models.Tag) (*jsonschema.Tag, error) { return &newTagJSON, nil } + +func GetIDs(tags []*models.Tag) []int { + var results []int + for _, tag := range tags { + results = append(results, tag.ID) + } + + return results +} + +func GetNames(tags []*models.Tag) []string { + var results []string + for _, tag := range tags { + results = append(results, tag.Name) + } + + return results +} diff --git a/pkg/utils/crypto.go b/pkg/utils/crypto.go index 06e11c9f9..8f20ad9b2 100644 --- a/pkg/utils/crypto.go +++ b/pkg/utils/crypto.go @@ -26,8 +26,12 @@ func MD5FromFilePath(filePath string) (string, error) { } defer f.Close() + return MD5FromReader(f) +} + +func MD5FromReader(src io.Reader) (string, error) { h := md5.New() - if _, err := io.Copy(h, f); err != nil { + if _, err := io.Copy(h, src); err != nil { return "", err } checksum := h.Sum(nil) diff --git a/ui/v2.5/src/App.tsx b/ui/v2.5/src/App.tsx index bdab5931d..28d39bf18 100755 --- a/ui/v2.5/src/App.tsx +++ b/ui/v2.5/src/App.tsx @@ -23,6 +23,7 @@ import Studios from "./components/Studios/Studios"; import { SceneFilenameParser } from "./components/SceneFilenameParser/SceneFilenameParser"; import Movies from "./components/Movies/Movies"; import Tags from "./components/Tags/Tags"; +import Images from "./components/Images/Images"; // Set fontawesome/free-solid-svg as default fontawesome icons library.add(fas); @@ -49,6 +50,7 @@ export const App: React.FC = () => { + diff --git a/ui/v2.5/src/components/Changelog/versions/v040.md b/ui/v2.5/src/components/Changelog/versions/v040.md index c778db807..9b1ff4f39 100644 --- a/ui/v2.5/src/components/Changelog/versions/v040.md +++ b/ui/v2.5/src/components/Changelog/versions/v040.md @@ -1,4 +1,6 @@ ### ✨ New Features +* Add support for individual images and manual creation of galleries. +* Add various fields to galleries. * Add partial import from zip file. * Add selective scene export. diff --git a/ui/v2.5/src/components/Galleries/DeleteGalleriesDialog.tsx b/ui/v2.5/src/components/Galleries/DeleteGalleriesDialog.tsx new file mode 100644 index 000000000..5496ac48a --- /dev/null +++ b/ui/v2.5/src/components/Galleries/DeleteGalleriesDialog.tsx @@ -0,0 +1,91 @@ +import React, { useState } from "react"; +import { Form } from "react-bootstrap"; +import { useGalleryDestroy } from "src/core/StashService"; +import * as GQL from "src/core/generated-graphql"; +import { Modal } from "src/components/Shared"; +import { useToast } from "src/hooks"; +import { FormattedMessage } from "react-intl"; + +interface IDeleteGalleryDialogProps { + selected: Partial[]; + onClose: (confirmed: boolean) => void; +} + +export const DeleteGalleriesDialog: React.FC = ( + props: IDeleteGalleryDialogProps +) => { + const plural = props.selected.length > 1; + + const singleMessageId = "deleteGalleryText"; + const pluralMessageId = "deleteGallerysText"; + + const singleMessage = + "Are you sure you want to delete this gallery? 
Galleries for zip files will be re-added during the next scan unless the zip file is also deleted."; + const pluralMessage = + "Are you sure you want to delete these galleries? Galleries for zip files will be re-added during the next scan unless the zip files are also deleted."; + + const header = plural ? "Delete Galleries" : "Delete Gallery"; + const toastMessage = plural ? "Deleted galleries" : "Deleted gallery"; + const messageId = plural ? pluralMessageId : singleMessageId; + const message = plural ? pluralMessage : singleMessage; + + const [deleteFile, setDeleteFile] = useState(false); + const [deleteGenerated, setDeleteGenerated] = useState(true); + + const Toast = useToast(); + const [deleteGallery] = useGalleryDestroy(getGalleriesDeleteInput()); + + // Network state + const [isDeleting, setIsDeleting] = useState(false); + + function getGalleriesDeleteInput(): GQL.GalleryDestroyInput { + return { + ids: props.selected.map((gallery) => gallery.id!), + delete_file: deleteFile, + delete_generated: deleteGenerated, + }; + } + + async function onDelete() { + setIsDeleting(true); + try { + await deleteGallery(); + Toast.success({ content: toastMessage }); + } catch (e) { + Toast.error(e); + } + setIsDeleting(false); + props.onClose(true); + } + + return ( + props.onClose(false), + text: "Cancel", + variant: "secondary", + }} + isRunning={isDeleting} + > +

+ +

+
+ setDeleteFile(!deleteFile)} + /> + setDeleteGenerated(!deleteGenerated)} + /> + +
+ ); +}; diff --git a/ui/v2.5/src/components/Galleries/EditGalleriesDialog.tsx b/ui/v2.5/src/components/Galleries/EditGalleriesDialog.tsx new file mode 100644 index 000000000..6a7907f50 --- /dev/null +++ b/ui/v2.5/src/components/Galleries/EditGalleriesDialog.tsx @@ -0,0 +1,368 @@ +import React, { useEffect, useState } from "react"; +import { Form, Col, Row } from "react-bootstrap"; +import _ from "lodash"; +import { useBulkGalleryUpdate } from "src/core/StashService"; +import * as GQL from "src/core/generated-graphql"; +import { StudioSelect, Modal } from "src/components/Shared"; +import { useToast } from "src/hooks"; +import { FormUtils } from "src/utils"; +import MultiSet from "../Shared/MultiSet"; +import { RatingStars } from "../Scenes/SceneDetails/RatingStars"; + +interface IListOperationProps { + selected: GQL.GalleryDataFragment[]; + onClose: (applied: boolean) => void; +} + +export const EditGalleriesDialog: React.FC = ( + props: IListOperationProps +) => { + const Toast = useToast(); + const [rating, setRating] = useState(); + const [studioId, setStudioId] = useState(); + const [performerMode, setPerformerMode] = React.useState< + GQL.BulkUpdateIdMode + >(GQL.BulkUpdateIdMode.Add); + const [performerIds, setPerformerIds] = useState(); + const [tagMode, setTagMode] = React.useState( + GQL.BulkUpdateIdMode.Add + ); + const [tagIds, setTagIds] = useState(); + + const [updateGalleries] = useBulkGalleryUpdate(getGalleryInput()); + + // Network state + const [isUpdating, setIsUpdating] = useState(false); + + function makeBulkUpdateIds( + ids: string[], + mode: GQL.BulkUpdateIdMode + ): GQL.BulkUpdateIds { + return { + mode, + ids, + }; + } + + function getGalleryInput(): GQL.BulkGalleryUpdateInput { + // need to determine what we are actually setting on each gallery + const aggregateRating = getRating(props.selected); + const aggregateStudioId = getStudioId(props.selected); + const aggregatePerformerIds = getPerformerIds(props.selected); + const aggregateTagIds = getTagIds(props.selected); + + const galleryInput: GQL.BulkGalleryUpdateInput = { + ids: props.selected.map((gallery) => { + return gallery.id; + }), + }; + + // if rating is undefined + if (rating === undefined) { + // and all galleries have the same rating, then we are unsetting the rating. 
+ if (aggregateRating) { + // an undefined rating is ignored in the server, so set it to 0 instead + galleryInput.rating = 0; + } + // otherwise not setting the rating + } else { + // if rating is set, then we are setting the rating for all + galleryInput.rating = rating; + } + + // if studioId is undefined + if (studioId === undefined) { + // and all galleries have the same studioId, + // then unset the studioId, otherwise ignoring studioId + if (aggregateStudioId) { + // an undefined studio_id is ignored in the server, so set it to empty string instead + galleryInput.studio_id = ""; + } + } else { + // if studioId is set, then we are setting it + galleryInput.studio_id = studioId; + } + + // if performerIds are empty + if ( + performerMode === GQL.BulkUpdateIdMode.Set && + (!performerIds || performerIds.length === 0) + ) { + // and all galleries have the same ids, + if (aggregatePerformerIds.length > 0) { + // then unset the performerIds, otherwise ignore + galleryInput.performer_ids = makeBulkUpdateIds( + performerIds || [], + performerMode + ); + } + } else { + // if performerIds non-empty, then we are setting them + galleryInput.performer_ids = makeBulkUpdateIds( + performerIds || [], + performerMode + ); + } + + // if tagIds non-empty, then we are setting them + if ( + tagMode === GQL.BulkUpdateIdMode.Set && + (!tagIds || tagIds.length === 0) + ) { + // and all galleries have the same ids, + if (aggregateTagIds.length > 0) { + // then unset the tagIds, otherwise ignore + galleryInput.tag_ids = makeBulkUpdateIds(tagIds || [], tagMode); + } + } else { + // if tagIds non-empty, then we are setting them + galleryInput.tag_ids = makeBulkUpdateIds(tagIds || [], tagMode); + } + + return galleryInput; + } + + async function onSave() { + setIsUpdating(true); + try { + await updateGalleries(); + Toast.success({ content: "Updated galleries" }); + props.onClose(true); + } catch (e) { + Toast.error(e); + } + setIsUpdating(false); + } + + function getRating(state: GQL.GalleryDataFragment[]) { + let ret: number | undefined; + let first = true; + + state.forEach((gallery: GQL.GalleryDataFragment) => { + if (first) { + ret = gallery.rating ?? undefined; + first = false; + } else if (ret !== gallery.rating) { + ret = undefined; + } + }); + + return ret; + } + + function getStudioId(state: GQL.GalleryDataFragment[]) { + let ret: string | undefined; + let first = true; + + state.forEach((gallery: GQL.GalleryDataFragment) => { + if (first) { + ret = gallery?.studio?.id; + first = false; + } else { + const studio = gallery?.studio?.id; + if (ret !== studio) { + ret = undefined; + } + } + }); + + return ret; + } + + function getPerformerIds(state: GQL.GalleryDataFragment[]) { + let ret: string[] = []; + let first = true; + + state.forEach((gallery: GQL.GalleryDataFragment) => { + if (first) { + ret = gallery.performers + ? gallery.performers.map((p) => p.id).sort() + : []; + first = false; + } else { + const perfIds = gallery.performers + ? gallery.performers.map((p) => p.id).sort() + : []; + + if (!_.isEqual(ret, perfIds)) { + ret = []; + } + } + }); + + return ret; + } + + function getTagIds(state: GQL.GalleryDataFragment[]) { + let ret: string[] = []; + let first = true; + + state.forEach((gallery: GQL.GalleryDataFragment) => { + if (first) { + ret = gallery.tags ? gallery.tags.map((t) => t.id).sort() : []; + first = false; + } else { + const tIds = gallery.tags ? 
gallery.tags.map((t) => t.id).sort() : []; + + if (!_.isEqual(ret, tIds)) { + ret = []; + } + } + }); + + return ret; + } + + useEffect(() => { + const state = props.selected; + let updateRating: number | undefined; + let updateStudioID: string | undefined; + let updatePerformerIds: string[] = []; + let updateTagIds: string[] = []; + let first = true; + + state.forEach((gallery: GQL.GalleryDataFragment) => { + const galleryRating = gallery.rating; + const GalleriestudioID = gallery?.studio?.id; + const galleryPerformerIDs = (gallery.performers ?? []) + .map((p) => p.id) + .sort(); + const galleryTagIDs = (gallery.tags ?? []).map((p) => p.id).sort(); + + if (first) { + updateRating = galleryRating ?? undefined; + updateStudioID = GalleriestudioID; + updatePerformerIds = galleryPerformerIDs; + updateTagIds = galleryTagIDs; + first = false; + } else { + if (galleryRating !== updateRating) { + updateRating = undefined; + } + if (GalleriestudioID !== updateStudioID) { + updateStudioID = undefined; + } + if (!_.isEqual(galleryPerformerIDs, updatePerformerIds)) { + updatePerformerIds = []; + } + if (!_.isEqual(galleryTagIDs, updateTagIds)) { + updateTagIds = []; + } + } + }); + + setRating(updateRating); + setStudioId(updateStudioID); + if (performerMode === GQL.BulkUpdateIdMode.Set) { + setPerformerIds(updatePerformerIds); + } + + if (tagMode === GQL.BulkUpdateIdMode.Set) { + setTagIds(updateTagIds); + } + }, [props.selected, performerMode, tagMode]); + + function renderMultiSelect( + type: "performers" | "tags", + ids: string[] | undefined + ) { + let mode = GQL.BulkUpdateIdMode.Add; + switch (type) { + case "performers": + mode = performerMode; + break; + case "tags": + mode = tagMode; + break; + } + + return ( + { + const itemIDs = items.map((i) => i.id); + switch (type) { + case "performers": + setPerformerIds(itemIDs); + break; + case "tags": + setTagIds(itemIDs); + break; + } + }} + onSetMode={(newMode) => { + switch (type) { + case "performers": + setPerformerMode(newMode); + break; + case "tags": + setTagMode(newMode); + break; + } + }} + ids={ids ?? []} + mode={mode} + /> + ); + } + + function render() { + return ( + props.onClose(false), + text: "Cancel", + variant: "secondary", + }} + isRunning={isUpdating} + > +
+ + {FormUtils.renderLabel({ + title: "Rating", + })} + + setRating(value)} + disabled={isUpdating} + /> + + + + + {FormUtils.renderLabel({ + title: "Studio", + })} + + + setStudioId(items.length > 0 ? items[0]?.id : undefined) + } + ids={studioId ? [studioId] : []} + isDisabled={isUpdating} + /> + + + + + Performers + {renderMultiSelect("performers", performerIds)} + + + + Tags + {renderMultiSelect("tags", tagIds)} + +
+
+ ); + } + + return render(); +}; diff --git a/ui/v2.5/src/components/Galleries/Galleries.tsx b/ui/v2.5/src/components/Galleries/Galleries.tsx index aad72ddd5..fcdbf66e8 100644 --- a/ui/v2.5/src/components/Galleries/Galleries.tsx +++ b/ui/v2.5/src/components/Galleries/Galleries.tsx @@ -1,12 +1,12 @@ import React from "react"; import { Route, Switch } from "react-router-dom"; -import { Gallery } from "./Gallery"; +import { Gallery } from "./GalleryDetails/Gallery"; import { GalleryList } from "./GalleryList"; const Galleries = () => ( - + ); diff --git a/ui/v2.5/src/components/Galleries/Gallery.tsx b/ui/v2.5/src/components/Galleries/Gallery.tsx deleted file mode 100644 index e586a2587..000000000 --- a/ui/v2.5/src/components/Galleries/Gallery.tsx +++ /dev/null @@ -1,25 +0,0 @@ -import React from "react"; -import { useParams } from "react-router-dom"; -import { useFindGallery } from "src/core/StashService"; -import { LoadingIndicator } from "src/components/Shared"; -import { GalleryViewer } from "./GalleryViewer"; - -interface IGalleryParams { - id: string; -} - -export const Gallery: React.FC = () => { - const { id } = useParams(); - - const { data, error, loading } = useFindGallery(id); - const gallery = data?.findGallery; - - if (loading || !gallery) return ; - if (error) return
{error.message}
; - - return ( -
- -
- ); -}; diff --git a/ui/v2.5/src/components/Galleries/GalleryCard.tsx b/ui/v2.5/src/components/Galleries/GalleryCard.tsx index cf72fe4c6..20a4a60be 100644 --- a/ui/v2.5/src/components/Galleries/GalleryCard.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryCard.tsx @@ -1,26 +1,34 @@ -import { Card, Button, ButtonGroup } from "react-bootstrap"; +import { Card, Button, ButtonGroup, Form } from "react-bootstrap"; import React from "react"; import { Link } from "react-router-dom"; import * as GQL from "src/core/generated-graphql"; import { FormattedPlural } from "react-intl"; +import { useConfiguration } from "src/core/StashService"; import { HoverPopover, Icon, TagLink } from "../Shared"; interface IProps { gallery: GQL.GalleryDataFragment; + selecting?: boolean; + selected: boolean | undefined; zoomIndex: number; + onSelectedChanged: (selected: boolean, shiftKey: boolean) => void; } -export const GalleryCard: React.FC = ({ gallery, zoomIndex }) => { +export const GalleryCard: React.FC = (props) => { + const config = useConfiguration(); + const showStudioAsText = + config?.data?.configuration.interface.showStudioAsText ?? false; + function maybeRenderScenePopoverButton() { - if (!gallery.scene) return; + if (!props.gallery.scene) return; const popoverContent = ( - + ); return ( - + @@ -29,12 +37,84 @@ export const GalleryCard: React.FC = ({ gallery, zoomIndex }) => { ); } + function maybeRenderTagPopoverButton() { + if (props.gallery.tags.length <= 0) return; + + const popoverContent = props.gallery.tags.map((tag) => ( + + )); + + return ( + + + + ); + } + + function maybeRenderPerformerPopoverButton() { + if (props.gallery.performers.length <= 0) return; + + const popoverContent = props.gallery.performers.map((performer) => ( +
+ + {performer.name + + +
+ )); + + return ( + + + + ); + } + + function maybeRenderSceneStudioOverlay() { + if (!props.gallery.studio) return; + + return ( +
+ + {showStudioAsText ? ( + props.gallery.studio.name + ) : ( + {props.gallery.studio.name} + )} + +
+ ); + } + function maybeRenderPopoverButtonGroup() { - if (gallery.scene) { + if ( + props.gallery.scene || + props.gallery.performers.length > 0 || + props.gallery.tags.length > 0 + ) { return ( <>
+ {maybeRenderTagPopoverButton()} + {maybeRenderPerformerPopoverButton()} {maybeRenderScenePopoverButton()} @@ -42,23 +122,97 @@ export const GalleryCard: React.FC = ({ gallery, zoomIndex }) => { } } + function maybeRenderRatingBanner() { + if (!props.gallery.rating) { + return; + } + return ( +
+ RATING: {props.gallery.rating} +
+ ); + } + + function handleImageClick( + event: React.MouseEvent + ) { + const { shiftKey } = event; + + if (props.selecting) { + props.onSelectedChanged(!props.selected, shiftKey); + event.preventDefault(); + } + } + + function handleDrag(event: React.DragEvent) { + if (props.selecting) { + event.dataTransfer.setData("text/plain", ""); + event.dataTransfer.setDragImage(new Image(), 0, 0); + } + } + + function handleDragOver(event: React.DragEvent) { + const ev = event; + const shiftKey = false; + + if (props.selecting && !props.selected) { + props.onSelectedChanged(true, shiftKey); + } + + ev.dataTransfer.dropEffect = "move"; + ev.preventDefault(); + } + + let shiftKey = false; + return ( - - - {gallery.files.length > 0 ? ( - {gallery.path} - ) : undefined} - + + props.onSelectedChanged(!props.selected, shiftKey)} + onClick={(event: React.MouseEvent) => { + // eslint-disable-next-line prefer-destructuring + shiftKey = event.shiftKey; + event.stopPropagation(); + }} + /> + +
+ + {props.gallery.cover ? ( + {props.gallery.title + ) : undefined} + {maybeRenderRatingBanner()} + + {maybeRenderSceneStudioOverlay()} +
-
{gallery.path}
+ +
+ {props.gallery.title ?? props.gallery.path} +
+ - {gallery.files.length}  + {props.gallery.images.length}  diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx new file mode 100644 index 000000000..8b9b74c42 --- /dev/null +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx @@ -0,0 +1,233 @@ +import { Tab, Nav, Dropdown } from "react-bootstrap"; +import React, { useEffect, useState } from "react"; +import { useParams, useHistory, Link } from "react-router-dom"; +import * as GQL from "src/core/generated-graphql"; +import { useFindGallery } from "src/core/StashService"; +import { LoadingIndicator, Icon } from "src/components/Shared"; +import { TextUtils } from "src/utils"; +import * as Mousetrap from "mousetrap"; +import { GalleryEditPanel } from "./GalleryEditPanel"; +import { GalleryDetailPanel } from "./GalleryDetailPanel"; +import { DeleteGalleriesDialog } from "../DeleteGalleriesDialog"; +import { GalleryImagesPanel } from "./GalleryImagesPanel"; +import { GalleryAddPanel } from "./GalleryAddPanel"; + +interface IGalleryParams { + id?: string; + tab?: string; +} + +export const Gallery: React.FC = () => { + const { tab = "images", id = "new" } = useParams(); + const history = useHistory(); + const isNew = id === "new"; + + const [gallery, setGallery] = useState>({}); + const { data, error, loading } = useFindGallery(id); + + const [activeTabKey, setActiveTabKey] = useState("gallery-details-panel"); + const activeRightTabKey = tab === "images" || tab === "add" ? tab : "images"; + const setActiveRightTabKey = (newTab: string | null) => { + if (tab !== newTab) { + const tabParam = newTab === "images" ? "" : `/${newTab}`; + history.replace(`/galleries/${id}${tabParam}`); + } + }; + + const [isDeleteAlertOpen, setIsDeleteAlertOpen] = useState(false); + + useEffect(() => { + if (data?.findGallery) setGallery(data.findGallery); + }, [data]); + + function onDeleteDialogClosed(deleted: boolean) { + setIsDeleteAlertOpen(false); + if (deleted) { + history.push("/galleries"); + } + } + + function maybeRenderDeleteDialog() { + if (isDeleteAlertOpen && gallery) { + return ( + + ); + } + } + + function renderOperations() { + return ( + + + + + + setIsDeleteAlertOpen(true)} + > + Delete Gallery + + + + ); + } + + function renderTabs() { + if (!gallery) { + return; + } + + return ( + k && setActiveTabKey(k)} + > +
+ +
+ + + + + + {/* {gallery.gallery ? ( + + + + ) : ( + "" + )} */} + + setGallery(newGallery)} + onDelete={() => setIsDeleteAlertOpen(true)} + /> + + +
+ ); + } + + function renderRightTabs() { + if (!gallery) { + return; + } + + return ( + k && setActiveRightTabKey(k)} + > +
+ +
+ + + + {/* */} + + + + + + +
+ ); + } + + // set up hotkeys + useEffect(() => { + Mousetrap.bind("a", () => setActiveTabKey("gallery-details-panel")); + Mousetrap.bind("e", () => setActiveTabKey("gallery-edit-panel")); + Mousetrap.bind("f", () => setActiveTabKey("gallery-file-info-panel")); + + return () => { + Mousetrap.unbind("a"); + Mousetrap.unbind("e"); + Mousetrap.unbind("f"); + }; + }); + + if (isNew) + return ( +
+
+

Create Gallery

+ setGallery(newGallery)} + onDelete={() => setIsDeleteAlertOpen(true)} + /> +
+
+ ); + + if (loading || !gallery || !data?.findGallery) { + return ; + } + + if (error) return
{error.message}
; + + return ( +
+ {maybeRenderDeleteDialog()} +
+
+ {gallery.studio && ( +

+ + {`${gallery.studio.name} + +

+ )} +

+ {gallery.title ?? TextUtils.fileNameFromPath(gallery.path ?? "")} +

+
+ {renderTabs()} +
+
{renderRightTabs()}
+
+ ); +}; diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryAddPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryAddPanel.tsx new file mode 100644 index 000000000..f0e9e3adf --- /dev/null +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryAddPanel.tsx @@ -0,0 +1,86 @@ +import React from "react"; +import * as GQL from "src/core/generated-graphql"; +import { GalleriesCriterion } from "src/models/list-filter/criteria/galleries"; +import { ListFilterModel } from "src/models/list-filter/filter"; +import { ImageList } from "src/components/Images/ImageList"; +import { showWhenSelected } from "src/hooks/ListHook"; +import { mutateAddGalleryImages } from "src/core/StashService"; +import { useToast } from "src/hooks"; + +interface IGalleryAddProps { + gallery: Partial; +} + +export const GalleryAddPanel: React.FC = ({ gallery }) => { + const Toast = useToast(); + + function filterHook(filter: ListFilterModel) { + const galleryValue = { + id: gallery.id!, + label: gallery.title ?? gallery.path ?? "", + }; + // if galleries is already present, then we modify it, otherwise add + let galleryCriterion = filter.criteria.find((c) => { + return c.type === "galleries"; + }) as GalleriesCriterion; + + if ( + galleryCriterion && + galleryCriterion.modifier === GQL.CriterionModifier.Excludes + ) { + // add the gallery if not present + if ( + !galleryCriterion.value.find((p) => { + return p.id === gallery.id; + }) + ) { + galleryCriterion.value.push(galleryValue); + } + + galleryCriterion.modifier = GQL.CriterionModifier.Excludes; + } else { + // overwrite + galleryCriterion = new GalleriesCriterion(); + galleryCriterion.modifier = GQL.CriterionModifier.Excludes; + galleryCriterion.value = [galleryValue]; + filter.criteria.push(galleryCriterion); + } + + return filter; + } + + async function addImages( + result: GQL.FindImagesQueryResult, + filter: ListFilterModel, + selectedIds: Set + ) { + try { + await mutateAddGalleryImages({ + gallery_id: gallery.id!, + image_ids: Array.from(selectedIds.values()), + }); + Toast.success({ + content: "Added images", + }); + } catch (e) { + Toast.error(e); + } + } + + const otherOperations = [ + { + text: "Add to Gallery", + onClick: addImages, + isDisplayed: showWhenSelected, + postRefetch: true, + }, + ]; + + return ( + + ); +}; diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx new file mode 100644 index 000000000..a8901bb36 --- /dev/null +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx @@ -0,0 +1,110 @@ +import React from "react"; +import { Link } from "react-router-dom"; +import { FormattedDate } from "react-intl"; +import * as GQL from "src/core/generated-graphql"; +import { TextUtils } from "src/utils"; +import { TagLink } from "src/components/Shared"; +import { PerformerCard } from "src/components/Performers/PerformerCard"; +import { RatingStars } from "src/components/Scenes/SceneDetails/RatingStars"; + +interface IGalleryDetailProps { + gallery: Partial; +} + +export const GalleryDetailPanel: React.FC = (props) => { + function renderDetails() { + if (!props.gallery.details || props.gallery.details === "") return; + return ( + <> +
Details
+

{props.gallery.details}

+ + ); + } + + function renderTags() { + if (!props.gallery.tags || props.gallery.tags.length === 0) return; + const tags = props.gallery.tags.map((tag) => ( + + )); + return ( + <> +
Tags
+ {tags} + + ); + } + + function renderPerformers() { + if (!props.gallery.performers || props.gallery.performers.length === 0) + return; + const cards = props.gallery.performers.map((performer) => ( + + )); + + return ( + <> +
Performers
+
+ {cards} +
+ + ); + } + + // filename should use entire row if there is no studio + const galleryDetailsWidth = props.gallery.studio ? "col-9" : "col-12"; + + return ( + <> +
+
+
+

+ {props.gallery.title ?? + TextUtils.fileNameFromPath(props.gallery.path ?? "")} +

+
+ {props.gallery.date ? ( +
+ +
+ ) : undefined} + {props.gallery.rating ? ( +
+ Rating: +
+ ) : ( + "" + )} +
+ {props.gallery.studio && ( +
+ + {`${props.gallery.studio.name} + +
+ )} +
+
+
+ {renderDetails()} + {renderTags()} + {renderPerformers()} +
+
+ + ); +}; diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx new file mode 100644 index 000000000..ccdda0bb2 --- /dev/null +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx @@ -0,0 +1,267 @@ +import React, { useEffect, useState } from "react"; +import { Button, Form, Col, Row } from "react-bootstrap"; +import * as GQL from "src/core/generated-graphql"; +import { useGalleryCreate, useGalleryUpdate } from "src/core/StashService"; +import { + PerformerSelect, + TagSelect, + StudioSelect, + LoadingIndicator, +} from "src/components/Shared"; +import { useToast } from "src/hooks"; +import { FormUtils, EditableTextUtils } from "src/utils"; +import { RatingStars } from "src/components/Scenes/SceneDetails/RatingStars"; + +interface IProps { + gallery: Partial; + isVisible: boolean; + isNew?: boolean; + onUpdate: (gallery: GQL.GalleryDataFragment) => void; + onDelete: () => void; +} + +export const GalleryEditPanel: React.FC = (props: IProps) => { + const Toast = useToast(); + const [title, setTitle] = useState(); + const [details, setDetails] = useState(); + const [url, setUrl] = useState(); + const [date, setDate] = useState(); + const [rating, setRating] = useState(); + const [studioId, setStudioId] = useState(); + const [performerIds, setPerformerIds] = useState(); + const [tagIds, setTagIds] = useState(); + + // Network state + const [isLoading, setIsLoading] = useState(true); + + const [createGallery] = useGalleryCreate( + getGalleryInput() as GQL.GalleryCreateInput + ); + const [updateGallery] = useGalleryUpdate( + getGalleryInput() as GQL.GalleryUpdateInput + ); + + useEffect(() => { + if (props.isVisible) { + Mousetrap.bind("s s", () => { + onSave(); + }); + Mousetrap.bind("d d", () => { + props.onDelete(); + }); + + // numeric keypresses get caught by jwplayer, so blur the element + // if the rating sequence is started + Mousetrap.bind("r", () => { + if (document.activeElement instanceof HTMLElement) { + document.activeElement.blur(); + } + + Mousetrap.bind("0", () => setRating(NaN)); + Mousetrap.bind("1", () => setRating(1)); + Mousetrap.bind("2", () => setRating(2)); + Mousetrap.bind("3", () => setRating(3)); + Mousetrap.bind("4", () => setRating(4)); + Mousetrap.bind("5", () => setRating(5)); + + setTimeout(() => { + Mousetrap.unbind("0"); + Mousetrap.unbind("1"); + Mousetrap.unbind("2"); + Mousetrap.unbind("3"); + Mousetrap.unbind("4"); + Mousetrap.unbind("5"); + }, 1000); + }); + + return () => { + Mousetrap.unbind("s s"); + Mousetrap.unbind("d d"); + + Mousetrap.unbind("r"); + }; + } + }); + + function updateGalleryEditState(state: Partial) { + const perfIds = state.performers?.map((performer) => performer.id); + const tIds = state.tags ? state.tags.map((tag) => tag.id) : undefined; + + setTitle(state.title ?? undefined); + setDetails(state.details ?? undefined); + setUrl(state.url ?? undefined); + setDate(state.date ?? undefined); + setRating(state.rating === null ? NaN : state.rating); + setStudioId(state?.studio?.id ?? undefined); + setPerformerIds(perfIds); + setTagIds(tIds); + } + + useEffect(() => { + updateGalleryEditState(props.gallery); + setIsLoading(false); + }, [props.gallery]); + + function getGalleryInput() { + return { + id: props.isNew ? 
undefined : props.gallery.id!,
+      title,
+      details,
+      url,
+      date,
+      rating,
+      studio_id: studioId,
+      performer_ids: performerIds,
+      tag_ids: tagIds,
+    };
+  }
+
+  async function onSave() {
+    setIsLoading(true);
+    try {
+      if (props.isNew) {
+        const result = await createGallery();
+        if (result.data?.galleryCreate) {
+          props.onUpdate(result.data.galleryCreate);
+          Toast.success({ content: "Created gallery" });
+        }
+      } else {
+        const result = await updateGallery();
+        if (result.data?.galleryUpdate) {
+          props.onUpdate(result.data.galleryUpdate);
+          Toast.success({ content: "Updated gallery" });
+        }
+      }
+    } catch (e) {
+      Toast.error(e);
+    }
+    setIsLoading(false);
+  }
+
+  if (isLoading) return <LoadingIndicator />;
+
+  return (
+
+  );
+};
diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryImagesPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryImagesPanel.tsx
new file mode 100644
index 000000000..f28a595af
--- /dev/null
+++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryImagesPanel.tsx
@@ -0,0 +1,88 @@
+import React from "react";
+import * as GQL from "src/core/generated-graphql";
+import { GalleriesCriterion } from "src/models/list-filter/criteria/galleries";
+import { ListFilterModel } from "src/models/list-filter/filter";
+import { ImageList } from "src/components/Images/ImageList";
+import { mutateRemoveGalleryImages } from "src/core/StashService";
+import { showWhenSelected } from "src/hooks/ListHook";
+import { useToast } from "src/hooks";
+
+interface IGalleryDetailsProps {
+  gallery: Partial<GQL.GalleryDataFragment>;
+}
+
+export const GalleryImagesPanel: React.FC<IGalleryDetailsProps> = ({
+  gallery,
+}) => {
+  const Toast = useToast();
+
+  function filterHook(filter: ListFilterModel) {
+    const galleryValue = {
+      id: gallery.id!,
+      label: gallery.title ?? gallery.path ?? "",
+    };
+    // if galleries is already present, then we modify it, otherwise add
+    let galleryCriterion = filter.criteria.find((c) => {
+      return c.type === "galleries";
+    }) as GalleriesCriterion;
+
+    if (
+      galleryCriterion &&
+      (galleryCriterion.modifier === GQL.CriterionModifier.IncludesAll ||
+        galleryCriterion.modifier === GQL.CriterionModifier.Includes)
+    ) {
+      // add the gallery if not present
+      if (
+        !galleryCriterion.value.find((p) => {
+          return p.id === gallery.id;
+        })
+      ) {
+        galleryCriterion.value.push(galleryValue);
+      }
+
+      galleryCriterion.modifier = GQL.CriterionModifier.IncludesAll;
+    } else {
+      // overwrite
+      galleryCriterion = new GalleriesCriterion();
+      galleryCriterion.value = [galleryValue];
+      filter.criteria.push(galleryCriterion);
+    }
+
+    return filter;
+  }
+
+  async function removeImages(
+    result: GQL.FindImagesQueryResult,
+    filter: ListFilterModel,
+    selectedIds: Set<string>
+  ) {
+    try {
+      await mutateRemoveGalleryImages({
+        gallery_id: gallery.id!,
+        image_ids: Array.from(selectedIds.values()),
+      });
+      Toast.success({
+        content: "Removed images",
+      });
+    } catch (e) {
+      Toast.error(e);
+    }
+  }
+
+  const otherOperations = [
+    {
+      text: "Remove from Gallery",
+      onClick: removeImages,
+      isDisplayed: showWhenSelected,
+      postRefetch: true,
+    },
+  ];
+
+  return (
+
+  );
+};
diff --git a/ui/v2.5/src/components/Galleries/GalleryExportDialog.tsx b/ui/v2.5/src/components/Galleries/GalleryExportDialog.tsx
new file mode 100644
index 000000000..9ff1ce451
--- /dev/null
+++ b/ui/v2.5/src/components/Galleries/GalleryExportDialog.tsx
@@ -0,0 +1,73 @@
+import React, { useState } from "react";
+import { Form } from "react-bootstrap";
+import { mutateExportObjects } from "src/core/StashService";
+import { Modal } from "src/components/Shared";
+import { useToast } from "src/hooks"; +import { downloadFile } from "src/utils"; + +interface IGalleryExportDialogProps { + selectedIds?: string[]; + all?: boolean; + onClose: () => void; +} + +export const GalleryExportDialog: React.FC = ( + props: IGalleryExportDialogProps +) => { + const [includeDependencies, setIncludeDependencies] = useState(true); + + // Network state + const [isRunning, setIsRunning] = useState(false); + + const Toast = useToast(); + + async function onExport() { + try { + setIsRunning(true); + const ret = await mutateExportObjects({ + galleries: { + ids: props.selectedIds, + all: props.all, + }, + includeDependencies, + }); + + // download the result + if (ret.data && ret.data.exportObjects) { + const link = ret.data.exportObjects; + downloadFile(link); + } + } catch (e) { + Toast.error(e); + } finally { + setIsRunning(false); + props.onClose(); + } + } + + return ( + props.onClose(), + text: "Cancel", + variant: "secondary", + }} + isRunning={isRunning} + > +
+ + setIncludeDependencies(!includeDependencies)} + /> + +
+
+ ); +}; diff --git a/ui/v2.5/src/components/Galleries/GalleryList.tsx b/ui/v2.5/src/components/Galleries/GalleryList.tsx index 740a4ee6f..da6a9388e 100644 --- a/ui/v2.5/src/components/Galleries/GalleryList.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryList.tsx @@ -1,20 +1,150 @@ -import React from "react"; +import React, { useState } from "react"; +import _ from "lodash"; import { Table } from "react-bootstrap"; -import { Link } from "react-router-dom"; -import { FindGalleriesQueryResult } from "src/core/generated-graphql"; +import { Link, useHistory } from "react-router-dom"; +import { + FindGalleriesQueryResult, + GalleryDataFragment, +} from "src/core/generated-graphql"; import { useGalleriesList } from "src/hooks"; +import { showWhenSelected } from "src/hooks/ListHook"; import { ListFilterModel } from "src/models/list-filter/filter"; import { DisplayMode } from "src/models/list-filter/types"; +import { queryFindGalleries } from "src/core/StashService"; import { GalleryCard } from "./GalleryCard"; +import { GalleryExportDialog } from "./GalleryExportDialog"; +import { EditGalleriesDialog } from "./EditGalleriesDialog"; +import { DeleteGalleriesDialog } from "./DeleteGalleriesDialog"; + +interface IGalleryList { + filterHook?: (filter: ListFilterModel) => ListFilterModel; + persistState?: boolean; +} + +export const GalleryList: React.FC = ({ + filterHook, + persistState, +}) => { + const history = useHistory(); + const [isExportDialogOpen, setIsExportDialogOpen] = useState(false); + const [isExportAll, setIsExportAll] = useState(false); + + const otherOperations = [ + { + text: "View Random", + onClick: viewRandom, + }, + { + text: "Export...", + onClick: onExport, + isDisplayed: showWhenSelected, + }, + { + text: "Export all...", + onClick: onExportAll, + }, + ]; + + const addKeybinds = ( + result: FindGalleriesQueryResult, + filter: ListFilterModel + ) => { + Mousetrap.bind("p r", () => { + viewRandom(result, filter); + }); + + return () => { + Mousetrap.unbind("p r"); + }; + }; -export const GalleryList: React.FC = () => { const listData = useGalleriesList({ zoomable: true, + selectable: true, + otherOperations, renderContent, - persistState: true, + renderEditDialog: renderEditGalleriesDialog, + renderDeleteDialog: renderDeleteGalleriesDialog, + filterHook, + addKeybinds, + persistState, }); - function renderContent( + async function viewRandom( + result: FindGalleriesQueryResult, + filter: ListFilterModel + ) { + // query for a random image + if (result.data && result.data.findGalleries) { + const { count } = result.data.findGalleries; + + const index = Math.floor(Math.random() * count); + const filterCopy = _.cloneDeep(filter); + filterCopy.itemsPerPage = 1; + filterCopy.currentPage = index + 1; + const singleResult = await queryFindGalleries(filterCopy); + if ( + singleResult && + singleResult.data && + singleResult.data.findGalleries && + singleResult.data.findGalleries.galleries.length === 1 + ) { + const { id } = singleResult!.data!.findGalleries!.galleries[0]; + // navigate to the image player page + history.push(`/galleries/${id}`); + } + } + } + + async function onExport() { + setIsExportAll(false); + setIsExportDialogOpen(true); + } + + async function onExportAll() { + setIsExportAll(true); + setIsExportDialogOpen(true); + } + + function maybeRenderGalleryExportDialog(selectedIds: Set) { + if (isExportDialogOpen) { + return ( + <> + { + setIsExportDialogOpen(false); + }} + /> + + ); + } + } + + function renderEditGalleriesDialog( + selectedImages: 
GalleryDataFragment[], + onClose: (applied: boolean) => void + ) { + return ( + <> + + + ); + } + + function renderDeleteGalleriesDialog( + selectedImages: GalleryDataFragment[], + onClose: (confirmed: boolean) => void + ) { + return ( + <> + + + ); + } + + function renderGalleries( result: FindGalleriesQueryResult, filter: ListFilterModel, selectedIds: Set, @@ -31,6 +161,11 @@ export const GalleryList: React.FC = () => { key={gallery.id} gallery={gallery} zoomIndex={zoomIndex} + selecting={selectedIds.size > 0} + selected={selectedIds.has(gallery.id)} + onSelectedChanged={(selected: boolean, shiftKey: boolean) => + listData.onSelectChange(gallery.id, selected, shiftKey) + } /> ))}
@@ -42,7 +177,7 @@ export const GalleryList: React.FC = () => { Preview - Path + Title @@ -50,19 +185,19 @@ export const GalleryList: React.FC = () => { - {gallery.files.length > 0 ? ( + {gallery.cover ? ( {gallery.title ) : undefined} - {gallery.path} ({gallery.files.length}{" "} - {gallery.files.length === 1 ? "image" : "images"}) + {gallery.title ?? gallery.path} ({gallery.images.length}{" "} + {gallery.images.length === 1 ? "image" : "images"}) @@ -76,5 +211,19 @@ export const GalleryList: React.FC = () => { } } + function renderContent( + result: FindGalleriesQueryResult, + filter: ListFilterModel, + selectedIds: Set, + zoomIndex: number + ) { + return ( + <> + {maybeRenderGalleryExportDialog(selectedIds)} + {renderGalleries(result, filter, selectedIds, zoomIndex)} + + ); + } + return listData.template; }; diff --git a/ui/v2.5/src/components/Galleries/GalleryViewer.tsx b/ui/v2.5/src/components/Galleries/GalleryViewer.tsx index b2c555124..0865f8735 100644 --- a/ui/v2.5/src/components/Galleries/GalleryViewer.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryViewer.tsx @@ -4,7 +4,7 @@ import FsLightbox from "fslightbox-react"; import "flexbin/flexbin.css"; interface IProps { - gallery: GQL.GalleryDataFragment; + gallery: Partial; } export const GalleryViewer: React.FC = ({ gallery }) => { @@ -16,23 +16,27 @@ export const GalleryViewer: React.FC = ({ gallery }) => { setLightboxToggle(!lightboxToggle); }; - const photos = gallery.files.map((file) => file.path ?? ""); - const thumbs = gallery.files.map((file, index) => ( -
openImage(index)} - onKeyPress={() => openImage(index)} - > - {file.name -
- )); + const photos = !gallery.images + ? [] + : gallery.images.map((file) => file.paths.image ?? ""); + const thumbs = !gallery.images + ? [] + : gallery.images.map((file, index) => ( +
openImage(index)} + onKeyPress={() => openImage(index)} + > + {file.title +
+ )); return (
@@ -41,6 +45,7 @@ export const GalleryViewer: React.FC = ({ gallery }) => { sourceIndex={currentIndex} toggler={lightboxToggle} sources={photos} + key={gallery.id!} />
); diff --git a/ui/v2.5/src/components/Galleries/styles.scss b/ui/v2.5/src/components/Galleries/styles.scss index 60c986f53..15e4c040a 100644 --- a/ui/v2.5/src/components/Galleries/styles.scss +++ b/ui/v2.5/src/components/Galleries/styles.scss @@ -1,11 +1,41 @@ +.gallery-image { + &:hover { + cursor: pointer; + } +} + +.gallery-header { + flex-basis: auto; + margin-top: 30px; +} + +#gallery-details-container { + .tab-content { + min-height: 15rem; + } + + .gallery-description { + width: 100%; + } +} + .gallery-card { &.card { + overflow: hidden; padding: 0; padding-bottom: 1rem; } .card-section { margin-top: auto; + + a:hover { + text-decoration: none; + } + } + + .card-section-title { + color: $text-color; } &-image { @@ -13,8 +43,56 @@ } } -.gallery-image { - &:hover { - cursor: pointer; +.gallery-tabs { + max-height: calc(100vh - 4rem); + + overflow-wrap: break-word; + word-wrap: break-word; +} + +$galleryTabWidth: 450px; + +@media (min-width: 1200px) { + .gallery-tabs { + flex: 0 0 $galleryTabWidth; + max-width: $galleryTabWidth; + overflow: auto; + } + + .gallery-container { + flex: 0 0 calc(100% - #{$galleryTabWidth}); + max-width: calc(100% - #{$galleryTabWidth}); + } +} + +.gallery-tabs, +.gallery-container { + padding-left: 15px; + padding-right: 15px; + position: relative; + width: 100%; +} + +.gallery-container { + height: calc(100vh - 4rem); + overflow: auto; +} + +@media (min-width: 1200px), (max-width: 575px) { + .gallery-performers { + .performer-card { + width: 15rem; + + &-gallery { + height: 22.5rem; + } + } + } +} + +#gallery-edit-details { + .rating-stars { + font-size: 1.3em; + height: calc(1.5em + 0.75rem + 2px); } } diff --git a/ui/v2.5/src/components/Images/DeleteImagesDialog.tsx b/ui/v2.5/src/components/Images/DeleteImagesDialog.tsx new file mode 100644 index 000000000..3c3615018 --- /dev/null +++ b/ui/v2.5/src/components/Images/DeleteImagesDialog.tsx @@ -0,0 +1,91 @@ +import React, { useState } from "react"; +import { Form } from "react-bootstrap"; +import { useImagesDestroy } from "src/core/StashService"; +import * as GQL from "src/core/generated-graphql"; +import { Modal } from "src/components/Shared"; +import { useToast } from "src/hooks"; +import { FormattedMessage } from "react-intl"; + +interface IDeleteImageDialogProps { + selected: GQL.SlimImageDataFragment[]; + onClose: (confirmed: boolean) => void; +} + +export const DeleteImagesDialog: React.FC = ( + props: IDeleteImageDialogProps +) => { + const plural = props.selected.length > 1; + + const singleMessageId = "deleteImageText"; + const pluralMessageId = "deleteImagesText"; + + const singleMessage = + "Are you sure you want to delete this image? Unless the file is also deleted, this image will be re-added when scan is performed."; + const pluralMessage = + "Are you sure you want to delete these images? Unless the files are also deleted, these images will be re-added when scan is performed."; + + const header = plural ? "Delete Images" : "Delete Image"; + const toastMessage = plural ? "Deleted images" : "Deleted image"; + const messageId = plural ? pluralMessageId : singleMessageId; + const message = plural ? 
pluralMessage : singleMessage; + + const [deleteFile, setDeleteFile] = useState(false); + const [deleteGenerated, setDeleteGenerated] = useState(true); + + const Toast = useToast(); + const [deleteImage] = useImagesDestroy(getImagesDeleteInput()); + + // Network state + const [isDeleting, setIsDeleting] = useState(false); + + function getImagesDeleteInput(): GQL.ImagesDestroyInput { + return { + ids: props.selected.map((image) => image.id), + delete_file: deleteFile, + delete_generated: deleteGenerated, + }; + } + + async function onDelete() { + setIsDeleting(true); + try { + await deleteImage(); + Toast.success({ content: toastMessage }); + } catch (e) { + Toast.error(e); + } + setIsDeleting(false); + props.onClose(true); + } + + return ( + props.onClose(false), + text: "Cancel", + variant: "secondary", + }} + isRunning={isDeleting} + > +

+ +

+
+ setDeleteFile(!deleteFile)} + /> + setDeleteGenerated(!deleteGenerated)} + /> + +
+ ); +}; diff --git a/ui/v2.5/src/components/Images/EditImagesDialog.tsx b/ui/v2.5/src/components/Images/EditImagesDialog.tsx new file mode 100644 index 000000000..2fe85adbd --- /dev/null +++ b/ui/v2.5/src/components/Images/EditImagesDialog.tsx @@ -0,0 +1,366 @@ +import React, { useEffect, useState } from "react"; +import { Form, Col, Row } from "react-bootstrap"; +import _ from "lodash"; +import { useBulkImageUpdate } from "src/core/StashService"; +import * as GQL from "src/core/generated-graphql"; +import { StudioSelect, Modal } from "src/components/Shared"; +import { useToast } from "src/hooks"; +import { FormUtils } from "src/utils"; +import MultiSet from "../Shared/MultiSet"; +import { RatingStars } from "../Scenes/SceneDetails/RatingStars"; + +interface IListOperationProps { + selected: GQL.SlimImageDataFragment[]; + onClose: (applied: boolean) => void; +} + +export const EditImagesDialog: React.FC = ( + props: IListOperationProps +) => { + const Toast = useToast(); + const [rating, setRating] = useState(); + const [studioId, setStudioId] = useState(); + const [performerMode, setPerformerMode] = React.useState< + GQL.BulkUpdateIdMode + >(GQL.BulkUpdateIdMode.Add); + const [performerIds, setPerformerIds] = useState(); + const [tagMode, setTagMode] = React.useState( + GQL.BulkUpdateIdMode.Add + ); + const [tagIds, setTagIds] = useState(); + + const [updateImages] = useBulkImageUpdate(getImageInput()); + + // Network state + const [isUpdating, setIsUpdating] = useState(false); + + function makeBulkUpdateIds( + ids: string[], + mode: GQL.BulkUpdateIdMode + ): GQL.BulkUpdateIds { + return { + mode, + ids, + }; + } + + function getImageInput(): GQL.BulkImageUpdateInput { + // need to determine what we are actually setting on each image + const aggregateRating = getRating(props.selected); + const aggregateStudioId = getStudioId(props.selected); + const aggregatePerformerIds = getPerformerIds(props.selected); + const aggregateTagIds = getTagIds(props.selected); + + const imageInput: GQL.BulkImageUpdateInput = { + ids: props.selected.map((image) => { + return image.id; + }), + }; + + // if rating is undefined + if (rating === undefined) { + // and all images have the same rating, then we are unsetting the rating. 
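+      // aggregateRating is only set when every selected image shares the same rating value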
+ if (aggregateRating) { + // an undefined rating is ignored in the server, so set it to 0 instead + imageInput.rating = 0; + } + // otherwise not setting the rating + } else { + // if rating is set, then we are setting the rating for all + imageInput.rating = rating; + } + + // if studioId is undefined + if (studioId === undefined) { + // and all images have the same studioId, + // then unset the studioId, otherwise ignoring studioId + if (aggregateStudioId) { + // an undefined studio_id is ignored in the server, so set it to empty string instead + imageInput.studio_id = ""; + } + } else { + // if studioId is set, then we are setting it + imageInput.studio_id = studioId; + } + + // if performerIds are empty + if ( + performerMode === GQL.BulkUpdateIdMode.Set && + (!performerIds || performerIds.length === 0) + ) { + // and all images have the same ids, + if (aggregatePerformerIds.length > 0) { + // then unset the performerIds, otherwise ignore + imageInput.performer_ids = makeBulkUpdateIds( + performerIds || [], + performerMode + ); + } + } else { + // if performerIds non-empty, then we are setting them + imageInput.performer_ids = makeBulkUpdateIds( + performerIds || [], + performerMode + ); + } + + // if tagIds non-empty, then we are setting them + if ( + tagMode === GQL.BulkUpdateIdMode.Set && + (!tagIds || tagIds.length === 0) + ) { + // and all images have the same ids, + if (aggregateTagIds.length > 0) { + // then unset the tagIds, otherwise ignore + imageInput.tag_ids = makeBulkUpdateIds(tagIds || [], tagMode); + } + } else { + // if tagIds non-empty, then we are setting them + imageInput.tag_ids = makeBulkUpdateIds(tagIds || [], tagMode); + } + + return imageInput; + } + + async function onSave() { + setIsUpdating(true); + try { + await updateImages(); + Toast.success({ content: "Updated images" }); + props.onClose(true); + } catch (e) { + Toast.error(e); + } + setIsUpdating(false); + } + + function getRating(state: GQL.SlimImageDataFragment[]) { + let ret: number | undefined; + let first = true; + + state.forEach((image: GQL.SlimImageDataFragment) => { + if (first) { + ret = image.rating ?? undefined; + first = false; + } else if (ret !== image.rating) { + ret = undefined; + } + }); + + return ret; + } + + function getStudioId(state: GQL.SlimImageDataFragment[]) { + let ret: string | undefined; + let first = true; + + state.forEach((image: GQL.SlimImageDataFragment) => { + if (first) { + ret = image?.studio?.id; + first = false; + } else { + const studio = image?.studio?.id; + if (ret !== studio) { + ret = undefined; + } + } + }); + + return ret; + } + + function getPerformerIds(state: GQL.SlimImageDataFragment[]) { + let ret: string[] = []; + let first = true; + + state.forEach((image: GQL.SlimImageDataFragment) => { + if (first) { + ret = image.performers ? image.performers.map((p) => p.id).sort() : []; + first = false; + } else { + const perfIds = image.performers + ? image.performers.map((p) => p.id).sort() + : []; + + if (!_.isEqual(ret, perfIds)) { + ret = []; + } + } + }); + + return ret; + } + + function getTagIds(state: GQL.SlimImageDataFragment[]) { + let ret: string[] = []; + let first = true; + + state.forEach((image: GQL.SlimImageDataFragment) => { + if (first) { + ret = image.tags ? image.tags.map((t) => t.id).sort() : []; + first = false; + } else { + const tIds = image.tags ? 
image.tags.map((t) => t.id).sort() : []; + + if (!_.isEqual(ret, tIds)) { + ret = []; + } + } + }); + + return ret; + } + + useEffect(() => { + const state = props.selected; + let updateRating: number | undefined; + let updateStudioID: string | undefined; + let updatePerformerIds: string[] = []; + let updateTagIds: string[] = []; + let first = true; + + state.forEach((image: GQL.SlimImageDataFragment) => { + const imageRating = image.rating; + const imageStudioID = image?.studio?.id; + const imagePerformerIDs = (image.performers ?? []) + .map((p) => p.id) + .sort(); + const imageTagIDs = (image.tags ?? []).map((p) => p.id).sort(); + + if (first) { + updateRating = imageRating ?? undefined; + updateStudioID = imageStudioID; + updatePerformerIds = imagePerformerIDs; + updateTagIds = imageTagIDs; + first = false; + } else { + if (imageRating !== updateRating) { + updateRating = undefined; + } + if (imageStudioID !== updateStudioID) { + updateStudioID = undefined; + } + if (!_.isEqual(imagePerformerIDs, updatePerformerIds)) { + updatePerformerIds = []; + } + if (!_.isEqual(imageTagIDs, updateTagIds)) { + updateTagIds = []; + } + } + }); + + setRating(updateRating); + setStudioId(updateStudioID); + if (performerMode === GQL.BulkUpdateIdMode.Set) { + setPerformerIds(updatePerformerIds); + } + + if (tagMode === GQL.BulkUpdateIdMode.Set) { + setTagIds(updateTagIds); + } + }, [props.selected, performerMode, tagMode]); + + function renderMultiSelect( + type: "performers" | "tags", + ids: string[] | undefined + ) { + let mode = GQL.BulkUpdateIdMode.Add; + switch (type) { + case "performers": + mode = performerMode; + break; + case "tags": + mode = tagMode; + break; + } + + return ( + { + const itemIDs = items.map((i) => i.id); + switch (type) { + case "performers": + setPerformerIds(itemIDs); + break; + case "tags": + setTagIds(itemIDs); + break; + } + }} + onSetMode={(newMode) => { + switch (type) { + case "performers": + setPerformerMode(newMode); + break; + case "tags": + setTagMode(newMode); + break; + } + }} + ids={ids ?? []} + mode={mode} + /> + ); + } + + function render() { + return ( + props.onClose(false), + text: "Cancel", + variant: "secondary", + }} + isRunning={isUpdating} + > +
+ + {FormUtils.renderLabel({ + title: "Rating", + })} + + setRating(value)} + disabled={isUpdating} + /> + + + + + {FormUtils.renderLabel({ + title: "Studio", + })} + + + setStudioId(items.length > 0 ? items[0]?.id : undefined) + } + ids={studioId ? [studioId] : []} + isDisabled={isUpdating} + /> + + + + + Performers + {renderMultiSelect("performers", performerIds)} + + + + Tags + {renderMultiSelect("tags", tagIds)} + +
+
+ ); + } + + return render(); +}; diff --git a/ui/v2.5/src/components/Images/ImageCard.tsx b/ui/v2.5/src/components/Images/ImageCard.tsx new file mode 100644 index 000000000..71e38fcca --- /dev/null +++ b/ui/v2.5/src/components/Images/ImageCard.tsx @@ -0,0 +1,198 @@ +import React from "react"; +import { Button, ButtonGroup, Card, Form } from "react-bootstrap"; +import { Link } from "react-router-dom"; +import cx from "classnames"; +import * as GQL from "src/core/generated-graphql"; +import { Icon, TagLink, HoverPopover, SweatDrops } from "src/components/Shared"; +import { TextUtils } from "src/utils"; + +interface IImageCardProps { + image: GQL.SlimImageDataFragment; + selecting?: boolean; + selected: boolean | undefined; + zoomIndex: number; + onSelectedChanged: (selected: boolean, shiftKey: boolean) => void; +} + +export const ImageCard: React.FC = ( + props: IImageCardProps +) => { + function maybeRenderRatingBanner() { + if (!props.image.rating) { + return; + } + return ( +
+ RATING: {props.image.rating} +
+ ); + } + + function maybeRenderTagPopoverButton() { + if (props.image.tags.length <= 0) return; + + const popoverContent = props.image.tags.map((tag) => ( + + )); + + return ( + + + + ); + } + + function maybeRenderPerformerPopoverButton() { + if (props.image.performers.length <= 0) return; + + const popoverContent = props.image.performers.map((performer) => ( +
+ + {performer.name + + +
+ )); + + return ( + + + + ); + } + + function maybeRenderOCounter() { + if (props.image.o_counter) { + return ( +
+ +
+ ); + } + } + + function maybeRenderPopoverButtonGroup() { + if ( + props.image.tags.length > 0 || + props.image.performers.length > 0 || + props.image?.o_counter + ) { + return ( + <> +
+ + {maybeRenderTagPopoverButton()} + {maybeRenderPerformerPopoverButton()} + {maybeRenderOCounter()} + + + ); + } + } + + function handleImageClick( + event: React.MouseEvent + ) { + const { shiftKey } = event; + + if (props.selecting) { + props.onSelectedChanged(!props.selected, shiftKey); + event.preventDefault(); + } + } + + function handleDrag(event: React.DragEvent) { + if (props.selecting) { + event.dataTransfer.setData("text/plain", ""); + event.dataTransfer.setDragImage(new Image(), 0, 0); + } + } + + function handleDragOver(event: React.DragEvent) { + const ev = event; + const shiftKey = false; + + if (props.selecting && !props.selected) { + props.onSelectedChanged(true, shiftKey); + } + + ev.dataTransfer.dropEffect = "move"; + ev.preventDefault(); + } + + function isPortrait() { + const { file } = props.image; + const width = file.width ? file.width : 0; + const height = file.height ? file.height : 0; + return height > width; + } + + let shiftKey = false; + + return ( + + props.onSelectedChanged(!props.selected, shiftKey)} + onClick={(event: React.MouseEvent) => { + // eslint-disable-next-line prefer-destructuring + shiftKey = event.shiftKey; + event.stopPropagation(); + }} + /> + +
+ +
+ {props.image.title +
+ {maybeRenderRatingBanner()} + +
+
+
+ {props.image.title + ? props.image.title + : TextUtils.fileNameFromPath(props.image.path)} +
+
+ + {maybeRenderPopoverButtonGroup()} +
+ ); +}; diff --git a/ui/v2.5/src/components/Images/ImageDetails/Image.tsx b/ui/v2.5/src/components/Images/ImageDetails/Image.tsx new file mode 100644 index 000000000..0b8e65576 --- /dev/null +++ b/ui/v2.5/src/components/Images/ImageDetails/Image.tsx @@ -0,0 +1,236 @@ +import { Tab, Nav, Dropdown } from "react-bootstrap"; +import React, { useEffect, useState } from "react"; +import { useParams, useHistory, Link } from "react-router-dom"; +import * as GQL from "src/core/generated-graphql"; +import { + useFindImage, + useImageIncrementO, + useImageDecrementO, + useImageResetO, +} from "src/core/StashService"; +import { LoadingIndicator, Icon } from "src/components/Shared"; +import { useToast } from "src/hooks"; +import { TextUtils } from "src/utils"; +import * as Mousetrap from "mousetrap"; +import { OCounterButton } from "src/components/Scenes/SceneDetails/OCounterButton"; +import { ImageFileInfoPanel } from "./ImageFileInfoPanel"; +import { ImageEditPanel } from "./ImageEditPanel"; +import { ImageDetailPanel } from "./ImageDetailPanel"; +import { DeleteImagesDialog } from "../DeleteImagesDialog"; + +interface IImageParams { + id?: string; +} + +export const Image: React.FC = () => { + const { id = "new" } = useParams(); + const history = useHistory(); + const Toast = useToast(); + + const [image, setImage] = useState(); + const { data, error, loading } = useFindImage(id); + const [oLoading, setOLoading] = useState(false); + const [incrementO] = useImageIncrementO(image?.id ?? "0"); + const [decrementO] = useImageDecrementO(image?.id ?? "0"); + const [resetO] = useImageResetO(image?.id ?? "0"); + + const [activeTabKey, setActiveTabKey] = useState("image-details-panel"); + + const [isDeleteAlertOpen, setIsDeleteAlertOpen] = useState(false); + + useEffect(() => { + if (data?.findImage) setImage(data.findImage); + }, [data]); + + const updateOCounter = (newValue: number) => { + const modifiedImage = { ...image } as GQL.ImageDataFragment; + modifiedImage.o_counter = newValue; + setImage(modifiedImage); + }; + + const onIncrementClick = async () => { + try { + setOLoading(true); + const result = await incrementO(); + if (result.data) updateOCounter(result.data.imageIncrementO); + } catch (e) { + Toast.error(e); + } finally { + setOLoading(false); + } + }; + + const onDecrementClick = async () => { + try { + setOLoading(true); + const result = await decrementO(); + if (result.data) updateOCounter(result.data.imageDecrementO); + } catch (e) { + Toast.error(e); + } finally { + setOLoading(false); + } + }; + + const onResetClick = async () => { + try { + setOLoading(true); + const result = await resetO(); + if (result.data) updateOCounter(result.data.imageResetO); + } catch (e) { + Toast.error(e); + } finally { + setOLoading(false); + } + }; + + function onDeleteDialogClosed(deleted: boolean) { + setIsDeleteAlertOpen(false); + if (deleted) { + history.push("/images"); + } + } + + function maybeRenderDeleteDialog() { + if (isDeleteAlertOpen && image) { + return ( + + ); + } + } + + function renderOperations() { + return ( + + + + + + setIsDeleteAlertOpen(true)} + > + Delete Image + + + + ); + } + + function renderTabs() { + if (!image) { + return; + } + + return ( + k && setActiveTabKey(k)} + > +
+ +
+ + + + + + + + + + setImage(newImage)} + onDelete={() => setIsDeleteAlertOpen(true)} + /> + + +
+ ); + } + + // set up hotkeys + useEffect(() => { + Mousetrap.bind("a", () => setActiveTabKey("image-details-panel")); + Mousetrap.bind("e", () => setActiveTabKey("image-edit-panel")); + Mousetrap.bind("f", () => setActiveTabKey("image-file-info-panel")); + Mousetrap.bind("o", () => onIncrementClick()); + + return () => { + Mousetrap.unbind("a"); + Mousetrap.unbind("e"); + Mousetrap.unbind("f"); + Mousetrap.unbind("o"); + }; + }); + + if (loading || !image || !data?.findImage) { + return ; + } + + if (error) return
{error.message}
; + + return ( +
+ {maybeRenderDeleteDialog()} +
+
+ {image.studio && ( +

+ + {`${image.studio.name} + +

+ )} +

+ {image.title ?? TextUtils.fileNameFromPath(image.path)} +

+
+ {renderTabs()} +
+
+ {image.title +
+
+ ); +}; diff --git a/ui/v2.5/src/components/Images/ImageDetails/ImageDetailPanel.tsx b/ui/v2.5/src/components/Images/ImageDetails/ImageDetailPanel.tsx new file mode 100644 index 000000000..1e8d99174 --- /dev/null +++ b/ui/v2.5/src/components/Images/ImageDetails/ImageDetailPanel.tsx @@ -0,0 +1,103 @@ +import React from "react"; +import { Link } from "react-router-dom"; +import * as GQL from "src/core/generated-graphql"; +import { TextUtils } from "src/utils"; +import { TagLink } from "src/components/Shared"; +import { PerformerCard } from "src/components/Performers/PerformerCard"; +import { RatingStars } from "src/components/Scenes/SceneDetails/RatingStars"; + +interface IImageDetailProps { + image: GQL.ImageDataFragment; +} + +export const ImageDetailPanel: React.FC = (props) => { + function renderTags() { + if (props.image.tags.length === 0) return; + const tags = props.image.tags.map((tag) => ( + + )); + return ( + <> +
Tags
+ {tags} + + ); + } + + function renderPerformers() { + if (props.image.performers.length === 0) return; + const cards = props.image.performers.map((performer) => ( + + )); + + return ( + <> +
Performers
+
+ {cards} +
+ + ); + } + + function renderGalleries() { + if (props.image.galleries.length === 0) return; + const tags = props.image.galleries.map((gallery) => ( + + )); + return ( + <> +
Galleries
+ {tags} + + ); + } + + // filename should use entire row if there is no studio + const imageDetailsWidth = props.image.studio ? "col-9" : "col-12"; + + return ( + <> +
+
+
+

+ {props.image.title ?? + TextUtils.fileNameFromPath(props.image.path)} +

+
+ {props.image.rating ? ( +
+ Rating: +
+ ) : ( + "" + )} + {renderGalleries()} + {props.image.file.height ? ( +
Resolution: {TextUtils.resolution(props.image.file.height)}
+ ) : ( + "" + )} +
+ {props.image.studio && ( +
+ + {`${props.image.studio.name} + +
+ )} +
+
+
+ {renderTags()} + {renderPerformers()} +
+
+ + ); +}; diff --git a/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx b/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx new file mode 100644 index 000000000..840ec9727 --- /dev/null +++ b/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx @@ -0,0 +1,212 @@ +import React, { useEffect, useState } from "react"; +import { Button, Form, Col, Row } from "react-bootstrap"; +import * as GQL from "src/core/generated-graphql"; +import { useImageUpdate } from "src/core/StashService"; +import { + PerformerSelect, + TagSelect, + StudioSelect, + LoadingIndicator, +} from "src/components/Shared"; +import { useToast } from "src/hooks"; +import { FormUtils } from "src/utils"; +import { RatingStars } from "src/components/Scenes/SceneDetails/RatingStars"; + +interface IProps { + image: GQL.ImageDataFragment; + isVisible: boolean; + onUpdate: (image: GQL.ImageDataFragment) => void; + onDelete: () => void; +} + +export const ImageEditPanel: React.FC = (props: IProps) => { + const Toast = useToast(); + const [title, setTitle] = useState(); + const [rating, setRating] = useState(); + const [studioId, setStudioId] = useState(); + const [performerIds, setPerformerIds] = useState(); + const [tagIds, setTagIds] = useState(); + + // Network state + const [isLoading, setIsLoading] = useState(true); + + const [updateImage] = useImageUpdate(getImageInput()); + + useEffect(() => { + if (props.isVisible) { + Mousetrap.bind("s s", () => { + onSave(); + }); + Mousetrap.bind("d d", () => { + props.onDelete(); + }); + + // numeric keypresses get caught by jwplayer, so blur the element + // if the rating sequence is started + Mousetrap.bind("r", () => { + if (document.activeElement instanceof HTMLElement) { + document.activeElement.blur(); + } + + Mousetrap.bind("0", () => setRating(NaN)); + Mousetrap.bind("1", () => setRating(1)); + Mousetrap.bind("2", () => setRating(2)); + Mousetrap.bind("3", () => setRating(3)); + Mousetrap.bind("4", () => setRating(4)); + Mousetrap.bind("5", () => setRating(5)); + + setTimeout(() => { + Mousetrap.unbind("0"); + Mousetrap.unbind("1"); + Mousetrap.unbind("2"); + Mousetrap.unbind("3"); + Mousetrap.unbind("4"); + Mousetrap.unbind("5"); + }, 1000); + }); + + return () => { + Mousetrap.unbind("s s"); + Mousetrap.unbind("d d"); + + Mousetrap.unbind("r"); + }; + } + }); + + function updateImageEditState(state: Partial) { + const perfIds = state.performers?.map((performer) => performer.id); + const tIds = state.tags ? state.tags.map((tag) => tag.id) : undefined; + + setTitle(state.title ?? undefined); + setRating(state.rating === null ? NaN : state.rating); + // setGalleryId(state?.gallery?.id ?? undefined); + setStudioId(state?.studio?.id ?? undefined); + setPerformerIds(perfIds); + setTagIds(tIds); + } + + useEffect(() => { + updateImageEditState(props.image); + setIsLoading(false); + }, [props.image]); + + function getImageInput(): GQL.ImageUpdateInput { + return { + id: props.image.id, + title, + rating, + studio_id: studioId, + performer_ids: performerIds, + tag_ids: tagIds, + }; + } + + async function onSave() { + setIsLoading(true); + try { + const result = await updateImage(); + if (result.data?.imageUpdate) { + props.onUpdate(result.data.imageUpdate); + Toast.success({ content: "Updated image" }); + } + } catch (e) { + Toast.error(e); + } + setIsLoading(false); + } + + if (isLoading) return ; + + return ( +
+
+
+ + +
+
+
+
+ {FormUtils.renderInputGroup({ + title: "Title", + value: title, + onChange: setTitle, + isEditing: true, + })} + + {FormUtils.renderLabel({ + title: "Rating", + })} + + setRating(value)} + /> + + + + + {FormUtils.renderLabel({ + title: "Studio", + })} + + + setStudioId(items.length > 0 ? items[0]?.id : undefined) + } + ids={studioId ? [studioId] : []} + /> + + + + + {FormUtils.renderLabel({ + title: "Performers", + labelProps: { + column: true, + sm: 3, + xl: 12, + }, + })} + + + setPerformerIds(items.map((item) => item.id)) + } + ids={performerIds} + /> + + + + + {FormUtils.renderLabel({ + title: "Tags", + labelProps: { + column: true, + sm: 3, + xl: 12, + }, + })} + + setTagIds(items.map((item) => item.id))} + ids={tagIds} + /> + + +
+
+
+ ); +}; diff --git a/ui/v2.5/src/components/Images/ImageDetails/ImageFileInfoPanel.tsx b/ui/v2.5/src/components/Images/ImageDetails/ImageFileInfoPanel.tsx new file mode 100644 index 000000000..05fb698db --- /dev/null +++ b/ui/v2.5/src/components/Images/ImageDetails/ImageFileInfoPanel.tsx @@ -0,0 +1,81 @@ +import React from "react"; +import { FormattedNumber } from "react-intl"; +import * as GQL from "src/core/generated-graphql"; +import { TextUtils } from "src/utils"; + +interface IImageFileInfoPanelProps { + image: GQL.ImageDataFragment; +} + +export const ImageFileInfoPanel: React.FC = ( + props: IImageFileInfoPanelProps +) => { + function renderChecksum() { + return ( +
+ Checksum + {props.image.checksum} +
+ ); + } + + function renderPath() { + const { + image: { path }, + } = props; + return ( +
+ Path + + {`file://${props.image.path}`}{" "} + +
+ ); + } + + function renderFileSize() { + if (props.image.file.size === undefined) { + return; + } + + const { size, unit } = TextUtils.fileSize(props.image.file.size ?? 0); + + return ( +
+ File Size + + + +
+ ); + } + + function renderDimensions() { + if (props.image.file.height && props.image.file.width) { + return ( +
+ Dimensions + + {props.image.file.width} x {props.image.file.height} + +
+ ); + } + } + + return ( +
+ {renderChecksum()} + {renderPath()} + {renderFileSize()} + {renderDimensions()} +
+ ); +}; diff --git a/ui/v2.5/src/components/Images/ImageExportDialog.tsx b/ui/v2.5/src/components/Images/ImageExportDialog.tsx new file mode 100644 index 000000000..1b61cca1a --- /dev/null +++ b/ui/v2.5/src/components/Images/ImageExportDialog.tsx @@ -0,0 +1,73 @@ +import React, { useState } from "react"; +import { Form } from "react-bootstrap"; +import { mutateExportObjects } from "src/core/StashService"; +import { Modal } from "src/components/Shared"; +import { useToast } from "src/hooks"; +import { downloadFile } from "src/utils"; + +interface IImageExportDialogProps { + selectedIds?: string[]; + all?: boolean; + onClose: () => void; +} + +export const ImageExportDialog: React.FC = ( + props: IImageExportDialogProps +) => { + const [includeDependencies, setIncludeDependencies] = useState(true); + + // Network state + const [isRunning, setIsRunning] = useState(false); + + const Toast = useToast(); + + async function onExport() { + try { + setIsRunning(true); + const ret = await mutateExportObjects({ + images: { + ids: props.selectedIds, + all: props.all, + }, + includeDependencies, + }); + + // download the result + if (ret.data && ret.data.exportObjects) { + const link = ret.data.exportObjects; + downloadFile(link); + } + } catch (e) { + Toast.error(e); + } finally { + setIsRunning(false); + props.onClose(); + } + } + + return ( + props.onClose(), + text: "Cancel", + variant: "secondary", + }} + isRunning={isRunning} + > +
+ + setIncludeDependencies(!includeDependencies)} + /> + +
+
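// The <Form.Check> opening tag above lost its attributes; only the onChange handler
// toggling includeDependencies survives. It is presumably a single checkbox along
// these lines (the id and label text are placeholders, not taken from this diff):
import React from "react";
import { Form } from "react-bootstrap";

export const IncludeDependenciesCheckSketch: React.FC<{
  checked: boolean;
  onToggle: () => void;
}> = ({ checked, onToggle }) => (
  <Form.Group>
    <Form.Check
      id="export-include-dependencies"
      checked={checked}
      label="Include related objects"
      onChange={onToggle}
    />
  </Form.Group>
);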
+ ); +}; diff --git a/ui/v2.5/src/components/Images/ImageList.tsx b/ui/v2.5/src/components/Images/ImageList.tsx new file mode 100644 index 000000000..559b6ee9c --- /dev/null +++ b/ui/v2.5/src/components/Images/ImageList.tsx @@ -0,0 +1,259 @@ +import React, { useState } from "react"; +import _ from "lodash"; +import { useHistory } from "react-router-dom"; +import FsLightbox from "fslightbox-react"; +import { + FindImagesQueryResult, + SlimImageDataFragment, +} from "src/core/generated-graphql"; +import * as GQL from "src/core/generated-graphql"; +import { queryFindImages } from "src/core/StashService"; +import { useImagesList } from "src/hooks"; +import { TextUtils } from "src/utils"; +import { ListFilterModel } from "src/models/list-filter/filter"; +import { DisplayMode } from "src/models/list-filter/types"; +import { IListHookOperation, showWhenSelected } from "src/hooks/ListHook"; +import { ImageCard } from "./ImageCard"; +import { EditImagesDialog } from "./EditImagesDialog"; +import { DeleteImagesDialog } from "./DeleteImagesDialog"; +import { ImageExportDialog } from "./ImageExportDialog"; +import "flexbin/flexbin.css"; + +interface IImageWallProps { + images: GQL.SlimImageDataFragment[]; +} + +const ImageWall: React.FC = ({ images }) => { + const [lightboxToggle, setLightboxToggle] = useState(false); + const [currentIndex, setCurrentIndex] = useState(0); + + const openImage = (index: number) => { + setCurrentIndex(index); + setLightboxToggle(!lightboxToggle); + }; + + const photos = images.map((image) => image.paths.image ?? ""); + const thumbs = images.map((image, index) => ( +
openImage(index)} + onKeyPress={() => openImage(index)} + > + {image.title +
+ )); + + // FsLightbox doesn't update unless the key updates + const key = images.map((i) => i.id).join(","); + + return ( +
+
{thumbs}
+ +
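// The thumbnail markup and the <FsLightbox /> element above were stripped; only the
// onClick/onKeyPress handlers, the photos/key variables and the remount comment
// survive. A self-contained sketch of the same wall-plus-lightbox wiring, assuming
// fslightbox-react's toggler/sources/sourceIndex props (wrapper class names are
// assumptions):
import React, { useState } from "react";
import FsLightbox from "fslightbox-react";

export const ImageWallSketch: React.FC<{ urls: string[] }> = ({ urls }) => {
  const [toggler, setToggler] = useState(false);
  const [sourceIndex, setSourceIndex] = useState(0);

  const open = (index: number) => {
    setSourceIndex(index);
    setToggler((t) => !t); // FsLightbox opens whenever `toggler` flips
  };

  return (
    <div className="gallery">
      <div className="flexbin">
        {urls.map((url, index) => (
          <div
            key={url}
            role="link"
            tabIndex={index}
            onClick={() => open(index)}
            onKeyPress={() => open(index)}
          >
            <img src={url} alt="" className="gallery-image" />
          </div>
        ))}
      </div>
      {/* re-mount the lightbox when the image set changes, per the comment above */}
      <FsLightbox
        key={urls.join(",")}
        toggler={toggler}
        sources={urls}
        sourceIndex={sourceIndex}
      />
    </div>
  );
};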
+ ); +}; + +interface IImageList { + filterHook?: (filter: ListFilterModel) => ListFilterModel; + persistState?: boolean; + extraOperations?: IListHookOperation[]; +} + +export const ImageList: React.FC = ({ + filterHook, + persistState, + extraOperations, +}) => { + const history = useHistory(); + const [isExportDialogOpen, setIsExportDialogOpen] = useState(false); + const [isExportAll, setIsExportAll] = useState(false); + + const otherOperations = (extraOperations ?? []).concat([ + { + text: "View Random", + onClick: viewRandom, + }, + { + text: "Export...", + onClick: onExport, + isDisplayed: showWhenSelected, + }, + { + text: "Export all...", + onClick: onExportAll, + }, + ]); + + const addKeybinds = ( + result: FindImagesQueryResult, + filter: ListFilterModel + ) => { + Mousetrap.bind("p r", () => { + viewRandom(result, filter); + }); + + return () => { + Mousetrap.unbind("p r"); + }; + }; + + const listData = useImagesList({ + zoomable: true, + selectable: true, + otherOperations, + renderContent, + renderEditDialog: renderEditImagesDialog, + renderDeleteDialog: renderDeleteImagesDialog, + filterHook, + addKeybinds, + persistState, + }); + + async function viewRandom( + result: FindImagesQueryResult, + filter: ListFilterModel + ) { + // query for a random image + if (result.data && result.data.findImages) { + const { count } = result.data.findImages; + + const index = Math.floor(Math.random() * count); + const filterCopy = _.cloneDeep(filter); + filterCopy.itemsPerPage = 1; + filterCopy.currentPage = index + 1; + const singleResult = await queryFindImages(filterCopy); + if ( + singleResult && + singleResult.data && + singleResult.data.findImages && + singleResult.data.findImages.images.length === 1 + ) { + const { id } = singleResult!.data!.findImages!.images[0]; + // navigate to the image player page + history.push(`/images/${id}`); + } + } + } + + async function onExport() { + setIsExportAll(false); + setIsExportDialogOpen(true); + } + + async function onExportAll() { + setIsExportAll(true); + setIsExportDialogOpen(true); + } + + function maybeRenderImageExportDialog(selectedIds: Set) { + if (isExportDialogOpen) { + return ( + <> + { + setIsExportDialogOpen(false); + }} + /> + + ); + } + } + + function renderEditImagesDialog( + selectedImages: SlimImageDataFragment[], + onClose: (applied: boolean) => void + ) { + return ( + <> + + + ); + } + + function renderDeleteImagesDialog( + selectedImages: SlimImageDataFragment[], + onClose: (confirmed: boolean) => void + ) { + return ( + <> + + + ); + } + + function renderImageCard( + image: SlimImageDataFragment, + selectedIds: Set, + zoomIndex: number + ) { + return ( + 0} + selected={selectedIds.has(image.id)} + onSelectedChanged={(selected: boolean, shiftKey: boolean) => + listData.onSelectChange(image.id, selected, shiftKey) + } + /> + ); + } + + function renderImages( + result: FindImagesQueryResult, + filter: ListFilterModel, + selectedIds: Set, + zoomIndex: number + ) { + if (!result.data || !result.data.findImages) { + return; + } + if (filter.displayMode === DisplayMode.Grid) { + return ( +
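// viewRandom() above avoids fetching every matching ID: it reads the total count for
// the current filter, then requests a one-item page at a random offset and navigates
// to that image. The same idea as a generic, standalone helper (illustrative, not
// part of this diff):
export async function pickRandomItem<T>(
  count: number,
  fetchPage: (page: number, perPage: number) => Promise<T[]>
): Promise<T | undefined> {
  if (count < 1) return undefined;
  const index = Math.floor(Math.random() * count); // 0 .. count - 1
  const items = await fetchPage(index + 1, 1); // pages are 1-based, one item per page
  return items[0];
}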
+ {result.data.findImages.images.map((image) => + renderImageCard(image, selectedIds, zoomIndex) + )} +
+ ); + } + // if (filter.displayMode === DisplayMode.List) { + // return ; + // } + if (filter.displayMode === DisplayMode.Wall) { + return ; + } + } + + function renderContent( + result: FindImagesQueryResult, + filter: ListFilterModel, + selectedIds: Set, + zoomIndex: number + ) { + return ( + <> + {maybeRenderImageExportDialog(selectedIds)} + {renderImages(result, filter, selectedIds, zoomIndex)} + + ); + } + + return listData.template; +}; diff --git a/ui/v2.5/src/components/Images/Images.tsx b/ui/v2.5/src/components/Images/Images.tsx new file mode 100644 index 000000000..576b6f674 --- /dev/null +++ b/ui/v2.5/src/components/Images/Images.tsx @@ -0,0 +1,17 @@ +import React from "react"; +import { Route, Switch } from "react-router-dom"; +import { Image } from "./ImageDetails/Image"; +import { ImageList } from "./ImageList"; + +const Images = () => ( + + } + /> + + +); + +export default Images; diff --git a/ui/v2.5/src/components/Images/styles.scss b/ui/v2.5/src/components/Images/styles.scss new file mode 100644 index 000000000..7724256a2 --- /dev/null +++ b/ui/v2.5/src/components/Images/styles.scss @@ -0,0 +1,135 @@ +.image-header { + flex-basis: auto; + margin-top: 30px; +} + +#image-details-container { + .tab-content { + min-height: 15rem; + } + + .image-description { + width: 100%; + } +} + +.image-card { + &.card { + overflow: hidden; + padding: 0; + } + + &-check { + left: 0.5rem; + margin-top: -12px; + opacity: 0; + padding-left: 15px; + position: absolute; + top: 0.7rem; + width: 1.2rem; + z-index: 1; + + &:checked { + opacity: 0.75; + } + } + + .rating-banner { + transition: opacity 0.5s; + } + + &-preview { + display: flex; + justify-content: center; + margin-bottom: 5px; + position: relative; + + &-image { + height: 100%; + object-fit: cover; + width: 100%; + } + + &.portrait { + .image-card-preview-image { + object-fit: contain; + } + } + } + + &:hover { + .rating-banner { + opacity: 0; + transition: opacity 0.5s; + } + + .image-card-check { + opacity: 0.75; + transition: opacity 0.5s; + } + } +} + +.image-tabs { + max-height: calc(100vh - 4rem); + + overflow-wrap: break-word; + word-wrap: break-word; +} + +$imageTabWidth: 450px; + +@media (min-width: 1200px) { + .image-tabs { + flex: 0 0 $imageTabWidth; + max-width: $imageTabWidth; + overflow: auto; + } + + .image-container { + flex: 0 0 calc(100% - #{$imageTabWidth}); + max-width: calc(100% - #{$imageTabWidth}); + } +} + +.image-tabs, +.image-container { + padding-left: 15px; + padding-right: 15px; + position: relative; + width: 100%; +} + +.image-container { + display: flex; + + img { + max-height: calc(100vh - 4rem); + max-width: 100%; + object-fit: contain; + } +} + +@media (min-width: 1200px) { + .image-container { + height: calc(100vh - 4rem); + } +} +@media (min-width: 1200px), (max-width: 575px) { + .image-performers { + .performer-card { + width: 15rem; + + &-image { + height: 22.5rem; + } + } + } +} + +#image-edit-details { + .rating-stars { + font-size: 1.3em; + height: calc(1.5em + 0.75rem + 2px); + } +} diff --git a/ui/v2.5/src/components/List/ListFilter.tsx b/ui/v2.5/src/components/List/ListFilter.tsx index 10b090d01..3fd99087d 100644 --- a/ui/v2.5/src/components/List/ListFilter.tsx +++ b/ui/v2.5/src/components/List/ListFilter.tsx @@ -402,7 +402,7 @@ export const ListFilter: React.FC = ( } function maybeRenderZoom() { - if (props.onChangeZoom) { + if (props.onChangeZoom && props.filter.displayMode === DisplayMode.Grid) { return (
{ ? "/movies/new" : location.pathname === "/tags" ? "/tags/new" + : location.pathname === "/galleries" + ? "/galleries/new" : null; const newButton = newPath === null ? ( @@ -153,6 +164,7 @@ export const MainNavbar: React.FC = () => { useEffect(() => { Mousetrap.bind("?", () => setShowManual(!showManual)); Mousetrap.bind("g s", () => goto("/scenes")); + Mousetrap.bind("g i", () => goto("/images")); Mousetrap.bind("g v", () => goto("/movies")); Mousetrap.bind("g k", () => goto("/scenes/markers")); Mousetrap.bind("g l", () => goto("/galleries")); diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx index 52148c695..f0c15660c 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx @@ -294,7 +294,7 @@ export const Performer: React.FC = () => { return (
-        <div className="image-container">
+        <div className="performer-image-container">
{imageEncoding ? ( ) : ( diff --git a/ui/v2.5/src/components/Performers/styles.scss b/ui/v2.5/src/components/Performers/styles.scss index 107a4b7a3..d5d1df7b4 100644 --- a/ui/v2.5/src/components/Performers/styles.scss +++ b/ui/v2.5/src/components/Performers/styles.scss @@ -11,7 +11,7 @@ margin: 10px auto; overflow: hidden; - .image-container .performer { + .performer-image-container .performer { max-height: calc(100vh - 6rem); max-width: 100%; } diff --git a/ui/v2.5/src/components/Scenes/SceneGenerateDialog.tsx b/ui/v2.5/src/components/Scenes/SceneGenerateDialog.tsx index 5f3372742..abf950e9e 100644 --- a/ui/v2.5/src/components/Scenes/SceneGenerateDialog.tsx +++ b/ui/v2.5/src/components/Scenes/SceneGenerateDialog.tsx @@ -64,7 +64,6 @@ export const SceneGenerateDialog: React.FC = ( imagePreviews: previews && imagePreviews, markers, transcodes, - thumbnails: false, overwrite, sceneIDs: props.selectedIds, previewOptions: { diff --git a/ui/v2.5/src/components/Scenes/styles.scss b/ui/v2.5/src/components/Scenes/styles.scss index 436d0a451..ba557d9a1 100644 --- a/ui/v2.5/src/components/Scenes/styles.scss +++ b/ui/v2.5/src/components/Scenes/styles.scss @@ -163,12 +163,8 @@ textarea.scene-description { text-transform: uppercase; } -.scene-card { - &.card { - overflow: hidden; - padding: 0; - } - +.scene-card, +.gallery-card { &-check { left: 0.5rem; margin-top: -12px; @@ -243,6 +239,11 @@ textarea.scene-description { } } +.scene-card.card { + overflow: hidden; + padding: 0; +} + .scene-cover { display: block; margin-bottom: 10px; diff --git a/ui/v2.5/src/components/Settings/SettingsConfigurationPanel.tsx b/ui/v2.5/src/components/Settings/SettingsConfigurationPanel.tsx index 34d181da7..67e4599d6 100644 --- a/ui/v2.5/src/components/Settings/SettingsConfigurationPanel.tsx +++ b/ui/v2.5/src/components/Settings/SettingsConfigurationPanel.tsx @@ -4,15 +4,70 @@ import * as GQL from "src/core/generated-graphql"; import { useConfiguration, useConfigureGeneral } from "src/core/StashService"; import { useToast } from "src/hooks"; import { Icon, LoadingIndicator } from "src/components/Shared"; -import { FolderSelect } from "src/components/Shared/FolderSelect/FolderSelect"; import StashBoxConfiguration, { IStashBoxInstance, } from "./StashBoxConfiguration"; +import StashConfiguration from "./StashConfiguration"; + +interface IExclusionPatternsProps { + excludes: string[]; + setExcludes: (value: string[]) => void; +} + +const ExclusionPatterns: React.FC = (props) => { + function excludeRegexChanged(idx: number, value: string) { + const newExcludes = props.excludes.map((regex, i) => { + const ret = idx !== i ? 
regex : value; + return ret; + }); + props.setExcludes(newExcludes); + } + + function excludeRemoveRegex(idx: number) { + const newExcludes = props.excludes.filter((_regex, i) => i !== idx); + + props.setExcludes(newExcludes); + } + + function excludeAddRegex() { + const demo = "sample\\.mp4$"; + const newExcludes = props.excludes.concat(demo); + + props.setExcludes(newExcludes); + } + + return ( + <> + + {props.excludes && + props.excludes.map((regexp, i) => ( + + ) => + excludeRegexChanged(i, e.currentTarget.value) + } + /> + + + + + ))} + + + + ); +}; export const SettingsConfigurationPanel: React.FC = () => { const Toast = useToast(); // Editing config state - const [stashes, setStashes] = useState([]); + const [stashes, setStashes] = useState([]); const [databasePath, setDatabasePath] = useState( undefined ); @@ -50,7 +105,18 @@ export const SettingsConfigurationPanel: React.FC = () => { const [logOut, setLogOut] = useState(true); const [logLevel, setLogLevel] = useState("Info"); const [logAccess, setLogAccess] = useState(true); + + const [videoExtensions, setVideoExtensions] = useState(); + const [imageExtensions, setImageExtensions] = useState(); + const [galleryExtensions, setGalleryExtensions] = useState< + string | undefined + >(); + const [createGalleriesFromFolders, setCreateGalleriesFromFolders] = useState< + boolean + >(false); + const [excludes, setExcludes] = useState([]); + const [imageExcludes, setImageExcludes] = useState([]); const [scraperUserAgent, setScraperUserAgent] = useState( undefined ); @@ -62,7 +128,11 @@ export const SettingsConfigurationPanel: React.FC = () => { const { data, error, loading } = useConfiguration(); const [updateGeneralConfig] = useConfigureGeneral({ - stashes, + stashes: stashes.map((s) => ({ + path: s.path, + excludeVideo: s.excludeVideo, + excludeImage: s.excludeImage, + })), databasePath, generatedPath, cachePath, @@ -83,7 +153,12 @@ export const SettingsConfigurationPanel: React.FC = () => { logOut, logLevel, logAccess, + createGalleriesFromFolders, + videoExtensions: commaDelimitedToList(videoExtensions), + imageExtensions: commaDelimitedToList(imageExtensions), + galleryExtensions: commaDelimitedToList(galleryExtensions), excludes, + imageExcludes, scraperUserAgent, scraperCDPPath, stashBoxes: stashBoxes.map( @@ -123,7 +198,14 @@ export const SettingsConfigurationPanel: React.FC = () => { setLogOut(conf.general.logOut); setLogLevel(conf.general.logLevel); setLogAccess(conf.general.logAccess); + setCreateGalleriesFromFolders(conf.general.createGalleriesFromFolders); + setVideoExtensions(listToCommaDelimited(conf.general.videoExtensions)); + setImageExtensions(listToCommaDelimited(conf.general.imageExtensions)); + setGalleryExtensions( + listToCommaDelimited(conf.general.galleryExtensions) + ); setExcludes(conf.general.excludes); + setImageExcludes(conf.general.imageExcludes); setScraperUserAgent(conf.general.scraperUserAgent ?? undefined); setScraperCDPPath(conf.general.scraperCDPPath ?? undefined); setStashBoxes( @@ -137,29 +219,16 @@ export const SettingsConfigurationPanel: React.FC = () => { } }, [data, error]); - function onStashesChanged(directories: string[]) { - setStashes(directories); + function commaDelimitedToList(value: string | undefined) { + if (value) { + return value.split(",").map((s) => s.trim()); + } } - function excludeRegexChanged(idx: number, value: string) { - const newExcludes = excludes.map((regex, i) => { - const ret = idx !== i ? 
regex : value; - return ret; - }); - setExcludes(newExcludes); - } - - function excludeRemoveRegex(idx: number) { - const newExcludes = excludes.filter((_regex, i) => i !== idx); - - setExcludes(newExcludes); - } - - function excludeAddRegex() { - const demo = "sample\\.mp4$"; - const newExcludes = excludes.concat(demo); - - setExcludes(newExcludes); + function listToCommaDelimited(value: string[] | undefined) { + if (value) { + return value.join(", "); + } } async function onSave() { @@ -256,9 +325,9 @@ export const SettingsConfigurationPanel: React.FC = () => {
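// The two helpers above convert between the comma-delimited text inputs further down
// and the string lists sent to the configuration mutation; both fall through to
// undefined when no value is set. Standalone copies for a quick round-trip check:
const toList = (value?: string) =>
  value ? value.split(",").map((s) => s.trim()) : undefined;
const toCommaDelimited = (value?: string[]) =>
  value ? value.join(", ") : undefined;

console.log(toList("mp4, webm , mkv")); // ["mp4", "webm", "mkv"]
console.log(toCommaDelimited(["mp4", "webm"])); // "mp4, webm"
console.log(toList(undefined)); // undefined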
Stashes
- setStashes(s)} /> Directory locations to your content @@ -308,35 +377,56 @@ export const SettingsConfigurationPanel: React.FC = () => {
+ +
Video Extensions
+ ) => + setVideoExtensions(e.currentTarget.value) + } + /> + + Comma-delimited list of file extensions that will be identified as + videos. + +
+ + +
Image Extensions
+ ) => + setImageExtensions(e.currentTarget.value) + } + /> + + Comma-delimited list of file extensions that will be identified as + images. + +
+ + +
Gallery zip Extensions
+ ) => + setGalleryExtensions(e.currentTarget.value) + } + /> + + Comma-delimited list of file extensions that will be identified as + gallery zip files. + +
+
Excluded Patterns
- - {excludes && - excludes.map((regexp, i) => ( - - ) => - excludeRegexChanged(i, e.currentTarget.value) - } - /> - - - - - ))} - - + - Regexps of files/paths to exclude from Scan and add to Clean + Regexps of video files/paths to exclude from Scan and add to Clean {
+ + +
Excluded Image/Gallery Patterns
+ + + Regexps of image and gallery files/paths to exclude from Scan and + add to Clean + + + + +
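// Each exclusion pattern is a regular expression matched against scanned file paths
// (the matching itself presumably happens server-side during Scan/Clean, not in this
// UI code). A tiny standalone illustration of what a pattern list expresses:
const patterns = ["thumbs?\\.db$", "/\\.trash/"];

function isExcluded(path: string, excludes: string[]): boolean {
  return excludes.some((pattern) => new RegExp(pattern).test(path));
}

console.log(isExcluded("/photos/.trash/old.jpg", patterns)); // true
console.log(isExcluded("/photos/cat.jpg", patterns)); // false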
+ + + + setCreateGalleriesFromFolders(!createGalleriesFromFolders) + } + /> + + If true, creates galleries from folders containing images. + +

diff --git a/ui/v2.5/src/components/Settings/SettingsTasksPanel/GenerateButton.tsx b/ui/v2.5/src/components/Settings/SettingsTasksPanel/GenerateButton.tsx index 65ceec0c4..77ced445b 100644 --- a/ui/v2.5/src/components/Settings/SettingsTasksPanel/GenerateButton.tsx +++ b/ui/v2.5/src/components/Settings/SettingsTasksPanel/GenerateButton.tsx @@ -9,7 +9,6 @@ export const GenerateButton: React.FC = () => { const [previews, setPreviews] = useState(true); const [markers, setMarkers] = useState(true); const [transcodes, setTranscodes] = useState(false); - const [thumbnails, setThumbnails] = useState(false); const [imagePreviews, setImagePreviews] = useState(false); async function onGenerate() { @@ -20,7 +19,6 @@ export const GenerateButton: React.FC = () => { imagePreviews: previews && imagePreviews, markers, transcodes, - thumbnails, }); Toast.success({ content: "Started generating" }); } catch (e) { @@ -66,12 +64,6 @@ export const GenerateButton: React.FC = () => { label="Transcodes (MP4 conversions of unsupported video formats)" onChange={() => setTranscodes(!transcodes)} /> - setThumbnails(!thumbnails)} - /> + + + ); +}; + +interface IStashConfigurationProps { + stashes: GQL.StashConfig[]; + setStashes: (v: GQL.StashConfig[]) => void; +} + +export const StashConfiguration: React.FC = ({ + stashes, + setStashes, +}) => { + const [isDisplayingDialog, setIsDisplayingDialog] = useState(false); + + const handleSave = (index: number, stash: GQL.StashConfig) => + setStashes(stashes.map((s, i) => (i === index ? stash : s))); + const handleDelete = (index: number) => + setStashes(stashes.filter((s, i) => i !== index)); + const handleAdd = (folder?: string) => { + setIsDisplayingDialog(false); + + if (!folder) { + return; + } + + setStashes([ + ...stashes, + { + path: folder, + excludeImage: false, + excludeVideo: false, + }, + ]); + }; + + function maybeRenderDialog() { + if (!isDisplayingDialog) { + return; + } + + return ; + } + + return ( + <> + {maybeRenderDialog()} + + {stashes.length > 0 && ( + +
Path
+
Exclude Video
+
Exclude Image
+
+ )} + {stashes.map((stash, index) => ( + handleSave(index, s)} + onDelete={() => handleDelete(index)} + key={stash.path} + /> + ))} + +
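// The per-stash element rendered by stashes.map() above lost its tags; judging from
// the Path / Exclude Video / Exclude Image headings and the stash/onSave/onDelete
// props it receives, each row is roughly the following (component name, grid layout
// and button label are assumptions):
import React from "react";
import { Button, Col, Form, Row } from "react-bootstrap";
import * as GQL from "src/core/generated-graphql";

interface IStashRowProps {
  stash: GQL.StashConfig;
  onSave: (s: GQL.StashConfig) => void;
  onDelete: () => void;
}

export const StashRowSketch: React.FC<IStashRowProps> = ({ stash, onSave, onDelete }) => (
  <Row className="align-items-center">
    <Col md={6}>{stash.path}</Col>
    <Col md={2}>
      <Form.Check
        checked={stash.excludeVideo}
        onChange={() => onSave({ ...stash, excludeVideo: !stash.excludeVideo })}
      />
    </Col>
    <Col md={2}>
      <Form.Check
        checked={stash.excludeImage}
        onChange={() => onSave({ ...stash, excludeImage: !stash.excludeImage })}
      />
    </Col>
    <Col md={2}>
      <Button variant="danger" onClick={onDelete}>
        Remove
      </Button>
    </Col>
  </Row>
);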
+ + ); +}; + +export default StashConfiguration; diff --git a/ui/v2.5/src/components/Shared/FolderSelect/FolderSelect.tsx b/ui/v2.5/src/components/Shared/FolderSelect/FolderSelect.tsx index 9f5c4286b..e85a5446e 100644 --- a/ui/v2.5/src/components/Shared/FolderSelect/FolderSelect.tsx +++ b/ui/v2.5/src/components/Shared/FolderSelect/FolderSelect.tsx @@ -5,20 +5,13 @@ import { LoadingIndicator } from "src/components/Shared"; import { useDirectory } from "src/core/StashService"; interface IProps { - directories: string[]; - onDirectoriesChanged: (directories: string[]) => void; + onClose: (directory?: string) => void; } export const FolderSelect: React.FC = (props: IProps) => { const [currentDirectory, setCurrentDirectory] = useState(""); - const [isDisplayingDialog, setIsDisplayingDialog] = useState(false); - const [selectedDirectories, setSelectedDirectories] = useState([]); const { data, error, loading } = useDirectory(currentDirectory); - useEffect(() => { - setSelectedDirectories(props.directories); - }, [props.directories]); - useEffect(() => { if (currentDirectory === "" && data?.directory.path) setCurrentDirectory(data.directory.path); @@ -26,22 +19,6 @@ export const FolderSelect: React.FC = (props: IProps) => { const selectableDirectories: string[] = data?.directory.directories ?? []; - function onSelectDirectory() { - selectedDirectories.push(currentDirectory); - setSelectedDirectories(selectedDirectories); - setCurrentDirectory(""); - setIsDisplayingDialog(false); - props.onDirectoriesChanged(selectedDirectories); - } - - function onRemoveDirectory(directory: string) { - const newSelectedDirectories = selectedDirectories.filter( - (dir) => dir !== directory - ); - setSelectedDirectories(newSelectedDirectories); - props.onDirectoriesChanged(newSelectedDirectories); - } - const topDirectory = data?.directory?.parent ? (
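// FolderSelect is now a one-shot picker: per the new IProps above it reports the
// chosen directory (or undefined on cancel) through onClose, matching how
// StashConfiguration's handleAdd(folder?) consumes the result. A small usage sketch
// (component name and button label are illustrative):
import React, { useState } from "react";
import { Button } from "react-bootstrap";
import { FolderSelect } from "src/components/Shared/FolderSelect/FolderSelect";

export const PickFolderSketch: React.FC<{ onPicked: (dir: string) => void }> = ({
  onPicked,
}) => {
  const [isPicking, setIsPicking] = useState(false);

  return (
    <>
      {isPicking && (
        <FolderSelect
          onClose={(directory) => {
            setIsPicking(false);
            if (directory) onPicked(directory);
          }}
        />
      )}
      <Button variant="secondary" onClick={() => setIsPicking(true)}>
        Add directory
      </Button>
    </>
  );
};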
  • ) : null; - function renderDialog() { - return ( - setIsDisplayingDialog(false)} - title="" - > - Select Directory - -
    - - ) => - setCurrentDirectory(e.currentTarget.value) - } - value={currentDirectory} - spellCheck={false} - /> - - {!data || !data.directory || loading ? ( - - ) : ( - "" - )} - - -
      - {topDirectory} - {selectableDirectories.map((path) => { - return ( -
    • - -
    • - ); - })} -
    -
    -
    - - - -
    - ); - } - return ( - <> - {error ?

    {error.message}

    : ""} - {renderDialog()} - - {selectedDirectories.map((path) => { - return ( -
    - {path}{" "} - -
    - ); - })} -
    - - - + props.onClose()} title=""> + Select Directory + +
    + {error ?

    {error.message}

    : ""} + + ) => + setCurrentDirectory(e.currentTarget.value) + } + value={currentDirectory} + spellCheck={false} + /> + + {!data || !data.directory || loading ? ( + + ) : ( + "" + )} + + +
      + {topDirectory} + {selectableDirectories.map((path) => { + return ( +
    • + +
    • + ); + })} +
    +
    +
    + + + +
    ); }; diff --git a/ui/v2.5/src/components/Shared/Select.tsx b/ui/v2.5/src/components/Shared/Select.tsx index 9c284bce8..3d26fdea7 100644 --- a/ui/v2.5/src/components/Shared/Select.tsx +++ b/ui/v2.5/src/components/Shared/Select.tsx @@ -95,7 +95,7 @@ export const SceneGallerySelect: React.FC = (props) => { const items = (galleries.length > 0 ? [{ path: "None", id: "0" }, ...galleries] : [] - ).map((g) => ({ label: g.path, value: g.id })); + ).map((g) => ({ label: g.title ?? "", value: g.id })); const onChange = (selectedItems: ValueType