mirror of https://github.com/stashapp/stash.git
synced 2025-12-17 20:34:37 +03:00

[Files Refactor] Performance tuning (#2865)

* Don't load image files by default
* Don't load gallery files by default
* Don't load scene files by default
* Retry locked transactions forever
* Don't show release notes if config not loaded
* Don't translate path slashes in export
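Note: the heart of this change is that scene, image, and gallery models no longer load their file records eagerly; resolvers fetch them on demand through per-request dataloaders, which coalesce the per-object lookups of a single GraphQL response into a few batched queries. A minimal, self-contained sketch of the batching idea follows (illustrative names only, not stash APIs; the real loaders are generated by dataloaden, below):

package main

import (
    "fmt"
    "sync"
    "time"
)

// batchLoader coalesces Load(id) calls made within a short window into a
// single call to fetch, which is what the generated loaders below do.
type batchLoader struct {
    fetch func(ids []int) ([]string, []error)
    wait  time.Duration

    mu    sync.Mutex
    batch *batch
}

type batch struct {
    ids  []int
    data []string
    errs []error
    done chan struct{}
}

func (l *batchLoader) Load(id int) (string, error) {
    l.mu.Lock()
    if l.batch == nil {
        b := &batch{done: make(chan struct{})}
        l.batch = b
        go func() { // the first key in a window starts the flush timer
            time.Sleep(l.wait)
            l.mu.Lock()
            l.batch = nil // detach so later Loads start a fresh batch
            l.mu.Unlock()
            b.data, b.errs = l.fetch(b.ids)
            close(b.done)
        }()
    }
    b := l.batch
    pos := len(b.ids)
    b.ids = append(b.ids, id)
    l.mu.Unlock()

    <-b.done // block until the whole batch has been fetched
    if b.errs != nil {
        return "", b.errs[pos]
    }
    return b.data[pos], nil
}

func main() {
    l := &batchLoader{
        wait: 2 * time.Millisecond,
        fetch: func(ids []int) ([]string, []error) {
            fmt.Println("one fetch for", ids) // e.g. one SQL IN (...) query
            out := make([]string, len(ids))
            for i, id := range ids {
                out[i] = fmt.Sprintf("file-%d", id)
            }
            return out, nil
        },
    }
    var wg sync.WaitGroup
    for i := 0; i < 5; i++ {
        wg.Add(1)
        go func(id int) {
            defer wg.Done()
            v, _ := l.Load(id) // five concurrent loads coalesce into (typically) one fetch
            _ = v
        }(i)
    }
    wg.Wait()
}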
@@ -5,6 +5,10 @@
//go:generate go run -mod=vendor github.com/vektah/dataloaden StudioLoader int *github.com/stashapp/stash/pkg/models.Studio
//go:generate go run -mod=vendor github.com/vektah/dataloaden TagLoader int *github.com/stashapp/stash/pkg/models.Tag
//go:generate go run -mod=vendor github.com/vektah/dataloaden MovieLoader int *github.com/stashapp/stash/pkg/models.Movie
+//go:generate go run -mod=vendor github.com/vektah/dataloaden FileLoader github.com/stashapp/stash/pkg/file.ID github.com/stashapp/stash/pkg/file.File
+//go:generate go run -mod=vendor github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/file.ID
+//go:generate go run -mod=vendor github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/file.ID
+//go:generate go run -mod=vendor github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/file.ID

package loaders
@@ -14,6 +18,7 @@ import (
    "time"

    "github.com/stashapp/stash/internal/manager"
+   "github.com/stashapp/stash/pkg/file"
    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/txn"
)
@@ -31,12 +36,17 @@ const (

type Loaders struct {
    SceneByID *SceneLoader
+   SceneFiles   *SceneFileIDsLoader
+   ImageFiles   *ImageFileIDsLoader
+   GalleryFiles *GalleryFileIDsLoader
+
    GalleryByID   *GalleryLoader
    ImageByID     *ImageLoader
    PerformerByID *PerformerLoader
    StudioByID    *StudioLoader
    TagByID       *TagLoader
    MovieByID     *MovieLoader
+   FileByID      *FileLoader
}

type Middleware struct {
@@ -83,6 +93,26 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
            maxBatch: maxBatch,
            fetch:    m.fetchMovies(ctx),
        },
+       FileByID: &FileLoader{
+           wait:     wait,
+           maxBatch: maxBatch,
+           fetch:    m.fetchFiles(ctx),
+       },
+       SceneFiles: &SceneFileIDsLoader{
+           wait:     wait,
+           maxBatch: maxBatch,
+           fetch:    m.fetchScenesFileIDs(ctx),
+       },
+       ImageFiles: &ImageFileIDsLoader{
+           wait:     wait,
+           maxBatch: maxBatch,
+           fetch:    m.fetchImagesFileIDs(ctx),
+       },
+       GalleryFiles: &GalleryFileIDsLoader{
+           wait:     wait,
+           maxBatch: maxBatch,
+           fetch:    m.fetchGalleriesFileIDs(ctx),
+       },
    }

    newCtx := context.WithValue(r.Context(), loadersCtxKey, ldrs)
@@ -185,3 +215,47 @@ func (m Middleware) fetchMovies(ctx context.Context) func(keys []int) ([]*models
        return ret, toErrorSlice(err)
    }
}
+
+func (m Middleware) fetchFiles(ctx context.Context) func(keys []file.ID) ([]file.File, []error) {
+   return func(keys []file.ID) (ret []file.File, errs []error) {
+       err := m.withTxn(ctx, func(ctx context.Context) error {
+           var err error
+           ret, err = m.Repository.File.Find(ctx, keys...)
+           return err
+       })
+       return ret, toErrorSlice(err)
+   }
+}
+
+func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) {
+   return func(keys []int) (ret [][]file.ID, errs []error) {
+       err := m.withTxn(ctx, func(ctx context.Context) error {
+           var err error
+           ret, err = m.Repository.Scene.GetManyFileIDs(ctx, keys)
+           return err
+       })
+       return ret, toErrorSlice(err)
+   }
+}
+
+func (m Middleware) fetchImagesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) {
+   return func(keys []int) (ret [][]file.ID, errs []error) {
+       err := m.withTxn(ctx, func(ctx context.Context) error {
+           var err error
+           ret, err = m.Repository.Image.GetManyFileIDs(ctx, keys)
+           return err
+       })
+       return ret, toErrorSlice(err)
+   }
+}
+
+func (m Middleware) fetchGalleriesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) {
+   return func(keys []int) (ret [][]file.ID, errs []error) {
+       err := m.withTxn(ctx, func(ctx context.Context) error {
+           var err error
+           ret, err = m.Repository.Gallery.GetManyFileIDs(ctx, keys)
+           return err
+       })
+       return ret, toErrorSlice(err)
+   }
+}
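Note: toErrorSlice is not part of this diff. Given that the generated loaders treat a single-element error slice as applying to every key in the batch (see fileloader_gen.go below), its likely shape is:

// Presumed helper, defined elsewhere in this package: map the one
// transaction error onto the []error shape the dataloaden fetch expects.
func toErrorSlice(err error) []error {
    if err != nil {
        return []error{err}
    }
    return nil
}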
internal/api/loaders/fileloader_gen.go (new file)
@@ -0,0 +1,221 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.

package loaders

import (
    "sync"
    "time"

    "github.com/stashapp/stash/pkg/file"
)

// FileLoaderConfig captures the config to create a new FileLoader
type FileLoaderConfig struct {
    // Fetch is a method that provides the data for the loader
    Fetch func(keys []file.ID) ([]file.File, []error)

    // Wait is how long to wait before sending a batch
    Wait time.Duration

    // MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
    MaxBatch int
}

// NewFileLoader creates a new FileLoader given a fetch, wait, and maxBatch
func NewFileLoader(config FileLoaderConfig) *FileLoader {
    return &FileLoader{
        fetch:    config.Fetch,
        wait:     config.Wait,
        maxBatch: config.MaxBatch,
    }
}

// FileLoader batches and caches requests
type FileLoader struct {
    // this method provides the data for the loader
    fetch func(keys []file.ID) ([]file.File, []error)

    // how long to wait before sending a batch
    wait time.Duration

    // this will limit the maximum number of keys to send in one batch, 0 = no limit
    maxBatch int

    // INTERNAL

    // lazily created cache
    cache map[file.ID]file.File

    // the current batch. keys will continue to be collected until timeout is hit,
    // then everything will be sent to the fetch method and out to the listeners
    batch *fileLoaderBatch

    // mutex to prevent races
    mu sync.Mutex
}

type fileLoaderBatch struct {
    keys    []file.ID
    data    []file.File
    error   []error
    closing bool
    done    chan struct{}
}

// Load a File by key, batching and caching will be applied automatically
func (l *FileLoader) Load(key file.ID) (file.File, error) {
    return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for a File.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *FileLoader) LoadThunk(key file.ID) func() (file.File, error) {
    l.mu.Lock()
    if it, ok := l.cache[key]; ok {
        l.mu.Unlock()
        return func() (file.File, error) {
            return it, nil
        }
    }
    if l.batch == nil {
        l.batch = &fileLoaderBatch{done: make(chan struct{})}
    }
    batch := l.batch
    pos := batch.keyIndex(l, key)
    l.mu.Unlock()

    return func() (file.File, error) {
        <-batch.done

        var data file.File
        if pos < len(batch.data) {
            data = batch.data[pos]
        }

        var err error
        // it's convenient to be able to return a single error for everything
        if len(batch.error) == 1 {
            err = batch.error[0]
        } else if batch.error != nil {
            err = batch.error[pos]
        }

        if err == nil {
            l.mu.Lock()
            l.unsafeSet(key, data)
            l.mu.Unlock()
        }

        return data, err
    }
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *FileLoader) LoadAll(keys []file.ID) ([]file.File, []error) {
    results := make([]func() (file.File, error), len(keys))

    for i, key := range keys {
        results[i] = l.LoadThunk(key)
    }

    files := make([]file.File, len(keys))
    errors := make([]error, len(keys))
    for i, thunk := range results {
        files[i], errors[i] = thunk()
    }
    return files, errors
}

// LoadAllThunk returns a function that when called will block waiting for the Files.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *FileLoader) LoadAllThunk(keys []file.ID) func() ([]file.File, []error) {
    results := make([]func() (file.File, error), len(keys))
    for i, key := range keys {
        results[i] = l.LoadThunk(key)
    }
    return func() ([]file.File, []error) {
        files := make([]file.File, len(keys))
        errors := make([]error, len(keys))
        for i, thunk := range results {
            files[i], errors[i] = thunk()
        }
        return files, errors
    }
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *FileLoader) Prime(key file.ID, value file.File) bool {
    l.mu.Lock()
    var found bool
    if _, found = l.cache[key]; !found {
        l.unsafeSet(key, value)
    }
    l.mu.Unlock()
    return !found
}

// Clear the value at key from the cache, if it exists
func (l *FileLoader) Clear(key file.ID) {
    l.mu.Lock()
    delete(l.cache, key)
    l.mu.Unlock()
}

func (l *FileLoader) unsafeSet(key file.ID, value file.File) {
    if l.cache == nil {
        l.cache = map[file.ID]file.File{}
    }
    l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *fileLoaderBatch) keyIndex(l *FileLoader, key file.ID) int {
    for i, existingKey := range b.keys {
        if key == existingKey {
            return i
        }
    }

    pos := len(b.keys)
    b.keys = append(b.keys, key)
    if pos == 0 {
        go b.startTimer(l)
    }

    if l.maxBatch != 0 && pos >= l.maxBatch-1 {
        if !b.closing {
            b.closing = true
            l.batch = nil
            go b.end(l)
        }
    }

    return pos
}

func (b *fileLoaderBatch) startTimer(l *FileLoader) {
    time.Sleep(l.wait)
    l.mu.Lock()

    // we must have hit a batch limit and are already finalizing this batch
    if b.closing {
        l.mu.Unlock()
        return
    }

    l.batch = nil
    l.mu.Unlock()

    b.end(l)
}

func (b *fileLoaderBatch) end(l *FileLoader) {
    b.data, b.error = l.fetch(b.keys)
    close(b.done)
}
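Note: all four generated loaders expose the same API; only the key and value types differ. A sketch of standalone construction, assuming a repository value with the same Find method used by fetchFiles above (in practice the middleware wires this up per request, and the real wait/maxBatch constants are not shown in this diff):

// Illustrative wiring only; Repository here stands in for the type behind
// m.Repository above, and both configuration values are assumed.
func newStandaloneFileLoader(ctx context.Context, repo Repository) *FileLoader {
    return NewFileLoader(FileLoaderConfig{
        Wait:     2 * time.Millisecond, // batching window (assumed value)
        MaxBatch: 100,                  // flush early after 100 distinct keys (assumed value)
        Fetch: func(keys []file.ID) ([]file.File, []error) {
            ret, err := repo.File.Find(ctx, keys...) // one query per batch
            return ret, toErrorSlice(err)
        },
    })
}

Load blocks until the batch containing the key has been fetched; LoadThunk queues the key immediately and defers the wait until the thunk is called.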
internal/api/loaders/galleryfileidsloader_gen.go (new file)
@@ -0,0 +1,225 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.

package loaders

import (
    "sync"
    "time"

    "github.com/stashapp/stash/pkg/file"
)

// GalleryFileIDsLoaderConfig captures the config to create a new GalleryFileIDsLoader
type GalleryFileIDsLoaderConfig struct {
    // Fetch is a method that provides the data for the loader
    Fetch func(keys []int) ([][]file.ID, []error)

    // Wait is how long to wait before sending a batch
    Wait time.Duration

    // MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
    MaxBatch int
}

// NewGalleryFileIDsLoader creates a new GalleryFileIDsLoader given a fetch, wait, and maxBatch
func NewGalleryFileIDsLoader(config GalleryFileIDsLoaderConfig) *GalleryFileIDsLoader {
    return &GalleryFileIDsLoader{
        fetch:    config.Fetch,
        wait:     config.Wait,
        maxBatch: config.MaxBatch,
    }
}

// GalleryFileIDsLoader batches and caches requests
type GalleryFileIDsLoader struct {
    // this method provides the data for the loader
    fetch func(keys []int) ([][]file.ID, []error)

    // how long to wait before sending a batch
    wait time.Duration

    // this will limit the maximum number of keys to send in one batch, 0 = no limit
    maxBatch int

    // INTERNAL

    // lazily created cache
    cache map[int][]file.ID

    // the current batch. keys will continue to be collected until timeout is hit,
    // then everything will be sent to the fetch method and out to the listeners
    batch *galleryFileIDsLoaderBatch

    // mutex to prevent races
    mu sync.Mutex
}

type galleryFileIDsLoaderBatch struct {
    keys    []int
    data    [][]file.ID
    error   []error
    closing bool
    done    chan struct{}
}

// Load an ID by key, batching and caching will be applied automatically
func (l *GalleryFileIDsLoader) Load(key int) ([]file.ID, error) {
    return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for an ID.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *GalleryFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) {
    l.mu.Lock()
    if it, ok := l.cache[key]; ok {
        l.mu.Unlock()
        return func() ([]file.ID, error) {
            return it, nil
        }
    }
    if l.batch == nil {
        l.batch = &galleryFileIDsLoaderBatch{done: make(chan struct{})}
    }
    batch := l.batch
    pos := batch.keyIndex(l, key)
    l.mu.Unlock()

    return func() ([]file.ID, error) {
        <-batch.done

        var data []file.ID
        if pos < len(batch.data) {
            data = batch.data[pos]
        }

        var err error
        // it's convenient to be able to return a single error for everything
        if len(batch.error) == 1 {
            err = batch.error[0]
        } else if batch.error != nil {
            err = batch.error[pos]
        }

        if err == nil {
            l.mu.Lock()
            l.unsafeSet(key, data)
            l.mu.Unlock()
        }

        return data, err
    }
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *GalleryFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) {
    results := make([]func() ([]file.ID, error), len(keys))

    for i, key := range keys {
        results[i] = l.LoadThunk(key)
    }

    iDs := make([][]file.ID, len(keys))
    errors := make([]error, len(keys))
    for i, thunk := range results {
        iDs[i], errors[i] = thunk()
    }
    return iDs, errors
}

// LoadAllThunk returns a function that when called will block waiting for the IDs.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *GalleryFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) {
    results := make([]func() ([]file.ID, error), len(keys))
    for i, key := range keys {
        results[i] = l.LoadThunk(key)
    }
    return func() ([][]file.ID, []error) {
        iDs := make([][]file.ID, len(keys))
        errors := make([]error, len(keys))
        for i, thunk := range results {
            iDs[i], errors[i] = thunk()
        }
        return iDs, errors
    }
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *GalleryFileIDsLoader) Prime(key int, value []file.ID) bool {
    l.mu.Lock()
    var found bool
    if _, found = l.cache[key]; !found {
        // make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
        // and end up with the whole cache pointing to the same value.
        cpy := make([]file.ID, len(value))
        copy(cpy, value)
        l.unsafeSet(key, cpy)
    }
    l.mu.Unlock()
    return !found
}

// Clear the value at key from the cache, if it exists
func (l *GalleryFileIDsLoader) Clear(key int) {
    l.mu.Lock()
    delete(l.cache, key)
    l.mu.Unlock()
}

func (l *GalleryFileIDsLoader) unsafeSet(key int, value []file.ID) {
    if l.cache == nil {
        l.cache = map[int][]file.ID{}
    }
    l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *galleryFileIDsLoaderBatch) keyIndex(l *GalleryFileIDsLoader, key int) int {
    for i, existingKey := range b.keys {
        if key == existingKey {
            return i
        }
    }

    pos := len(b.keys)
    b.keys = append(b.keys, key)
    if pos == 0 {
        go b.startTimer(l)
    }

    if l.maxBatch != 0 && pos >= l.maxBatch-1 {
        if !b.closing {
            b.closing = true
            l.batch = nil
            go b.end(l)
        }
    }

    return pos
}

func (b *galleryFileIDsLoaderBatch) startTimer(l *GalleryFileIDsLoader) {
    time.Sleep(l.wait)
    l.mu.Lock()

    // we must have hit a batch limit and are already finalizing this batch
    if b.closing {
        l.mu.Unlock()
        return
    }

    l.batch = nil
    l.mu.Unlock()

    b.end(l)
}

func (b *galleryFileIDsLoaderBatch) end(l *GalleryFileIDsLoader) {
    b.data, b.error = l.fetch(b.keys)
    close(b.done)
}
internal/api/loaders/imagefileidsloader_gen.go (new file)
@@ -0,0 +1,225 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.

package loaders

import (
    "sync"
    "time"

    "github.com/stashapp/stash/pkg/file"
)

// ImageFileIDsLoaderConfig captures the config to create a new ImageFileIDsLoader
type ImageFileIDsLoaderConfig struct {
    // Fetch is a method that provides the data for the loader
    Fetch func(keys []int) ([][]file.ID, []error)

    // Wait is how long to wait before sending a batch
    Wait time.Duration

    // MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
    MaxBatch int
}

// NewImageFileIDsLoader creates a new ImageFileIDsLoader given a fetch, wait, and maxBatch
func NewImageFileIDsLoader(config ImageFileIDsLoaderConfig) *ImageFileIDsLoader {
    return &ImageFileIDsLoader{
        fetch:    config.Fetch,
        wait:     config.Wait,
        maxBatch: config.MaxBatch,
    }
}

// ImageFileIDsLoader batches and caches requests
type ImageFileIDsLoader struct {
    // this method provides the data for the loader
    fetch func(keys []int) ([][]file.ID, []error)

    // how long to wait before sending a batch
    wait time.Duration

    // this will limit the maximum number of keys to send in one batch, 0 = no limit
    maxBatch int

    // INTERNAL

    // lazily created cache
    cache map[int][]file.ID

    // the current batch. keys will continue to be collected until timeout is hit,
    // then everything will be sent to the fetch method and out to the listeners
    batch *imageFileIDsLoaderBatch

    // mutex to prevent races
    mu sync.Mutex
}

type imageFileIDsLoaderBatch struct {
    keys    []int
    data    [][]file.ID
    error   []error
    closing bool
    done    chan struct{}
}

// Load an ID by key, batching and caching will be applied automatically
func (l *ImageFileIDsLoader) Load(key int) ([]file.ID, error) {
    return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for an ID.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ImageFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) {
    l.mu.Lock()
    if it, ok := l.cache[key]; ok {
        l.mu.Unlock()
        return func() ([]file.ID, error) {
            return it, nil
        }
    }
    if l.batch == nil {
        l.batch = &imageFileIDsLoaderBatch{done: make(chan struct{})}
    }
    batch := l.batch
    pos := batch.keyIndex(l, key)
    l.mu.Unlock()

    return func() ([]file.ID, error) {
        <-batch.done

        var data []file.ID
        if pos < len(batch.data) {
            data = batch.data[pos]
        }

        var err error
        // it's convenient to be able to return a single error for everything
        if len(batch.error) == 1 {
            err = batch.error[0]
        } else if batch.error != nil {
            err = batch.error[pos]
        }

        if err == nil {
            l.mu.Lock()
            l.unsafeSet(key, data)
            l.mu.Unlock()
        }

        return data, err
    }
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *ImageFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) {
    results := make([]func() ([]file.ID, error), len(keys))

    for i, key := range keys {
        results[i] = l.LoadThunk(key)
    }

    iDs := make([][]file.ID, len(keys))
    errors := make([]error, len(keys))
    for i, thunk := range results {
        iDs[i], errors[i] = thunk()
    }
    return iDs, errors
}

// LoadAllThunk returns a function that when called will block waiting for the IDs.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ImageFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) {
    results := make([]func() ([]file.ID, error), len(keys))
    for i, key := range keys {
        results[i] = l.LoadThunk(key)
    }
    return func() ([][]file.ID, []error) {
        iDs := make([][]file.ID, len(keys))
        errors := make([]error, len(keys))
        for i, thunk := range results {
            iDs[i], errors[i] = thunk()
        }
        return iDs, errors
    }
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *ImageFileIDsLoader) Prime(key int, value []file.ID) bool {
    l.mu.Lock()
    var found bool
    if _, found = l.cache[key]; !found {
        // make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
        // and end up with the whole cache pointing to the same value.
        cpy := make([]file.ID, len(value))
        copy(cpy, value)
        l.unsafeSet(key, cpy)
    }
    l.mu.Unlock()
    return !found
}

// Clear the value at key from the cache, if it exists
func (l *ImageFileIDsLoader) Clear(key int) {
    l.mu.Lock()
    delete(l.cache, key)
    l.mu.Unlock()
}

func (l *ImageFileIDsLoader) unsafeSet(key int, value []file.ID) {
    if l.cache == nil {
        l.cache = map[int][]file.ID{}
    }
    l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *imageFileIDsLoaderBatch) keyIndex(l *ImageFileIDsLoader, key int) int {
    for i, existingKey := range b.keys {
        if key == existingKey {
            return i
        }
    }

    pos := len(b.keys)
    b.keys = append(b.keys, key)
    if pos == 0 {
        go b.startTimer(l)
    }

    if l.maxBatch != 0 && pos >= l.maxBatch-1 {
        if !b.closing {
            b.closing = true
            l.batch = nil
            go b.end(l)
        }
    }

    return pos
}

func (b *imageFileIDsLoaderBatch) startTimer(l *ImageFileIDsLoader) {
    time.Sleep(l.wait)
    l.mu.Lock()

    // we must have hit a batch limit and are already finalizing this batch
    if b.closing {
        l.mu.Unlock()
        return
    }

    l.batch = nil
    l.mu.Unlock()

    b.end(l)
}

func (b *imageFileIDsLoaderBatch) end(l *ImageFileIDsLoader) {
    b.data, b.error = l.fetch(b.keys)
    close(b.done)
}
internal/api/loaders/scenefileidsloader_gen.go (new file)
@@ -0,0 +1,225 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.

package loaders

import (
    "sync"
    "time"

    "github.com/stashapp/stash/pkg/file"
)

// SceneFileIDsLoaderConfig captures the config to create a new SceneFileIDsLoader
type SceneFileIDsLoaderConfig struct {
    // Fetch is a method that provides the data for the loader
    Fetch func(keys []int) ([][]file.ID, []error)

    // Wait is how long to wait before sending a batch
    Wait time.Duration

    // MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
    MaxBatch int
}

// NewSceneFileIDsLoader creates a new SceneFileIDsLoader given a fetch, wait, and maxBatch
func NewSceneFileIDsLoader(config SceneFileIDsLoaderConfig) *SceneFileIDsLoader {
    return &SceneFileIDsLoader{
        fetch:    config.Fetch,
        wait:     config.Wait,
        maxBatch: config.MaxBatch,
    }
}

// SceneFileIDsLoader batches and caches requests
type SceneFileIDsLoader struct {
    // this method provides the data for the loader
    fetch func(keys []int) ([][]file.ID, []error)

    // how long to wait before sending a batch
    wait time.Duration

    // this will limit the maximum number of keys to send in one batch, 0 = no limit
    maxBatch int

    // INTERNAL

    // lazily created cache
    cache map[int][]file.ID

    // the current batch. keys will continue to be collected until timeout is hit,
    // then everything will be sent to the fetch method and out to the listeners
    batch *sceneFileIDsLoaderBatch

    // mutex to prevent races
    mu sync.Mutex
}

type sceneFileIDsLoaderBatch struct {
    keys    []int
    data    [][]file.ID
    error   []error
    closing bool
    done    chan struct{}
}

// Load an ID by key, batching and caching will be applied automatically
func (l *SceneFileIDsLoader) Load(key int) ([]file.ID, error) {
    return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for an ID.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) {
    l.mu.Lock()
    if it, ok := l.cache[key]; ok {
        l.mu.Unlock()
        return func() ([]file.ID, error) {
            return it, nil
        }
    }
    if l.batch == nil {
        l.batch = &sceneFileIDsLoaderBatch{done: make(chan struct{})}
    }
    batch := l.batch
    pos := batch.keyIndex(l, key)
    l.mu.Unlock()

    return func() ([]file.ID, error) {
        <-batch.done

        var data []file.ID
        if pos < len(batch.data) {
            data = batch.data[pos]
        }

        var err error
        // it's convenient to be able to return a single error for everything
        if len(batch.error) == 1 {
            err = batch.error[0]
        } else if batch.error != nil {
            err = batch.error[pos]
        }

        if err == nil {
            l.mu.Lock()
            l.unsafeSet(key, data)
            l.mu.Unlock()
        }

        return data, err
    }
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *SceneFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) {
    results := make([]func() ([]file.ID, error), len(keys))

    for i, key := range keys {
        results[i] = l.LoadThunk(key)
    }

    iDs := make([][]file.ID, len(keys))
    errors := make([]error, len(keys))
    for i, thunk := range results {
        iDs[i], errors[i] = thunk()
    }
    return iDs, errors
}

// LoadAllThunk returns a function that when called will block waiting for the IDs.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) {
    results := make([]func() ([]file.ID, error), len(keys))
    for i, key := range keys {
        results[i] = l.LoadThunk(key)
    }
    return func() ([][]file.ID, []error) {
        iDs := make([][]file.ID, len(keys))
        errors := make([]error, len(keys))
        for i, thunk := range results {
            iDs[i], errors[i] = thunk()
        }
        return iDs, errors
    }
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *SceneFileIDsLoader) Prime(key int, value []file.ID) bool {
    l.mu.Lock()
    var found bool
    if _, found = l.cache[key]; !found {
        // make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
        // and end up with the whole cache pointing to the same value.
        cpy := make([]file.ID, len(value))
        copy(cpy, value)
        l.unsafeSet(key, cpy)
    }
    l.mu.Unlock()
    return !found
}

// Clear the value at key from the cache, if it exists
func (l *SceneFileIDsLoader) Clear(key int) {
    l.mu.Lock()
    delete(l.cache, key)
    l.mu.Unlock()
}

func (l *SceneFileIDsLoader) unsafeSet(key int, value []file.ID) {
    if l.cache == nil {
        l.cache = map[int][]file.ID{}
    }
    l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *sceneFileIDsLoaderBatch) keyIndex(l *SceneFileIDsLoader, key int) int {
    for i, existingKey := range b.keys {
        if key == existingKey {
            return i
        }
    }

    pos := len(b.keys)
    b.keys = append(b.keys, key)
    if pos == 0 {
        go b.startTimer(l)
    }

    if l.maxBatch != 0 && pos >= l.maxBatch-1 {
        if !b.closing {
            b.closing = true
            l.batch = nil
            go b.end(l)
        }
    }

    return pos
}

func (b *sceneFileIDsLoaderBatch) startTimer(l *SceneFileIDsLoader) {
    time.Sleep(l.wait)
    l.mu.Lock()

    // we must have hit a batch limit and are already finalizing this batch
    if b.closing {
        l.mu.Unlock()
        return
    }

    l.batch = nil
    l.mu.Unlock()

    b.end(l)
}

func (b *sceneFileIDsLoaderBatch) end(l *SceneFileIDsLoader) {
    b.data, b.error = l.fetch(b.keys)
    close(b.done)
}
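Note: the flush lifecycle is identical in all four loaders and worth tracing once. keyIndex dedupes keys within the current batch; the first key spawns startTimer; the batch is flushed either when wait elapses or as soon as maxBatch distinct keys accumulate, whichever happens first:

// Condensed timeline of the code above (illustration, not additional code):
//
//   t=0       Load(a)  -> new batch{a}, go startTimer(l)
//   t+0.3ms   Load(b)  -> batch{a, b}
//   t+0.5ms   Load(a)  -> dedup: returns index 0, no new key
//   t+wait    startTimer -> l.batch = nil; end(): fetch([a, b]); close(done)
//
// If maxBatch keys arrive before the timer fires, keyIndex sets closing,
// detaches the batch, and calls end() immediately; when the timer later
// wakes it sees closing == true and returns without fetching again.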
@@ -12,10 +12,38 @@ import (
    "github.com/stashapp/stash/pkg/models"
)

-func (r *galleryResolver) Files(ctx context.Context, obj *models.Gallery) ([]*GalleryFile, error) {
-   ret := make([]*GalleryFile, len(obj.Files))
-
-   for i, f := range obj.Files {
+func (r *galleryResolver) getPrimaryFile(ctx context.Context, obj *models.Gallery) (file.File, error) {
+   if obj.PrimaryFileID != nil {
+       f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID)
+       if err != nil {
+           return nil, err
+       }
+
+       return f, nil
+   }
+
+   return nil, nil
+}
+
+func (r *galleryResolver) getFiles(ctx context.Context, obj *models.Gallery) ([]file.File, error) {
+   fileIDs, err := loaders.From(ctx).GalleryFiles.Load(obj.ID)
+   if err != nil {
+       return nil, err
+   }
+
+   files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs)
+   return files, firstError(errs)
+}
+
+func (r *galleryResolver) Files(ctx context.Context, obj *models.Gallery) ([]*GalleryFile, error) {
+   files, err := r.getFiles(ctx, obj)
+   if err != nil {
+       return nil, err
+   }
+
+   ret := make([]*GalleryFile, len(files))
+
+   for i, f := range files {
        base := f.Base()
        ret[i] = &GalleryFile{
            ID: strconv.Itoa(int(base.ID)),

@@ -84,7 +112,10 @@ func (r *galleryResolver) Folder(ctx context.Context, obj *models.Gallery) (*Fol
}

func (r *galleryResolver) FileModTime(ctx context.Context, obj *models.Gallery) (*time.Time, error) {
-   f := obj.PrimaryFile()
+   f, err := r.getPrimaryFile(ctx, obj)
+   if err != nil {
+       return nil, err
+   }
    if f != nil {
        return &f.Base().ModTime, nil
    }
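Note: firstError, used by the new getFiles helpers, is also not defined in this diff; presumably something like:

// Presumed helper: reduce the per-key []error from LoadAll to the first
// non-nil error, or nil when every key succeeded.
func firstError(errs []error) error {
    for _, e := range errs {
        if e != nil {
            return e
        }
    }
    return nil
}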
@@ -2,21 +2,69 @@ package api

import (
    "context"
    "fmt"
    "strconv"
    "time"

    "github.com/stashapp/stash/internal/api/loaders"
    "github.com/stashapp/stash/internal/api/urlbuilders"
    "github.com/stashapp/stash/pkg/file"
    "github.com/stashapp/stash/pkg/models"
)

+func (r *imageResolver) getPrimaryFile(ctx context.Context, obj *models.Image) (*file.ImageFile, error) {
+   if obj.PrimaryFileID != nil {
+       f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID)
+       if err != nil {
+           return nil, err
+       }
+
+       ret, ok := f.(*file.ImageFile)
+       if !ok {
+           return nil, fmt.Errorf("file %T is not an image file", f)
+       }
+
+       return ret, nil
+   }
+
+   return nil, nil
+}
+
+func (r *imageResolver) getFiles(ctx context.Context, obj *models.Image) ([]*file.ImageFile, error) {
+   fileIDs, err := loaders.From(ctx).ImageFiles.Load(obj.ID)
+   if err != nil {
+       return nil, err
+   }
+
+   files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs)
+   ret := make([]*file.ImageFile, len(files))
+   for i, bf := range files {
+       f, ok := bf.(*file.ImageFile)
+       if !ok {
+           return nil, fmt.Errorf("file %T is not an image file", bf)
+       }
+
+       ret[i] = f
+   }
+
+   return ret, firstError(errs)
+}
+
func (r *imageResolver) Title(ctx context.Context, obj *models.Image) (*string, error) {
    ret := obj.GetTitle()
    return &ret, nil
}

func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*ImageFileType, error) {
-   f := obj.PrimaryFile()
+   f, err := r.getPrimaryFile(ctx, obj)
+   if err != nil {
+       return nil, err
+   }
+
+   if f == nil {
+       return nil, nil
+   }
+
    width := f.Width
    height := f.Height
    size := f.Size

@@ -28,9 +76,14 @@ func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*ImageFile
}

func (r *imageResolver) Files(ctx context.Context, obj *models.Image) ([]*ImageFile, error) {
-   ret := make([]*ImageFile, len(obj.Files))
-
-   for i, f := range obj.Files {
+   files, err := r.getFiles(ctx, obj)
+   if err != nil {
+       return nil, err
+   }
+
+   ret := make([]*ImageFile, len(files))
+
+   for i, f := range files {
        ret[i] = &ImageFile{
            ID:   strconv.Itoa(int(f.ID)),
            Path: f.Path,

@@ -55,7 +108,10 @@ func (r *imageResolver) Files(ctx context.Context, obj *models.Image) ([]*ImageF
}

func (r *imageResolver) FileModTime(ctx context.Context, obj *models.Image) (*time.Time, error) {
-   f := obj.PrimaryFile()
+   f, err := r.getPrimaryFile(ctx, obj)
+   if err != nil {
+       return nil, err
+   }
    if f != nil {
        return &f.ModTime, nil
    }
@@ -14,9 +14,56 @@ import (
    "github.com/stashapp/stash/pkg/utils"
)

+func (r *sceneResolver) getPrimaryFile(ctx context.Context, obj *models.Scene) (*file.VideoFile, error) {
+   if obj.PrimaryFileID != nil {
+       f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID)
+       if err != nil {
+           return nil, err
+       }
+
+       ret, ok := f.(*file.VideoFile)
+       if !ok {
+           return nil, fmt.Errorf("file %T is not a video file", f)
+       }
+
+       obj.Files.SetPrimary(ret)
+
+       return ret, nil
+   }
+
+   return nil, nil
+}
+
+func (r *sceneResolver) getFiles(ctx context.Context, obj *models.Scene) ([]*file.VideoFile, error) {
+   fileIDs, err := loaders.From(ctx).SceneFiles.Load(obj.ID)
+   if err != nil {
+       return nil, err
+   }
+
+   files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs)
+   ret := make([]*file.VideoFile, len(files))
+   for i, bf := range files {
+       f, ok := bf.(*file.VideoFile)
+       if !ok {
+           return nil, fmt.Errorf("file %T is not a video file", bf)
+       }
+
+       ret[i] = f
+   }
+
+   obj.Files.Set(ret)
+
+   return ret, firstError(errs)
+}
+
func (r *sceneResolver) FileModTime(ctx context.Context, obj *models.Scene) (*time.Time, error) {
-   if obj.PrimaryFile() != nil {
-       return &obj.PrimaryFile().ModTime, nil
+   f, err := r.getPrimaryFile(ctx, obj)
+   if err != nil {
+       return nil, err
+   }
+
+   if f != nil {
+       return &f.ModTime, nil
    }
    return nil, nil
}

@@ -31,7 +78,10 @@ func (r *sceneResolver) Date(ctx context.Context, obj *models.Scene) (*string, e

// File is deprecated
func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.SceneFileType, error) {
-   f := obj.PrimaryFile()
+   f, err := r.getPrimaryFile(ctx, obj)
+   if err != nil {
+       return nil, err
+   }
    if f == nil {
        return nil, nil
    }

@@ -52,9 +102,14 @@ func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.Sc
}

func (r *sceneResolver) Files(ctx context.Context, obj *models.Scene) ([]*VideoFile, error) {
-   ret := make([]*VideoFile, len(obj.Files))
-
-   for i, f := range obj.Files {
+   files, err := r.getFiles(ctx, obj)
+   if err != nil {
+       return nil, err
+   }
+
+   ret := make([]*VideoFile, len(files))
+
+   for i, f := range files {
        ret[i] = &VideoFile{
            ID:   strconv.Itoa(int(f.ID)),
            Path: f.Path,

@@ -148,7 +203,10 @@ func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) (re
}

func (r *sceneResolver) Captions(ctx context.Context, obj *models.Scene) (ret []*models.VideoCaption, err error) {
-   primaryFile := obj.PrimaryFile()
+   primaryFile, err := r.getPrimaryFile(ctx, obj)
+   if err != nil {
+       return nil, err
+   }
    if primaryFile == nil {
        return nil, nil
    }

@@ -265,7 +323,22 @@ func (r *sceneResolver) StashIds(ctx context.Context, obj *models.Scene) (ret []
}

func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string, error) {
-   phash := obj.Phash()
+   f, err := r.getPrimaryFile(ctx, obj)
+   if err != nil {
+       return nil, err
+   }
+
+   if f == nil {
+       return nil, nil
+   }
+
+   val := f.Fingerprints.Get(file.FingerprintTypePhash)
+   if val == nil {
+       return nil, nil
+   }
+
+   phash, _ := val.(int64)

    if phash != 0 {
        hexval := utils.PhashToString(phash)
        return &hexval, nil

@@ -274,6 +347,12 @@ func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string,
}

func (r *sceneResolver) SceneStreams(ctx context.Context, obj *models.Scene) ([]*manager.SceneStreamEndpoint, error) {
+   // load the primary file into the scene
+   _, err := r.getPrimaryFile(ctx, obj)
+   if err != nil {
+       return nil, err
+   }
+
    config := manager.GetInstance().Config

    baseURL, _ := ctx.Value(BaseURLCtxKey).(string)

@@ -283,7 +362,10 @@ func (r *sceneResolver) SceneStreams(ctx context.Context, obj *models.Scene) ([]
}

func (r *sceneResolver) Interactive(ctx context.Context, obj *models.Scene) (bool, error) {
-   primaryFile := obj.PrimaryFile()
+   primaryFile, err := r.getPrimaryFile(ctx, obj)
+   if err != nil {
+       return false, err
+   }
    if primaryFile == nil {
        return false, nil
    }

@@ -292,7 +374,10 @@ func (r *sceneResolver) Interactive(ctx context.Context, obj *models.Scene) (boo
}

func (r *sceneResolver) InteractiveSpeed(ctx context.Context, obj *models.Scene) (*int, error) {
-   primaryFile := obj.PrimaryFile()
+   primaryFile, err := r.getPrimaryFile(ctx, obj)
+   if err != nil {
+       return nil, err
+   }
    if primaryFile == nil {
        return nil, nil
    }
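Note: unlike the gallery and image resolvers, the scene helpers write the loaded files back onto the model (obj.Files.SetPrimary(ret) and obj.Files.Set(ret)). That lets a resolver trigger the load purely for its side effect, as SceneStreams does above, and lets downstream code keep reading scene.Files.Primary():

// Pattern used by SceneStreams above: force the lazy load, then rely on
// the memoized copy.
_, err := r.getPrimaryFile(ctx, obj) // populates obj.Files via SetPrimary
if err != nil {
    return nil, err
}
pf := obj.Files.Primary() // non-nil once the scene's primary file is loaded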
@@ -29,7 +29,7 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen
    c := config.GetInstance()

    existingPaths := c.GetStashPaths()
-   if len(input.Stashes) > 0 {
+   if input.Stashes != nil {
        for _, s := range input.Stashes {
            // Only validate existence of new paths
            isNew := true
@@ -338,6 +338,10 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
            return fmt.Errorf("gallery with id %d not found", id)
        }

+       if err := gallery.LoadFiles(ctx, qb); err != nil {
+           return err
+       }
+
        galleries = append(galleries, gallery)

        imgsDestroyed, err = r.galleryService.Destroy(ctx, gallery, fileDeleter, deleteGenerated, deleteFile)

@@ -357,7 +361,7 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall

    for _, gallery := range galleries {
        // don't delete stash library paths
-       path := gallery.Path()
+       path := gallery.Path
        if deleteFile && path != "" && !isStashPath(path) {
            // try to remove the folder - it is possible that it is not empty
            // so swallow the error if present

@@ -370,15 +374,15 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
        r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
            GalleryDestroyInput: input,
            Checksum:            gallery.Checksum(),
-           Path:                gallery.Path(),
+           Path:                gallery.Path,
        }, nil)
    }

    // call image destroy post hook as well
    for _, img := range imgsDestroyed {
        r.hookExecutor.ExecutePostHooks(ctx, img.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
-           Checksum: img.Checksum(),
-           Path:     img.Path(),
+           Checksum: img.Checksum,
+           Path:     img.Path,
        }, nil)
    }
@@ -240,8 +240,8 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
    // call post hook after performing the other actions
    r.hookExecutor.ExecutePostHooks(ctx, i.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
        ImageDestroyInput: input,
-       Checksum:          i.Checksum(),
-       Path:              i.Path(),
+       Checksum:          i.Checksum,
+       Path:              i.Path,
    }, nil)

    return true, nil

@@ -291,8 +291,8 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
        // call post hook after performing the other actions
        r.hookExecutor.ExecutePostHooks(ctx, image.ID, plugin.ImageDestroyPost, plugin.ImagesDestroyInput{
            ImagesDestroyInput: input,
-           Checksum:           image.Checksum(),
-           Path:               image.Path(),
+           Checksum:           image.Checksum,
+           Path:               image.Path,
        }, nil)
    }

@@ -366,9 +366,9 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
    // call post hook after performing the other actions
    r.hookExecutor.ExecutePostHooks(ctx, s.ID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{
        SceneDestroyInput: input,
-       Checksum:          s.Checksum(),
-       OSHash:            s.OSHash(),
-       Path:              s.Path(),
+       Checksum:          s.Checksum,
+       OSHash:            s.OSHash,
+       Path:              s.Path,
    }, nil)

    return true, nil

@@ -422,9 +422,9 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
        // call post hook after performing the other actions
        r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneDestroyPost, plugin.ScenesDestroyInput{
            ScenesDestroyInput: input,
-           Checksum:           scene.Checksum(),
-           OSHash:             scene.OSHash(),
-           Path:               scene.Path(),
+           Checksum:           scene.Checksum,
+           OSHash:             scene.OSHash,
+           Path:               scene.Path,
        }, nil)
    }
@@ -86,7 +86,11 @@ func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.Scen
    if err == nil {
        result.Count = len(scenes)
        for _, s := range scenes {
-           f := s.PrimaryFile()
+           if err = s.LoadPrimaryFile(ctx, r.repository.File); err != nil {
+               break
+           }
+
+           f := s.Files.Primary()
            if f == nil {
                continue
            }
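Note: outside the GraphQL middleware there are no per-request loaders, so this loop falls back to Scene.LoadPrimaryFile, one repository query per scene. The method itself is not in this diff; from its call sites here a plausible shape is:

// Plausible sketch of Scene.LoadPrimaryFile (the real method lives on the
// model elsewhere in the repo): fetch the primary file by ID and memoize it.
func (s *Scene) LoadPrimaryFile(ctx context.Context, finder file.Finder) error {
    if s.PrimaryFileID == nil {
        return nil
    }
    files, err := finder.Find(ctx, *s.PrimaryFileID)
    if err != nil || len(files) == 0 {
        return err
    }
    if vf, ok := files[0].(*file.VideoFile); ok {
        s.Files.SetPrimary(vf)
    }
    return nil
}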
@@ -18,6 +18,11 @@ func (r *queryResolver) SceneStreams(ctx context.Context, id *string) ([]*manage
        idInt, _ := strconv.Atoi(*id)
        var err error
        scene, err = r.repository.Scene.Find(ctx, idInt)
+
+       if scene != nil {
+           err = scene.LoadPrimaryFile(ctx, r.repository.File)
+       }
+
        return err
    }); err != nil {
        return nil, err
@@ -25,6 +25,7 @@ type ImageFinder interface {
type imageRoutes struct {
    txnManager  txn.Manager
    imageFinder ImageFinder
+   fileFinder  file.Finder
}

func (rs imageRoutes) Routes() chi.Router {

@@ -44,7 +45,7 @@ func (rs imageRoutes) Routes() chi.Router {

func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
    img := r.Context().Value(imageKey).(*models.Image)
-   filepath := manager.GetInstance().Paths.Generated.GetThumbnailPath(img.Checksum(), models.DefaultGthumbWidth)
+   filepath := manager.GetInstance().Paths.Generated.GetThumbnailPath(img.Checksum, models.DefaultGthumbWidth)

    w.Header().Add("Cache-Control", "max-age=604800000")

@@ -54,7 +55,7 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
        http.ServeFile(w, r, filepath)
    } else {
        // don't return anything if there is no file
-       f := img.PrimaryFile()
+       f := img.Files.Primary()
        if f == nil {
            // TODO - probably want to return a placeholder
            http.Error(w, http.StatusText(404), 404)

@@ -81,7 +82,7 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {

        // write the generated thumbnail to disk if enabled
        if manager.GetInstance().Config.IsWriteImageThumbnails() {
-           logger.Debugf("writing thumbnail to disk: %s", img.Path())
+           logger.Debugf("writing thumbnail to disk: %s", img.Path)
            if err := fsutil.WriteFile(filepath, data); err != nil {
                logger.Errorf("error writing thumbnail for image %s: %s", img.Path, err)
            }

@@ -97,12 +98,12 @@ func (rs imageRoutes) Image(w http.ResponseWriter, r *http.Request) {

    // if image is in a zip file, we need to serve it specifically

-   if len(i.Files) == 0 {
+   if i.Files.Primary() == nil {
        http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
        return
    }

-   i.Files[0].Serve(&file.OsFS{}, w, r)
+   i.Files.Primary().Serve(&file.OsFS{}, w, r)
}

// endregion

@@ -124,6 +125,10 @@ func (rs imageRoutes) ImageCtx(next http.Handler) http.Handler {
            image, _ = qb.Find(ctx, imageID)
        }

+       if image != nil {
+           _ = image.LoadPrimaryFile(ctx, rs.fileFinder)
+       }
+
        return nil
    })
    if readTxnErr != nil {
@@ -41,6 +41,7 @@ type CaptionFinder interface {
type sceneRoutes struct {
    txnManager        txn.Manager
    sceneFinder       SceneFinder
+   fileFinder        file.Finder
    captionFinder     CaptionFinder
    sceneMarkerFinder SceneMarkerFinder
    tagFinder         scene.MarkerTagFinder

@@ -94,7 +95,12 @@ func (rs sceneRoutes) StreamMKV(w http.ResponseWriter, r *http.Request) {
    // only allow mkv streaming if the scene container is an mkv already
    scene := r.Context().Value(sceneKey).(*models.Scene)

-   container, err := manager.GetSceneFileContainer(scene)
+   pf := scene.Files.Primary()
+   if pf == nil {
+       return
+   }
+
+   container, err := manager.GetVideoFileContainer(pf)
    if err != nil {
        logger.Errorf("[transcode] error getting container: %v", err)
    }

@@ -121,10 +127,8 @@ func (rs sceneRoutes) StreamMp4(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) StreamHLS(w http.ResponseWriter, r *http.Request) {
    scene := r.Context().Value(sceneKey).(*models.Scene)

-   ffprobe := manager.GetInstance().FFProbe
-   videoFile, err := ffprobe.NewVideoFile(scene.Path())
-   if err != nil {
-       logger.Errorf("[stream] error reading video file: %v", err)
+   pf := scene.Files.Primary()
+   if pf == nil {
        return
    }

@@ -134,7 +138,7 @@ func (rs sceneRoutes) StreamHLS(w http.ResponseWriter, r *http.Request) {
    w.Header().Set("Content-Type", ffmpeg.MimeHLS)
    var str strings.Builder

-   ffmpeg.WriteHLSPlaylist(videoFile.Duration, r.URL.String(), &str)
+   ffmpeg.WriteHLSPlaylist(pf.Duration, r.URL.String(), &str)

    requestByteRange := createByteRange(r.Header.Get("Range"))
    if requestByteRange.RawString != "" {

@@ -157,7 +161,10 @@ func (rs sceneRoutes) StreamTS(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, streamFormat ffmpeg.StreamFormat) {
    scene := r.Context().Value(sceneKey).(*models.Scene)

-   f := scene.PrimaryFile()
+   f := scene.Files.Primary()
+   if f == nil {
+       return
+   }
    logger.Debugf("Streaming as %s", streamFormat.MimeType)

    // start stream based on query param, if provided

@@ -306,7 +313,7 @@ func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {

func (rs sceneRoutes) Funscript(w http.ResponseWriter, r *http.Request) {
    s := r.Context().Value(sceneKey).(*models.Scene)
-   funscript := video.GetFunscriptPath(s.Path())
+   funscript := video.GetFunscriptPath(s.Path)
    serveFileNoCache(w, r, funscript)
}

@@ -322,7 +329,7 @@ func (rs sceneRoutes) Caption(w http.ResponseWriter, r *http.Request, lang strin

    if err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
        var err error
-       primaryFile := s.PrimaryFile()
+       primaryFile := s.Files.Primary()
        if primaryFile == nil {
            return nil
        }

@@ -330,7 +337,7 @@ func (rs sceneRoutes) Caption(w http.ResponseWriter, r *http.Request, lang strin
        captions, err := rs.captionFinder.GetCaptions(ctx, primaryFile.Base().ID)
        for _, caption := range captions {
            if lang == caption.LanguageCode && ext == caption.CaptionType {
-               sub, err := video.ReadSubs(caption.Path(s.Path()))
+               sub, err := video.ReadSubs(caption.Path(s.Path))
                if err == nil {
                    var b bytes.Buffer
                    err = sub.WriteToWebVTT(&b)

@@ -492,6 +499,10 @@ func (rs sceneRoutes) SceneCtx(next http.Handler) http.Handler {
            scene, _ = qb.Find(ctx, sceneID)
        }

+       if scene != nil {
+           _ = scene.LoadPrimaryFile(ctx, rs.fileFinder)
+       }
+
        return nil
    })
    if readTxnErr != nil {
@@ -140,6 +140,7 @@ func Start() error {
    r.Mount("/scene", sceneRoutes{
        txnManager:        txnManager,
        sceneFinder:       txnManager.Scene,
+       fileFinder:        txnManager.File,
        captionFinder:     txnManager.File,
        sceneMarkerFinder: txnManager.SceneMarker,
        tagFinder:         txnManager.Tag,

@@ -147,6 +148,7 @@ func Start() error {
    r.Mount("/image", imageRoutes{
        txnManager:  txnManager,
        imageFinder: txnManager.Image,
+       fileFinder:  txnManager.File,
    }.Routes())
    r.Mount("/studio", studioRoutes{
        txnManager: txnManager,
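Note: both route structs now receive a file.Finder so their Ctx middlewares can attach the primary file before handlers run. The interface is not shown in this diff, but from its uses here (Find(ctx, keys...) in the loader fetches, LoadPrimaryFile(ctx, rs.fileFinder) in the route middlewares) it presumably reduces to:

// Presumed minimal shape of file.Finder, inferred from its uses in this diff.
type Finder interface {
    Find(ctx context.Context, id ...ID) ([]File, error)
}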
@@ -21,12 +21,12 @@ type GalleryTagUpdater interface {

func getGalleryFileTagger(s *models.Gallery, cache *match.Cache) tagger {
    var path string
-   if s.Path() != "" {
-       path = s.Path()
+   if s.Path != "" {
+       path = s.Path
    }

    // only trim the extension if gallery is file-based
-   trimExt := s.PrimaryFile() != nil
+   trimExt := s.PrimaryFileID != nil

    return tagger{
        ID: s.ID,
@@ -4,7 +4,6 @@ import (
    "context"
    "testing"

-   "github.com/stashapp/stash/pkg/file"
    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/models/mocks"
    "github.com/stretchr/testify/assert"

@@ -55,11 +54,7 @@ func TestGalleryPerformers(t *testing.T) {

        gallery := models.Gallery{
            ID: galleryID,
-           Files: []file.File{
-               &file.BaseFile{
-                   Path: test.Path,
-               },
-           },
            PerformerIDs: models.NewRelatedIDs([]int{}),
        }
        err := GalleryPerformers(testCtx, &gallery, mockGalleryReader, mockPerformerReader, nil)

@@ -102,11 +97,7 @@ func TestGalleryStudios(t *testing.T) {

        gallery := models.Gallery{
            ID: galleryID,
-           Files: []file.File{
-               &file.BaseFile{
-                   Path: test.Path,
-               },
-           },
        }
        err := GalleryStudios(testCtx, &gallery, mockGalleryReader, mockStudioReader, nil)

@@ -179,11 +170,7 @@ func TestGalleryTags(t *testing.T) {

        gallery := models.Gallery{
            ID: galleryID,
-           Files: []file.File{
-               &file.BaseFile{
-                   Path: test.Path,
-               },
-           },
            TagIDs: models.NewRelatedIDs([]int{}),
        }
        err := GalleryTags(testCtx, &gallery, mockGalleryReader, mockTagReader, nil)
@@ -24,7 +24,7 @@ func getImageFileTagger(s *models.Image, cache *match.Cache) tagger {
|
||||
ID: s.ID,
|
||||
Type: "image",
|
||||
Name: s.GetTitle(),
|
||||
Path: s.Path(),
|
||||
Path: s.Path,
|
||||
cache: cache,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,7 +3,6 @@ package autotag
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stashapp/stash/pkg/file"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/mocks"
|
||||
"github.com/stretchr/testify/assert"
|
||||
@@ -12,14 +11,6 @@ import (
|
||||
|
||||
const imageExt = "jpg"
|
||||
|
||||
func makeImageFile(p string) *file.ImageFile {
|
||||
return &file.ImageFile{
|
||||
BaseFile: &file.BaseFile{
|
||||
Path: p,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func TestImagePerformers(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
@@ -60,7 +51,7 @@ func TestImagePerformers(t *testing.T) {
|
||||
|
||||
image := models.Image{
|
||||
ID: imageID,
|
||||
Files: []*file.ImageFile{makeImageFile(test.Path)},
|
||||
Path: test.Path,
|
||||
PerformerIDs: models.NewRelatedIDs([]int{}),
|
||||
}
|
||||
err := ImagePerformers(testCtx, &image, mockImageReader, mockPerformerReader, nil)
|
||||
@@ -103,7 +94,7 @@ func TestImageStudios(t *testing.T) {
|
||||
|
||||
image := models.Image{
|
||||
ID: imageID,
|
||||
Files: []*file.ImageFile{makeImageFile(test.Path)},
|
||||
Path: test.Path,
|
||||
}
|
||||
err := ImageStudios(testCtx, &image, mockImageReader, mockStudioReader, nil)
|
||||
|
||||
@@ -176,7 +167,7 @@ func TestImageTags(t *testing.T) {
|
||||
|
||||
image := models.Image{
|
||||
ID: imageID,
|
||||
Files: []*file.ImageFile{makeImageFile(test.Path)},
|
||||
Path: test.Path,
|
||||
TagIDs: models.NewRelatedIDs([]int{}),
|
||||
}
|
||||
err := ImageTags(testCtx, &image, mockImageReader, mockTagReader, nil)
|
||||
|
||||
@@ -565,9 +565,9 @@ func TestParsePerformerScenes(t *testing.T) {
|
||||
|
||||
// title is only set on scenes where we expect performer to be set
|
||||
if scene.Title == expectedMatchTitle && len(performers) == 0 {
|
||||
t.Errorf("Did not set performer '%s' for path '%s'", testName, scene.Path())
|
||||
t.Errorf("Did not set performer '%s' for path '%s'", testName, scene.Path)
|
||||
} else if scene.Title != expectedMatchTitle && len(performers) > 0 {
|
||||
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, scene.Path())
|
||||
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, scene.Path)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -616,13 +616,13 @@ func TestParseStudioScenes(t *testing.T) {
|
||||
// title is only set on scenes where we expect studio to be set
|
||||
if scene.Title == expectedMatchTitle {
|
||||
if scene.StudioID == nil {
|
||||
t.Errorf("Did not set studio '%s' for path '%s'", testName, scene.Path())
|
||||
t.Errorf("Did not set studio '%s' for path '%s'", testName, scene.Path)
|
||||
} else if scene.StudioID != nil && *scene.StudioID != studios[1].ID {
|
||||
t.Errorf("Incorrect studio id %d set for path '%s'", scene.StudioID, scene.Path())
|
||||
t.Errorf("Incorrect studio id %d set for path '%s'", scene.StudioID, scene.Path)
|
||||
}
|
||||
|
||||
} else if scene.Title != expectedMatchTitle && scene.StudioID != nil && *scene.StudioID == studios[1].ID {
|
||||
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, scene.Path())
|
||||
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, scene.Path)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -673,9 +673,9 @@ func TestParseTagScenes(t *testing.T) {
|
||||
|
||||
// title is only set on scenes where we expect tag to be set
|
||||
if scene.Title == expectedMatchTitle && len(tags) == 0 {
|
||||
t.Errorf("Did not set tag '%s' for path '%s'", testName, scene.Path())
|
||||
t.Errorf("Did not set tag '%s' for path '%s'", testName, scene.Path)
|
||||
} else if (scene.Title != expectedMatchTitle) && len(tags) > 0 {
|
||||
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, scene.Path())
|
||||
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, scene.Path)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -721,9 +721,9 @@ func TestParsePerformerImages(t *testing.T) {
|
||||
// title is only set on images where we expect performer to be set
|
||||
expectedMatch := image.Title == expectedMatchTitle || image.Title == existingStudioImageName
|
||||
if expectedMatch && len(performers) == 0 {
|
||||
t.Errorf("Did not set performer '%s' for path '%s'", testName, image.Path())
|
||||
t.Errorf("Did not set performer '%s' for path '%s'", testName, image.Path)
|
||||
} else if !expectedMatch && len(performers) > 0 {
|
||||
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, image.Path())
|
||||
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, image.Path)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -772,13 +772,13 @@ func TestParseStudioImages(t *testing.T) {
|
||||
// title is only set on images where we expect studio to be set
|
||||
if image.Title == expectedMatchTitle {
|
||||
if image.StudioID == nil {
|
||||
t.Errorf("Did not set studio '%s' for path '%s'", testName, image.Path())
|
||||
t.Errorf("Did not set studio '%s' for path '%s'", testName, image.Path)
|
||||
} else if *image.StudioID != studios[1].ID {
|
||||
t.Errorf("Incorrect studio id %d set for path '%s'", *image.StudioID, image.Path())
|
||||
t.Errorf("Incorrect studio id %d set for path '%s'", *image.StudioID, image.Path)
|
||||
}
|
||||
|
||||
} else if image.Title != expectedMatchTitle && image.StudioID != nil && *image.StudioID == studios[1].ID {
|
||||
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, image.Path())
|
||||
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, image.Path)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -830,9 +830,9 @@ func TestParseTagImages(t *testing.T) {
|
||||
// title is only set on images where we expect performer to be set
|
||||
expectedMatch := image.Title == expectedMatchTitle || image.Title == existingStudioImageName
|
||||
if expectedMatch && len(tags) == 0 {
|
||||
t.Errorf("Did not set tag '%s' for path '%s'", testName, image.Path())
|
||||
t.Errorf("Did not set tag '%s' for path '%s'", testName, image.Path)
|
||||
} else if !expectedMatch && len(tags) > 0 {
|
||||
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, image.Path())
|
||||
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, image.Path)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -878,9 +878,9 @@ func TestParsePerformerGalleries(t *testing.T) {
|
||||
// title is only set on galleries where we expect performer to be set
|
||||
expectedMatch := gallery.Title == expectedMatchTitle || gallery.Title == existingStudioGalleryName
|
||||
if expectedMatch && len(performers) == 0 {
|
||||
t.Errorf("Did not set performer '%s' for path '%s'", testName, gallery.Path())
|
||||
t.Errorf("Did not set performer '%s' for path '%s'", testName, gallery.Path)
|
||||
} else if !expectedMatch && len(performers) > 0 {
|
||||
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, gallery.Path())
|
||||
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, gallery.Path)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -929,13 +929,13 @@ func TestParseStudioGalleries(t *testing.T) {
|
||||
// title is only set on galleries where we expect studio to be set
|
||||
if gallery.Title == expectedMatchTitle {
|
||||
if gallery.StudioID == nil {
|
||||
t.Errorf("Did not set studio '%s' for path '%s'", testName, gallery.Path())
|
||||
t.Errorf("Did not set studio '%s' for path '%s'", testName, gallery.Path)
|
||||
} else if *gallery.StudioID != studios[1].ID {
|
||||
t.Errorf("Incorrect studio id %d set for path '%s'", *gallery.StudioID, gallery.Path())
|
||||
t.Errorf("Incorrect studio id %d set for path '%s'", *gallery.StudioID, gallery.Path)
|
||||
}
|
||||
|
||||
} else if gallery.Title != expectedMatchTitle && (gallery.StudioID != nil && *gallery.StudioID == studios[1].ID) {
|
||||
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, gallery.Path())
|
||||
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, gallery.Path)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -987,9 +987,9 @@ func TestParseTagGalleries(t *testing.T) {
|
||||
// title is only set on galleries where we expect performer to be set
|
||||
expectedMatch := gallery.Title == expectedMatchTitle || gallery.Title == existingStudioGalleryName
|
||||
if expectedMatch && len(tags) == 0 {
|
||||
t.Errorf("Did not set tag '%s' for path '%s'", testName, gallery.Path())
|
||||
t.Errorf("Did not set tag '%s' for path '%s'", testName, gallery.Path)
|
||||
} else if !expectedMatch && len(tags) > 0 {
|
||||
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, gallery.Path())
|
||||
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, gallery.Path)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@ import (
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stashapp/stash/pkg/file"
|
||||
"github.com/stashapp/stash/pkg/image"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/mocks"
|
||||
@@ -54,13 +53,7 @@ func testPerformerScenes(t *testing.T, performerName, expectedRegex string) {
|
||||
for i, p := range append(matchingPaths, falsePaths...) {
|
||||
scenes = append(scenes, &models.Scene{
|
||||
ID: i + 1,
|
||||
Files: []*file.VideoFile{
|
||||
{
|
||||
BaseFile: &file.BaseFile{
|
||||
Path: p,
|
||||
},
|
||||
},
|
||||
},
|
||||
PerformerIDs: models.NewRelatedIDs([]int{}),
|
||||
})
|
||||
}
|
||||
@@ -140,7 +133,7 @@ func testPerformerImages(t *testing.T, performerName, expectedRegex string) {
|
||||
for i, p := range append(matchingPaths, falsePaths...) {
|
||||
images = append(images, &models.Image{
|
||||
ID: i + 1,
|
||||
Files: []*file.ImageFile{makeImageFile(p)},
|
||||
Path: p,
|
||||
PerformerIDs: models.NewRelatedIDs([]int{}),
|
||||
})
|
||||
}
|
||||
@@ -221,11 +214,7 @@ func testPerformerGalleries(t *testing.T, performerName, expectedRegex string) {
|
||||
v := p
|
||||
galleries = append(galleries, &models.Gallery{
|
||||
ID: i + 1,
|
||||
Files: []file.File{
|
||||
&file.BaseFile{
|
||||
Path: v,
|
||||
},
|
||||
},
|
||||
PerformerIDs: models.NewRelatedIDs([]int{}),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -24,7 +24,7 @@ func getSceneFileTagger(s *models.Scene, cache *match.Cache) tagger {
|
||||
ID: s.ID,
|
||||
Type: "scene",
|
||||
Name: s.GetTitle(),
|
||||
Path: s.Path(),
|
||||
Path: s.Path,
|
||||
cache: cache,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,7 +6,6 @@ import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stashapp/stash/pkg/file"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/mocks"
|
||||
"github.com/stretchr/testify/assert"
|
||||
@@ -176,13 +175,7 @@ func TestScenePerformers(t *testing.T) {
|
||||
|
||||
scene := models.Scene{
|
||||
ID: sceneID,
|
||||
Files: []*file.VideoFile{
|
||||
{
|
||||
BaseFile: &file.BaseFile{
|
||||
Path: test.Path,
|
||||
},
|
||||
},
|
||||
},
|
||||
PerformerIDs: models.NewRelatedIDs([]int{}),
|
||||
}
|
||||
|
||||
@@ -237,13 +230,7 @@ func TestSceneStudios(t *testing.T) {
|
||||
|
||||
scene := models.Scene{
|
||||
ID: sceneID,
|
||||
Files: []*file.VideoFile{
|
||||
{
|
||||
BaseFile: &file.BaseFile{
|
||||
Path: test.Path,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
err := SceneStudios(testCtx, &scene, mockSceneReader, mockStudioReader, nil)
|
||||
|
||||
@@ -316,13 +303,7 @@ func TestSceneTags(t *testing.T) {
|
||||
|
||||
scene := models.Scene{
|
||||
ID: sceneID,
|
||||
Files: []*file.VideoFile{
|
||||
{
|
||||
BaseFile: &file.BaseFile{
|
||||
Path: test.Path,
|
||||
},
|
||||
},
|
||||
},
|
||||
TagIDs: models.NewRelatedIDs([]int{}),
|
||||
}
|
||||
err := SceneTags(testCtx, &scene, mockSceneReader, mockTagReader, nil)
|
||||
|
||||
@@ -4,7 +4,6 @@ import (
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stashapp/stash/pkg/file"
|
||||
"github.com/stashapp/stash/pkg/image"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/mocks"
|
||||
@@ -101,13 +100,7 @@ func testStudioScenes(t *testing.T, tc testStudioCase) {
|
||||
for i, p := range append(matchingPaths, falsePaths...) {
|
||||
scenes = append(scenes, &models.Scene{
|
||||
ID: i + 1,
|
||||
Files: []*file.VideoFile{
|
||||
{
|
||||
BaseFile: &file.BaseFile{
|
||||
Path: p,
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
@@ -198,7 +191,7 @@ func testStudioImages(t *testing.T, tc testStudioCase) {
|
||||
for i, p := range append(matchingPaths, falsePaths...) {
|
||||
images = append(images, &models.Image{
|
||||
ID: i + 1,
|
||||
Files: []*file.ImageFile{makeImageFile(p)},
|
||||
Path: p,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -288,11 +281,7 @@ func testStudioGalleries(t *testing.T, tc testStudioCase) {
|
||||
v := p
|
||||
galleries = append(galleries, &models.Gallery{
|
||||
ID: i + 1,
|
||||
Files: []file.File{
|
||||
&file.BaseFile{
|
||||
Path: v,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@ import (
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stashapp/stash/pkg/file"
|
||||
"github.com/stashapp/stash/pkg/image"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/mocks"
|
||||
@@ -101,13 +100,7 @@ func testTagScenes(t *testing.T, tc testTagCase) {
|
||||
for i, p := range append(matchingPaths, falsePaths...) {
|
||||
scenes = append(scenes, &models.Scene{
|
||||
ID: i + 1,
|
||||
Files: []*file.VideoFile{
|
||||
{
|
||||
BaseFile: &file.BaseFile{
|
||||
Path: p,
|
||||
},
|
||||
},
|
||||
},
|
||||
TagIDs: models.NewRelatedIDs([]int{}),
|
||||
})
|
||||
}
|
||||
@@ -200,7 +193,7 @@ func testTagImages(t *testing.T, tc testTagCase) {
|
||||
for i, p := range append(matchingPaths, falsePaths...) {
|
||||
images = append(images, &models.Image{
|
||||
ID: i + 1,
|
||||
Files: []*file.ImageFile{makeImageFile(p)},
|
||||
Path: p,
|
||||
TagIDs: models.NewRelatedIDs([]int{}),
|
||||
})
|
||||
}
|
||||
@@ -295,11 +288,7 @@ func testTagGalleries(t *testing.T, tc testTagCase) {
|
||||
v := p
|
||||
galleries = append(galleries, &models.Gallery{
|
||||
ID: i + 1,
|
||||
Files: []file.File{
|
||||
&file.BaseFile{
|
||||
Path: v,
|
||||
},
|
||||
},
|
||||
TagIDs: models.NewRelatedIDs([]int{}),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -114,7 +114,7 @@ func sceneToContainer(scene *models.Scene, parent string, host string) interface
|
||||
duration int64
|
||||
)
|
||||
|
||||
f := scene.PrimaryFile()
|
||||
f := scene.Files.Primary()
|
||||
if f != nil {
|
||||
size = int(f.Size)
|
||||
bitrate = uint(f.BitRate)
|
||||
@@ -362,6 +362,10 @@ func (me *contentDirectoryService) handleBrowseMetadata(obj object, host string)
|
||||
|
||||
if err := txn.WithTxn(context.TODO(), me.txnManager, func(ctx context.Context) error {
|
||||
scene, err = me.repository.SceneFinder.Find(ctx, sceneID)
|
||||
if scene != nil {
|
||||
err = scene.LoadPrimaryFile(ctx, me.repository.FileFinder)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@ import (
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/file"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/txn"
|
||||
@@ -15,6 +16,7 @@ import (
|
||||
|
||||
type Repository struct {
|
||||
SceneFinder SceneFinder
|
||||
FileFinder file.Finder
|
||||
StudioFinder StudioFinder
|
||||
TagFinder TagFinder
|
||||
PerformerFinder PerformerFinder
|
||||
|
||||
@@ -212,7 +212,7 @@ func (t *SceneIdentifier) modifyScene(ctx context.Context, txnManager txn.Manage
|
||||
|
||||
// don't update anything if nothing was set
|
||||
if updater.IsEmpty() {
|
||||
logger.Debugf("Nothing to set for %s", s.Path())
|
||||
logger.Debugf("Nothing to set for %s", s.Path)
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -225,7 +225,7 @@ func (t *SceneIdentifier) modifyScene(ctx context.Context, txnManager txn.Manage
|
||||
if title.Ptr() != nil {
|
||||
as = fmt.Sprintf(" as %s", title.Value)
|
||||
}
|
||||
logger.Infof("Successfully identified %s%s using %s", s.Path(), as, result.source.Name)
|
||||
logger.Infof("Successfully identified %s%s using %s", s.Path, as, result.source.Name)
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
|
||||
@@ -385,9 +385,9 @@ func (m parseMapper) parse(scene *models.Scene) *sceneHolder {
|
||||
// scene path in the match. Otherwise, use the default behaviour of just
|
||||
// the file's basename
|
||||
// must be double \ because of the regex escaping
|
||||
filename := filepath.Base(scene.Path())
|
||||
filename := filepath.Base(scene.Path)
|
||||
if strings.Contains(m.regexString, `\\`) || strings.Contains(m.regexString, "/") {
|
||||
filename = scene.Path()
|
||||
filename = scene.Path
|
||||
}
|
||||
|
||||
result := m.regex.FindStringSubmatch(filename)
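
The mapper matches against the file basename unless the pattern clearly addresses directories (an escaped backslash or a forward slash), in which case the whole path is used. A hedged sketch of that choice:

// candidateFilename is illustrative: pick what the parser regex runs against.
func candidateFilename(scenePath, regexString string) string {
	if strings.Contains(regexString, `\\`) || strings.Contains(regexString, "/") {
		return scenePath // pattern spans directories, so match the full path
	}
	return filepath.Base(scenePath)
}
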
@@ -1,18 +0,0 @@
package manager

import (
"os"

"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
)

func DeleteGalleryFile(gallery *models.Gallery) {
path := gallery.Path()
if path != "" {
err := os.Remove(path)
if err != nil {
logger.Warnf("Could not delete file %s: %s", path, err.Error())
}
}
}
@@ -38,13 +38,11 @@ func (i *fileFolderImporter) PreImport(ctx context.Context) error {
}

func (i *fileFolderImporter) folderJSONToFolder(ctx context.Context, baseJSON *jsonschema.BaseDirEntry) (*file.Folder, error) {
path := filepath.FromSlash(baseJSON.Path)

ret := file.Folder{
DirEntry: file.DirEntry{
ModTime: baseJSON.ModTime.GetTime(),
},
Path: path,
Path: baseJSON.Path,
CreatedAt: baseJSON.CreatedAt.GetTime(),
UpdatedAt: baseJSON.CreatedAt.GetTime(),
}
@@ -97,13 +95,11 @@ func (i *fileFolderImporter) fileJSONToFile(ctx context.Context, fileJSON jsonsc
}

func (i *fileFolderImporter) baseFileJSONToBaseFile(ctx context.Context, baseJSON *jsonschema.BaseFile) (*file.BaseFile, error) {
path := filepath.FromSlash(baseJSON.Path)

baseFile := file.BaseFile{
DirEntry: file.DirEntry{
ModTime: baseJSON.ModTime.GetTime(),
},
Basename: filepath.Base(path),
Basename: filepath.Base(baseJSON.Path),
Size: baseJSON.Size,
CreatedAt: baseJSON.CreatedAt.GetTime(),
UpdatedAt: baseJSON.CreatedAt.GetTime(),
@@ -124,7 +120,7 @@ func (i *fileFolderImporter) baseFileJSONToBaseFile(ctx context.Context, baseJSO
}

func (i *fileFolderImporter) populateZipFileID(ctx context.Context, f *file.DirEntry) error {
zipFilePath := filepath.FromSlash(i.Input.DirEntry().ZipFile)
zipFilePath := i.Input.DirEntry().ZipFile
if zipFilePath != "" {
zf, err := i.ReaderWriter.FindByPath(ctx, zipFilePath)
if err != nil {
@@ -147,11 +143,11 @@ func (i *fileFolderImporter) PostImport(ctx context.Context, id int) error {
}

func (i *fileFolderImporter) Name() string {
return filepath.FromSlash(i.Input.DirEntry().Path)
return i.Input.DirEntry().Path
}

func (i *fileFolderImporter) FindExistingID(ctx context.Context) (*int, error) {
path := filepath.FromSlash(i.Input.DirEntry().Path)
path := i.Input.DirEntry().Path
existing, err := i.ReaderWriter.FindByPath(ctx, path)
if err != nil {
return nil, err
@@ -213,7 +209,7 @@ func (i *fileFolderImporter) getOrCreateFolder(ctx context.Context, path string,

func (i *fileFolderImporter) Create(ctx context.Context) (*int, error) {
// create folder hierarchy and set parent folder id
path := filepath.FromSlash(i.Input.DirEntry().Path)
path := i.Input.DirEntry().Path
path = filepath.Dir(path)
folder, err := i.createFolderHierarchy(ctx, path)
if err != nil {
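
These importer hunks drop the filepath.FromSlash conversions, matching the commit note about not translating path slashes: paths are now read back exactly as they were stored, so lookups such as FindByPath compare against the database value verbatim. A before/after sketch (the path literal is illustrative):

// Before: the stored path was rewritten to the local separator on import,
// which could diverge from what the database actually contains.
old := filepath.FromSlash("media/clips/a.mp4")

// After: the JSON value is used as-is, so FindByPath sees the exact string.
now := "media/clips/a.mp4"
_, _ = old, now
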
@@ -210,6 +210,7 @@ func initialize() error {

instance.DLNAService = dlna.NewService(instance.Repository, dlna.Repository{
SceneFinder: instance.Repository.Scene,
FileFinder: instance.Repository.File,
StudioFinder: instance.Repository.Studio,
TagFinder: instance.Repository.Tag,
PerformerFinder: instance.Repository.Performer,

@@ -185,6 +185,9 @@ func (s *Manager) generateScreenshot(ctx context.Context, sceneId string, at *fl
if err := s.Repository.WithTxn(ctx, func(ctx context.Context) error {
var err error
scene, err = s.Repository.Scene.Find(ctx, sceneIdInt)
if scene != nil {
err = scene.LoadPrimaryFile(ctx, s.Repository.File)
}
return err
}); err != nil || scene == nil {
logger.Errorf("failed to get scene for generate: %s", err.Error())

@@ -15,17 +15,22 @@ import (
type ImageReaderWriter interface {
models.ImageReaderWriter
image.FinderCreatorUpdater
models.ImageFileLoader
GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error)
}

type GalleryReaderWriter interface {
models.GalleryReaderWriter
gallery.FinderCreatorUpdater
gallery.Finder
models.FileLoader
GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error)
}

type SceneReaderWriter interface {
models.SceneReaderWriter
scene.CreatorUpdater
GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error)
}

type FileReaderWriter interface {
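
GetManyFileIDs gives each reader a batched file-ID lookup, which is what the new per-request dataloaders consume: one query for N entity IDs rather than N single-row queries. A sketch of a batch fetch function built on it (error fan-out is simplified to one error for the whole batch):

// fetchSceneFileIDs is illustrative: adapt the batched lookup to a
// per-key result slice of the shape dataloaders expect.
func fetchSceneFileIDs(ctx context.Context, rw SceneReaderWriter) func(keys []int) ([][]file.ID, []error) {
	return func(keys []int) ([][]file.ID, []error) {
		ret, err := rw.GetManyFileIDs(ctx, keys)
		if err != nil {
			return nil, []error{err}
		}
		return ret, nil // ret[i] holds the file IDs for keys[i]
	}
}
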
@@ -38,7 +38,7 @@ func (c *StreamRequestContext) Cancel() {
}

func KillRunningStreams(scene *models.Scene, fileNamingAlgo models.HashAlgorithm) {
instance.ReadLockManager.Cancel(scene.Path())
instance.ReadLockManager.Cancel(scene.Path)

sceneHash := scene.GetHash(fileNamingAlgo)

@@ -62,7 +62,7 @@ type SceneServer struct {
func (s *SceneServer) StreamSceneDirect(scene *models.Scene, w http.ResponseWriter, r *http.Request) {
fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm()

filepath := GetInstance().Paths.Scene.GetStreamPath(scene.Path(), scene.GetHash(fileNamingAlgo))
filepath := GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.GetHash(fileNamingAlgo))
streamRequestCtx := NewStreamRequestContext(w, r)

// #2579 - hijacking and closing the connection here causes video playback to fail in Safari

@@ -5,36 +5,37 @@ import (

"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/models"
)

func GetSceneFileContainer(scene *models.Scene) (ffmpeg.Container, error) {
func GetVideoFileContainer(file *file.VideoFile) (ffmpeg.Container, error) {
var container ffmpeg.Container
format := scene.Format()
format := file.Format
if format != "" {
container = ffmpeg.Container(format)
} else { // container isn't in the DB
// shouldn't happen, fallback to ffprobe
ffprobe := GetInstance().FFProbe
tmpVideoFile, err := ffprobe.NewVideoFile(scene.Path())
tmpVideoFile, err := ffprobe.NewVideoFile(file.Path)
if err != nil {
return ffmpeg.Container(""), fmt.Errorf("error reading video file: %v", err)
}

return ffmpeg.MatchContainer(tmpVideoFile.Container, scene.Path())
return ffmpeg.MatchContainer(tmpVideoFile.Container, file.Path)
}

return container, nil
}
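
GetVideoFileContainer now takes the video file directly, so callers resolve the primary file once and reuse it for codec and container checks. A hedged usage sketch (the helper name is illustrative):

// sceneContainer is illustrative: nil-check the primary file, then delegate.
func sceneContainer(scene *models.Scene) (ffmpeg.Container, error) {
	pf := scene.Files.Primary()
	if pf == nil {
		return "", fmt.Errorf("scene %d has no video file", scene.ID)
	}
	return GetVideoFileContainer(pf)
}
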
func includeSceneStreamPath(scene *models.Scene, streamingResolution models.StreamingResolutionEnum, maxStreamingTranscodeSize models.StreamingResolutionEnum) bool {
func includeSceneStreamPath(f *file.VideoFile, streamingResolution models.StreamingResolutionEnum, maxStreamingTranscodeSize models.StreamingResolutionEnum) bool {
// convert StreamingResolutionEnum to ResolutionEnum so we can get the min
// resolution
convertedRes := models.ResolutionEnum(streamingResolution)

minResolution := convertedRes.GetMinResolution()
sceneResolution := scene.GetMinResolution()
sceneResolution := f.GetMinResolution()

// don't include if scene resolution is smaller than the streamingResolution
if sceneResolution != 0 && sceneResolution < minResolution {
@@ -70,6 +71,11 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreami
return nil, fmt.Errorf("nil scene")
}

pf := scene.Files.Primary()
if pf == nil {
return nil, fmt.Errorf("nil file")
}

var ret []*SceneStreamEndpoint
mimeWebm := ffmpeg.MimeWebm
mimeHLS := ffmpeg.MimeHLS
@@ -80,12 +86,12 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreami

// direct stream should only apply when the audio codec is supported
audioCodec := ffmpeg.MissingUnsupported
if scene.AudioCodec() != "" {
audioCodec = ffmpeg.ProbeAudioCodec(scene.AudioCodec())
if pf.AudioCodec != "" {
audioCodec = ffmpeg.ProbeAudioCodec(pf.AudioCodec)
}

// don't care if we can't get the container
container, _ := GetSceneFileContainer(scene)
container, _ := GetVideoFileContainer(pf)

if HasTranscode(scene, config.GetInstance().GetVideoFileNamingAlgorithm()) || ffmpeg.IsValidAudioForContainer(audioCodec, container) {
label := "Direct stream"
@@ -128,27 +134,27 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreami
webmURL := directStreamURL + ".webm"
mp4URL := directStreamURL + ".mp4"

if includeSceneStreamPath(scene, models.StreamingResolutionEnumFourK, maxStreamingTranscodeSize) {
if includeSceneStreamPath(pf, models.StreamingResolutionEnumFourK, maxStreamingTranscodeSize) {
webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumFourK, mimeMp4, webmLabelFourK))
mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumFourK, mimeMp4, mp4LabelFourK))
}

if includeSceneStreamPath(scene, models.StreamingResolutionEnumFullHd, maxStreamingTranscodeSize) {
if includeSceneStreamPath(pf, models.StreamingResolutionEnumFullHd, maxStreamingTranscodeSize) {
webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumFullHd, mimeMp4, webmLabelFullHD))
mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumFullHd, mimeMp4, mp4LabelFullHD))
}

if includeSceneStreamPath(scene, models.StreamingResolutionEnumStandardHd, maxStreamingTranscodeSize) {
if includeSceneStreamPath(pf, models.StreamingResolutionEnumStandardHd, maxStreamingTranscodeSize) {
webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumStandardHd, mimeMp4, webmLabelStandardHD))
mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumStandardHd, mimeMp4, mp4LabelStandardHD))
}

if includeSceneStreamPath(scene, models.StreamingResolutionEnumStandard, maxStreamingTranscodeSize) {
if includeSceneStreamPath(pf, models.StreamingResolutionEnumStandard, maxStreamingTranscodeSize) {
webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumStandard, mimeMp4, webmLabelStandard))
mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumStandard, mimeMp4, mp4LabelStandard))
}

if includeSceneStreamPath(scene, models.StreamingResolutionEnumLow, maxStreamingTranscodeSize) {
if includeSceneStreamPath(pf, models.StreamingResolutionEnumLow, maxStreamingTranscodeSize) {
webmStreams = append(webmStreams, makeStreamEndpoint(webmURL, models.StreamingResolutionEnumLow, mimeMp4, webmLabelLow))
mp4Streams = append(mp4Streams, makeStreamEndpoint(mp4URL, models.StreamingResolutionEnumLow, mimeMp4, mp4LabelLow))
}

@@ -699,17 +699,17 @@ func (t *autoTagSceneTask) Start(ctx context.Context, wg *sync.WaitGroup) {
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
if t.performers {
if err := autotag.ScenePerformers(ctx, t.scene, r.Scene, r.Performer, t.cache); err != nil {
return fmt.Errorf("error tagging scene performers for %s: %v", t.scene.Path(), err)
return fmt.Errorf("error tagging scene performers for %s: %v", t.scene.Path, err)
}
}
if t.studios {
if err := autotag.SceneStudios(ctx, t.scene, r.Scene, r.Studio, t.cache); err != nil {
return fmt.Errorf("error tagging scene studio for %s: %v", t.scene.Path(), err)
return fmt.Errorf("error tagging scene studio for %s: %v", t.scene.Path, err)
}
}
if t.tags {
if err := autotag.SceneTags(ctx, t.scene, r.Scene, r.Tag, t.cache); err != nil {
return fmt.Errorf("error tagging scene tags for %s: %v", t.scene.Path(), err)
return fmt.Errorf("error tagging scene tags for %s: %v", t.scene.Path, err)
}
}

@@ -736,17 +736,17 @@ func (t *autoTagImageTask) Start(ctx context.Context, wg *sync.WaitGroup) {
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
if t.performers {
if err := autotag.ImagePerformers(ctx, t.image, r.Image, r.Performer, t.cache); err != nil {
return fmt.Errorf("error tagging image performers for %s: %v", t.image.Path(), err)
return fmt.Errorf("error tagging image performers for %s: %v", t.image.Path, err)
}
}
if t.studios {
if err := autotag.ImageStudios(ctx, t.image, r.Image, r.Studio, t.cache); err != nil {
return fmt.Errorf("error tagging image studio for %s: %v", t.image.Path(), err)
return fmt.Errorf("error tagging image studio for %s: %v", t.image.Path, err)
}
}
if t.tags {
if err := autotag.ImageTags(ctx, t.image, r.Image, r.Tag, t.cache); err != nil {
return fmt.Errorf("error tagging image tags for %s: %v", t.image.Path(), err)
return fmt.Errorf("error tagging image tags for %s: %v", t.image.Path, err)
}
}

@@ -773,17 +773,17 @@ func (t *autoTagGalleryTask) Start(ctx context.Context, wg *sync.WaitGroup) {
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
if t.performers {
if err := autotag.GalleryPerformers(ctx, t.gallery, r.Gallery, r.Performer, t.cache); err != nil {
return fmt.Errorf("error tagging gallery performers for %s: %v", t.gallery.Path(), err)
return fmt.Errorf("error tagging gallery performers for %s: %v", t.gallery.Path, err)
}
}
if t.studios {
if err := autotag.GalleryStudios(ctx, t.gallery, r.Gallery, r.Studio, t.cache); err != nil {
return fmt.Errorf("error tagging gallery studio for %s: %v", t.gallery.Path(), err)
return fmt.Errorf("error tagging gallery studio for %s: %v", t.gallery.Path, err)
}
}
if t.tags {
if err := autotag.GalleryTags(ctx, t.gallery, r.Gallery, r.Tag, t.cache); err != nil {
return fmt.Errorf("error tagging gallery tags for %s: %v", t.gallery.Path(), err)
return fmt.Errorf("error tagging gallery tags for %s: %v", t.gallery.Path, err)
}
}

@@ -206,20 +206,24 @@ func (h *cleanHandler) deleteRelatedScenes(ctx context.Context, fileDeleter *fil
}

for _, scene := range scenes {
if err := scene.LoadFiles(ctx, sceneQB); err != nil {
return err
}

// only delete if the scene has no other files
if len(scene.Files) <= 1 {
if len(scene.Files.List()) <= 1 {
logger.Infof("Deleting scene %q since it has no other related files", scene.GetTitle())
if err := mgr.SceneService.Destroy(ctx, scene, sceneFileDeleter, true, false); err != nil {
return err
}

checksum := scene.Checksum()
oshash := scene.OSHash()
checksum := scene.Checksum
oshash := scene.OSHash

mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, scene.ID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{
Checksum: checksum,
OSHash: oshash,
Path: scene.Path(),
Path: scene.Path,
}, nil)
}
}
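
Files is now a lazy relationship: LoadFiles must run before Files.List() returns anything, which is why the clean handler loads explicitly before counting. A sketch of the guard (the loader argument's interface name is an assumption):

// relatedFileCount is illustrative: load the relationship, then inspect it.
func relatedFileCount(ctx context.Context, s *models.Scene, qb models.FileLoader) (int, error) {
	if err := s.LoadFiles(ctx, qb); err != nil {
		return 0, err
	}
	return len(s.Files.List()), nil
}
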
@@ -236,8 +240,12 @@ func (h *cleanHandler) deleteRelatedGalleries(ctx context.Context, fileID file.I
}

for _, g := range galleries {
if err := g.LoadFiles(ctx, qb); err != nil {
return err
}

// only delete if the gallery has no other files
if len(g.Files) <= 1 {
if len(g.Files.List()) <= 1 {
logger.Infof("Deleting gallery %q since it has no other related files", g.GetTitle())
if err := qb.Destroy(ctx, g.ID); err != nil {
return err
@@ -245,7 +253,7 @@ func (h *cleanHandler) deleteRelatedGalleries(ctx context.Context, fileID file.I

mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, g.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
Checksum: g.Checksum(),
Path: g.Path(),
Path: g.Path,
}, nil)
}
}
@@ -269,7 +277,7 @@ func (h *cleanHandler) deleteRelatedFolderGalleries(ctx context.Context, folderI

mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, g.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
Checksum: g.Checksum(),
Path: g.Path(),
Path: g.Path,
}, nil)
}

@@ -290,15 +298,19 @@ func (h *cleanHandler) deleteRelatedImages(ctx context.Context, fileDeleter *fil
}

for _, i := range images {
if len(i.Files) <= 1 {
if err := i.LoadFiles(ctx, imageQB); err != nil {
return err
}

if len(i.Files.List()) <= 1 {
logger.Infof("Deleting image %q since it has no other related files", i.GetTitle())
if err := mgr.ImageService.Destroy(ctx, i, imageFileDeleter, true, false); err != nil {
return err
}

mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, i.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
Checksum: i.Checksum(),
Path: i.Path(),
Checksum: i.Checksum,
Path: i.Path,
}, nil)
}
}

@@ -328,6 +328,11 @@ func (t *ExportTask) populateGalleryImages(ctx context.Context, repo Repository)
}

for _, g := range galleries {
if err := g.LoadFiles(ctx, reader); err != nil {
logger.Errorf("[galleries] <%s> failed to fetch files for gallery: %s", g.GetTitle(), err.Error())
continue
}

images, err := imageReader.FindByGalleryID(ctx, g.ID)
if err != nil {
logger.Errorf("[galleries] <%s> failed to fetch images for gallery: %s", g.Checksum, err.Error())
@@ -400,7 +405,7 @@ func fileToJSON(f file.File) jsonschema.DirEntry {
BaseDirEntry: jsonschema.BaseDirEntry{
Type: jsonschema.DirEntryTypeFile,
ModTime: json.JSONTime{Time: bf.ModTime},
Path: filepath.ToSlash(bf.Path),
Path: bf.Path,
CreatedAt: json.JSONTime{Time: bf.CreatedAt},
UpdatedAt: json.JSONTime{Time: bf.UpdatedAt},
},
@@ -461,7 +466,7 @@ func folderToJSON(f file.Folder) jsonschema.DirEntry {
base := jsonschema.BaseDirEntry{
Type: jsonschema.DirEntryTypeFolder,
ModTime: json.JSONTime{Time: f.ModTime},
Path: filepath.ToSlash(f.Path),
Path: f.Path,
CreatedAt: json.JSONTime{Time: f.CreatedAt},
UpdatedAt: json.JSONTime{Time: f.UpdatedAt},
}
@@ -497,7 +502,7 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
}

// export files
for _, f := range s.Files {
for _, f := range s.Files.List() {
exportFile(f, t)
}

@@ -513,6 +518,13 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
continue
}

for _, g := range galleries {
if err := g.LoadFiles(ctx, galleryReader); err != nil {
logger.Errorf("[scenes] <%s> error getting scene gallery files: %s", sceneHash, err.Error())
continue
}
}

newSceneJSON.Galleries = gallery.GetRefs(galleries)

performers, err := performerReader.FindBySceneID(ctx, s.ID)
@@ -565,13 +577,8 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
t.performers.IDs = intslice.IntAppendUniques(t.performers.IDs, performer.GetIDs(performers))
}

pf := s.PrimaryFile()
basename := ""
hash := ""
if pf != nil {
basename = pf.Basename
hash = s.OSHash()
}
basename := filepath.Base(s.Path)
hash := s.OSHash

fn := newSceneJSON.Filename(basename, hash)

@@ -632,12 +639,17 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
tagReader := repo.Tag

for s := range jobChan {
imageHash := s.Checksum()
imageHash := s.Checksum

if err := s.LoadFiles(ctx, repo.Image); err != nil {
logger.Errorf("[images] <%s> error getting image files: %s", imageHash, err.Error())
continue
}

newImageJSON := image.ToBasicJSON(s)

// export files
for _, f := range s.Files {
for _, f := range s.Files.List() {
exportFile(f, t)
}

@@ -654,6 +666,13 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
continue
}

for _, g := range imageGalleries {
if err := g.LoadFiles(ctx, galleryReader); err != nil {
logger.Errorf("[images] <%s> error getting image gallery files: %s", imageHash, err.Error())
continue
}
}

newImageJSON.Galleries = gallery.GetRefs(imageGalleries)

performers, err := performerReader.FindByImageID(ctx, s.ID)
@@ -682,15 +701,7 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
t.performers.IDs = intslice.IntAppendUniques(t.performers.IDs, performer.GetIDs(performers))
}

pf := s.PrimaryFile()
basename := ""
hash := ""
if pf != nil {
basename = pf.Basename
hash = s.Checksum()
}

fn := newImageJSON.Filename(basename, hash)
fn := newImageJSON.Filename(filepath.Base(s.Path), s.Checksum)

if err := t.json.saveImage(fn, newImageJSON); err != nil {
logger.Errorf("[images] <%s> failed to save json: %s", imageHash, err.Error())
@@ -749,6 +760,11 @@ func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *mode
tagReader := repo.Tag

for g := range jobChan {
if err := g.LoadFiles(ctx, repo.Gallery); err != nil {
logger.Errorf("[galleries] <%s> failed to fetch files for gallery: %s", g.GetTitle(), err.Error())
continue
}

galleryHash := g.Checksum()

newGalleryJSON, err := gallery.ToBasicJSON(g)
@@ -758,7 +774,7 @@ func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *mode
}

// export files
for _, f := range g.Files {
for _, f := range g.Files.List() {
exportFile(f, t)
}

@@ -809,16 +825,13 @@ func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *mode
t.performers.IDs = intslice.IntAppendUniques(t.performers.IDs, performer.GetIDs(performers))
}

pf := g.PrimaryFile()
basename := ""
// use id in case multiple galleries with the same basename
hash := strconv.Itoa(g.ID)

switch {
case pf != nil:
basename = pf.Base().Basename
case g.FolderPath != "":
basename = filepath.Base(g.FolderPath)
case g.Path != "":
basename = filepath.Base(g.Path)
default:
basename = g.Title
}

@@ -117,6 +117,10 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) {
if len(j.input.SceneIDs) > 0 {
scenes, err = qb.FindMany(ctx, sceneIDs)
for _, s := range scenes {
if err := s.LoadFiles(ctx, qb); err != nil {
return err
}

j.queueSceneJobs(ctx, g, s, queue, &totals)
}
}
@@ -207,6 +211,11 @@ func (j *GenerateJob) queueTasks(ctx context.Context, g *generate.Generator, que
return totals
}

if err := ss.LoadFiles(ctx, j.txnManager.Scene); err != nil {
logger.Errorf("Error encountered queuing files to scan: %s", err.Error())
return totals
}

j.queueSceneJobs(ctx, g, ss, queue, &totals)
}

@@ -277,7 +286,6 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator,
options := getGeneratePreviewOptions(*generatePreviewOptions)

if utils.IsTrue(j.input.Previews) {

task := &GeneratePreviewTask{
Scene: *scene,
ImagePreview: utils.IsTrue(j.input.ImagePreviews),
@@ -344,7 +352,7 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator,

if utils.IsTrue(j.input.Phashes) {
// generate for all files in scene
for _, f := range scene.Files {
for _, f := range scene.Files.List() {
task := &GeneratePhashTask{
File: f,
fileNamingAlgorithm: j.fileNamingAlgo,

@@ -18,7 +18,7 @@ type GenerateInteractiveHeatmapSpeedTask struct {
}

func (t *GenerateInteractiveHeatmapSpeedTask) GetDescription() string {
return fmt.Sprintf("Generating heatmap and speed for %s", t.Scene.Path())
return fmt.Sprintf("Generating heatmap and speed for %s", t.Scene.Path)
}

func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
@@ -27,7 +27,7 @@ func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
}

videoChecksum := t.Scene.GetHash(t.fileNamingAlgorithm)
funscriptPath := video.GetFunscriptPath(t.Scene.Path())
funscriptPath := video.GetFunscriptPath(t.Scene.Path)
heatmapPath := instance.Paths.Scene.GetInteractiveHeatmapPath(videoChecksum)

generator := NewInteractiveHeatmapSpeedGenerator(funscriptPath, heatmapPath)
@@ -42,7 +42,7 @@ func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
median := generator.InteractiveSpeed

if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
primaryFile := t.Scene.PrimaryFile()
primaryFile := t.Scene.Files.Primary()
primaryFile.InteractiveSpeed = &median
qb := t.TxnManager.File
return qb.Update(ctx, primaryFile)
@@ -53,7 +53,7 @@ func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
}

func (t *GenerateInteractiveHeatmapSpeedTask) shouldGenerate() bool {
primaryFile := t.Scene.PrimaryFile()
primaryFile := t.Scene.Files.Primary()
if primaryFile == nil || !primaryFile.Interactive {
return false
}

@@ -27,7 +27,7 @@ type GenerateMarkersTask struct {

func (t *GenerateMarkersTask) GetDescription() string {
if t.Scene != nil {
return fmt.Sprintf("Generating markers for %s", t.Scene.Path())
return fmt.Sprintf("Generating markers for %s", t.Scene.Path)
} else if t.Marker != nil {
return fmt.Sprintf("Generating marker preview for marker ID %d", t.Marker.ID)
}
@@ -57,7 +57,7 @@ func (t *GenerateMarkersTask) Start(ctx context.Context) {
}

ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path())
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path)
if err != nil {
logger.Errorf("error reading video file: %s", err.Error())
return
@@ -83,7 +83,7 @@ func (t *GenerateMarkersTask) generateSceneMarkers(ctx context.Context) {
}

ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path())
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path)
if err != nil {
logger.Errorf("error reading video file: %s", err.Error())
return

@@ -23,7 +23,7 @@ type GeneratePreviewTask struct {
}

func (t *GeneratePreviewTask) GetDescription() string {
return fmt.Sprintf("Generating preview for %s", t.Scene.Path())
return fmt.Sprintf("Generating preview for %s", t.Scene.Path)
}

func (t *GeneratePreviewTask) Start(ctx context.Context) {
@@ -32,7 +32,7 @@ func (t *GeneratePreviewTask) Start(ctx context.Context) {
}

ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path())
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path)
if err != nil {
logger.Errorf("error reading video file: %v", err)
return
@@ -55,7 +55,7 @@ func (t *GeneratePreviewTask) Start(ctx context.Context) {
}

func (t GeneratePreviewTask) generateVideo(videoChecksum string, videoDuration float64) error {
videoFilename := t.Scene.Path()
videoFilename := t.Scene.Path

if err := t.generator.PreviewVideo(context.TODO(), videoFilename, videoDuration, videoChecksum, t.Options, true); err != nil {
logger.Warnf("[generator] failed generating scene preview, trying fallback")
@@ -68,7 +68,7 @@ func (t GeneratePreviewTask) generateVideo(videoChecksum string, videoDuration f
}

func (t GeneratePreviewTask) generateWebp(videoChecksum string) error {
videoFilename := t.Scene.Path()
videoFilename := t.Scene.Path
return t.generator.PreviewWebp(context.TODO(), videoFilename, videoChecksum)
}

@@ -20,18 +20,13 @@ type GenerateScreenshotTask struct {
}

func (t *GenerateScreenshotTask) Start(ctx context.Context) {
scenePath := t.Scene.Path()
ffprobe := instance.FFProbe
probeResult, err := ffprobe.NewVideoFile(scenePath)
scenePath := t.Scene.Path

if err != nil {
logger.Error(err.Error())
return
}
videoFile := t.Scene.Files.Primary()

var at float64
if t.ScreenshotAt == nil {
at = float64(probeResult.Duration) * 0.2
at = float64(videoFile.Duration) * 0.2
} else {
at = *t.ScreenshotAt
}
@@ -52,7 +47,7 @@ func (t *GenerateScreenshotTask) Start(ctx context.Context) {
Overwrite: true,
}

if err := g.Screenshot(context.TODO(), probeResult.Path, checksum, probeResult.Width, probeResult.Duration, generate.ScreenshotOptions{
if err := g.Screenshot(context.TODO(), videoFile.Path, checksum, videoFile.Width, videoFile.Duration, generate.ScreenshotOptions{
At: &at,
}); err != nil {
logger.Errorf("Error generating screenshot: %v", err)

@@ -16,7 +16,7 @@ type GenerateSpriteTask struct {
}

func (t *GenerateSpriteTask) GetDescription() string {
return fmt.Sprintf("Generating sprites for %s", t.Scene.Path())
return fmt.Sprintf("Generating sprites for %s", t.Scene.Path)
}

func (t *GenerateSpriteTask) Start(ctx context.Context) {
@@ -25,7 +25,7 @@ func (t *GenerateSpriteTask) Start(ctx context.Context) {
}

ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path())
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path)
if err != nil {
logger.Errorf("error reading video file: %s", err.Error())
return
@@ -51,6 +51,9 @@ func (t *GenerateSpriteTask) Start(ctx context.Context) {

// required returns true if the sprite needs to be generated
func (t GenerateSpriteTask) required() bool {
if t.Scene.Path == "" {
return false
}
sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)
return !t.doesSpriteExist(sceneHash)
}

@@ -131,7 +131,7 @@ func (j *IdentifyJob) identifyScene(ctx context.Context, s *models.Scene, source
}

var taskError error
j.progress.ExecuteTask("Identifying "+s.Path(), func() {
j.progress.ExecuteTask("Identifying "+s.Path, func() {
task := identify.SceneIdentifier{
SceneReaderUpdater: instance.Repository.Scene,
StudioCreator: instance.Repository.Studio,
@@ -151,7 +151,7 @@ func (j *IdentifyJob) identifyScene(ctx context.Context, s *models.Scene, source
})

if taskError != nil {
logger.Errorf("Error encountered identifying %s: %v", s.Path(), taskError)
logger.Errorf("Error encountered identifying %s: %v", s.Path, taskError)
}

j.progress.Increment()

@@ -14,13 +14,13 @@ type MigrateHashTask struct {

// Start starts the task.
func (t *MigrateHashTask) Start() {
if t.Scene.OSHash() == "" || t.Scene.Checksum() == "" {
if t.Scene.OSHash == "" || t.Scene.Checksum == "" {
// nothing to do
return
}

oshash := t.Scene.OSHash()
checksum := t.Scene.Checksum()
oshash := t.Scene.OSHash
checksum := t.Scene.Checksum

oldHash := oshash
newHash := checksum

@@ -277,7 +277,7 @@ func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progre
type imageThumbnailGenerator struct{}

func (g *imageThumbnailGenerator) GenerateThumbnail(ctx context.Context, i *models.Image, f *file.ImageFile) error {
thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum(), models.DefaultGthumbWidth)
thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum, models.DefaultGthumbWidth)
exists, _ := fsutil.FileExists(thumbPath)
if exists {
return nil

@@ -23,7 +23,7 @@ type GenerateTranscodeTask struct {
}

func (t *GenerateTranscodeTask) GetDescription() string {
return fmt.Sprintf("Generating transcode for %s", t.Scene.Path())
return fmt.Sprintf("Generating transcode for %s", t.Scene.Path)
}

func (t *GenerateTranscodeTask) Start(ctc context.Context) {
@@ -32,11 +32,13 @@ func (t *GenerateTranscodeTask) Start(ctc context.Context) {
return
}

f := t.Scene.Files.Primary()

ffprobe := instance.FFProbe
var container ffmpeg.Container

var err error
container, err = GetSceneFileContainer(&t.Scene)
container, err = GetVideoFileContainer(f)
if err != nil {
logger.Errorf("[transcode] error getting scene container: %s", err.Error())
return
@@ -44,13 +46,13 @@ func (t *GenerateTranscodeTask) Start(ctc context.Context) {

var videoCodec string

if t.Scene.VideoCodec() != "" {
videoCodec = t.Scene.VideoCodec()
if f.VideoCodec != "" {
videoCodec = f.VideoCodec
}

audioCodec := ffmpeg.MissingUnsupported
if t.Scene.AudioCodec() != "" {
audioCodec = ffmpeg.ProbeAudioCodec(t.Scene.AudioCodec())
if f.AudioCodec != "" {
audioCodec = ffmpeg.ProbeAudioCodec(f.AudioCodec)
}

if !t.Force && ffmpeg.IsStreamable(videoCodec, audioCodec, container) == nil {
@@ -59,7 +61,7 @@ func (t *GenerateTranscodeTask) Start(ctc context.Context) {

// TODO - move transcode generation logic elsewhere

videoFile, err := ffprobe.NewVideoFile(t.Scene.Path())
videoFile, err := ffprobe.NewVideoFile(f.Path)
if err != nil {
logger.Errorf("[transcode] error reading video file: %s", err.Error())
return
@@ -100,6 +102,11 @@ func (t *GenerateTranscodeTask) Start(ctc context.Context) {
// used only when counting files to generate, doesn't affect the actual transcode generation
// if container is missing from DB it is treated as non supported in order not to delay the user
func (t *GenerateTranscodeTask) isTranscodeNeeded() bool {
f := t.Scene.Files.Primary()
if f == nil {
return false
}

hasTranscode := HasTranscode(&t.Scene, t.fileNamingAlgorithm)
if !t.Overwrite && hasTranscode {
return false
@@ -110,17 +117,17 @@ func (t *GenerateTranscodeTask) isTranscodeNeeded() bool {
}

var videoCodec string
if t.Scene.VideoCodec() != "" {
videoCodec = t.Scene.VideoCodec()
if f.VideoCodec != "" {
videoCodec = f.VideoCodec
}
container := ""
audioCodec := ffmpeg.MissingUnsupported
if t.Scene.AudioCodec() != "" {
audioCodec = ffmpeg.ProbeAudioCodec(t.Scene.AudioCodec())
if f.AudioCodec != "" {
audioCodec = ffmpeg.ProbeAudioCodec(f.AudioCodec)
}

if t.Scene.Format() != "" {
container = t.Scene.Format()
if f.Format != "" {
container = f.Format
}

if ffmpeg.IsStreamable(videoCodec, audioCodec, ffmpeg.Container(container)) == nil {

@@ -24,6 +24,26 @@ func (f Fingerprints) Get(type_ string) interface{} {
return nil
}

func (f Fingerprints) GetString(type_ string) string {
fp := f.Get(type_)
if fp != nil {
s, _ := fp.(string)
return s
}

return ""
}

func (f Fingerprints) GetInt64(type_ string) int64 {
fp := f.Get(type_)
if fp != nil {
v, _ := fp.(int64)
return v
}

return 0
}
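
The typed getters wrap the untyped Get with a checked assertion and fall back to the zero value, so call sites avoid repeating interface{} plumbing. A usage sketch (the fingerprint values and type strings are illustrative):

fps := file.Fingerprints{
	{Type: "oshash", Fingerprint: "deadbeef00000000"},
	{Type: "phash", Fingerprint: int64(0x1234)},
}
oshash := fps.GetString("oshash") // "deadbeef00000000"
phash := fps.GetInt64("phash")    // 4660
missing := fps.GetString("md5")   // "" — an absent type yields the zero value
_, _, _ = oshash, phash, missing
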
// AppendUnique appends a fingerprint to the list if a Fingerprint of the same type does not already exist in the list. If one does, then it is updated with o's Fingerprint value.
func (f Fingerprints) AppendUnique(o Fingerprint) Fingerprints {
ret := f

@@ -18,7 +18,8 @@ import (
const (
scanQueueSize = 200000
// maximum number of times to retry in the event of a locked database
maxRetries = 1000
// use -1 to retry forever
maxRetries = -1
)
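
With maxRetries = -1 the locked-database retry loop never gives up: a negative limit reads as retry until the transaction stops failing with a lock error. A hedged sketch of the loop shape, inside a function returning error (doTxn, isLockedErr, and backoff are illustrative stand-ins, not this package's actual helpers):

for attempt := 0; maxRetries < 0 || attempt < maxRetries; attempt++ {
	err := doTxn(ctx)
	if err == nil || !isLockedErr(err) {
		return err // success, or a non-lock failure worth surfacing
	}
	time.Sleep(backoff(attempt)) // give the writer time to release the lock
}
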
// Repository provides access to storage methods for files and folders.

@@ -15,3 +15,14 @@ type VideoFile struct {
Interactive bool `json:"interactive"`
InteractiveSpeed *int `json:"interactive_speed"`
}

func (f VideoFile) GetMinResolution() int {
w := f.Width
h := f.Height

if w < h {
return w
}

return h
}
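
GetMinResolution reports the smaller of width and height, so portrait and landscape files are measured the same way when deciding which streaming resolutions to offer. For example:

f1 := file.VideoFile{Width: 1920, Height: 1080} // landscape
f2 := file.VideoFile{Width: 1080, Height: 1920} // portrait
fmt.Println(f1.GetMinResolution(), f2.GetMinResolution()) // 1080 1080
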
|
||||
|
||||
@@ -48,7 +48,7 @@ func (s *Service) destroyZipFileImages(ctx context.Context, i *models.Gallery, f
}

// for zip-based galleries, delete the images as well first
for _, f := range i.Files {
for _, f := range i.Files.List() {
// only do this where there are no other galleries related to the file
otherGalleries, err := s.Repository.FindByFileID(ctx, f.Base().ID)
if err != nil {

@@ -13,7 +13,6 @@ import (
// does not convert the relationships to other objects.
func ToBasicJSON(gallery *models.Gallery) (*jsonschema.Gallery, error) {
newGalleryJSON := jsonschema.Gallery{
FolderPath: gallery.FolderPath,
Title: gallery.Title,
URL: gallery.URL,
Details: gallery.Details,
@@ -21,7 +20,11 @@ func ToBasicJSON(gallery *models.Gallery) (*jsonschema.Gallery, error) {
UpdatedAt: json.JSONTime{Time: gallery.UpdatedAt},
}

for _, f := range gallery.Files {
if gallery.FolderID != nil {
newGalleryJSON.FolderPath = gallery.Path
}

for _, f := range gallery.Files.List() {
newGalleryJSON.ZipFiles = append(newGalleryJSON.ZipFiles, f.Base().Path)
}

@@ -69,10 +72,10 @@ func GetRefs(galleries []*models.Gallery) []jsonschema.GalleryRef {
for _, gallery := range galleries {
toAdd := jsonschema.GalleryRef{}
switch {
case gallery.FolderPath != "":
toAdd.FolderPath = gallery.FolderPath
case len(gallery.Files) > 0:
for _, f := range gallery.Files {
case gallery.FolderID != nil:
toAdd.FolderPath = gallery.Path
case len(gallery.Files.List()) > 0:
for _, f := range gallery.Files.List() {
toAdd.ZipFiles = append(toAdd.ZipFiles, f.Base().Path)
}
default:
@@ -3,6 +3,7 @@ package gallery
import (
"errors"

"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/json"
"github.com/stashapp/stash/pkg/models/jsonschema"
@@ -35,6 +36,7 @@ var (

const (
studioName = "studioName"
path = "path"
)

var (
@@ -45,6 +47,11 @@ var (
func createFullGallery(id int) models.Gallery {
return models.Gallery{
ID: id,
Files: models.NewRelatedFiles([]file.File{
&file.BaseFile{
Path: path,
},
}),
Title: title,
Date: &dateObj,
Details: details,
@@ -64,6 +71,7 @@ func createFullJSONGallery() *jsonschema.Gallery {
Rating: rating,
Organized: organized,
URL: url,
ZipFiles: []string{path},
CreatedAt: json.JSONTime{
Time: createTime,
},
@@ -3,7 +3,6 @@ package gallery
import (
"context"
"fmt"
"path/filepath"
"strings"

"github.com/stashapp/stash/pkg/file"
@@ -247,8 +246,10 @@ func (i *Importer) createTags(ctx context.Context, names []string) ([]*models.Ta
}

func (i *Importer) populateFilesFolder(ctx context.Context) error {
files := make([]file.File, 0)

for _, ref := range i.Input.ZipFiles {
path := filepath.FromSlash(ref)
path := ref
f, err := i.FileFinder.FindByPath(ctx, path)
if err != nil {
return fmt.Errorf("error finding file: %w", err)
@@ -257,12 +258,14 @@ func (i *Importer) populateFilesFolder(ctx context.Context) error {
if f == nil {
return fmt.Errorf("gallery zip file '%s' not found", path)
} else {
i.gallery.Files = append(i.gallery.Files, f)
files = append(files, f)
}
}

i.gallery.Files = models.NewRelatedFiles(files)

if i.Input.FolderPath != "" {
path := filepath.FromSlash(i.Input.FolderPath)
path := i.Input.FolderPath
f, err := i.FolderFinder.FindByPath(ctx, path)
if err != nil {
return fmt.Errorf("error finding folder: %w", err)
@@ -302,8 +305,8 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
var existing []*models.Gallery
var err error
switch {
case len(i.gallery.Files) > 0:
for _, f := range i.gallery.Files {
case len(i.gallery.Files.List()) > 0:
for _, f := range i.gallery.Files.List() {
existing, err = i.ReaderWriter.FindByFileID(ctx, f.Base().ID)
if err != nil {
return nil, err
@@ -333,7 +336,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {

func (i *Importer) Create(ctx context.Context) (*int, error) {
var fileIDs []file.ID
for _, f := range i.gallery.Files {
for _, f := range i.gallery.Files.List() {
fileIDs = append(fileIDs, f.Base().ID)
}
err := i.ReaderWriter.Create(ctx, &i.gallery, fileIDs)
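In line with the "don't translate path slashes in export" change in this commit, the lookups above now use the exported string verbatim. A small self-contained sketch of the difference:

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	ref := "videos/archive.zip"        // path exactly as stored in the export JSON
	before := filepath.FromSlash(ref)  // old behavior: converted to host separators
	after := ref                       // new behavior: matched byte-for-byte on every platform
	fmt.Println(before, after)
}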
@@ -6,6 +6,7 @@ import (
"testing"
"time"

"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/json"
"github.com/stashapp/stash/pkg/models/jsonschema"
@@ -67,6 +68,7 @@ func TestImporterPreImport(t *testing.T) {
Rating: &rating,
Organized: organized,
URL: url,
Files: models.NewRelatedFiles([]file.File{}),
TagIDs: models.NewRelatedIDs([]int{}),
PerformerIDs: models.NewRelatedIDs([]int{}),
CreatedAt: createdAt,
@@ -19,6 +19,7 @@ type FinderCreatorUpdater interface {
Finder
Create(ctx context.Context, newGallery *models.Gallery, fileIDs []file.ID) error
AddFileID(ctx context.Context, id int, fileID file.ID) error
models.FileLoader
}

type SceneFinderUpdater interface {
@@ -83,8 +84,12 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File) error {

func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Gallery, f file.File) error {
for _, i := range existing {
if err := i.LoadFiles(ctx, h.CreatorUpdater); err != nil {
return err
}

found := false
for _, sf := range i.Files {
for _, sf := range i.Files.List() {
if sf.Base().ID == f.Base().ID {
found = true
break
@@ -93,14 +98,14 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.

if !found {
logger.Infof("Adding %s to gallery %s", f.Base().Path, i.GetTitle())
i.Files = append(i.Files, f)
}

if err := h.CreatorUpdater.AddFileID(ctx, i.ID, f.Base().ID); err != nil {
return fmt.Errorf("adding file to gallery: %w", err)
}
}

}

return nil
}
@@ -22,7 +22,7 @@ type FileDeleter struct {

// MarkGeneratedFiles marks for deletion the generated files for the provided image.
func (d *FileDeleter) MarkGeneratedFiles(image *models.Image) error {
thumbPath := d.Paths.Generated.GetThumbnailPath(image.Checksum(), models.DefaultGthumbWidth)
thumbPath := d.Paths.Generated.GetThumbnailPath(image.Checksum, models.DefaultGthumbWidth)
exists, _ := fsutil.FileExists(thumbPath)
if exists {
return d.Files([]string{thumbPath})
@@ -47,6 +47,10 @@ func (s *Service) DestroyZipImages(ctx context.Context, zipFile file.File, fileD
}

for _, img := range imgs {
if err := img.LoadFiles(ctx, s.Repository); err != nil {
return nil, err
}

const deleteFileInZip = false
if err := s.destroyImage(ctx, img, fileDeleter, deleteGenerated, deleteFileInZip); err != nil {
return nil, err
@@ -77,7 +81,7 @@ func (s *Service) destroyImage(ctx context.Context, i *models.Image, fileDeleter

// deleteFiles deletes files for the image from the database and file system, if they are not in use by other images
func (s *Service) deleteFiles(ctx context.Context, i *models.Image, fileDeleter *FileDeleter) error {
for _, f := range i.Files {
for _, f := range i.Files.List() {
// only delete files where there is no other associated image
otherImages, err := s.Repository.FindByFileID(ctx, f.ID)
if err != nil {

@@ -26,7 +26,7 @@ func ToBasicJSON(image *models.Image) *jsonschema.Image {
newImageJSON.Organized = image.Organized
newImageJSON.OCounter = image.OCounter

for _, f := range image.Files {
for _, f := range image.Files.List() {
newImageJSON.Files = append(newImageJSON.Files, f.Base().Path)
}
@@ -3,6 +3,7 @@ package image
import (
"errors"

"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/json"
"github.com/stashapp/stash/pkg/models/jsonschema"
@@ -30,6 +31,7 @@ var (

const (
studioName = "studioName"
path = "path"
)

var (
@@ -40,6 +42,13 @@ var (
func createFullImage(id int) models.Image {
return models.Image{
ID: id,
Files: models.NewRelatedImageFiles([]*file.ImageFile{
{
BaseFile: &file.BaseFile{
Path: path,
},
},
}),
Title: title,
OCounter: ocounter,
Rating: &rating,
@@ -55,6 +64,7 @@ func createFullJSONImage() *jsonschema.Image {
OCounter: ocounter,
Rating: rating,
Organized: organized,
Files: []string{path},
CreatedAt: json.JSONTime{
Time: createTime,
},

@@ -8,5 +8,5 @@ import (
)

func IsCover(img *models.Image) bool {
return strings.HasSuffix(img.Path(), "cover.jpg")
return strings.HasSuffix(img.Path, "cover.jpg")
}

@@ -5,7 +5,6 @@ import (
"path/filepath"
"testing"

"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models"
"github.com/stretchr/testify/assert"
)
@@ -28,13 +27,7 @@ func TestIsCover(t *testing.T) {
assert := assert.New(t)
for _, tc := range tests {
img := &models.Image{
Files: []*file.ImageFile{
{
BaseFile: &file.BaseFile{
Path: tc.fn,
},
},
},
}
assert.Equal(tc.isCover, IsCover(img), "expected: %t for %s", tc.isCover, tc.fn)
}
@@ -3,7 +3,6 @@ package image
import (
"context"
"fmt"
"path/filepath"
"strings"

"github.com/stashapp/stash/pkg/file"
@@ -91,8 +90,10 @@ func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image {
}

func (i *Importer) populateFiles(ctx context.Context) error {
files := make([]*file.ImageFile, 0)

for _, ref := range i.Input.Files {
path := filepath.FromSlash(ref)
path := ref
f, err := i.FileFinder.FindByPath(ctx, path)
if err != nil {
return fmt.Errorf("error finding file: %w", err)
@@ -101,10 +102,12 @@ func (i *Importer) populateFiles(ctx context.Context) error {
if f == nil {
return fmt.Errorf("image file '%s' not found", path)
} else {
i.image.Files = append(i.image.Files, f.(*file.ImageFile))
files = append(files, f.(*file.ImageFile))
}
}

i.image.Files = models.NewRelatedImageFiles(files)

return nil
}

@@ -300,7 +303,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
var existing []*models.Image
var err error

for _, f := range i.image.Files {
for _, f := range i.image.Files.List() {
existing, err = i.ReaderWriter.FindByFileID(ctx, f.ID)
if err != nil {
return nil, err
@@ -317,7 +320,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {

func (i *Importer) Create(ctx context.Context) (*int, error) {
var fileIDs []file.ID
for _, f := range i.image.Files {
for _, f := range i.image.Files.List() {
fileIDs = append(fileIDs, f.Base().ID)
}
@@ -26,6 +26,7 @@ type FinderCreatorUpdater interface {
Create(ctx context.Context, newImage *models.ImageCreateInput) error
AddFileID(ctx context.Context, id int, fileID file.ID) error
models.GalleryIDLoader
models.ImageFileLoader
}

type GalleryFinderCreator interface {
@@ -145,8 +146,12 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File) error {

func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Image, f *file.ImageFile) error {
for _, i := range existing {
if err := i.LoadFiles(ctx, h.CreatorUpdater); err != nil {
return err
}

found := false
for _, sf := range i.Files {
for _, sf := range i.Files.List() {
if sf.ID == f.Base().ID {
found = true
break
@@ -155,7 +160,6 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.

if !found {
logger.Infof("Adding %s to image %s", f.Path, i.GetTitle())
i.Files = append(i.Files, f)

// associate with folder-based gallery if applicable
if h.ScanConfig.GetCreateGalleriesFromFolders() {
@@ -218,7 +222,7 @@ func (h *ScanHandler) associateFolderBasedGallery(ctx context.Context, newImage

if g != nil && !intslice.IntInclude(newImage.GalleryIDs.List(), g.ID) {
newImage.GalleryIDs.Add(g.ID)
logger.Infof("Adding %s to folder-based gallery %s", f.Base().Path, g.Path())
logger.Infof("Adding %s to folder-based gallery %s", f.Base().Path, g.Path)
}

return nil

@@ -15,6 +15,7 @@ type FinderByFile interface {
type Repository interface {
FinderByFile
Destroyer
models.ImageFileLoader
}

type Service struct {
@@ -307,7 +307,7 @@ func PathToScenes(ctx context.Context, name string, paths []string, sceneReader

r := nameToRegexp(name, useUnicode)
for _, p := range scenes {
if regexpMatchesPath(r, p.Path()) != -1 {
if regexpMatchesPath(r, p.Path) != -1 {
ret = append(ret, p)
}
}
@@ -344,7 +344,7 @@ func PathToImages(ctx context.Context, name string, paths []string, imageReader

r := nameToRegexp(name, useUnicode)
for _, p := range images {
if regexpMatchesPath(r, p.Path()) != -1 {
if regexpMatchesPath(r, p.Path) != -1 {
ret = append(ret, p)
}
}
@@ -381,7 +381,7 @@ func PathToGalleries(ctx context.Context, name string, paths []string, galleryRe

r := nameToRegexp(name, useUnicode)
for _, p := range gallerys {
path := p.Path()
path := p.Path
if path != "" && regexpMatchesPath(r, path) != -1 {
ret = append(ret, p)
}
@@ -500,6 +500,29 @@ func (_m *SceneReaderWriter) GetCover(ctx context.Context, sceneID int) ([]byte,
return r0, r1
}

// GetFiles provides a mock function with given fields: ctx, relatedID
func (_m *SceneReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]*file.VideoFile, error) {
ret := _m.Called(ctx, relatedID)

var r0 []*file.VideoFile
if rf, ok := ret.Get(0).(func(context.Context, int) []*file.VideoFile); ok {
r0 = rf(ctx, relatedID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*file.VideoFile)
}
}

var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
r1 = rf(ctx, relatedID)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// GetGalleryIDs provides a mock function with given fields: ctx, relatedID
func (_m *SceneReaderWriter) GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error) {
ret := _m.Called(ctx, relatedID)
@@ -2,7 +2,6 @@ package models

import (
"context"
"path/filepath"
"time"

"github.com/stashapp/stash/pkg/file"
@@ -11,10 +10,6 @@ import (
type Gallery struct {
ID int `json:"id"`

// Path *string `json:"path"`
// Checksum string `json:"checksum"`
// Zip bool `json:"zip"`

Title string `json:"title"`
URL string `json:"url"`
Date *Date `json:"date"`
@@ -23,16 +18,15 @@ type Gallery struct {
Organized bool `json:"organized"`
StudioID *int `json:"studio_id"`

// FileModTime *time.Time `json:"file_mod_time"`

// transient - not persisted
Files []file.File
Files RelatedFiles
// transient - not persisted
PrimaryFileID *file.ID
// transient - path of primary file or folder
Path string

FolderID *file.FolderID `json:"folder_id"`

// transient - not persisted
FolderPath string `json:"folder_path"`

CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`

@@ -41,6 +35,30 @@ type Gallery struct {
PerformerIDs RelatedIDs `json:"performer_ids"`
}

func (g *Gallery) LoadFiles(ctx context.Context, l FileLoader) error {
return g.Files.load(func() ([]file.File, error) {
return l.GetFiles(ctx, g.ID)
})
}

func (g *Gallery) LoadPrimaryFile(ctx context.Context, l file.Finder) error {
return g.Files.loadPrimary(func() (file.File, error) {
if g.PrimaryFileID == nil {
return nil, nil
}

f, err := l.Find(ctx, *g.PrimaryFileID)
if err != nil {
return nil, err
}

if len(f) > 0 {
return f[0], nil
}
return nil, nil
})
}

func (g *Gallery) LoadSceneIDs(ctx context.Context, l SceneIDLoader) error {
return g.SceneIDs.load(func() ([]int, error) {
return l.GetSceneIDs(ctx, g.ID)
@@ -59,24 +77,8 @@ func (g *Gallery) LoadTagIDs(ctx context.Context, l TagIDLoader) error {
})
}

func (g Gallery) PrimaryFile() file.File {
if len(g.Files) == 0 {
return nil
}

return g.Files[0]
}

func (g Gallery) Path() string {
if p := g.PrimaryFile(); p != nil {
return p.Base().Path
}

return g.FolderPath
}

func (g Gallery) Checksum() string {
if p := g.PrimaryFile(); p != nil {
if p := g.Files.Primary(); p != nil {
v := p.Base().Fingerprints.Get(file.FingerprintTypeMD5)
if v == nil {
return ""
@@ -123,15 +125,7 @@ func (g Gallery) GetTitle() string {
return g.Title
}

if len(g.Files) > 0 {
return filepath.Base(g.Path())
}

if g.FolderPath != "" {
return g.FolderPath
}

return ""
return g.Path
}

const DefaultGthumbWidth int = 640
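A hedged sketch of what this means at call sites: Path is now a transient field the query layer fills in, so code that previously called the Path() method (which needed files or the folder resolved) just reads the field.

func galleryLabel(g Gallery) string {
	if g.Title != "" {
		return g.Title
	}
	return g.Path // plain field read; no file or folder lookup required
}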
@@ -2,6 +2,8 @@ package models

import (
"context"
"errors"
"path/filepath"
"time"

"github.com/stashapp/stash/pkg/file"
@@ -18,7 +20,12 @@ type Image struct {
StudioID *int `json:"studio_id"`

// transient - not persisted
Files []*file.ImageFile
Files RelatedImageFiles
PrimaryFileID *file.ID
// transient - path of primary file - empty if no files
Path string
// transient - checksum of primary file - empty if no files
Checksum string

CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
@@ -28,6 +35,35 @@ type Image struct {
PerformerIDs RelatedIDs `json:"performer_ids"`
}

func (i *Image) LoadFiles(ctx context.Context, l ImageFileLoader) error {
return i.Files.load(func() ([]*file.ImageFile, error) {
return l.GetFiles(ctx, i.ID)
})
}

func (i *Image) LoadPrimaryFile(ctx context.Context, l file.Finder) error {
return i.Files.loadPrimary(func() (*file.ImageFile, error) {
if i.PrimaryFileID == nil {
return nil, nil
}

f, err := l.Find(ctx, *i.PrimaryFileID)
if err != nil {
return nil, err
}

var vf *file.ImageFile
if len(f) > 0 {
var ok bool
vf, ok = f[0].(*file.ImageFile)
if !ok {
return nil, errors.New("not an image file")
}
}
return vf, nil
})
}

func (i *Image) LoadGalleryIDs(ctx context.Context, l GalleryIDLoader) error {
return i.GalleryIDs.load(func() ([]int, error) {
return l.GetGalleryIDs(ctx, i.ID)
@@ -46,34 +82,6 @@ func (i *Image) LoadTagIDs(ctx context.Context, l TagIDLoader) error {
})
}

func (i Image) PrimaryFile() *file.ImageFile {
if len(i.Files) == 0 {
return nil
}

return i.Files[0]
}

func (i Image) Path() string {
if p := i.PrimaryFile(); p != nil {
return p.Path
}

return ""
}

func (i Image) Checksum() string {
if p := i.PrimaryFile(); p != nil {
v := p.Fingerprints.Get(file.FingerprintTypeMD5)
if v == nil {
return ""
}

return v.(string)
}
return ""
}

// GetTitle returns the title of the image. If the Title field is empty,
// then the base filename is returned.
func (i Image) GetTitle() string {
@@ -81,8 +89,8 @@ func (i Image) GetTitle() string {
return i.Title
}

if p := i.PrimaryFile(); p != nil {
return p.Basename
if i.Path != "" {
return filepath.Base(i.Path)
}

return ""
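The LoadPrimaryFile pattern above guards the interface-to-concrete conversion; a standalone sketch of that guard (File and ImageFile here are simplified stand-ins, not the pkg/file types):

package main

import (
	"errors"
	"fmt"
)

type File interface{ Name() string }

type ImageFile struct{ name string }

func (f *ImageFile) Name() string { return f.name }

// primaryImage mirrors the guarded assertion: a found file that is not an
// image file is reported as an error rather than silently dropped.
func primaryImage(found []File) (*ImageFile, error) {
	if len(found) == 0 {
		return nil, nil // having no primary file is not an error
	}
	vf, ok := found[0].(*ImageFile)
	if !ok {
		return nil, errors.New("not an image file")
	}
	return vf, nil
}

func main() {
	f, err := primaryImage([]File{&ImageFile{name: "cover.jpg"}})
	fmt.Println(f.Name(), err) // cover.jpg <nil>
}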
@@ -2,6 +2,7 @@ package models

import (
"context"
"errors"
"path/filepath"
"strconv"
"time"
@@ -22,7 +23,14 @@ type Scene struct {
StudioID *int `json:"studio_id"`

// transient - not persisted
Files []*file.VideoFile
Files RelatedVideoFiles
PrimaryFileID *file.ID
// transient - path of primary file - empty if no files
Path string
// transient - oshash of primary file - empty if no files
OSHash string
// transient - checksum of primary file - empty if no files
Checksum string

CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
@@ -34,6 +42,35 @@ type Scene struct {
StashIDs RelatedStashIDs `json:"stash_ids"`
}

func (s *Scene) LoadFiles(ctx context.Context, l VideoFileLoader) error {
return s.Files.load(func() ([]*file.VideoFile, error) {
return l.GetFiles(ctx, s.ID)
})
}

func (s *Scene) LoadPrimaryFile(ctx context.Context, l file.Finder) error {
return s.Files.loadPrimary(func() (*file.VideoFile, error) {
if s.PrimaryFileID == nil {
return nil, nil
}

f, err := l.Find(ctx, *s.PrimaryFileID)
if err != nil {
return nil, err
}

var vf *file.VideoFile
if len(f) > 0 {
var ok bool
vf, ok = f[0].(*file.VideoFile)
if !ok {
return nil, errors.New("not a video file")
}
}
return vf, nil
})
}

func (s *Scene) LoadGalleryIDs(ctx context.Context, l GalleryIDLoader) error {
return s.GalleryIDs.load(func() ([]int, error) {
return l.GetGalleryIDs(ctx, s.ID)
@@ -85,89 +122,13 @@ func (s *Scene) LoadRelationships(ctx context.Context, l SceneReader) error {
return err
}

if err := s.LoadFiles(ctx, l); err != nil {
return err
}

return nil
}

func (s Scene) PrimaryFile() *file.VideoFile {
if len(s.Files) == 0 {
return nil
}

return s.Files[0]
}

func (s Scene) Path() string {
if p := s.PrimaryFile(); p != nil {
return p.Base().Path
}

return ""
}

func (s Scene) getHash(type_ string) string {
if p := s.PrimaryFile(); p != nil {
v := p.Base().Fingerprints.Get(type_)
if v == nil {
return ""
}

return v.(string)
}
return ""
}

func (s Scene) Checksum() string {
return s.getHash(file.FingerprintTypeMD5)
}

func (s Scene) OSHash() string {
return s.getHash(file.FingerprintTypeOshash)
}

func (s Scene) Phash() int64 {
if p := s.PrimaryFile(); p != nil {
v := p.Base().Fingerprints.Get(file.FingerprintTypePhash)
if v == nil {
return 0
}

return v.(int64)
}
return 0
}

func (s Scene) Duration() float64 {
if p := s.PrimaryFile(); p != nil {
return p.Duration
}

return 0
}

func (s Scene) Format() string {
if p := s.PrimaryFile(); p != nil {
return p.Format
}

return ""
}

func (s Scene) VideoCodec() string {
if p := s.PrimaryFile(); p != nil {
return p.VideoCodec
}

return ""
}

func (s Scene) AudioCodec() string {
if p := s.PrimaryFile(); p != nil {
return p.AudioCodec
}

return ""
}

// ScenePartial represents part of a Scene object. It is used to update
// the database entry.
type ScenePartial struct {
@@ -258,43 +219,22 @@ func (s Scene) GetTitle() string {
return s.Title
}

return filepath.Base(s.Path())
return filepath.Base(s.Path)
}

// GetHash returns the hash of the scene, based on the hash algorithm provided. If
// hash algorithm is MD5, then Checksum is returned. Otherwise, OSHash is returned.
func (s Scene) GetHash(hashAlgorithm HashAlgorithm) string {
f := s.PrimaryFile()
if f == nil {
return ""
}

switch hashAlgorithm {
case HashAlgorithmMd5:
return f.Base().Fingerprints.Get(file.FingerprintTypeMD5).(string)
return s.Checksum
case HashAlgorithmOshash:
return f.Base().Fingerprints.Get(file.FingerprintTypeOshash).(string)
return s.OSHash
}

return ""
}

func (s Scene) GetMinResolution() int {
f := s.PrimaryFile()
if f == nil {
return 0
}

w := f.Width
h := f.Height

if w < h {
return w
}

return h
}

// SceneFileType represents the file metadata for a scene.
type SceneFileType struct {
Size *string `graphql:"size" json:"size"`
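Usage sketch for the reworked GetHash, assuming the transient Checksum/OSHash fields have been populated by the query layer as shown above:

func sceneHashExample(s Scene) string {
	// prefer MD5 when present, falling back to oshash
	if h := s.GetHash(HashAlgorithmMd5); h != "" {
		return h
	}
	return s.GetHash(HashAlgorithmOshash)
}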
@@ -1,6 +1,10 @@
package models

import "context"
import (
"context"

"github.com/stashapp/stash/pkg/file"
)

type SceneIDLoader interface {
GetSceneIDs(ctx context.Context, relatedID int) ([]int, error)
@@ -26,6 +30,18 @@ type StashIDLoader interface {
GetStashIDs(ctx context.Context, relatedID int) ([]StashID, error)
}

type VideoFileLoader interface {
GetFiles(ctx context.Context, relatedID int) ([]*file.VideoFile, error)
}

type ImageFileLoader interface {
GetFiles(ctx context.Context, relatedID int) ([]*file.ImageFile, error)
}

type FileLoader interface {
GetFiles(ctx context.Context, relatedID int) ([]file.File, error)
}

// RelatedIDs represents a list of related IDs.
// TODO - this can be made generic
type RelatedIDs struct {
@@ -189,3 +205,266 @@ func (r *RelatedStashIDs) load(fn func() ([]StashID, error)) error {

return nil
}

type RelatedVideoFiles struct {
primaryFile *file.VideoFile
files []*file.VideoFile
primaryLoaded bool
}

func NewRelatedVideoFiles(files []*file.VideoFile) RelatedVideoFiles {
ret := RelatedVideoFiles{
files: files,
primaryLoaded: true,
}

if len(files) > 0 {
ret.primaryFile = files[0]
}

return ret
}

func (r *RelatedVideoFiles) SetPrimary(f *file.VideoFile) {
r.primaryFile = f
r.primaryLoaded = true
}

func (r *RelatedVideoFiles) Set(f []*file.VideoFile) {
r.files = f
if len(r.files) > 0 {
r.primaryFile = r.files[0]
}

r.primaryLoaded = true
}

// Loaded returns true if the relationship has been loaded.
func (r RelatedVideoFiles) Loaded() bool {
return r.files != nil
}

// PrimaryLoaded returns true if the primary file relationship has been loaded.
func (r RelatedVideoFiles) PrimaryLoaded() bool {
return r.primaryLoaded
}

// List returns the related files. Panics if the relationship has not been loaded.
func (r RelatedVideoFiles) List() []*file.VideoFile {
if !r.Loaded() {
panic("relationship has not been loaded")
}

return r.files
}

// Primary returns the primary file. Panics if the relationship has not been loaded.
func (r RelatedVideoFiles) Primary() *file.VideoFile {
if !r.PrimaryLoaded() {
panic("relationship has not been loaded")
}

return r.primaryFile
}

func (r *RelatedVideoFiles) load(fn func() ([]*file.VideoFile, error)) error {
if r.Loaded() {
return nil
}

var err error
r.files, err = fn()
if err != nil {
return err
}

if len(r.files) > 0 {
r.primaryFile = r.files[0]
}

r.primaryLoaded = true

return nil
}

func (r *RelatedVideoFiles) loadPrimary(fn func() (*file.VideoFile, error)) error {
if r.PrimaryLoaded() {
return nil
}

var err error
r.primaryFile, err = fn()
if err != nil {
return err
}

r.primaryLoaded = true

return nil
}

type RelatedImageFiles struct {
primaryFile *file.ImageFile
files []*file.ImageFile
primaryLoaded bool
}

func NewRelatedImageFiles(files []*file.ImageFile) RelatedImageFiles {
ret := RelatedImageFiles{
files: files,
primaryLoaded: true,
}

if len(files) > 0 {
ret.primaryFile = files[0]
}

return ret
}

// Loaded returns true if the relationship has been loaded.
func (r RelatedImageFiles) Loaded() bool {
return r.files != nil
}

// PrimaryLoaded returns true if the primary file relationship has been loaded.
func (r RelatedImageFiles) PrimaryLoaded() bool {
return r.primaryLoaded
}

// List returns the related files. Panics if the relationship has not been loaded.
func (r RelatedImageFiles) List() []*file.ImageFile {
if !r.Loaded() {
panic("relationship has not been loaded")
}

return r.files
}

// Primary returns the primary file. Panics if the relationship has not been loaded.
func (r RelatedImageFiles) Primary() *file.ImageFile {
if !r.PrimaryLoaded() {
panic("relationship has not been loaded")
}

return r.primaryFile
}

func (r *RelatedImageFiles) load(fn func() ([]*file.ImageFile, error)) error {
if r.Loaded() {
return nil
}

var err error
r.files, err = fn()
if err != nil {
return err
}

if len(r.files) > 0 {
r.primaryFile = r.files[0]
}

r.primaryLoaded = true

return nil
}

func (r *RelatedImageFiles) loadPrimary(fn func() (*file.ImageFile, error)) error {
if r.PrimaryLoaded() {
return nil
}

var err error
r.primaryFile, err = fn()
if err != nil {
return err
}

r.primaryLoaded = true

return nil
}

type RelatedFiles struct {
primaryFile file.File
files []file.File
primaryLoaded bool
}

func NewRelatedFiles(files []file.File) RelatedFiles {
ret := RelatedFiles{
files: files,
primaryLoaded: true,
}

if len(files) > 0 {
ret.primaryFile = files[0]
}

return ret
}

// Loaded returns true if the relationship has been loaded.
func (r RelatedFiles) Loaded() bool {
return r.files != nil
}

// PrimaryLoaded returns true if the primary file relationship has been loaded.
func (r RelatedFiles) PrimaryLoaded() bool {
return r.primaryLoaded
}

// List returns the related files. Panics if the relationship has not been loaded.
func (r RelatedFiles) List() []file.File {
if !r.Loaded() {
panic("relationship has not been loaded")
}

return r.files
}

// Primary returns the primary file. Panics if the relationship has not been loaded.
func (r RelatedFiles) Primary() file.File {
if !r.PrimaryLoaded() {
panic("relationship has not been loaded")
}

return r.primaryFile
}

func (r *RelatedFiles) load(fn func() ([]file.File, error)) error {
if r.Loaded() {
return nil
}

var err error
r.files, err = fn()
if err != nil {
return err
}

if len(r.files) > 0 {
r.primaryFile = r.files[0]
}

r.primaryLoaded = true

return nil
}

func (r *RelatedFiles) loadPrimary(fn func() (file.File, error)) error {
if r.PrimaryLoaded() {
return nil
}

var err error
r.primaryFile, err = fn()
if err != nil {
return err
}

r.primaryLoaded = true

return nil
}
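The three Related*Files types all follow the same load-once discipline. A compact standalone sketch of that discipline, simplified to a string slice and an explicit flag (note the real types above instead treat a nil slice as "not loaded"):

package main

import "fmt"

type related struct {
	items  []string
	loaded bool
}

// load runs fn at most once; repeated LoadX calls become no-ops.
func (r *related) load(fn func() ([]string, error)) error {
	if r.loaded {
		return nil
	}
	items, err := fn()
	if err != nil {
		return err
	}
	r.items, r.loaded = items, true
	return nil
}

// List panics if load was never called, surfacing missing-load bugs early.
func (r *related) List() []string {
	if !r.loaded {
		panic("relationship has not been loaded")
	}
	return r.items
}

func main() {
	var r related
	_ = r.load(func() ([]string, error) { return []string{"a.mp4"}, nil })
	fmt.Println(r.List()) // [a.mp4]
}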
@@ -139,6 +139,7 @@ type SceneReader interface {
TagIDLoader
SceneMovieLoader
StashIDLoader
VideoFileLoader

CountByPerformerID(ctx context.Context, performerID int) (int, error)
// FindByStudioID(studioID int) ([]*Scene, error)

@@ -161,7 +161,7 @@ func (s *Service) Destroy(ctx context.Context, scene *models.Scene, fileDeleter

// deleteFiles deletes files from the database and file system
func (s *Service) deleteFiles(ctx context.Context, scene *models.Scene, fileDeleter *FileDeleter) error {
for _, f := range scene.Files {
for _, f := range scene.Files.List() {
// only delete files where there is no other associated scene
otherScenes, err := s.Repository.FindByFileID(ctx, f.ID)
if err != nil {

@@ -56,7 +56,7 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (
newSceneJSON.Organized = scene.Organized
newSceneJSON.OCounter = scene.OCounter

for _, f := range scene.Files {
for _, f := range scene.Files.List() {
newSceneJSON.Files = append(newSceneJSON.Files, f.Base().Path)
}
@@ -3,6 +3,7 @@ package scene
import (
"errors"

"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/json"
"github.com/stashapp/stash/pkg/models/jsonschema"
@@ -72,7 +73,10 @@ var stashID = models.StashID{
Endpoint: "Endpoint",
}

const imageBase64 = "aW1hZ2VCeXRlcw=="
const (
path = "path"
imageBase64 = "aW1hZ2VCeXRlcw=="
)

var (
createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC)
@@ -89,6 +93,13 @@ func createFullScene(id int) models.Scene {
Rating: &rating,
Organized: organized,
URL: url,
Files: models.NewRelatedVideoFiles([]*file.VideoFile{
{
BaseFile: &file.BaseFile{
Path: path,
},
},
}),
StashIDs: models.NewRelatedStashIDs([]models.StashID{
stashID,
}),
@@ -100,6 +111,13 @@ func createFullScene(id int) models.Scene {
func createEmptyScene(id int) models.Scene {
return models.Scene{
ID: id,
Files: models.NewRelatedVideoFiles([]*file.VideoFile{
{
BaseFile: &file.BaseFile{
Path: path,
},
},
}),
StashIDs: models.NewRelatedStashIDs([]models.StashID{}),
CreatedAt: createTime,
UpdatedAt: updateTime,
@@ -109,6 +127,7 @@ func createEmptyScene(id int) models.Scene {
func createFullJSONScene(image string) *jsonschema.Scene {
return &jsonschema.Scene{
Title: title,
Files: []string{path},
Date: date,
Details: details,
OCounter: ocounter,
@@ -130,6 +149,7 @@ func createFullJSONScene(image string) *jsonschema.Scene {

func createEmptyJSONScene() *jsonschema.Scene {
return &jsonschema.Scene{
Files: []string{path},
CreatedAt: json.JSONTime{
Time: createTime,
},
@@ -3,7 +3,6 @@ package scene
import (
"context"
"fmt"
"path/filepath"
"strings"

"github.com/stashapp/stash/pkg/file"
@@ -109,8 +108,10 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene {
}

func (i *Importer) populateFiles(ctx context.Context) error {
files := make([]*file.VideoFile, 0)

for _, ref := range i.Input.Files {
path := filepath.FromSlash(ref)
path := ref
f, err := i.FileFinder.FindByPath(ctx, path)
if err != nil {
return fmt.Errorf("error finding file: %w", err)
@@ -119,10 +120,12 @@ func (i *Importer) populateFiles(ctx context.Context) error {
if f == nil {
return fmt.Errorf("scene file '%s' not found", path)
} else {
i.scene.Files = append(i.scene.Files, f.(*file.VideoFile))
files = append(files, f.(*file.VideoFile))
}
}

i.scene.Files = models.NewRelatedVideoFiles(files)

return nil
}

@@ -374,7 +377,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
var existing []*models.Scene
var err error

for _, f := range i.scene.Files {
for _, f := range i.scene.Files.List() {
existing, err = i.ReaderWriter.FindByFileID(ctx, f.ID)
if err != nil {
return nil, err
@@ -391,7 +394,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {

func (i *Importer) Create(ctx context.Context) (*int, error) {
var fileIDs []file.ID
for _, f := range i.scene.Files {
for _, f := range i.scene.Files.List() {
fileIDs = append(fileIDs, f.Base().ID)
}
if err := i.ReaderWriter.Create(ctx, &i.scene, fileIDs); err != nil {
@@ -24,6 +24,7 @@ type CreatorUpdater interface {
Create(ctx context.Context, newScene *models.Scene, fileIDs []file.ID) error
UpdatePartial(ctx context.Context, id int, updatedScene models.ScenePartial) (*models.Scene, error)
AddFileID(ctx context.Context, id int, fileID file.ID) error
models.VideoFileLoader
}

type ScanGenerator interface {
@@ -116,8 +117,12 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File) error {

func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Scene, f *file.VideoFile) error {
for _, s := range existing {
if err := s.LoadFiles(ctx, h.CreatorUpdater); err != nil {
return err
}

found := false
for _, sf := range s.Files {
for _, sf := range s.Files.List() {
if sf.ID == f.ID {
found = true
break
@@ -126,7 +131,6 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.

if !found {
logger.Infof("Adding %s to scene %s", f.Path, s.GetTitle())
s.Files = append(s.Files, f)

if err := h.CreatorUpdater.AddFileID(ctx, s.ID, f.ID); err != nil {
return fmt.Errorf("adding file to scene: %w", err)
@@ -95,7 +95,7 @@ func (s autotagScraper) viaScene(ctx context.Context, _client *http.Client, scen

// populate performers, studio and tags based on scene path
if err := txn.WithTxn(ctx, s.txnManager, func(ctx context.Context) error {
path := scene.Path()
path := scene.Path
performers, err := autotagMatchPerformers(ctx, path, s.performerReader, trimExt)
if err != nil {
return fmt.Errorf("autotag scraper viaScene: %w", err)
@@ -127,20 +127,20 @@ func (s autotagScraper) viaScene(ctx context.Context, _client *http.Client, scen
}

func (s autotagScraper) viaGallery(ctx context.Context, _client *http.Client, gallery *models.Gallery) (*ScrapedGallery, error) {
path := gallery.Path()
path := gallery.Path
if path == "" {
// not valid for non-path-based galleries
return nil, nil
}

// only trim extension if gallery is file-based
trimExt := gallery.PrimaryFile() != nil
trimExt := gallery.PrimaryFileID != nil

var ret *ScrapedGallery

// populate performers, studio and tags based on scene path
if err := txn.WithTxn(ctx, s.txnManager, func(ctx context.Context) error {
path := gallery.Path()
path := gallery.Path
performers, err := autotagMatchPerformers(ctx, path, s.performerReader, trimExt)
if err != nil {
return fmt.Errorf("autotag scraper viaGallery: %w", err)
@@ -68,6 +68,7 @@ type TagFinder interface {

type GalleryFinder interface {
Find(ctx context.Context, id int) (*models.Gallery, error)
models.FileLoader
}

type Repository struct {
@@ -364,6 +365,11 @@ func (c Cache) getGallery(ctx context.Context, galleryID int) (*models.Gallery,
if err := txn.WithTxn(ctx, c.txnManager, func(ctx context.Context) error {
var err error
ret, err = c.repository.GalleryFinder.Find(ctx, galleryID)

if ret != nil {
err = ret.LoadFiles(ctx, c.repository.GalleryFinder)
}

return err
}); err != nil {
return nil, err
@@ -13,9 +13,9 @@ type queryURLParameters map[string]string

func queryURLParametersFromScene(scene *models.Scene) queryURLParameters {
ret := make(queryURLParameters)
ret["checksum"] = scene.Checksum()
ret["oshash"] = scene.OSHash()
ret["filename"] = filepath.Base(scene.Path())
ret["checksum"] = scene.Checksum
ret["oshash"] = scene.OSHash
ret["filename"] = filepath.Base(scene.Path)

if scene.Title != "" {
ret["title"] = scene.Title
@@ -53,8 +53,8 @@ func queryURLParametersFromGallery(gallery *models.Gallery) queryURLParameters {
ret := make(queryURLParameters)
ret["checksum"] = gallery.Checksum()

if gallery.Path() != "" {
ret["filename"] = filepath.Base(gallery.Path())
if gallery.Path != "" {
ret["filename"] = filepath.Base(gallery.Path)
}
if gallery.Title != "" {
ret["title"] = gallery.Title
@@ -229,8 +229,8 @@ func (s *stashScraper) scrapeSceneByScene(ctx context.Context, scene *models.Sce
Oshash *string `graphql:"oshash" json:"oshash"`
}

checksum := scene.Checksum()
oshash := scene.OSHash()
checksum := scene.Checksum
oshash := scene.OSHash

input := SceneHashInput{
Checksum: &checksum,

@@ -17,6 +17,7 @@ import (
"golang.org/x/text/language"

"github.com/Yamashou/gqlgenc/graphqljson"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/match"
@@ -33,6 +34,7 @@ import (
type SceneReader interface {
Find(ctx context.Context, id int) (*models.Scene, error)
models.StashIDLoader
models.VideoFileLoader
}

type PerformerReader interface {
@@ -140,9 +142,14 @@ func (c Client) FindStashBoxScenesByFingerprints(ctx context.Context, ids []int)
return fmt.Errorf("scene with id %d not found", sceneID)
}

if err := scene.LoadFiles(ctx, c.repository.Scene); err != nil {
return err
}

var sceneFPs []*graphql.FingerprintQueryInput

checksum := scene.Checksum()
for _, f := range scene.Files.List() {
checksum := f.Fingerprints.GetString(file.FingerprintTypeMD5)
if checksum != "" {
sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{
Hash: checksum,
@@ -150,7 +157,7 @@ func (c Client) FindStashBoxScenesByFingerprints(ctx context.Context, ids []int)
})
}

oshash := scene.OSHash()
oshash := f.Fingerprints.GetString(file.FingerprintTypeOshash)
if oshash != "" {
sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{
Hash: oshash,
@@ -158,7 +165,7 @@ func (c Client) FindStashBoxScenesByFingerprints(ctx context.Context, ids []int)
})
}

phash := scene.Phash()
phash := f.Fingerprints.GetInt64(file.FingerprintTypePhash)
if phash != 0 {
phashStr := utils.PhashToString(phash)
sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{
@@ -166,6 +173,7 @@ func (c Client) FindStashBoxScenesByFingerprints(ctx context.Context, ids []int)
Algorithm: graphql.FingerprintAlgorithmPhash,
})
}
}

fingerprints = append(fingerprints, sceneFPs)
}
@@ -232,6 +240,10 @@ func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []strin
return err
}

if err := scene.LoadFiles(ctx, qb); err != nil {
return err
}

stashIDs := scene.StashIDs.List()
sceneStashID := ""
for _, stashID := range stashIDs {
@@ -241,8 +253,11 @@ func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []strin
}

if sceneStashID != "" {
duration := scene.Duration()
if checksum := scene.Checksum(); checksum != "" && duration != 0 {
for _, f := range scene.Files.List() {
duration := f.Duration

if duration != 0 {
if checksum := f.Fingerprints.GetString(file.FingerprintTypeMD5); checksum != "" {
fingerprint := graphql.FingerprintInput{
Hash: checksum,
Algorithm: graphql.FingerprintAlgorithmMd5,
@@ -254,7 +269,7 @@ func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []strin
})
}

if oshash := scene.OSHash(); oshash != "" && duration != 0 {
if oshash := f.Fingerprints.GetString(file.FingerprintTypeOshash); oshash != "" {
fingerprint := graphql.FingerprintInput{
Hash: oshash,
Algorithm: graphql.FingerprintAlgorithmOshash,
@@ -266,7 +281,7 @@ func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []strin
})
}

if phash := scene.Phash(); phash != 0 && duration != 0 {
if phash := f.Fingerprints.GetInt64(file.FingerprintTypePhash); phash != 0 {
fingerprint := graphql.FingerprintInput{
Hash: utils.PhashToString(phash),
Algorithm: graphql.FingerprintAlgorithmPhash,
@@ -279,6 +294,8 @@ func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []strin
}
}
}
}
}

return nil
}); err != nil {
@@ -778,7 +795,7 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo
}
for _, stashID := range stashIDs {
c := stashID
if c.Endpoint == endpoint {
if stashID.Endpoint == endpoint {
studioDraft.ID = &c.StashID
break
}
@@ -787,8 +804,13 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo
}

fingerprints := []*graphql.FingerprintInput{}
duration := scene.Duration()
if oshash := scene.OSHash(); oshash != "" && duration != 0 {

// submit all file fingerprints
for _, f := range scene.Files.List() {
duration := f.Duration

if duration != 0 {
if oshash := f.Fingerprints.GetString(file.FingerprintTypeOshash); oshash != "" {
fingerprint := graphql.FingerprintInput{
Hash: oshash,
Algorithm: graphql.FingerprintAlgorithmOshash,
@@ -797,7 +819,7 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo
fingerprints = append(fingerprints, &fingerprint)
}

if checksum := scene.Checksum(); checksum != "" && duration != 0 {
if checksum := f.Fingerprints.GetString(file.FingerprintTypeMD5); checksum != "" {
fingerprint := graphql.FingerprintInput{
Hash: checksum,
Algorithm: graphql.FingerprintAlgorithmMd5,
@@ -806,7 +828,7 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo
fingerprints = append(fingerprints, &fingerprint)
}

if phash := scene.Phash(); phash != 0 && duration != 0 {
if phash := f.Fingerprints.GetInt64(file.FingerprintTypePhash); phash != 0 {
fingerprint := graphql.FingerprintInput{
Hash: utils.PhashToString(phash),
Algorithm: graphql.FingerprintAlgorithmPhash,
@@ -814,6 +836,8 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo
}
fingerprints = append(fingerprints, &fingerprint)
}
}
}
draft.Fingerprints = fingerprints

scenePerformers, err := pqb.FindBySceneID(ctx, scene.ID)
@@ -854,6 +878,7 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo
}
draft.Tags = tags

if imagePath != "" {
exists, _ := fsutil.FileExists(imagePath)
if exists {
file, err := os.Open(imagePath)
@@ -861,6 +886,7 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo
image = file
}
}
}

stashIDs := scene.StashIDs.List()
var stashID *string
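The net effect of the stash-box changes above: fingerprints are now gathered from every related file instead of only the scene's primary file. A standalone sketch of that shape (the types and field names here are illustrative stand-ins, not the graphql package's):

package main

import "fmt"

type fileInfo struct {
	md5    string
	oshash string
}

type fp struct {
	hash string
	algo string
}

// collect walks every file, emitting one entry per available fingerprint;
// previously only the primary file would have been inspected.
func collect(files []fileInfo) []fp {
	var out []fp
	for _, f := range files {
		if f.md5 != "" {
			out = append(out, fp{hash: f.md5, algo: "MD5"})
		}
		if f.oshash != "" {
			out = append(out, fp{hash: f.oshash, algo: "OSHASH"})
		}
	}
	return out
}

func main() {
	files := []fileInfo{{md5: "aaa"}, {oshash: "bbb"}}
	fmt.Println(len(collect(files))) // 2: one per file, not just the primary
}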
@@ -60,8 +60,17 @@ func (r *galleryRow) fromGallery(o models.Gallery) {
r.UpdatedAt = o.UpdatedAt
}

func (r *galleryRow) resolve() *models.Gallery {
return &models.Gallery{
type galleryQueryRow struct {
galleryRow
FolderPath zero.String `db:"folder_path"`
PrimaryFileID null.Int `db:"primary_file_id"`
PrimaryFileFolderPath zero.String `db:"primary_file_folder_path"`
PrimaryFileBasename zero.String `db:"primary_file_basename"`
PrimaryFileChecksum zero.String `db:"primary_file_checksum"`
}

func (r *galleryQueryRow) resolve() *models.Gallery {
ret := &models.Gallery{
ID: r.ID,
Title: r.Title.String,
URL: r.URL.String,
@@ -71,10 +80,18 @@ func (r *galleryRow) resolve() *models.Gallery {
Organized: r.Organized,
StudioID: nullIntPtr(r.StudioID),
FolderID: nullIntFolderIDPtr(r.FolderID),
// FolderPath: r.FolderPath.String,
PrimaryFileID: nullIntFileIDPtr(r.PrimaryFileID),
CreatedAt: r.CreatedAt,
UpdatedAt: r.UpdatedAt,
}

if r.PrimaryFileFolderPath.Valid && r.PrimaryFileBasename.Valid {
ret.Path = filepath.Join(r.PrimaryFileFolderPath.String, r.PrimaryFileBasename.String)
} else if r.FolderPath.Valid {
ret.Path = r.FolderPath.String
}

return ret
}

type galleryRowRecord struct {
@@ -184,14 +201,16 @@ func (qb *GalleryStore) Update(ctx context.Context, updatedObject *models.Galler
}
}

fileIDs := make([]file.ID, len(updatedObject.Files))
for i, f := range updatedObject.Files {
if updatedObject.Files.Loaded() {
fileIDs := make([]file.ID, len(updatedObject.Files.List()))
for i, f := range updatedObject.Files.List() {
fileIDs[i] = f.Base().ID
}

if err := galleriesFilesTableMgr.replaceJoins(ctx, updatedObject.ID, fileIDs); err != nil {
return err
}
}

return nil
}
@@ -235,7 +254,33 @@ func (qb *GalleryStore) Destroy(ctx context.Context, id int) error {
}

func (qb *GalleryStore) selectDataset() *goqu.SelectDataset {
return dialect.From(qb.table()).Select(qb.table().All())
table := qb.table()
files := fileTableMgr.table
folders := folderTableMgr.table
galleryFolder := folderTableMgr.table.As("gallery_folder")

return dialect.From(table).LeftJoin(
galleriesFilesJoinTable,
goqu.On(
galleriesFilesJoinTable.Col(galleryIDColumn).Eq(table.Col(idColumn)),
galleriesFilesJoinTable.Col("primary").Eq(1),
),
).LeftJoin(
files,
goqu.On(files.Col(idColumn).Eq(galleriesFilesJoinTable.Col(fileIDColumn))),
).LeftJoin(
folders,
goqu.On(folders.Col(idColumn).Eq(files.Col("parent_folder_id"))),
).LeftJoin(
galleryFolder,
goqu.On(galleryFolder.Col(idColumn).Eq(table.Col("folder_id"))),
).Select(
qb.table().All(),
galleriesFilesJoinTable.Col(fileIDColumn).As("primary_file_id"),
folders.Col("path").As("primary_file_folder_path"),
files.Col("basename").As("primary_file_basename"),
galleryFolder.Col("path").As("folder_path"),
)
}

func (qb *GalleryStore) get(ctx context.Context, q *goqu.SelectDataset) (*models.Gallery, error) {
@@ -255,7 +300,7 @@ func (qb *GalleryStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*
const single = false
var ret []*models.Gallery
if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error {
var f galleryRow
var f galleryQueryRow
if err := r.StructScan(&f); err != nil {
return err
}
@@ -268,38 +313,10 @@ func (qb *GalleryStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*
return nil, err
}

for _, s := range ret {
if err := qb.resolveRelationships(ctx, s); err != nil {
return nil, err
}
}

return ret, nil
}

func (qb *GalleryStore) resolveRelationships(ctx context.Context, s *models.Gallery) error {
var err error

// files
s.Files, err = qb.getFiles(ctx, s.ID)
if err != nil {
return fmt.Errorf("resolving gallery files: %w", err)
}

// folder
if s.FolderID != nil {
folder, err := qb.folderStore.Find(ctx, *s.FolderID)
if err != nil {
return fmt.Errorf("resolving gallery folder: %w", err)
}

s.FolderPath = folder.Path
}

return nil
}

func (qb *GalleryStore) getFiles(ctx context.Context, id int) ([]file.File, error) {
func (qb *GalleryStore) GetFiles(ctx context.Context, id int) ([]file.File, error) {
fileIDs, err := qb.filesRepository().get(ctx, id)
if err != nil {
return nil, err
@@ -317,6 +334,11 @@ func (qb *GalleryStore) getFiles(ctx context.Context, id int, erro
return ret, nil
}

func (qb *GalleryStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) {
const primaryOnly = false
return qb.filesRepository().getMany(ctx, ids, primaryOnly)
}

func (qb *GalleryStore) Find(ctx context.Context, id int) (*models.Gallery, error) {
q := qb.selectDataset().Where(qb.tableMgr.byID(id))
|
||||
return err
|
||||
}
|
||||
}
|
||||
if expected.Files.Loaded() {
|
||||
if err := actual.LoadFiles(ctx, db.Gallery); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// clear Path, Checksum, PrimaryFileID
|
||||
if expected.Path == "" {
|
||||
actual.Path = ""
|
||||
}
|
||||
if expected.PrimaryFileID == nil {
|
||||
actual.PrimaryFileID = nil
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
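Path and PrimaryFileID are now derived from the primary file at query time rather than stored on the gallery row, so this helper loads Files only when the expectation did, and zeroes the derived fields the expectation leaves unset; without that, a freshly queried gallery would never compare equal to a hand-built expected value. A sketch of the comparison flow it supports (assertGalleryEqual is a hypothetical helper, not part of the commit):

// assertGalleryEqual normalises derived fields, then compares.
func assertGalleryEqual(ctx context.Context, t *testing.T, qb *GalleryStore, expected models.Gallery, id int) {
    got, err := qb.Find(ctx, id)
    if err != nil {
        t.Fatalf("Find() error = %v", err)
    }
    if err := loadGalleryRelationships(ctx, expected, got); err != nil {
        t.Fatalf("loadGalleryRelationships() error = %v", err)
    }
    assert.Equal(t, expected, *got)
}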
@@ -71,7 +84,6 @@ func Test_galleryQueryBuilder_Create(t *testing.T) {
            SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}),
            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
            PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
            Files: []file.File{},
        },
        false,
    },
@@ -85,9 +97,9 @@ func Test_galleryQueryBuilder_Create(t *testing.T) {
            Rating: &rating,
            Organized: true,
            StudioID: &studioIDs[studioIdxWithScene],
            Files: []file.File{
            Files: models.NewRelatedFiles([]file.File{
                galleryFile,
            },
            }),
            CreatedAt: createdAt,
            UpdatedAt: updatedAt,
            SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}),
@@ -134,8 +146,8 @@ func Test_galleryQueryBuilder_Create(t *testing.T) {

            s := tt.newObject
            var fileIDs []file.ID
            if len(s.Files) > 0 {
                fileIDs = []file.ID{s.Files[0].Base().ID}
            if s.Files.Loaded() {
                fileIDs = []file.ID{s.Files.List()[0].Base().ID}
            }

            if err := qb.Create(ctx, &s, fileIDs); (err != nil) != tt.wantErr {
@@ -217,9 +229,9 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
            Rating: &rating,
            Organized: true,
            StudioID: &studioIDs[studioIdxWithScene],
            Files: []file.File{
            Files: models.NewRelatedFiles([]file.File{
                makeGalleryFileWithID(galleryIdxWithScene),
            },
            }),
            CreatedAt: createdAt,
            UpdatedAt: updatedAt,
            SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}),
@@ -232,9 +244,6 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
        "clear nullables",
        &models.Gallery{
            ID: galleryIDs[galleryIdxWithImage],
            Files: []file.File{
                makeGalleryFileWithID(galleryIdxWithImage),
            },
            SceneIDs: models.NewRelatedIDs([]int{}),
            TagIDs: models.NewRelatedIDs([]int{}),
            PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -248,9 +257,6 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
        "clear scene ids",
        &models.Gallery{
            ID: galleryIDs[galleryIdxWithScene],
            Files: []file.File{
                makeGalleryFileWithID(galleryIdxWithScene),
            },
            SceneIDs: models.NewRelatedIDs([]int{}),
            TagIDs: models.NewRelatedIDs([]int{}),
            PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -264,9 +270,6 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
        "clear tag ids",
        &models.Gallery{
            ID: galleryIDs[galleryIdxWithTag],
            Files: []file.File{
                makeGalleryFileWithID(galleryIdxWithTag),
            },
            SceneIDs: models.NewRelatedIDs([]int{}),
            TagIDs: models.NewRelatedIDs([]int{}),
            PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -280,9 +283,6 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
        "clear performer ids",
        &models.Gallery{
            ID: galleryIDs[galleryIdxWithPerformer],
            Files: []file.File{
                makeGalleryFileWithID(galleryIdxWithPerformer),
            },
            SceneIDs: models.NewRelatedIDs([]int{}),
            TagIDs: models.NewRelatedIDs([]int{}),
            PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -296,9 +296,6 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
        "invalid studio id",
        &models.Gallery{
            ID: galleryIDs[galleryIdxWithImage],
            Files: []file.File{
                makeGalleryFileWithID(galleryIdxWithImage),
            },
            Organized: true,
            StudioID: &invalidID,
            CreatedAt: createdAt,
@@ -310,9 +307,6 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
        "invalid scene id",
        &models.Gallery{
            ID: galleryIDs[galleryIdxWithImage],
            Files: []file.File{
                makeGalleryFileWithID(galleryIdxWithImage),
            },
            Organized: true,
            SceneIDs: models.NewRelatedIDs([]int{invalidID}),
            CreatedAt: createdAt,
@@ -324,9 +318,6 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
        "invalid tag id",
        &models.Gallery{
            ID: galleryIDs[galleryIdxWithImage],
            Files: []file.File{
                makeGalleryFileWithID(galleryIdxWithImage),
            },
            Organized: true,
            TagIDs: models.NewRelatedIDs([]int{invalidID}),
            CreatedAt: createdAt,
@@ -338,9 +329,6 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
        "invalid performer id",
        &models.Gallery{
            ID: galleryIDs[galleryIdxWithImage],
            Files: []file.File{
                makeGalleryFileWithID(galleryIdxWithImage),
            },
            Organized: true,
            PerformerIDs: models.NewRelatedIDs([]int{invalidID}),
            CreatedAt: createdAt,
@@ -385,9 +373,11 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
}

func clearGalleryFileIDs(gallery *models.Gallery) {
    for _, f := range gallery.Files {
    if gallery.Files.Loaded() {
        for _, f := range gallery.Files.List() {
            f.Base().ID = 0
        }
    }
}

func clearGalleryPartial() models.GalleryPartial {
@@ -459,9 +449,9 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) {
            Rating: &rating,
            Organized: true,
            StudioID: &studioIDs[studioIdxWithGallery],
            Files: []file.File{
            Files: models.NewRelatedFiles([]file.File{
                makeGalleryFile(galleryIdxWithImage),
            },
            }),
            CreatedAt: createdAt,
            UpdatedAt: updatedAt,
            SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdxWithGallery]}),
@@ -476,9 +466,9 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) {
        clearGalleryPartial(),
        models.Gallery{
            ID: galleryIDs[galleryIdxWithImage],
            Files: []file.File{
            Files: models.NewRelatedFiles([]file.File{
                makeGalleryFile(galleryIdxWithImage),
            },
            }),
            SceneIDs: models.NewRelatedIDs([]int{}),
            TagIDs: models.NewRelatedIDs([]int{}),
            PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -509,12 +499,12 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) {
                return
            }

            clearGalleryFileIDs(got)
            // load relationships
            if err := loadGalleryRelationships(ctx, tt.want, got); err != nil {
                t.Errorf("loadGalleryRelationships() error = %v", err)
                return
            }
            clearGalleryFileIDs(got)
            assert.Equal(tt.want, *got)

            s, err := qb.Find(ctx, tt.id)
@@ -522,12 +512,12 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) {
                t.Errorf("galleryQueryBuilder.Find() error = %v", err)
            }

            clearGalleryFileIDs(s)
            // load relationships
            if err := loadGalleryRelationships(ctx, tt.want, s); err != nil {
                t.Errorf("loadGalleryRelationships() error = %v", err)
                return
            }
            clearGalleryFileIDs(s)
            assert.Equal(tt.want, *s)
        })
    }
@@ -858,7 +848,7 @@ func makeGalleryWithID(index int) *models.Gallery {
        ret.Date = nil
    }

    ret.Files = []file.File{makeGalleryFile(index)}
    ret.Files = models.NewRelatedFiles([]file.File{makeGalleryFile(index)})

    return ret
}
@@ -908,13 +898,12 @@ func Test_galleryQueryBuilder_Find(t *testing.T) {
            }

            if got != nil {
                clearGalleryFileIDs(got)

                // load relationships
                if err := loadGalleryRelationships(ctx, *tt.want, got); err != nil {
                    t.Errorf("loadGalleryRelationships() error = %v", err)
                    return
                }
                clearGalleryFileIDs(got)
            }
            assert.Equal(tt.want, got)
        })
@@ -923,14 +912,13 @@ func Test_galleryQueryBuilder_Find(t *testing.T) {

func postFindGalleries(ctx context.Context, want []*models.Gallery, got []*models.Gallery) error {
    for i, s := range got {
        clearGalleryFileIDs(s)

        // load relationships
        if i < len(want) {
            if err := loadGalleryRelationships(ctx, *want[i], s); err != nil {
                return err
            }
        }
        clearGalleryFileIDs(s)
    }

    return nil
@@ -1490,7 +1478,7 @@ func TestGalleryQueryPath(t *testing.T) {
    assert.NotEqual(t, 0, count)

    for _, gallery := range got {
        verifyString(t, gallery.Path(), tt.input)
        verifyString(t, gallery.Path, tt.input)
    }
    })
}
@@ -1508,7 +1496,7 @@ func verifyGalleriesPath(ctx context.Context, t *testing.T, pathCriterion models
    }

    for _, gallery := range galleries {
        verifyString(t, gallery.Path(), pathCriterion)
        verifyString(t, gallery.Path, pathCriterion)
    }
}

@@ -1541,8 +1529,8 @@ func TestGalleryQueryPathOr(t *testing.T) {
        return nil
    }

    assert.Equal(t, gallery1Path, galleries[0].Path())
    assert.Equal(t, gallery2Path, galleries[1].Path())
    assert.Equal(t, gallery1Path, galleries[0].Path)
    assert.Equal(t, gallery2Path, galleries[1].Path)

    return nil
})
@@ -1575,7 +1563,7 @@ func TestGalleryQueryPathAndRating(t *testing.T) {
        return nil
    }

    assert.Equal(t, galleryPath, galleries[0].Path())
    assert.Equal(t, galleryPath, galleries[0].Path)
    assert.Equal(t, *galleryRating, *galleries[0].Rating)

    return nil
@@ -1610,7 +1598,7 @@ func TestGalleryQueryPathNotRating(t *testing.T) {
    galleries := queryGallery(ctx, t, sqb, &galleryFilter, nil)

    for _, gallery := range galleries {
        verifyString(t, gallery.Path(), pathCriterion)
        verifyString(t, gallery.Path, pathCriterion)
        ratingCriterion.Modifier = models.CriterionModifierNotEquals
        verifyIntPtr(t, gallery.Rating, ratingCriterion)
    }

@@ -4,6 +4,7 @@ import (
    "context"
    "database/sql"
    "fmt"
    "path/filepath"
    "time"

    "github.com/jmoiron/sqlx"
@@ -48,17 +49,35 @@ func (r *imageRow) fromImage(i models.Image) {
    r.UpdatedAt = i.UpdatedAt
}

func (r *imageRow) resolve() *models.Image {
    return &models.Image{
type imageQueryRow struct {
    imageRow
    PrimaryFileID null.Int `db:"primary_file_id"`
    PrimaryFileFolderPath zero.String `db:"primary_file_folder_path"`
    PrimaryFileBasename zero.String `db:"primary_file_basename"`
    PrimaryFileChecksum zero.String `db:"primary_file_checksum"`
}

func (r *imageQueryRow) resolve() *models.Image {
    ret := &models.Image{
        ID: r.ID,
        Title: r.Title.String,
        Rating: nullIntPtr(r.Rating),
        Organized: r.Organized,
        OCounter: r.OCounter,
        StudioID: nullIntPtr(r.StudioID),

        PrimaryFileID: nullIntFileIDPtr(r.PrimaryFileID),
        Checksum: r.PrimaryFileChecksum.String,

        CreatedAt: r.CreatedAt,
        UpdatedAt: r.UpdatedAt,
    }

    if r.PrimaryFileFolderPath.Valid && r.PrimaryFileBasename.Valid {
        ret.Path = filepath.Join(r.PrimaryFileFolderPath.String, r.PrimaryFileBasename.String)
    }

    return ret
}
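The query row embeds the plain image row and adds nullable columns from the joined tables; Path is only derivable when the LEFT JOIN actually matched a primary file, i.e. both folder path and basename are non-NULL. A small runnable sketch of that derivation, assuming the guregu zero types (gopkg.in/guregu/null.v4/zero) that these row structs appear to use:

package main

import (
    "fmt"
    "path/filepath"

    "gopkg.in/guregu/null.v4/zero"
)

// derivePath mirrors the Valid checks above: a NULL folder path or
// basename means the LEFT JOIN found no primary file, so no path exists.
func derivePath(folder, basename zero.String) string {
    if folder.Valid && basename.Valid {
        return filepath.Join(folder.String, basename.String)
    }
    return ""
}

func main() {
    fmt.Println(derivePath(zero.StringFrom("/media"), zero.StringFrom("img.jpg"))) // /media/img.jpg
    fmt.Println(derivePath(zero.String{}, zero.StringFrom("img.jpg")))             // ""
}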
type imageRowRecord struct {
@@ -203,15 +222,16 @@ func (qb *ImageStore) Update(ctx context.Context, updatedObject *models.Image) e
        }
    }

    fileIDs := make([]file.ID, len(updatedObject.Files))
    for i, f := range updatedObject.Files {
    if updatedObject.Files.Loaded() {
        fileIDs := make([]file.ID, len(updatedObject.Files.List()))
        for i, f := range updatedObject.Files.List() {
            fileIDs[i] = f.ID
        }

        if err := imagesFilesTableMgr.replaceJoins(ctx, updatedObject.ID, fileIDs); err != nil {
            return err
        }

    }
    return nil
}
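With file lists now lazily loaded, Update has to distinguish "caller never loaded the files" from "caller wants zero files". The Loaded() guard above rewrites the images_files join rows only when the relationship was actually populated; without it, a metadata-only update would silently drop every file association. A hypothetical illustration (updateTitleOnly is not part of the commit):

// updateTitleOnly never loads Files, so Update leaves the join table alone.
func updateTitleOnly(ctx context.Context, qb *ImageStore, id int, title string) error {
    img, err := qb.Find(ctx, id)
    if err != nil {
        return err
    }
    img.Title = title
    return qb.Update(ctx, img) // Files not Loaded() -> joins preserved
}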
@@ -247,7 +267,36 @@ func (qb *ImageStore) FindMany(ctx context.Context, ids []int) ([]*models.Image,
}

func (qb *ImageStore) selectDataset() *goqu.SelectDataset {
    return dialect.From(qb.table()).Select(qb.table().All())
    table := qb.table()
    files := fileTableMgr.table
    folders := folderTableMgr.table
    checksum := fingerprintTableMgr.table

    return dialect.From(table).LeftJoin(
        imagesFilesJoinTable,
        goqu.On(
            imagesFilesJoinTable.Col(imageIDColumn).Eq(table.Col(idColumn)),
            imagesFilesJoinTable.Col("primary").Eq(1),
        ),
    ).LeftJoin(
        files,
        goqu.On(files.Col(idColumn).Eq(imagesFilesJoinTable.Col(fileIDColumn))),
    ).LeftJoin(
        folders,
        goqu.On(folders.Col(idColumn).Eq(files.Col("parent_folder_id"))),
    ).LeftJoin(
        checksum,
        goqu.On(
            checksum.Col(fileIDColumn).Eq(imagesFilesJoinTable.Col(fileIDColumn)),
            checksum.Col("type").Eq(file.FingerprintTypeMD5),
        ),
    ).Select(
        qb.table().All(),
        imagesFilesJoinTable.Col(fileIDColumn).As("primary_file_id"),
        folders.Col("path").As("primary_file_folder_path"),
        files.Col("basename").As("primary_file_basename"),
        checksum.Col("fingerprint").As("primary_file_checksum"),
    )
}

func (qb *ImageStore) get(ctx context.Context, q *goqu.SelectDataset) (*models.Image, error) {
@@ -267,7 +316,7 @@ func (qb *ImageStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo
    const single = false
    var ret []*models.Image
    if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error {
        var f imageRow
        var f imageQueryRow
        if err := r.StructScan(&f); err != nil {
            return err
        }
@@ -280,28 +329,10 @@ func (qb *ImageStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo
        return nil, err
    }

    for _, i := range ret {
        if err := qb.resolveRelationships(ctx, i); err != nil {
            return nil, err
        }
    }

    return ret, nil
}

func (qb *ImageStore) resolveRelationships(ctx context.Context, i *models.Image) error {
    var err error

    // files
    i.Files, err = qb.getFiles(ctx, i.ID)
    if err != nil {
        return fmt.Errorf("resolving image files: %w", err)
    }

    return nil
}

func (qb *ImageStore) getFiles(ctx context.Context, id int) ([]*file.ImageFile, error) {
func (qb *ImageStore) GetFiles(ctx context.Context, id int) ([]*file.ImageFile, error) {
    fileIDs, err := qb.filesRepository().get(ctx, id)
    if err != nil {
        return nil, err
@@ -325,6 +356,11 @@ func (qb *ImageStore) getFiles(ctx context.Context, id int) ([]*file.ImageFile,
    return ret, nil
}

func (qb *ImageStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) {
    const primaryOnly = false
    return qb.filesRepository().getMany(ctx, ids, primaryOnly)
}

func (qb *ImageStore) find(ctx context.Context, id int) (*models.Image, error) {
    q := qb.selectDataset().Where(qb.tableMgr.byID(id))

@@ -428,16 +464,7 @@ func (qb *ImageStore) FindByGalleryID(ctx context.Context, galleryID int) ([]*mo
        galleriesImagesJoinTable.Col("gallery_id").Eq(galleryID),
    )

    q := qb.selectDataset().Prepared(true).LeftJoin(
        imagesFilesJoinTable,
        goqu.On(imagesFilesJoinTable.Col(imageIDColumn).Eq(table.Col(idColumn))),
    ).LeftJoin(
        fileTable,
        goqu.On(fileTable.Col(idColumn).Eq(imagesFilesJoinTable.Col(fileIDColumn))),
    ).LeftJoin(
        folderTable,
        goqu.On(folderTable.Col(idColumn).Eq(fileTable.Col("parent_folder_id"))),
    ).Where(
    q := qb.selectDataset().Prepared(true).Where(
        table.Col(idColumn).Eq(
            sq,
        ),

@@ -31,6 +31,22 @@ func loadImageRelationships(ctx context.Context, expected models.Image, actual *
        return err
        }
    }
    if expected.Files.Loaded() {
        if err := actual.LoadFiles(ctx, db.Image); err != nil {
            return err
        }
    }

    // clear Path, Checksum, PrimaryFileID
    if expected.Path == "" {
        actual.Path = ""
    }
    if expected.Checksum == "" {
        actual.Checksum = ""
    }
    if expected.PrimaryFileID == nil {
        actual.PrimaryFileID = nil
    }

    return nil
}
@@ -64,7 +80,6 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
            GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]}),
            PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}),
            Files: []*file.ImageFile{},
        },
        false,
    },
@@ -76,9 +91,11 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
            Organized: true,
            OCounter: ocounter,
            StudioID: &studioIDs[studioIdxWithImage],
            Files: []*file.ImageFile{
            Files: models.NewRelatedImageFiles([]*file.ImageFile{
                imageFile.(*file.ImageFile),
            },
            }),
            PrimaryFileID: &imageFile.Base().ID,
            Path: imageFile.Base().Path,
            CreatedAt: createdAt,
            UpdatedAt: updatedAt,
            GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
@@ -124,10 +141,11 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
            assert := assert.New(t)

            var fileIDs []file.ID
            for _, f := range tt.newObject.Files {
            if tt.newObject.Files.Loaded() {
                for _, f := range tt.newObject.Files.List() {
                    fileIDs = append(fileIDs, f.ID)
                }

            }
            s := tt.newObject
            if err := qb.Create(ctx, &models.ImageCreateInput{
                Image: &s,
@@ -174,9 +192,11 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
}

func clearImageFileIDs(image *models.Image) {
    for _, f := range image.Files {
    if image.Files.Loaded() {
        for _, f := range image.Files.List() {
            f.Base().ID = 0
        }
    }
}

func makeImageFileWithID(i int) *file.ImageFile {
@@ -208,9 +228,6 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
            Organized: true,
            OCounter: ocounter,
            StudioID: &studioIDs[studioIdxWithImage],
            Files: []*file.ImageFile{
                makeImageFileWithID(imageIdxWithGallery),
            },
            CreatedAt: createdAt,
            UpdatedAt: updatedAt,
            GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
@@ -223,9 +240,6 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
        "clear nullables",
        &models.Image{
            ID: imageIDs[imageIdxWithGallery],
            Files: []*file.ImageFile{
                makeImageFileWithID(imageIdxWithGallery),
            },
            GalleryIDs: models.NewRelatedIDs([]int{}),
            TagIDs: models.NewRelatedIDs([]int{}),
            PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -239,9 +253,6 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
        "clear gallery ids",
        &models.Image{
            ID: imageIDs[imageIdxWithGallery],
            Files: []*file.ImageFile{
                makeImageFileWithID(imageIdxWithGallery),
            },
            GalleryIDs: models.NewRelatedIDs([]int{}),
            TagIDs: models.NewRelatedIDs([]int{}),
            PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -255,9 +266,6 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
        "clear tag ids",
        &models.Image{
            ID: imageIDs[imageIdxWithTag],
            Files: []*file.ImageFile{
                makeImageFileWithID(imageIdxWithTag),
            },
            GalleryIDs: models.NewRelatedIDs([]int{}),
            TagIDs: models.NewRelatedIDs([]int{}),
            PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -271,9 +279,6 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
        "clear performer ids",
        &models.Image{
            ID: imageIDs[imageIdxWithPerformer],
            Files: []*file.ImageFile{
                makeImageFileWithID(imageIdxWithPerformer),
            },
            GalleryIDs: models.NewRelatedIDs([]int{}),
            TagIDs: models.NewRelatedIDs([]int{}),
            PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -287,9 +292,6 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
        "invalid studio id",
        &models.Image{
            ID: imageIDs[imageIdxWithGallery],
            Files: []*file.ImageFile{
                makeImageFileWithID(imageIdxWithGallery),
            },
            Organized: true,
            StudioID: &invalidID,
            CreatedAt: createdAt,
@@ -301,9 +303,6 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
        "invalid gallery id",
        &models.Image{
            ID: imageIDs[imageIdxWithGallery],
            Files: []*file.ImageFile{
                makeImageFileWithID(imageIdxWithGallery),
            },
            Organized: true,
            GalleryIDs: models.NewRelatedIDs([]int{invalidID}),
            CreatedAt: createdAt,
@@ -315,9 +314,6 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
        "invalid tag id",
        &models.Image{
            ID: imageIDs[imageIdxWithGallery],
            Files: []*file.ImageFile{
                makeImageFileWithID(imageIdxWithGallery),
            },
            Organized: true,
            TagIDs: models.NewRelatedIDs([]int{invalidID}),
            CreatedAt: createdAt,
@@ -329,9 +325,6 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
        "invalid performer id",
        &models.Image{
            ID: imageIDs[imageIdxWithGallery],
            Files: []*file.ImageFile{
                makeImageFileWithID(imageIdxWithGallery),
            },
            Organized: true,
            PerformerIDs: models.NewRelatedIDs([]int{invalidID}),
            CreatedAt: createdAt,
@@ -433,9 +426,9 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) {
            Organized: true,
            OCounter: ocounter,
            StudioID: &studioIDs[studioIdxWithImage],
            Files: []*file.ImageFile{
            Files: models.NewRelatedImageFiles([]*file.ImageFile{
                makeImageFile(imageIdx1WithGallery),
            },
            }),
            CreatedAt: createdAt,
            UpdatedAt: updatedAt,
            GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
@@ -451,9 +444,9 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) {
        models.Image{
            ID: imageIDs[imageIdx1WithGallery],
            OCounter: getOCounter(imageIdx1WithGallery),
            Files: []*file.ImageFile{
            Files: models.NewRelatedImageFiles([]*file.ImageFile{
                makeImageFile(imageIdx1WithGallery),
            },
            }),
            GalleryIDs: models.NewRelatedIDs([]int{}),
            TagIDs: models.NewRelatedIDs([]int{}),
            PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -484,12 +477,12 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) {
                return
            }

            clearImageFileIDs(got)
            // load relationships
            if err := loadImageRelationships(ctx, tt.want, got); err != nil {
                t.Errorf("loadImageRelationships() error = %v", err)
                return
            }
            clearImageFileIDs(got)

            assert.Equal(tt.want, *got)

@@ -498,12 +491,12 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) {
                t.Errorf("imageQueryBuilder.Find() error = %v", err)
            }

            clearImageFileIDs(s)
            // load relationships
            if err := loadImageRelationships(ctx, tt.want, s); err != nil {
                t.Errorf("loadImageRelationships() error = %v", err)
                return
            }
            clearImageFileIDs(s)
            assert.Equal(tt.want, *s)
        })
    }
@@ -952,7 +945,7 @@ func makeImageWithID(index int) *models.Image {
    ret := makeImage(index)
    ret.ID = imageIDs[index]

    ret.Files = []*file.ImageFile{makeImageFile(index)}
    ret.Files = models.NewRelatedImageFiles([]*file.ImageFile{makeImageFile(index)})

    return ret
}
@@ -1002,13 +995,12 @@ func Test_imageQueryBuilder_Find(t *testing.T) {
            }

            if got != nil {
                clearImageFileIDs(got)

                // load relationships
                if err := loadImageRelationships(ctx, *tt.want, got); err != nil {
                    t.Errorf("loadImageRelationships() error = %v", err)
                    return
                }
                clearImageFileIDs(got)
            }
            assert.Equal(tt.want, got)
        })
@@ -1017,14 +1009,13 @@ func Test_imageQueryBuilder_Find(t *testing.T) {

func postFindImages(ctx context.Context, want []*models.Image, got []*models.Image) error {
    for i, s := range got {
        clearImageFileIDs(s)

        // load relationships
        if i < len(want) {
            if err := loadImageRelationships(ctx, *want[i], s); err != nil {
                return err
            }
        }
        clearImageFileIDs(s)
    }

    return nil
@@ -1546,7 +1537,7 @@ func verifyImagePath(t *testing.T, pathCriterion models.StringCriterionInput, ex
    assert.Equal(t, expected, len(images), "number of returned images")

    for _, image := range images {
        verifyString(t, image.Path(), pathCriterion)
        verifyString(t, image.Path, pathCriterion)
    }

    return nil
@@ -1582,8 +1573,8 @@ func TestImageQueryPathOr(t *testing.T) {
        return nil
    }

    assert.Equal(t, image1Path, images[0].Path())
    assert.Equal(t, image2Path, images[1].Path())
    assert.Equal(t, image1Path, images[0].Path)
    assert.Equal(t, image2Path, images[1].Path)

    return nil
})
@@ -1613,7 +1604,7 @@ func TestImageQueryPathAndRating(t *testing.T) {
    images := queryImages(ctx, t, sqb, &imageFilter, nil)

    assert.Len(t, images, 1)
    assert.Equal(t, imagePath, images[0].Path())
    assert.Equal(t, imagePath, images[0].Path)
    assert.Equal(t, int(imageRating.Int64), *images[0].Rating)

    return nil
@@ -1648,7 +1639,7 @@ func TestImageQueryPathNotRating(t *testing.T) {
    images := queryImages(ctx, t, sqb, &imageFilter, nil)

    for _, image := range images {
        verifyString(t, image.Path(), pathCriterion)
        verifyString(t, image.Path, pathCriterion)
        ratingCriterion.Modifier = models.CriterionModifierNotEquals
        verifyIntPtr(t, image.Rating, ratingCriterion)
    }
@@ -1802,7 +1793,12 @@ func verifyImagesResolution(t *testing.T, resolution models.ResolutionEnum) {
    }

    for _, image := range images {
        verifyImageResolution(t, image.Files[0].Height, resolution)
        if err := image.LoadPrimaryFile(ctx, db.File); err != nil {
            t.Errorf("Error loading primary file: %s", err.Error())
            return nil
        }

        verifyImageResolution(t, image.Files.Primary().Height, resolution)
    }

    return nil

@@ -477,6 +477,61 @@ type filesRepository struct {
    repository
}

type relatedFileRow struct {
    ID int `db:"id"`
    FileID file.ID `db:"file_id"`
    Primary bool `db:"primary"`
}

func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bool) ([][]file.ID, error) {
    var primaryClause string
    if primaryOnly {
        primaryClause = " AND `primary` = 1"
    }

    query := fmt.Sprintf("SELECT %s as id, file_id, `primary` from %s WHERE %[1]s IN %[3]s%s", r.idColumn, r.tableName, getInBinding(len(ids)), primaryClause)

    idi := make([]interface{}, len(ids))
    for i, id := range ids {
        idi[i] = id
    }

    var fileRows []relatedFileRow
    if err := r.queryFunc(ctx, query, idi, false, func(rows *sqlx.Rows) error {
        var f relatedFileRow

        if err := rows.StructScan(&f); err != nil {
            return err
        }

        fileRows = append(fileRows, f)

        return nil
    }); err != nil {
        return nil, err
    }

    ret := make([][]file.ID, len(ids))
    idToIndex := make(map[int]int)
    for i, id := range ids {
        idToIndex[id] = i
    }

    for _, row := range fileRows {
        id := row.ID
        fileID := row.FileID

        if row.Primary {
            // prepend to list
            ret[idToIndex[id]] = append([]file.ID{fileID}, ret[idToIndex[id]]...)
        } else {
            ret[idToIndex[id]] = append(ret[idToIndex[id]], row.FileID)
        }
    }

    return ret, nil
}
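getMany returns one slice per input ID, ordered like the input, and prepends the primary file so callers can rely on index 0 when a primary exists. A small usage sketch under those assumptions (primaryFileIDs is a hypothetical helper, not part of the commit):

// primaryFileIDs picks each object's primary file ID out of a getMany result.
func primaryFileIDs(ids []int, perObject [][]file.ID) map[int]*file.ID {
    ret := make(map[int]*file.ID, len(ids))
    for i, id := range ids {
        if len(perObject[i]) > 0 {
            fid := perObject[i][0] // the primary file, when flagged, sorts first
            ret[id] = &fid
        } else {
            ret[id] = nil // object has no files
        }
    }
    return ret
}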
func (r *filesRepository) get(ctx context.Context, id int) ([]file.ID, error) {
    query := fmt.Sprintf("SELECT file_id, `primary` from %s WHERE %s = ?", r.tableName, r.idColumn)

@@ -82,8 +82,17 @@ func (r *sceneRow) fromScene(o models.Scene) {
    r.UpdatedAt = o.UpdatedAt
}

func (r *sceneRow) resolve() *models.Scene {
    return &models.Scene{
type sceneQueryRow struct {
    sceneRow
    PrimaryFileID null.Int `db:"primary_file_id"`
    PrimaryFileFolderPath zero.String `db:"primary_file_folder_path"`
    PrimaryFileBasename zero.String `db:"primary_file_basename"`
    PrimaryFileOshash zero.String `db:"primary_file_oshash"`
    PrimaryFileChecksum zero.String `db:"primary_file_checksum"`
}

func (r *sceneQueryRow) resolve() *models.Scene {
    ret := &models.Scene{
        ID: r.ID,
        Title: r.Title.String,
        Details: r.Details.String,
@@ -93,9 +102,20 @@ func (r *sceneRow) resolve() *models.Scene {
        Organized: r.Organized,
        OCounter: r.OCounter,
        StudioID: nullIntPtr(r.StudioID),

        PrimaryFileID: nullIntFileIDPtr(r.PrimaryFileID),
        OSHash: r.PrimaryFileOshash.String,
        Checksum: r.PrimaryFileChecksum.String,

        CreatedAt: r.CreatedAt,
        UpdatedAt: r.UpdatedAt,
    }

    if r.PrimaryFileFolderPath.Valid && r.PrimaryFileBasename.Valid {
        ret.Path = filepath.Join(r.PrimaryFileFolderPath.String, r.PrimaryFileBasename.String)
    }

    return ret
}

type sceneRowRecord struct {
@@ -278,14 +298,16 @@ func (qb *SceneStore) Update(ctx context.Context, updatedObject *models.Scene) e
        }
    }

    fileIDs := make([]file.ID, len(updatedObject.Files))
    for i, f := range updatedObject.Files {
    if updatedObject.Files.Loaded() {
        fileIDs := make([]file.ID, len(updatedObject.Files.List()))
        for i, f := range updatedObject.Files.List() {
            fileIDs[i] = f.ID
        }

        if err := scenesFilesTableMgr.replaceJoins(ctx, updatedObject.ID, fileIDs); err != nil {
            return err
        }
    }

    return nil
}
@@ -333,7 +355,43 @@ func (qb *SceneStore) FindMany(ctx context.Context, ids []int) ([]*models.Scene,

func (qb *SceneStore) selectDataset() *goqu.SelectDataset {
    table := qb.table()
    return dialect.From(table).Select(table.All())
    files := fileTableMgr.table
    folders := folderTableMgr.table
    checksum := fingerprintTableMgr.table.As("fingerprint_md5")
    oshash := fingerprintTableMgr.table.As("fingerprint_oshash")

    return dialect.From(table).LeftJoin(
        scenesFilesJoinTable,
        goqu.On(
            scenesFilesJoinTable.Col(sceneIDColumn).Eq(table.Col(idColumn)),
            scenesFilesJoinTable.Col("primary").Eq(1),
        ),
    ).LeftJoin(
        files,
        goqu.On(files.Col(idColumn).Eq(scenesFilesJoinTable.Col(fileIDColumn))),
    ).LeftJoin(
        folders,
        goqu.On(folders.Col(idColumn).Eq(files.Col("parent_folder_id"))),
    ).LeftJoin(
        checksum,
        goqu.On(
            checksum.Col(fileIDColumn).Eq(scenesFilesJoinTable.Col(fileIDColumn)),
            checksum.Col("type").Eq(file.FingerprintTypeMD5),
        ),
    ).LeftJoin(
        oshash,
        goqu.On(
            oshash.Col(fileIDColumn).Eq(scenesFilesJoinTable.Col(fileIDColumn)),
            oshash.Col("type").Eq(file.FingerprintTypeOshash),
        ),
    ).Select(
        qb.table().All(),
        scenesFilesJoinTable.Col(fileIDColumn).As("primary_file_id"),
        folders.Col("path").As("primary_file_folder_path"),
        files.Col("basename").As("primary_file_basename"),
        checksum.Col("fingerprint").As("primary_file_checksum"),
        oshash.Col("fingerprint").As("primary_file_oshash"),
    )
}
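The scene dataset joins the fingerprints table twice, once per fingerprint type, by aliasing it; each alias contributes one column to the final row. A standalone runnable sketch of that goqu pattern, with illustrative table and column names:

package main

import (
    "fmt"

    "github.com/doug-martin/goqu/v9"
)

func main() {
    files := goqu.T("files")
    // the same table joined twice under different aliases
    md5 := goqu.T("files_fingerprints").As("fingerprint_md5")
    oshash := goqu.T("files_fingerprints").As("fingerprint_oshash")

    sql, _, _ := goqu.From(files).
        LeftJoin(md5, goqu.On(
            md5.Col("file_id").Eq(files.Col("id")),
            md5.Col("type").Eq("md5"),
        )).
        LeftJoin(oshash, goqu.On(
            oshash.Col("file_id").Eq(files.Col("id")),
            oshash.Col("type").Eq("oshash"),
        )).
        Select(
            files.Col("id"),
            md5.Col("fingerprint").As("primary_file_checksum"),
            oshash.Col("fingerprint").As("primary_file_oshash"),
        ).ToSQL()

    fmt.Println(sql)
}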
func (qb *SceneStore) get(ctx context.Context, q *goqu.SelectDataset) (*models.Scene, error) {
@@ -353,7 +411,7 @@ func (qb *SceneStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo
    const single = false
    var ret []*models.Scene
    if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error {
        var f sceneRow
        var f sceneQueryRow
        if err := r.StructScan(&f); err != nil {
            return err
        }
@@ -366,28 +424,10 @@ func (qb *SceneStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo
        return nil, err
    }

    for _, s := range ret {
        if err := qb.resolveRelationships(ctx, s); err != nil {
            return nil, err
        }
    }

    return ret, nil
}

func (qb *SceneStore) resolveRelationships(ctx context.Context, s *models.Scene) error {
    var err error

    // files
    s.Files, err = qb.getFiles(ctx, s.ID)
    if err != nil {
        return fmt.Errorf("resolving scene files: %w", err)
    }

    return nil
}

func (qb *SceneStore) getFiles(ctx context.Context, id int) ([]*file.VideoFile, error) {
func (qb *SceneStore) GetFiles(ctx context.Context, id int) ([]*file.VideoFile, error) {
    fileIDs, err := qb.filesRepository().get(ctx, id)
    if err != nil {
        return nil, err
@@ -411,6 +451,11 @@ func (qb *SceneStore) getFiles(ctx context.Context, id int) ([]*file.VideoFile,
    return ret, nil
}

func (qb *SceneStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) {
    const primaryOnly = false
    return qb.filesRepository().getMany(ctx, ids, primaryOnly)
}

func (qb *SceneStore) find(ctx context.Context, id int) (*models.Scene, error) {
    q := qb.selectDataset().Where(qb.tableMgr.byID(id))

@@ -680,16 +725,8 @@ func (qb *SceneStore) All(ctx context.Context) ([]*models.Scene, error) {
    table := qb.table()
    fileTable := fileTableMgr.table
    folderTable := folderTableMgr.table
    return qb.getMany(ctx, qb.selectDataset().LeftJoin(
        scenesFilesJoinTable,
        goqu.On(scenesFilesJoinTable.Col(sceneIDColumn).Eq(table.Col(idColumn))),
    ).LeftJoin(
        fileTable,
        goqu.On(fileTable.Col(idColumn).Eq(scenesFilesJoinTable.Col(fileIDColumn))),
    ).LeftJoin(
        folderTable,
        goqu.On(folderTable.Col(idColumn).Eq(fileTable.Col("parent_folder_id"))),
    ).Order(

    return qb.getMany(ctx, qb.selectDataset().Order(
        folderTable.Col("path").Asc(),
        fileTable.Col("basename").Asc(),
        table.Col("date").Asc(),

@@ -47,6 +47,25 @@ func loadSceneRelationships(ctx context.Context, expected models.Scene, actual *
            return err
        }
    }
    if expected.Files.Loaded() {
        if err := actual.LoadFiles(ctx, db.Scene); err != nil {
            return err
        }
    }

    // clear Path, Checksum, PrimaryFileID
    if expected.Path == "" {
        actual.Path = ""
    }
    if expected.Checksum == "" {
        actual.Checksum = ""
    }
    if expected.OSHash == "" {
        actual.OSHash = ""
    }
    if expected.PrimaryFileID == nil {
        actual.PrimaryFileID = nil
    }

    return nil
}
@@ -113,7 +132,6 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
                Endpoint: endpoint2,
            },
        }),
        Files: []*file.VideoFile{},
        },
        false,
    },
@@ -128,9 +146,9 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
            Organized: true,
            OCounter: ocounter,
            StudioID: &studioIDs[studioIdxWithScene],
            Files: []*file.VideoFile{
            Files: models.NewRelatedVideoFiles([]*file.VideoFile{
                videoFile.(*file.VideoFile),
            },
            }),
            CreatedAt: createdAt,
            UpdatedAt: updatedAt,
            GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
@@ -208,9 +226,11 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
            assert := assert.New(t)

            var fileIDs []file.ID
            for _, f := range tt.newObject.Files {
            if tt.newObject.Files.Loaded() {
                for _, f := range tt.newObject.Files.List() {
                    fileIDs = append(fileIDs, f.ID)
                }
            }

            s := tt.newObject
            if err := qb.Create(ctx, &s, fileIDs); (err != nil) != tt.wantErr {
@@ -258,9 +278,11 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
}

func clearSceneFileIDs(scene *models.Scene) {
    for _, f := range scene.Files {
    if scene.Files.Loaded() {
        for _, f := range scene.Files.List() {
            f.Base().ID = 0
        }
    }
}

func makeSceneFileWithID(i int) *file.VideoFile {
@@ -297,9 +319,6 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
        "full",
        &models.Scene{
            ID: sceneIDs[sceneIdxWithGallery],
            Files: []*file.VideoFile{
                makeSceneFileWithID(sceneIdxWithGallery),
            },
            Title: title,
            Details: details,
            URL: url,
@@ -340,9 +359,6 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
        "clear nullables",
        &models.Scene{
            ID: sceneIDs[sceneIdxWithSpacedName],
            Files: []*file.VideoFile{
                makeSceneFileWithID(sceneIdxWithSpacedName),
            },
            GalleryIDs: models.NewRelatedIDs([]int{}),
            TagIDs: models.NewRelatedIDs([]int{}),
            PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -355,9 +371,6 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
        "clear gallery ids",
        &models.Scene{
            ID: sceneIDs[sceneIdxWithGallery],
            Files: []*file.VideoFile{
                makeSceneFileWithID(sceneIdxWithGallery),
            },
            GalleryIDs: models.NewRelatedIDs([]int{}),
        },
        false,
@@ -366,9 +379,6 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
        "clear tag ids",
        &models.Scene{
            ID: sceneIDs[sceneIdxWithTag],
            Files: []*file.VideoFile{
                makeSceneFileWithID(sceneIdxWithTag),
            },
            TagIDs: models.NewRelatedIDs([]int{}),
        },
        false,
@@ -377,9 +387,6 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
        "clear performer ids",
        &models.Scene{
            ID: sceneIDs[sceneIdxWithPerformer],
            Files: []*file.VideoFile{
                makeSceneFileWithID(sceneIdxWithPerformer),
            },
            PerformerIDs: models.NewRelatedIDs([]int{}),
        },
        false,
@@ -388,9 +395,6 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
        "clear movies",
        &models.Scene{
            ID: sceneIDs[sceneIdxWithMovie],
            Files: []*file.VideoFile{
                makeSceneFileWithID(sceneIdxWithMovie),
            },
            Movies: models.NewRelatedMovies([]models.MoviesScenes{}),
        },
        false,
@@ -399,9 +403,6 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
        "invalid studio id",
        &models.Scene{
            ID: sceneIDs[sceneIdxWithGallery],
            Files: []*file.VideoFile{
                makeSceneFileWithID(sceneIdxWithGallery),
            },
            StudioID: &invalidID,
        },
        true,
@@ -410,9 +411,6 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
        "invalid gallery id",
        &models.Scene{
            ID: sceneIDs[sceneIdxWithGallery],
            Files: []*file.VideoFile{
                makeSceneFileWithID(sceneIdxWithGallery),
            },
            GalleryIDs: models.NewRelatedIDs([]int{invalidID}),
        },
        true,
@@ -421,9 +419,6 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
        "invalid tag id",
        &models.Scene{
            ID: sceneIDs[sceneIdxWithGallery],
            Files: []*file.VideoFile{
                makeSceneFileWithID(sceneIdxWithGallery),
            },
            TagIDs: models.NewRelatedIDs([]int{invalidID}),
        },
        true,
@@ -432,9 +427,6 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
        "invalid performer id",
        &models.Scene{
            ID: sceneIDs[sceneIdxWithGallery],
            Files: []*file.VideoFile{
                makeSceneFileWithID(sceneIdxWithGallery),
            },
            PerformerIDs: models.NewRelatedIDs([]int{invalidID}),
        },
        true,
@@ -443,9 +435,6 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
        "invalid movie id",
        &models.Scene{
            ID: sceneIDs[sceneIdxWithSpacedName],
            Files: []*file.VideoFile{
                makeSceneFileWithID(sceneIdxWithSpacedName),
            },
            Movies: models.NewRelatedMovies([]models.MoviesScenes{
                {
                    MovieID: invalidID,
@@ -585,9 +574,9 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
        },
        models.Scene{
            ID: sceneIDs[sceneIdxWithSpacedName],
            Files: []*file.VideoFile{
            Files: models.NewRelatedVideoFiles([]*file.VideoFile{
                makeSceneFile(sceneIdxWithSpacedName),
            },
            }),
            Title: title,
            Details: details,
            URL: url,
@@ -630,9 +619,9 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
        clearScenePartial(),
        models.Scene{
            ID: sceneIDs[sceneIdxWithSpacedName],
            Files: []*file.VideoFile{
            Files: models.NewRelatedVideoFiles([]*file.VideoFile{
                makeSceneFile(sceneIdxWithSpacedName),
            },
            }),
            GalleryIDs: models.NewRelatedIDs([]int{}),
            TagIDs: models.NewRelatedIDs([]int{}),
            PerformerIDs: models.NewRelatedIDs([]int{}),
@@ -665,15 +654,15 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
                return
            }

            // ignore file ids
            clearSceneFileIDs(got)

            // load relationships
            if err := loadSceneRelationships(ctx, tt.want, got); err != nil {
                t.Errorf("loadSceneRelationships() error = %v", err)
                return
            }

            // ignore file ids
            clearSceneFileIDs(got)

            assert.Equal(tt.want, *got)

            s, err := qb.Find(ctx, tt.id)
@@ -681,14 +670,13 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
                t.Errorf("sceneQueryBuilder.Find() error = %v", err)
            }

            // ignore file ids
            clearSceneFileIDs(s)

            // load relationships
            if err := loadSceneRelationships(ctx, tt.want, s); err != nil {
                t.Errorf("loadSceneRelationships() error = %v", err)
                return
            }
            // ignore file ids
            clearSceneFileIDs(s)

            assert.Equal(tt.want, *s)
        })
@@ -1338,7 +1326,7 @@ func makeSceneWithID(index int) *models.Scene {
        ret.Date = nil
    }

    ret.Files = []*file.VideoFile{makeSceneFile(index)}
    ret.Files = models.NewRelatedVideoFiles([]*file.VideoFile{makeSceneFile(index)})

    return ret
}
@@ -1401,13 +1389,13 @@ func Test_sceneQueryBuilder_Find(t *testing.T) {
            }

            if got != nil {
                clearSceneFileIDs(got)

                // load relationships
                if err := loadSceneRelationships(ctx, *tt.want, got); err != nil {
                    t.Errorf("loadSceneRelationships() error = %v", err)
                    return nil
                }

                clearSceneFileIDs(got)
            }

            assert.Equal(tt.want, got)
@@ -1419,14 +1407,13 @@ func Test_sceneQueryBuilder_Find(t *testing.T) {

func postFindScenes(ctx context.Context, want []*models.Scene, got []*models.Scene) error {
    for i, s := range got {
        clearSceneFileIDs(s)

        // load relationships
        if i < len(want) {
            if err := loadSceneRelationships(ctx, *want[i], s); err != nil {
                return err
            }
        }
        clearSceneFileIDs(s)
    }

    return nil
@@ -1935,7 +1922,7 @@ func TestSceneWall(t *testing.T) {
    scene := scenes[0]
    assert.Equal(t, sceneIDs[sceneIdx], scene.ID)
    scenePath := getFilePath(folderIdxWithSceneFiles, getSceneBasename(sceneIdx))
    assert.Equal(t, scenePath, scene.Path())
    assert.Equal(t, scenePath, scene.Path)

    wallQuery = "not exist"
    scenes, err = sqb.Wall(ctx, &wallQuery)
@@ -2248,8 +2235,8 @@ func TestSceneQueryPathOr(t *testing.T) {
    if !assert.Len(t, scenes, 2) {
        return nil
    }
    assert.Equal(t, scene1Path, scenes[0].Path())
    assert.Equal(t, scene2Path, scenes[1].Path())
    assert.Equal(t, scene1Path, scenes[0].Path)
    assert.Equal(t, scene2Path, scenes[1].Path)

    return nil
})
@@ -2281,7 +2268,7 @@ func TestSceneQueryPathAndRating(t *testing.T) {
    if !assert.Len(t, scenes, 1) {
        return nil
    }
    assert.Equal(t, scenePath, scenes[0].Path())
    assert.Equal(t, scenePath, scenes[0].Path)
    assert.Equal(t, sceneRating, *scenes[0].Rating)

    return nil
@@ -2316,7 +2303,7 @@ func TestSceneQueryPathNotRating(t *testing.T) {
    scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)

    for _, scene := range scenes {
        verifyString(t, scene.Path(), pathCriterion)
        verifyString(t, scene.Path, pathCriterion)
        ratingCriterion.Modifier = models.CriterionModifierNotEquals
        verifyIntPtr(t, scene.Rating, ratingCriterion)
    }
@@ -2394,7 +2381,7 @@ func verifyScenesPath(t *testing.T, pathCriterion models.StringCriterionInput) {
    scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)

    for _, scene := range scenes {
        verifyString(t, scene.Path(), pathCriterion)
        verifyString(t, scene.Path, pathCriterion)
    }

    return nil
@@ -2662,7 +2649,12 @@ func verifyScenesDuration(t *testing.T, durationCriterion models.IntCriterionInp
    scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)

    for _, scene := range scenes {
        duration := scene.Duration()
        if err := scene.LoadPrimaryFile(ctx, db.File); err != nil {
            t.Errorf("Error querying scene files: %v", err)
            return nil
        }

        duration := scene.Files.Primary().Duration
        if durationCriterion.Modifier == models.CriterionModifierEquals {
            assert.True(t, duration >= float64(durationCriterion.Value) && duration < float64(durationCriterion.Value+1))
        } else if durationCriterion.Modifier == models.CriterionModifierNotEquals {
@@ -2732,7 +2724,11 @@ func verifyScenesResolution(t *testing.T, resolution models.ResolutionEnum) {
    scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)

    for _, scene := range scenes {
        f := scene.PrimaryFile()
        if err := scene.LoadPrimaryFile(ctx, db.File); err != nil {
            t.Errorf("Error querying scene files: %v", err)
            return nil
        }
        f := scene.Files.Primary()
        height := 0
        if f != nil {
            height = f.Height

@@ -66,6 +66,7 @@ func WithDatabase(ctx context.Context, p DatabaseProvider, fn TxnFunc) error {

type Retryer struct {
    Manager Manager
    // use value < 0 to retry forever
    Retries int
    OnFail func(ctx context.Context, err error, attempt int) error
}
@@ -73,7 +74,7 @@ type Retryer struct {
func (r Retryer) WithTxn(ctx context.Context, fn TxnFunc) error {
    var attempt int
    var err error
    for attempt = 1; attempt <= r.Retries; attempt++ {
    for attempt = 1; attempt <= r.Retries || r.Retries < 0; attempt++ {
        err = WithTxn(ctx, r.Manager, fn)

        if err == nil {
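This is the "retry locked transactions forever" change from the commit message: with Retries set to a negative value, the condition attempt <= r.Retries || r.Retries < 0 never exhausts, so WithTxn keeps retrying until fn succeeds (or, presumably, until the OnFail hook in the elided part of the loop reports a terminal error). A sketch of wiring it up, in the package context of this diff (withLockedRetry is a hypothetical helper, not part of the commit):

// withLockedRetry retries a transaction indefinitely, e.g. while another
// writer holds the SQLite lock.
func withLockedRetry(ctx context.Context, mgr Manager, fn TxnFunc) error {
    r := Retryer{
        Manager: mgr,
        Retries: -1, // < 0: retry forever (see the comment on Retries above)
        OnFail: func(ctx context.Context, err error, attempt int) error {
            // hypothetical hook: inspect err or back off, then keep retrying
            return nil
        },
    }
    return r.WithTxn(ctx, fn)
}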
@@ -171,7 +171,7 @@ export const App: React.FC = () => {
    }

    function maybeRenderReleaseNotes() {
        if (setupMatch) {
        if (setupMatch || config.loading || config.error) {
            return;
        }